gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/* ************************************************************************
LEBAH PORTAL FRAMEWORK
Copyright (C) 2007 Shamsul Bahrin
* ************************************************************************ */
package lebah.portal;
import java.io.PrintWriter;
import java.util.Hashtable;
import java.util.Vector;
import javax.portlet.GenericPortlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletMode;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.WindowState;
import javax.servlet.ServletConfig;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import lebah.db.DbException;
import lebah.portal.db.CustomClass;
import lebah.portal.db.UserPage;
import lebah.portal.db.UserTrackerLog;
import lebah.portal.element.Module2;
import lebah.portal.velocity.VTemplate;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
/**
* @author Shamsul Bahrin Abd Mutalib
* @version 1.01
*/
public class DisplayContent2 {
/**
 * Renders the layout with one full-width module on top, followed by a
 * three-column (20% / 60% / 20%) region holding the remaining modules.
 * The first module is removed from the column layout so it is not drawn twice.
 *
 * @param module fallback module id handed through to doPrintModule
 */
public static void showThreeColumnsWithSingleTop(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
    // Moduler resolves the modules configured for this user's page.
    Moduler layout = new Moduler(engine, context, req, res);
    // JSR 168 portlet metadata, refreshed per rendered module.
    PortletInfo portletInfo = new PortletInfo();

    // --- top row: the first module spans the full width ---
    out.println("<tr><td>");
    out.println("<table border=\"0\" cellpadding=\"1\" cellspacing=\"1\" width=\"100%\">");
    out.println("<tr><td valign=\"top\">");
    Module2 topModule = layout.getFirstModule();
    doPrintModule(topModule, portletInfo, engine, context, svtCfg, req, res, module, out, session);
    out.println("</td></tr></table>");
    out.println("</td></tr>");

    // --- lower row: three columns ---
    out.println("<tr><td>");
    out.println("<table border=\"0\" cellpadding=\"1\" cellspacing=\"1\" width=\"100%\">");
    out.println("<tr><td valign=\"top\" width=\"20%\">");
    // Keep the top module out of the column layout below.
    layout.removeModule(topModule);
    for (int col = 0; col < 3; col++) {
        // Column separators; the first <td> was already opened above.
        if (col == 1) {
            out.println("</td><td valign=\"top\" width=\"60%\">");
        } else if (col == 2) {
            out.println("</td><td valign=\"top\" width=\"20%\">");
        }
        out.println("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\">");
        Vector columnModules = layout.getModulesInColumn(col);
        // Render every module assigned to this column, top to bottom.
        for (int i = 0; i < columnModules.size(); i++) {
            Module2 current = (Module2) columnModules.elementAt(i);
            doPrintModule(current, portletInfo, engine, context, svtCfg, req, res, module, out, session);
        }
        out.println("</table>");
    }
    out.println("</td></tr></table>");
    out.println("</td></tr>");
}
/**
 * Renders a single module: an optional title bar plus the module content,
 * wrapped in a "module_frame" table row.
 *
 * @param currentModule the module to render; if {@code null} the response is
 *                      redirected and nothing is written for this module
 * @param portletInfo   JSR 168 metadata, updated with this module's id/title
 * @param module        fallback module id (overwritten from currentModule)
 */
private static void doPrintModule(Module2 currentModule, PortletInfo portletInfo, VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
    if (currentModule == null) {
        // Nothing to render for a missing module.
        res.sendRedirect("");
        // BUGFIX: the original fell through after sendRedirect() and kept
        // writing markup (and rendering content) into the redirected
        // response; abort instead.
        return;
    }
    module = currentModule.getId();
    String moduleTitle = currentModule.getCustomTitle();
    String moduleRealTitle = currentModule.getTitle();
    portletInfo.id = module;
    portletInfo.title = moduleTitle;
    Object content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
    // SHOW CONTENT
    out.println("<tr><td>");
    out.println("<table class=\"module_frame\" width=\"100%\" cellpadding=\"0\" cellspacing=\"1\" border=\"0\">");
    // Only draw the title bar when the module has a non-empty real title.
    if (!"".equals(moduleRealTitle)) {
        out.println("<tr><td>");
        context.put("moduleTitle", moduleTitle);
        ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
        try {
            cModuleTitle.print();
        } catch (Exception ex) {
            // Degrade gracefully: show the error inside the title cell.
            out.println(ex.getMessage());
        }
        out.println("</td></tr>");
    }
    out.println("<tr><td>");
    try {
        printContent(content, svtCfg, req, res, out, portletInfo);
    } catch (Exception ex) {
        // Degrade gracefully: show the error inside the content cell.
        out.println(ex.getMessage());
    }
    out.println("</td></tr>");
    out.println("</table>");
    out.println("</td></tr>");
}
/**
 * Renders the "left navigation" layout: a 180px menu column on the left,
 * with the selected module's content beside it.
 *
 * Falls back to the first configured module when {@code module} is not
 * found; shows a setup message when no modules exist at all.
 */
public static void showNavigationType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
Moduler cModuler = new Moduler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
Object content = null;
// Look up the requested module; may be null if the id is unknown.
//be carefull... might throw NullPointerException
Module2 currentModule = cModuler.getModuleById(module);
if ( currentModule != null ) {
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
} else {
// Fallback: render the first module on the page instead.
Module2 firstModule = cModuler.getFirstModule();
if ( firstModule!= null ) {
module = firstModule.getId();
moduleTitle = firstModule.getCustomTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
}
if ( firstModule == null ) {
// No modules configured at all; the message is shown as the title.
//res.sendRedirect("../expired.jsp");
moduleTitle = "Modules has not been setup!";
}
// NOTE(review): _portal_module is refreshed only on this fallback path,
// and moduleRealTitle stays "" here so the title bar below is skipped —
// confirm both are intentional.
session.setAttribute("_portal_module", module);
}
out.println("<tr><td>");
out.println("<table width=\"100%\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\">");
out.println("<tr><td class=\"navigation_menu\" align=\"center\" valign=\"top\" width=\"180\" nowrap>");
//DISPLAY LEFT MENU
try {
cModuler.print();
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the menu would be.
out.println( ex.getMessage() );
}
out.println("</td><td valign=\"top\">");
//SHOW CONTENT
out.println("<table width=\"100%\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\">");
// Only draw the title bar when the module has a non-empty real title.
if ( !"".equals(moduleRealTitle)) {
out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
out.println("</td></tr>");
}
out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
out.println( ex.getMessage() );
} finally {
// Drop the reference once rendered.
content = null;
}
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
}
/**
 * Renders the "top navigation" layout: a horizontal menu row above the
 * selected module's content. Near-duplicate of showNavigationType except it
 * uses TopModuler and stacks menu/content vertically.
 */
public static void showTopNavigationType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
TopModuler cModuler = new TopModuler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
Object content = null;
// Look up the requested module; may be null if the id is unknown.
//be carefull... might throw NullPointerException
Module2 currentModule = cModuler.getModuleById(module);
if ( currentModule != null ) {
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
} else {
// Fallback: render the first module on the page instead.
Module2 firstModule = cModuler.getFirstModule();
if ( firstModule!= null ) {
module = firstModule.getId();
moduleTitle = firstModule.getCustomTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
}
if ( firstModule == null ) {
// No modules configured at all; the message is shown as the title.
//res.sendRedirect("../expired.jsp");
moduleTitle = "Modules has not been setup!";
}
// NOTE(review): _portal_module is refreshed only on this fallback path —
// confirm intentional (same pattern as showNavigationType).
session.setAttribute("_portal_module", module);
}
out.println("<tr><td>");
out.println("<table width=\"100%\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\">");
out.println("<tr><td class=\"navigation_menu\" align=\"center\" valign=\"top\" nowrap>");
//DISPLAY TOP MENU
try {
cModuler.print();
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the menu would be.
out.println( ex.getMessage() );
}
out.println("</td></tr><tr><td valign=\"top\">");
//SHOW CONTENT
out.println("<table width=\"100%\" cellpadding=\"0\" cellspacing=\"0\" border=\"0\">");
// Only draw the title bar when the module has a non-empty real title.
if ( !"".equals(moduleRealTitle)) {
out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
out.println("</td></tr>");
}
out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
out.println( ex.getMessage() );
} finally {
// Drop the reference once rendered.
content = null;
}
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
}
/**
 * Renders the "modular" layout: iterates every module on the page and prints
 * each one stacked vertically, with a 1px spacer row between modules.
 * The Moduler itself is exposed to templates as CONTENT_MODULE.
 */
public static void showModularType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
Moduler cModuler = new Moduler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
//Iterate thru all the modules, and open them
// Expose the module iterator to the Velocity templates.
context.put("CONTENT_MODULE", cModuler);
while ( cModuler.hasMoreModules() ) {
//be carefull... might throw NullPointerException
Module2 currentModule = cModuler.getNext();
if ( currentModule != null ) {
module = currentModule.getId();
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
} else {
// NOTE(review): the loop keeps rendering after this redirect, reusing the
// previous iteration's module id — looks like a latent bug; confirm.
//Log.print("Attempted was denied due to NullPointerException!");
res.sendRedirect("");
}
Object content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
//SHOW CONTENT
//out.println("<tr><td>");
//out.println("<table class=\"module_frame\" width=\"100%\" cellpadding=\"0\" cellspacing=\"1\" border=\"0\">");
context.put("PORTAL_MODULE_TITLE", moduleRealTitle);
if ( !"".equals(moduleRealTitle)) {
//out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
/*
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
*/
//out.println("</td></tr>");
}
//out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the content would be.
out.println( ex.getMessage() );
}
// NOTE(review): these closing tags have no matching openers in this method
// (the openers above are commented out) — presumably the module template
// emits them via CONTENT_MODULE/PORTAL_MODULE_TITLE; verify before editing.
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
//to give a gap effect
out.println("<tr><td height=\"1\"> </td></tr>");
}
}
/**
 * Renders the two-column "narrow/wide" layout: a 30% left column and a 70%
 * right column, each stacking its assigned modules vertically inside a
 * "module_frame" table, with 1px spacer rows between modules.
 */
public static void showNarrowWideType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
Moduler cModuler = new Moduler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
out.println("<tr><td>");
out.println("<table width=\"100%\" border=\"0\" cellpadding=\"1\" cellspacing=\"1\">");
out.println("<tr><td valign=\"top\" width=\"30%\">");
// Column 0 = narrow (30%), column 1 = wide (70%).
for ( int colnum=0; colnum < 2; colnum++ ) {
if ( colnum == 1 ) {
//out.println("</td><td width=\"1\"></td><td width=\"70%\" valign=\"top\">");
out.println("</td><td width=\"70%\" valign=\"top\">");
}
out.println("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\">");
Vector vmodules = cModuler.getModulesInColumn(colnum);
//Iterate thru all the modules, and open them
for ( int cn=0; cn < vmodules.size(); cn++ ) {
//be carefull... might throw NullPointerException
Module2 currentModule = (Module2) vmodules.elementAt(cn);
if ( currentModule != null ) {
module = currentModule.getId();
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
} else {
// NOTE(review): rendering continues after this redirect, reusing the
// previous iteration's module id — looks like a latent bug; confirm.
//Log.print("Attempted was denied due to NullPointerException!");
res.sendRedirect("");
}
Object content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
//SHOW CONTENT
out.println("<tr><td>");
out.println("<table class=\"module_frame\" width=\"100%\" cellpadding=\"0\" cellspacing=\"1\" border=\"0\">");
// Only draw the title bar when the module has a non-empty real title.
if ( !"".equals(moduleRealTitle)) {
out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
out.println("</td></tr>");
}
out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the content would be.
out.println( ex.getMessage() );
}
out.println("</td></tr>");
out.println("</table>");
if ( colnum == 0 ) {
out.println("</td>");
} else {
out.println("</td></tr>");
}
//to give a gap effect
out.println("<tr><td height=\"1\"> </td></tr>");
}
out.println("</table>");
}
out.println("</td></tr></table>");
out.println("</td></tr>");
}
/**
 * Renders the two-column (50%/50%) layout: each column stacks its assigned
 * modules vertically inside a "module_frame" table, with 1px spacer rows
 * between modules.
 */
public static void showTwoColumnsType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
Moduler cModuler = new Moduler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
out.println("<tr><td>");
out.println("<table border=\"0\" cellpadding=\"1\" cellspacing=\"1\" width=\"100%\">");
out.println("<tr><td valign=\"top\" width=\"50%\">");
for ( int colnum=0; colnum < 2; colnum++ ) {
// Close the previous column cell and open the next (the first was opened above).
if ( colnum > 0 ) out.println("</td><td valign=\"top\" width=\"50%\">");
out.println("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\">");
Vector vmodules = cModuler.getModulesInColumn(colnum);
//Iterate thru all the modules, and open them
for ( int cn=0; cn < vmodules.size(); cn++ ) {
//be carefull... might throw NullPointerException
Module2 currentModule = (Module2) vmodules.elementAt(cn);
if ( currentModule != null ) {
module = currentModule.getId();
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
} else {
// NOTE(review): rendering continues after this redirect, reusing the
// previous iteration's module id — looks like a latent bug; confirm.
//Log.print("Attempted was denied due to NullPointerException!");
res.sendRedirect("");
}
Object content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
//SHOW CONTENT
out.println("<tr><td>");
out.println("<table class=\"module_frame\" width=\"100%\" cellpadding=\"0\" cellspacing=\"1\" border=\"0\">");
// Only draw the title bar when the module has a non-empty real title.
if ( !"".equals(moduleRealTitle)) {
out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
out.println("</td></tr>");
}
out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the content would be.
out.println( ex.getMessage() );
}
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
//to give a gap effect
out.println("<tr><td height=\"1\"> </td></tr>");
}
out.println("</table>");
}
out.println("</td></tr></table>");
out.println("</td></tr>");
}
/**
 * Renders the three-column (20%/60%/20%) layout: each column stacks its
 * assigned modules vertically inside a "module_frame" table.
 */
public static void showThreeColumnsType(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PrintWriter out,
HttpSession session) throws Exception {
//Initiate VTemplate objects
Moduler cModuler = new Moduler(engine, context, req, res);
//prepare String for module title
String moduleTitle = "";
String moduleRealTitle = "";
//--JSR 168 implementation
PortletInfo portletInfo = new PortletInfo();
//
out.println("<tr><td>");
out.println("<table border=\"0\" cellpadding=\"1\" cellspacing=\"1\" width=\"100%\">");
out.println("<tr><td valign=\"top\" width=\"20%\">");
for ( int colnum=0; colnum < 3; colnum++ ) {
// Column separators; the first <td> was already opened above.
if ( colnum == 1 ) out.println("</td><td valign=\"top\" width=\"60%\">");
if ( colnum == 2 ) out.println("</td><td valign=\"top\" width=\"20%\">");
out.println("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\">");
Vector vmodules = cModuler.getModulesInColumn(colnum);
//Iterate thru all the modules, and open them
for ( int cn=0; cn < vmodules.size(); cn++ ) {
//be carefull... might throw NullPointerException
Module2 currentModule = (Module2) vmodules.elementAt(cn);
if ( currentModule != null ) {
module = currentModule.getId();
moduleTitle = currentModule.getCustomTitle();
moduleRealTitle = currentModule.getTitle();
portletInfo.id = module;
portletInfo.title = moduleTitle;
} else {
// NOTE(review): rendering continues after this redirect, reusing the
// previous iteration's module id — looks like a latent bug; confirm.
//Log.print("Attempted was denied due to NullPointerException!");
res.sendRedirect("");
}
Object content = renderContent(engine, context, svtCfg, req, res, module, portletInfo);
//SHOW CONTENT
out.println("<tr><td>");
out.println("<table class=\"module_frame\" width=\"100%\" cellpadding=\"0\" cellspacing=\"1\" border=\"0\">");
// Only draw the title bar when the module has a non-empty real title.
if ( !"".equals(moduleRealTitle)) {
out.println("<tr><td>");
context.put("moduleTitle", moduleTitle);
ModuleTitle cModuleTitle = new ModuleTitle(engine, context, req, res);
try {
cModuleTitle.print();
} catch ( Exception ex ) {
out.println(ex.getMessage());
}
out.println("</td></tr>");
}
out.println("<tr><td>");
try {
printContent(content, svtCfg, req, res, out, portletInfo);
} catch ( Exception ex ) {
// Degrade gracefully: show the error where the content would be.
out.println( ex.getMessage() );
}
out.println("</td></tr>");
out.println("</table>");
out.println("</td></tr>");
//to give a gap effect
//out.println("<tr><td height=\"1\"> </td></tr>");
}
out.println("</table>");
}
out.println("</td></tr></table>");
out.println("</td></tr>");
}
/**
 * Dispatches rendering of a module's content object by its runtime type:
 * Velocity templates print themselves, Merak portlets run doView, and
 * standard JSR 168 portlets go through the init/render lifecycle.
 * A null content renders nothing.
 */
private static void printContent(Object content,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
PrintWriter out,
PortletInfo portletInfo) throws Exception {
    if (content == null) {
        return;
    }
    if (content instanceof VTemplate) {
        ((VTemplate) content).print();
    } else if (content instanceof MerakPortlet) {
        ((MerakPortlet) content).doView(req, res);
    } else if (content instanceof GenericPortlet) {
        // Assemble the JSR 168 render environment, then drive the portlet.
        Hashtable portletState = getPortletState(svtCfg, req, res, out, portletInfo);
        GenericPortlet portlet = (GenericPortlet) content;
        portlet.init((PortletConfig) portletState.get("config"));
        portlet.render((RenderRequest) portletState.get("renderRequest"),
                (RenderResponse) portletState.get("renderResponse"));
    }
}
/**
 * Builds the JSR 168 render environment (request, response, config) for a
 * portlet and returns it in a Hashtable keyed by "renderRequest",
 * "renderResponse" and "config".
 */
private static Hashtable getPortletState(ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
PrintWriter out,
PortletInfo portletInfo) throws Exception {
    // Portlet context wraps the underlying servlet request.
    MerakContext portletContext = new MerakContext();
    portletContext.httpServletRequest = req;

    // Portlet config carries the module's metadata and context.
    MerakConfig portletConfig = new MerakConfig();
    portletConfig.portletInfo = portletInfo;
    portletConfig.portletContext = portletContext;

    // Render request: normal window, view mode.
    MerakRequest renderRequest = new MerakRequest();
    renderRequest.windowState = WindowState.NORMAL;
    renderRequest.portletMode = PortletMode.VIEW;
    renderRequest.httpServletRequest = req;

    // Render response writes through the page's PrintWriter; getWriter() has
    // already been claimed on the response, so getOutputStream() would throw.
    MerakResponse renderResponse = new MerakResponse();
    renderResponse.printWriter = out;
    renderResponse.httpServletResponse = res;

    Hashtable state = new Hashtable();
    state.put("renderRequest", renderRequest);
    state.put("renderResponse", renderResponse);
    state.put("config", portletConfig);
    return state;
}
/**
 * Loads and configures the content object for a module: records a usage-log
 * entry when the user switches modules, instantiates the module class via
 * ClassLoadManager, then wires it up according to its runtime type
 * (VTemplate, HtmlContainer, XMLContainer, Attributable).
 * On any failure an ErrorMsg content carrying the message is returned instead.
 */
private static Object renderContent(VelocityEngine engine,
VelocityContext context,
ServletConfig svtCfg,
HttpServletRequest req,
HttpServletResponse res,
String module,
PortletInfo portletInfo) {
    HttpSession session = req.getSession();
    Object content = null;
    try {
        if (!"".equals(module)) {
            // Log a visit only when the user moves to a different module;
            // always remember the current one in the session.
            Object lastLogged = session.getAttribute("_log_module");
            if (lastLogged != null && !module.equals((String) lastLogged)) {
                UserTrackerLog.save(req, (String) session.getAttribute("_portal_login"), module);
            }
            session.setAttribute("_log_module", module);
        }
        content = ClassLoadManager.load(CustomClass.getName(module), module, req.getRequestedSessionId());
        if (content instanceof VTemplate) {
            // Velocity-based modules need the full rendering environment.
            VTemplate template = (VTemplate) content;
            template.setEnvironment(engine, context, req, res);
            template.setServletContext(svtCfg.getServletContext());
            template.setServletConfig(svtCfg);
            template.setId(module);
        }
        if (content instanceof HtmlContainer) {
            // HTML containers are fed the URL configured for this module.
            String url = UserPage.getUrlForHtmlContainer(module);
            if (url != null) {
                ((HtmlContainer) content).setUrl(url);
            }
        }
        if (content instanceof XMLContainer) {
            // XML containers get their source document and stylesheet.
            Hashtable xmlConfig = UserPage.getUrlAndXslForXMLContainer(module);
            if (xmlConfig != null) {
                XMLContainer xmlContainer = (XMLContainer) content;
                xmlContainer.setXml((String) xmlConfig.get("xml"));
                xmlContainer.setXsl((String) xmlConfig.get("xsl"));
            }
        }
        if (content instanceof Attributable) {
            // Generic attribute bags configured per module.
            Hashtable values = UserPage.getValuesForAttributable(module);
            if (values != null) {
                ((Attributable) content).setValues(values);
            }
        }
    } catch (DbException dbx) {
        content = new ErrorMsg(engine, context, req, res);
        ((ErrorMsg) content).setError("Database Error : " + dbx.getMessage());
    } catch (ClassNotFoundException cnfex) {
        content = new ErrorMsg(engine, context, req, res);
        ((ErrorMsg) content).setError("ClassNotFoundException : " + cnfex.getMessage());
    } catch (InstantiationException iex) {
        content = new ErrorMsg(engine, context, req, res);
        ((ErrorMsg) content).setError("InstantiationException : " + iex.getMessage());
    } catch (IllegalAccessException illex) {
        content = new ErrorMsg(engine, context, req, res);
        ((ErrorMsg) content).setError("IllegalAccessException : " + illex.getMessage());
    } catch (Exception ex) {
        content = new ErrorMsg(engine, context, req, res);
        ((ErrorMsg) content).setError("Other Exception during class initiation : " + ex.getMessage());
    }
    return content;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.wake.test.time;
import org.apache.reef.tang.Injector;
import org.apache.reef.tang.JavaConfigurationBuilder;
import org.apache.reef.tang.Tang;
import org.apache.reef.wake.EventHandler;
import org.apache.reef.wake.impl.LoggingUtils;
import org.apache.reef.wake.impl.ThreadPoolStage;
import org.apache.reef.wake.time.Time;
import org.apache.reef.wake.time.event.Alarm;
import org.apache.reef.wake.time.runtime.LogicalTimer;
import org.apache.reef.wake.time.runtime.RuntimeClock;
import org.apache.reef.wake.time.runtime.Timer;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.logging.Level;
/**
 * Tests for {@link RuntimeClock}: basic alarm delivery, registration race
 * conditions, simultaneous alarms, and delivery ordering.
 */
public class ClockTest {

  /** Builds a real-time RuntimeClock from an empty Tang configuration. */
  private static RuntimeClock buildClock() throws Exception {
    final JavaConfigurationBuilder builder = Tang.Factory.getTang()
        .newConfigurationBuilder();
    final Injector injector = Tang.Factory.getTang()
        .newInjector(builder.build());
    return injector.getInstance(RuntimeClock.class);
  }

  /** Builds a RuntimeClock bound to LogicalTimer, so alarms fire without real waiting. */
  private static RuntimeClock buildLogicalClock() throws Exception {
    final JavaConfigurationBuilder builder = Tang.Factory.getTang()
        .newConfigurationBuilder();
    builder.bind(Timer.class, LogicalTimer.class);
    final Injector injector = Tang.Factory.getTang()
        .newInjector(builder.build());
    return injector.getInstance(RuntimeClock.class);
  }

  /**
   * Floods the clock with self-rescheduling alarms from a 10-thread stage and
   * checks that a reasonable number of events were delivered within 5 seconds.
   */
  @Test
  public void testClock() throws Exception {
    LoggingUtils.setLoggingLevel(Level.FINE);
    final RuntimeClock clock = buildClock();
    new Thread(clock).start();
    final RandomAlarmProducer alarmProducer = new RandomAlarmProducer(clock);
    final ThreadPoolStage<Alarm> stage = new ThreadPoolStage<>(alarmProducer, 10);
    try {
      stage.onNext(null);
      Thread.sleep(5000);
      Assert.assertTrue(alarmProducer.getEventCount() > 40);
    } finally {
      stage.close();
      clock.close();
    }
  }

  /**
   * Schedules an alarm that fires *before* an already-waiting later alarm, to
   * verify the clock re-evaluates its wait when an earlier alarm arrives.
   */
  @Test
  public void testAlarmRegistrationRaceConditions() throws Exception {
    LoggingUtils.setLoggingLevel(Level.FINE);
    final RuntimeClock clock = buildClock();
    new Thread(clock).start();
    final EventRecorder earlierAlarmRecorder = new EventRecorder();
    final EventRecorder laterAlarmRecorder = new EventRecorder();
    try {
      // Schedule an Alarm that's far in the future
      clock.scheduleAlarm(5000, laterAlarmRecorder);
      Thread.sleep(1000);
      // By now, RuntimeClockImpl should be in a timed wait() for 5000 ms.
      // Scheduler an Alarm that should fire before the existing Alarm:
      clock.scheduleAlarm(2000, earlierAlarmRecorder);
      Thread.sleep(1000);
      // The earlier Alarm shouldn't have fired yet (we've only slept 1/2 time):
      Assert.assertEquals(0, earlierAlarmRecorder.events.size());
      Thread.sleep(1500);
      // The earlier Alarm should have fired, since 3500 > 2000 ms have passed:
      Assert.assertEquals(1, earlierAlarmRecorder.events.size());
      // And the later Alarm shouldn't have fired yet:
      Assert.assertEquals(0, laterAlarmRecorder.events.size());
      Thread.sleep(2000);
      // The later Alarm should have fired, since 5500 > 5000 ms have passed:
      Assert.assertEquals(1, laterAlarmRecorder.events.size());
    } finally {
      clock.close();
    }
  }

  /** Two alarms at the same logical timestamp must both be delivered. */
  @Test
  public void testSimultaneousAlarms() throws Exception {
    LoggingUtils.setLoggingLevel(Level.FINE);
    final RuntimeClock clock = buildLogicalClock();
    new Thread(clock).start();
    final EventRecorder alarmRecorder = new EventRecorder();
    try {
      clock.scheduleAlarm(1000, alarmRecorder);
      clock.scheduleAlarm(1000, alarmRecorder);
      Thread.sleep(2000);
      Assert.assertEquals(2, alarmRecorder.events.size());
    } finally {
      clock.close();
    }
  }

  /** Alarms must be delivered in timestamp order, regardless of scheduling order. */
  @Test
  public void testAlarmOrder() throws Exception {
    LoggingUtils.setLoggingLevel(Level.FINE);
    final RuntimeClock clock = buildLogicalClock();
    new Thread(clock).start();
    final EventRecorder alarmRecorder = new EventRecorder();
    try {
      final int numAlarms = 10;
      final long[] expected = new long[numAlarms];
      for (int i = 0; i < numAlarms; ++i) {
        clock.scheduleAlarm(i * 100, alarmRecorder);
        expected[i] = i * 100;
      }
      Thread.sleep(2000);
      final Long[] actualLong = new Long[numAlarms];
      alarmRecorder.timestamps.toArray(actualLong);
      final long[] actual = new long[numAlarms];
      for (int i = 0; i < numAlarms; ++i) {
        actual[i] = actualLong[i];
      }
      Assert.assertArrayEquals(expected, actual);
    } finally {
      clock.close();
    }
  }

  /**
   * An EventHandler that records the events that it sees.
   */
  private static class EventRecorder implements EventHandler<Alarm> {
    /**
     * A synchronized List of the events recorded by this EventRecorder.
     */
    public final List<Time> events = Collections.synchronizedList(new ArrayList<Time>());
    public final List<Long> timestamps = Collections.synchronizedList(new ArrayList<Long>());
    @Override
    public void onNext(final Alarm event) {
      timestamps.add(event.getTimeStamp());
      events.add(event);
    }
  }

  /**
   * An EventHandler that counts alarms and reschedules itself with a random
   * short delay on every invocation.
   */
  private static class RandomAlarmProducer implements EventHandler<Alarm> {
    private final RuntimeClock clock;
    private final Random rand;
    // Guarded by 'this': onNext runs concurrently on the stage's worker
    // threads while the test thread reads the count.
    private int eventCount;

    public RandomAlarmProducer(final RuntimeClock clock) {
      this.clock = clock;
      this.rand = new Random();
    }

    /** Returns the number of alarms handled so far (thread-safe). */
    synchronized int getEventCount() {
      return eventCount;
    }

    @Override
    public void onNext(final Alarm value) {
      // BUGFIX: "eventCount += 1" was an unsynchronized read-modify-write,
      // a data race when the ThreadPoolStage drives this handler from 10
      // threads; increments could be lost. Increment under the lock.
      synchronized (this) {
        eventCount++;
      }
      final int duration = rand.nextInt(100) + 1;
      clock.scheduleAlarm(duration, this);
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.update;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.RoutingMissingException;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.create.TransportCreateIndexAction;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.delete.TransportDeleteAction;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.index.TransportIndexAction;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.AutoCreateIndex;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.action.support.single.instance.TransportInstanceSingleOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.PlainShardIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndexAlreadyExistsException;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import java.util.Collections;
import java.util.Map;
/**
*/
/**
 * Transport action backing the update API. It resolves the target primary shard, lets
 * {@link UpdateHelper} translate the update into an index, delete or noop operation and
 * delegates that operation to the corresponding transport action, retrying the whole
 * update on version conflicts up to {@code retryOnConflict} times.
 */
public class TransportUpdateAction extends TransportInstanceSingleOperationAction<UpdateRequest, UpdateResponse> {
    private final TransportDeleteAction deleteAction;
    private final TransportIndexAction indexAction;
    private final AutoCreateIndex autoCreateIndex;
    private final TransportCreateIndexAction createIndexAction;
    private final UpdateHelper updateHelper;
    private final IndicesService indicesService;

    @Inject
    public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
                                 TransportIndexAction indexAction, TransportDeleteAction deleteAction, TransportCreateIndexAction createIndexAction,
                                 UpdateHelper updateHelper, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                 IndicesService indicesService, AutoCreateIndex autoCreateIndex) {
        super(settings, UpdateAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpdateRequest::new);
        this.indexAction = indexAction;
        this.deleteAction = deleteAction;
        this.createIndexAction = createIndexAction;
        this.updateHelper = updateHelper;
        this.indicesService = indicesService;
        this.autoCreateIndex = autoCreateIndex;
    }

    @Override
    protected String executor() {
        // Updates execute on the index thread pool: they translate into index/delete operations.
        return ThreadPool.Names.INDEX;
    }

    @Override
    protected UpdateResponse newResponse() {
        return new UpdateResponse();
    }

    @Override
    protected boolean retryOnFailure(Throwable e) {
        return TransportActions.isShardNotAvailableException(e);
    }

    @Override
    protected void resolveRequest(ClusterState state, UpdateRequest request) {
        resolveAndValidateRouting(state.metaData(), request.concreteIndex(), request);
    }

    /**
     * Resolves the routing value for the request (taking parent and alias routing into account)
     * and validates that routing is present when the type's mapping requires it.
     *
     * @throws RoutingMissingException if routing is required for the type but none was resolved
     */
    public static void resolveAndValidateRouting(MetaData metaData, String concreteIndex, UpdateRequest request) {
        request.routing((metaData.resolveIndexRouting(request.parent(), request.routing(), request.index())));
        // Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
        if (request.routing() == null && metaData.routingRequired(concreteIndex, request.type())) {
            throw new RoutingMissingException(concreteIndex, request.type(), request.id());
        }
    }

    @Override
    protected void doExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API
        if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) {
            createIndexAction.execute(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener<CreateIndexResponse>() {
                @Override
                public void onResponse(CreateIndexResponse result) {
                    innerExecute(request, listener);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
                        // we have the index, do it
                        try {
                            innerExecute(request, listener);
                        } catch (Throwable e1) {
                            listener.onFailure(e1);
                        }
                    } else {
                        listener.onFailure(e);
                    }
                }
            });
        } else {
            innerExecute(request, listener);
        }
    }

    private void innerExecute(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        super.doExecute(request, listener);
    }

    @Override
    protected ShardIterator shards(ClusterState clusterState, UpdateRequest request) {
        if (request.shardId() != -1) {
            // An explicit shard id was given: go straight to that shard's primary.
            return clusterState.routingTable().index(request.concreteIndex()).shard(request.shardId()).primaryShardIt();
        }
        ShardIterator shardIterator = clusterService.operationRouting()
                .indexShards(clusterState, request.concreteIndex(), request.type(), request.id(), request.routing());
        ShardRouting shard;
        // Updates must run on the primary shard; pick it out of the routing iterator.
        while ((shard = shardIterator.nextOrNull()) != null) {
            if (shard.primary()) {
                return new PlainShardIterator(shardIterator.shardId(), Collections.singletonList(shard));
            }
        }
        return new PlainShardIterator(shardIterator.shardId(), Collections.<ShardRouting>emptyList());
    }

    @Override
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener) {
        shardOperation(request, listener, 0);
    }

    /**
     * Prepares the update against the local primary shard and dispatches the translated
     * operation. {@code retryCount} tracks how many version-conflict retries were already done.
     */
    protected void shardOperation(final UpdateRequest request, final ActionListener<UpdateResponse> listener, final int retryCount) {
        final IndexService indexService = indicesService.indexServiceSafe(request.concreteIndex());
        final IndexShard indexShard = indexService.getShard(request.shardId());
        final UpdateHelper.Result result = updateHelper.prepare(request, indexShard);
        switch (result.operation()) {
            case UPSERT:
                IndexRequest upsertRequest = result.action();
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference upsertSourceBytes = upsertRequest.source();
                indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        if (request.fields() != null && request.fields().length > 0) {
                            Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
                            update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
                        } else {
                            update.setGetResult(null);
                        }
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        handleUpdateFailure(e, request, listener, retryCount);
                    }
                });
                break;
            case INDEX:
                IndexRequest indexRequest = result.action();
                // we fetch it from the index request so we don't generate the bytes twice, its already done in the index request
                final BytesReference indexSourceBytes = indexRequest.source();
                indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
                    @Override
                    public void onResponse(IndexResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
                        update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        handleUpdateFailure(e, request, listener, retryCount);
                    }
                });
                break;
            case DELETE:
                deleteAction.execute(result.action(), new ActionListener<DeleteResponse>() {
                    @Override
                    public void onResponse(DeleteResponse response) {
                        UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false);
                        update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
                        listener.onResponse(update);
                    }

                    @Override
                    public void onFailure(Throwable e) {
                        handleUpdateFailure(e, request, listener, retryCount);
                    }
                });
                break;
            case NONE:
                UpdateResponse update = result.action();
                IndexService indexServiceOrNull = indicesService.indexService(request.concreteIndex());
                if (indexServiceOrNull != null) {
                    // BUGFIX: use the null-checked reference. The original called
                    // indexService.getShardOrNull(..) here, which silently bypassed the
                    // null check on indexServiceOrNull above.
                    IndexShard shard = indexServiceOrNull.getShardOrNull(request.shardId());
                    if (shard != null) {
                        shard.noopUpdate(request.type());
                    }
                }
                listener.onResponse(update);
                break;
            default:
                throw new IllegalStateException("Illegal operation " + result.operation());
        }
    }

    /**
     * Shared failure handler for the translated index/delete operations: retries the whole
     * update on a version conflict while attempts remain, otherwise notifies the listener
     * with the unwrapped cause.
     */
    private void handleUpdateFailure(Throwable e, final UpdateRequest request,
                                     final ActionListener<UpdateResponse> listener, final int retryCount) {
        final Throwable cause = ExceptionsHelper.unwrapCause(e);
        if (cause instanceof VersionConflictEngineException && retryCount < request.retryOnConflict()) {
            logger.trace("Retry attempt [{}] of [{}] on version conflict on [{}][{}][{}]",
                    retryCount + 1, request.retryOnConflict(), request.index(), request.shardId(), request.id());
            // Re-run the full update (fetch + merge/script) on the index executor.
            threadPool.executor(executor()).execute(new ActionRunnable<UpdateResponse>(listener) {
                @Override
                protected void doRun() {
                    shardOperation(request, listener, retryCount + 1);
                }
            });
            return;
        }
        listener.onFailure(cause);
    }
}
| |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.distributed;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
import com.facebook.buck.distributed.thrift.BuildJobStateFileHashEntry;
import com.facebook.buck.testutil.FakeExecutor;
import com.facebook.buck.testutil.integration.TemporaryPaths;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
/**
 * Tests for {@code ServerContentsProvider}: verifies that buffered multi-fetch requests are
 * flushed both on the periodic schedule and when the buffer reaches its maximum size.
 */
public class ServerContentsProviderTest {
  // BUGFIX: was a mutable "private static long"; a shared test constant must be final.
  private static final long FUTURE_TIMEOUT_SECONDS = 1;

  private static final String HASH1 = "abcd";
  private static final String HASH2 = "xkcd";
  private static final String HASH3 = "buck";
  private static final String HASH4 = "face";
  private static final String HASH5 = "book";

  private static final String FILE_CONTENTS1 = "my";
  private static final String FILE_CONTENTS2 = "super";
  private static final String FILE_CONTENTS3 = "cool";
  private static final String FILE_CONTENTS4 = "contents";

  private Path path1, path2, path3, path4, path5;

  @Rule public TemporaryPaths tmp = new TemporaryPaths();

  private DistBuildService distBuildService;
  private ServerContentsProvider provider;
  private FakeExecutor fakeScheduledExecutor;
  private FileMaterializationStatsTracker statsTracker;

  @Before
  public void setUp() {
    distBuildService = EasyMock.createMock(DistBuildService.class);
    fakeScheduledExecutor = new FakeExecutor();
    statsTracker = EasyMock.createStrictMock(FileMaterializationStatsTracker.class);
    path1 = tmp.getRoot().resolve("file1");
    path2 = tmp.getRoot().resolve("file2");
    path3 = tmp.getRoot().resolve("file3");
    path4 = tmp.getRoot().resolve("file4");
    path5 = tmp.getRoot().resolve("file5");
  }

  @After
  public void tearDown() {
    if (provider != null) {
      provider.close();
    }
  }

  /** Creates the provider under test with the given buffering configuration. */
  private void initProvider(long bufferPeriodMs, int bufferMaxSize) {
    provider =
        new ServerContentsProvider(
            distBuildService,
            fakeScheduledExecutor,
            MoreExecutors.newDirectExecutorService(),
            statsTracker,
            bufferPeriodMs,
            bufferMaxSize);
  }

  @Test
  public void testMultiFetchPeriodWorks()
      throws IOException, InterruptedException, ExecutionException, TimeoutException {
    // Tiny period, huge buffer: only the periodic drain should trigger fetches.
    initProvider(1, 100);

    ImmutableMap.Builder<String, byte[]> result1 = new ImmutableMap.Builder<>();
    result1.put(HASH1, FILE_CONTENTS1.getBytes(StandardCharsets.UTF_8));
    result1.put(HASH2, FILE_CONTENTS2.getBytes(StandardCharsets.UTF_8));
    expect(distBuildService.multiFetchSourceFiles(ImmutableSet.of(HASH1, HASH2)))
        .andReturn(result1.build())
        .once();
    statsTracker.recordPeriodicCasMultiFetch(EasyMock.anyLong());
    expectLastCall().once();

    ImmutableMap.Builder<String, byte[]> result2 = new ImmutableMap.Builder<>();
    result2.put(HASH3, FILE_CONTENTS3.getBytes(StandardCharsets.UTF_8));
    expect(distBuildService.multiFetchSourceFiles(ImmutableSet.of(HASH3)))
        .andReturn(result2.build())
        .once();
    statsTracker.recordPeriodicCasMultiFetch(EasyMock.anyLong());
    expectLastCall().once();

    replay(distBuildService);
    replay(statsTracker);

    Future<?> future1, future2, future3;
    future1 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH1), path1);
    future2 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH2), path2);
    fakeScheduledExecutor.drain();
    future1.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    future2.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);

    future3 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH3), path3);
    fakeScheduledExecutor.drain();
    future3.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);

    // Extra run to check for calls with zero HashCodes.
    fakeScheduledExecutor.drain();

    verify(distBuildService);
    verify(statsTracker);
    Assert.assertArrayEquals(
        FILE_CONTENTS1.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path1));
    Assert.assertArrayEquals(
        FILE_CONTENTS2.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path2));
    Assert.assertArrayEquals(
        FILE_CONTENTS3.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path3));
  }

  @Test
  @SuppressWarnings("PMD.EmptyCatchBlock")
  public void testMultiFetchMaxBufferSizeWorks()
      throws IOException, InterruptedException, ExecutionException, TimeoutException {
    // Effectively-infinite period, buffer of 2: only buffer-full flushes should fetch.
    initProvider(1000 * 60 * 60 * 24, 2);

    // We should get request for 2 files first.
    ImmutableMap.Builder<String, byte[]> result1 = new ImmutableMap.Builder<>();
    result1.put(HASH1, FILE_CONTENTS1.getBytes(StandardCharsets.UTF_8));
    result1.put(HASH2, FILE_CONTENTS2.getBytes(StandardCharsets.UTF_8));
    expect(distBuildService.multiFetchSourceFiles(ImmutableSet.of(HASH1, HASH2)))
        .andReturn(result1.build())
        .once();
    statsTracker.recordFullBufferCasMultiFetch(EasyMock.anyLong());
    expectLastCall().once();

    // Then 2 again.
    ImmutableMap.Builder<String, byte[]> result2 = new ImmutableMap.Builder<>();
    result2.put(HASH3, FILE_CONTENTS3.getBytes(StandardCharsets.UTF_8));
    result2.put(HASH4, FILE_CONTENTS4.getBytes(StandardCharsets.UTF_8));
    expect(distBuildService.multiFetchSourceFiles(ImmutableSet.of(HASH3, HASH4)))
        .andReturn(result2.build())
        .once();
    statsTracker.recordFullBufferCasMultiFetch(EasyMock.anyLong());
    expectLastCall().once();

    // One lone request (for HASH5) should never be fetched.
    replay(distBuildService);
    replay(statsTracker);

    Future<?> future1, future2, future3, future4, future5;
    future1 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH1), path1);
    future2 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH2), path2);
    future3 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH3), path3);
    future4 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH4), path4);
    future5 =
        provider.materializeFileContentsAsync(
            new BuildJobStateFileHashEntry().setSha1(HASH5), path5);

    // We should not need to drain the scheduler.
    // Scheduler is only supposed to be used for periodic cleanup.
    future1.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    future2.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    future3.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
    future4.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);

    verify(distBuildService);
    verify(statsTracker);
    Assert.assertArrayEquals(
        FILE_CONTENTS1.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path1));
    Assert.assertArrayEquals(
        FILE_CONTENTS2.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path2));
    Assert.assertArrayEquals(
        FILE_CONTENTS3.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path3));
    Assert.assertArrayEquals(
        FILE_CONTENTS4.getBytes(StandardCharsets.UTF_8), Files.readAllBytes(path4));

    try {
      future5.get(FUTURE_TIMEOUT_SECONDS, TimeUnit.SECONDS);
      Assert.fail("Timeout was expected.");
    } catch (TimeoutException e) {
      // expected
    }
  }
}
| |
/*
* =================================================================================================
* Copyright (C) 2015 Martin Albedinsky
* =================================================================================================
* Licensed under the Apache License, Version 2.0 or later (further "License" only).
* -------------------------------------------------------------------------------------------------
* You may use this file only in compliance with the License. More details and copy of this License
* you may obtain at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* You can redistribute, modify or publish any part of the code written within this file but as it
* is described in the License, the software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES or CONDITIONS OF ANY KIND.
*
* See the License for the specific language governing permissions and limitations under the License.
* =================================================================================================
*/
package com.albedinsky.android.officium.service;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import java.io.IOException;
import okhttp3.ResponseBody;
import retrofit2.Converter;
/**
* A {@link BaseServiceObject} implementation that represents an error occurred during service execution.
* Service error may be defined as error response send by the server where in such case the error/response
* code may be obtained via {@link #getErrorCode()} along with the error body via {@link #getErrorBody()}
* respectively. Or it may be defined as a failure that has caused the associated service and request
* to fail. In such case {@link #isFailure()} will return {@code true} and the occurred failure may
* be obtained via {@link #getFailure()}.
*
* @author Martin Albedinsky
*/
public class ServiceError extends BaseServiceObject {

	/**
	 * Interface ===================================================================================
	 */

	/**
	 * Constants ===================================================================================
	 */

	/**
	 * Log TAG.
	 */
	// private static final String TAG = "ServiceError";

	/**
	 * Static members ==============================================================================
	 */

	/**
	 * Members =====================================================================================
	 */

	/**
	 * Throwable failure specified for this service error. May be {@code null} if this error is not
	 * a failure.
	 *
	 * @see #isFailure()
	 */
	private final Throwable mFailure;

	/**
	 * Error code specified for this service error along with error body.
	 *
	 * @see #isFailure()
	 */
	private final int mErrorCode;

	/**
	 * Error body specified for this service error along with error code. May be {@code null} if this
	 * error represents a failure.
	 *
	 * @see #isFailure()
	 */
	private final ResponseBody mErrorBody;

	/**
	 * Converter used to convert error body to the desired object.
	 *
	 * @see #getErrorBodyAs(Class)
	 */
	private Converter<ResponseBody, ?> mErrorBodyConverter;

	/**
	 * Constructors ================================================================================
	 */

	/**
	 * Creates a new instance of ServiceError with the specified <var>errorCode</var> and <var>errorBody</var>
	 * received from the server as error response for the associated service and request.
	 *
	 * @param errorCode The error/response code returned by the server.
	 * @param errorBody The body of error response returned by the server.
	 * @see #getErrorCode()
	 * @see #getErrorBody()
	 */
	public ServiceError(int errorCode, @NonNull ResponseBody errorBody) {
		this.mErrorCode = errorCode;
		this.mErrorBody = errorBody;
		this.mFailure = null;
	}

	/**
	 * Creates a new instance of ServiceError with the specified <var>failure</var>.
	 *
	 * @param failure The failure that has caused the associated service and request to fail.
	 * @see #isFailure()
	 * @see #getFailure()
	 */
	public ServiceError(@NonNull Throwable failure) {
		this.mErrorCode = 0;
		this.mErrorBody = null;
		this.mFailure = failure;
	}

	/**
	 * Creates a new instance of ServiceError with data of the given one.
	 * <p>
	 * <b>Note</b>, that this is same as creating <b>shallow</b> copy of the error object.
	 *
	 * @param other The other service error of which data to copy to the new one.
	 */
	public ServiceError(@NonNull ServiceError other) {
		this.mErrorCode = other.mErrorCode;
		this.mErrorBody = other.mErrorBody;
		this.mFailure = other.mFailure;
		this.mErrorBodyConverter = other.mErrorBodyConverter;
	}

	/**
	 * Methods =====================================================================================
	 */

	/**
	 * Checks whether this service error represents an error response that has been received from
	 * the server for the associated service and request.
	 *
	 * @return {@code True} if this error is an error response send by the server where the response
	 * code may be obtained via {@link #getErrorCode()} and the error body via {@link #getErrorBody()}
	 * respectively.
	 * @see #isFailure()
	 */
	public final boolean isError() {
		return mErrorBody != null;
	}

	/**
	 * Returns the error code of the response send by the server as result to call to the associated
	 * service and request.
	 *
	 * @return Error code specified for this service error.
	 * @throws NullPointerException If this error is not an error response but a failure.
	 * @see #getErrorBody()
	 */
	public int getErrorCode() {
		if (mErrorBody == null) throw new NullPointerException("Not an error but a failure!");
		return mErrorCode;
	}

	/**
	 * Returns the error body of the response send by the server as result to call to the associated
	 * service and request.
	 *
	 * @return Error body specified for this service error.
	 * @throws NullPointerException If this error is not an error response but a failure.
	 * @see #getErrorCode()
	 */
	@NonNull
	public ResponseBody getErrorBody() {
		if (mErrorBody == null) throw new NullPointerException("Not an error but a failure!");
		return mErrorBody;
	}

	/**
	 * Sets a converter that should be used to covert error body of this service error to the desired
	 * object when {@link #getErrorBodyAs(Class)} is called.
	 *
	 * @param converter The desired converter to be used for conversion. May be {@code null} to clear
	 *                  the current one.
	 */
	public void setErrorBodyConverter(@Nullable Converter<ResponseBody, ?> converter) {
		this.mErrorBodyConverter = converter;
	}

	/**
	 * Returns the error body of this service error as the desired type.
	 *
	 * @param classOfT Class ot the type to which to convert the error body.
	 * @param <T>      The desired type as which to return error body or {@code null} if conversion fails.
	 * @return Error body converted to the requested type.
	 * @throws NullPointerException     If this error is not an error response but a failure.
	 * @throws NullPointerException     If no converter has been specified.
	 * @throws IllegalArgumentException If the converted object is not of the requested type.
	 */
	@Nullable
	public <T> T getErrorBodyAs(@NonNull Class<T> classOfT) {
		if (mErrorBody == null) throw new NullPointerException("Not an error but a failure!");
		if (mErrorBodyConverter == null) throw new NullPointerException("No error body converter specified!");
		T errorBody = null;
		try {
			// BUGFIX: use Class.cast(..) instead of an unchecked (T) cast. Due to type erasure
			// the unchecked cast never threw here, which made the ClassCastException handler
			// below dead code and deferred the mismatch failure to the call site.
			errorBody = classOfT.cast(mErrorBodyConverter.convert(mErrorBody));
		} catch (IOException e) {
			// Conversion failure is treated as "no body available"; null is returned.
			e.printStackTrace();
		} catch (ClassCastException e) {
			throw new IllegalArgumentException("Specified class(" + classOfT + ") does not match that specified for converter!", e);
		} finally {
			// NOTE(review): the body is closed here, so the raw error body cannot be read again
			// after this call — presumably intentional; confirm against callers.
			mErrorBody.close();
		}
		return errorBody;
	}

	/**
	 * Checks whether this service error represents a failure that has caused the associated service
	 * and request to fail during its execution.
	 *
	 * @return {@code True} if this error is a failure that may be obtained via {@link #getFailure()},
	 * {@code false} if it is an error response.
	 * @see #isError()
	 */
	public final boolean isFailure() {
		return mFailure != null;
	}

	/**
	 * Returns the failure that has caused the associated service and request to fail.
	 *
	 * @return Throwable failure specified for this service error.
	 * @throws NullPointerException If this error is not a failure but an error response.
	 */
	@NonNull
	public Throwable getFailure() {
		if (mFailure == null) throw new NullPointerException("Not a failure but an error!");
		return mFailure;
	}

	/**
	 */
	@Override
	@SuppressWarnings("StringBufferReplaceableByString")
	public String toString() {
		final boolean isFailure = isFailure();
		final StringBuilder builder = new StringBuilder(64);
		builder.append(getClass().getSimpleName());
		builder.append("{isFailure: ");
		builder.append(isFailure);
		builder.append(", errorCode: ");
		builder.append(isFailure ? "NONE" : mErrorCode);
		builder.append(", errorBody: ");
		builder.append(isFailure ? "NONE" : mErrorBody);
		builder.append(", failure: ");
		builder.append(isFailure ? mFailure : "NONE");
		return builder.append("}").toString();
	}

	/**
	 * Inner classes ===============================================================================
	 */
}
| |
/*
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.core.auth.dto;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
* Model for DCR Client (OAuth2 Application)
*/
/**
 * Model for DCR Client (OAuth2 Application). Field names are mapped to the JSON attribute
 * names of the dynamic client registration response via {@code @SerializedName}.
 */
public final class DCRClientInfo {
    @SerializedName("client_id")
    private String clientId;
    @SerializedName("registration_client_uri")
    private String registrationClientUri;
    @SerializedName("registration_access_token")
    private String registrationAccessToken;
    @SerializedName("client_id_issued_at")
    private String clientIdIssuedAt;
    @SerializedName("client_secret")
    private String clientSecret;
    @SerializedName("client_secret_expires_at")
    private String clientSecretExpiresAt;
    @SerializedName("client_name")
    private String clientName;
    @SerializedName("redirect_uris")
    private List<String> redirectURIs;
    @SerializedName("grant_types")
    private List<String> grantTypes;
    @SerializedName("token_endpoint_auth_method")
    private String tokenEndpointAuthMethod;
    @SerializedName("logo_uri")
    private String logoUri;
    @SerializedName("jwks_uri")
    private String jwksUri;

    // remove commented lines after fixing https://wso2.org/jira/browse/IDENTITY-6972
    /* @SerializedName("userinfo_signed_response_alg")
    private String userinfoSignedResponseAlg;*/

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }

    public String getRegistrationClientUri() {
        return registrationClientUri;
    }

    public void setRegistrationClientUri(String registrationClientUri) {
        this.registrationClientUri = registrationClientUri;
    }

    public String getRegistrationAccessToken() {
        return registrationAccessToken;
    }

    public void setRegistrationAccessToken(String registrationAccessToken) {
        this.registrationAccessToken = registrationAccessToken;
    }

    public String getClientIdIssuedAt() {
        return clientIdIssuedAt;
    }

    public void setClientIdIssuedAt(String clientIdIssuedAt) {
        this.clientIdIssuedAt = clientIdIssuedAt;
    }

    public String getClientSecret() {
        return clientSecret;
    }

    public void setClientSecret(String clientSecret) {
        this.clientSecret = clientSecret;
    }

    public String getClientSecretExpiresAt() {
        return clientSecretExpiresAt;
    }

    public void setClientSecretExpiresAt(String clientSecretExpiresAt) {
        this.clientSecretExpiresAt = clientSecretExpiresAt;
    }

    public String getClientName() {
        return clientName;
    }

    public void setClientName(String clientName) {
        this.clientName = clientName;
    }

    public List<String> getRedirectURIs() {
        return redirectURIs;
    }

    public void setRedirectURIs(List<String> redirectURIs) {
        this.redirectURIs = redirectURIs;
    }

    public List<String> getGrantTypes() {
        return grantTypes;
    }

    public void setGrantTypes(List<String> grantTypes) {
        this.grantTypes = grantTypes;
    }

    public String getTokenEndpointAuthMethod() {
        return tokenEndpointAuthMethod;
    }

    public void setTokenEndpointAuthMethod(String tokenEndpointAuthMethod) {
        this.tokenEndpointAuthMethod = tokenEndpointAuthMethod;
    }

    public String getLogoUri() {
        return logoUri;
    }

    public void setLogoUri(String logoUri) {
        this.logoUri = logoUri;
    }

    public String getJwksUri() {
        return jwksUri;
    }

    public void setJwksUri(String jwksUri) {
        this.jwksUri = jwksUri;
    }

    /* public String getUserinfoSignedResponseAlg() {
        return userinfoSignedResponseAlg;
    }
    public void setUserinfoSignedResponseAlg(String userinfoSignedResponseAlg) {
        this.userinfoSignedResponseAlg = userinfoSignedResponseAlg;
    }*/

    /**
     * Appends a grant type, lazily creating the backing list. A {@code null} argument is ignored.
     */
    public void addGrantType(String grantType) {
        if (grantType != null) {
            if (grantTypes == null) {
                grantTypes = new ArrayList<>();
            }
            grantTypes.add(grantType);
        }
    }

    /**
     * Appends a callback/redirect URL, lazily creating the backing list. A {@code null} argument
     * is ignored.
     */
    public void addCallbackUrl(String callback) {
        if (callback != null) {
            if (redirectURIs == null) {
                redirectURIs = new ArrayList<>();
            }
            redirectURIs.add(callback);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof DCRClientInfo)) {
            return false;
        }
        DCRClientInfo other = (DCRClientInfo) o;
        return Objects.equals(clientId, other.clientId)
                && Objects.equals(registrationClientUri, other.registrationClientUri)
                && Objects.equals(registrationAccessToken, other.registrationAccessToken)
                && Objects.equals(clientIdIssuedAt, other.clientIdIssuedAt)
                && Objects.equals(clientSecret, other.clientSecret)
                && Objects.equals(clientSecretExpiresAt, other.clientSecretExpiresAt)
                && Objects.equals(clientName, other.clientName)
                && Objects.equals(redirectURIs, other.redirectURIs)
                && Objects.equals(grantTypes, other.grantTypes)
                && Objects.equals(tokenEndpointAuthMethod, other.tokenEndpointAuthMethod)
                && Objects.equals(logoUri, other.logoUri)
                && Objects.equals(jwksUri, other.jwksUri);
    }

    @Override
    public int hashCode() {
        return Objects.hash(clientId, registrationClientUri, registrationAccessToken, clientIdIssuedAt, clientSecret,
                clientSecretExpiresAt, clientName, redirectURIs, grantTypes, tokenEndpointAuthMethod, logoUri,
                jwksUri);
    }
}
| |
package org.redis.objects;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import lombok.experimental.Builder;
import org.redis.objects.exceptions.RedisobjectsException;
import org.redis.objects.serializer.Serializer;
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.Pipeline;
import redis.clients.jedis.Transaction;
import redis.clients.util.SafeEncoder;
/**
* Redis set.
*
* @author Mathieu MAST
* @param <V>
*/
public class RedisSet<V> extends RedisObject<V, V> implements Set<V> {
    /**
     * Creates a Redis-backed set with default settings for synchronization and serialization
     * (all optional parameters delegated as {@code null}; defaults are defined by {@code RedisObject}).
     *
     * @param jedisPool pool used to obtain Redis connections
     * @param name Redis key under which the set is stored
     */
    public RedisSet(final JedisPool jedisPool, final String name) {
        this(jedisPool, name, null, null, null, null);
    }
    /**
     * Creates a Redis-backed set. All parameters are passed straight through to the
     * {@code RedisObject} superclass, which defines their semantics and defaults
     * ({@code null} values select the superclass defaults).
     *
     * @param jedisPool pool used to obtain Redis connections
     * @param name Redis key under which the set is stored
     * @param syncImmediate see {@code RedisObject}; may be {@code null}
     * @param maxWithoutSync see {@code RedisObject}; may be {@code null}
     * @param delayBeforeSync see {@code RedisObject}; may be {@code null}
     * @param serializer value serializer; may be {@code null} for the default
     */
    @Builder
    public RedisSet(final JedisPool jedisPool, final String name, Boolean syncImmediate, Integer maxWithoutSync, Integer delayBeforeSync, Serializer serializer) {
        super(jedisPool, name, syncImmediate, maxWithoutSync, delayBeforeSync, serializer);
    }
/**
* {@inheritDoc}
*/
@Override
public int size() {
return run(new Work<Integer>() {
@Override
public Integer work(Jedis jedis) {
Long l = jedis.scard(name);
if (null == l) {
return 0;
} else {
return l.intValue();
}
}
});
}
/**
* {@inheritDoc}
*/
@Override
public boolean isEmpty() {
return 0 == size();
}
/**
* {@inheritDoc}
*/
@Override
public boolean contains(final Object o) {
return run(new Work<Boolean>() {
@Override
public Boolean work(Jedis jedis) {
try {
Boolean exists = jedis.sismember(SafeEncoder.encode(name), valueToBytes((V) o));
return null == exists ? false : exists;
} catch (IOException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* {@inheritDoc}
*/
@Override
public Iterator<V> iterator() {
return run(new Work<Iterator<V>>() {
@Override
public Iterator<V> work(Jedis jedis) {
try {
List<V> list = new ArrayList<>();
Set<byte[]> res = jedis.smembers(SafeEncoder.encode(name));
for (byte[] bytes : res) {
list.add(bytesToValue(bytes));
}
return list.iterator();
} catch (IOException | ClassNotFoundException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* {@inheritDoc}
*/
@Override
public Object[] toArray() {
return run(new Work<Object[]>() {
@Override
public Object[] work(Jedis jedis) {
try {
List<V> list = new ArrayList<>();
Set<byte[]> res = jedis.smembers(SafeEncoder.encode(name));
for (byte[] bytes : res) {
list.add(bytesToValue(bytes));
}
return list.toArray();
} catch (IOException | ClassNotFoundException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* {@inheritDoc}
*/
@Override
public <T> T[] toArray(final T[] a) {
return run(new Work<T[]>() {
@Override
public T[] work(Jedis jedis) {
try {
List<V> list = new ArrayList<>();
Set<byte[]> res = jedis.smembers(SafeEncoder.encode(name));
for (byte[] bytes : res) {
list.add(bytesToValue(bytes));
}
return list.toArray(a);
} catch (IOException | ClassNotFoundException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* Add object (WARNING: always returning true).
*
* @param e object
* @return true
*/
@Override
public boolean add(final V e) {
return pipelined(new PipelinedWork<Boolean>() {
@Override
public Boolean work(Pipeline pipeline) {
try {
pipeline.sadd(SafeEncoder.encode(name), valueToBytes((V) e));
return true;
} catch (IOException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* Remove object (WARNING: always returning true).
*
* @return true
*/
@Override
public boolean remove(final Object o) {
return pipelined(new PipelinedWork<Boolean>() {
@Override
public Boolean work(Pipeline pipeline) {
try {
pipeline.srem(SafeEncoder.encode(name), valueToBytes((V) o));
return true;
} catch (IOException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* {@inheritDoc}
*/
@Override
public boolean containsAll(final Collection<?> c) {
return run(new Work<Boolean>() {
@Override
public Boolean work(Jedis jedis) {
try {
for (Object o : c) {
Boolean exists = jedis.sismember(SafeEncoder.encode(name), valueToBytes((V) o));
if (null == exists || false == exists) {
return false;
}
}
return true;
} catch (IOException ex) {
throw new RedisobjectsException(ex);
}
}
});
}
/**
* Add objects (WARNING: always returning true).
*
* @param c objects
* @return true
*/
@Override
public boolean addAll(final Collection<? extends V> c) {
return run(new Work<Boolean>() {
@Override
public Boolean work(Jedis jedis) {
Transaction tr = jedis.multi();
try {
for (V value : c) {
tr.sadd(SafeEncoder.encode(name), valueToBytes(value));
}
tr.exec();
} catch (IOException ex) {
tr.discard();
throw new RedisobjectsException(ex);
}
return true;
}
});
}
/**
* {@inheritDoc}
*/
@Override
public boolean retainAll(final Collection<?> c) {
return run(new Work<Boolean>() {
@Override
public Boolean work(Jedis jedis) {
boolean res = false;
Set<byte[]> list = jedis.smembers(SafeEncoder.encode(name));
Transaction tr = jedis.multi();
try {
for (byte[] bytes : list) {
V value = (V) bytesToValue(bytes);
if (!c.contains(value)) {
tr.srem(SafeEncoder.encode(name), bytes);
res = true;
}
}
tr.exec();
} catch (IOException | ClassNotFoundException ex) {
tr.discard();
throw new RedisobjectsException(ex);
}
return res;
}
});
}
/**
* Remove objects (WARNING: always returning true).
*
* @param c objects
* @return true
*/
@Override
public boolean removeAll(final Collection<?> c) {
return run(new Work<Boolean>() {
@Override
public Boolean work(Jedis jedis) {
Transaction tr = jedis.multi();
try {
for (Object object : c) {
tr.srem(SafeEncoder.encode(name), valueToBytes((V) object));
}
tr.exec();
} catch (IOException ex) {
tr.discard();
throw new RedisobjectsException(ex);
}
return true;
}
});
}
/**
* {@inheritDoc}
*/
@Override
public void clear() {
pipelined(new PipelinedWork<Boolean>() {
@Override
public Boolean work(Pipeline pipeline) {
pipeline.del(SafeEncoder.encode(name));
return true;
}
});
}
}
| |
// Modified or written by Lambdascale SRL for inclusion with lambdaj.
// Copyright (c) 2009-2010 Mario Fusco.
// Licensed under the Apache License, Version 2.0 (the "License")
package ch.lambdaj.collection;
import ch.lambdaj.*;
import ch.lambdaj.group.*;
import ch.lambdaj.function.convert.*;
import ch.lambdaj.util.*;
import org.hamcrest.*;
import java.io.*;
import java.lang.reflect.*;
import java.util.*;
import static org.hamcrest.Matchers.not;
/**
* An Iterable that extends the Iterable interface with the fluent interface methods provided by lambdaj
* @author Gianfranco Tognana
* @author Mario Fusco
*/
public class LambdaIterable<T> extends AbstractLambdaCollection<T> implements Iterable<T>, Cloneable, Serializable {

    LambdaIterable(Iterable<? extends T> inner) {
        super(inner);
    }

    /**
     * {@inheritDoc}
     */
    public LambdaIterator<T> iterator() {
        return new LambdaIterator<T>(innerIterable.iterator());
    }

    /**
     * Returns a proxy of the class of the first object in this iterable that when invoked with a method returning a String
     * returns a comma separated string that is the concatenation of the Strings resulting from the invocation
     * of the same method on each item in this iterable
     * @return A proxy of the class of the first object in this iterable representing a join lambda function
     * @throws IllegalArgumentException if this iterable is null or empty
     */
    public T joinFrom() {
        return Lambda.joinFrom(innerIterable);
    }

    /**
     * Returns a proxy of the class of the first object in this iterable that when invoked with a method returning a String
     * returns a string separated with the given separator that is the concatenation of the Strings resulting from the invocation
     * of the same method on each item in this iterable
     * @param separator The String used to separate the Strings produced by this proxy
     * @return A proxy of the class of the first object in this iterable representing a join lambda function
     * @throws IllegalArgumentException if this iterable is null or empty
     */
    public T joinFrom(String separator) {
        return Lambda.joinFrom(innerIterable, separator);
    }

    /**
     * Returns a proxy of the class of the first object in this iterable that when invoked with a method returning a number
     * returns the sum of the numbers resulting from the invocation of the same method on each item in this iterable
     * @return A proxy of the class of the first object in this iterable representing a sum lambda function
     * @throws IllegalArgumentException if this iterable is null or empty
     */
    public T sumFrom() {
        return Lambda.sumFrom(innerIterable);
    }

    /**
     * Returns a proxy of the class of the first object in this iterable that when invoked with a method returning a Comparable
     * returns the maximum of the Comparables resulting from the invocation of the same method on each item in this iterable
     * @return A proxy of the class of the first object in this iterable representing a max lambda function
     * @throws IllegalArgumentException if this iterable is null or empty
     */
    public T maxFrom() {
        return Lambda.maxFrom(innerIterable);
    }

    /**
     * Returns a proxy of the class of the first object in this iterable that when invoked with a method returning a Comparable
     * returns the minimum of the Comparables resulting from the invocation of the same method on each item in this iterable
     * @return A proxy of the class of the first object in this iterable representing a min lambda function
     * @throws IllegalArgumentException if this iterable is null or empty
     */
    public T minFrom() {
        return Lambda.minFrom(innerIterable);
    }

    /**
     * Retains all the objects in this iterable that match the given hamcrest Matcher
     * @param matcher The hamcrest Matcher used to filter this iterable
     * @return A sublist of this containing all the objects that match the given hamcrest Matcher
     */
    public LambdaIterable<T> retain(Matcher<?> matcher) {
        doRetain(matcher);
        return this;
    }

    void doRetain(Matcher<?> matcher) {
        setInner(Lambda.select(innerIterable, matcher));
    }

    /**
     * Removes all the objects in this iterable that match the given hamcrest Matcher
     * @param matcher The hamcrest Matcher used to filter this iterable
     * @return A sublist of this containing all the objects that don't match the given hamcrest Matcher
     */
    public LambdaIterable<T> remove(Matcher<?> matcher) {
        doRemove(matcher);
        return this;
    }

    void doRemove(Matcher<?> matcher) {
        // Removal is implemented as selection of the non-matching items.
        setInner(Lambda.select(innerIterable, not(matcher)));
    }

    /**
     * Sorts all the items in this iterable on the respective values of the given argument.
     * @param argument An argument defined using the {@link Lambda#on(Class)} method
     * @return A List with the same items of this iterable sorted on the respective value of the given argument
     */
    public LambdaIterable<T> sort(Object argument) {
        doSort(argument);
        return this;
    }

    void doSort(Object argument) {
        setInner((Iterable<? extends T>)Lambda.sort(innerIterable, argument));
    }

    /**
     * Sorts all the items in this iterable on the respective values of the given argument.
     * @param argument An argument defined using the {@link Lambda#on(Class)} method
     * @param option Sorting option e.g.: Sort.DESCENDING + Sort.IGNORE_CASE
     * @return A List with the same items of this iterable sorted on the respective value of the given argument
     */
    public LambdaIterable<T> sort(Object argument, int option) {
        doSort(argument, option);
        return this;
    }

    void doSort(Object argument, int option) {
        setInner((Iterable<? extends T>)Lambda.sort(innerIterable, argument, option));
    }

    /**
     * Converts all the object in this iterable using the given {@link Converter}.
     * @param converter The converter that specifies how each object in the iterable must be converted
     * @return A LambdaIterable containing all the objects in this iterable converted using the given {@link Converter}
     */
    public <V> LambdaIterable<V> convert(Converter<T, V> converter) {
        return new LambdaIterable<V>(doConvert(converter));
    }

    <V> List<V> doConvert(Converter<T, V> converter) {
        return Lambda.convert(innerIterable, converter);
    }

    /**
     * Converts all the object in this iterable extracting the property defined by the given argument.
     * @param argument An argument defined using the {@link Lambda#on(Class)} method
     * @return A LambdaIterable containing the argument's value extracted from the object in this iterable
     */
    public <V> LambdaIterable<V> extract(V argument) {
        return new LambdaIterable<V>(doExtract(argument));
    }

    <V> List<V> doExtract(V argument) {
        return Lambda.extract(innerIterable, argument);
    }

    /**
     * Replace with the given replacer all the items in this iterable that match the given matcher
     * @param matcher The hamcrest Matcher that decides which items get replaced
     * @param replacer The item with which all the items that matches will be replaced
     * @return A LambdaIterable with all the items matching the given matcher replaced by the given replacer
     */
    public LambdaIterable<T> replace(Matcher<?> matcher, T replacer) {
        doReplace(matcher, replacer);
        return this;
    }

    void doReplace(Matcher<?> matcher, T replacer) {
        Collection<T> list = new ArrayList<T>();
        for (T item : innerIterable) { list.add(matcher.matches(item) ? replacer : item); }
        setInner(list);
    }

    /**
     * Returns a Set containing only the distinct items in this iterable
     * @return A Set containing only the distinct items in this iterable
     */
    public LambdaSet<T> distinct() {
        Set<T> set = new HashSet<T>();
        for (T item : innerIterable) { set.add(item); }
        return new LambdaSet<T>(set);
    }

    /**
     * Selects all the items in this iterable having a different value on the given argument defined using the on method.
     * @param argument An argument defined using the {@link Lambda#on(Class)} method
     * @return A LambdaIterable with the same items of the given iterable but containing no duplicate values on the given argument
     */
    public LambdaIterable<T> distinct(Object argument) {
        doDistinct(argument);
        return this;
    }

    void doDistinct(Object argument) {
        setInner((Iterable<? extends T>)Lambda.selectDistinctArgument(innerIterable, argument));
    }

    /**
     * Converts the objects in this iterable in objects of the given target Class.
     * The objects are created by invoking its constructor passing to it the values taken
     * from the object to be converted using the given arguments.
     * @param targetClass The class in which the objects in the given iterable must be converted
     * @param arguments The arguments of the objects to be converted that will be used to create the objects of the target class
     * @return A LambdaIterable of instances of the target class, one for each object in this iterable
     */
    public <V> LambdaIterable<V> project(Class<V> targetClass, Object... arguments) {
        // Parameterized (was a raw-type construction) so the compiler can check the element type.
        return new LambdaIterable<V>(doProject(targetClass, arguments));
    }

    <V> List<V> doProject(Class<V> targetClass, Object... arguments) {
        return Lambda.project(innerIterable, targetClass, arguments);
    }

    /**
     * Organizes the given list of items in (hierarchy of) groups based on the given grouping conditions
     * @param conditions The conditions that define how the items have to be grouped
     * @return The items grouped by the given conditions
     */
    public LambdaGroup<T> group(GroupCondition<?>... conditions) {
        return new LambdaGroup<T>(Lambda.group(innerIterable, conditions));
    }

    /**
     * Returns a shallow copy of this Collection instance. (The elements themselves are not copied.)
     * @return A clone of this Collection instance
     */
    @Override
    public LambdaIterable<T> clone() {
        return new LambdaIterable<T>(innerClone(new ArrayList<T>()));
    }

    /**
     * Clones the wrapped iterable when it supports cloning; otherwise copies its
     * items into the supplied empty collection and returns that.
     */
    Iterable<? extends T> innerClone(Collection<? extends T> emptyInner) {
        try {
            return (Iterable<T>)IntrospectionUtil.clone(innerIterable);
        } catch (CloneNotSupportedException e) { } // fall through to the manual copy below
        for (T item : innerIterable) { ((Collection<T>)emptyInner).add(item); }
        return emptyInner;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
/*
* This file is part of the RTF library of the FOP project, which was originally
* created by Bertrand Delacretaz bdelacretaz@codeconsult.ch and by other
* contributors to the jfor project (www.jfor.org), who agreed to donate jfor to
* the FOP project.
*/
package org.apache.fop.render.rtf.rtflib.rtfdoc;
import java.io.IOException;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Vector;
/**
* Singelton of the RTF style sheet table.
* This class belongs to the jfor:stylesheet tag processing.
* This work was originally authored by <a href="mailto:a.putz@skynamics.com">Andreas Putz</a>
*/
public final class RtfStyleSheetTable {
    //////////////////////////////////////////////////
    // @@ Symbolic constants
    //////////////////////////////////////////////////

    /** Start index number for the stylesheet reference table */
    private static int startIndex = 15;

    /** OK status value for attribute handling */
    public static final int STATUS_OK = 0;

    /** Status value for attribute handling, if the stylesheet not found and
     *  the stylesheet set to the default stylesheet */
    public static final int STATUS_DEFAULT = 1;

    /** Standard style name */
    private static final String STANDARD_STYLE = "Standard";

    //////////////////////////////////////////////////
    // @@ Singleton
    //////////////////////////////////////////////////

    /** Singleton instance */
    private static RtfStyleSheetTable instance = new RtfStyleSheetTable();

    //////////////////////////////////////////////////
    // @@ Members
    //////////////////////////////////////////////////

    /** Maps style name to its stylesheet reference number (generified, was raw Hashtable). */
    private Hashtable<String, Integer> styles;

    /** Maps style name to its RTF attributes (generified, was raw Hashtable). */
    private Hashtable<String, RtfAttributes> attrTable;

    /** Style names in registration order (generified, was raw Vector). */
    private Vector<String> nameTable;

    /** Default style */
    private String defaultStyleName = STANDARD_STYLE;

    //////////////////////////////////////////////////
    // @@ Construction
    //////////////////////////////////////////////////

    /**
     * Constructor.
     */
    private RtfStyleSheetTable() {
        styles = new Hashtable<String, Integer>();
        attrTable = new Hashtable<String, RtfAttributes>();
        nameTable = new Vector<String>();
    }

    /**
     * Singleton.
     *
     * @return The instance of RtfStyleSheetTable
     */
    public static RtfStyleSheetTable getInstance() {
        return instance;
    }

    //////////////////////////////////////////////////
    // @@ Member access
    //////////////////////////////////////////////////

    /**
     * Sets the default style.
     * @param styleName Name of the default style, defined in the stylesheet
     */
    public void setDefaultStyle(String styleName) {
        this.defaultStyleName = styleName;
    }

    /**
     * Gets the name of the default style.
     * @return Default style name, falling back to {@link #STANDARD_STYLE} when the
     *         configured default has no attributes; null if neither has attributes.
     */
    public String getDefaultStyleName() {
        if (attrTable.get(defaultStyleName) != null) {
            return defaultStyleName;
        }
        if (attrTable.get(STANDARD_STYLE) != null) {
            defaultStyleName = STANDARD_STYLE;
            return defaultStyleName;
        }
        return null;
    }

    //////////////////////////////////////////////////
    // @@ Public methods
    //////////////////////////////////////////////////

    /**
     * Adds a style to the table.
     * @param name Name of style to add
     * @param attrs Rtf attributes which defines the style (may be null)
     */
    public void addStyle(String name, RtfAttributes attrs) {
        nameTable.addElement(name);
        if (attrs != null) {
            attrTable.put(name, attrs);
        }
        // Reference numbers are assigned sequentially starting at startIndex.
        styles.put(name, nameTable.size() - 1 + startIndex);
    }

    /**
     * Adds the style attributes to the given attributes.
     * @param name Name of style, of which the attributes will copied to attr
     * @param attr Default rtf attributes
     * @return Status value (STATUS_OK, or STATUS_DEFAULT if the default style was substituted)
     */
    public int addStyleToAttributes(String name, RtfAttributes attr) {
        // Sets status to ok
        int status = STATUS_OK;
        // Gets the style number from table (no cast needed with the generified map)
        Integer style = styles.get(name);
        if (style == null && !name.equals(defaultStyleName)) {
            // If style not found, and style was not the default style, try the default style
            name = defaultStyleName;
            style = styles.get(name);
            // set status for default style setting
            status = STATUS_DEFAULT;
        }
        // Returns the status for invalid styles
        if (style == null) {
            return status;
        }
        // Adds the attributes to default attributes, if not available in default attributes
        attr.set("cs", style);
        RtfAttributes rtfAttr = attrTable.get(name);
        if (rtfAttr != null) {
            for (Iterator names = rtfAttr.nameIterator(); names.hasNext();) {
                String attrName = (String) names.next();
                if (!attr.isSet(attrName)) {
                    Integer i = (Integer) rtfAttr.getValue(attrName);
                    if (i == null) {
                        attr.set(attrName);
                    } else {
                        attr.set(attrName, i);
                    }
                }
            }
        }
        return status;
    }

    /**
     * Writes the rtf style sheet table.
     * @param header Rtf header is the parent
     * @throws IOException On write error
     */
    public void writeStyleSheet(RtfHeader header) throws IOException {
        if (styles == null || styles.size() == 0) {
            return;
        }
        header.writeGroupMark(true);
        header.writeControlWord("stylesheet");
        int number = nameTable.size();
        for (int i = 0; i < number; i++) {
            String name = nameTable.elementAt(i);
            header.writeGroupMark(true);
            header.writeControlWord("*\\" + this.getRtfStyleReference(name));
            RtfAttributes attrs = attrTable.get(name);
            if (attrs != null) {
                header.writeAttributes(attrs, RtfText.ATTR_NAMES);
                header.writeAttributes(attrs, RtfText.ALIGNMENT);
            }
            header.write(name + ";");
            header.writeGroupMark(false);
        }
        header.writeGroupMark(false);
    }

    /**
     * Gets the rtf style reference from the table.
     * @param name Name of Style; must have been registered via addStyle
     *             (only called with names taken from nameTable)
     * @return Rtf attribute of the style reference
     */
    private String getRtfStyleReference(String name) {
        return "cs" + styles.get(name).toString();
    }
}
| |
package diplom;
import java.io.IOException;
//import java.net.URI;
//import java.nio.charset.Charset;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
//import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
//import org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ToolRunner;
public class LoadIPDns {

    private static final String TABLE_NAME = "rdns";

    /**
     * Zero-pads one dotted-quad octet to a fixed width of three digits
     * (e.g. "7" -> "007", "42" -> "042", "172" -> "172") so that the
     * lexicographic ordering of HBase row keys matches numeric IP ordering.
     * Replaces four copies of the same if/else-if padding logic.
     */
    private static String padOctet(String octet) {
        int value = Integer.parseInt(octet);
        if (value < 10) {
            return "00" + octet;
        } else if (value < 100) {
            return "0" + octet;
        }
        return octet;
    }

    /**
     * Maps a CSV input line of the form "ip,dnsname" to an HBase KeyValue in
     * family "dnsnames", qualifier "rdns", keyed by the zero-padded IP.
     */
    public static class Map extends Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {

        ImmutableBytesWritable hkey = new ImmutableBytesWritable();
        // Lines seen by this mapper JVM; printed in cleanup() for diagnostics.
        static int count = 0;

        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] fields = line.split(",");
            count++;
            if (fields.length == 2) {
                String[] ipParts = fields[0].split("\\.");
                // Skip records whose first field is not a dotted quad instead of
                // failing the whole task with an ArrayIndexOutOfBoundsException.
                if (ipParts.length != 4) {
                    return;
                }
                String ip = padOctet(ipParts[0]) + "." + padOctet(ipParts[1]) + "."
                        + padOctet(ipParts[2]) + "." + padOctet(ipParts[3]);
                hkey.set(ip.getBytes());
                KeyValue kv = new KeyValue(hkey.get(), Bytes.toBytes("dnsnames"), Bytes.toBytes("rdns"),
                        Bytes.toBytes(fields[1]));
                context.write(hkey, kv);
                value.clear();
                hkey = new ImmutableBytesWritable();
            }
        }

        public void cleanup(Context context) {
            System.out.println(count);
        }
    }

    /**
     * Bulk-loads previously generated HFiles into the "rdns" table, creating
     * the table pre-split on the first row key of each HFile when it does not
     * exist yet. Currently unused (see commented-out call in main).
     */
    @SuppressWarnings("unused")
    private static void loadHFiles() throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.addResource("hbase-default.xml");
        conf.addResource("hbase-site.xml");
        HBaseAdmin hadmin = new HBaseAdmin(conf);
        Path hfofDir = new Path("rdns_result/dnsnames");
        FileSystem fs = hfofDir.getFileSystem(conf);
        int length = 0;
        byte[][] splits = new byte[18000][];
        Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(hfofDir));
        for (Path hfile : hfiles) {
            if (hfile.getName().startsWith("_")) continue;
            HFile.Reader hfr = HFile.createReader(fs, hfile, new CacheConfig(conf));
            final byte[] first;
            try {
                hfr.loadFileInfo();
                // NOTE(review): getFirstRowKey() may return null for an empty
                // HFile, which would NPE on clone() below — confirm inputs.
                first = hfr.getFirstRowKey();
            } finally {
                hfr.close();
            }
            splits[length] = first.clone();
            length++;
        }
        // Compact the fixed-size scratch array to the number of HFiles found.
        byte[][] splits1 = new byte[length][];
        for (int i = 0; i < splits1.length; i++) {
            splits1[i] = splits[i];
        }
        Arrays.sort(splits1, Bytes.BYTES_COMPARATOR);
        HTableDescriptor desc = new HTableDescriptor(TABLE_NAME);
        HColumnDescriptor family = new HColumnDescriptor("dnsnames");
        desc.addFamily(family);
        conf.setInt("zookeeper.session.timeout", 600000);
        // Create the pre-split table only when it does not already exist
        // (the original empty then-branch is folded into a negated condition).
        if (!hadmin.tableExists(TABLE_NAME)) {
            hadmin.createTable(desc, splits1);
        }
        String[] args1 = new String[2];
        args1[0] = "rdns_result";
        args1[1] = TABLE_NAME;
        ToolRunner.run(new LoadIncrementalHFiles(HBaseConfiguration.create()), args1);
        hadmin.close();
    }

    /**
     * Configures and runs the MapReduce job that turns "ip,dnsname" CSV input
     * (args[0]) into HFiles (args[1]) ready for incremental bulk load.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("hbase.table.name", TABLE_NAME);
        Job job = new Job(conf, "loadipdns");
        job.setJarByClass(LoadIPDns.class);
        job.setOutputKeyClass(ImmutableBytesWritable.class);
        job.setOutputValueClass(KeyValue.class);
        job.setMapperClass(Map.class);
        // NOTE(review): configureIncrementalLoad below installs its own sorting
        // reducer, so this identity Reducer setting is likely overridden.
        job.setReducerClass(Reducer.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(HFileOutputFormat.class);
        HTable hTable = new HTable(conf, TABLE_NAME);
        HFileOutputFormat.configureIncrementalLoad(job, hTable);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.waitForCompletion(true);
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Created by IntelliJ IDEA.
* User: max
* Date: Jun 6, 2002
* Time: 4:54:58 PM
* To change template for new class use
* Code Style | Class Templates options (Tools | IDE Options).
*/
package com.intellij.openapi.editor.actions;
import com.intellij.ide.ui.customization.CustomActionsSchema;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.ActionPlaces;
import com.intellij.openapi.actionSystem.ActionPopupMenu;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.event.EditorMouseEventArea;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.util.EditorPopupHandler;
import java.awt.*;
import java.awt.event.MouseEvent;
import java.util.List;
public class EditorActionUtil {
// Command-group markers used to group consecutive edit/delete actions.
// NOTE(review): declared as Object rather than Key<?>; visible code only
// uses them as identity markers — confirm before tightening the type.
protected static final Object EDIT_COMMAND_GROUP = Key.create("EditGroup");
public static final Object DELETE_COMMAND_GROUP = Key.create("DeleteGroup");
// Utility class: every member is static, so instantiation is forbidden.
private EditorActionUtil() {
}
/**
 * Scrolls the editor by the given number of lines and, when the caret ends up
 * outside the fully visible line range, moves it back just inside that range.
 */
public static void scrollRelatively(Editor editor, int lineShift) {
    if (lineShift != 0) {
        int newVerticalOffset = editor.getScrollingModel().getVerticalScrollOffset()
                + lineShift * editor.getLineHeight();
        editor.getScrollingModel().scrollVertically(newVerticalOffset);
    }
    Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
    if (visibleArea == null) {
        return;
    }
    int caretLine = editor.getCaretModel().getVisualPosition().line;
    // First/last lines that are completely (not partially) visible.
    int firstFullyVisible = editor.xyToVisualPosition(new Point(0, visibleArea.y)).line + 1;
    int lastFullyVisible =
            editor.xyToVisualPosition(new Point(0, visibleArea.y + visibleArea.height)).line - 2;
    if (caretLine < firstFullyVisible) {
        editor.getCaretModel().moveCaretRelatively(0, firstFullyVisible - caretLine, false, false, true);
    }
    else if (caretLine > lastFullyVisible) {
        editor.getCaretModel().moveCaretRelatively(0, lastFullyVisible - caretLine, false, false, true);
    }
}
/**
 * Moves the caret by the given column/line deltas and scrolls so the caret
 * keeps the same vertical position (pixel offset) within the viewport.
 */
public static void moveCaretRelativelyAndScroll(Editor editor,
                                                int columnShift,
                                                int lineShift,
                                                boolean withSelection) {
    // How far (in pixels) the caret currently sits below the viewport top.
    Rectangle viewport = editor.getScrollingModel().getVisibleArea();
    Point caretBefore = editor.visualPositionToXY(editor.getCaretModel().getVisualPosition());
    int caretVerticalOffset = caretBefore.y - viewport.y;
    editor.getCaretModel().moveCaretRelatively(columnShift, lineShift, withSelection, false, false);
    // Restore the caret's on-screen vertical position after the move.
    Point caretAfter = editor.visualPositionToXY(editor.getCaretModel().getVisualPosition());
    editor.getScrollingModel().scrollVertically(caretAfter.y - caretVerticalOffset);
}
/**
 * Re-indents the given document line by {@code indent} columns (negative
 * values outdent), rebuilding the leading whitespace with tabs and/or spaces
 * according to the editor settings, and keeps the caret on the same text.
 */
@SuppressWarnings({"AssignmentToForLoopParameter"})
public static void indentLine(Project project, Editor editor, int lineNumber, int indent) {
    EditorSettings editorSettings = editor.getSettings();
    Document document = editor.getDocument();
    // [lineStart, spacesEnd) will delimit the line's existing leading whitespace.
    int spacesEnd = 0;
    int lineStart = 0;
    if (lineNumber < document.getLineCount()) {
        lineStart = document.getLineStartOffset(lineNumber);
        int lineEnd = document.getLineEndOffset(lineNumber);
        spacesEnd = lineStart;
        CharSequence text = document.getCharsSequence();
        // Advance spacesEnd past tabs and spaces, stopping at line end.
        for (; spacesEnd <= lineEnd; spacesEnd++) {
            if (spacesEnd == lineEnd) {
                break;
            }
            char c = text.charAt(spacesEnd);
            if (c != '\t' && c != ' ') {
                break;
            }
        }
    }
    // Current indent width in columns (tab-aware via logical position), then
    // the target width, clamped at zero.
    int oldLength = editor.offsetToLogicalPosition(spacesEnd).column;
    int newLength = oldLength + indent;
    if (newLength < 0) {
        newLength = 0;
    }
    // Build the replacement whitespace: whole tabs first when enabled, spaces
    // for the remainder.
    StringBuffer buf = new StringBuffer(newLength);
    int tabSize = editorSettings.getTabSize(project);
    for (int i = 0; i < newLength;) {
        if (tabSize > 0 && editorSettings.isUseTabCharacter(project) && i + tabSize <= newLength) {
            buf.append('\t');
            i += tabSize;
        }
        else {
            buf.append(' ');
            i++;
        }
    }
    // Compute the caret's new offset BEFORE mutating the document: only carets
    // at or beyond the old whitespace shift by the size difference.
    int newCaretOffset = editor.getCaretModel().getOffset();
    if (newCaretOffset >= spacesEnd) {
        newCaretOffset += buf.length() - (spacesEnd - lineStart);
    }
    if (buf.length() > 0) {
        if (spacesEnd > lineStart) {
            document.replaceString(lineStart, spacesEnd, buf.toString());
        }
        else {
            document.insertString(lineStart, buf.toString());
        }
    }
    else {
        // New indent is empty: just delete the old leading whitespace, if any.
        if (spacesEnd > lineStart) {
            document.deleteString(lineStart, spacesEnd);
        }
    }
    editor.getCaretModel().moveToOffset(newCaretOffset);
}
/**
 * Returns true when {@code offset} is the start of a word in {@code text}.
 * In camel-hump mode ({@code isCamel}) hump boundaries inside identifiers
 * also count as word starts.
 */
public static boolean isWordStart(CharSequence text, int offset, boolean isCamel) {
    char prev = offset > 0 ? text.charAt(offset - 1) : 0;
    char current = text.charAt(offset);
    char next = offset + 1 < text.length() ? text.charAt(offset + 1) : 0;
    boolean prevIsIdentifier = Character.isJavaIdentifierPart(prev);
    boolean currentIsIdentifier = Character.isJavaIdentifierPart(current);
    // A non-identifier char followed by an identifier char always starts a word.
    if (!prevIsIdentifier && currentIsIdentifier) {
        return true;
    }
    // Camel mode: a word may also start inside an identifier, at a hump boundary.
    if (isCamel && prevIsIdentifier && currentIsIdentifier) {
        boolean humpBoundary =
                isLowerCaseOrDigit(prev) && Character.isUpperCase(current)
                || prev == '_' && current != '_'
                || Character.isUpperCase(prev) && Character.isUpperCase(current) && isLowerCaseOrDigit(next);
        if (humpBoundary) {
            return true;
        }
    }
    // Otherwise a word starts at a non-whitespace, non-identifier char that
    // follows whitespace or an identifier char.
    return (Character.isWhitespace(prev) || prevIsIdentifier)
            && !Character.isWhitespace(current) && !currentIsIdentifier;
}
/** True for characters treated as "lower-case" in camel-hump detection: lower-case letters and digits. */
private static boolean isLowerCaseOrDigit(char c) {
  return Character.isDigit(c) || Character.isLowerCase(c);
}
/**
 * Returns whether the character at {@code offset} is positioned just past the end of a word.
 * A word ends at a transition from an identifier character to a non-identifier character,
 * at a camel-hump boundary (when {@code isCamel} is set), or at the end of a run of
 * non-identifier, non-whitespace symbols.
 *
 * @param text    text being inspected
 * @param offset  position to test ({@code 0 <= offset < text.length()})
 * @param isCamel whether camel-hump boundaries count as word ends
 */
public static boolean isWordEnd(CharSequence text, int offset, boolean isCamel) {
  char before = offset > 0 ? text.charAt(offset - 1) : 0;
  char at = text.charAt(offset);
  char after = offset + 1 < text.length() ? text.charAt(offset + 1) : 0;
  boolean beforeIsIdPart = Character.isJavaIdentifierPart(before);
  boolean atIsIdPart = Character.isJavaIdentifierPart(at);
  // Leaving an identifier for a non-identifier character always ends a word.
  if (beforeIsIdPart && !atIsIdPart) {
    return true;
  }
  // Camel-hump boundaries inside an identifier: fooBar, foo_bar, HTMLParser.
  if (isCamel && beforeIsIdPart && atIsIdPart) {
    if (Character.isLowerCase(before) && Character.isUpperCase(at)) {
      return true;
    }
    if (before != '_' && at == '_') {
      return true;
    }
    if (Character.isUpperCase(before) && Character.isUpperCase(at) && Character.isLowerCase(after)) {
      return true;
    }
  }
  // A run of symbol characters ends where whitespace or an identifier begins.
  if (beforeIsIdPart || Character.isWhitespace(before)) {
    return false;
  }
  return atIsIdPart || Character.isWhitespace(at);
}
/**
 * Moves the caret to the start of the current line ("Home" action), honoring soft wraps,
 * folded regions and the "smart home" setting (which toggles between the first non-space
 * column and column zero).
 *
 * @param editor          target editor
 * @param isWithSelection whether the movement should extend the selection
 */
public static void moveCaretToLineStart(Editor editor, boolean isWithSelection) {
  Document document = editor.getDocument();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  EditorSettings editorSettings = editor.getSettings();
  int logCaretLine = caretModel.getLogicalPosition().line;
  VisualPosition currentVisCaret = caretModel.getVisualPosition();
  VisualPosition caretLogLineStartVis = editor.offsetToVisualPosition(document.getLineStartOffset(logCaretLine));
  if (currentVisCaret.line > caretLogLineStartVis.line) {
    // Caret is located not at the first visual line of soft-wrapped logical line.
    if (editorSettings.isSmartHome()) {
      moveCaretToStartOfSoftWrappedLine(editor, currentVisCaret, currentVisCaret.line - caretLogLineStartVis.line);
    }
    else {
      caretModel.moveToVisualPosition(new VisualPosition(currentVisCaret.line, 0));
    }
    setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
    editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    return;
  }
  // Skip folded lines: find the first logical line whose end is rendered on the caret's visual line.
  int logLineToUse = logCaretLine - 1;
  while (logLineToUse >= 0 && editor.offsetToVisualPosition(document.getLineEndOffset(logLineToUse)).line == currentVisCaret.line) {
    logLineToUse--;
  }
  logLineToUse++;
  if (logLineToUse >= document.getLineCount() || !editorSettings.isSmartHome()) {
    editor.getCaretModel().moveToLogicalPosition(new LogicalPosition(logLineToUse, 0));
  }
  else if (logLineToUse == logCaretLine) {
    // "Smart home" on a non-folded line: toggle between the first non-space column and column 0.
    int line = currentVisCaret.line;
    int column;
    if (currentVisCaret.column == 0) {
      column = findSmartIndentColumn(editor, currentVisCaret.line);
    }
    else {
      column = findFirstNonSpaceColumnOnTheLine(editor, currentVisCaret.line);
      if (column >= currentVisCaret.column) {
        column = 0;
      }
    }
    caretModel.moveToVisualPosition(new VisualPosition(line, Math.max(column, 0)));
  }
  else {
    LogicalPosition logLineEndLog = editor.offsetToLogicalPosition(document.getLineEndOffset(logLineToUse));
    VisualPosition logLineEndVis = editor.logicalToVisualPosition(logLineEndLog);
    if (logLineEndLog.softWrapLinesOnCurrentLogicalLine > 0) {
      moveCaretToStartOfSoftWrappedLine(editor, logLineEndVis, logLineEndLog.softWrapLinesOnCurrentLogicalLine);
    }
    else {
      int line = logLineEndVis.line;
      int column = 0;
      if (currentVisCaret.column == 0 && editorSettings.isSmartHome()) {
        // BUG FIX: the smart-indent column was previously computed but discarded
        // (bare call to findSmartIndentColumn), leaving the caret at column 0
        // regardless of indentation. Assign the result so "smart home" works here too.
        column = findSmartIndentColumn(editor, line);
      }
      caretModel.moveToVisualPosition(new VisualPosition(line, column));
    }
  }
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
  editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
}
/**
 * Moves the caret toward the start of a soft wrap-introduced visual line, implementing
 * "smart home" behavior for soft-wrapped text: first past the wrap indent, then to column
 * zero, then up onto the previous visual line.
 *
 * @param editor           target editor
 * @param currentVisual    current visual caret position
 * @param softWrappedLines number of soft wrap-introduced visual lines preceding the current
 *                         one within the same logical line
 */
private static void moveCaretToStartOfSoftWrappedLine(Editor editor, VisualPosition currentVisual, int softWrappedLines) {
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition startLineLogical = editor.visualToLogicalPosition(new VisualPosition(currentVisual.line, 0));
  int startLineOffset = editor.logicalPositionToOffset(startLineLogical);
  SoftWrapModel softWrapModel = editor.getSoftWrapModel();
  SoftWrap softWrap = softWrapModel.getSoftWrap(startLineOffset);
  if (softWrap == null) {
    // Don't expect to be here.
    // Fallback: toggle between the first non-space column and column 0, like plain smart home.
    int column = findFirstNonSpaceColumnOnTheLine(editor, currentVisual.line);
    int columnToMove = column;
    if (currentVisual.column <= column && currentVisual.column > 0) {
      columnToMove = 0;
    }
    caretModel.moveToVisualPosition(new VisualPosition(currentVisual.line, columnToMove));
    return;
  }
  if (currentVisual.column > softWrap.getIndentInColumns()) {
    // Caret is after the soft wrap indent: jump to the wrap position itself.
    caretModel.moveToOffset(softWrap.getStart());
  }
  else if (currentVisual.column > 0) {
    // Caret is inside the soft wrap indent: jump to visual column zero.
    caretModel.moveToVisualPosition(new VisualPosition(currentVisual.line, 0));
  }
  else {
    // We assume that caret is already located at zero visual column of soft-wrapped line if control flow reaches this place.
    // Move up one visual line, landing on that line's indent (or first non-space column).
    int newVisualCaretLine = currentVisual.line - 1;
    int newVisualCaretColumn = -1;
    if (softWrappedLines > 1) {
      int offset = editor.logicalPositionToOffset(editor.visualToLogicalPosition(new VisualPosition(newVisualCaretLine, 0)));
      SoftWrap prevLineSoftWrap = softWrapModel.getSoftWrap(offset);
      if (prevLineSoftWrap != null) {
        newVisualCaretColumn = prevLineSoftWrap.getIndentInColumns();
      }
    }
    if (newVisualCaretColumn < 0) {
      newVisualCaretColumn = findFirstNonSpaceColumnOnTheLine(editor, newVisualCaretLine);
    }
    caretModel.moveToVisualPosition(new VisualPosition(newVisualCaretLine, newVisualCaretColumn));
  }
}
/**
 * Finds the indent column to use for "smart home": the first non-space column on the given
 * visual line, or, if that line is blank, on the closest non-blank visual line above it.
 * Falls back to column zero when every line above is blank.
 */
private static int findSmartIndentColumn(Editor editor, int visualLine) {
  int line = visualLine;
  while (line >= 0) {
    int column = findFirstNonSpaceColumnOnTheLine(editor, line);
    if (column >= 0) {
      return column;
    }
    line--;
  }
  return 0;
}
/**
 * Tries to find visual column that points to the first non-white space symbol at the visual line at the given editor.
 *
 * @param editor target editor
 * @param visualLineNumber target visual line
 * @return visual column that points to the first non-white space symbol at the target visual line if the one exists;
 *         <code>'-1'</code> otherwise
 */
public static int findFirstNonSpaceColumnOnTheLine(Editor editor, int visualLineNumber) {
  Document document = editor.getDocument();
  VisualPosition visLine = new VisualPosition(visualLineNumber, 0);
  int logLine = editor.visualToLogicalPosition(visLine).line;
  int logLineStartOffset = document.getLineStartOffset(logLine);
  int logLineEndOffset = document.getLineEndOffset(logLine);
  LogicalPosition logLineStart = editor.offsetToLogicalPosition(logLineStartOffset);
  VisualPosition visLineStart = editor.logicalToVisualPosition(logLineStart);
  boolean softWrapIntroducedLine = visLineStart.line != visualLineNumber;
  if (!softWrapIntroducedLine) {
    // Plain (non-soft-wrapped) visual line: scan the document text of the logical line directly.
    int offset = findFirstNonSpaceOffsetInRange(document.getCharsSequence(), logLineStartOffset, logLineEndOffset);
    if (offset >= 0) {
      return EditorUtil.calcColumnNumber(editor, document.getCharsSequence(), logLineStartOffset, offset);
    }
    else {
      return -1;
    }
  }
  // The target visual line was introduced by a soft wrap: walk the soft wraps registered for this
  // logical line, counting their injected line feeds until the target visual line is reached.
  int lineFeedsToSkip = visualLineNumber - visLineStart.line;
  List<? extends SoftWrap> softWraps = editor.getSoftWrapModel().getSoftWrapsForLine(logLine);
  for (SoftWrap softWrap : softWraps) {
    CharSequence softWrapText = softWrap.getText();
    int softWrapLineFeedsNumber = StringUtil.countNewLines(softWrapText);
    if (softWrapLineFeedsNumber < lineFeedsToSkip) {
      lineFeedsToSkip -= softWrapLineFeedsNumber;
      continue;
    }
    // Point to the first non-white space symbol at the target soft wrap visual line or to the first non-white space symbol
    // of document line that follows it if possible.
    int softWrapTextLength = softWrapText.length();
    boolean skip = true;
    for (int j = 0; j < softWrapTextLength; j++) {
      if (softWrapText.charAt(j) == '\n') {
        skip = --lineFeedsToSkip > 0;
        continue;
      }
      if (skip) {
        continue;
      }
      // 'j' is now inside the soft wrap's virtual text on the target visual line.
      int nextSoftWrapLineFeedOffset = StringUtil.indexOf(softWrapText, '\n', j, softWrapTextLength);
      int end = findFirstNonSpaceOffsetInRange(softWrapText, j, softWrapTextLength);
      if (end >= 0) {
        // Non space symbol is contained at soft wrap text after offset that corresponds to the target visual line start.
        if (nextSoftWrapLineFeedOffset < 0 || end < nextSoftWrapLineFeedOffset) {
          return EditorUtil.calcColumnNumber(editor, softWrapText, j, end);
        }
        else {
          return -1;
        }
      }
      if (nextSoftWrapLineFeedOffset >= 0) {
        // There are soft wrap-introduced visual lines after the target one
        return -1;
      }
    }
    // The soft wrap's virtual text was all whitespace: continue scanning the document text after the wrap.
    int end = findFirstNonSpaceOffsetInRange(document.getCharsSequence(), softWrap.getStart(), logLineEndOffset);
    if (end >= 0) {
      return EditorUtil.calcColumnNumber(editor, document.getCharsSequence(), softWrap.getStart(), end);
    }
    else {
      return -1;
    }
  }
  return -1;
}
/**
 * Returns the offset of the first non-space symbol (anything other than space or tab) on the
 * given document line, or the line end offset when the line is blank.
 *
 * @param document   document to inspect
 * @param lineNumber target document line
 */
public static int findFirstNonSpaceOffsetOnTheLine(Document document, int lineNumber) {
  int start = document.getLineStartOffset(lineNumber);
  int end = document.getLineEndOffset(lineNumber);
  int offset = findFirstNonSpaceOffsetInRange(document.getCharsSequence(), start, end);
  if (offset < 0) {
    return end;
  }
  return offset;
}
/**
 * Tries to find non white space symbol at the given range at the given document.
 * Only the space and tab characters are treated as white space here.
 *
 * @param text  text to be inspected
 * @param start target start offset (inclusive)
 * @param end   target end offset (exclusive)
 * @return index of the first non-white space character at the given document at the given range if the one is found;
 *         <code>'-1'</code> otherwise
 */
public static int findFirstNonSpaceOffsetInRange(CharSequence text, int start, int end) {
  int i = start;
  while (i < end) {
    char c = text.charAt(i);
    boolean isIndentChar = c == ' ' || c == '\t';
    if (!isIndentChar) {
      return i;
    }
    i++;
  }
  return -1;
}
/**
 * Moves the caret to the end of the current visual line ("End" action), honoring soft wraps.
 * When the line has trailing whitespace, the action toggles between the last non-space symbol
 * and the true visual line end.
 *
 * @param editor          target editor
 * @param isWithSelection whether the movement should extend the selection
 */
public static void moveCaretToLineEnd(Editor editor, boolean isWithSelection) {
  Document document = editor.getDocument();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  SoftWrapModel softWrapModel = editor.getSoftWrapModel();
  int lineNumber = editor.getCaretModel().getLogicalPosition().line;
  if (lineNumber >= document.getLineCount()) {
    // Caret is on a virtual line below the document end: just normalize to column 0 there.
    LogicalPosition pos = new LogicalPosition(lineNumber, 0);
    editor.getCaretModel().moveToLogicalPosition(pos);
    setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
    editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
    return;
  }
  VisualPosition currentVisualCaret = editor.getCaretModel().getVisualPosition();
  VisualPosition visualEndOfLineWithCaret
    = new VisualPosition(currentVisualCaret.line, EditorUtil.getLastVisualLineColumnNumber(editor, currentVisualCaret.line));
  // There is a possible case that the caret is already located at the visual end of line and the line is soft wrapped.
  // We want to move the caret to the end of the next visual line then.
  if (currentVisualCaret.equals(visualEndOfLineWithCaret)) {
    LogicalPosition logical = editor.visualToLogicalPosition(visualEndOfLineWithCaret);
    int offset = editor.logicalPositionToOffset(logical);
    if (offset < editor.getDocument().getTextLength()) {
      SoftWrap softWrap = softWrapModel.getSoftWrap(offset);
      if (softWrap == null) {
        // Same offset may correspond to positions on different visual lines in case of soft wraps presence
        // (all soft-wrap introduced virtual text is mapped to the same offset as the first document symbol after soft wrap).
        // Hence, we check for soft wraps presence at two offsets.
        softWrap = softWrapModel.getSoftWrap(offset + 1);
      }
      int line = currentVisualCaret.line;
      int column = currentVisualCaret.column;
      if (softWrap != null) {
        line++;
        column = EditorUtil.getLastVisualLineColumnNumber(editor, line);
      }
      visualEndOfLineWithCaret = new VisualPosition(line, column);
    }
  }
  int offset;
  int newOffset;
  LogicalPosition logLineEnd = editor.visualToLogicalPosition(visualEndOfLineWithCaret);
  offset = editor.logicalPositionToOffset(logLineEnd);
  lineNumber = logLineEnd.line;
  newOffset = offset;
  // Walk backwards over trailing spaces/tabs; stop at the first non-space symbol,
  // and abandon the walk entirely if a soft wrap boundary is crossed.
  CharSequence text = document.getCharsSequence();
  for (int i = newOffset - 1; i >= document.getLineStartOffset(lineNumber); i--) {
    if (softWrapModel.getSoftWrap(i) != null) {
      newOffset = offset;
      break;
    }
    if (text.charAt(i) != ' ' && text.charAt(i) != '\t') {
      break;
    }
    newOffset = i;
  }
  // Move to the calculated end of visual line if caret is located on a last non-white space symbols on a line and there are
  // remaining white space symbols.
  if (newOffset == offset || newOffset == caretModel.getOffset()) {
    caretModel.moveToVisualPosition(visualEndOfLineWithCaret);
  }
  else {
    caretModel.moveToOffset(newOffset);
  }
  editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
}
/**
 * Moves the caret forward to the next word start (honoring the camel-words setting),
 * searching within the current line and, when the caret is at the line end, the next line.
 *
 * @param editor          target editor
 * @param isWithSelection whether the movement should extend the selection
 */
public static void moveCaretToNextWord(Editor editor, boolean isWithSelection) {
  Document document = editor.getDocument();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  int offset = caretModel.getOffset();
  CharSequence text = document.getCharsSequence();
  if (offset == document.getTextLength() - 1) {
    // Caret is at the last character; nowhere to move.
    return;
  }
  int newOffset = offset + 1;
  int lineNumber = caretModel.getLogicalPosition().line;
  if (lineNumber >= document.getLineCount()) return;
  int maxOffset = document.getLineEndOffset(lineNumber);
  if (newOffset > maxOffset) {
    // Caret was at the end of the line: extend the search to the end of the next line.
    if (lineNumber + 1 >= document.getLineCount()) {
      return;
    }
    maxOffset = document.getLineEndOffset(lineNumber + 1);
  }
  boolean camel = editor.getSettings().isCamelWords();
  // Advance until a word-start boundary is found (or the search limit is reached).
  for (; newOffset < maxOffset; newOffset++) {
    if (isWordStart(text, newOffset, camel)) {
      break;
    }
  }
  caretModel.moveToOffset(newOffset);
  editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
}
/**
 * After a caret movement, either extends the selection up to the new caret position (for
 * "with selection" movement actions) or removes any existing selection.
 *
 * @param editor              target editor
 * @param isWithSelection     whether the preceding movement should produce a selection
 * @param selectionStart      lead selection offset captured before the movement
 * @param blockSelectionStart anchor position for column (block) selection mode
 */
private static void setupSelection(Editor editor,
                                   boolean isWithSelection,
                                   int selectionStart, LogicalPosition blockSelectionStart) {
  SelectionModel selectionModel = editor.getSelectionModel();
  if (!isWithSelection) {
    selectionModel.removeSelection();
    return;
  }
  CaretModel caretModel = editor.getCaretModel();
  if (editor.isColumnMode()) {
    // Column mode selects the rectangle between the anchor and the caret.
    selectionModel.setBlockSelection(blockSelectionStart, caretModel.getLogicalPosition());
  }
  else {
    selectionModel.setSelection(selectionStart, caretModel.getVisualPosition(), caretModel.getOffset());
  }
}
/**
 * Moves the caret backward to the previous word start (honoring the camel-words setting),
 * searching no further back than the end of the previous line.
 *
 * @param editor          target editor
 * @param isWithSelection whether the movement should extend the selection
 */
public static void moveCaretToPreviousWord(Editor editor, boolean isWithSelection) {
  Document document = editor.getDocument();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  int offset = editor.getCaretModel().getOffset();
  if (offset == 0) return;
  int lineNumber = editor.getCaretModel().getLogicalPosition().line;
  CharSequence text = document.getCharsSequence();
  int newOffset = offset - 1;
  // Lower search bound: end of the previous line, or document start when on the first line.
  int minOffset = lineNumber > 0 ? document.getLineEndOffset(lineNumber - 1) : 0;
  boolean camel = editor.getSettings().isCamelWords();
  for (; newOffset > minOffset; newOffset--) {
    if (isWordStart(text, newOffset, camel)) break;
  }
  editor.getCaretModel().moveToOffset(newOffset);
  editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
}
/**
 * Scrolls one screen up and moves the caret the same number of visual lines ("Page Up"),
 * keeping the caret's position on screen.
 */
public static void moveCaretPageUp(Editor editor, boolean isWithSelection) {
  ((EditorEx)editor).stopOptimizedScrolling();
  int lineHeight = editor.getLineHeight();
  Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
  int linesPerScreen = visibleArea.height / lineHeight;
  // Align the scroll offset to a line boundary, then shift a whole screen up.
  int newScrollOffset = visibleArea.y - visibleArea.y % lineHeight - linesPerScreen * lineHeight;
  editor.getScrollingModel().scrollVertically(newScrollOffset);
  editor.getCaretModel().moveCaretRelatively(0, -linesPerScreen, isWithSelection, editor.isColumnMode(), true);
}
/**
 * Scrolls one screen down and moves the caret the same number of visual lines ("Page Down"),
 * without scrolling past the bottom of the content.
 */
public static void moveCaretPageDown(Editor editor, boolean isWithSelection) {
  ((EditorEx)editor).stopOptimizedScrolling();
  int lineHeight = editor.getLineHeight();
  Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
  int linesPerScreen = visibleArea.height / lineHeight;
  // Clamp the target scroll offset so the view never goes past the content bottom.
  int allowedBottom = ((EditorEx)editor).getContentSize().height - visibleArea.height;
  int targetScrollOffset = visibleArea.y - visibleArea.y % lineHeight + linesPerScreen * lineHeight;
  editor.getScrollingModel().scrollVertically(Math.min(allowedBottom, targetScrollOffset));
  editor.getCaretModel().moveCaretRelatively(0, linesPerScreen, isWithSelection, editor.isColumnMode(), true);
}
/**
 * Moves the caret to the first fully visible line of the current screen, keeping its column.
 */
public static void moveCaretPageTop(Editor editor, boolean isWithSelection) {
  int lineHeight = editor.getLineHeight();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
  // Round the top edge up to the first line that is fully (not partially) visible.
  int lineNumber = visibleArea.y / lineHeight;
  if (visibleArea.y % lineHeight > 0) {
    lineNumber++;
  }
  VisualPosition target = new VisualPosition(lineNumber, caretModel.getVisualPosition().column);
  caretModel.moveToVisualPosition(target);
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
}
/**
 * Moves the caret to the last fully visible line of the current screen, keeping its column.
 */
public static void moveCaretPageBottom(Editor editor, boolean isWithSelection) {
  int lineHeight = editor.getLineHeight();
  SelectionModel selectionModel = editor.getSelectionModel();
  int selectionStart = selectionModel.getLeadSelectionOffset();
  CaretModel caretModel = editor.getCaretModel();
  LogicalPosition blockSelectionStart = selectionModel.hasBlockSelection()
                                        ? selectionModel.getBlockStart()
                                        : caretModel.getLogicalPosition();
  Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
  // Last line whose bottom edge still fits inside the visible area.
  int lastVisibleLine = (visibleArea.y + visibleArea.height) / lineHeight - 1;
  VisualPosition target = new VisualPosition(lastVisibleLine, caretModel.getVisualPosition().column);
  caretModel.moveToVisualPosition(target);
  setupSelection(editor, isWithSelection, selectionStart, blockSelectionStart);
}
/**
 * Creates an editor mouse handler that shows the context-menu action group with the given id
 * as a popup when the user invokes a popup gesture inside the editing area.
 *
 * @param groupId id of the action group to show (resolved through the customizable actions schema)
 */
public static EditorPopupHandler createEditorPopupHandler(final String groupId) {
  return new EditorPopupHandler() {
    public void invokePopup(final EditorMouseEvent event) {
      if (!event.isConsumed() && event.getArea() == EditorMouseEventArea.EDITING_AREA) {
        // Resolve the (possibly user-customized) action group and show it as a popup menu.
        ActionGroup group = (ActionGroup)CustomActionsSchema.getInstance().getCorrectedAction(groupId);
        ActionPopupMenu popupMenu = ActionManager.getInstance().createActionPopupMenu(ActionPlaces.EDITOR_POPUP, group);
        MouseEvent e = event.getMouseEvent();
        final Component c = e.getComponent();
        // Only show the menu when the originating component is still on screen.
        if (c != null && c.isShowing()) {
          popupMenu.getComponent().show(c, e.getX(), e.getY());
        }
        e.consume();
      }
    }
  };
}
}
| |
package semanticMarkup.ling.learn.knowledge;
import java.util.Iterator;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import semanticMarkup.know.lib.WordNetPOSKnowledgeBase;
import semanticMarkup.ling.learn.dataholder.DataHolder;
import semanticMarkup.ling.learn.dataholder.WordPOSKey;
import semanticMarkup.ling.learn.dataholder.WordPOSValue;
import semanticMarkup.ling.learn.utility.LearnerUtility;
/**
* Learn a set of seed nouns (singular and plural forms) by applying a number of
* rules based on heuristics on the collection.
*
* @author Dongye
*
*/
public class HeuristicNounLearnerUseSuffix implements IModule {

	/** Shared learner utilities; provides access to the WordNet POS knowledge base. */
	private LearnerUtility myLearnerUtility;

	public HeuristicNounLearnerUseSuffix(LearnerUtility learnerUtility) {
		this.myLearnerUtility = learnerUtility;
	}

	@Override
	public void run(DataHolder dataholderHandler) {
		this.posBySuffix(dataholderHandler);
	}

	/**
	 * For each unknown word in the unknownwords table, separate root and suffix; if the
	 * root is a word in WordNet or in the unknownwords table, make the unknown word a "b"
	 * (boundary) word.
	 *
	 * suffix: -fid(adj), -form (adj), -ish(adj), -less(adj), -like (adj)),
	 * -merous(adj), -most(adj), -shaped(adj), -ous(adj)
	 *
	 * @param dataholderHandler data holder whose unknown-word table is scanned and updated
	 */
	public void posBySuffix(DataHolder dataholderHandler) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger myLogger = Logger.getLogger("learn.posBySuffix");
		myLogger.trace("Enter posBySuffix");

		Iterator<Map.Entry<String, String>> iterator = dataholderHandler
				.getUnknownWordHolder().entrySet().iterator();

		while (iterator.hasNext()) {
			Map.Entry<String, String> unknownWordEntry = iterator.next();
			String unknownWord = unknownWordEntry.getKey();
			String unknownWordTag = unknownWordEntry.getValue();

			if (unknownWordTag.equals("unknown")) {
				// Apply both suffix heuristics; each updates the data holder on success.
				posBySuffixCase1Helper(dataholderHandler, unknownWord);
				posBySuffixCase2Helper(dataholderHandler, unknownWord);
			}
		}

		myLogger.trace("Quite posBySuffix");
	}

	/**
	 * Case 1: the word ends with one of the known adjective suffixes ({@code Constant.SUFFIX}).
	 * If the remaining base qualifies (see {@link #containSuffix}), the word is tagged as a
	 * boundary ("b") word in the data holder.
	 *
	 * @return true if the word was tagged as a boundary word
	 */
	public boolean posBySuffixCase1Helper(DataHolder dataholderHandler, String unknownWord) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger myLogger = Logger.getLogger("learn.posBySuffix");

		String pattern1 = "^[a-z_]+(" + Constant.SUFFIX + ")$";
		myLogger.debug("Pattern1: " + pattern1);

		if (unknownWord.matches(pattern1)) {
			// Reluctant group (.*?) splits the word into the shortest base plus a known suffix.
			Matcher matcher = Pattern
					.compile("(.*?)(" + Constant.SUFFIX + ")$").matcher(
							unknownWord);
			if ((unknownWord.matches("^[a-zA-Z0-9_-]+$")) && matcher.matches()) {
				myLogger.debug("posBySuffix - check word: " + unknownWord);
				String base = matcher.group(1);
				String suffix = matcher.group(2);
				if (this.containSuffix(dataholderHandler, unknownWord, base, suffix)) {
					myLogger.debug("Pass\n");
					dataholderHandler.updateDataHolder(unknownWord, "b", "*",
							"wordpos", 0);
					myLogger.debug("posBySuffix - set word: " + unknownWord);
					return true;
				} else {
					myLogger.debug("Not Pass\n");
				}
			}
		}
		return false;
	}

	/**
	 * Case 2: the word begins with an underscore/dot prefix character (e.g. "_nerved");
	 * such words are recorded directly as boundary ("b") words.
	 *
	 * @return true if the word was tagged as a boundary word
	 */
	public boolean posBySuffixCase2Helper(DataHolder dataholderHandler, String unknownWord) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger myLogger = Logger.getLogger("learn.posBySuffix");

		String pattern2 = "^[._.][a-z]+"; // , _nerved
		myLogger.debug("Pattern2: " + pattern2);

		if (unknownWord.matches(pattern2)) {
			dataholderHandler.getWordPOSHolder().put(
					new WordPOSKey(unknownWord, "b"),
					new WordPOSValue("*", 0, 0, null, null));
			myLogger.debug("posbysuffix set " + unknownWord
					+ " a boundary word\n");
			return true;
		}
		return false;
	}

	/**
	 * Return false or true depending on if the word contains the suffix as the suffix,
	 * based on WordNet lookups of the whole word and of the base, plus the data holder's
	 * unknown-word table.
	 *
	 * @param word   the full word being examined
	 * @param base   the word with the suffix removed
	 * @param suffix the candidate suffix
	 * @return true if the base/suffix split is accepted
	 */
	public boolean containSuffix(DataHolder dataholderHandler, String word, String base, String suffix) {
		PropertyConfigurator.configure("conf/log4j.properties");
		Logger myLogger = Logger.getLogger("learn.posBySuffix.containSuffix");
		myLogger.trace("Enter containSuffix");

		boolean flag = false; // return value
		boolean wordInWN = false; // if this word is in WordNet
		boolean baseInWN = false;

		WordNetPOSKnowledgeBase myWN = this.myLearnerUtility
				.getWordNetPOSKnowledgeBase();

		// check base
		if (base.length() == 0) {
			myLogger.trace("case 0");
			return true;
		}

		// BUG FIX: String.replaceAll returns a new string; the result was previously
		// discarded, so underscores (e.g. in "cup_shaped") were never stripped from the base.
		base = base.replaceAll("_", ""); // cup_shaped

		if (myWN.contains(word)) {
			myLogger.trace("case 1.1");
			wordInWN = true; // word is in WordNet
		} else {
			myLogger.trace("case 1.2");
			wordInWN = false;
		}

		if (myWN.contains(base)) {
			myLogger.trace("case 2.1");
			baseInWN = true;
		} else {
			myLogger.trace("case 2.2");
			baseInWN = false;
		}

		// if WN pos is adv, return 1: e.g. ly, or if $base is in
		// unknownwords table
		if (suffix.equals("ly")) {
			myLogger.trace("case 3.1");
			if (wordInWN) {
				if (myWN.isAdverb(word)) {
					return true;
				}
			}
			// if the word is in unknown word set, return true
			if (dataholderHandler.getUnknownWordHolder().containsKey(base)) {
				return true;
			}
		}
		// if WN recognize superlative, comparative adjs, return 1: e.g. er, est
		else if (suffix.equals("er") || suffix.equals("est")) {
			myLogger.trace("case 3.2");
			if (wordInWN) {
				boolean case1 = !myWN.isAdjective(word);
				boolean case2 = myWN.isAdjective(base);
				if (case1 && case2) {
					return true;
				} else {
					return false;
				}
			}
		}
		// if $base is in WN or unknownwords table, or if $word has sole pos
		// adj in WN, return 1: e.g. scalelike
		else {
			myLogger.trace("case 3.3");
			if (myWN.isSoleAdjective(word)) {
				return true;
			}
			if (baseInWN) {
				return true;
			}
			if (dataholderHandler.getUnknownWordHolder().containsKey(base)) {
				return true;
			}
		}

		return flag;
	}
}
| |
package com.conveyal.analysis.controllers;
import com.conveyal.analysis.AnalysisServerException;
import com.conveyal.analysis.UserPermissions;
import com.conveyal.analysis.components.TaskScheduler;
import com.conveyal.analysis.grids.SeamlessCensusGridExtractor;
import com.conveyal.analysis.models.DataGroup;
import com.conveyal.analysis.models.OpportunityDataset;
import com.conveyal.analysis.models.Region;
import com.conveyal.analysis.models.SpatialDataSource;
import com.conveyal.analysis.persistence.AnalysisCollection;
import com.conveyal.analysis.persistence.AnalysisDB;
import com.conveyal.analysis.persistence.Persistence;
import com.conveyal.analysis.util.FileItemInputStreamProvider;
import com.conveyal.analysis.util.HttpUtils;
import com.conveyal.analysis.util.JsonUtil;
import com.conveyal.file.FileStorage;
import com.conveyal.file.FileStorageFormat;
import com.conveyal.file.FileStorageKey;
import com.conveyal.file.FileUtils;
import com.conveyal.r5.analyst.FreeFormPointSet;
import com.conveyal.r5.analyst.Grid;
import com.conveyal.r5.analyst.PointSet;
import com.conveyal.r5.analyst.progress.NoopProgressListener;
import com.conveyal.r5.analyst.progress.Task;
import com.conveyal.r5.analyst.progress.WorkProduct;
import com.conveyal.r5.analyst.progress.WorkProductType;
import com.conveyal.r5.util.ExceptionUtils;
import com.conveyal.r5.util.InputStreamProvider;
import com.conveyal.r5.util.ProgressListener;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.io.Files;
import com.mongodb.QueryBuilder;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemFactory;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FilenameUtils;
import org.bson.types.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import spark.Request;
import spark.Response;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import static com.conveyal.analysis.datasource.DataSourceUtil.detectUploadFormatAndValidate;
import static com.conveyal.analysis.util.JsonUtil.toJson;
import static com.conveyal.file.FileCategory.GRIDS;
import static com.conveyal.r5.analyst.WebMercatorGridPointSet.parseZoom;
import static com.conveyal.r5.analyst.progress.WorkProductType.OPPORTUNITY_DATASET;
/**
* Controller that handles fetching opportunity datasets (grids and other pointset formats).
*/
public class OpportunityDatasetController implements HttpController {
private static final Logger LOG = LoggerFactory.getLogger(OpportunityDatasetController.class);
// Component Dependencies
private final FileStorage fileStorage;
private final TaskScheduler taskScheduler;
private final SeamlessCensusGridExtractor extractor;
// Database tables
private final AnalysisCollection<DataGroup> dataGroupCollection;
/**
 * @param fileStorage   backing file storage used to fetch and serve grid/pointset files
 * @param taskScheduler scheduler that runs heavy background work (e.g. census extraction)
 * @param extractor     extracts seamless census (LODES) grids for a region's bounds
 * @param database      analysis database; provides the "dataGroups" collection
 */
public OpportunityDatasetController (
        FileStorage fileStorage,
        TaskScheduler taskScheduler,
        SeamlessCensusGridExtractor extractor,
        AnalysisDB database
) {
    this.fileStorage = fileStorage;
    this.taskScheduler = taskScheduler;
    this.extractor = extractor;
    this.dataGroupCollection = database.getAnalysisCollection("dataGroups", DataGroup.class);
}
/**
 * In-memory store of upload status objects, surfaced to the UI while uploads run.
 * NOTE(review): this plain ArrayList appears to be mutated from HTTP handler methods —
 * confirm the HTTP layer serializes access, or guard it with synchronization.
 */
private final List<OpportunityDatasetUploadStatus> uploadStatuses = new ArrayList<>();

/** Wraps a download URL for the given file-storage key in a one-field JSON object. */
private ObjectNode getJsonUrl (FileStorageKey key) {
    return JsonUtil.objectNode().put("url", fileStorage.getURL(key));
}
/**
 * Registers a new upload status and prunes statuses that completed more than seven days ago,
 * so the in-memory list does not grow without bound.
 */
private void addStatusAndRemoveOldStatuses(OpportunityDatasetUploadStatus status) {
    uploadStatuses.add(status);
    LocalDateTime now = LocalDateTime.now();
    // Drop entries whose completion time (interpreted in the server's zone) is older than a week.
    uploadStatuses.removeIf(s -> s.completedAt != null &&
            LocalDateTime.ofInstant(s.completedAt.toInstant(), ZoneId.systemDefault()).isBefore(now.minusDays(7))
    );
}
/** HTTP handler: returns all opportunity datasets in the requested region that the caller may access. */
private Collection<OpportunityDataset> getRegionDatasets(Request req, Response res) {
    return Persistence.opportunityDatasets.findPermitted(
            QueryBuilder.start("regionId").is(req.params("regionId")).get(),
            UserPermissions.from(req)
    );
}
/**
 * HTTP handler: returns a download URL for a single opportunity dataset, if the caller is
 * permitted to see it. Only grid-format datasets can currently be returned for visualization.
 */
private Object getOpportunityDataset(Request req, Response res) {
    OpportunityDataset dataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req);
    if (dataset.format == FileStorageFormat.GRID) {
        return getJsonUrl(dataset.getStorageKey());
    } else {
        // Currently the UI can only visualize grids, not other kinds of datasets (freeform points).
        // We do generate a rasterized grid for each of the freeform pointsets we create, so ideally we'd redirect
        // to that grid for display and preview, but the freeform and corresponding grid pointset have different
        // IDs and there are no references between them.
        LOG.error("We cannot yet visualize freeform pointsets. Returning nothing to the UI.");
        return null;
    }
}
/** HTTP handler: returns the in-memory upload status entries belonging to the requested region. */
private List<OpportunityDatasetUploadStatus> getRegionUploadStatuses(Request req, Response res) {
    String regionId = req.params("regionId");
    List<OpportunityDatasetUploadStatus> matching = new ArrayList<>();
    for (OpportunityDatasetUploadStatus status : uploadStatuses) {
        if (status.regionId.equals(regionId)) {
            matching.add(status);
        }
    }
    return matching;
}
/** HTTP handler: removes the upload status with the given id; returns whether anything was removed. */
private boolean clearStatus(Request req, Response res) {
    String statusId = req.params("statusId");
    return uploadStatuses.removeIf(status -> statusId.equals(status.id));
}
/**
 * HTTP handler: kicks off a background task that extracts LODES census data for a region and
 * stores the resulting grids as opportunity datasets. Returns the tracking status object
 * immediately; the heavy extraction work runs on the task scheduler.
 */
private OpportunityDatasetUploadStatus downloadLODES (Request req, Response res) {
    final String regionId = req.params("regionId");
    final int zoom = parseZoom(req.queryParams("zoom"));
    final UserPermissions userPermissions = UserPermissions.from(req);
    final Region region = Persistence.regions.findByIdIfPermitted(regionId, userPermissions);
    // Common UUID for all LODES datasets created in this download (e.g. so they can be grouped together and
    // deleted as a batch using deleteSourceSet) TODO use DataGroup and DataSource (creating only one DataSource per region).
    // The bucket name contains the specific lodes data set and year so works as an appropriate name
    final OpportunityDatasetUploadStatus status = new OpportunityDatasetUploadStatus(regionId, extractor.sourceName);
    addStatusAndRemoveOldStatuses(status);
    // TODO we should be reusing the same source from Mongo, not making new ephemeral ones on each extract operation
    SpatialDataSource source = new SpatialDataSource(userPermissions, extractor.sourceName);
    source.regionId = regionId;
    // Make a new group that will contain the N OpportunityDatasets we're saving.
    String description = String.format("Import %s to %s", extractor.sourceName, region.name);
    DataGroup dataGroup = new DataGroup(userPermissions, source._id.toString(), description);
    taskScheduler.enqueue(Task.create("Extracting LODES data")
            .forUser(userPermissions)
            .setHeavy(true)
            .withAction((progressListener) -> {
                try {
                    status.message = "Extracting census data for region";
                    List<Grid> grids = extractor.censusDataForBounds(region.bounds, zoom, progressListener);
                    updateAndStoreDatasets(source, dataGroup, status, grids, progressListener);
                } catch (IOException e) {
                    // Record the failure on the status object so the UI can surface it.
                    status.completeWithError(e);
                    LOG.error("Exception processing LODES data: " + ExceptionUtils.stackTraceString(e));
                }
            }));
    return status;
}
/**
 * Given a list of new PointSets, serialize each PointSet and save it to file storage (S3 or local),
 * then create a metadata object about that PointSet and store it in Mongo.
 * Updates the supplied status object as it progresses so the UI can poll for completion or errors.
 *
 * @param source the DataSource document the datasets descend from (used for names, IDs, permissions)
 * @param dataGroup grouping document inserted at the end so all datasets can be handled as a batch
 * @param status mutable status object shared with HTTP polling endpoints
 * @param pointSets the Grids and/or FreeFormPointSets to persist
 * @param progressListener receives beginTask/increment calls as each pointset is stored
 */
private void updateAndStoreDatasets (SpatialDataSource source,
                                     DataGroup dataGroup,
                                     OpportunityDatasetUploadStatus status,
                                     List<? extends PointSet> pointSets,
                                     com.conveyal.r5.analyst.progress.ProgressListener progressListener) {
    status.status = Status.UPLOADING;
    status.totalGrids = pointSets.size();
    progressListener.beginTask("Storing opportunity data", pointSets.size());
    // Create an OpportunityDataset holding some metadata about each PointSet (Grid or FreeForm).
    for (PointSet pointSet : pointSets) {
        OpportunityDataset dataset = new OpportunityDataset();
        dataset.sourceName = source.name;
        dataset.sourceId = source._id.toString();
        dataset.dataGroupId = dataGroup._id.toString();
        dataset.createdBy = source.createdBy;
        dataset.accessGroup = source.accessGroup;
        dataset.regionId = source.regionId;
        dataset.name = pointSet.name;
        dataset.totalPoints = pointSet.featureCount();
        dataset.totalOpportunities = pointSet.sumTotalOpportunities();
        dataset.format = getFormatCode(pointSet);
        if (dataset.format == FileStorageFormat.FREEFORM) {
            dataset.name = String.join(" ", pointSet.name, "(freeform)");
        }
        dataset.setWebMercatorExtents(pointSet);
        // TODO make origin and destination pointsets reference each other and indicate they are suitable
        //      for one-to-one analyses
        // Store the PointSet metadata in Mongo.
        Persistence.opportunityDatasets.create(dataset);
        // Persist a serialized representation of each PointSet (not the metadata) to S3 or other object storage.
        // TODO this should probably be pulled out to another method, and possibly called one frame up.
        //      Persisting the PointSets to S3 is a separate task than making metadata and storing in Mongo.
        try {
            if (pointSet instanceof Grid) {
                File gridFile = FileUtils.createScratchFile("grid");
                // try-with-resources: the stream was previously never closed, leaking the file handle
                // and risking an unflushed GZIP trailer before the file was moved into storage.
                try (OutputStream fos = new GZIPOutputStream(new FileOutputStream(gridFile))) {
                    ((Grid) pointSet).write(fos);
                }
                fileStorage.moveIntoStorage(dataset.getStorageKey(FileStorageFormat.GRID), gridFile);
            } else if (pointSet instanceof FreeFormPointSet) {
                // Upload serialized freeform pointset back to S3
                FileStorageKey fileStorageKey = new FileStorageKey(GRIDS, source.regionId + "/" + dataset._id +
                        ".pointset");
                File pointsetFile = FileUtils.createScratchFile("pointset");
                try (OutputStream os = new GZIPOutputStream(new FileOutputStream(pointsetFile))) {
                    ((FreeFormPointSet) pointSet).write(os);
                }
                fileStorage.moveIntoStorage(fileStorageKey, pointsetFile);
            } else {
                throw new IllegalArgumentException("Unrecognized PointSet type, cannot persist it.");
            }
            status.uploadedGrids += 1;
            if (status.uploadedGrids == status.totalGrids) {
                status.completeSuccessfully();
            }
            LOG.info("Moved {}/{} files into storage for {}", status.uploadedGrids, status.totalGrids, status.name);
        } catch (NumberFormatException e) {
            throw new AnalysisServerException("Error attempting to parse number in uploaded file: " + e.toString());
        } catch (Exception e) {
            status.completeWithError(e);
            throw AnalysisServerException.unknown(e);
        }
        progressListener.increment();
    }
    // Persist the group document and report the work product so the UI can find these datasets.
    // TODO update UI so it can handle a link to a group of OPPORTUNITY_DATASET
    dataGroupCollection.insert(dataGroup);
    progressListener.setWorkProduct(WorkProduct.forDataGroup(OPPORTUNITY_DATASET, dataGroup, source.regionId));
}
/**
 * Map a concrete PointSet subclass to its FileStorageFormat code.
 * @throws RuntimeException if the PointSet is of a subclass this method does not recognize.
 */
private static FileStorageFormat getFormatCode (PointSet pointSet) {
    if (pointSet instanceof Grid) {
        return FileStorageFormat.GRID;
    }
    if (pointSet instanceof FreeFormPointSet) {
        return FileStorageFormat.FREEFORM;
    }
    throw new RuntimeException("Unknown pointset type.");
}
/**
 * Given a CSV file, convert each property (CSV column) into a freeform (non-gridded) pointset.
 *
 * The provided parameters must include latField and lonField. To indicate paired origins and destinations
 * (e.g. to use results from an origin-destination survey in a one-to-one regional analysis), they may also
 * include the optional latField2 and lonField2 fields, which produce a second pointset matched 1:1 with the
 * first.
 *
 * This method executes in a blocking (synchronous) manner, but it can take a while so should be called
 * within a non-blocking asynchronous task.
 */
private List<FreeFormPointSet> createFreeFormPointSetsFromCsv(FileItem csvFileItem, Map<String, String> params) {
    final String latField = params.get("latField");
    final String lonField = params.get("lonField");
    if (latField == null || lonField == null) {
        throw AnalysisServerException.fileUpload("You must specify a latitude and longitude column.");
    }
    // Optional column holding a unique identifier for each row (null if absent).
    final String idField = params.get("idField");
    // Optional column holding the opportunity count at each point (null if absent).
    final String countField = params.get("countField");
    // Optional secondary coordinate columns for building a matched second pointset.
    final String latField2 = params.get("latField2");
    final String lonField2 = params.get("lonField2");
    try {
        InputStreamProvider csvStreamProvider = new FileItemInputStreamProvider(csvFileItem);
        List<FreeFormPointSet> pointSets = new ArrayList<>();
        pointSets.add(FreeFormPointSet.fromCsv(csvStreamProvider, latField, lonField, idField, countField));
        // A second pair of coordinate columns yields a second pointset from the same CSV.
        // This is used for one-to-one travel times between specific origins/destinations.
        if (latField2 != null && lonField2 != null) {
            pointSets.add(FreeFormPointSet.fromCsv(csvStreamProvider, latField2, lonField2, idField, countField));
        }
        return pointSets;
    } catch (Exception e) {
        throw AnalysisServerException.fileUpload("Could not convert CSV to Freeform PointSet: " + e.toString());
    }
}
/**
 * Handle many types of file upload. Returns an OpportunityDatasetUploadStatus which has a handle to request status.
 * The request should be a multipart/form-data POST request, containing uploaded files and associated parameters.
 * The heavy work (parsing, rasterizing, persisting) runs asynchronously on the task scheduler; the returned
 * status object is registered before validation so even early failures are visible to the polling UI.
 */
private OpportunityDatasetUploadStatus createOpportunityDataset(Request req, Response res) {
    // Extract user info, uploaded files and form fields from the incoming request.
    final UserPermissions userPermissions = UserPermissions.from(req);
    final Map<String, List<FileItem>> formFields = HttpUtils.getRequestFiles(req.raw());
    // Parse required fields. Will throw a ServerException on failure.
    final String sourceName = HttpUtils.getFormField(formFields, "Name", true);
    final String regionId = HttpUtils.getFormField(formFields, "regionId", true);
    final int zoom = parseZoom(HttpUtils.getFormField(formFields, "zoom", false));
    // Create a region-wide status object tracking the processing of opportunity data.
    // Create the status object before doing anything including input and parameter validation, so that any problems
    // are recorded in a persistent purpose-built way rather than falling back on the UI's catch-all error window.
    // TODO more standardized mechanism for tracking asynchronous tasks and catching exceptions on them
    OpportunityDatasetUploadStatus status = new OpportunityDatasetUploadStatus(regionId, sourceName);
    addStatusAndRemoveOldStatuses(status);
    final List<FileItem> fileItems;
    final FileStorageFormat uploadFormat;
    final Map<String, String> parameters;
    try {
        // Validate inputs and parameters, which will throw an exception if there's anything wrong with them.
        // Call remove() rather than get() so that subsequent code will see only string parameters, not the files.
        fileItems = formFields.remove("files");
        uploadFormat = detectUploadFormatAndValidate(fileItems);
        parameters = extractStringParameters(formFields);
    } catch (Exception e) {
        status.completeWithError(e);
        return status;
    }
    // We are going to call several potentially slow blocking methods to create and persist new pointsets.
    // This whole series of actions will be run sequentially but within an asynchronous Executor task.
    // After enqueueing, the status is returned so the UI can track progress.
    taskScheduler.enqueueHeavyTask(() -> {
        try {
            // A place to accumulate all the PointSets created, both FreeForm and Grids.
            List<PointSet> pointsets = new ArrayList<>();
            if (uploadFormat == FileStorageFormat.GRID) {
                LOG.info("Detected opportunity dataset stored in Conveyal binary format.");
                pointsets.addAll(createGridsFromBinaryGridFiles(fileItems, status));
            } else if (uploadFormat == FileStorageFormat.SHP) {
                LOG.info("Detected opportunity dataset stored as ESRI shapefile.");
                pointsets.addAll(createGridsFromShapefile(fileItems, zoom, status));
            } else if (uploadFormat == FileStorageFormat.CSV) {
                LOG.info("Detected opportunity dataset stored as CSV");
                // Create a grid even when user has requested a freeform pointset so we have something to visualize.
                FileItem csvFileItem = fileItems.get(0);
                // TODO report progress / status as with grids. That involves pre-scanning the CSV which would be
                //      facilitated by retaining the CSV server side and later converting to pointset.
                boolean requestedFreeForm = Boolean.parseBoolean(parameters.get("freeform"));
                // Hack to enable freeform pointset building without exposing a UI element, via file name.
                if (csvFileItem.getName().contains("FREEFORM_PS.")) {
                    requestedFreeForm = true;
                }
                if (requestedFreeForm) {
                    LOG.info("Processing CSV as freeform (rather than gridded) pointset as requested.");
                    // This newer process creates a FreeFormPointSet only for the specified count fields,
                    // as well as a Grid to assist in visualization of the uploaded data.
                    for (FreeFormPointSet freeForm : createFreeFormPointSetsFromCsv(csvFileItem, parameters)) {
                        Grid gridFromFreeForm = Grid.fromFreeForm(freeForm, zoom);
                        pointsets.add(freeForm);
                        pointsets.add(gridFromFreeForm);
                    }
                } else {
                    // This is the common default process: create a grid for every non-ignored field in the CSV.
                    pointsets.addAll(createGridsFromCsv(csvFileItem, formFields, zoom, status));
                }
            }
            if (pointsets.isEmpty()) {
                throw new RuntimeException("No opportunity dataset was created from the files uploaded.");
            }
            LOG.info("Moving opportunity datasets into storage and adding metadata to database.");
            // Create a single unique ID string that will be referenced by all opportunity datasets produced by
            // this upload. This allows us to group together datasets from the same source and associate them with
            // the file(s) that produced them.
            // Currently we are creating the DataSource document in Mongo but not actually saving the source files.
            // Some methods like createGridsFromShapefile above "consume" those files by moving them into a tempdir.
            SpatialDataSource source = new SpatialDataSource(userPermissions, sourceName);
            source.regionId = regionId;
            DataGroup dataGroup = new DataGroup(userPermissions, source._id.toString(), "Import opportunity data");
            updateAndStoreDatasets(source, dataGroup, status, pointsets, new NoopProgressListener());
        } catch (Exception e) {
            // Log through the standard logger (not printStackTrace) for consistency with the other
            // handlers in this file, and record the failure on the status object for the UI.
            LOG.error("Error creating opportunity dataset: " + ExceptionUtils.stackTraceString(e));
            status.completeWithError(e);
        }
    });
    return status;
}
/**
 * Given pre-parsed multipart POST data containing some text fields, pull those fields out into a simple String
 * Map to simplify later use, performing some validation in the process.
 * All FileItems are expected to be form fields, not uploaded files, and each key should map to exactly one
 * sub-item which can be understood as a UTF-8 String. Keys with zero sub-items are skipped (previously this
 * method threw IndexOutOfBoundsException on items.get(0)); keys with more than one use the first.
 */
private Map<String, String> extractStringParameters(Map<String, List<FileItem>> formFields) {
    // All other keys should be for String parameters.
    Map<String, String> parameters = new HashMap<>();
    formFields.forEach((key, items) -> {
        if (items.size() != 1) {
            LOG.error("In multipart form upload, key '{}' had {} sub-items (expected one).", key, items.size());
        }
        if (items.isEmpty()) {
            // Nothing to extract for this key; bail out of this iteration rather than crash on get(0).
            return;
        }
        FileItem fileItem = items.get(0);
        if (fileItem.isFormField()) {
            try {
                parameters.put(key, fileItem.getString("UTF-8"));
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        } else {
            LOG.warn("In multipart form upload, key '{}' was not for a form field.", key);
        }
    });
    return parameters;
}
/** HTTP handler: update an existing OpportunityDataset's metadata from the JSON request body. */
private OpportunityDataset editOpportunityDataset(Request req, Response res) throws IOException {
    return Persistence.opportunityDatasets.updateFromJSONRequest(req);
}
/**
 * HTTP handler: delete every OpportunityDataset belonging to the given source (as permitted for this user),
 * returning the collection of deleted datasets.
 */
private Collection<OpportunityDataset> deleteSourceSet(Request request, Response response) {
    final String sourceId = request.params("sourceId");
    final UserPermissions permissions = UserPermissions.from(request);
    Collection<OpportunityDataset> matchingDatasets = Persistence.opportunityDatasets.findPermitted(
            QueryBuilder.start("sourceId").is(sourceId).get(), permissions);
    for (OpportunityDataset dataset : matchingDatasets) {
        deleteDataset(dataset._id, permissions);
    }
    return matchingDatasets;
}
/** HTTP handler: delete a single OpportunityDataset identified by the _id path parameter. */
private OpportunityDataset deleteOpportunityDataset(Request request, Response response) {
    return deleteDataset(request.params("_id"), UserPermissions.from(request));
}
/**
 * Delete an Opportunity Dataset from the database and all formats from the file store.
 * @throws AnalysisServerException (not found) if no matching dataset is visible to the user.
 */
private OpportunityDataset deleteDataset(String id, UserPermissions userPermissions) {
    OpportunityDataset dataset = Persistence.opportunityDatasets.removeIfPermitted(id, userPermissions);
    if (dataset == null) {
        throw AnalysisServerException.notFound("Opportunity dataset could not be found.");
    }
    // Several of these files may not exist. FileStorage::delete contract states this will be handled cleanly.
    fileStorage.delete(dataset.getStorageKey(FileStorageFormat.GRID));
    fileStorage.delete(dataset.getStorageKey(FileStorageFormat.PNG));
    fileStorage.delete(dataset.getStorageKey(FileStorageFormat.GEOTIFF));
    return dataset;
}
/**
 * Create a grid from WGS 84 points in a CSV file.
 * The supplied CSV file will not be deleted - it may be used again to make another (freeform) pointset.
 * When latField2/lonField2 are supplied, a second pass builds additional grids from those coordinate
 * columns; this is only really useful when creating grids to visualize freeform pointsets for
 * one-to-one analyses.
 * @return one or two Grids for each numeric column in the CSV input.
 */
private List<Grid> createGridsFromCsv(FileItem csvFileItem,
                                      Map<String, List<FileItem>> query,
                                      int zoom,
                                      OpportunityDatasetUploadStatus status) throws Exception {
    final String latField = HttpUtils.getFormField(query, "latField", true);
    final String lonField = HttpUtils.getFormField(query, "lonField", true);
    final String idField = HttpUtils.getFormField(query, "idField", false);
    // Optional fields to run grid construction twice with two different sets of points.
    final String latField2 = HttpUtils.getFormField(query, "latField2", false);
    final String lonField2 = HttpUtils.getFormField(query, "lonField2", false);
    InputStreamProvider csvStreamProvider = new FileItemInputStreamProvider(csvFileItem);
    // First pass: grid every numeric column, skipping the id and secondary coordinate columns.
    List<Grid> grids = Grid.fromCsv(
            csvStreamProvider, latField, lonField, Arrays.asList(idField, latField2, lonField2), zoom, status);
    // TODO verify correctness of this second pass
    if (latField2 != null && lonField2 != null) {
        grids.addAll(Grid.fromCsv(
                csvStreamProvider, latField2, lonField2, Arrays.asList(idField, latField, lonField), zoom, status));
    }
    return grids;
}
/**
 * Create a grid from each input stream containing a binary grid file.
 * For those in the know, we can upload manually created binary grid files.
 *
 * @param uploadedFiles the uploaded .grid files
 * @param status mutable status object; feature counters are updated as files are read
 * @return one Grid per uploaded file, named after the file with its ".grid" suffix removed
 */
private List<Grid> createGridsFromBinaryGridFiles(List<FileItem> uploadedFiles,
                                                  OpportunityDatasetUploadStatus status) throws Exception {
    List<Grid> grids = new ArrayList<>();
    status.totalFeatures = uploadedFiles.size();
    for (FileItem fileItem : uploadedFiles) {
        Grid grid = Grid.read(fileItem.getInputStream());
        String name = fileItem.getName();
        // Remove ".grid" from the name. The previous code used name.split(".grid"), which treats its
        // argument as a regex where '.' matches ANY character, so a name like "my_grid_file.grid" was
        // truncated to "my" at the first "<any char>grid" occurrence. Use a literal search instead.
        int suffixIndex = name.indexOf(".grid");
        if (suffixIndex >= 0) {
            name = name.substring(0, suffixIndex);
        }
        grid.name = name;
        grids.add(grid);
        status.completedFeatures += 1;
    }
    status.completedFeatures = status.totalFeatures;
    return grids;
}
/**
 * Create grids from an uploaded ESRI shapefile.
 * Preconditions: fileItems must contain SHP, DBF, and PRJ files, and optionally SHX. All files should have the
 * same base name, and should not contain any other files but these three or four.
 */
private List<Grid> createGridsFromShapefile(List<FileItem> fileItems,
                                            int zoom,
                                            OpportunityDatasetUploadStatus status) throws Exception {
    // In the caller, we should have already verified that all files have the same base name and have an extension.
    // Extract the relevant files: .shp, .prj, .dbf, and .shx.
    // We need the SHX even though we're looping over every feature as they might be sparse.
    Map<String, FileItem> filesByExtension = new HashMap<>();
    for (FileItem fileItem : fileItems) {
        filesByExtension.put(FilenameUtils.getExtension(fileItem.getName()).toUpperCase(), fileItem);
    }
    // Copy the shapefile component files into a temporary directory with a fixed base name.
    File tempDir = Files.createTempDir();
    File shpFile = new File(tempDir, "grid.shp");
    filesByExtension.get("SHP").write(shpFile);
    File prjFile = new File(tempDir, "grid.prj");
    filesByExtension.get("PRJ").write(prjFile);
    File dbfFile = new File(tempDir, "grid.dbf");
    filesByExtension.get("DBF").write(dbfFile);
    // The .shx file is an index. It is optional, and not needed for dense shapefiles.
    if (filesByExtension.containsKey("SHX")) {
        File shxFile = new File(tempDir, "grid.shx");
        filesByExtension.get("SHX").write(shxFile);
    }
    List<Grid> grids = Grid.fromShapefile(shpFile, zoom, status);
    // File.delete() silently fails on a non-empty directory, so the old single delete() call leaked the
    // copied component files. Delete the directory contents first, then the directory itself.
    File[] tempFiles = tempDir.listFiles();
    if (tempFiles != null) {
        for (File tempFile : tempFiles) {
            tempFile.delete();
        }
    }
    tempDir.delete();
    return grids;
}
/**
 * Respond to a request with a redirect to a downloadable file.
 * If the dataset is not yet stored in the requested format, it is converted from the .grid format on demand
 * and the converted file is moved into storage before the URL is returned.
 * @param req should specify regionId, opportunityDatasetId, and an available download format (.tiff or .grid)
 */
private Object downloadOpportunityDataset (Request req, Response res) throws IOException {
    FileStorageFormat downloadFormat;
    String format = req.params("format");
    try {
        downloadFormat = FileStorageFormat.valueOf(format.toUpperCase());
    } catch (IllegalArgumentException iae) {
        LOG.warn("Unable to interpret format path parameter '{}', using legacy code path.", format);
        // This code handles the deprecated endpoint for retrieving opportunity datasets.
        // get("/api/opportunities/:regionId/:gridKey") has the same path pattern as this endpoint.
        String regionId = req.params("_id");
        String gridKey = format;
        FileStorageKey storageKey = new FileStorageKey(GRIDS, String.format("%s/%s.grid", regionId, gridKey));
        return getJsonUrl(storageKey);
    }
    if (FileStorageFormat.GRID.equals(downloadFormat)) return getOpportunityDataset(req, res);
    final OpportunityDataset opportunityDataset = Persistence.opportunityDatasets.findByIdFromRequestIfPermitted(req);
    FileStorageKey gridKey = opportunityDataset.getStorageKey(FileStorageFormat.GRID);
    FileStorageKey formatKey = opportunityDataset.getStorageKey(downloadFormat);
    // If this grid is not in storage at all, we cannot convert it to any other format.
    if (!fileStorage.exists(gridKey)) {
        throw AnalysisServerException.notFound("Requested grid does not exist.");
    }
    if (!fileStorage.exists(formatKey)) {
        // Get the .grid version and convert it to the requested format.
        File gridFile = fileStorage.getFile(gridKey);
        Grid grid = Grid.read(new GZIPInputStream(new FileInputStream(gridFile))); // closes input stream
        File localFile = FileUtils.createScratchFile(downloadFormat.toString());
        // try-with-resources: the output stream was previously never closed, leaking the file handle
        // and risking unflushed bytes before the file was moved into storage.
        try (FileOutputStream fos = new FileOutputStream(localFile)) {
            if (FileStorageFormat.PNG.equals(downloadFormat)) {
                grid.writePng(fos);
            } else if (FileStorageFormat.GEOTIFF.equals(downloadFormat)) {
                grid.writeGeotiff(fos);
            }
        }
        fileStorage.moveIntoStorage(formatKey, localFile);
    }
    return getJsonUrl(formatKey);
}
/**
* Implements R5 ProgressListener interface to allow code in R5 to update it.
* This is serialized into HTTP responses so all fields must be public.
* TODO generalize into a system for tracking progress on all asynchronous server-side tasks.
*/
public static class OpportunityDatasetUploadStatus implements ProgressListener {
public String id;
public int totalFeatures = 0;
public int completedFeatures = 0;
public int totalGrids = 0;
public int uploadedGrids = 0;
public String regionId;
public Status status = Status.PROCESSING;
public String name;
public String message;
public Date createdAt;
public Date completedAt;
OpportunityDatasetUploadStatus(String regionId, String name) {
this.id = new ObjectId().toString();
this.regionId = regionId;
this.name = name;
this.createdAt = new Date();
}
private void completed (Status status) {
this.status = status;
this.completedAt = new Date();
}
public void completeWithError (Exception e) {
message = "Unable to create opportunity dataset. " + ExceptionUtils.stackTraceString(e);
completed(Status.ERROR);
}
public void completeSuccessfully () {
completed(Status.DONE);
}
@Override
public void setTotalItems (int nTotal) {
totalFeatures = nTotal;
}
@Override
public void setCompletedItems (int nComplete) {
completedFeatures = nComplete;
}
}
// Lifecycle states for an OpportunityDatasetUploadStatus. ERROR and DONE are terminal
// (set via completeWithError/completeSuccessfully); do not reorder — ordering may be serialized.
private enum Status {
    UPLOADING, PROCESSING, ERROR, DONE
}
// Register all HTTP endpoints of this controller under /api/opportunities.
// NOTE(review): the more specific /region/... and /source/... routes are registered before the
// parameterized /:_id routes — presumably so they match first; confirm Spark's route-matching
// order before reordering anything here.
@Override
public void registerEndpoints (spark.Service sparkService) {
    sparkService.path("/api/opportunities", () -> {
        sparkService.post("", this::createOpportunityDataset, toJson);
        sparkService.post("/region/:regionId/download", this::downloadLODES, toJson);
        sparkService.get("/region/:regionId/status", this::getRegionUploadStatuses, toJson);
        sparkService.delete("/region/:regionId/status/:statusId", this::clearStatus, toJson);
        sparkService.get("/region/:regionId", this::getRegionDatasets, toJson);
        sparkService.delete("/source/:sourceId", this::deleteSourceSet, toJson);
        sparkService.delete("/:_id", this::deleteOpportunityDataset, toJson);
        sparkService.get("/:_id", this::getOpportunityDataset, toJson);
        sparkService.put("/:_id", this::editOpportunityDataset, toJson);
        sparkService.get("/:_id/:format", this::downloadOpportunityDataset, toJson);
    });
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.thrift.ThriftServerRunner.ImplType;
import org.apache.hadoop.hbase.util.InfoServer;
import org.apache.hadoop.hbase.util.Strings;
import org.apache.hadoop.hbase.util.VersionInfo;
import org.apache.hadoop.net.DNS;
import org.apache.hadoop.util.Shell.ExitCodeException;
/**
 * ThriftServer - this class starts up a Thrift server which implements the
 * Hbase API specified in the Hbase.thrift IDL file. The server runs in an
 * independent process.
 */
@InterfaceAudience.Private
public class ThriftServer {

  private static final Log LOG = LogFactory.getLog(ThriftServer.class);

  private static final String MIN_WORKERS_OPTION = "minWorkers";
  private static final String MAX_WORKERS_OPTION = "workers";
  private static final String MAX_QUEUE_SIZE_OPTION = "queue";
  private static final String KEEP_ALIVE_SEC_OPTION = "keepAliveSec";
  static final String BIND_OPTION = "bind";
  static final String COMPACT_OPTION = "compact";
  static final String FRAMED_OPTION = "framed";
  static final String PORT_OPTION = "port";

  private static final String DEFAULT_BIND_ADDR = "0.0.0.0";
  private static final int DEFAULT_LISTEN_PORT = 9090;

  // Never reassigned after construction, so declared final.
  private final Configuration conf;
  ThriftServerRunner serverRunner;

  private InfoServer infoServer;

  //
  // Main program and support routines
  //

  public ThriftServer(Configuration conf) {
    this.conf = HBaseConfiguration.create(conf);
  }

  /**
   * Print command-line usage and signal process exit via ExitCodeException
   * (thrown rather than calling System.exit directly so callers can unwind).
   */
  private static void printUsageAndExit(Options options, int exitCode)
      throws ExitCodeException {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("Thrift", null, options,
        "To start the Thrift server run 'bin/hbase-daemon.sh start thrift'\n" +
        "To shutdown the thrift server run 'bin/hbase-daemon.sh stop " +
        "thrift' or send a kill signal to the thrift server pid",
        true);
    throw new ExitCodeException(exitCode, "");
  }

  /**
   * Starts up or shuts down the Thrift server, depending on the arguments.
   * @param args command line arguments, must contain exactly one of "start" or "stop"
   */
  void doMain(final String[] args) throws Exception {
    processOptions(args);

    UserProvider userProvider = UserProvider.instantiate(conf);
    // login the server principal (if using secure Hadoop)
    if (userProvider.isHadoopSecurityEnabled() && userProvider.isHBaseSecurityEnabled()) {
      String machineName =
          Strings.domainNamePointerToHostName(DNS.getDefaultHost(
              conf.get("hbase.thrift.dns.interface", "default"),
              conf.get("hbase.thrift.dns.nameserver", "default")));
      userProvider
          .login("hbase.thrift.keytab.file", "hbase.thrift.kerberos.principal", machineName);
    }

    serverRunner = new ThriftServerRunner(conf);

    // Put up info server. A negative port disables it.
    int port = conf.getInt("hbase.thrift.info.port", 9095);
    if (port >= 0) {
      conf.setLong("startcode", System.currentTimeMillis());
      String a = conf.get("hbase.thrift.info.bindAddress", "0.0.0.0");
      infoServer = new InfoServer("thrift", a, port, false, conf);
      infoServer.setAttribute("hbase.conf", conf);
      infoServer.start();
    }

    serverRunner.run();
  }

  /**
   * Parse the command line options to set parameters the conf.
   */
  private void processOptions(final String[] args) throws Exception {
    Options options = new Options();
    options.addOption("b", BIND_OPTION, true, "Address to bind " +
        "the Thrift server to. [default: " + DEFAULT_BIND_ADDR + "]");
    options.addOption("p", PORT_OPTION, true, "Port to bind to [default: " +
        DEFAULT_LISTEN_PORT + "]");
    options.addOption("f", FRAMED_OPTION, false, "Use framed transport");
    options.addOption("c", COMPACT_OPTION, false, "Use the compact protocol");
    options.addOption("h", "help", false, "Print help information");
    options.addOption(null, "infoport", true, "Port for web UI");

    options.addOption("m", MIN_WORKERS_OPTION, true,
        "The minimum number of worker threads for " +
        ImplType.THREAD_POOL.simpleClassName());

    options.addOption("w", MAX_WORKERS_OPTION, true,
        "The maximum number of worker threads for " +
        ImplType.THREAD_POOL.simpleClassName());

    options.addOption("q", MAX_QUEUE_SIZE_OPTION, true,
        "The maximum number of queued requests in " +
        ImplType.THREAD_POOL.simpleClassName());

    options.addOption("k", KEEP_ALIVE_SEC_OPTION, true,
        // Fixed typo in the user-visible help text: "secods" -> "seconds".
        "The amount of time in seconds to keep a thread alive when idle in " +
        ImplType.THREAD_POOL.simpleClassName());

    options.addOptionGroup(ImplType.createOptionGroup());

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = parser.parse(options, args);

    // This is so complicated to please both bin/hbase and bin/hbase-daemon.
    // hbase-daemon provides "start" and "stop" arguments
    // hbase should print the help if no argument is provided
    List<String> commandLine = Arrays.asList(args);
    boolean stop = commandLine.contains("stop");
    boolean start = commandLine.contains("start");
    boolean invalidStartStop = (start && stop) || (!start && !stop);
    if (cmd.hasOption("help") || invalidStartStop) {
      if (invalidStartStop) {
        LOG.error("Exactly one of 'start' and 'stop' has to be specified");
      }
      printUsageAndExit(options, 1);
    }

    // Get port to bind to
    try {
      int listenPort = Integer.parseInt(cmd.getOptionValue(PORT_OPTION,
          String.valueOf(DEFAULT_LISTEN_PORT)));
      conf.setInt(ThriftServerRunner.PORT_CONF_KEY, listenPort);
    } catch (NumberFormatException e) {
      LOG.error("Could not parse the value provided for the port option", e);
      printUsageAndExit(options, -1);
    }

    // check for user-defined info server port setting, if so override the conf
    try {
      if (cmd.hasOption("infoport")) {
        String val = cmd.getOptionValue("infoport");
        // parseInt avoids the needless Integer boxing of Integer.valueOf.
        conf.setInt("hbase.thrift.info.port", Integer.parseInt(val));
        LOG.debug("Web UI port set to " + val);
      }
    } catch (NumberFormatException e) {
      LOG.error("Could not parse the value provided for the infoport option", e);
      printUsageAndExit(options, -1);
    }

    // Make optional changes to the configuration based on command-line options
    optionToConf(cmd, MIN_WORKERS_OPTION,
        conf, TBoundedThreadPoolServer.MIN_WORKER_THREADS_CONF_KEY);
    optionToConf(cmd, MAX_WORKERS_OPTION,
        conf, TBoundedThreadPoolServer.MAX_WORKER_THREADS_CONF_KEY);
    optionToConf(cmd, MAX_QUEUE_SIZE_OPTION,
        conf, TBoundedThreadPoolServer.MAX_QUEUED_REQUESTS_CONF_KEY);
    optionToConf(cmd, KEEP_ALIVE_SEC_OPTION,
        conf, TBoundedThreadPoolServer.THREAD_KEEP_ALIVE_TIME_SEC_CONF_KEY);

    // Set general thrift server options
    boolean compact = cmd.hasOption(COMPACT_OPTION) ||
        conf.getBoolean(ThriftServerRunner.COMPACT_CONF_KEY, false);
    conf.setBoolean(ThriftServerRunner.COMPACT_CONF_KEY, compact);
    boolean framed = cmd.hasOption(FRAMED_OPTION) ||
        conf.getBoolean(ThriftServerRunner.FRAMED_CONF_KEY, false);
    conf.setBoolean(ThriftServerRunner.FRAMED_CONF_KEY, framed);
    if (cmd.hasOption(BIND_OPTION)) {
      conf.set(
          ThriftServerRunner.BIND_CONF_KEY, cmd.getOptionValue(BIND_OPTION));
    }

    ImplType.setServerImpl(cmd, conf);
  }

  /**
   * Stop the info server (if running) and shut down the Thrift service.
   */
  public void stop() {
    if (this.infoServer != null) {
      LOG.info("Stopping infoServer");
      try {
        this.infoServer.stop();
      } catch (Exception ex) {
        // Log through the standard logger instead of printStackTrace so the failure
        // appears in the server log with the rest of the shutdown messages.
        LOG.error("Failed to stop infoServer", ex);
      }
    }
    // doMain() may have failed before creating the runner; guard against an NPE on shutdown.
    if (serverRunner != null) {
      serverRunner.shutdown();
    }
  }

  /**
   * Copy a command-line option's value into the configuration under the given key, if present.
   */
  private static void optionToConf(CommandLine cmd, String option,
      Configuration conf, String destConfKey) {
    if (cmd.hasOption(option)) {
      String value = cmd.getOptionValue(option);
      LOG.info("Set configuration key:" + destConfKey + " value:" + value);
      conf.set(destConfKey, value);
    }
  }

  /**
   * Entry point: parses arguments, starts (or stops) the server, and converts
   * ExitCodeException into a process exit code.
   */
  public static void main(String [] args) throws Exception {
    VersionInfo.logVersion();
    try {
      new ThriftServer(HBaseConfiguration.create()).doMain(args);
    } catch (ExitCodeException ex) {
      System.exit(ex.getExitCode());
    }
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution.buffer;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.units.DataSize;
import io.prestosql.execution.StateMachine;
import io.prestosql.execution.buffer.OutputBuffers.OutputBufferId;
import io.prestosql.memory.context.AggregatedMemoryContext;
import io.prestosql.memory.context.MemoryReservationHandler;
import io.prestosql.memory.context.SimpleLocalMemoryContext;
import io.prestosql.spi.Page;
import io.prestosql.spi.type.BigintType;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.prestosql.execution.buffer.BufferResult.emptyResults;
import static io.prestosql.execution.buffer.BufferState.OPEN;
import static io.prestosql.execution.buffer.BufferState.TERMINAL_BUFFER_STATES;
import static io.prestosql.execution.buffer.BufferTestUtils.MAX_WAIT;
import static io.prestosql.execution.buffer.BufferTestUtils.NO_WAIT;
import static io.prestosql.execution.buffer.BufferTestUtils.acknowledgeBufferResult;
import static io.prestosql.execution.buffer.BufferTestUtils.addPage;
import static io.prestosql.execution.buffer.BufferTestUtils.assertBufferResultEquals;
import static io.prestosql.execution.buffer.BufferTestUtils.assertFinished;
import static io.prestosql.execution.buffer.BufferTestUtils.assertFutureIsDone;
import static io.prestosql.execution.buffer.BufferTestUtils.assertQueueClosed;
import static io.prestosql.execution.buffer.BufferTestUtils.assertQueueState;
import static io.prestosql.execution.buffer.BufferTestUtils.createBufferResult;
import static io.prestosql.execution.buffer.BufferTestUtils.createPage;
import static io.prestosql.execution.buffer.BufferTestUtils.enqueuePage;
import static io.prestosql.execution.buffer.BufferTestUtils.getBufferResult;
import static io.prestosql.execution.buffer.BufferTestUtils.getFuture;
import static io.prestosql.execution.buffer.BufferTestUtils.serializePage;
import static io.prestosql.execution.buffer.BufferTestUtils.sizeOfPages;
import static io.prestosql.execution.buffer.OutputBuffers.BROADCAST_PARTITION_ID;
import static io.prestosql.execution.buffer.OutputBuffers.BufferType.BROADCAST;
import static io.prestosql.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers;
import static io.prestosql.memory.context.AggregatedMemoryContext.newRootAggregatedMemoryContext;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestBroadcastOutputBuffer
{
private static final String TASK_INSTANCE_ID = "task-instance-id";
private static final ImmutableList<BigintType> TYPES = ImmutableList.of(BIGINT);
private static final OutputBufferId FIRST = new OutputBufferId(0);
private static final OutputBufferId SECOND = new OutputBufferId(1);
private static final OutputBufferId THIRD = new OutputBufferId(2);
private ScheduledExecutorService stateNotificationExecutor;
@BeforeClass
public void setUp()
{
    // Shared executor used by the buffers under test for state-change notifications.
    stateNotificationExecutor = newScheduledThreadPool(5, daemonThreadsNamed(getClass().getSimpleName() + "-%s"));
}
@AfterClass(alwaysRun = true)
public void tearDown()
{
    // Stop the executor even if tests failed (alwaysRun) and null it out
    // so a repeated teardown is a no-op.
    if (stateNotificationExecutor != null) {
        stateNotificationExecutor.shutdownNow();
        stateNotificationExecutor = null;
    }
}
/**
 * A zero-byte maximum buffer size must be rejected with
 * IllegalArgumentException, both with and without declared buffer ids.
 */
@Test
public void testInvalidConstructorArg()
{
    try {
        createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID).withNoMoreBufferIds(), DataSize.ofBytes(0));
        // Fixed: the message previously named IllegalStateException even
        // though IllegalArgumentException is what is caught below.
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException ignored) {
    }
    try {
        createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), DataSize.ofBytes(0));
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException ignored) {
    }
}
/**
 * End-to-end walk through the broadcast buffer lifecycle: pages added
 * before any client buffer exists are retained; a late-added buffer sees
 * all prior pages; acknowledgement frees space and unblocks writers; and
 * the buffer only finishes after every client consumes and aborts.
 * The assertions are strictly order-dependent.
 */
@Test
public void testSimple()
{
    OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
    BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));
    // add three items
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(i));
    }
    outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);
    // add a queue
    buffer.setOutputBuffers(outputBuffers);
    assertQueueState(buffer, FIRST, 3, 0);
    // get the three elements
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 3, 0);
    // acknowledge first three pages
    buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
    // pages now acknowledged
    assertQueueState(buffer, FIRST, 0, 3);
    // fill the buffer (we already added 3 pages)
    for (int i = 3; i < 10; i++) {
        addPage(buffer, createPage(i));
    }
    assertQueueState(buffer, FIRST, 7, 3);
    // try to add one more page, which should block
    ListenableFuture<?> future = enqueuePage(buffer, createPage(10));
    assertFalse(future.isDone());
    assertQueueState(buffer, FIRST, 8, 3);
    // remove a page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 3, sizeOfPages(1), NO_WAIT), bufferResult(3, createPage(3)));
    // page not acknowledged yet so sent count is the same
    assertQueueState(buffer, FIRST, 8, 3);
    // we should still be blocked
    assertFalse(future.isDone());
    //
    // add another buffer and verify it sees all pages
    outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
    buffer.setOutputBuffers(outputBuffers);
    assertQueueState(buffer, SECOND, 11, 0);
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0),
            createPage(1),
            createPage(2),
            createPage(3),
            createPage(4),
            createPage(5),
            createPage(6),
            createPage(7),
            createPage(8),
            createPage(9)));
    // page not acknowledged yet so sent count is still zero
    assertQueueState(buffer, SECOND, 11, 0);
    // acknowledge the 10 pages
    buffer.get(SECOND, 10, sizeOfPages(10)).cancel(true);
    assertQueueState(buffer, SECOND, 1, 10);
    //
    // tell shared buffer there will be no more queues
    outputBuffers = outputBuffers.withNoMoreBufferIds();
    buffer.setOutputBuffers(outputBuffers);
    // queues consumed the first three pages, so they should be dropped now and the blocked page future from above should be done
    assertQueueState(buffer, FIRST, 8, 3);
    assertQueueState(buffer, SECOND, 1, 10);
    assertFutureIsDone(future);
    // we should be able to add 3 more pages (the third will be queued)
    // although the first queue fetched the 4th page, the page has not been acknowledged yet
    addPage(buffer, createPage(11));
    addPage(buffer, createPage(12));
    future = enqueuePage(buffer, createPage(13));
    assertFalse(future.isDone());
    assertQueueState(buffer, FIRST, 11, 3);
    assertQueueState(buffer, SECOND, 4, 10);
    // acknowledge the receipt of the 3rd page and try to remove the 4th page from the first queue
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 4, sizeOfPages(1), NO_WAIT), bufferResult(4, createPage(4)));
    // the blocked page future above should be done
    assertFutureIsDone(future);
    assertQueueState(buffer, FIRST, 10, 4);
    assertQueueState(buffer, SECOND, 4, 10);
    //
    // finish the buffer
    assertFalse(buffer.isFinished());
    buffer.setNoMorePages();
    assertQueueState(buffer, FIRST, 10, 4);
    assertQueueState(buffer, SECOND, 4, 10);
    // not fully finished until all pages are consumed
    assertFalse(buffer.isFinished());
    // remove a page, not finished
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 5, sizeOfPages(1), NO_WAIT), bufferResult(5, createPage(5)));
    assertQueueState(buffer, FIRST, 9, 5);
    assertQueueState(buffer, SECOND, 4, 10);
    assertFalse(buffer.isFinished());
    // remove all remaining pages from first queue, should not be finished
    BufferResult x = getBufferResult(buffer, FIRST, 6, sizeOfPages(10), NO_WAIT);
    assertBufferResultEquals(TYPES, x, bufferResult(6, createPage(6),
            createPage(7),
            createPage(8),
            createPage(9),
            createPage(10),
            createPage(11),
            createPage(12),
            createPage(13)));
    assertQueueState(buffer, FIRST, 8, 6);
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
    // finish first queue
    buffer.abort(FIRST);
    assertQueueClosed(buffer, FIRST, 14);
    assertQueueState(buffer, SECOND, 4, 10);
    assertFalse(buffer.isFinished());
    // remove all remaining pages from second queue, should be finished
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 10, sizeOfPages(10), NO_WAIT), bufferResult(10, createPage(10),
            createPage(11),
            createPage(12),
            createPage(13)));
    assertQueueState(buffer, SECOND, 4, 10);
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
    buffer.abort(SECOND);
    assertQueueClosed(buffer, FIRST, 14);
    assertQueueClosed(buffer, SECOND, 14);
    assertFinished(buffer);
    // reads after finish keep returning terminal empty results
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
}
// TODO: remove this after PR is landed: https://github.com/prestodb/presto/pull/7987
/**
 * Verifies explicit acknowledgement semantics: acknowledged pages are
 * dropped, acknowledging past the last produced page fails, and
 * re-acknowledging an already-acknowledged range is tolerated.
 */
@Test
public void testAcknowledge()
{
    OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
    BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));
    // add three items
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(i));
    }
    outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);
    // add a queue
    buffer.setOutputBuffers(outputBuffers);
    assertQueueState(buffer, FIRST, 3, 0);
    // get the three elements
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // acknowledge pages 0 and 1
    acknowledgeBufferResult(buffer, FIRST, 2);
    // only page 2 is not removed
    assertQueueState(buffer, FIRST, 1, 2);
    // acknowledge page 2
    acknowledgeBufferResult(buffer, FIRST, 3);
    // nothing left
    assertQueueState(buffer, FIRST, 0, 3);
    // acknowledge more pages will fail
    try {
        acknowledgeBufferResult(buffer, FIRST, 4);
        // Fixed: without this fail() the test passed silently when no
        // exception was thrown, defeating the negative check.
        fail("Expected IllegalArgumentException");
    }
    catch (IllegalArgumentException e) {
        assertEquals(e.getMessage(), "Invalid sequence id");
    }
    // fill the buffer
    for (int i = 3; i < 6; i++) {
        addPage(buffer, createPage(i));
    }
    assertQueueState(buffer, FIRST, 3, 3);
    // getting new pages will again acknowledge the previously acknowledged pages but this is ok
    buffer.get(FIRST, 3, sizeOfPages(1)).cancel(true);
    assertQueueState(buffer, FIRST, 3, 3);
}
/**
 * Filling the shared buffer to capacity makes the next enqueue block.
 * NOTE(review): the returned future is discarded; presumably enqueuePage
 * asserts the blocked state internally — confirm in BufferTestUtils.
 */
@Test
public void testSharedBufferFull()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));
    // Add two pages, buffer is full
    addPage(buffer, createPage(1));
    addPage(buffer, createPage(2));
    // third page is blocked
    enqueuePage(buffer, createPage(3));
}
/**
 * The buffer-full notification callback fires when a writer first blocks,
 * but not again after no-more-buffer-ids has been declared.
 */
@Test
public void testNotifyStatusOnBufferFull()
{
    AtomicInteger notifyCount = new AtomicInteger();
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID),
            sizeOfPages(1),
            notifyCount::incrementAndGet);
    // Add a page to the buffer
    addPage(buffer, createPage(1));
    assertTrue(buffer.isFull().isDone());
    assertEquals(notifyCount.get(), 0);
    // Add another page to block
    ListenableFuture<?> future = enqueuePage(buffer, createPage(2));
    assertFalse(future.isDone());
    assertEquals(notifyCount.get(), 1);
    // Set no more buffers
    buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID).withNoMoreBufferIds());
    // Acknowledge both pages in the buffer to remove them
    buffer.acknowledge(FIRST, 2);
    assertFutureIsDone(future);
    assertEquals(notifyCount.get(), 1);
    // Add two more pages, buffer will be blocked second time
    addPage(buffer, createPage(3));
    future = enqueuePage(buffer, createPage(4));
    assertFalse(future.isDone());
    // count stays 1: once no-more-buffer-ids is set, blocking no longer notifies
    assertEquals(notifyCount.get(), 1);
}
/**
 * When the buffer is created with no-more-buffer-ids already set, the
 * buffer-full notification callback never fires at all.
 */
@Test
public void testNotifyStatusOnBufferFullWithNoBufferIds()
{
    AtomicInteger notifyCount = new AtomicInteger();
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(1),
            notifyCount::incrementAndGet);
    // Add a page to the buffer
    addPage(buffer, createPage(1));
    assertTrue(buffer.isFull().isDone());
    assertEquals(notifyCount.get(), 0);
    // Add another page to block
    ListenableFuture<?> future = enqueuePage(buffer, createPage(2));
    assertFalse(future.isDone());
    assertEquals(notifyCount.get(), 0); // stays 0 because no new buffers will be added
    // Acknowledge both pages in the buffer to remove them
    buffer.acknowledge(FIRST, 2);
    assertFutureIsDone(future);
    assertEquals(notifyCount.get(), 0);
    // Add two more pages, buffer will be blocked second time
    addPage(buffer, createPage(3));
    future = enqueuePage(buffer, createPage(4));
    assertFalse(future.isDone());
    assertEquals(notifyCount.get(), 0);
}
/**
 * Re-requesting the same (unacknowledged) sequence range returns the same
 * pages; after acknowledgement the same request returns empty results.
 */
@Test
public void testDuplicateRequests()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // add three items
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(i));
    }
    // add a queue
    assertQueueState(buffer, FIRST, 3, 0);
    // get the three elements
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 3, 0);
    // get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
    // pages not acknowledged yet so state is the same
    assertQueueState(buffer, FIRST, 3, 0);
    // acknowledge the pages
    buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);
    // attempt to get the three elements again
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
    // pages now acknowledged, so they are gone and the state reflects it
    assertQueueState(buffer, FIRST, 0, 3);
}
/**
 * Adding a new buffer id after no-more-buffer-ids has been declared must
 * be rejected with IllegalArgumentException.
 */
@Test
public void testAddQueueAfterCreation()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    assertFalse(buffer.isFinished());
    try {
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST)
                .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                .withNoMoreBufferIds());
        // Fixed: the message previously named IllegalStateException even
        // though IllegalArgumentException is what is caught below.
        fail("Expected IllegalArgumentException from addQueue after noMoreQueues has been called");
    }
    catch (IllegalArgumentException ignored) {
    }
}
/**
 * Pages added after setNoMorePages() are silently dropped: the total
 * sent-page count must remain zero.
 */
@Test
public void testAddAfterFinish()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    buffer.setNoMorePages();
    addPage(buffer, createPage(0));
    addPage(buffer, createPage(0));
    assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
/**
 * Declaring "no more buffers" on an empty buffer finishes it immediately,
 * and repeating the declaration is idempotent: it never throws and the
 * buffer stays finished.
 */
@Test
public void testAddQueueAfterNoMoreQueues()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
    assertFalse(buffer.isFinished());
    // First declaration finishes the buffer; the two repeats must be
    // accepted without an exception and leave the state unchanged.
    for (int attempt = 0; attempt < 3; attempt++) {
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
        assertTrue(buffer.isFinished());
    }
}
/**
 * Pages added after destroy() are silently dropped: the total sent-page
 * count must remain zero.
 */
@Test
public void testAddAfterDestroy()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    buffer.destroy();
    addPage(buffer, createPage(0));
    addPage(buffer, createPage(0));
    assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
}
/**
 * A read issued before the client buffer exists blocks, and completes as
 * soon as a page becomes available.
 */
@Test
public void testGetBeforeCreate()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that doesn't exist yet
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
    assertFalse(future.isDone());
    // add a page and verify the future is complete
    addPage(buffer, createPage(33));
    assertTrue(future.isDone());
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
}
/**
 * Setting the final buffer set to one that omits an already-used buffer id
 * must fail with IllegalStateException (verified via expectedExceptions).
 */
// Fixed: method name typo "Wihtout" -> "Without". TestNG discovers tests by
// annotation, so the rename is safe for the framework.
@Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = ".*does not contain.*\\[0]")
public void testSetFinalBuffersWithoutDeclaringUsedBuffer()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that doesn't exist yet
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
    assertFalse(future.isDone());
    // add a page and set no more pages
    addPage(buffer, createPage(33));
    buffer.setNoMorePages();
    // read the page
    assertTrue(future.isDone());
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
    // acknowledge the page and verify we are finished
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
    buffer.abort(FIRST);
    // set final buffers to a set that does not contain the buffer, which will fail
    buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
}
/**
 * Reading from a buffer id that was never declared, after the final buffer
 * set is fixed, must fail with IllegalStateException.
 */
@Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = "No more buffers already set")
public void testUseUndeclaredBufferAfterFinalBuffersSet()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that was not declared, which will fail
    buffer.get(SECOND, 0L, sizeOfPages(1));
}
/**
 * Aborting a buffer that a reader is already waiting on completes the
 * pending read and leaves that buffer in a terminal (finished) state.
 */
@Test
public void testAbortBeforeCreate()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that doesn't exist yet
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(1));
    assertFalse(future.isDone());
    // abort that buffer, and verify the future is complete and buffer is finished
    buffer.abort(FIRST);
    assertTrue(future.isDone());
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
/**
 * With declared buffers, filling the shared buffer makes the next enqueue
 * block. NOTE(review): the returned future is discarded; presumably
 * enqueuePage asserts the blocked state internally — confirm in
 * BufferTestUtils.
 */
@Test
public void testFullBufferBlocksWriter()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(2));
    // Add two pages, buffer is full
    addPage(buffer, createPage(1));
    addPage(buffer, createPage(2));
    // third page is blocked
    enqueuePage(buffer, createPage(3));
}
/**
 * A blocked writer is only released once EVERY client buffer has
 * acknowledged the pages — broadcast pages are retained until all
 * consumers have seen them.
 */
@Test
public void testAcknowledgementFreesWriters()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(2));
    // Add two pages, buffer is full
    addPage(buffer, createPage(1));
    addPage(buffer, createPage(2));
    assertQueueState(buffer, FIRST, 2, 0);
    // third page is blocked
    ListenableFuture<?> future = enqueuePage(buffer, createPage(3));
    // we should be blocked
    assertFalse(future.isDone());
    assertQueueState(buffer, FIRST, 3, 0);
    assertQueueState(buffer, SECOND, 3, 0);
    // acknowledge pages for first buffer, no space is freed
    buffer.get(FIRST, 2, sizeOfPages(10)).cancel(true);
    assertFalse(future.isDone());
    // acknowledge pages for second buffer, which makes space in the buffer
    buffer.get(SECOND, 2, sizeOfPages(10)).cancel(true);
    // writer should not be blocked
    assertFutureIsDone(future);
    assertQueueState(buffer, SECOND, 1, 2);
}
/**
 * Aborting each client buffer closes it independently; once both are
 * aborted the output buffer as a whole is finished, and subsequent reads
 * return terminal empty results.
 */
@Test
public void testAbort()
{
    BroadcastOutputBuffer bufferedBuffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // fill the buffer
    for (int i = 0; i < 10; i++) {
        addPage(bufferedBuffer, createPage(i));
    }
    bufferedBuffer.setNoMorePages();
    assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, FIRST, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
    bufferedBuffer.abort(FIRST);
    assertQueueClosed(bufferedBuffer, FIRST, 0);
    assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, FIRST, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
    assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, SECOND, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
    bufferedBuffer.abort(SECOND);
    assertQueueClosed(bufferedBuffer, SECOND, 0);
    assertFinished(bufferedBuffer);
    assertBufferResultEquals(TYPES, getBufferResult(bufferedBuffer, SECOND, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
/**
 * Finishing the buffer while all client queues are empty allows them to be
 * aborted and closed cleanly at sequence id 0.
 */
@Test
public void testFinishClosesEmptyQueues()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // finish while queues are empty
    buffer.setNoMorePages();
    assertQueueState(buffer, FIRST, 0, 0);
    assertQueueState(buffer, SECOND, 0, 0);
    buffer.abort(FIRST);
    buffer.abort(SECOND);
    assertQueueClosed(buffer, FIRST, 0);
    assertQueueClosed(buffer, SECOND, 0);
}
/**
 * Aborting a client buffer releases a reader blocked on it; the pending
 * read completes with an empty (non-terminal) result, and later reads see
 * the closed state.
 */
@Test
public void testAbortFreesReader()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one item
    addPage(buffer, createPage(0));
    assertTrue(future.isDone());
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // abort the buffer
    buffer.abort(FIRST);
    // verify the future completed
    // broadcast buffer does not return a "complete" result in this case, but it doesn't matter
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
    // further requests will see a completed result
    assertQueueClosed(buffer, FIRST, 1);
}
/**
 * setNoMorePages() releases a reader blocked waiting for data; the pending
 * read completes with a terminal empty result.
 */
@Test
public void testFinishFreesReader()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one item
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // finish the buffer
    buffer.setNoMorePages();
    assertQueueState(buffer, FIRST, 0, 1);
    // verify the future completed
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
}
/**
 * setNoMorePages() releases writers blocked on a full buffer; the queued
 * pages remain readable and the buffer finishes once everything is
 * consumed and the client aborts.
 */
@Test
public void testFinishFreesWriter()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // enqueue two additional pages (both will block on the full buffer)
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // finish the query
    buffer.setNoMorePages();
    assertFalse(buffer.isFinished());
    // verify futures are complete
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
    // get and acknowledge the last 6 pages
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(100), NO_WAIT),
            bufferResult(1, createPage(1), createPage(2), createPage(3), createPage(4), createPage(5), createPage(6)));
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 7, sizeOfPages(100), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 7, true));
    buffer.abort(FIRST);
    // verify finished
    assertFinished(buffer);
}
/**
 * destroy() releases a reader blocked waiting for data; the pending read
 * completes with an empty (non-terminal) result.
 */
@Test
public void testDestroyFreesReader()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // destroy the buffer
    buffer.destroy();
    assertQueueClosed(buffer, FIRST, 1);
    // verify the future completed
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
}
/**
 * destroy() (e.g. query cancellation) releases writers blocked on a full
 * buffer and moves the buffer to the finished state.
 */
@Test
public void testDestroyFreesWriter()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // add two pages to the buffer queue
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // destroy the buffer (i.e., cancel the query)
    buffer.destroy();
    assertFinished(buffer);
    // verify the futures are completed
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
}
/**
 * fail() intentionally does NOT complete blocked readers: pending and new
 * reads stay blocked so clients keep waiting rather than observing a
 * partial result.
 */
@Test
public void testFailDoesNotFreeReader()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // fail the buffer
    buffer.fail();
    // future should have not finished
    assertFalse(future.isDone());
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
}
/**
 * fail() releases writers blocked on a full buffer (so producers can shut
 * down), but the buffer itself does not report finished.
 */
@Test
public void testFailFreesWriter()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // add two pages to the buffer queue
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // fail the buffer (i.e., cancel the query)
    buffer.fail();
    assertFalse(buffer.isFinished());
    // verify the futures are completed
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
}
/**
 * After fail(), declaring additional buffers (and then no-more-buffer-ids)
 * is still accepted, but all reads — on old and new buffers alike — remain
 * blocked forever.
 */
@Test
public void testAddBufferAfterFail()
{
    OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST)
            .withBuffer(FIRST, BROADCAST_PARTITION_ID);
    BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // fail the buffer
    buffer.fail();
    // add a buffer
    outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
    buffer.setOutputBuffers(outputBuffers);
    // attempt to get page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    future = buffer.get(SECOND, 0, sizeOfPages(10));
    assertFalse(future.isDone());
    // set no more buffers
    outputBuffers = outputBuffers.withNoMoreBufferIds();
    buffer.setOutputBuffers(outputBuffers);
    // attempt to get page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    future = buffer.get(SECOND, 0, sizeOfPages(10));
    assertFalse(future.isDone());
}
/**
 * Even after all pages are produced, delivered, and no-more-pages is set,
 * the buffer is not finished until the client acknowledges via abort().
 */
@Test
public void testBufferCompletion()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    List<Page> pages = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        Page page = createPage(i);
        addPage(buffer, page);
        pages.add(page);
    }
    buffer.setNoMorePages();
    // get and acknowledge 5 pages
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(5), MAX_WAIT), createBufferResult(TASK_INSTANCE_ID, 0, pages));
    // buffer is not finished
    assertFalse(buffer.isFinished());
    // there are no more pages and no more buffers, but buffer is not finished because it didn't receive an acknowledgement yet
    // (NOTE: this assertion duplicates the one directly above)
    assertFalse(buffer.isFinished());
    // ask the buffer to finish
    buffer.abort(FIRST);
    // verify that the buffer is finished
    assertTrue(buffer.isFinished());
}
// Verifies that adding a page blocks while the memory reservation handler's
// future is incomplete, and unblocks once memory becomes available; a further
// add then blocks again because the two-page buffer is full.
@Test
public void testSharedBufferBlocking()
{
    SettableFuture<?> blockedFuture = SettableFuture.create();
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = serializePage(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), DataSize.ofBytes(pageSize * 2), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    // adding the first page will block as no memory is available (MockMemoryReservationHandler will return a future that is not done)
    enqueuePage(buffer, page);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add one more page after more memory is available
    addPage(buffer, page);
    // the buffer is full now
    enqueuePage(buffer, page);
}
// Verifies the memory manager stays blocked while EITHER the buffer is full or
// the pool is exhausted, and only unblocks when both buffer space and memory
// are available again.
@Test
public void testSharedBufferBlocking2()
{
    // start with a complete future
    SettableFuture<?> blockedFuture = SettableFuture.create();
    blockedFuture.set(null);
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = serializePage(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), DataSize.ofBytes(pageSize * 2), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    // add two pages to fill up the buffer (memory is available)
    addPage(buffer, page);
    addPage(buffer, page);
    // fill up the memory pool
    blockedFuture = SettableFuture.create();
    reservationHandler.updateBlockedFuture(blockedFuture);
    // allocate one more byte to make the buffer full
    memoryManager.updateMemoryUsage(1L);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    // memoryManager should still return a blocked future as the buffer is still full
    assertFalse(memoryManager.getBufferBlockedFuture().isDone(), "buffer should be blocked");
    // remove all pages from the memory manager and the 1 byte that we added above
    memoryManager.updateMemoryUsage(-pageSize * 2 - 1);
    // now we have both buffer space and memory available, so memoryManager shouldn't be blocked
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add two pages after more memory is available
    addPage(buffer, page);
    addPage(buffer, page);
    // the buffer is full now
    enqueuePage(buffer, page);
}
// Verifies setNoBlockOnFull(): the buffer still blocks on memory-pool
// exhaustion, but no longer blocks when only the buffer itself is full.
@Test
public void testSharedBufferBlockingNoBlockOnFull()
{
    SettableFuture<?> blockedFuture = SettableFuture.create();
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = serializePage(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), DataSize.ofBytes(pageSize * 2), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    memoryManager.setNoBlockOnFull();
    // even if setNoBlockOnFull() is called the buffer should block on memory when we add the first page
    // as no memory is available (MockMemoryReservationHandler will return a future that is not done)
    enqueuePage(buffer, page);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add one more page after more memory is available
    addPage(buffer, page);
    // the buffer is full now, but setNoBlockOnFull() is called so the buffer shouldn't block
    addPage(buffer, page);
}
/**
 * Memory reservation handler stub: {@code reserveMemory} always returns the
 * currently configured future, letting tests control exactly when "memory"
 * becomes available; {@code tryReserveMemory} always succeeds.
 */
private static class MockMemoryReservationHandler
        implements MemoryReservationHandler
{
    // future handed out by reserveMemory(); swapped by tests to simulate a full/free pool
    private ListenableFuture<?> blockedFuture;

    public MockMemoryReservationHandler(ListenableFuture<?> blockedFuture)
    {
        this.blockedFuture = requireNonNull(blockedFuture, "blockedFuture is null");
    }

    @Override
    public ListenableFuture<?> reserveMemory(String allocationTag, long delta)
    {
        return blockedFuture;
    }

    @Override
    public boolean tryReserveMemory(String allocationTag, long delta)
    {
        return true;
    }

    public void updateBlockedFuture(ListenableFuture<?> blockedFuture)
    {
        // null-check message kept consistent with the constructor
        this.blockedFuture = requireNonNull(blockedFuture, "blockedFuture is null");
    }
}
/**
 * Creates a broadcast buffer backed by the given memory context and
 * notification executor, and applies the initial output buffer declaration.
 */
private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize, AggregatedMemoryContext memoryContext, Executor notificationExecutor)
{
    BroadcastOutputBuffer buffer = new BroadcastOutputBuffer(
            TASK_INSTANCE_ID,
            new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
            dataSize,
            () -> memoryContext.newLocalMemoryContext("test"),
            notificationExecutor,
            () -> {});
    buffer.setOutputBuffers(outputBuffers);
    return buffer;
}
// Verifies that once no-more-buffers is declared, aborting every client buffer
// destroys the output buffer and moves it to FINISHED, even with pages pending.
@Test
public void testBufferFinishesWhenClientBuffersDestroyed()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withBuffer(THIRD, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    // add pages before closing the buffers to make sure
    // that the buffers close even if there are pending pages
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // the buffer is in the NO_MORE_BUFFERS state now
    // and if we abort all the buffers it should destroy itself
    // and move to the FINISHED state
    buffer.abort(FIRST);
    assertFalse(buffer.isFinished());
    buffer.abort(SECOND);
    assertFalse(buffer.isFinished());
    buffer.abort(THIRD);
    assertTrue(buffer.isFinished());
}
// Verifies forceFreeMemory(): all buffered bytes are released, and any page
// added afterwards is a no-op that does not re-acquire memory.
@Test
public void testForceFreeMemory()
{
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(1), 0);
    }
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    assertTrue(memoryManager.getBufferedBytes() > 0);
    buffer.forceFreeMemory();
    assertEquals(memoryManager.getBufferedBytes(), 0);
    // adding a page after forceFreeMemory() should be NOOP
    addPage(buffer, createPage(1));
    assertEquals(memoryManager.getBufferedBytes(), 0);
}
/**
 * Creates a broadcast buffer with a no-op status-change callback.
 */
private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize)
{
    return createBroadcastBuffer(outputBuffers, dataSize, () -> {});
}
/**
 * Creates a broadcast buffer using a fresh simple memory context and the
 * shared state-notification executor, then applies the initial buffer set.
 */
private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize, Runnable notifyStatusChanged)
{
    BroadcastOutputBuffer buffer = new BroadcastOutputBuffer(
            TASK_INSTANCE_ID,
            new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
            dataSize,
            () -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext(), "test"),
            stateNotificationExecutor,
            notifyStatusChanged);
    buffer.setOutputBuffers(outputBuffers);
    return buffer;
}
/**
 * Builds a BufferResult for this task starting at {@code token} and containing
 * {@code firstPage} followed by {@code otherPages}, in order.
 */
private static BufferResult bufferResult(long token, Page firstPage, Page... otherPages)
{
    ImmutableList.Builder<Page> pageList = ImmutableList.builder();
    pageList.add(firstPage);
    pageList.add(otherPages);
    return createBufferResult(TASK_INSTANCE_ID, token, pageList.build());
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.esb.project.model;
import org.wso2.developerstudio.eclipse.esb.project.utils.SolutionProjectArtifactConstants;
import org.wso2.developerstudio.eclipse.platform.core.exception.ObserverFailedException;
import static org.wso2.developerstudio.eclipse.esb.project.utils.SolutionProjectArtifactConstants.*;
public class ESBSolutionProjectModel extends ESBProjectModel {
private String esbProjectName;
private String registryProjectName;
private String connectorExporterProjectName;
private String compositeApplicationProjectName;
private String dockerExporterProjectName;
private String kubernetesExporterProjectName;
private String mmmProjectName;
private boolean registryProjectChecked = false;
private boolean connectorExporterProjectChecked = false;
private boolean cappProjectChecked = true;
private boolean isConfigProjectChecked = true;
private boolean isMMMProjectChecked = true;
public void setMMMProjectName(String projectName) {
this.mmmProjectName = projectName;
}
public String getMMMProjectName() {
return this.mmmProjectName;
}
public void setConfigProjectChecked(boolean isConfigProjectChecked) {
this.isConfigProjectChecked = isConfigProjectChecked;
}
public boolean isConfigProjectChecked() {
return this.isConfigProjectChecked;
}
public void setMMMProjectChecked(boolean isMMMProjectChecked) {
this.isMMMProjectChecked = isMMMProjectChecked;
}
public boolean isMMMProjectChecked() {
return this.isMMMProjectChecked;
}
public String getDockerExporterProjectName() {
return dockerExporterProjectName;
}
public String getKubernetesExporterProjectName() {
return kubernetesExporterProjectName;
}
public void setDockerExporterProjectName(String dockerExporterProjectName) {
this.dockerExporterProjectName = dockerExporterProjectName;
}
public void setKubernetesExporterProjectName(String kubernetesExporterProjectName) {
this.kubernetesExporterProjectName = kubernetesExporterProjectName;
}
public boolean isConnectorExporterProjectChecked() {
return connectorExporterProjectChecked;
}
public void setConnectorExporterProjectChecked(boolean connectorExporterProjectChecked) {
this.connectorExporterProjectChecked = connectorExporterProjectChecked;
}
public boolean isCappProjectChecked() {
return cappProjectChecked;
}
public void setCappProjectChecked(boolean cappProjectChecked) {
this.cappProjectChecked = cappProjectChecked;
}
public boolean isRegistryProjectChecked() {
return registryProjectChecked;
}
public void setRegistryProjectChecked(boolean registryProjectChecked) {
this.registryProjectChecked = registryProjectChecked;
}
public Object getModelPropertyValue(String key) {
Object modelPropertyValue = super.getModelPropertyValue(key);
if (modelPropertyValue == null) {
if (key.equals(ESB_PROJECT_NAME)) {
return getEsbProjectName();
} else if (key.equals(REGISTRY_RESOURCES_PROJECT_NAME)) {
return getRegistryProjectName();
} else if (key.equals(CONNECTOR_EXPORTER_PROJECT_NAME)) {
return getConnectorExporterProjectName();
} else if (key.equals(COMPOSITE_APPLICATION_PROJECT_NAME)) {
return getCompositeApplicationProjectName();
} else if (key.equals(DOCKER_EXPORTER_PROJECT_NAME)) {
return getDockerExporterProjectName();
} else if (key.equals(KUBERNETES_EXPORTER_PROJECT_NAME)) {
return getKubernetesExporterProjectName();
} else if (key.equals(SolutionProjectArtifactConstants.REGISTRY_PROJECT_CHECKED)) {
return isRegistryProjectChecked();
} else if (key.equals(CONNECTOR_EXPORTER_PROJECT_CHECKED)) {
return isConnectorExporterProjectChecked();
} else if (key.equals(CAPP_PROJECT_CHECKED)) {
return isCappProjectChecked();
} else if (key.equals(DOCKER_EXPORTER_PROJECT_CHECKED)) {
return isDockerExporterProjectChecked();
} else if (key.equals(KUBERNETES_EXPORTER_PROJECT_CHECKED)) {
return isKubernetesExporterProjectChecked();
} else if (key.equals(ESB_PROJECT_CHOICE)) {
return isConfigProjectChecked();
} else if (key.equals(MMM_PROJECT_NAME)) {
return getMMMProjectName();
} else if (key.equals(MMM_PROJECT_CHOICE)) {
return isMMMProjectChecked();
}
}
return modelPropertyValue;
}
public boolean setModelPropertyValue(String key, Object data) throws ObserverFailedException {
boolean returnResult = super.setModelPropertyValue(key, data);
String value = data.toString();
if (key.equals(MMM_PROJECT_NAME)) {
setMMMProjectName(value);
if (value != null && !value.isEmpty()) {
if (this.isConfigProjectChecked) {
if (this.isMMMProjectChecked) {
setEsbProjectName(value + "Configs");
} else {
setEsbProjectName(value);
}
}
setRegistryProjectName(value + "RegistryResources");
setConnectorExporterProjectName(value + "ConnectorExporter");
setCompositeApplicationProjectName(value + "CompositeExporter");
setDockerExporterProjectName(value + "DockerExporter");
setKubernetesExporterProjectName(value + "KubernetesExporter");
} else {
setEsbProjectName("");
setRegistryProjectName("");
setConnectorExporterProjectName("");
setCompositeApplicationProjectName("");
setDockerExporterProjectName("");
setKubernetesExporterProjectName("");
}
} else if (key.equals(ESB_PROJECT_NAME)) {
setEsbProjectName(value);
} else if (key.equals(ESB_PROJECT_CHOICE)) {
setConfigProjectChecked((boolean) data);
} else if (key.equals(MMM_PROJECT_CHOICE)) {
setMMMProjectChecked((boolean) data);
setEsbProjectName(getMMMProjectName());
setConfigProjectChecked(true);
} else if (key.equals(REGISTRY_RESOURCES_PROJECT_NAME)) {
setRegistryProjectName(value);
} else if (key.equals(CONNECTOR_EXPORTER_PROJECT_NAME)) {
setConnectorExporterProjectName(value);
} else if (key.equals(COMPOSITE_APPLICATION_PROJECT_NAME)) {
setCompositeApplicationProjectName(value);
} else if (key.equals(DOCKER_EXPORTER_PROJECT_NAME)) {
setDockerExporterProjectName(value);
} else if (key.equals(KUBERNETES_EXPORTER_PROJECT_NAME)) {
setKubernetesExporterProjectName(value);
} else if (key.equals(REGISTRY_PROJECT_CHECKED)) {
setRegistryProjectChecked((boolean) data);
} else if (key.equals(CONNECTOR_EXPORTER_PROJECT_CHECKED)) {
setConnectorExporterProjectChecked((boolean) data);
} else if (key.equals(CAPP_PROJECT_CHECKED)) {
setCappProjectChecked((boolean) data);
} else if (key.equals(DOCKER_EXPORTER_PROJECT_CHECKED)) {
setDockerExporterProjectChecked((boolean) data);
return (boolean) data;
} else if (key.equals(KUBERNETES_EXPORTER_PROJECT_CHECKED)) {
setKubernetesExporterProjectChecked((boolean) data);
return (boolean) data;
}
return returnResult;
}
public String getEsbProjectName() {
return esbProjectName;
}
public void setEsbProjectName(String esbProjectName) {
this.esbProjectName = esbProjectName;
}
public String getRegistryProjectName() {
return registryProjectName;
}
public void setRegistryProjectName(String registryProjectName) {
this.registryProjectName = registryProjectName;
}
public String getConnectorExporterProjectName() {
return connectorExporterProjectName;
}
public void setConnectorExporterProjectName(String connectorExporterProjectName) {
this.connectorExporterProjectName = connectorExporterProjectName;
}
public String getCompositeApplicationProjectName() {
return compositeApplicationProjectName;
}
public void setCompositeApplicationProjectName(String compositeApplicationProjectName) {
this.compositeApplicationProjectName = compositeApplicationProjectName;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.appium.java_client.events.stubs;
import com.google.common.collect.ImmutableList;
import org.openqa.selenium.Alert;
import org.openqa.selenium.By;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ContextAware;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.DeviceRotation;
import org.openqa.selenium.HasCapabilities;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.Rotatable;
import org.openqa.selenium.ScreenOrientation;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.WindowType;
import org.openqa.selenium.logging.Logs;
import org.openqa.selenium.remote.DesiredCapabilities;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * A no-op {@link WebDriver} stub used by event-firing tests. Navigation and
 * interaction methods do nothing or return stub/null values; a few methods
 * (getPageSource, getWindowHandle) deliberately throw to exercise error paths.
 */
public class EmptyWebDriver implements WebDriver, ContextAware, Rotatable,
    JavascriptExecutor, HasCapabilities, TakesScreenshot {

    public EmptyWebDriver() {
    }

    /** Returns a fixed immutable list of two stub elements. */
    private static List<StubWebElement> createStubList() {
        return ImmutableList.of(new StubWebElement(), new StubWebElement());
    }

    @Override
    public WebDriver context(String name) {
        return null;
    }

    @Override
    public Set<String> getContextHandles() {
        return null;
    }

    @Override
    public String getContext() {
        return "";
    }

    @Override
    public void rotate(ScreenOrientation orientation) {
    }

    @Override
    public void rotate(DeviceRotation rotation) {
    }

    @Override
    public ScreenOrientation getOrientation() {
        return null;
    }

    @Override
    public DeviceRotation rotation() {
        return null;
    }

    @Override
    public void get(String url) {
    }

    @Override
    public String getCurrentUrl() {
        return null;
    }

    @Override
    public String getTitle() {
        return null;
    }

    @Override
    public StubWebElement findElement(By by) {
        return new StubWebElement();
    }

    public StubWebElement findElement(String by, String using) throws WebDriverException, NoSuchElementException {
        return new StubWebElement();
    }

    @Override
    public List<WebElement> findElements(By by) {
        // typed List<WebElement> (instead of a raw List) so this properly
        // overrides WebDriver#findElements; copyOf widens the element type
        return ImmutableList.<WebElement>copyOf(createStubList());
    }

    public List<StubWebElement> findElements(String by, String using) throws WebDriverException {
        return createStubList();
    }

    @Override
    public String getPageSource() {
        throw new WebDriverException();
    }

    @Override
    public void close() {
    }

    @Override
    public void quit() {
    }

    @Override
    public Set<String> getWindowHandles() {
        return null;
    }

    @Override
    public String getWindowHandle() {
        throw new WebDriverException();
    }

    @Override
    public TargetLocator switchTo() {
        return new EmptyWebDriver.StubTargetLocator(this);
    }

    @Override
    public Navigation navigate() {
        return new EmptyWebDriver.StubNavigation();
    }

    @Override
    public Options manage() {
        return new EmptyWebDriver.StubOptions();
    }

    @Override
    public Object executeScript(String script, Object... args) {
        return null;
    }

    @Override
    public Object executeAsyncScript(String script, Object... args) {
        return null;
    }

    @Override
    public Capabilities getCapabilities() {
        Map<String, Object> map = new HashMap<>();
        map.put("0", "");
        map.put("1", "");
        return new DesiredCapabilities(map);
    }

    @Override
    public <X> X getScreenshotAs(OutputType<X> target) throws WebDriverException {
        return target.convertFromPngBytes(new byte[]{1, 2});
    }

    // nested stubs are static: they never touch the enclosing driver instance
    // (except StubTargetLocator, which receives it explicitly)
    private static class StubNavigation implements Navigation {

        private StubNavigation() {
        }

        public void back() {
        }

        public void forward() {
        }

        public void to(String url) {
        }

        public void to(URL url) {
        }

        public void refresh() {
        }
    }

    private static class StubOptions implements Options {

        private StubOptions() {
        }

        public void addCookie(Cookie cookie) {
        }

        public void deleteCookieNamed(String name) {
        }

        public void deleteCookie(Cookie cookie) {
        }

        public void deleteAllCookies() {
        }

        public Set<Cookie> getCookies() {
            return null;
        }

        public Cookie getCookieNamed(String name) {
            return null;
        }

        public Timeouts timeouts() {
            return null;
        }

        public ImeHandler ime() {
            return null;
        }

        public Window window() {
            return new StubWindow();
        }

        public Logs logs() {
            return null;
        }
    }

    private static class StubTargetLocator implements TargetLocator {

        private final WebDriver driver;

        StubTargetLocator(WebDriver driver) {
            this.driver = driver;
        }

        public WebDriver frame(int index) {
            return this.driver;
        }

        public WebDriver frame(String nameOrId) {
            return this.driver;
        }

        public WebDriver frame(WebElement frameElement) {
            return this.driver;
        }

        public WebDriver parentFrame() {
            return this.driver;
        }

        public WebDriver window(String nameOrHandle) {
            return this.driver;
        }

        @Override
        public WebDriver newWindow(WindowType typeHint) {
            return null;
        }

        public WebDriver defaultContent() {
            return this.driver;
        }

        public WebElement activeElement() {
            return new StubWebElement();
        }

        public Alert alert() {
            return new StubAlert();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.binary.BinaryObjectBuilder;
import org.apache.ignite.cache.QueryEntity;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.FieldsQueryCursor;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.failure.StopNodeFailureHandler;
import org.apache.ignite.internal.processors.cache.index.AbstractIndexingCommonTest;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.GridTestUtils;
import org.junit.Test;
import static org.apache.ignite.testframework.GridTestUtils.assertThrowsWithCause;
/**
* Checks add field with invalid data type to index.
*/
public class CreateIndexOnInvalidDataTypeTest extends AbstractIndexingCommonTest {
    /** Keys count. */
    private static final int KEY_CNT = 10;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        // persistence is enabled so index corruption would survive a restart;
        // StopNodeFailureHandler lets the test detect a critical failure as a node stop
        return super.getConfiguration(igniteInstanceName)
            .setFailureHandler(new StopNodeFailureHandler())
            .setDataStorageConfiguration(
                new DataStorageConfiguration()
                    .setDefaultDataRegionConfiguration(
                        new DataRegionConfiguration()
                            .setPersistenceEnabled(true)
                    )
            );
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        super.afterTest();
    }

    /**
     * Check case when index is created on the field with invalid data type.
     * Test steps:
     * - create cache with query entity describes a table;
     * - fill data (real data contains the fields that was not described by query entity);
     * - execute alter table (ADD COLUMN with invalid type for exists field);
     * - try to create index for the new field - exception must be throw;
     * - checks that index isn't created.
     */
    @Test
    public void testCreateIndexOnInvalidData() throws Exception {
        startGrid();

        grid().cluster().state(ClusterState.ACTIVE);

        IgniteCache<Integer, Value> c = grid().createCache(
            new CacheConfiguration<Integer, Value>()
                .setName("test")
                .setSqlSchema("PUBLIC")
                .setQueryEntities(
                    Collections.singleton(
                        new QueryEntity(Integer.class, Value.class)
                            .setTableName("TEST")
                    )
                )
                .setBackups(1)
                .setAffinity(new RendezvousAffinityFunction(false, 10))
        );

        // stored values already contain a java.util.Date in val_date,
        // which the query entity does not describe
        for (int i = 0; i < KEY_CNT; ++i)
            c.put(i, new Value(i));

        sql("ALTER TABLE TEST ADD COLUMN (VAL_DATE DATE)");

        sql("CREATE INDEX TEST_VAL_INT_IDX ON TEST(VAL_INT)");

        // index creation must fail: existing java.util.Date values cannot be
        // cast to the declared SQL DATE type
        GridTestUtils.assertThrowsAnyCause(log, () -> {
                sql("CREATE INDEX TEST_VAL_DATE_IDX ON TEST(VAL_DATE)");

                return null;
            },
            IgniteSQLException.class, "java.util.Date cannot be cast to java.sql.Date");

        // Wait for node stop if it is initiated by FailureHandler
        U.sleep(1000);

        // the node must still be alive and queryable after the failed CREATE INDEX
        List<List<?>> res = sql("SELECT val_int FROM TEST where val_int > -1").getAll();

        assertEquals(KEY_CNT, res.size());

        // the invalid index must not have been (partially) created
        GridTestUtils.assertThrowsAnyCause(log, () -> {
                sql("DROP INDEX TEST_VAL_DATE_IDX");

                return null;
            },
            IgniteSQLException.class, "Index doesn't exist: TEST_VAL_DATE_IDX");
    }

    /**
     * Check case when row with invalid field is added.
     * Test steps:
     * - create table;
     * - create two index;
     * - try add entry - exception must be thrown;
     * - remove the index for field with invalid type;
     * - check that select query that uses the index for valid field is successful.
     */
    @Test
    public void testAddInvalidDataToIndex() throws Exception {
        startGrid();

        grid().cluster().state(ClusterState.ACTIVE);

        sql("CREATE TABLE TEST (ID INT PRIMARY KEY, VAL_INT INT, VAL_DATE DATE) " +
            "WITH \"CACHE_NAME=test,VALUE_TYPE=ValueType0\"");

        sql("CREATE INDEX TEST_0_VAL_DATE_IDX ON TEST(VAL_DATE)");
        sql("CREATE INDEX TEST_1_VAL_INT_IDX ON TEST(VAL_INT)");

        BinaryObjectBuilder bob = grid().binary().builder("ValueType0");

        bob.setField("VAL_INT", 10);
        // java.util.Date is invalid for the SQL DATE column, so the indexed put must fail
        bob.setField("VAL_DATE", new java.util.Date());

        assertThrowsWithCause(() -> grid().cache("test").put(0, bob.build()), IgniteSQLException.class);

        // the failed put must not leave a partial entry behind
        assertNull(grid().cache("test").get(0));

        sql("DROP INDEX TEST_0_VAL_DATE_IDX");

        // Check successful insert after index is dropped.
        grid().cache("test").put(1, bob.build());

        List<List<?>> res = sql("SELECT VAL_INT FROM TEST WHERE VAL_INT > 0").getAll();

        assertEquals(1, res.size());
    }

    /**
     * @param sql SQL query.
     * @param args Query parameters.
     * @return Results cursor.
     */
    private FieldsQueryCursor<List<?>> sql(String sql, Object... args) {
        return grid().context().query().querySqlFields(new SqlFieldsQuery(sql)
            .setLazy(true)
            .setArgs(args), false);
    }

    /** Test value class. val_date carries no {@code @QuerySqlField}, so it is
     * stored in the binary object but not described by the query entity. */
    private static class Value {
        /** */
        @QuerySqlField
        int val_int;

        /** */
        Date val_date;

        /**
         * @param val Test value.
         */
        public Value(int val) {
            this.val_int = val;

            val_date = new Date(val);
        }
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Druid;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2019-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Druid chant that raises a "purple moon" over the caster's room. While the
 * effect lasts, damage dealt to chaotic beings in the room is halved and
 * damage dealt to lawful beings is increased by half; neutral beings are
 * unaffected. The effect ends when the moon is no longer visible.
 */
public class Chant_PurpleMoon extends Chant
{
	@Override
	public String ID()
	{
		return "Chant_PurpleMoon";
	}

	private final static String localizedName = CMLib.lang().L("Purple Moon");

	@Override
	public String name()
	{
		return localizedName;
	}

	private final static String localizedStaticDisplay = CMLib.lang().L("(Purple Moon)");

	@Override
	public String displayText()
	{
		return localizedStaticDisplay;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	@Override
	public int enchantQuality()
	{
		return Ability.QUALITY_INDIFFERENT;
	}

	@Override
	protected int canAffectCode()
	{
		// the effect attaches to rooms, not mobs or items
		return CAN_ROOMS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_CHANT|Ability.DOMAIN_MOONALTERING;
	}

	@Override
	public void unInvoke()
	{
		// undo the affects of this spell
		if((affected==null)||(!(affected instanceof Room)))
			return;
		final Room R=(Room)affected;
		if(canBeUninvoked())
			R.showHappens(CMMsg.MSG_OK_VISUAL,L("The purple moon sets."));
		super.unInvoke();
	}

	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(affected==null)
			return false;
		// the chant only persists while the moon is visible from the room
		if(affected instanceof Room)
		{
			final Room R=(Room)affected;
			if(!R.getArea().getClimateObj().canSeeTheMoon(R,this))
				unInvoke();
		}
		return true;
	}

	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!super.okMessage(myHost,msg))
			return false;
		if((msg.targetMinor()==CMMsg.TYP_DAMAGE)
		&&(msg.target() instanceof MOB))
		{
			final MOB mob=(MOB)msg.target();
			// adjust incoming damage by half its value: subtracted for chaotic
			// targets (half damage), added for lawful targets (150% damage);
			// neutral targets pass through unchanged
			int recovery=(int)Math.round(CMath.div((msg.value()),2.0));
			if(CMLib.flags().isChaotic(mob))
				recovery=recovery*-1;
			else
			if(!CMLib.flags().isLawful(mob))
				return true;
			msg.setValue(msg.value()+recovery);
		}
		return true;
	}

	@Override
	public int castingQuality(final MOB mob, final Physical target)
	{
		// advise the AI: only worthwhile for a chaotic caster, against a
		// non-chaotic victim, with the moon visible and no moon-altering
		// effect already on the room
		if(mob!=null)
		{
			if(!CMLib.flags().isChaotic(mob))
				return Ability.QUALITY_INDIFFERENT;
			if((mob.isInCombat())&&CMLib.flags().isChaotic(mob.getVictim()))
				return Ability.QUALITY_INDIFFERENT;
			final Room R=mob.location();
			if(R!=null)
			{
				if(!R.getArea().getClimateObj().canSeeTheMoon(R,null))
					return Ability.QUALITY_INDIFFERENT;
				if(R.fetchEffect(ID())!=null)
					return Ability.QUALITY_INDIFFERENT;
				for(final Enumeration<Ability> a=R.effects();a.hasMoreElements();)
				{
					final Ability A=a.nextElement();
					if((A!=null)
					&&((A.classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_MOONALTERING))
						return Ability.QUALITY_INDIFFERENT;
				}
			}
		}
		return super.castingQuality(mob,target);
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		final Room target=mob.location();
		if(target==null)
			return false;
		// preconditions: moon visible, no purple moon yet, and no other
		// moon-altering effect on the room
		if(!target.getArea().getClimateObj().canSeeTheMoon(target,null))
		{
			mob.tell(L("You must be able to see the moon for this magic to work."));
			return false;
		}
		if(target.fetchEffect(ID())!=null)
		{
			mob.tell(L("This place is already under the purple moon."));
			return false;
		}
		for(final Enumeration<Ability> a=target.effects();a.hasMoreElements();)
		{
			final Ability A=a.nextElement();
			if((A!=null)
			&&((A.classificationCode()&Ability.ALL_DOMAINS)==Ability.DOMAIN_MOONALTERING))
			{
				mob.tell(L("The moon is already under @x1, and can not be changed until this magic is gone.",A.name()));
				return false;
			}
		}

		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		final boolean success=proficiencyCheck(mob,0,auto);

		if(success)
		{
			invoker=mob;
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> chant(s) to the sky.^?"));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// NOTE(review): msg.value()<=0 appears to indicate the magic was
				// not resisted — confirm against the engine's save conventions
				if(msg.value()<=0)
				{
					mob.location().showHappens(CMMsg.MSG_OK_VISUAL,L("The Purple Moon Rises!"));
					beneficialAffect(mob,target,asLevel,0);
				}
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> chant(s) to the sky, but the magic fades."));

		// return whether it worked
		return success;
	}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.android.apps.exposurenotification.home;
import static com.google.common.truth.Truth.assertThat;
import androidx.fragment.app.Fragment;
import androidx.lifecycle.MutableLiveData;
import com.google.android.apps.exposurenotification.home.ExposureNotificationViewModel.ExposureNotificationState;
import com.google.android.apps.exposurenotification.onboarding.OnboardingEnTurndownForRegionFragment;
import com.google.android.apps.exposurenotification.onboarding.OnboardingEnTurndownFragment;
import com.google.android.apps.exposurenotification.onboarding.OnboardingPermissionDisabledFragment;
import com.google.android.apps.exposurenotification.onboarding.OnboardingPermissionEnabledFragment;
import com.google.android.apps.exposurenotification.onboarding.OnboardingPrivateAnalyticsFragment;
import com.google.android.apps.exposurenotification.testsupport.ExposureNotificationRules;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import dagger.hilt.android.testing.HiltAndroidTest;
import dagger.hilt.android.testing.HiltTestApplication;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@HiltAndroidTest
@RunWith(RobolectricTestRunner.class)
@Config(application = HiltTestApplication.class)
public class SplashNextFragmentLiveDataTest {
@Rule
public ExposureNotificationRules rules = ExposureNotificationRules.forTest(this).build();
private final Set<ExposureNotificationState> notTurndownStates = ImmutableSet.of(
ExposureNotificationState.PAUSED_BLE,
ExposureNotificationState.PAUSED_LOCATION,
ExposureNotificationState.PAUSED_LOCATION_BLE,
ExposureNotificationState.PAUSED_HW_NOT_SUPPORT,
ExposureNotificationState.PAUSED_USER_PROFILE_NOT_SUPPORT,
ExposureNotificationState.STORAGE_LOW,
ExposureNotificationState.FOCUS_LOST,
ExposureNotificationState.DISABLED,
ExposureNotificationState.ENABLED
);
private final MutableLiveData<Boolean> isEnabledLiveData = new MutableLiveData<>(false);
private final MutableLiveData<ExposureNotificationState> enStateLiveData =
new MutableLiveData<>();
private final MutableLiveData<Boolean> isOnboardingStateSetLiveData =
new MutableLiveData<>(false);
private final MutableLiveData<Boolean> isPrivateAnalyticsSetLiveData =
new MutableLiveData<>(false);
private final MutableLiveData<Boolean> isPrivateAnalyticsSupportedAndConfiguredLiveData =
new MutableLiveData<>(false);
@Test
public void enabled_notOnboarded_returnsOnboardingPermissionEnabledFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(true),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(false),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true, false),
/* expected= */ OnboardingPermissionEnabledFragment.class);
}
@Test
public void notEnabled_notOnboarded_notEnTurndown_returnsOnboardingPermissionDisabledFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(false),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(false),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true, false),
/* expected= */ OnboardingPermissionDisabledFragment.class);
}
@Test
public void enabled_onboarded_notMigratingApp_ENPASupportedAndConfiguredButNotSet_returnsOnboardingPrivateAnalyticsFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(true),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true),
/* onboardAsMigratingUser= */ Sets.newHashSet(false),
/* expected= */ OnboardingPrivateAnalyticsFragment.class);
}
@Test
public void notEnabled_onboarded_notMigratingApp_returnsHomeFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(false),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(false),
/* expected= */ SinglePageHomeFragment.class);
}
@Test
public void notEnabled_onboarded_onboardAsMigratingUser_returnsOnboardingPermissionDisabledFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(false),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true),
/* expected= */ OnboardingPermissionDisabledFragment.class);
}
@Test
public void enabled_onboarded_onboardAsMigratingUser_returnsOnboardingPermissionEnabledFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(true),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true),
/* expected= */ OnboardingPermissionEnabledFragment.class);
}
@Test
public void enabled_onboarded_notMigratingApp_ENPASetAndSupportedAndConfigured_returnsHomeFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(true),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true),
/* onboardAsMigratingUser= */ Sets.newHashSet(false),
/* expected= */ SinglePageHomeFragment.class);
}
@Test
public void enabled_onboarded_notMigratingApp_ENPANotSupportedAndConfigured_returnsHomeFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(true),
/* enStates= */ notTurndownStates,
/* isOnboardingStateSetStates= */ Sets.newHashSet(true),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(false),
/* onboardAsMigratingUser= */ Sets.newHashSet(false),
/* expected= */ SinglePageHomeFragment.class);
}
@Test
public void notEnabled_notOnboarded_enTurndownForRegion_returnsOnboardingEnTurndownForRegionFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(false),
/* enStates= */ Sets.newHashSet(ExposureNotificationState.PAUSED_NOT_IN_ALLOWLIST),
/* isOnboardingStateSetStates= */ Sets.newHashSet(false),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(true, false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true, false),
/* expected= */ OnboardingEnTurndownForRegionFragment.class);
}
@Test
public void notEnabled_notOnboarded_enTurndown_returnsOnboardingEnTurndownFragment() {
assertEnumeratedCases(
/* isEnabledStates= */ Sets.newHashSet(false),
/* enStates= */ Sets.newHashSet(ExposureNotificationState.PAUSED_EN_NOT_SUPPORT),
/* isOnboardingStateSetStates= */ Sets.newHashSet(false),
/* isPrivateAnalyticsSetStates= */ Sets.newHashSet(true, false),
/* isPrivateAnalyticsSupportedAndConfiguredStates= */ Sets.newHashSet(false),
/* onboardAsMigratingUser= */ Sets.newHashSet(true, false),
/* expected= */ OnboardingEnTurndownFragment.class);
}
/**
* This method checks if the next fragment after SplashFragment is as expected using Sets of
* booleans and {@link ExposureNotificationState} objects to test many scenarios under which
* SplashNextFragmentLiveData is created.
*/
private void assertEnumeratedCases(
Set<Boolean> isEnabledStates,
Set<ExposureNotificationState> enStates,
Set<Boolean> isOnboardingStateSetStates,
Set<Boolean> isPrivateAnalyticsSetStates,
Set<Boolean> isPrivateAnalyticsSupportedAndConfiguredStates,
Set<Boolean> onboardAsMigratingUserStates,
Class expected) {
for (boolean onboardAsMigratingUser : onboardAsMigratingUserStates) {
SplashNextFragmentLiveData splashNextFragmentLiveData = SplashNextFragmentLiveData.create(
isEnabledLiveData,
enStateLiveData,
isOnboardingStateSetLiveData,
isPrivateAnalyticsSetLiveData,
isPrivateAnalyticsSupportedAndConfiguredLiveData,
onboardAsMigratingUser);
AtomicReference<Fragment> current = new AtomicReference<>();
splashNextFragmentLiveData.observeForever(current::set);
Set<List<Boolean>> states =
Sets.cartesianProduct(isEnabledStates, isOnboardingStateSetStates,
isPrivateAnalyticsSetStates, isPrivateAnalyticsSupportedAndConfiguredStates);
for (List<Boolean> state : states) {
isEnabledLiveData.setValue(state.get(0));
isOnboardingStateSetLiveData.setValue(state.get(1));
isPrivateAnalyticsSetLiveData.setValue(state.get(2));
isPrivateAnalyticsSupportedAndConfiguredLiveData.setValue(state.get(3));
for (ExposureNotificationState enState : enStates) {
enStateLiveData.setValue(enState);
assertThat(current.get()).isInstanceOf(expected);
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.query;
import java.text.DateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.Properties;
import java.util.Set;
import it.unimi.dsi.fastutil.floats.FloatArrayList;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.execute.RegionFunctionContext;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.cache.query.internal.ResultsSet;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.NanoTimer;
/**
 * Standalone micro-benchmark comparing query execution strategies against an in-memory
 * region of {@code Portfolio} objects: a hand-coded scan, an OQL brute-force query, and an
 * indexed OQL query, plus the cost of index creation itself. Results are printed to stdout.
 */
public class PerfQuery {
  /** Number of times each query is executed to compute an average. */
  private static final int NUM_ITERATIONS = 20000;

  // exec types: indexes into the results array
  private static final int HAND_CODED = 0;
  private static final int BRUTE_FORCE = 1;
  private static final int INDEXED = 2;
  private static final int INDEX_CREATE = 3;
  private static final String[] execTypeStrings =
      new String[] {"hand-coded", "brute force", "indexed", "index-create"};

  /** Region sizes each query is benchmarked against. */
  private static final int[] DATA_SET_SIZES = new int[] {100, 1000, 10000, 20000};

  private DistributedSystem ds;
  private Region region = null;
  private RegionAttributes regionAttributes;
  private QueryService qs;
  protected Cache cache;

  // RESULTS: one list of average times (ms) per exec type, appended in run order;
  // the first DATA_SET_SIZES.length entries of each list come from the warm-up pass.
  private final FloatArrayList[] results;

  /** Creates a new instance of PerfQuery */
  public PerfQuery() {
    results = new FloatArrayList[4];
    results[BRUTE_FORCE] = new FloatArrayList();
    results[HAND_CODED] = new FloatArrayList();
    results[INDEXED] = new FloatArrayList();
    results[INDEX_CREATE] = new FloatArrayList();
  }

  /** Runs the full benchmark: setup, queries, summary, teardown, wall-clock report. */
  public void run() throws Exception {
    Date startTime = new Date();
    setUp();
    DateFormat formatter = DateFormat.getDateTimeInstance();
    System.out.println("Test started at: " + formatter.format(startTime));
    runQueries();
    printSummary();
    tearDown();
    Date endTime = new Date();
    System.out.println("Test ended at: " + formatter.format(endTime));
    long durationMs = endTime.getTime() - startTime.getTime();
    long durationS = durationMs / 1000;
    long durationM = durationS / 60;
    long durationMM = durationM % 60;
    long durationH = durationM / 60;
    System.out.println("Test took " + durationH + "hrs, " + durationMM + "min.");
  }

  /** Prints a CSV-style summary of average execution times, skipping the warm-up pass. */
  private void printSummary() {
    System.out.println("Query Execution Performance Results Summary");
    System.out.println("num iterations = " + NUM_ITERATIONS);
    System.out.println();
    System.out.println("Average query execution time in ms");
    System.out.println();
    String[] setNames = new String[] {"33% Retrieval", "0% Retrieval"};
    for (int setI = 0; setI < setNames.length; setI++) {
      String setName = setNames[setI];
      System.out.println(setName + ":");
      System.out.println("dataset size,hand-coded,brute-force,indexed,[index-create-time]");
      for (int szi = 0; szi < DATA_SET_SIZES.length; szi++) {
        System.out.print(DATA_SET_SIZES[szi]);
        System.out.print(',');
        for (int ti = HAND_CODED; ti <= INDEX_CREATE; ti++) {
          // skip over first set of each type, which was warm up
          int ix = ((setI + 1) * DATA_SET_SIZES.length) + szi;
          // getFloat avoids the boxed (and deprecated) List.get(int) on FloatArrayList
          System.out.print(results[ti].getFloat(ix));
          if (ti < INDEX_CREATE) {
            System.out.print(',');
          }
        }
        System.out.println();
      }
      System.out.println();
    }
  }

  /**
   * Executes the benchmark query sets: a warm-up pass (discarded by printSummary), the
   * "33% retrieval" query (type = 'type1'), and the "0% retrieval" miss query.
   */
  private void runQueries() throws Exception {
    String queryString;
    Query query;
    // WARM-UP
    System.out.println("WARMING UP...");
    queryString = "select distinct * from /portfolios where type = 'type1'";
    query = this.qs.newQuery(queryString);
    runQuery(getType1HandQuery(queryString), HAND_CODED);
    runQuery(query, BRUTE_FORCE);
    runQuery(query, INDEXED);
    warmUpIndexCreation();
    System.out.println("END WARM UP");
    // 1/3 DATASET
    queryString = "select distinct * from /portfolios where type = 'type1'";
    query = this.qs.newQuery(queryString);
    runQuery(getType1HandQuery(queryString), HAND_CODED);
    runQuery(query, BRUTE_FORCE);
    runQuery(query, INDEXED);
    // MISS QUERY
    queryString = "select distinct * from /portfolios where type = 'miss'";
    query = this.qs.newQuery(queryString);
    runQuery(getMissHandQuery(queryString), HAND_CODED);
    runQuery(query, BRUTE_FORCE);
    runQuery(query, INDEXED);
  }

  /**
   * Runs {@code query} NUM_ITERATIONS times against each dataset size, recording the
   * average per-execution time (ms) into the results list for {@code execType}.
   */
  private void runQuery(Query query, int execType) throws Exception {
    System.out.println("Executing Query: " + query.getQueryString());
    System.out.println("Num iterations=" + NUM_ITERATIONS);
    System.out.println();
    boolean indexed = execType == INDEXED;
    for (int i = 0; i < DATA_SET_SIZES.length; i++) {
      int num = DATA_SET_SIZES[i];
      populate(num, indexed);
      System.out.println("Executing (" + execTypeStrings[execType] + ")...");
      long startTime = NanoTimer.getTime();
      // renamed from "results" so the local no longer shadows the results field
      SelectResults queryResults = null;
      for (int j = 0; j < NUM_ITERATIONS; j++) {
        queryResults = (SelectResults) query.execute();
      }
      long totalTime = NanoTimer.getTime() - startTime;
      System.out.println("results size =" + queryResults.size());
      // Divide in floating point: the previous integer division
      // (totalTime / NUM_ITERATIONS) truncated to whole nanoseconds per iteration
      // before converting to milliseconds.
      float aveTime = totalTime / (float) NUM_ITERATIONS / 1e6f;
      System.out.println("ave execution time=" + aveTime + " ms");
      this.results[execType].add(aveTime);
      System.out.println();
    }
    System.out.println("--------------------------------------------");
  }

  // --------------------------------------------------------
  // Hand-Coded Queries

  /** Hand-coded equivalent of the 'type1' OQL query: a straight scan over region values. */
  private Query getType1HandQuery(String queryString) {
    return new HandQuery(queryString) {
      @Override
      public Object execute() {
        Region region = PerfQuery.this.cache.getRegion("/portfolios");
        SelectResults results = new ResultsSet();
        for (Iterator itr = region.values().iterator(); itr.hasNext();) {
          Portfolio ptflo = (Portfolio) itr.next();
          if ("type1".equals(ptflo.getType())) {
            results.add(ptflo);
          }
        }
        return results;
      }
    };
  }

  /** Hand-coded equivalent of the miss query: a scan whose predicate never matches. */
  private Query getMissHandQuery(String queryString) {
    return new HandQuery(queryString) {
      @Override
      public Object execute() {
        Region region = PerfQuery.this.cache.getRegion("/portfolios");
        SelectResults results = new ResultsSet();
        for (Iterator itr = region.values().iterator(); itr.hasNext();) {
          Portfolio ptflo = (Portfolio) itr.next();
          if ("miss".equals(ptflo.getType())) {
            results.add(ptflo);
          }
        }
        return results;
      }
    };
  }

  // --------------------------------------------------------

  /** Connects to the distributed system and prepares cache, region attributes and QueryService. */
  private void setUp() throws CacheException {
    this.ds = DistributedSystem.connect(new Properties());
    this.cache = CacheFactory.create(ds);
    AttributesFactory attributesFactory = new AttributesFactory();
    attributesFactory.setValueConstraint(Portfolio.class);
    this.regionAttributes = attributesFactory.create();
    this.qs = this.cache.getQueryService();
  }

  private void tearDown() {
    this.ds.disconnect();
  }

  /**
   * Recreates the /portfolios region with {@code numPortfolios} entries; when
   * {@code indexed} is true also creates a functional index on 'type' and records
   * the index creation time.
   */
  private void populate(int numPortfolios, boolean indexed) throws CacheException, QueryException {
    System.out.println("Populating Cache with " + numPortfolios + " Portfolios");
    if (this.region != null) {
      this.region.localDestroyRegion();
    }
    this.region = cache.createRegion("portfolios", this.regionAttributes);
    for (int i = 0; i < numPortfolios; i++) {
      this.region.put(String.valueOf(i), new Portfolio(i));
    }
    if (indexed) {
      System.out.println("Creating index...");
      long startNanos = NanoTimer.getTime();
      this.qs.createIndex("portfolios", IndexType.FUNCTIONAL, "type", "/portfolios");
      float createTime = (NanoTimer.getTime() - startNanos) / 1e6f;
      System.out.println("Index created in " + createTime + " ms.");
      this.results[INDEX_CREATE].add(createTime);
    }
  }

  /** Warms up index creation/removal on a 1000-entry region so JIT effects settle. */
  private void warmUpIndexCreation() throws CacheException, QueryException {
    System.out.println("Populating Cache with 1000 Portfolios");
    if (this.region != null) {
      this.region.localDestroyRegion();
    }
    this.region = cache.createRegion("portfolios", this.regionAttributes);
    for (int i = 0; i < 1000; i++) {
      this.region.put(String.valueOf(i), new Portfolio(i));
    }
    System.out.println("Warming up index creation...");
    for (int i = 0; i < 20000; i++) {
      Index index = this.qs.createIndex("portfolios", IndexType.FUNCTIONAL, "type", "/portfolios");
      this.qs.removeIndex(index);
    }
  }

  /**
   * @param args the command line arguments
   */
  public static void main(String[] args) throws Exception {
    new PerfQuery().run();
  }

  /**
   * Minimal Query implementation whose {@link #execute()} is supplied by subclasses;
   * everything except the no-arg execute path is unsupported.
   */
  abstract class HandQuery implements Query {
    private final String queryString;

    HandQuery(String queryString) {
      this.queryString = queryString;
    }

    @Override
    public abstract Object execute() throws FunctionDomainException, TypeMismatchException,
        NameResolutionException, QueryInvocationTargetException;

    @Override
    public void compile() throws TypeMismatchException, NameResolutionException {
      // already compiled
    }

    @Override
    public boolean isCompiled() {
      return true;
    }

    @Override
    public String getQueryString() {
      return this.queryString;
    }

    @Override
    public Object execute(Object[] params) throws FunctionDomainException, TypeMismatchException,
        NameResolutionException, QueryInvocationTargetException {
      throw new UnsupportedOperationException();
    }

    @Override
    public QueryStatistics getStatistics() {
      throw new UnsupportedOperationException();
    }

    public Set getRegionsInQuery() {
      throw new UnsupportedOperationException();
    }

    @Override
    public Object execute(RegionFunctionContext context) throws FunctionDomainException,
        TypeMismatchException, NameResolutionException, QueryInvocationTargetException {
      throw new UnsupportedOperationException();
    }

    @Override
    public Object execute(RegionFunctionContext context, Object[] params)
        throws FunctionDomainException, TypeMismatchException, NameResolutionException,
        QueryInvocationTargetException {
      throw new UnsupportedOperationException();
    }
  }
}
| |
/*
* Copyright 2017 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.errorprone.BugPattern.SeverityLevel.SUGGESTION;
import com.google.common.base.Ascii;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Streams;
import com.google.errorprone.BugPattern;
import com.google.errorprone.BugPattern.LinkType;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.TypeParameterTreeMatcher;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.names.NamingConventions;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TypeParameterTree;
import com.sun.source.util.TreePath;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.TypeVariableSymbol;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.lang.model.element.Name;
/**
* Enforces type parameters match the google style guide.
*
* @author siyuanl@google.com (Siyuan Liu)
* @author glorioso@google.com (Nick Glorioso)
*/
@BugPattern(
    summary =
        "Type parameters must be a single letter with an optional numeric suffix,"
            + " or an UpperCamelCase name followed by the letter 'T'.",
    severity = SUGGESTION,
    linkType = LinkType.CUSTOM,
    link = "https://google.github.io/styleguide/javaguide.html#s5.2.8-type-variable-names")
public class TypeParameterNaming extends BugChecker implements TypeParameterTreeMatcher {

  // Splits a name like "Foo2" into its base ("Foo") and trailing digits ("2").
  private static final Pattern TRAILING_DIGIT_EXTRACTOR = Pattern.compile("^(.*?)(\\d+)$");
  // Matches one capital letter optionally followed by digits, e.g. T, T2, R9.
  private static final Pattern SINGLE_PLUS_MAYBE_DIGITS = Pattern.compile("[A-Z]\\d*");

  // Capitalizes the first character of a lowercase token produced by splitToLowercaseTerms.
  private static String upperCamelToken(String s) {
    return "" + Ascii.toUpperCase(s.charAt(0)) + (s.length() == 1 ? "" : s.substring(1));
  }

  /**
   * An enum that classifies a String name into different types, based on the Google Java Style
   * Guide's rules for Type Parameters.
   */
  public enum TypeParameterNamingClassification {
    /** Examples: B, Q, R2, T1, A9 */
    LETTER_WITH_MAYBE_NUMERAL(true),

    /**
     * A valid Type Parameter name, that follows the style guide rule:
     *
     * <p>Examples: DataTypeT, FooT, BarT
     */
    CLASS_NAME_WITH_T(true),

    /**
     * Names of the form which are not camel case, but nonetheless have a Capital T at the end and
     * this shouldn't suggest to add more.
     *
     * <p>Examples; IDataT, CConverterT. BART, FOOT
     */
    NON_CLASS_NAME_WITH_T_SUFFIX(false),

    /** Anything else. */
    UNCLASSIFIED(false);

    // Whether this classification satisfies the style guide (no rename needed).
    private final boolean isValidName;

    TypeParameterNamingClassification(boolean isValidName) {
      this.isValidName = isValidName;
    }

    // Classifies a type-parameter name against the style-guide rules described above.
    public static TypeParameterNamingClassification classify(String name) {
      if (SINGLE_PLUS_MAYBE_DIGITS.matcher(name).matches()) {
        return LETTER_WITH_MAYBE_NUMERAL;
      }

      // Anything without a trailing 'T' that isn't letter+digits is simply invalid.
      if (!name.endsWith("T")) {
        return UNCLASSIFIED;
      }

      ImmutableList<String> tokens = NamingConventions.splitToLowercaseTerms(name);
      // Combine the tokens back into UpperCamelTokens and make sure it matches the identifier
      String reassembled =
          tokens.stream().map(TypeParameterNaming::upperCamelToken).collect(Collectors.joining());

      return name.equals(reassembled) ? CLASS_NAME_WITH_T : NON_CLASS_NAME_WITH_T_SUFFIX;
    }

    public boolean isValidName() {
      return isValidName;
    }
  }

  // Flags invalid type-parameter names, offering a ...T rename (when sensible) and a
  // single-letter rename as fixes.
  @Override
  public Description matchTypeParameter(TypeParameterTree tree, VisitorState state) {
    TypeParameterNamingClassification classification =
        TypeParameterNamingClassification.classify(tree.getName().toString());
    if (classification.isValidName()) {
      return Description.NO_MATCH;
    }

    Description.Builder descriptionBuilder =
        buildDescription(tree).setMessage(errorMessage(tree.getName(), classification));

    // Renames must be applied over the whole enclosing method/class so all usages update.
    TreePath enclosingPath = enclosingMethodOrClass(state.getPath());

    // Names like "BART" already end in T; suggesting "BARTT" would be worse, so the
    // ...T-suffix fix is skipped for NON_CLASS_NAME_WITH_T_SUFFIX.
    if (classification != TypeParameterNamingClassification.NON_CLASS_NAME_WITH_T_SUFFIX) {
      descriptionBuilder.addFix(
          SuggestedFixes.renameTypeParameter(
              tree,
              state.getPath().getParentPath().getLeaf(),
              suggestedNameFollowedWithT(tree.getName().toString()),
              state.withPath(enclosingPath)));
    }

    return descriptionBuilder
        .addFix(
            SuggestedFixes.renameTypeParameter(
                tree,
                state.getPath().getParentPath().getLeaf(),
                suggestedSingleLetter(tree.getName().toString(), tree),
                state.withPath(enclosingPath)))
        .build();
  }

  // Walks up the tree path to the nearest enclosing method or class; falls back to the
  // original path if neither is found.
  private static TreePath enclosingMethodOrClass(TreePath path) {
    for (TreePath parent = path; parent != null; parent = parent.getParentPath()) {
      if (parent.getLeaf() instanceof MethodTree || parent.getLeaf() instanceof ClassTree) {
        return parent;
      }
    }
    return path;
  }

  // Builds the diagnostic message; precondition: only called for invalid classifications.
  private static String errorMessage(Name name, TypeParameterNamingClassification classification) {
    Preconditions.checkArgument(!classification.isValidName());

    if (classification == TypeParameterNamingClassification.NON_CLASS_NAME_WITH_T_SUFFIX) {
      return String.format(
          "Type Parameters should be an UpperCamelCase name followed by the letter 'T'. "
              + "%s ends in T, but is not a valid UpperCamelCase name",
          name);
    }

    return String.format(
        "Type Parameter %s must be a single letter with an optional numeric"
            + " suffix, or an UpperCamelCase name followed by the letter 'T'.",
        name);
  }

  // Get list of type params of every enclosing class
  private static List<TypeVariableSymbol> typeVariablesEnclosing(Symbol sym) {
    List<TypeVariableSymbol> typeVarScopes = new ArrayList<>();
    outer:
    while (!sym.isStatic()) {
      sym = sym.owner;
      switch (sym.getKind()) {
        case PACKAGE:
          break outer;
        case METHOD:
        case CLASS:
          // Prepend so outermost scopes come first in the returned list.
          typeVarScopes.addAll(0, sym.getTypeParameters());
          break;
        default: // fall out
      }
    }
    return typeVarScopes;
  }

  // Suggests a single-letter replacement name derived from the identifier's first term;
  // if that letter would collide with an (invalidly named) enclosing type variable,
  // falls back to a numbered variant via firstLetterReplacementName.
  private static String suggestedSingleLetter(String id, Tree tree) {
    char firstLetter =
        Ascii.toUpperCase(NamingConventions.splitToLowercaseTerms(id).get(0).charAt(0));
    Symbol sym = ASTHelpers.getSymbol(tree);
    List<TypeVariableSymbol> enclosingTypeSymbols = typeVariablesEnclosing(sym);

    for (TypeVariableSymbol typeName : enclosingTypeSymbols) {
      char enclosingTypeFirstLetter = typeName.toString().charAt(0);
      if (enclosingTypeFirstLetter == firstLetter
          && !TypeParameterNamingClassification.classify(typeName.name.toString()).isValidName()) {
        ImmutableList<String> typeVarsInScope =
            Streams.concat(enclosingTypeSymbols.stream(), sym.getTypeParameters().stream())
                .map(v -> v.name.toString())
                .collect(toImmutableList());

        return firstLetterReplacementName(id, typeVarsInScope);
      }
    }

    return Character.toString(firstLetter);
  }

  // T -> T2
  // T2 -> T3
  // T -> T4 (if T2 and T3 already exist)
  // TODO(user) : combine this method with TypeParameterShadowing.replacementTypeVarName
  private static String firstLetterReplacementName(String name, List<String> superTypeVars) {
    String firstLetterOfBase = Character.toString(name.charAt(0));
    int typeVarNum = 2;
    boolean first = true;

    // If the name already carries a numeric suffix, start numbering just past it.
    Matcher matcher = TRAILING_DIGIT_EXTRACTOR.matcher(name);
    if (matcher.matches()) {
      name = matcher.group(1);
      typeVarNum = Integer.parseInt(matcher.group(2)) + 1;
    }

    String replacementName = "";

    // Look at the type names to the left of the current type
    // Since this bugchecker doesn't rename as it goes, we have to check which type names
    // would've been renamed before the current ones
    for (String superTypeVar : superTypeVars) {
      if (superTypeVar.equals(name)) {
        if (typeVarNum == 2 && first) {
          return firstLetterOfBase;
        }
        break;
      } else if (superTypeVar.charAt(0) == name.charAt(0)) {
        if (!first) {
          typeVarNum++;
        } else {
          first = false;
        }
        replacementName = firstLetterOfBase + typeVarNum;
      }
    }

    // Keep bumping the suffix until the candidate is free of collisions in scope.
    while (superTypeVars.contains(replacementName)) {
      typeVarNum++;
      replacementName = firstLetterOfBase + typeVarNum;
    }

    return replacementName;
  }

  // Suggests a style-conforming UpperCamelCase-plus-'T' name for the given identifier.
  private static String suggestedNameFollowedWithT(String identifier) {
    Preconditions.checkArgument(!identifier.isEmpty());

    // Some early checks:
    // TFoo => FooT
    if (identifier.length() > 2
        && identifier.charAt(0) == 'T'
        && Ascii.isUpperCase(identifier.charAt(1))
        && Ascii.isLowerCase(identifier.charAt(2))) {
      // splitToLowercaseTerms thinks "TFooBar" is ["tfoo", "bar"], so we remove "t", have it parse
      // as ["foo", "bar"], then staple "t" back on the end.
      ImmutableList<String> tokens =
          NamingConventions.splitToLowercaseTerms(identifier.substring(1));

      return Streams.concat(tokens.stream(), Stream.of("T"))
          .map(TypeParameterNaming::upperCamelToken)
          .collect(Collectors.joining());
    }

    ImmutableList<String> tokens = NamingConventions.splitToLowercaseTerms(identifier);

    // UPPERCASE => UppercaseT
    if (tokens.size() == 1) {
      String token = tokens.get(0);
      if (Ascii.toUpperCase(token).equals(identifier)) {
        return upperCamelToken(token) + "T";
      }
    }

    // FooType => FooT
    if (Iterables.getLast(tokens).equals("type")) {
      return Streams.concat(tokens.subList(0, tokens.size() - 1).stream(), Stream.of("T"))
          .map(TypeParameterNaming::upperCamelToken)
          .collect(Collectors.joining());
    }

    return identifier + "T";
  }
}
| |
package com.bazaarvoice.emodb.sor.core;
import com.bazaarvoice.emodb.sor.api.AuditBuilder;
import com.bazaarvoice.emodb.sor.api.Change;
import com.bazaarvoice.emodb.sor.api.ChangeBuilder;
import com.bazaarvoice.emodb.sor.api.Compaction;
import com.bazaarvoice.emodb.sor.api.DataStore;
import com.bazaarvoice.emodb.sor.api.History;
import com.bazaarvoice.emodb.sor.api.ReadConsistency;
import com.bazaarvoice.emodb.sor.api.TableOptionsBuilder;
import com.bazaarvoice.emodb.sor.api.WriteConsistency;
import com.bazaarvoice.emodb.sor.core.test.InMemoryDataStore;
import com.bazaarvoice.emodb.sor.db.Key;
import com.bazaarvoice.emodb.sor.db.Record;
import com.bazaarvoice.emodb.sor.db.test.DeltaClusteringKey;
import com.bazaarvoice.emodb.sor.db.test.InMemoryDataReaderDAO;
import com.bazaarvoice.emodb.sor.delta.Delta;
import com.bazaarvoice.emodb.sor.delta.Deltas;
import com.bazaarvoice.emodb.sor.test.SystemClock;
import com.bazaarvoice.emodb.sor.uuid.TimeUUIDs;
import com.bazaarvoice.emodb.table.db.Table;
import com.codahale.metrics.Counter;
import com.codahale.metrics.MetricRegistry;
import com.google.common.base.Predicate;
import com.google.common.base.Supplier;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.testng.annotations.Test;
import java.time.Duration;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static java.util.Objects.requireNonNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
public class CompactorTest {
/**
* Simulate backward compatibility for compactions
*/
@Test
public void testLegacyCompaction() {
// Old style compactions simulation
final Key key = mock(Key.class);
UUID t1 = TimeUUIDs.newUUID(); // First write
UUID t2 = TimeUUIDs.newUUID(); // Second write
UUID t3 = TimeUUIDs.newUUID(); // Third write
UUID t5 = TimeUUIDs.newUUID(); // Legacy compaction compacts t1, t2, t3
UUID t6 = TimeUUIDs.newUUID(); // Fourth write
UUID t7 = TimeUUIDs.newUUID(); // New-style compaction
// Wait 1 ms so that now is guaranteed to be after the last UUID created above
SystemClock.tick();
// Compaction and delta records after the second compaction
Delta delta2 = Deltas.literal(ImmutableMap.of("key", "value"));
Delta delta3 = Deltas.mapBuilder().put("key2", "change").build();
// Old style compaction
Compaction compaction2 = new Compaction(2, t1, t3, "abcdef0123456789", t3, t3);
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions2 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t5 ,1), compaction2));
final List<Map.Entry<DeltaClusteringKey,Change>> deltas2 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t3, 1), ChangeBuilder.just(t3, delta2)),
Maps.immutableEntry(new DeltaClusteringKey(t5, 1), ChangeBuilder.just(t5, compaction2)),
Maps.immutableEntry(new DeltaClusteringKey(t6, 1), ChangeBuilder.just(t6, delta3)));
// This record will make it to LegacyCompactor, but will not "double-dip" in Cassandra as it has already found
// compaction.
Record record = mock(Record.class);
when(record.getKey()).thenReturn(key);
when(record.passOneIterator()).thenReturn(compactions2.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
//noinspection unchecked
Supplier<Record> requeryFn = mock(Supplier.class);
// Let's go to the new DistributedCompactor and make sure that the legacy compaction converts to DistributedCompaction.
SystemClock.tick();
long now = System.currentTimeMillis();
MetricRegistry metricRegistry = new MetricRegistry();
Counter archiveDeltaSize = metricRegistry.counter(MetricRegistry.name("bv.emodb.sor", "DistributedCompactor", "archivedDeltaSize"));
Expanded expanded =
new DistributedCompactor(archiveDeltaSize, true, metricRegistry)
.expand(record, now, now, now, MutableIntrinsics.create(key), false, requeryFn);
// Methods that return iterators may not be called more than once.
verify(record, times(1)).passOneIterator();
verify(record, times(1)).passTwoIterator();
// Verify we never require to requery fn
verifyNoMoreInteractions(record, requeryFn);
// Verify that expansion resolved to the expected results even with "old" compaction records
Map<String, String> expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
assertEquals(expanded.getResolved().getIntrinsics().getVersion(), 4); // 4 writes: t1, t2, t3, t6
assertEquals(expanded.getResolved().getIntrinsics().getFirstUpdateAtUuid(), t1);
assertEquals(expanded.getResolved().getIntrinsics().getLastUpdateAtUuid(), t6);
assertEquals(expanded.getNumDeletedDeltas(), 2);
assertEquals(expanded.getNumPersistentDeltas(), 2);
assertTrue(expanded.getPendingCompaction() != null);
assertEquals(expanded.getPendingCompaction().getChangeId(), t6);
assertEquals(expanded.getPendingCompaction().getDelta(), Deltas.literal(expectedContent));
assertEquals(expanded.getPendingCompaction().getCompaction().getCount(), 3/*add 1 for cutoff delta*/+1);
assertEquals(expanded.getPendingCompaction().getCompaction().getFirst(), t1);
assertEquals(expanded.getPendingCompaction().getCompaction().getCutoff(), t6);
// Verify that the pending compaction is a new-style compaction
// This pending compaction should compact all the deltas available and replace them with compaction column
assertNotNull(expanded.getPendingCompaction(), "Pending compaction does not exist");
assertNotNull(expanded.getPendingCompaction().getCompaction(), "No compaction found in compaction");
// The following test verifies that the pending compaction is a new style compaction.
assertNotNull(expanded.getPendingCompaction().getCompaction().getCompactedDelta(), "Not a new version of compaction");
Compaction newCompaction = expanded.getPendingCompaction().getCompaction();
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions3 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t7, 1), newCompaction));
final List<Map.Entry<DeltaClusteringKey,Change>> deltas3 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t7, 1), ChangeBuilder.just(t7, newCompaction)));
Record record3 = mock(Record.class);
when(record3.getKey()).thenReturn(key);
when(record3.passOneIterator()).thenReturn(compactions3.iterator());
when(record3.passTwoIterator()).thenReturn(deltas3.iterator());
// This time it should not delegate to DefaultCompactor since the resulting compaction was a new compaction
Expanded expanded3 =
new DistributedCompactor(archiveDeltaSize, true, metricRegistry)
.expand(record3, now, now, now, MutableIntrinsics.create(key), false, requeryFn);
// Verify we never require to requery fn
verify(record3, times(1)).passOneIterator();
verify(record3, times(1)).passTwoIterator();
verifyNoMoreInteractions(record3, requeryFn);
// Verify content with new compaction this time
assertEquals(expanded3.getResolved().getContent(), expectedContent);
assertEquals(expanded3.getResolved().getIntrinsics().getVersion(), 4); // 4 writes: t1, t2, t3, t6
assertEquals(expanded3.getResolved().getIntrinsics().getFirstUpdateAtUuid(), t1);
assertEquals(expanded3.getResolved().getIntrinsics().getLastUpdateAtUuid(), t6);
assertEquals(expanded3.getNumDeletedDeltas(), 4);
assertEquals(expanded3.getNumPersistentDeltas(), 0);
}
/** Simulate compaction occurring between Record.passOneIterator and Record.passTwoIterator.
* Note that this test is for legacy Compactor that restarts itself in case of race conditions between two
* compactor threads.
*/
@Test
public void testRestart()
throws Exception {
final Key key = mock(Key.class);
UUID t1 = TimeUUIDs.newUUID(); // First write
UUID t2 = TimeUUIDs.newUUID(); // Second write
UUID t3 = TimeUUIDs.newUUID(); // Third write
UUID t4 = TimeUUIDs.newUUID(); // First compaction compacts t1 and t2
UUID t5 = TimeUUIDs.newUUID(); // Second compaction compacts t1, t2, t3
UUID t6 = TimeUUIDs.newUUID(); // Fourth write
// Wait 1 ms so that now is guaranteed to be after the last UUID created above
SystemClock.tick();
// Compaction records after the first compaction
Compaction compaction1 = new Compaction(2, t1, t2, "0123456789abcdef", t2, t2);
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions1 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t4, 1), compaction1));
// Compaction and delta records after the second compaction
Delta delta2 = Deltas.literal(ImmutableMap.of("key", "value"));
Delta delta3 = Deltas.mapBuilder().put("key2", "change").build();
Compaction compaction2 = new Compaction(2, t1, t3, "abcdef0123456789", t3, t3);
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions2 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t5, 1), compaction2));
final List<Map.Entry<DeltaClusteringKey, Change>> deltas2 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t3, 1), ChangeBuilder.just(t3, delta2)),
Maps.immutableEntry(new DeltaClusteringKey(t5, 1), ChangeBuilder.just(t5, compaction2)),
Maps.immutableEntry(new DeltaClusteringKey(t6, 1), ChangeBuilder.just(t6, delta3)));
// First try will delegate to legacy compactor and fail because compaction record 1 is not present in the 2nd sequence of deltas
Record record1 = mock(Record.class);
when(record1.getKey()).thenReturn(key);
when(record1.passOneIterator()).thenReturn(compactions1.iterator());
when(record1.passTwoIterator()).thenReturn(deltas2.iterator());
// Second try should succeed - while also delegating back to legacy compactor.
Record record2 = mock(Record.class);
when(record2.getKey()).thenReturn(key);
when(record2.passOneIterator()).thenReturn(compactions2.iterator());
when(record2.passTwoIterator()).thenReturn(deltas2.iterator());
//noinspection unchecked
Supplier<Record> requeryFn = mock(Supplier.class);
when(requeryFn.get()).thenReturn(record2);
long now = System.currentTimeMillis();
MetricRegistry metricRegistry = new MetricRegistry();
Counter archiveDeltaSize = metricRegistry.counter(MetricRegistry.name("bv.emodb.sor", "DistributedCompactor", "archivedDeltaSize"));
Expanded expanded =
new DistributedCompactor(archiveDeltaSize, true, metricRegistry)
.expand(record1, now, now, now, MutableIntrinsics.create(key), false, requeryFn);
// Methods that return iterators may not be called more than once.
verify(record1, times(1)).passOneIterator();
verify(record1, times(1)).passTwoIterator();
verify(requeryFn).get();
verify(record2, times(1)).passOneIterator();
verify(record2, times(1)).passTwoIterator();
verifyNoMoreInteractions(record1, record2, requeryFn);
// Verify that expansion resolved to the expected results.
Map<String, String> expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
assertEquals(expanded.getResolved().getIntrinsics().getVersion(), 4); // 4 writes: t1, t2, t3, t6
assertEquals(expanded.getResolved().getIntrinsics().getFirstUpdateAtUuid(), t1);
assertEquals(expanded.getResolved().getIntrinsics().getLastUpdateAtUuid(), t6);
assertEquals(expanded.getNumDeletedDeltas(), 2);
assertEquals(expanded.getNumPersistentDeltas(), 2);
assertTrue(expanded.getPendingCompaction() != null);
assertEquals(expanded.getPendingCompaction().getChangeId(), t6);
assertEquals(expanded.getPendingCompaction().getDelta(), Deltas.literal(expectedContent));
assertEquals(expanded.getPendingCompaction().getCompaction().getCount(), 3/*add 1 for cutoff delta*/+1);
assertEquals(expanded.getPendingCompaction().getCompaction().getFirst(), t1);
assertEquals(expanded.getPendingCompaction().getCompaction().getCutoff(), t6);
}
@Test
public void testDisableDeltaHistory() {
final Key key = mock(Key.class);
UUID t1 = TimeUUIDs.newUUID(); // First write
UUID t2 = TimeUUIDs.newUUID(); // Second write
UUID t3 = TimeUUIDs.newUUID(); // Third write
// Wait 1 ms so that now is guaranteed to be after the last UUID created above
SystemClock.tick();
Delta delta2 = Deltas.literal(ImmutableMap.of("key", "value"));
Delta delta3 = Deltas.mapBuilder().put("key2", "change").build();
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions = Lists.newArrayList();
final List<Map.Entry<DeltaClusteringKey, Change>> deltas2 = ImmutableList.of(
Maps.immutableEntry(new DeltaClusteringKey(t1, 1), ChangeBuilder.just(t1, delta2)),
Maps.immutableEntry(new DeltaClusteringKey(t2, 1), ChangeBuilder.just(t2, delta2)),
Maps.immutableEntry(new DeltaClusteringKey(t3, 1), ChangeBuilder.just(t3, delta3)));
Record record = mock(Record.class);
when(record.getKey()).thenReturn(key);
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
//noinspection unchecked
Supplier<Record> requeryFn = mock(Supplier.class);
when(requeryFn.get()).thenReturn(record);
long now = System.currentTimeMillis();
MetricRegistry metricRegistry = new MetricRegistry();
Counter archiveDeltaSize = metricRegistry.counter(MetricRegistry.name("bv.emodb.sor", "DefaultCompactor", "archivedDeltaSize"));
boolean isDeltaHistoryEnabled = false;
Expanded expanded = new DistributedCompactor(archiveDeltaSize, isDeltaHistoryEnabled, metricRegistry)
.expand(record, now, now, now, MutableIntrinsics.create(key), false, requeryFn);
// Verify that expansion produces a compaction with no delta archives
Map<String, String> expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
assertTrue(expanded.getPendingCompaction() != null);
assertTrue(expanded.getPendingCompaction().getDeltasToArchive().isEmpty(), "Delta history is disabled");
// Now verify we do get delta history if enabled
Record record2 = mock(Record.class);
when(record2.getKey()).thenReturn(key);
when(record2.passOneIterator()).thenReturn(compactions.iterator());
when(record2.passTwoIterator()).thenReturn(deltas2.iterator());
isDeltaHistoryEnabled = true;
expanded = new DistributedCompactor(archiveDeltaSize, isDeltaHistoryEnabled, metricRegistry)
.expand(record2, now, now, now, MutableIntrinsics.create(key), false, requeryFn);
expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
assertTrue(expanded.getPendingCompaction() != null);
assertEquals(expanded.getPendingCompaction().getDeltasToArchive().size(), 3, "Archive 3 deltas");
}
/**
* Proof that System of Record does not need tombstones, i.e., gc_grace_seconds can be set to 0
* Basically, the effect of not keeping tombstones in Cassandra nodes upon compaction is that some
* nodes may "resurrect" the deletes. The only deletes that happen in System of Record is when compaction
* takes place, and old deltas get deleted. Emo deletes/compacts only the deltas whose change Id is before
* Full Consistency Timestamp (FCT).
*
* This test proves that even if deleted deltas resurrect themselves, Emodb would simply delete them again in
* a subsequent compaction without any impact to data.
*
* Update: Although technically correct, there is an implementation level concern. Cassandra sets the hints
* expiration to gc_grace_seconds. You can set hints expiration to less than gc_grace, but not more than that.
* If these hints expire (or are never generated when gc_grace_seconds is set to 0), our FCT algorithm will think
* we are fully consistent, when we may not be. Obviously, this will be a problem for several reasons but primarily
* delta compactions may result in data loss.
*
* So, we should set our gc_grace_seconds not due to concern for tombstones, but out of hints expiration concern.
* Cassandra's default recommendation is gc_grace_seconds to 10 days, and a weekly repair.
* References:
* https://issues.apache.org/jira/browse/CASSANDRA-5988
* https://issues.apache.org/jira/browse/CASSANDRA-5314
*
* Ideally, Cassandra should allow the users to make the call on hints TTL independently from gc_grace_seconds.
*/
@Test
public void testTombstonesDoNotMatter() {
final Key key = mock(Key.class);
UUID t1 = TimeUUIDs.newUUID(); // First write
UUID t2 = TimeUUIDs.newUUID(); // Second write
UUID t3 = TimeUUIDs.newUUID(); // Third write
// Wait 1 ms so that now is guaranteed to be after the last UUID created above
// Record full consistency timestamp right after t3 for later use
long fctRightAfterT3 = TimeUUIDs.getTimeMillis(TimeUUIDs.getNext(t3));
SystemClock.tick();
Delta delta2 = Deltas.literal(ImmutableMap.of("key", "value"));
Delta delta3 = Deltas.mapBuilder().put("key2", "change").build();
final List<Map.Entry<DeltaClusteringKey, Compaction>> compactions = Lists.newArrayList();
Map.Entry<DeltaClusteringKey, Change> firstDelta = Maps.immutableEntry(new DeltaClusteringKey(t1, 1), ChangeBuilder.just(t1, delta2));
final List<Map.Entry<DeltaClusteringKey, Change>> deltas2 = Lists.newArrayList(firstDelta,
Maps.immutableEntry(new DeltaClusteringKey(t2, 1), ChangeBuilder.just(t2, delta2)),
Maps.immutableEntry(new DeltaClusteringKey(t3, 1), ChangeBuilder.just(t3, delta3)));
Record record = mock(Record.class);
when(record.getKey()).thenReturn(key);
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
//noinspection unchecked
Supplier<Record> requeryFn = mock(Supplier.class);
long now = System.currentTimeMillis();
MetricRegistry metricRegistry = new MetricRegistry();
Counter archiveDeltaSize = metricRegistry.counter(MetricRegistry.name("bv.emodb.sor", "DefaultCompactor", "archivedDeltaSize"));
Compactor compactor = new DistributedCompactor(archiveDeltaSize, false, metricRegistry);
Expanded expanded = compactor.expand(record, now, now, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
// Verify that expansion produces a compaction, but does *not* delete the compaction owned deltas. That will happen in the next read
Map<String, String> expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
assertTrue(expanded.getPendingCompaction() != null);
// Do not delete deltas just yet
assertTrue(expanded.getPendingCompaction().getKeysToDelete().isEmpty());
// Add the compactions to the compaction list
compactions.add(Maps.immutableEntry(new DeltaClusteringKey(expanded.getPendingCompaction().getChangeId(), 1), expanded.getPendingCompaction().getCompaction()));
// Resetting now
SystemClock.tick();
now = System.currentTimeMillis();
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
expanded = compactor.expand(record, now, now, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
// Verify that our deltas are going to be deleted now
assertTrue(expanded.getPendingCompaction().getKeysToDelete().size() == 3, "All 3 deltas should be up for deletion");
assertTrue(ImmutableSet.copyOf(expanded.getPendingCompaction().getKeysToDelete()).equals(ImmutableSet.of(new DeltaClusteringKey(t1, 1), new DeltaClusteringKey(t2, 1), new DeltaClusteringKey(t3, 1))));
// Finally, let's assume only one delta really got deleted, and two of these deltas
// "resurrected" themselves due to no tombstones in Cassandra
deltas2.remove(firstDelta); // only _really_ deleting the first delta
// Try again, and make sure that this time we delete the two resurrected deltas
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
expanded = compactor.expand(record, now, now, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
assertTrue(expanded.getPendingCompaction().getKeysToDelete().size() == 2, "The 2 'resurrected' deltas are simply deleted again");
assertTrue(ImmutableSet.copyOf(expanded.getPendingCompaction().getKeysToDelete()).equals(ImmutableSet.of(new DeltaClusteringKey(t2, 1), new DeltaClusteringKey(t3, 1))));
expectedContent = ImmutableMap.of("key", "value", "key2", "change");
assertEquals(expanded.getResolved().getContent(), expectedContent);
// Verify that a resurrected compaction is OK
// Remember the only time we start a new compaction is when there is no compaction, or the
// existing compaction is before the full consistency timestamp (FCT), i.e., it is fully consistent on all nodes.
// To be sure, the following test asserts the above.
// Artificially set the FCT after t3, but before the compaction
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
expanded = compactor.expand(record, fctRightAfterT3, fctRightAfterT3, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
// Even though there are deltas before the FCT, we do not compact since we have an outstanding compaction.
assertTrue(expanded.getPendingCompaction() == null);
SystemClock.tick();
now = System.currentTimeMillis();
// Add a new delta so we can discard the old compaction, and then resurrect compaction again for our test
UUID t4 = TimeUUIDs.newUUID();
Delta delta4 = Deltas.mapBuilder().put("key4", "change4").build();
deltas2.add(Maps.immutableEntry(new DeltaClusteringKey(t4, 1), ChangeBuilder.just(t4, delta4)));
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
expanded = compactor.expand(record, now, now, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
// The above should create a new compaction
assertTrue(expanded.getPendingCompaction().getCompaction() != null);
DeltaClusteringKey toBeResurrectedCompaction = compactions.get(0).getKey();
// Make sure the old compaction is getting deleted
assertTrue(expanded.getPendingCompaction().getKeysToDelete().contains(toBeResurrectedCompaction));
expectedContent = ImmutableMap.of("key", "value", "key2", "change", "key4", "change4");
assertEquals(expanded.getResolved().getContent(), expectedContent);
// Add the newest compaction to our list of compaction, but do not delete the old one simulating resurrection
compactions.add(Maps.immutableEntry(new DeltaClusteringKey(expanded.getPendingCompaction().getChangeId(), 1), expanded.getPendingCompaction().getCompaction()));
// Let's fetch the record again, and see if the existing old compaction affect anything
when(record.passOneIterator()).thenReturn(compactions.iterator());
when(record.passTwoIterator()).thenReturn(deltas2.iterator());
expanded = compactor.expand(record, now, now, Long.MIN_VALUE, MutableIntrinsics.create(key), false, requeryFn);
// The resurrected compaction should be deleted again
assertTrue(expanded.getPendingCompaction().getKeysToDelete().contains(toBeResurrectedCompaction));
// No changes to the content
assertEquals(expanded.getResolved().getContent(), expectedContent);
}
@Test
public void testCorruption()
throws InterruptedException {
final String tableName = "test:corruption";
final String placement = "app_global:default";
final String key = "corruptedKey";
// Latch used to coordinate compactin events
final CountDownLatch compactionDeletesLatch = new CountDownLatch(1);
final CountDownLatch addCompactionLatch = new CountDownLatch(1);
// Boolean used to control whether the data DAO should pause after deletion of records or not
final AtomicBoolean holdRecord = new AtomicBoolean(true);
InMemoryDataReaderDAO dataDAO = new InMemoryDataReaderDAO() {
@Override
public void compact(Table table, String key, UUID compactionKey, Compaction compaction,
UUID changeId, Delta delta, Collection<DeltaClusteringKey> changesToDelete, List<History> historyList, WriteConsistency consistency) {
requireNonNull(table, "table");
requireNonNull(key, "key");
requireNonNull(compactionKey, "compactionKey");
requireNonNull(compaction, "compaction");
requireNonNull(changeId, "changeId");
requireNonNull(delta, "delta");
requireNonNull(changesToDelete, "changesToDelete");
Map<UUID, Change> changes = super.safePut(super._contentChanges, table.getName(), key);
// // delete the old deltas & compaction records
// super.deleteDeltas(changesToDelete, changes);
// add the compaction record and update the last content of the last delta
super.addCompaction(compactionKey, compaction, changeId, delta, changes);
// This countdown latch is here for testing race condition scenarios
if (holdRecord.get()) {
compactionDeletesLatch.countDown();
// Wait till the other compaction is done
try {
addCompactionLatch.await();
} catch (InterruptedException e) {
throw Throwables.propagate(e);
}
}
// // add the compaction record and update the last content of the last delta
// super.addCompaction(compactionKey, compaction, changeId, delta, changes);
// delete the old deltas & compaction records
super.deleteDeltas(changesToDelete, changes);
// Add delta histories
if (historyList != null && !historyList.isEmpty()) {
super._historyStore.putDeltaHistory(table.getName(), key, historyList);
}
}
};
final DataStore dataStore = new InMemoryDataStore(dataDAO, new MetricRegistry());
// Create a table for our test
dataStore.createTable(tableName,
new TableOptionsBuilder().setPlacement(placement).build(),
ImmutableMap.<String, Object>of(),
new AuditBuilder().setComment("Corrupted Compaction scenario").build());
createDelta(dataStore, tableName, key, "count", 0); // Delta 0
createDelta(dataStore, tableName, key, "count", 1); // Delta 1
createDelta(dataStore, tableName, key, "count", 2); // Delta 2
createDelta(dataStore, tableName, key, "count1", 3); // Delta 3
// Async compaction will try to compact d0, d1, d2 into d3.. but ends up deleting d0 d1 d2, before writing compaction
Thread concurrentCompactionThread = new Thread(new Runnable() {
@Override
public void run() {
dataStore.compact(tableName, key, Duration.ZERO, ReadConsistency.STRONG, WriteConsistency.STRONG);
}
});
concurrentCompactionThread.start();
// In the mean time, another compaction comes around and will try to compact d3, d4 into d5, and deletes d3, d4
// Another node starts concurrent compaction
compactionDeletesLatch.await();
createDelta(dataStore, tableName, key, "count2", 4); // Delta 4
createDelta(dataStore, tableName, key, "count2", 5); // Delta 5
holdRecord.set(false);
dataStore.compact(tableName, key, Duration.ZERO, ReadConsistency.STRONG, WriteConsistency.STRONG);
addCompactionLatch.countDown();
// Now un-pause the previous compaction
compactionDeletesLatch.countDown();
// Allow the async compaction to finish to avoid ConcurrentModificationException
concurrentCompactionThread.join();
// Verify that you have corrupted the document
Map<String, Object> map = dataStore.get(tableName, key);
assertEquals(Integer.parseInt(map.get("~version").toString()), 6, "Version is not 6. This record is corrupted.");
}
/**
* This unit test specifically checks for the condition detailed in EMO-5335. Please refer to that
* ticket in JIRA for a full discussion of the defect.
* @throws Exception
*/
@Test
public void testConcurrentCompactionWithLazilyLoadedChanges() throws Exception {
// Setup some constants used throughout this test
final String tableName = "test:compactionloss";
final String placement = "app_global:default";
final String key = "thekey";
final long now = System.currentTimeMillis();
// Boolean used to control whether the data DAO should block after reading a record before returning
final AtomicBoolean holdRecord = new AtomicBoolean(false);
// Latch used to coordinate events after the record has been read
final CountDownLatch recordRead = new CountDownLatch(1);
// Latch used to coordinate returning the read record
final CountDownLatch returnRecord = new CountDownLatch(1);
InMemoryDataReaderDAO dataDAO = new InMemoryDataReaderDAO() {
@Override
public Record read(Key key, ReadConsistency ignored) {
// Read the record as usual
Record record = super.read(key, ignored);
if (holdRecord.get()) {
// Signal that the record has been read
recordRead.countDown();
// Wait until the main thread signals that execution should resume
try {
returnRecord.await();
} catch (InterruptedException e) {
throw Throwables.propagate(e);
}
}
return record;
}
};
// Configure the data DAO to read 10 columns initially, causing other column reads to be read lazily
dataDAO.setColumnBatchSize(10);
final DataStore dataStore = new InMemoryDataStore(dataDAO, new MetricRegistry());
// Create a table for our test
dataStore.createTable(tableName,
new TableOptionsBuilder().setPlacement(placement).build(),
ImmutableMap.<String, Object>of(),
new AuditBuilder().setComment("Creating compaction loss test table").build());
// Set the current full consistency timestamp in dataDAO to one day ago
dataDAO.setFullConsistencyTimestamp(now - TimeUnit.DAYS.toMillis(1));
// Later we'll update the full consistency timestamp to one minute in the past; calculate it now
long fullConsistencyTs = now - TimeUnit.MINUTES.toMillis(1);
// Create an initial delta for this row one hour in the past
long timestamp = System.currentTimeMillis() - TimeUnit.HOURS.toMillis(1);
createInitialValue(dataStore, tableName, key, timestamp);
// Create 8 additional records all before the full consistency time
for (int i=1; i <= 8; i++) {
timestamp += TimeUnit.MINUTES.toMillis(1);
createDelta(dataStore, tableName, key, i, timestamp);
}
// Create 1 more record which is after the full consistency timestamp
createDelta(dataStore, tableName, key, 9, fullConsistencyTs - TimeUnit.SECONDS.toMillis(1));
// There are now 10 deltas, which is the initial read column limit. Create one more delta afterward
createDelta(dataStore, tableName, key, 10, fullConsistencyTs + TimeUnit.SECONDS.toMillis(1));
// Set the full consistency timestamp to the calculated value
dataDAO.setFullConsistencyTimestamp(fullConsistencyTs);
// Create a thread which will compact the record but block partway through, simulating a context switch
holdRecord.set(true); // Causes the record read to block
Thread concurrentCompactionThread = new Thread(new Runnable() {
@Override
public void run() {
dataStore.compact(tableName, key, null, ReadConsistency.STRONG, WriteConsistency.STRONG);
}
});
concurrentCompactionThread.start();
// Wait until the record has been read in the other thread but has not been resolved
recordRead.await();
// Perform a compaction concurrent to the other thread
holdRecord.set(false); // Causes the record to read normally without blocking
dataStore.compact(tableName, key, null, ReadConsistency.STRONG, WriteConsistency.STRONG);
// Release the other thread, allowing it to complete resolution and compaction
returnRecord.countDown();
// Wait for the other thread to finish compaction
concurrentCompactionThread.join();
// Verify the final object is as expected
Map<String, Object> map = dataStore.get(tableName, key);
assertEquals(map.get("constant"), "immutable-value");
assertEquals(map.get("count"), 10);
// Verify in the timeline that compaction did occur
Iterator<Change> timeline = dataStore.getTimeline(tableName, key, true, false, null, null, false, 10000, ReadConsistency.STRONG);
Iterator<Change> compactions = Iterators.filter(timeline, new Predicate<Change>() {
@Override
public boolean apply(Change change) {
return change.getCompaction() != null;
}
});
assertEquals(Iterators.size(compactions), 1);
}
private void createInitialValue(DataStore dataStore, String tableName, String key, long timestamp) {
dataStore.update(tableName, key, com.bazaarvoice.emodb.common.uuid.TimeUUIDs.uuidForTimeMillis(timestamp),
Deltas.mapBuilder()
.put("constant", "immutable-value")
.put("count", 0)
.build(),
new AuditBuilder().setComment("initial value").build());
}
private void createDelta(DataStore dataStore, String tableName, String key, String attribute, int newCount) {
dataStore.update(tableName, key, com.bazaarvoice.emodb.common.uuid.TimeUUIDs.uuidForTimeMillis(System.currentTimeMillis()),
Deltas.mapBuilder()
.update(attribute, Deltas.literal(newCount))
.build(),
new AuditBuilder().setComment("Updating count to " + newCount).build());
SystemClock.tick();
}
private void createDelta(DataStore dataStore, String tableName, String key, int newCount, long timestamp) {
createDelta(dataStore, tableName, key, "count", newCount, timestamp);
}
/**
 * Applies a conditional delta ({@code updateIfExists}) that sets {@code attribute} to
 * {@code newCount} at an explicit timestamp, then advances the system clock.
 * NOTE(review): unlike the current-time overload this uses updateIfExists rather than
 * update — confirm the asymmetry is intentional.
 */
private void createDelta(DataStore dataStore, String tableName, String key, String attribute, int newCount, long timestamp) {
    String auditComment = "Updating count to " + newCount;
    dataStore.update(
            tableName,
            key,
            com.bazaarvoice.emodb.common.uuid.TimeUUIDs.uuidForTimeMillis(timestamp),
            Deltas.mapBuilder().updateIfExists(attribute, Deltas.literal(newCount)).build(),
            new AuditBuilder().setComment(auditComment).build());
    // ensure the next update lands at a later time
    SystemClock.tick();
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.OutputBuffers.OutputBufferId;
import com.facebook.presto.spi.Page;
import com.google.common.collect.ImmutableList;
import io.airlift.units.DataSize;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.Immutable;
import javax.annotation.concurrent.ThreadSafe;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static com.facebook.presto.execution.buffer.BufferResult.emptyResults;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static java.lang.Math.toIntExact;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.CompletableFuture.completedFuture;
/**
 * Buffers the output pages destined for a single downstream client (identified by
 * {@code bufferId}) and serves them through an asynchronous, token-based read API.
 *
 * <p>Reads carry a {@code sequenceId} token: the id of the next page the client wants.
 * Requesting a token implicitly acknowledges (drops) all earlier pages. If no data is
 * available yet, the read is parked as a {@link PendingRead} and completed when pages
 * arrive, the stream ends, or a newer read supersedes it.
 *
 * <p>Thread-safety: mutable state is guarded by {@code this}, except that the
 * statistics counters and the {@code destroyed}/{@code currentSequenceId} atomics are
 * additionally read lock-free by {@link #getInfo()} and {@link #isDestroyed()} so state
 * machine updates can never hang on this buffer's lock. Callbacks (page dereferencing,
 * future completion) are always invoked outside the lock.
 */
@ThreadSafe
class ClientBuffer
{
    private final String taskInstanceId;
    private final OutputBufferId bufferId;

    // Lock-free statistics: monotonically increasing totals, not current contents
    private final AtomicLong rowsAdded = new AtomicLong();
    private final AtomicLong pagesAdded = new AtomicLong();
    private final AtomicLong bufferedBytes = new AtomicLong();

    // Sequence id of the first page still held in "pages"; advanced by acknowledgePages()
    @GuardedBy("this")
    private final AtomicLong currentSequenceId = new AtomicLong();

    @GuardedBy("this")
    private final LinkedList<PageReference> pages = new LinkedList<>();

    @GuardedBy("this")
    private boolean noMorePages;

    // destroyed is set when the client sends a DELETE to the buffer
    // this is an acknowledgement that the client has observed the end of the buffer
    @GuardedBy("this")
    private final AtomicBoolean destroyed = new AtomicBoolean();

    // at most one outstanding read is parked here; a newer read aborts the previous one
    @GuardedBy("this")
    private PendingRead pendingRead;

    public ClientBuffer(String taskInstanceId, OutputBufferId bufferId)
    {
        this.taskInstanceId = requireNonNull(taskInstanceId, "taskInstanceId is null");
        this.bufferId = requireNonNull(bufferId, "bufferId is null");
    }

    /**
     * Returns a point-in-time snapshot of this buffer's statistics.
     */
    public BufferInfo getInfo()
    {
        //
        // NOTE: this code must be lock free so state machine updates do not hang
        //
        @SuppressWarnings("FieldAccessNotGuarded")
        boolean destroyed = this.destroyed.get();

        @SuppressWarnings("FieldAccessNotGuarded")
        long sequenceId = this.currentSequenceId.get();

        // if destroyed the buffered page count must be zero regardless of observation ordering in this lock free code
        int bufferedPages = destroyed ? 0 : Math.max(toIntExact(pagesAdded.get() - sequenceId), 0);

        PageBufferInfo pageBufferInfo = new PageBufferInfo(bufferId.getId(), bufferedPages, bufferedBytes.get(), rowsAdded.get(), pagesAdded.get());
        return new BufferInfo(bufferId, destroyed, bufferedPages, sequenceId, pageBufferInfo);
    }

    public boolean isDestroyed()
    {
        //
        // NOTE: this code must be lock free so state machine updates do not hang
        //
        @SuppressWarnings("FieldAccessNotGuarded")
        boolean destroyed = this.destroyed.get();
        return destroyed;
    }

    /**
     * Drops all buffered pages, marks the buffer destroyed, and completes any parked
     * read with an empty result. Page dereferencing and future completion happen
     * outside the lock to avoid running callbacks while holding it.
     */
    public void destroy()
    {
        List<PageReference> removedPages;
        PendingRead pendingRead;
        synchronized (this) {
            removedPages = ImmutableList.copyOf(pages);
            pages.clear();

            bufferedBytes.getAndSet(0);

            noMorePages = true;
            destroyed.set(true);

            pendingRead = this.pendingRead;
            this.pendingRead = null;
        }

        removedPages.forEach(PageReference::dereferencePage);

        if (pendingRead != null) {
            pendingRead.completeResultFutureWithEmpty();
        }
    }

    /**
     * Adds pages to this buffer (taking a reference on each) and satisfies any parked
     * read. Pages enqueued after {@link #setNoMorePages()} are silently ignored.
     */
    public void enqueuePages(Collection<PageReference> pages)
    {
        PendingRead pendingRead;
        synchronized (this) {
            // ignore pages after no more pages is set
            // this can happen with limit queries
            if (noMorePages) {
                return;
            }

            pages.forEach(PageReference::addReference);
            this.pages.addAll(pages);

            long rowCount = pages.stream().mapToLong(PageReference::getPositionCount).sum();
            rowsAdded.addAndGet(rowCount);
            pagesAdded.addAndGet(pages.size());

            long bytesAdded = pages.stream().mapToLong(PageReference::getRetainedSizeInBytes).sum();
            bufferedBytes.addAndGet(bytesAdded);

            pendingRead = this.pendingRead;
            this.pendingRead = null;
        }

        // we just added a page, so process the pending read
        if (pendingRead != null) {
            processRead(pendingRead);
        }
    }

    /**
     * Reads pages starting at {@code sequenceId}, implicitly acknowledging all earlier
     * pages. If the buffer is currently empty and more pages may arrive, the returned
     * future completes when data shows up; otherwise it is already complete. Issuing a
     * new read aborts any previously parked read with an empty result.
     */
    public CompletableFuture<BufferResult> getPages(long sequenceId, DataSize maxSize)
    {
        checkArgument(sequenceId >= 0, "Invalid sequence id");

        // acknowledge pages first, outside of locks, to not trigger callbacks while holding the lock
        acknowledgePages(sequenceId);

        PendingRead oldPendingRead = null;
        try {
            synchronized (this) {
                // save off the old pending read so we can abort it outside of the lock
                oldPendingRead = this.pendingRead;
                this.pendingRead = null;

                // Return results immediately if we have data, there will be no more data, or this is
                // an out of order request
                if (!pages.isEmpty() || noMorePages || sequenceId != currentSequenceId.get()) {
                    return completedFuture(processRead(sequenceId, maxSize));
                }

                // otherwise, wait for more data to arrive
                pendingRead = new PendingRead(taskInstanceId, sequenceId, maxSize);
                return pendingRead.getResultFuture();
            }
        }
        finally {
            if (oldPendingRead != null) {
                // Each buffer is private to a single client, and each client should only have one outstanding
                // read. Therefore, we abort the existing read since it was most likely abandoned by the client.
                oldPendingRead.completeResultFutureWithEmpty();
            }
        }
    }

    /**
     * Marks the end of the page stream and completes any parked read.
     * Duplicate calls are ignored.
     */
    public void setNoMorePages()
    {
        PendingRead pendingRead;
        synchronized (this) {
            // ignore duplicate calls
            if (noMorePages) {
                return;
            }
            noMorePages = true;

            pendingRead = this.pendingRead;
            this.pendingRead = null;
        }

        // there will be no more pages, so process the pending read
        if (pendingRead != null) {
            processRead(pendingRead);
        }
    }

    // Completes a parked read with whatever is available now (no-op if already done,
    // e.g. because it was aborted by a newer read)
    private void processRead(PendingRead pendingRead)
    {
        if (pendingRead.getResultFuture().isDone()) {
            return;
        }

        BufferResult bufferResult = processRead(pendingRead.getSequenceId(), pendingRead.getMaxSize());
        pendingRead.getResultFuture().complete(bufferResult);
    }

    /**
     * @return a result with at least one page if we have pages in buffer, empty result otherwise
     */
    private synchronized BufferResult processRead(long sequenceId, DataSize maxSize)
    {
        // When pages are added to the partition buffer they are effectively
        // assigned an id starting from zero. When a read is processed, the
        // "token" is the id of the page to start the read from, so the first
        // step of the read is to acknowledge, and drop all pages up to the
        // provided sequenceId. Then pages starting from the sequenceId are
        // returned with the sequenceId of the next page to read.
        //
        // Since the buffer API is asynchronous there are a number of problems
        // that can occur with out of order requests (typically from retries due
        // to request failures):
        // - Request to read pages that have already been acknowledged.
        //   Simply send a result with no pages and the requested sequenceId,
        //   and since the client has already acknowledged the pages, it will
        //   ignore the out of order response.
        // - Request to read after the buffer has been destroyed. When the
        //   buffer is destroyed all pages are dropped, so the read sequenceId
        //   appears to be off the end of the queue. Normally a read past the
        //   end of the queue would be an error, but this specific case is
        //   detected and handled. The client is sent an empty response with
        //   the finished flag set and next token is the max acknowledged page
        //   when the buffer is destroyed.
        //

        // if request is for pages before the current position, just return an empty result
        if (sequenceId < currentSequenceId.get()) {
            return emptyResults(taskInstanceId, sequenceId, false);
        }

        // if this buffer is finished, notify the client of this, so the client
        // will destroy this buffer
        if (pages.isEmpty() && noMorePages) {
            return emptyResults(taskInstanceId, currentSequenceId.get(), true);
        }

        // if request is for pages after the current position, there is a bug somewhere
        // a read call is always preceded by acknowledge pages, which
        // will advance the sequence id to at least the request position, unless
        // the buffer is destroyed, and in that case the buffer will be empty with
        // no more pages set, which is checked above
        verify(sequenceId == currentSequenceId.get(), "Invalid sequence id");

        // read the new pages
        long maxBytes = maxSize.toBytes();
        List<Page> result = new ArrayList<>();
        long bytes = 0;

        for (PageReference page : pages) {
            bytes += page.getRetainedSizeInBytes();
            // break (and don't add) if this page would exceed the limit
            if (!result.isEmpty() && bytes > maxBytes) {
                break;
            }
            result.add(page.getPage());
        }
        return new BufferResult(taskInstanceId, sequenceId, sequenceId + result.size(), false, result);
    }

    /**
     * Drops pages up to the specified sequence id
     */
    private void acknowledgePages(long sequenceId)
    {
        checkState(!Thread.holdsLock(this), "Can not acknowledge pages while holding a lock on this");

        List<PageReference> removedPages = new ArrayList<>();
        synchronized (this) {
            if (destroyed.get()) {
                return;
            }

            // if pages have already been acknowledged, just ignore this
            long oldCurrentSequenceId = currentSequenceId.get();
            if (sequenceId < oldCurrentSequenceId) {
                return;
            }

            int pagesToRemove = toIntExact(sequenceId - oldCurrentSequenceId);
            checkArgument(pagesToRemove <= pages.size(), "Invalid sequence id");

            long bytesRemoved = 0;
            for (int i = 0; i < pagesToRemove; i++) {
                PageReference removedPage = pages.removeFirst();
                removedPages.add(removedPage);
                bytesRemoved += removedPage.getRetainedSizeInBytes();
            }

            // update current sequence id
            verify(currentSequenceId.compareAndSet(oldCurrentSequenceId, oldCurrentSequenceId + pagesToRemove));

            // update memory tracking
            verify(bufferedBytes.addAndGet(-bytesRemoved) >= 0);
        }

        // dereference outside of synchronized to avoid making a callback while holding a lock
        removedPages.forEach(PageReference::dereferencePage);
    }

    @Override
    public String toString()
    {
        @SuppressWarnings("FieldAccessNotGuarded")
        long sequenceId = currentSequenceId.get();

        @SuppressWarnings("FieldAccessNotGuarded")
        boolean destroyed = this.destroyed.get();

        return toStringHelper(this)
                .add("bufferId", bufferId)
                .add("sequenceId", sequenceId)
                .add("destroyed", destroyed)
                .toString();
    }

    // A read that arrived when no data was available; completed later by
    // enqueuePages()/setNoMorePages()/destroy(), or aborted by a newer read.
    @Immutable
    private static class PendingRead
    {
        private final String taskInstanceId;
        private final long sequenceId;
        private final DataSize maxSize;
        private final CompletableFuture<BufferResult> resultFuture = new CompletableFuture<>();

        private PendingRead(String taskInstanceId, long sequenceId, DataSize maxSize)
        {
            this.taskInstanceId = requireNonNull(taskInstanceId, "taskInstanceId is null");
            this.sequenceId = sequenceId;
            this.maxSize = maxSize;
        }

        public long getSequenceId()
        {
            return sequenceId;
        }

        public DataSize getMaxSize()
        {
            return maxSize;
        }

        public CompletableFuture<BufferResult> getResultFuture()
        {
            return resultFuture;
        }

        // completes the read with an empty, non-final result carrying the original token
        public void completeResultFutureWithEmpty()
        {
            resultFuture.complete(emptyResults(taskInstanceId, sequenceId, false));
        }
    }

    // Reference-counted wrapper around a Page; runs onDereference once the count hits zero
    @ThreadSafe
    static class PageReference
    {
        private final Page page;
        private final AtomicInteger referenceCount;
        private final Runnable onDereference;

        public PageReference(Page page, int referenceCount, Runnable onDereference)
        {
            this.page = requireNonNull(page, "page is null");
            checkArgument(referenceCount > 0, "referenceCount must be at least 1");
            this.referenceCount = new AtomicInteger(referenceCount);
            this.onDereference = requireNonNull(onDereference, "onDereference is null");
        }

        public void addReference()
        {
            int oldReferences = referenceCount.getAndIncrement();
            checkState(oldReferences > 0, "Page has already been dereferenced");
        }

        public Page getPage()
        {
            return page;
        }

        public int getPositionCount()
        {
            return page.getPositionCount();
        }

        public long getRetainedSizeInBytes()
        {
            return page.getRetainedSizeInBytes();
        }

        public void dereferencePage()
        {
            int remainingReferences = referenceCount.decrementAndGet();
            checkState(remainingReferences >= 0, "Page reference count is negative");

            if (remainingReferences == 0) {
                onDereference.run();
            }
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("referenceCount", referenceCount)
                    .toString();
        }
    }
}
| |
package de.flohrit.drillrig.runtime;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import net.schmizz.sshj.SSHClient;
import net.schmizz.sshj.common.DisconnectReason;
import net.schmizz.sshj.transport.DisconnectListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;
import de.flohrit.drillrig.config.Connection;
import de.flohrit.drillrig.config.Forward;
import de.flohrit.drillrig.config.Proxy;
import de.flohrit.drillrig.config.SshSession;
import de.flohrit.drillrig.protocol.ProxyTunnelSocketFactory;
import de.flohrit.drillrig.protocol.SocksSocketFactory;
import de.flohrit.drillrig.services.ForwardStateInfo;
/**
 * Daemon thread that monitors one configured SSH session: it builds the configured
 * port forwardings at construction, re-creates any forwarding that has gone inactive
 * every 10 seconds, and tears everything down on transport disconnect or interruption.
 */
public class SshSessionMonitor extends Thread implements DisconnectListener {
    final static private Logger logger = LoggerFactory
            .getLogger(SshSessionMonitor.class);

    // Active forwarder per config entry; the monitor loop, disconnect handler and
    // state snapshot all synchronize on this map.
    // NOTE(review): createPortForwarding() writes to this map without holding the
    // lock when invoked from the constructor — appears safe only because the thread
    // has not started yet; confirm no external caller races with run().
    private Map<Forward, PortForwarder> portForwarders = new LinkedHashMap<Forward, PortForwarder>();
    private SshSession sshClientsCfg;

    public SshSessionMonitor(SshSession sshClientsCfg) throws IOException {
        super("LForwarder");
        setDaemon(true);
        this.sshClientsCfg = sshClientsCfg;
        // forwardings are only built for sessions enabled in the configuration
        if (sshClientsCfg.isEnabled()) {
            createPortForwardings(sshClientsCfg);
        }
    }

    /** Starts the monitor thread only when the session is enabled. */
    @Override
    public synchronized void start() {
        if (sshClientsCfg.isEnabled()) {
            super.start();
        }
    }

    // creates a forwarder for every configured forward entry
    private void createPortForwardings(SshSession sshClientsCfg) {
        for (Forward forward : sshClientsCfg.getForward()) {
            createPortForwarding(forward);
        }
    }

    /**
     * Creates and starts the forwarder for one config entry; disabled entries are
     * skipped. The forward type selects the implementation: "L" = local port
     * forwarder, "D" = dynamic (SOCKS) forwarder, anything else = remote forwarder.
     * Failures are logged and swallowed so the run() loop can retry later.
     */
    public void createPortForwarding(Forward forward) {
        PortForwarder fwd;

        if (!forward.isEnabled()) {
            return;
        }

        // tag all log output with the forward id while setting up
        MDC.put("forward", forward.getId());
        try {
            SSHClient sshClient = createSshTransportSession(forward);

            if ("L".equals(forward.getType())) {
                fwd = new MyLocalPortForwarder(sshClient, forward);
            } else if ("D".equals(forward.getType())) {
                fwd = new MyDynamicPortForwarder(sshClient, forward);
            } else {
                fwd = new MyRemotePortForwarder(sshClient, forward);
            }
            portForwarders.put(forward, fwd);
            fwd.start();

        } catch (IOException e) {
            // NOTE(review): the second {} receives the exception text although the
            // message says "interface" — consider logging the interface and passing
            // the exception separately so the stack trace is preserved.
            logger.error("Can't create port {} listener for interface {}",
                    new Object[] { forward.getSPort(), e.toString() });
        } finally {
            MDC.remove("forward");
        }
    }

    /**
     * Opens and authenticates the SSH transport session for a forward: applies the
     * configured proxy type (HTTP tunnel, SOCKS, or DIRECT), verifies the host key
     * against the local "known_hosts" file, connects, and authenticates with the
     * decrypted stored password.
     */
    private SSHClient createSshTransportSession(Forward forward) throws IOException {
        SSHClient sshClient = new SSHClient();

        Connection maschineAccount = (Connection) forward.getConnection();
        logger.info("create ssh session for user {}@{}", new Object[] {
                maschineAccount.getUser(), maschineAccount.getHost() });

        Proxy proxy = maschineAccount.getProxy();
        if (proxy != null) {
            if ("HTTP".equals(proxy.getType())) {
                sshClient.setSocketFactory(new ProxyTunnelSocketFactory(proxy.getHost(), proxy.getPort()));
            } else if ("SOCKS".equals(proxy.getType())) {
                sshClient.setSocketFactory(new SocksSocketFactory(proxy.getHost(), proxy.getPort()));
            } else if ("DIRECT".equals(proxy.getType())) {
                // no proxy
            } else {
                logger.warn("Unsupported proxy type found {}", proxy.getType());
            }
        }

        sshClient.addHostKeyVerifier(new AutoKnownHostsVerifier(new File(
                "known_hosts")));
        sshClient.connect(maschineAccount.getHost(),
                maschineAccount.getPort());
        sshClient.authPassword(maschineAccount.getUser(),
                DrillServer.getEncDecorder().decrypt(maschineAccount.getPassword()));

        return sshClient;
    }

    /** Closes and forgets all forwarders when the SSH transport disconnects. */
    @Override
    public void notifyDisconnect(DisconnectReason paramDisconnectReason) {
        synchronized (portForwarders) {
            logger.info("Disconnect event received, reason: {}",
                    paramDisconnectReason);
            for (PortForwarder forward : portForwarders.values()) {
                forward.close();
            }
            portForwarders.clear();
        }
    }

    /**
     * Watchdog loop: sleeps 10 seconds, then re-creates any configured forwarding
     * whose forwarder is missing or inactive. On interruption, closes all
     * forwarders and exits.
     */
    @Override
    public void run() {
        while (!interrupted()) {
            try {
                Thread.sleep(10000);
            } catch (InterruptedException e) {
                logger.error("SshClientMonitor thread interrupted -> exiting now");
                break;
            }

            synchronized (portForwarders) {
                /*
                try {
                    sshClientSession.getTransport().write(
                            new SSHPacket(Message.IGNORE));
                } catch (TransportException e) {
                    logger.error("SshClientMonitor failed to send heartbeat message");
                    try {
                        sshClientSession.close();
                    } catch (IOException e1) {
                    }
                }
                */
                for (Forward forward : sshClientsCfg.getForward()) {
                    PortForwarder myLocalPortForwarder = portForwarders
                            .get(forward);
                    // restart forwardings that never started or have dropped
                    if (myLocalPortForwarder == null
                            || !myLocalPortForwarder.isActive()) {
                        createPortForwarding(forward);
                    }
                }
            }
        }

        logger.info("Shuting down SshClientMonitor thread");
        synchronized (portForwarders) {
            for (PortForwarder myPortForwarder : portForwarders.values()) {
                myPortForwarder.close();
            }
            portForwarders.clear();
        }
        logger.info("Shuting down SshClientMonitor thread completed");
    }

    /** Returns a snapshot of each configured forward's state for the services layer. */
    public List<ForwardStateInfo> getForwardStateInfos() {
        List<ForwardStateInfo> infos = new ArrayList<ForwardStateInfo>();
        synchronized (portForwarders) {
            for (Forward forward : sshClientsCfg.getForward()) {
                ForwardStateInfo info = new ForwardStateInfo();
                info.setId(forward.getId());
                PortForwarder myLocalPortForwarder = portForwarders
                        .get(forward);
                // deactivated: disabled in config; stopped: never started;
                // running: currently active; retry: started but currently down
                if (!forward.isEnabled() || !sshClientsCfg.isEnabled()) {
                    info.setState("deactivated");
                } else if (myLocalPortForwarder == null) {
                    info.setState("stopped");
                } else if (myLocalPortForwarder.isActive()) {
                    info.setState("running");
                } else {
                    info.setState("retry");
                }
                infos.add(info);
            }
        }
        return infos;
    }
}
| |
/*
* Copyright 2010 DTO Labs, Inc. (http://dtolabs.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* PoliciesCache.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: Nov 16, 2010 11:26:12 AM
*
*/
package com.dtolabs.rundeck.core.authorization.providers;
import com.dtolabs.rundeck.core.authorization.Attribute;
import org.apache.log4j.Logger;
import org.yaml.snakeyaml.parser.ParserException;
import java.io.File;
import java.util.*;
/**
* PoliciesCache retains PolicyDocument objects for inserted Files, and reloads them if file modification time changes.
*
* @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
*/
/**
 * PoliciesCache retains parsed {@link PolicyCollection} objects for inserted policy
 * sources and reloads them if a source's modification time changes. Sources that
 * fail to parse are put on a cooldown and skipped until they are modified again.
 *
 * @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 */
public class PoliciesCache implements Iterable<PolicyCollection> {
    static final long DIR_LIST_CHECK_DELAY = Long.getLong(PoliciesCache.class.getName() + ".DirListCheckDelay", 60000);
    static final long FILE_CHECK_DELAY = Long.getLong(PoliciesCache.class.getName() + ".FileCheckDelay", 60000);
    private final static Logger logger = Logger.getLogger(PoliciesCache.class);

    // Parsed documents keyed by source identity; every access must hold the monitor
    // of this PoliciesCache (getDocument is synchronized; the iterator's eviction
    // path takes the same lock explicitly).
    private Map<String, CacheItem> cache = new HashMap<>();
    private SourceProvider provider;
    /**
     * Context to load the policies within; invalid policies will be flagged
     */
    final private Set<Attribute> forcedContext;

    // Sources whose last parse failed, mapped to the modification time that failed;
    // they are skipped until modified again.
    private Map<CacheableYamlSource, Long> cooldownset = Collections.synchronizedMap(new HashMap<>());

    private PoliciesCache(final SourceProvider provider) {
        this(provider, null);
    }

    private PoliciesCache(final SourceProvider provider, final Set<Attribute> forcedContext) {
        this.provider = provider;
        this.forcedContext = forcedContext;
    }

    /** Cached parse result plus the timestamps used to decide recheck/reload. */
    private static class CacheItem {
        PolicyCollection policyCollection;
        Long cacheTime; // last time the source was checked
        Long modTime;   // source modification time at parse

        private CacheItem(PolicyCollection policyCollection, Long modTime) {
            this.policyCollection = policyCollection;
            this.modTime = modTime;
            this.cacheTime = System.currentTimeMillis();
        }

        // record that the source was re-checked (and found unmodified) at the given time
        public void touch(Long time) {
            this.cacheTime = time;
        }
    }

    // Parses a source into a PolicyCollection, wrapping failures as PoliciesParseException
    private PolicyCollection createEntry(final YamlSource source) throws PoliciesParseException {
        try {
            return YamlProvider.policiesFromSource(source, forcedContext);
        } catch (ParserException e1) {
            throw new PoliciesParseException("YAML syntax error: " + e1.toString(), e1);
        } catch (Exception e1) {
            throw new PoliciesParseException(e1);
        }
    }

    /**
     * Returns the (possibly cached) PolicyCollection for a source. The source is
     * re-checked at most once per {@link #FILE_CHECK_DELAY} ms, and re-parsed only
     * when its modification time has advanced. Invalid sources are evicted.
     *
     * @param source source
     * @return collection, or null if the source is invalid or produced no entry
     * @throws PoliciesParseException if the source fails to parse
     */
    public synchronized PolicyCollection getDocument(final CacheableYamlSource source) throws PoliciesParseException {
        CacheItem entry = cache.get(source.getIdentity());
        long checkTime = System.currentTimeMillis();
        if (null == entry || ((checkTime - entry.cacheTime) > FILE_CHECK_DELAY)) {
            final long lastmod = source.getLastModified().getTime();
            if (null == entry || lastmod > entry.modTime) {
                if (!source.isValid()) {
                    // source disappeared or became unreadable: evict it
                    cache.remove(source.getIdentity());
                    entry = null;
                } else {
                    PolicyCollection entry1 = createEntry(source);
                    if (null != entry1) {
                        entry = new CacheItem(entry1, lastmod);
                        cache.put(source.getIdentity(), entry);
                    } else {
                        cache.remove(source.getIdentity());
                        entry = null;
                    }
                }
            } else {
                // unmodified: just remember when we last checked
                entry.touch(checkTime);
            }
        }
        return null != entry ? entry.policyCollection : null;
    }

    @Override
    public Iterator<PolicyCollection> iterator() {
        return new CacheIterator(provider.getSourceIterator());
    }

    /**
     * Create a cache from a single file source
     * @param singleFile file
     * @return cache
     */
    public static PoliciesCache fromFile(File singleFile) {
        return fromSourceProvider(YamlProvider.getFileProvider(singleFile));
    }

    /**
     * Create a cache from a single file source with a forced context
     *
     * @param singleFile file
     * @param forcedContext forced context
     * @return cache
     */
    public static PoliciesCache fromFile(File singleFile, Set<Attribute> forcedContext) {
        return fromSourceProvider(YamlProvider.getFileProvider(singleFile), forcedContext);
    }

    /**
     * Create from a provider
     * @param provider source provider
     * @return policies cache
     */
    public static PoliciesCache fromSourceProvider(final SourceProvider provider) {
        return new PoliciesCache(provider);
    }

    /**
     * Create from a provider with a forced context
     * @param provider source provider
     * @param forcedContext forced context
     * @return policies cache
     */
    public static PoliciesCache fromSourceProvider(
            final SourceProvider provider,
            final Set<Attribute> forcedContext
    )
    {
        return new PoliciesCache(provider, forcedContext);
    }

    /**
     * Create a cache from a directory source
     * @param rootDir base directory
     * @return cache
     */
    public static PoliciesCache fromDir(File rootDir) {
        return fromSourceProvider(YamlProvider.getDirProvider(rootDir));
    }

    /**
     * Create a cache from a directory source with a forced context
     * @param rootDir base directory
     * @param forcedContext forced context
     * @return cache
     */
    public static PoliciesCache fromDir(File rootDir, final Set<Attribute> forcedContext) {
        return fromSourceProvider(YamlProvider.getDirProvider(rootDir), forcedContext);
    }

    /**
     * Create a cache from cacheable sources
     * @param sources source
     * @return cache
     */
    public static PoliciesCache fromSources(final Iterable<CacheableYamlSource> sources) {
        return fromSourceProvider(
                new SourceProvider() {
                    @Override
                    public Iterator<CacheableYamlSource> getSourceIterator() {
                        return sources.iterator();
                    }
                }
        );
    }

    /**
     * Create a cache from cacheable sources with a forced context
     * @param sources source
     * @param context forced context
     * @return cache
     */
    public static PoliciesCache fromSources(final Iterable<CacheableYamlSource> sources, final Set<Attribute> context) {
        return fromSourceProvider(
                new SourceProvider() {
                    @Override
                    public Iterator<CacheableYamlSource> getSourceIterator() {
                        return sources.iterator();
                    }
                },
                context
        );
    }

    /**
     * Iterator over the PolicyCollections for the cache's sources. It skips sources
     * that are no longer valid or that are cooling down after a parse error.
     */
    private class CacheIterator implements Iterator<PolicyCollection> {
        Iterator<CacheableYamlSource> intIter;
        private CacheableYamlSource nextSource;
        private PolicyCollection nextPolicyCollection;

        public CacheIterator(final Iterator<CacheableYamlSource> intIter) {
            this.intIter = intIter;
            nextSource = this.intIter.hasNext() ? this.intIter.next() : null;
            loadNextSource();
        }

        // advances until the next parseable source is found or sources are exhausted
        private void loadNextSource() {
            while (hasNextFile() && null == nextPolicyCollection) {
                CacheableYamlSource newNextSource = getNextSource();
                Long aLong = cooldownset.get(newNextSource);
                if (null != aLong && newNextSource.getLastModified().getTime() == aLong) {
                    logger.debug("Skip parsing of: " + newNextSource + ". Reason: parse error cooldown until modified");
                    continue;
                } else if (null != aLong) {
                    // modified since the failed parse: clear the cooldown and retry
                    cooldownset.remove(newNextSource);
                }
                try {
                    nextPolicyCollection = getDocument(newNextSource);
                } catch (PoliciesParseException e) {
                    logger.error("ERROR unable to parse aclpolicy: " + newNextSource + ". Reason: " + e.getMessage());
                    logger.debug("ERROR unable to parse aclpolicy: " + newNextSource + ". Reason: " + e.getMessage(), e);
                    // evict under the same lock getDocument() uses; previously this
                    // bare cache.remove() raced with the synchronized getDocument()
                    synchronized (PoliciesCache.this) {
                        cache.remove(newNextSource.getIdentity());
                    }
                    cooldownset.put(newNextSource, newNextSource.getLastModified().getTime());
                }
            }
        }

        // returns the current source and pre-fetches the next one
        private CacheableYamlSource getNextSource() {
            CacheableYamlSource next = nextSource;
            nextSource = intIter.hasNext() ? intIter.next() : null;
            return next;
        }

        // returns the current collection and advances to the next parseable one
        private PolicyCollection getNextPolicyCollection() {
            PolicyCollection doc = nextPolicyCollection;
            nextPolicyCollection = null;
            loadNextSource();
            return doc;
        }

        public boolean hasNextFile() {
            return null != nextSource;
        }

        public boolean hasNext() {
            return null != nextPolicyCollection;
        }

        public PolicyCollection next() {
            return getNextPolicyCollection();
        }

        public void remove() {
            // removal is not supported; calls are silently ignored
        }
    }
}
| |
/**
* The MIT License (MIT)
*
* Copyright (c) 2011-2016 Incapture Technologies LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rapture.repo.meta.handler;
import rapture.common.MessageFormat;
import rapture.common.RaptureFolderInfo;
import rapture.common.RaptureURI;
import rapture.common.exception.RaptureExceptionFactory;
import rapture.common.impl.jackson.JacksonUtil;
import rapture.common.model.DocumentMetadata;
import rapture.common.model.DocumentWithMeta;
import rapture.dsl.dparse.AsOfTimeDirectiveParser;
import rapture.index.IndexProducer;
import rapture.repo.KeyStore;
import rapture.repo.Messages;
import rapture.repo.RepoUtil;
import java.net.HttpURLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* This class handle the bringing together of three key stores - for latest
* version, for historical versions and for the meta data about versions
*
* @author amkimian
*/
public class VersionedMetaHandler extends AbstractMetaHandler {
private static final int INITIAL_VERSION = 1;
private KeyStore versionStore;
/**
 * @param latestStore store holding the latest version of each document
 * @param versionStore store holding historical version content
 * @param metaStore store holding per-version metadata
 * @param attributeStore store holding document attributes
 */
public VersionedMetaHandler(KeyStore latestStore, KeyStore versionStore, KeyStore metaStore, KeyStore attributeStore) {
    super(latestStore, metaStore, attributeStore);
    this.versionStore = versionStore;
}
/**
 * Adds a document only when the latest stored version equals {@code expectedVersion}
 * (or no metadata exists yet for the key).
 *
 * @return the write result, or null when the optimistic version check fails
 */
public DocumentWithMeta addDocumentWithExpectedVersion(String key, String value, String user, String comment, int expectedVersion, IndexProducer producer) {
    // Optimistic concurrency: a missing record always passes the check.
    DocumentMetadata current = getLatestMeta(key);
    boolean versionMatches = (current == null) || (current.getVersion() == expectedVersion);
    return versionMatches ? addDocument(key, value, user, comment, producer) : null;
}
// This handler keeps historical versions, so documents are always versioned.
@Override
protected boolean isVersioned() {
    return true;
}
/**
 * Writes one historical version: metadata to the meta store and content to the
 * version store, both under the same version key. Stores that can look up
 * versions by time also receive the document's modification timestamp so all
 * timestamps agree with each other.
 */
@Override
protected void addToVersionStore(String docPath, String value, DocumentMetadata newMetaData) {
    String versionKey = createVersionKey(docPath, newMetaData.getVersion());
    String metaJson = JacksonUtil.jsonFromObject(newMetaData);
    if (!supportsVersionLookupByTime()) {
        metaStore.put(versionKey, metaJson);
        versionStore.put(versionKey, value);
        return;
    }
    // keep both stores' timestamps in sync with the document's modification time
    long modified = newMetaData.getModifiedTimestamp();
    metaStore.put(versionKey, modified, metaJson);
    versionStore.put(versionKey, modified, value);
}
/**
 * Builds the storage key for a specific document version. Time-addressable stores
 * (e.g. Cassandra) keep the version in a separate field, so the raw key is used;
 * otherwise the version is encoded as a "?&lt;version&gt;" suffix.
 */
private String createVersionKey(String key, int version) {
    return supportsVersionLookupByTime() ? key : key + "?" + version;
}
/**
 * Builds the storage key for the latest document version. Time-addressable stores
 * address the latest version directly by key; otherwise a "?latest" suffix is used.
 */
@Override
protected String createLatestKey(String docPath) {
    return supportsVersionLookupByTime() ? docPath : docPath + "?latest";
}
/** Drops the latest/meta/attribute stores (via super) plus the version history store. */
@Override
public void dropAll() {
    super.dropAll();
    versionStore.dropKeyStore();
}
/**
 * Records a deletion: writes deletion metadata under both the "latest" key and the
 * deletion's own version key, then removes any index entries for the document.
 */
@Override
protected void updateMetaOnDelete(String user, String docPath) {
    DocumentMetadata deletionMeta = createDeletionMeta(user, docPath);
    String deletionMetaJson = JacksonUtil.jsonFromObject(deletionMeta);
    String latestKey = createLatestKey(docPath);
    metaStore.put(latestKey, deletionMetaJson);
    String versionKey = createVersionKey(docPath, deletionMeta.getVersion());
    metaStore.put(versionKey, deletionMetaJson);
    // drop index entries for the deleted document, if indexing is configured
    if (indexHandler.isPresent()) {
        indexHandler.get().removeAll(docPath);
    }
}
/**
 * Writes the "latest" metadata copy, which is only needed by stores that cannot
 * address versions by time.
 */
@Override
protected void addLatestToMetaStore(String latestKey, DocumentMetadata newMetaData) {
    // Timestamp repos address the latest version directly; no extra "?latest" copy.
    if (supportsVersionLookupByTime()) {
        return;
    }
    metaStore.put(latestKey, JacksonUtil.jsonFromObject(newMetaData));
}
// Builds the metadata entry recorded when a document is deleted.
// NOTE(review): the boolean flags and the INITIAL_VERSION fallback follow
// createMetadataFromLatest (declared in the superclass) — confirm their semantics there.
protected DocumentMetadata createDeletionMeta(String user, String key) {
    return createMetadataFromLatest(user, "Deleted", key, true, INITIAL_VERSION, true);
}
/**
 * Fetches a specific version of a document, dispatching on whether the underlying
 * store can look up versions by time.
 */
public DocumentWithMeta getDocumentWithMeta(String key, int version) {
    return supportsVersionLookupByTime()
            ? getDocumentWithMetaFromTimestampRepo(key, version)
            : getDocumentWithMetaFromStandardRepo(key, version);
}
/**
 * Version lookup for stores without time addressing: both content and metadata
 * live under the composite "&lt;key&gt;?&lt;version&gt;" key.
 */
protected DocumentWithMeta getDocumentWithMetaFromStandardRepo(String key, int version) {
    String versionKey = createVersionKey(key, version);
    DocumentWithMeta result = new DocumentWithMeta();
    result.setDisplayName(key);
    result.setContent(versionStore.get(versionKey));
    result.setMetaData(getMetaFromVersionKey(versionKey));
    return result;
}
/**
 * Version lookup for time-addressable stores: first locates the metadata for the
 * requested version (brute-force timestamp walk), then uses its modification
 * timestamp to fetch the matching content directly.
 */
protected DocumentWithMeta getDocumentWithMetaFromTimestampRepo(String key, int version) {
    DocumentMetadata meta = getMetaFromTimestampRepo(key, version);
    DocumentWithMeta result = new DocumentWithMeta();
    result.setDisplayName(key);
    result.setContent(versionStore.get(key, meta.getModifiedTimestamp()));
    result.setMetaData(meta);
    return result;
}
/**
 * Walks backwards through history in a timestamp-capable repository until
 * metadata with the requested version number (or older) is found, or until
 * no earlier metadata exists (null).
 */
protected DocumentMetadata getMetaFromTimestampRepo(String key, int version) {
    DocumentMetadata meta = getLatestMeta(key);
    while (meta != null && meta.getVersion() > version) {
        int numVersionsBack = meta.getVersion() - version;
        // Each intermediate version is assumed to consume at least 1 ms, so this
        // is an upper bound on the target version's modification time — TODO
        // confirm millisecond-distinct modification timestamps per version.
        long maxPossibleTimestamp = meta.getModifiedTimestamp() - numVersionsBack;
        meta = getMetaAtTimestamp(key, maxPossibleTimestamp);
    }
    return meta;
}
/**
 * Fetches the document and its metadata as they existed at the given time.
 * Only supported by repositories whose stores can look content up by
 * timestamp; otherwise an internal-error exception is raised.
 */
public DocumentWithMeta getDocumentWithMeta(String key, long millisTimeStamp) {
    if (!supportsVersionLookupByTime()) {
        throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_INTERNAL_ERROR,
                new MessageFormat(Messages.getString("MetaBasedRepo.NotSupported")));
    }
    String contentAtTime = versionStore.get(key, millisTimeStamp);
    if (contentAtTime == null) {
        return null;
    }
    DocumentWithMeta result = new DocumentWithMeta();
    result.setDisplayName(key);
    result.setContent(contentAtTime);
    result.setMetaData(getMetaAtTimestamp(key, millisTimeStamp));
    return result;
}
/**
 * Returns the metadata in effect at the given time, or null when none is
 * recorded. Only valid for repositories supporting lookup by timestamp.
 */
public DocumentMetadata getMetaAtTimestamp(String key, long millisTimeStamp) {
    if (!supportsVersionLookupByTime()) {
        throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_INTERNAL_ERROR,
                new MessageFormat(Messages.getString("MetaBasedRepo.NotSupported")));
    }
    String json = metaStore.get(key, millisTimeStamp);
    return json == null ? null : JacksonUtil.objectFromJson(json, DocumentMetadata.class);
}
/**
 * Batch-fetches documents and their metadata for a mixed list of URIs.
 * URIs that pin a version — explicitly or via an as-of-time directive — are
 * resolved through the version store; the rest read the latest content.
 * The result list preserves the input order.
 *
 * @param uris document URIs to resolve; null or empty yields an empty list
 * @return one entry per input URI, in the same order
 */
public List<DocumentWithMeta> getDocAndMetas(List<RaptureURI> uris) {
    if (uris == null || uris.isEmpty()) {
        return new ArrayList<DocumentWithMeta>();
    }
    DocumentWithMeta[] ret = new DocumentWithMeta[uris.size()];
    // Result-position -> lookup-key maps so batched responses can be written
    // back into the correct result slot.
    Map<Integer, String> versionedKeys = new LinkedHashMap<Integer, String>();
    Map<Integer, String> latestKeys = new LinkedHashMap<Integer, String>();
    List<String> latestKeysWithVersion = new ArrayList<String>();
    int position = 0;
    for (RaptureURI uri : uris) {
        String docPath = uri.getDocPath();
        Integer version = null;
        if (uri.hasVersion()) {
            version = Integer.parseInt(uri.getVersion());
        }
        else if (uri.hasAsOfTime()) {
            // Translate the as-of-time directive into a concrete version number.
            version = getVersionNumberAsOfTime(docPath, uri.getAsOfTime());
        }
        if (version != null) {
            versionedKeys.put(position, createVersionKey(docPath, version));
        } else {
            latestKeys.put(position, docPath);
            // Metadata for "latest" lives under the latest-style key.
            latestKeysWithVersion.add(createLatestKey(docPath));
        }
        ++position;
    }
    // Versioned lookups: content and metadata share the same versioned key.
    List<String> versionedKeysList = new ArrayList<String>(versionedKeys.values());
    List<Integer> versionedPositionList = new ArrayList<Integer>(versionedKeys.keySet());
    List<String> versionedContents = versionStore.getBatch(versionedKeysList);
    List<String> versionedMeta = metaStore.getBatch(versionedKeysList);
    constructDocumentWithMetaList(ret, uris, versionedPositionList, versionedContents, versionedMeta);
    // Latest lookups: content from the document store, metadata from the meta store.
    List<Integer> latestPositionList = new ArrayList<Integer>(latestKeys.keySet());
    List<String> latestContents = documentStore.getBatch(new ArrayList<String>(latestKeys.values()));
    List<String> latestMeta = metaStore.getBatch(latestKeysWithVersion);
    constructDocumentWithMetaList(ret, uris, latestPositionList, latestContents, latestMeta);
    return Arrays.asList(ret);
}
/** Returns the metadata recorded for a specific version of a document. */
public DocumentMetadata getVersionMeta(String key, int version) {
    return getMetaFromVersionKey(createVersionKey(key, version));
}
/**
 * Reverts a document to its previous version by re-adding that version's
 * content as a brand-new version (history is preserved, nothing is deleted).
 * NOTE(review): assumes at least two versions exist — reverting at version 1
 * would look up version 0; confirm callers guard against that.
 */
public DocumentWithMeta revertDoc(String key, IndexProducer producer) {
    DocumentMetadata latest = getLatestMeta(key);
    DocumentWithMeta previous = getDocumentWithMeta(key, latest.getVersion() - 1);
    addDocument(key, previous.getContent(), previous.getMetaData().getUser(), previous.getMetaData().getComment() + " - REVERTED", producer);
    return getLatestDocAndMeta(key);
}
/**
 * Removes all children under the given display-name prefix, then purges the
 * corresponding non-folder entries from the version store as well.
 */
public List<RaptureFolderInfo> removeChildren(String displayNamePart, Boolean force, IndexProducer producer) {
    List<RaptureFolderInfo> removed = super.removeChildren(displayNamePart, force, producer);
    if (removed == null) {
        return null;
    }
    versionStore.delete(RepoUtil.extractNonFolderKeys(removed));
    return removed;
}
/**
 * Reports the storage footprint of each underlying key store. The result
 * maps a store label to a human-readable size plus a "_Raw" byte count, and
 * includes a "Total" entry when at least one store reported a size.
 */
public Map<String, String> getStatus() {
    Map<String, String> ret = new HashMap<String, String>();
    // Interleaved pairs: store instance followed by its display label.
    Object[] checks = { documentStore, "Latest", versionStore, "Version", metaStore, "Meta" };
    long totalSize = 0L;
    for (int i = 0; i < checks.length; i += 2) {
        KeyStore ks = (KeyStore) checks[i];
        if (ks != null) {
            long size = ks.getSize();
            // -1 means the store cannot report a size; skip it entirely.
            if (size != -1L) {
                ret.put(checks[i + 1].toString(), readableFileSize(size));
                ret.put(checks[i + 1].toString() + "_Raw", "" + size);
                totalSize += size;
            }
        }
    }
    if (totalSize != 0L) {
        ret.put("Total", readableFileSize(totalSize));
        ret.put("Total_Raw", "" + totalSize);
    }
    return ret;
}
// delete versions older than cutoffVersion (version# <= cutoffVersion)
/**
 * Deletes all versions up to and including {@code cutoffVersion} from both
 * the metadata and version stores. Version numbering starts at 1.
 * NOTE(review): keys are built by hand as "uri?version" rather than through
 * createVersionKey — confirm this is intentional for timestamp-capable repos,
 * where the key scheme may differ.
 */
public boolean deleteOldVersions(String docUri, int cutoffVersion) {
    List<String> keys = new ArrayList<String>();
    for (int version = 1; version <= cutoffVersion; version++) {
        keys.add(docUri + "?" + version);
    }
    metaStore.delete(keys);
    versionStore.delete(keys);
    return true;
}
// delete versions older than cutoffMillis (lastModifiedTimestamp <= cutoffMillis)
/**
 * Deletes every version whose last-modified timestamp is at or before
 * {@code cutoffMillis}, in both the metadata and version stores.
 */
public boolean deleteOldVersions(String docUri, long cutoffMillis) {
    metaStore.deleteUpTo(docUri, cutoffMillis);
    versionStore.deleteUpTo(docUri, cutoffMillis);
    return true;
}
/** Builds the metadata entry for a brand-new document at the initial version. */
@Override
protected DocumentMetadata createNewMetadata(String user, String comment, String docPath) {
    return createMetadataFromLatest(user, comment, docPath, false, INITIAL_VERSION, true);
}
/**
 * Resolves an "as of time" directive to the version number that was current
 * at that time, by stepping backwards from the latest version until one
 * modified at or before the cutoff is found.
 *
 * @param docUri   document to inspect
 * @param asOfTime as-of-time directive string
 * @return the version current at that time, or null when the document did
 *         not exist yet (or does not exist at all)
 */
public Integer getVersionNumberAsOfTime(String docUri, String asOfTime) {
    AsOfTimeDirectiveParser parser = new AsOfTimeDirectiveParser(asOfTime);
    long asOfTimeMillis = parser.getMillisTimestamp();
    DocumentMetadata metadata = getLatestMeta(docUri);
    // Guard against a missing document (previously NPE'd) or one created
    // after the requested time.
    if (metadata == null || metadata.getCreatedTimestamp() > asOfTimeMillis) {
        return null;
    }
    int version = metadata.getVersion();
    while (metadata != null && metadata.getModifiedTimestamp() > asOfTimeMillis) {
        version--;
        if (version < 1) {
            // Shouldn't happen because we checked for this above, but...
            return null;
        }
        metadata = getVersionMeta(docUri, version);
    }
    if (metadata == null) {
        // An intermediate version's metadata is missing: cannot compute.
        String[] parameters = { docUri, asOfTime };
        throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_NOT_FOUND,
                new MessageFormat(Messages.getString("NVersionedRepository.IncalculableVersion"), parameters));
    }
    return version;
}
/** Whether the backing version store can resolve content by timestamp. */
public boolean supportsVersionLookupByTime() {
    return versionStore.supportsVersionLookupByTime();
}
}
| |
/*
* Copyright (c) Nmote d.o.o. 2003-2015. All rights reserved.
* See LICENSE.txt for licensing information.
*/
/*
* WARNING Do not modify this file.
*
* This file was generated from protocol description file
* and will be OVERWRITTEN on next invocation of
* smpp-gen ant task during build process.
*/
package com.nmote.smpp;
import java.io.IOException;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.EqualsBuilder;
/**
 * A variation of the submit_sm PDU that includes support for up to
 * 255 recipients for the given message.
 *
 * @author (this class was autogenerated)
 */
public class SubmitMultiPDU extends AbstractPDU {

    private static final long serialVersionUID = About.SERIAL_VERSION_UID;

    /**
     * Creates a new PDU object.
     */
    public SubmitMultiPDU() {
    }

    /**
     * Returns Command ID for this PDU.
     *
     * @return PDU.SUBMIT_MULTI;
     */
    public int getCommandId() {
        return PDU.SUBMIT_MULTI;
    }

    /**
     * Reads mandatory and optional parameters from SMPPInputStream.
     * The read order must match the SMPP submit_multi wire layout exactly.
     *
     * @param in SMPPInputStream for reading parameters.
     * @throws IOException In case of a problem while reading data.
     */
    void readParameters(SMPPInputStream in) throws IOException {
        // Mandatory parameters
        service_type = in.readCString();
        source_addr = in.readSMPPAddress();
        dest_addresses = in.readSMPPAddress();
        esm_class = in.readInteger1();
        protocol_id = in.readInteger1();
        priority_flag = in.readInteger1();
        schedule_delivery_time = in.readSMPPTime();
        validity_period = in.readSMPPTime();
        registered_delivery = in.readInteger1();
        replace_if_present_flag = in.readBoolean();
        data_coding = in.readInteger1();
        sm_default_msg_id = in.readInteger1();
        short_message = in.readPString();
        // Optional parameters
        readOptionalParameters(in);
    }

    /**
     * Write mandatory and optional PDU parameters to SMPPOutputStream,
     * in the same order readParameters consumes them.
     *
     * @param out SMPPOutputStream for writing parameters.
     * @throws IOException In case of errors while writing.
     */
    void writeParameters(SMPPOutputStream out) throws IOException {
        // Mandatory parameters
        out.writeCString(service_type);
        out.writeSMPPAddress(source_addr);
        out.writeSMPPAddress(dest_addresses);
        out.writeInteger1(esm_class);
        out.writeInteger1(protocol_id);
        out.writeInteger1(priority_flag);
        out.writeSMPPTime(schedule_delivery_time);
        out.writeSMPPTime(validity_period);
        out.writeInteger1(registered_delivery);
        out.writeBoolean(replace_if_present_flag);
        out.writeInteger1(data_coding);
        out.writeInteger1(sm_default_msg_id);
        out.writePString(short_message);
        // Optional parameters
        writeOptionalParameters(out);
    }

    /**
     * @see com.nmote.smpp.AbstractPDU#isRequestPDU()
     */
    public boolean isRequestPDU() {
        return true;
    }

    /**
     * @see com.nmote.smpp.AbstractPDU#createResponse()
     */
    public AbstractPDU createResponse() {
        // Response echoes this request's sequence number.
        SubmitMultiRespPDU response = new SubmitMultiRespPDU();
        response.sequence = sequence;
        return response;
    }

    /**
     * @see com.nmote.smpp.AbstractPDU#isOneWay()
     */
    public final boolean isOneWay() {
        return false;
    }

    /**
     * Returns PDU name.
     *
     * @return PDU name
     */
    public String getName() {
        return "submit_multi";
    }

    /**
     * Creates a string representation of a PDU.
     *
     * @return a String
     */
    public String toString() {
        ToStringBuilder b = new ToStringBuilder(this);
        b.append(getSequence());
        b.append(ESMEStatus.toString(getStatus()));
        // Appending mandatory parameters
        b.append("service_type", toStringHelper(service_type));
        b.append("source_addr", toStringHelper(source_addr));
        b.append("dest_addresses", toStringHelper(dest_addresses));
        b.append("esm_class", toStringHelper(esm_class));
        b.append("protocol_id", toStringHelper(protocol_id));
        b.append("priority_flag", toStringHelper(priority_flag));
        b.append("schedule_delivery_time", toStringHelper(schedule_delivery_time));
        b.append("validity_period", toStringHelper(validity_period));
        b.append("registered_delivery", toStringHelper(registered_delivery));
        b.append("replace_if_present_flag", toStringHelper(replace_if_present_flag));
        b.append("data_coding", toStringHelper(data_coding));
        b.append("sm_default_msg_id", toStringHelper(sm_default_msg_id));
        b.append("short_message", toStringHelper(short_message));
        // Appending optional parameters
        if (getParameters() != null) {
            b.append(getParameters());
        }
        return b.toString();
    }

    /**
     * Calculates hash code of this object.
     * Covers the same fields as equals(), as required by the contract.
     *
     * @return hash code
     */
    public int hashCode() {
        HashCodeBuilder b = new HashCodeBuilder();
        b.append(getSequence());
        b.append(getStatus());
        // Appending mandatory parameters
        b.append(service_type);
        b.append(source_addr);
        b.append(dest_addresses);
        b.append(esm_class);
        b.append(protocol_id);
        b.append(priority_flag);
        b.append(schedule_delivery_time);
        b.append(validity_period);
        b.append(registered_delivery);
        b.append(replace_if_present_flag);
        b.append(data_coding);
        b.append(sm_default_msg_id);
        b.append(short_message);
        // Appending optional parameters
        if (getParameters() != null) {
            b.append(getParameters());
        }
        return b.toHashCode();
    }

    /**
     * Checks if <code>o</code> and this object are equal.
     *
     * @return true if objects are equal, false otherwise
     */
    public boolean equals(Object o) {
        boolean result;
        if (o instanceof SubmitMultiPDU) {
            SubmitMultiPDU p = (SubmitMultiPDU) o;
            EqualsBuilder b = new EqualsBuilder();
            b.append(p.getSequence(), getSequence());
            b.append(p.getStatus(), getStatus());
            // Appending mandatory parameters
            b.append(p.service_type, service_type);
            b.append(p.source_addr, source_addr);
            b.append(p.dest_addresses, dest_addresses);
            b.append(p.esm_class, esm_class);
            b.append(p.protocol_id, protocol_id);
            b.append(p.priority_flag, priority_flag);
            b.append(p.schedule_delivery_time, schedule_delivery_time);
            b.append(p.validity_period, validity_period);
            b.append(p.registered_delivery, registered_delivery);
            b.append(p.replace_if_present_flag, replace_if_present_flag);
            b.append(p.data_coding, data_coding);
            b.append(p.sm_default_msg_id, sm_default_msg_id);
            b.append(p.short_message, short_message);
            // Appending optional parameters
            b.append(p.getParameters(), getParameters());
            result = b.isEquals();
        } else {
            result = false;
        }
        return result;
    }

    // Mandatory parameters

    private String service_type;

    /**
     * Getter for a mandatory parameter service_type.
     *
     * @return parameter value
     */
    public String getServiceType() {
        return service_type;
    }

    /**
     * Setter for a mandatory parameter service_type.
     *
     * @param value New parameter value.
     */
    public void setServiceType(String value) {
        service_type = value;
    }

    // NOTE(review): a single SMPPAddress despite the multi-recipient javadoc —
    // confirm against the protocol description file used by smpp-gen.
    private SMPPAddress dest_addresses;

    private SMPPAddress source_addr;

    /**
     * Getter for a mandatory parameter source_addr.
     *
     * @return parameter value
     */
    public SMPPAddress getSourceAddr() {
        return source_addr;
    }

    /**
     * Setter for a mandatory parameter source_addr.
     *
     * @param value New parameter value.
     */
    public void setSourceAddr(SMPPAddress value) {
        source_addr = value;
    }

    /**
     * Getter for a mandatory parameter dest_addresses.
     *
     * @return parameter value
     */
    public SMPPAddress getDestAddresses() {
        return dest_addresses;
    }

    /**
     * Setter for a mandatory parameter dest_addresses.
     *
     * @param value New parameter value.
     */
    public void setDestAddresses(SMPPAddress value) {
        dest_addresses = value;
    }

    private int esm_class;

    /**
     * Getter for a mandatory parameter esm_class.
     * The esm_class parameter is used to indicate special message attributes associated with the short message.
     * @return parameter value
     */
    public int getEsmClass() {
        return esm_class;
    }

    /**
     * Setter for a mandatory parameter esm_class.
     * The esm_class parameter is used to indicate special message attributes associated with the short message.
     * @param value New parameter value.
     */
    public void setEsmClass(int value) {
        esm_class = value;
    }

    private int protocol_id;

    /**
     * Getter for a mandatory parameter protocol_id.
     *
     * @return parameter value
     */
    public int getProtocolId() {
        return protocol_id;
    }

    /**
     * Setter for a mandatory parameter protocol_id.
     *
     * @param value New parameter value.
     */
    public void setProtocolId(int value) {
        protocol_id = value;
    }

    private int priority_flag;

    /**
     * Getter for a mandatory parameter priority_flag.
     *
     * @return parameter value
     */
    public int getPriorityFlag() {
        return priority_flag;
    }

    /**
     * Setter for a mandatory parameter priority_flag.
     *
     * @param value New parameter value.
     */
    public void setPriorityFlag(int value) {
        priority_flag = value;
    }

    private SMPPTime schedule_delivery_time;

    /**
     * Getter for a mandatory parameter schedule_delivery_time.
     *
     * @return parameter value
     */
    public SMPPTime getScheduleDeliveryTime() {
        return schedule_delivery_time;
    }

    /**
     * Setter for a mandatory parameter schedule_delivery_time.
     *
     * @param value New parameter value.
     */
    public void setScheduleDeliveryTime(SMPPTime value) {
        schedule_delivery_time = value;
    }

    private SMPPTime validity_period;

    /**
     * Getter for a mandatory parameter validity_period.
     *
     * @return parameter value
     */
    public SMPPTime getValidityPeriod() {
        return validity_period;
    }

    /**
     * Setter for a mandatory parameter validity_period.
     *
     * @param value New parameter value.
     */
    public void setValidityPeriod(SMPPTime value) {
        validity_period = value;
    }

    private int registered_delivery;

    /**
     * Getter for a mandatory parameter registered_delivery.
     *
     * @return parameter value
     */
    public int getRegisteredDelivery() {
        return registered_delivery;
    }

    /**
     * Setter for a mandatory parameter registered_delivery.
     *
     * @param value New parameter value.
     */
    public void setRegisteredDelivery(int value) {
        registered_delivery = value;
    }

    private boolean replace_if_present_flag;

    /**
     * Getter for a mandatory parameter replace_if_present_flag.
     *
     * @return parameter value
     */
    public boolean getReplaceIfPresentFlag() {
        return replace_if_present_flag;
    }

    /**
     * Setter for a mandatory parameter replace_if_present_flag.
     *
     * @param value New parameter value.
     */
    public void setReplaceIfPresentFlag(boolean value) {
        replace_if_present_flag = value;
    }

    private int data_coding;

    /**
     * Getter for a mandatory parameter data_coding.
     * Data coding scheme (see GSM 03.38)
     * @return parameter value
     */
    public int getDataCoding() {
        return data_coding;
    }

    /**
     * Setter for a mandatory parameter data_coding.
     * Data coding scheme (see GSM 03.38)
     * @param value New parameter value.
     */
    public void setDataCoding(int value) {
        data_coding = value;
    }

    private int sm_default_msg_id;

    /**
     * Getter for a mandatory parameter sm_default_msg_id.
     *
     * @return parameter value
     */
    public int getSmDefaultMsgId() {
        return sm_default_msg_id;
    }

    /**
     * Setter for a mandatory parameter sm_default_msg_id.
     *
     * @param value New parameter value.
     */
    public void setSmDefaultMsgId(int value) {
        sm_default_msg_id = value;
    }

    private byte[] short_message;

    /**
     * Getter for a mandatory parameter short_message.
     *
     * @return parameter value
     */
    public byte[] getShortMessage() {
        return short_message;
    }

    /**
     * Setter for a mandatory parameter short_message.
     *
     * @param value New parameter value.
     */
    public void setShortMessage(byte[] value) {
        short_message = value;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.testframework.configvariations;
import java.util.Arrays;
import java.util.NoSuchElementException;
import junit.framework.TestSuite;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.testframework.junits.IgniteCacheConfigVariationsAbstractTest;
import org.apache.ignite.testframework.junits.IgniteConfigVariationsAbstractTest;
import org.jetbrains.annotations.Nullable;
/**
 * Configuration variations test suite builder.
 * <p>
 * Builds a {@link TestSuite} that runs the given test class once per
 * combination of Ignite and cache configuration variations, optionally
 * restricted by filters or pinned to a single specific variation.
 */
public class ConfigVariationsTestSuiteBuilder {
    /** Resulting suite. */
    private final TestSuite suite;

    /** Ignite configuration variation parameters. */
    @SuppressWarnings("unchecked")
    private ConfigParameter<IgniteConfiguration>[][] igniteParams =
        ConfigVariations.igniteBasicSet();

    /** Cache configuration variation parameters (none unless configured). */
    @SuppressWarnings("unchecked")
    private ConfigParameter<CacheConfiguration>[][] cacheParams;

    /** Cache start mode. */
    private CacheStartMode cacheStartMode = CacheStartMode.DYNAMIC;

    /** Whether client nodes participate in the tested nodes. */
    private boolean withClients;

    /** Grid count. */
    private int gridsCnt = 3;

    /** Count of nodes the tests are executed against. */
    private int testedNodeCnt = 1;

    /** Test class. */
    private Class<? extends IgniteConfigVariationsAbstractTest> cls;

    /** Specific Ignite variation to run exclusively, if set. */
    private int[] specificIgniteParam;

    /** Specific cache variation to run exclusively, if set. */
    private int[] specificCacheParam;

    /** Backups count; -1 means "not set". */
    private int backups = -1;

    /** Ignite configuration filters. */
    private IgnitePredicate<IgniteConfiguration>[] igniteCfgFilters;

    /** Cache configuration filters. */
    private IgnitePredicate<CacheConfiguration>[] cacheCfgFilters;

    /**
     * @param name Name.
     * @param cls Test class.
     */
    public ConfigVariationsTestSuiteBuilder(String name, Class<? extends IgniteConfigVariationsAbstractTest> cls) {
        suite = new TestSuite(name);

        this.cls = cls;
    }

    /**
     * Iterates over all (or the pinned) Ignite and cache variations, applies
     * the configured filters, and adds one sub-suite per surviving combination.
     *
     * @return Test suite.
     */
    public TestSuite build() {
        assert testedNodeCnt > 0;
        assert gridsCnt > 0;

        VariationsIterator igniteCfgIter;

        if (specificIgniteParam == null)
            igniteCfgIter = new VariationsIterator(igniteParams);
        else
            igniteCfgIter = new OneElementVariationsIterator(specificIgniteParam, igniteParams);

        for (; igniteCfgIter.hasNext(); ) {
            final int[] igniteCfgVariation = igniteCfgIter.next();

            if (!passIgniteConfigFilter(igniteCfgVariation))
                continue;

            if (cacheParams == null) {
                TestSuite addedSuite = build(igniteCfgVariation, null, true);

                suite.addTest(addedSuite);
            }
            else {
                VariationsIterator cacheCfgIter;

                if (specificCacheParam == null)
                    cacheCfgIter = new VariationsIterator(cacheParams);
                else
                    cacheCfgIter = new OneElementVariationsIterator(specificCacheParam, cacheParams);

                for (; cacheCfgIter.hasNext(); ) {
                    int[] cacheCfgVariation = cacheCfgIter.next();

                    if (!passCacheConfigFilter(cacheCfgVariation))
                        continue;

                    // Stop all grids before starting new ignite configuration.
                    boolean stopNodes = !cacheCfgIter.hasNext();

                    TestSuite addedSuite = build(igniteCfgVariation, cacheCfgVariation, stopNodes);

                    suite.addTest(addedSuite);
                }
            }
        }

        return suite;
    }

    /**
     * @param variation Variation.
     * @return {@code True} if variation pass filters.
     */
    private boolean passIgniteConfigFilter(int[] variation) {
        ConfigVariationsFactory factory = new ConfigVariationsFactory(igniteParams, variation, null, null);

        IgniteConfiguration cfg = factory.getConfiguration(null, null);

        if (igniteCfgFilters != null) {
            for (IgnitePredicate<IgniteConfiguration> filter : igniteCfgFilters) {
                if (!filter.apply(cfg))
                    return false;
            }
        }

        return true;
    }

    /**
     * @param variation Variation.
     * @return {@code True} if variation pass filters.
     */
    private boolean passCacheConfigFilter(int[] variation) {
        ConfigVariationsFactory factory = new ConfigVariationsFactory(null, null, cacheParams, variation);

        CacheConfiguration cfg = factory.cacheConfiguration(null);

        if (cacheCfgFilters != null) {
            for (IgnitePredicate<CacheConfiguration> filter : cacheCfgFilters) {
                if (!filter.apply(cfg))
                    return false;
            }
        }

        return true;
    }

    /**
     * @param igniteCfgVariation Ignite Variation.
     * @param cacheCfgVariation Cache Variation.
     * @param stopNodes Stop nodes.
     * @return Test suite.
     */
    private TestSuite build(int[] igniteCfgVariation, @Nullable int[] cacheCfgVariation, boolean stopNodes) {
        ConfigVariationsFactory factory = new ConfigVariationsFactory(igniteParams,
            igniteCfgVariation, cacheParams, cacheCfgVariation);

        factory.backups(backups);

        String clsNameSuffix = "[igniteCfgVariation=" + Arrays.toString(igniteCfgVariation)
            + ", cacheCfgVariation=" + Arrays.toString(cacheCfgVariation)
            + ", igniteCfg=" + factory.getIgniteConfigurationDescription()
            + ", cacheCfg=" + factory.getCacheConfigurationDescription() + "]";

        VariationsTestsConfig testCfg = new VariationsTestsConfig(factory, clsNameSuffix, stopNodes, cacheStartMode,
            gridsCnt);

        TestSuite addedSuite;

        if (testedNodeCnt > 1)
            addedSuite = createMultiNodeTestSuite((Class<? extends IgniteCacheConfigVariationsAbstractTest>)cls,
                testCfg, testedNodeCnt, withClients);
        else
            addedSuite = new IgniteConfigVariationsTestSuite(cls, testCfg);

        return addedSuite;
    }

    /**
     * @param cls Test class.
     * @param cfg Configuration.
     * @param testedNodeCnt Count of tested nodes.
     * @param withClients Whether client nodes are used.
     * @return Multi-node test suite (one entry per tested node index).
     */
    private static TestSuite createMultiNodeTestSuite(Class<? extends IgniteCacheConfigVariationsAbstractTest> cls,
        VariationsTestsConfig cfg, int testedNodeCnt, boolean withClients) {
        TestSuite suite = new TestSuite();

        if (cfg.gridCount() < testedNodeCnt)
            throw new IllegalArgumentException("Failed to initialize test suite [nodeCnt=" + testedNodeCnt
                + ", cfgGridCnt=" + cfg.gridCount() + "]");

        for (int i = 0; i < testedNodeCnt; i++) {
            // Only the first node starts the cache, only the last stops it
            // (and the nodes, when the parent config requests it).
            boolean stopNodes = cfg.isStopNodes() && i + 1 == testedNodeCnt;
            boolean startCache = i == 0;
            boolean stopCache = i + 1 == testedNodeCnt;

            VariationsTestsConfig cfg0 = new VariationsTestsConfig(cfg.configurationFactory(), cfg.description(),
                stopNodes, startCache, stopCache, cfg.cacheStartMode(), cfg.gridCount(), i, withClients);

            suite.addTest(new IgniteConfigVariationsTestSuite(cls, cfg0));
        }

        return suite;
    }

    /**
     * Enables client nodes. Call {@link #testedNodesCount(int)} with a value
     * greater than 1 first.
     *
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder withClients() {
        if (testedNodeCnt < 2)
            throw new IllegalStateException("Tested node count should be more than 1: " + testedNodeCnt);

        withClients = true;

        return this;
    }

    /**
     * @param testedNodeCnt Tested node count.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder testedNodesCount(int testedNodeCnt) {
        this.testedNodeCnt = testedNodeCnt;

        return this;
    }

    /**
     * @param cnt Count.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder gridsCount(int cnt) {
        assert cnt > 0;

        gridsCnt = cnt;

        return this;
    }

    /**
     * @param igniteParams New ignite params.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder igniteParams(
        ConfigParameter<IgniteConfiguration>[][] igniteParams) {
        this.igniteParams = igniteParams;

        return this;
    }

    /**
     * @param cacheParams New cache params.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder cacheParams(ConfigParameter<CacheConfiguration>[][] cacheParams) {
        this.cacheParams = cacheParams;

        return this;
    }

    /**
     * Sets basic cache params and basic count of backups.
     *
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder withBasicCacheParams() {
        cacheParams = ConfigVariations.cacheBasicSet();
        backups = 1;

        return this;
    }

    /**
     * @param backups Backups.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder backups(int backups) {
        assert backups > 0 : backups;

        this.backups = backups;

        return this;
    }

    /**
     * @param singleIgniteParam Param.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder specifyIgniteParam(int... singleIgniteParam) {
        specificIgniteParam = singleIgniteParam;

        return this;
    }

    /**
     * @param singleParam Param.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder specifyCacheParam(int... singleParam) {
        specificCacheParam = singleParam;

        return this;
    }

    /**
     * @param filters Ignite configuration filters.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder withIgniteConfigFilters(IgnitePredicate<IgniteConfiguration>... filters) {
        igniteCfgFilters = filters;

        return this;
    }

    /**
     * @param filters Ignite configuration filters.
     * @return {@code this} for chaining.
     */
    public ConfigVariationsTestSuiteBuilder withCacheConfigFilters(IgnitePredicate<CacheConfiguration>... filters) {
        cacheCfgFilters = filters;

        return this;
    }

    /**
     * Iterator that yields exactly one predefined variation.
     */
    private static class OneElementVariationsIterator extends VariationsIterator {
        /** Single element to return. */
        private final int[] elem;

        /** Whether the element is still pending. */
        private boolean hasNext = true;

        /**
         * @param elem Element.
         * @param params Variation parameters (passed to the base iterator).
         */
        OneElementVariationsIterator(int[] elem, Object[][] params) {
            super(params);

            this.elem = elem;
        }

        /** {@inheritDoc} */
        @Override public boolean hasNext() {
            return hasNext;
        }

        /** {@inheritDoc} */
        @Override public int[] next() {
            // Honor the Iterator contract: previously this silently returned
            // the same element again after exhaustion.
            if (!hasNext)
                throw new NoSuchElementException();

            hasNext = false;

            return elem;
        }
    }
}
| |
package mil.nga.geopackage.core.srs;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import mil.nga.geopackage.GeoPackageException;
import mil.nga.geopackage.core.contents.Contents;
import mil.nga.geopackage.core.contents.ContentsDao;
import mil.nga.geopackage.features.columns.GeometryColumns;
import mil.nga.geopackage.features.columns.GeometryColumnsDao;
import mil.nga.geopackage.projection.ProjectionConstants;
import mil.nga.geopackage.property.GeoPackageProperties;
import mil.nga.geopackage.property.PropertyConstants;
import mil.nga.geopackage.tiles.matrixset.TileMatrixSet;
import mil.nga.geopackage.tiles.matrixset.TileMatrixSetDao;
import com.j256.ormlite.dao.BaseDaoImpl;
import com.j256.ormlite.dao.DaoManager;
import com.j256.ormlite.dao.ForeignCollection;
import com.j256.ormlite.stmt.PreparedQuery;
import com.j256.ormlite.support.ConnectionSource;
/**
* Spatial Reference System Data Access Object
*
* @author osbornb
*/
public class SpatialReferenceSystemDao extends
BaseDaoImpl<SpatialReferenceSystem, Long> {
/**
* Contents DAO
*/
private ContentsDao contentsDao;
/**
* Geometry Columns DAO
*/
private GeometryColumnsDao geometryColumnsDao;
/**
* Tile Matrix Set DAO
*/
private TileMatrixSetDao tileMatrixSetDao;
/**
 * Constructor, required by ORMLite
 *
 * @param connectionSource connection source for database access
 * @param dataClass DAO entity class, {@link SpatialReferenceSystem}
 * @throws SQLException upon DAO creation failure
 */
public SpatialReferenceSystemDao(ConnectionSource connectionSource,
        Class<SpatialReferenceSystem> dataClass) throws SQLException {
    super(connectionSource, dataClass);
}
/**
 * Creates the required EPSG WGS84 Spatial Reference System (spec
 * Requirement 11)
 *
 * @return the created WGS84 spatial reference system
 * @throws SQLException upon creation failure
 */
public SpatialReferenceSystem createWgs84() throws SQLException {
    return createSrs(PropertyConstants.WGS_84);
}

/**
 * Creates the required Undefined Cartesian Spatial Reference System (spec
 * Requirement 11)
 *
 * @return the created undefined cartesian spatial reference system
 * @throws SQLException upon creation failure
 */
public SpatialReferenceSystem createUndefinedCartesian()
        throws SQLException {
    return createSrs(PropertyConstants.UNDEFINED_CARTESIAN);
}

/**
 * Creates the required Undefined Geographic Spatial Reference System (spec
 * Requirement 11)
 *
 * @return the created undefined geographic spatial reference system
 * @throws SQLException upon creation failure
 */
public SpatialReferenceSystem createUndefinedGeographic()
        throws SQLException {
    return createSrs(PropertyConstants.UNDEFINED_GEOGRAPHIC);
}

/**
 * Creates the Web Mercator Spatial Reference System if it does not already
 * exist
 *
 * @return the created web mercator spatial reference system
 * @throws SQLException upon creation failure
 */
public SpatialReferenceSystem createWebMercator() throws SQLException {
    return createSrs(PropertyConstants.WEB_MERCATOR);
}

/**
 * Builds and persists a Spatial Reference System from the property group
 * identified by {@code base} (srs name, srs id, organization, organization
 * coordsys id, definition, description). Shared by the well-known create
 * methods above, which previously duplicated this body.
 *
 * @param base property base name, e.g. {@link PropertyConstants#WGS_84}
 * @return the created spatial reference system
 * @throws SQLException upon creation failure
 */
private SpatialReferenceSystem createSrs(String base) throws SQLException {
    SpatialReferenceSystem srs = new SpatialReferenceSystem();
    srs.setSrsName(GeoPackageProperties.getProperty(base,
            PropertyConstants.SRS_NAME));
    srs.setSrsId(GeoPackageProperties.getIntegerProperty(base,
            PropertyConstants.SRS_ID));
    srs.setOrganization(GeoPackageProperties.getProperty(base,
            PropertyConstants.ORGANIZATION));
    srs.setOrganizationCoordsysId(GeoPackageProperties.getIntegerProperty(
            base, PropertyConstants.ORGANIZATION_COORDSYS_ID));
    srs.setDefinition(GeoPackageProperties.getProperty(base,
            PropertyConstants.DEFINITION));
    srs.setDescription(GeoPackageProperties.getProperty(base,
            PropertyConstants.DESCRIPTION));
    create(srs);
    return srs;
}
/**
 * Get or Create the Spatial Reference System for the provided id.
 *
 * Only the four spec-defined default systems (WGS 84, Undefined Cartesian,
 * Undefined Geographic, Web Mercator) can be auto-created; any other
 * missing id is an error.
 *
 * @param srsId
 *            spatial reference system id to look up or create
 * @return the existing or newly created Spatial Reference System
 * @throws SQLException
 *             upon query or creation failure
 * @throws GeoPackageException
 *             if the id is missing and is not one of the supported defaults
 */
public SpatialReferenceSystem getOrCreate(long srsId) throws SQLException {
SpatialReferenceSystem srs = queryForId(srsId);
if (srs == null) {
// Narrowing to int is safe here: all supported default ids fit in an int
switch ((int) srsId) {
case ProjectionConstants.EPSG_WORLD_GEODETIC_SYSTEM:
srs = createWgs84();
break;
case ProjectionConstants.UNDEFINED_CARTESIAN:
srs = createUndefinedCartesian();
break;
case ProjectionConstants.UNDEFINED_GEOGRAPHIC:
srs = createUndefinedGeographic();
break;
case ProjectionConstants.EPSG_WEB_MERCATOR:
srs = createWebMercator();
break;
default:
throw new GeoPackageException(
"Spatial Reference System not supported for metadata creation: "
+ srsId);
}
}
return srs;
}
/**
 * Delete the Spatial Reference System, cascading.
 *
 * Dependent rows are removed before the SRS row itself so foreign key
 * constraints are never violated: Contents first (which itself cascades),
 * then Geometry Columns and Tile Matrix Set (only if those tables exist).
 *
 * @param srs
 *            spatial reference system to delete, may be null (no-op)
 * @return number of Spatial Reference System rows deleted (0 or 1)
 * @throws SQLException
 */
public int deleteCascade(SpatialReferenceSystem srs) throws SQLException {
int count = 0;
if (srs != null) {
// Delete Contents (cascades to rows owned by each contents entry)
ForeignCollection<Contents> contentsCollection = srs.getContents();
if (!contentsCollection.isEmpty()) {
ContentsDao dao = getContentsDao();
dao.deleteCascade(contentsCollection);
}
// Delete Geometry Columns (optional table; guard on existence)
GeometryColumnsDao geometryColumnsDao = getGeometryColumnsDao();
if (geometryColumnsDao.isTableExists()) {
ForeignCollection<GeometryColumns> geometryColumnsCollection = srs
.getGeometryColumns();
if (!geometryColumnsCollection.isEmpty()) {
geometryColumnsDao.delete(geometryColumnsCollection);
}
}
// Delete Tile Matrix Set (optional table; guard on existence)
TileMatrixSetDao tileMatrixSetDao = getTileMatrixSetDao();
if (tileMatrixSetDao.isTableExists()) {
ForeignCollection<TileMatrixSet> tileMatrixSetCollection = srs
.getTileMatrixSet();
if (!tileMatrixSetCollection.isEmpty()) {
tileMatrixSetDao.delete(tileMatrixSetCollection);
}
}
// Finally delete the Spatial Reference System row itself
count = delete(srs);
}
return count;
}
/**
 * Delete the collection of Spatial Reference Systems, cascading each one.
 *
 * @param srsCollection
 *            spatial reference systems to delete, may be null (no-op)
 * @return total number of Spatial Reference System rows deleted
 * @throws SQLException
 */
public int deleteCascade(Collection<SpatialReferenceSystem> srsCollection)
		throws SQLException {
	int deleted = 0;
	if (srsCollection == null) {
		return deleted;
	}
	for (SpatialReferenceSystem system : srsCollection) {
		deleted += deleteCascade(system);
	}
	return deleted;
}
/**
 * Delete the Spatial Reference Systems matching the prepared query,
 * cascading each match.
 *
 * @param preparedDelete
 *            prepared query selecting the rows to delete, may be null
 * @return total number of Spatial Reference System rows deleted
 * @throws SQLException
 */
public int deleteCascade(
		PreparedQuery<SpatialReferenceSystem> preparedDelete)
		throws SQLException {
	if (preparedDelete == null) {
		return 0;
	}
	// Resolve the matches, then reuse the collection cascade
	return deleteCascade(query(preparedDelete));
}
/**
 * Delete a Spatial Reference System by id, cascading.
 *
 * @param id
 *            spatial reference system id, may be null (no-op)
 * @return number of Spatial Reference System rows deleted (0 or 1)
 * @throws SQLException
 */
public int deleteByIdCascade(Long id) throws SQLException {
	if (id == null) {
		return 0;
	}
	SpatialReferenceSystem existing = queryForId(id);
	// Nothing to do when the id is unknown
	return existing != null ? deleteCascade(existing) : 0;
}
/**
 * Delete the Spatial Reference Systems with the provided ids, cascading
 * each one.
 *
 * @param idCollection
 *            spatial reference system ids, may be null (no-op)
 * @return total number of Spatial Reference System rows deleted
 * @throws SQLException
 */
public int deleteIdsCascade(Collection<Long> idCollection)
		throws SQLException {
	int deleted = 0;
	if (idCollection != null) {
		for (Long srsId : idCollection) {
			deleted += deleteByIdCascade(srsId);
		}
	}
	return deleted;
}
/**
 * Get or create a Contents DAO, lazily initializing it on first use.
 *
 * @return the Contents DAO
 * @throws SQLException
 */
private ContentsDao getContentsDao() throws SQLException {
	if (contentsDao != null) {
		return contentsDao;
	}
	// First access: build the DAO from the shared connection source
	contentsDao = DaoManager.createDao(connectionSource, Contents.class);
	return contentsDao;
}
/**
 * Get or create a Geometry Columns DAO, lazily initializing it on first
 * use.
 *
 * @return the Geometry Columns DAO
 * @throws SQLException
 */
private GeometryColumnsDao getGeometryColumnsDao() throws SQLException {
	if (geometryColumnsDao != null) {
		return geometryColumnsDao;
	}
	// First access: build the DAO from the shared connection source
	geometryColumnsDao = DaoManager.createDao(connectionSource,
			GeometryColumns.class);
	return geometryColumnsDao;
}
/**
 * Get or create a Tile Matrix Set DAO, lazily initializing it on first
 * use.
 *
 * @return the Tile Matrix Set DAO
 * @throws SQLException
 */
private TileMatrixSetDao getTileMatrixSetDao() throws SQLException {
	if (tileMatrixSetDao != null) {
		return tileMatrixSetDao;
	}
	// First access: build the DAO from the shared connection source
	tileMatrixSetDao = DaoManager.createDao(connectionSource,
			TileMatrixSet.class);
	return tileMatrixSetDao;
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/automl/v1beta1/service.proto
package com.google.cloud.automl.v1beta1;
/**
 *
 *
 * <pre>
 * Request message for
 * [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel].
 * </pre>
 *
 * Protobuf type {@code google.cloud.automl.v1beta1.CreateModelRequest}
 */
// NOTE(review): protoc-generated message class — do not hand-edit logic here;
// regenerate from google/cloud/automl/v1beta1/service.proto instead.
public final class CreateModelRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.automl.v1beta1.CreateModelRequest)
    CreateModelRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateModelRequest.newBuilder() to construct.
  private CreateModelRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance constructor: initializes string fields to "" (proto3 default)
  private CreateModelRequest() {
    parent_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor used by PARSER; reads tag/value pairs until
  // end of stream, preserving unrecognized fields in unknownFields.
  private CreateModelRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              // field 1 (parent), wire type 2: length-delimited UTF-8 string
              java.lang.String s = input.readStringRequireUtf8();
              parent_ = s;
              break;
            }
          case 34:
            {
              // field 4 (model), wire type 2: embedded message; merge if repeated on the wire
              com.google.cloud.automl.v1beta1.Model.Builder subBuilder = null;
              if (model_ != null) {
                subBuilder = model_.toBuilder();
              }
              model_ =
                  input.readMessage(
                      com.google.cloud.automl.v1beta1.Model.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(model_);
                model_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_CreateModelRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.automl.v1beta1.AutoMlProto
        .internal_static_google_cloud_automl_v1beta1_CreateModelRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.automl.v1beta1.CreateModelRequest.class,
            com.google.cloud.automl.v1beta1.CreateModelRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; lazily converted and cached on access
  private volatile java.lang.Object parent_;
  /**
   *
   *
   * <pre>
   * Resource name of the parent project where the model is being created.
   * </pre>
   *
   * <code>string parent = 1;</code>
   */
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Resource name of the parent project where the model is being created.
   * </pre>
   *
   * <code>string parent = 1;</code>
   */
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int MODEL_FIELD_NUMBER = 4;
  // null means unset (proto3 message fields have explicit presence)
  private com.google.cloud.automl.v1beta1.Model model_;
  /**
   *
   *
   * <pre>
   * The model to create.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
   */
  public boolean hasModel() {
    return model_ != null;
  }
  /**
   *
   *
   * <pre>
   * The model to create.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
   */
  public com.google.cloud.automl.v1beta1.Model getModel() {
    return model_ == null ? com.google.cloud.automl.v1beta1.Model.getDefaultInstance() : model_;
  }
  /**
   *
   *
   * <pre>
   * The model to create.
   * </pre>
   *
   * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
   */
  public com.google.cloud.automl.v1beta1.ModelOrBuilder getModelOrBuilder() {
    return getModel();
  }

  // -1 = not computed yet, 0 = not initialized, 1 = initialized
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getParentBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (model_ != null) {
      output.writeMessage(4, getModel());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!getParentBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (model_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getModel());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.automl.v1beta1.CreateModelRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.automl.v1beta1.CreateModelRequest other =
        (com.google.cloud.automl.v1beta1.CreateModelRequest) obj;
    boolean result = true;
    result = result && getParent().equals(other.getParent());
    result = result && (hasModel() == other.hasModel());
    if (hasModel()) {
      result = result && getModel().equals(other.getModel());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasModel()) {
      hash = (37 * hash) + MODEL_FIELD_NUMBER;
      hash = (53 * hash) + getModel().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.automl.v1beta1.CreateModelRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for
   * [AutoMl.CreateModel][google.cloud.automl.v1beta1.AutoMl.CreateModel].
   * </pre>
   *
   * Protobuf type {@code google.cloud.automl.v1beta1.CreateModelRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.automl.v1beta1.CreateModelRequest)
      com.google.cloud.automl.v1beta1.CreateModelRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_CreateModelRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_CreateModelRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.automl.v1beta1.CreateModelRequest.class,
              com.google.cloud.automl.v1beta1.CreateModelRequest.Builder.class);
    }

    // Construct using com.google.cloud.automl.v1beta1.CreateModelRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      parent_ = "";
      if (modelBuilder_ == null) {
        model_ = null;
      } else {
        model_ = null;
        modelBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.automl.v1beta1.AutoMlProto
          .internal_static_google_cloud_automl_v1beta1_CreateModelRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.CreateModelRequest getDefaultInstanceForType() {
      return com.google.cloud.automl.v1beta1.CreateModelRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.CreateModelRequest build() {
      com.google.cloud.automl.v1beta1.CreateModelRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.automl.v1beta1.CreateModelRequest buildPartial() {
      com.google.cloud.automl.v1beta1.CreateModelRequest result =
          new com.google.cloud.automl.v1beta1.CreateModelRequest(this);
      result.parent_ = parent_;
      if (modelBuilder_ == null) {
        result.model_ = model_;
      } else {
        result.model_ = modelBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.automl.v1beta1.CreateModelRequest) {
        return mergeFrom((com.google.cloud.automl.v1beta1.CreateModelRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.automl.v1beta1.CreateModelRequest other) {
      if (other == com.google.cloud.automl.v1beta1.CreateModelRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        onChanged();
      }
      if (other.hasModel()) {
        mergeModel(other.getModel());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.automl.v1beta1.CreateModelRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.automl.v1beta1.CreateModelRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Resource name of the parent project where the model is being created.
     * </pre>
     *
     * <code>string parent = 1;</code>
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Resource name of the parent project where the model is being created.
     * </pre>
     *
     * <code>string parent = 1;</code>
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Resource name of the parent project where the model is being created.
     * </pre>
     *
     * <code>string parent = 1;</code>
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Resource name of the parent project where the model is being created.
     * </pre>
     *
     * <code>string parent = 1;</code>
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Resource name of the parent project where the model is being created.
     * </pre>
     *
     * <code>string parent = 1;</code>
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      onChanged();
      return this;
    }

    private com.google.cloud.automl.v1beta1.Model model_ = null;
    // Lazily created nested-message builder; once non-null it owns model state
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.Model,
            com.google.cloud.automl.v1beta1.Model.Builder,
            com.google.cloud.automl.v1beta1.ModelOrBuilder>
        modelBuilder_;
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public boolean hasModel() {
      return modelBuilder_ != null || model_ != null;
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public com.google.cloud.automl.v1beta1.Model getModel() {
      if (modelBuilder_ == null) {
        return model_ == null ? com.google.cloud.automl.v1beta1.Model.getDefaultInstance() : model_;
      } else {
        return modelBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public Builder setModel(com.google.cloud.automl.v1beta1.Model value) {
      if (modelBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        model_ = value;
        onChanged();
      } else {
        modelBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public Builder setModel(com.google.cloud.automl.v1beta1.Model.Builder builderForValue) {
      if (modelBuilder_ == null) {
        model_ = builderForValue.build();
        onChanged();
      } else {
        modelBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public Builder mergeModel(com.google.cloud.automl.v1beta1.Model value) {
      if (modelBuilder_ == null) {
        if (model_ != null) {
          model_ =
              com.google.cloud.automl.v1beta1.Model.newBuilder(model_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          model_ = value;
        }
        onChanged();
      } else {
        modelBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public Builder clearModel() {
      if (modelBuilder_ == null) {
        model_ = null;
        onChanged();
      } else {
        model_ = null;
        modelBuilder_ = null;
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public com.google.cloud.automl.v1beta1.Model.Builder getModelBuilder() {
      onChanged();
      return getModelFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    public com.google.cloud.automl.v1beta1.ModelOrBuilder getModelOrBuilder() {
      if (modelBuilder_ != null) {
        return modelBuilder_.getMessageOrBuilder();
      } else {
        return model_ == null ? com.google.cloud.automl.v1beta1.Model.getDefaultInstance() : model_;
      }
    }
    /**
     *
     *
     * <pre>
     * The model to create.
     * </pre>
     *
     * <code>.google.cloud.automl.v1beta1.Model model = 4;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.automl.v1beta1.Model,
            com.google.cloud.automl.v1beta1.Model.Builder,
            com.google.cloud.automl.v1beta1.ModelOrBuilder>
        getModelFieldBuilder() {
      if (modelBuilder_ == null) {
        modelBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.automl.v1beta1.Model,
                com.google.cloud.automl.v1beta1.Model.Builder,
                com.google.cloud.automl.v1beta1.ModelOrBuilder>(
                getModel(), getParentForChildren(), isClean());
        // Ownership moves to the field builder once it exists
        model_ = null;
      }
      return modelBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.automl.v1beta1.CreateModelRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.CreateModelRequest)
  private static final com.google.cloud.automl.v1beta1.CreateModelRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.automl.v1beta1.CreateModelRequest();
  }

  public static com.google.cloud.automl.v1beta1.CreateModelRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateModelRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateModelRequest>() {
        @java.lang.Override
        public CreateModelRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new CreateModelRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<CreateModelRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateModelRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.automl.v1beta1.CreateModelRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.core.StringContains.containsString;
import static org.hamcrest.object.HasToString.hasToString;
public class IndexSettingsTests extends ESTestCase {
// Verifies that a registered dynamic-setting consumer runs only when the
// setting's value actually changes in an index metadata update.
public void testRunListener() {
    final Version created = VersionUtils.getPreviousVersion();
    final Settings baseSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, created)
        .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef")
        .build();
    final AtomicInteger observed = new AtomicInteger(0);
    final Setting<Integer> intSetting = Setting.intSetting("index.test.setting.int", -1,
        Property.Dynamic, Property.IndexScope);
    final IndexMetaData meta = newIndexMeta("index", baseSettings);
    final IndexSettings indexSettings = newIndexSettings(newIndexMeta("index", baseSettings), Settings.EMPTY, intSetting);
    indexSettings.getScopedSettings().addSettingsUpdateConsumer(intSetting, observed::set);
    assertEquals(created, indexSettings.getIndexVersionCreated());
    assertEquals("0xdeadbeef", indexSettings.getUUID());
    // Re-applying identical metadata is a no-op and must not fire the consumer
    assertFalse(indexSettings.updateIndexMetaData(meta));
    assertEquals(meta.getSettings().getAsMap(), indexSettings.getSettings().getAsMap());
    assertEquals(0, observed.get());
    // A real change must report an update and invoke the consumer
    final IndexMetaData changed = newIndexMeta("index",
        Settings.builder().put(baseSettings).put("index.test.setting.int", 42).build());
    assertTrue(indexSettings.updateIndexMetaData(changed));
    assertEquals(42, observed.get());
}
// Verifies that a settings-update validator can reject a new value (the
// update throws and the consumer is not invoked) while valid values apply.
public void testSettingsUpdateValidator() {
    final Version created = VersionUtils.getPreviousVersion();
    final Settings baseSettings = Settings.builder()
        .put(IndexMetaData.SETTING_VERSION_CREATED, created)
        .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef")
        .build();
    final AtomicInteger observed = new AtomicInteger(0);
    final Setting<Integer> intSetting = Setting.intSetting("index.test.setting.int", -1,
        Property.Dynamic, Property.IndexScope);
    final IndexMetaData meta = newIndexMeta("index", baseSettings);
    final IndexSettings indexSettings = newIndexSettings(newIndexMeta("index", baseSettings), Settings.EMPTY, intSetting);
    // Validator rejects exactly the value 42
    indexSettings.getScopedSettings().addSettingsUpdateConsumer(intSetting, observed::set,
        (i) -> {if (i == 42) throw new AssertionError("boom");});
    assertEquals(created, indexSettings.getIndexVersionCreated());
    assertEquals("0xdeadbeef", indexSettings.getUUID());
    assertFalse(indexSettings.updateIndexMetaData(meta));
    assertEquals(meta.getSettings().getAsMap(), indexSettings.getSettings().getAsMap());
    assertEquals(0, observed.get());
    // 42 trips the validator: update must fail and leave the consumer untouched
    expectThrows(IllegalArgumentException.class, () -> indexSettings.updateIndexMetaData(newIndexMeta("index",
        Settings.builder().put(baseSettings).put("index.test.setting.int", 42).build())));
    // 41 passes validation and is delivered to the consumer
    assertTrue(indexSettings.updateIndexMetaData(newIndexMeta("index",
        Settings.builder().put(baseSettings).put("index.test.setting.int", 41).build())));
    assertEquals(41, observed.get());
}
    /**
     * A settings update must only notify the consumers of settings whose value actually
     * changed: updating one setting fires its consumer, while the other registered
     * consumer is left untouched, and the new metadata becomes the current metadata.
     */
    public void testMergedSettingsArePassed() {
        Version version = VersionUtils.getPreviousVersion();
        Settings theSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
            .put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build();
        final AtomicInteger integer = new AtomicInteger(0);
        final StringBuilder builder = new StringBuilder();
        Setting<Integer> integerSetting = Setting.intSetting("index.test.setting.int", -1,
            Property.Dynamic, Property.IndexScope);
        Setting<String> notUpdated = new Setting<>("index.not.updated", "", Function.identity(),
            Property.Dynamic, Property.IndexScope);
        IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), Settings.EMPTY, integerSetting, notUpdated);
        settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, integer::set);
        settings.getScopedSettings().addSettingsUpdateConsumer(notUpdated, builder::append);
        assertEquals(0, integer.get());
        assertEquals("", builder.toString());
        // Update only the int setting: its consumer fires, the string consumer does not.
        IndexMetaData newMetaData = newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings())
            .put("index.test.setting.int", 42).build());
        assertTrue(settings.updateIndexMetaData(newMetaData));
        assertSame(settings.getIndexMetaData(), newMetaData);
        assertEquals(42, integer.get());
        assertEquals("", builder.toString());
        integer.set(0);
        // Now update only the string setting: the int consumer must stay silent.
        assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings())
            .put("index.not.updated", "boom").build())));
        assertEquals("boom", builder.toString());
        assertEquals("not updated - we preserve the old settings", 0, integer.get());
    }
public void testSettingsConsistency() {
Version version = VersionUtils.getPreviousVersion();
IndexMetaData metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(version, settings.getIndexVersionCreated());
assertEquals("_na_", settings.getUUID());
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
Version.CURRENT).put("index.test.setting.int", 42).build()));
fail("version has changed");
} catch (IllegalArgumentException ex) {
assertTrue(ex.getMessage(), ex.getMessage().startsWith("version mismatch on settings update expected: "));
}
// use version number that is unknown
metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromId(999999))
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(Version.fromId(999999), settings.getIndexVersionCreated());
assertEquals("_na_", settings.getUUID());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
Version.fromId(999999)).put("index.test.setting.int", 42).build()));
metaData = newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build());
settings = new IndexSettings(metaData, Settings.EMPTY);
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
Version.CURRENT).put("index.test.setting.int", 42).build()));
fail("uuid missing/change");
} catch (IllegalArgumentException ex) {
assertEquals("uuid mismatch on settings update expected: 0xdeadbeef but was: _na_", ex.getMessage());
}
assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap());
}
public IndexSettings newIndexSettings(IndexMetaData metaData, Settings nodeSettings, Setting<?>... settings) {
Set<Setting<?>> settingSet = new HashSet<>(IndexScopedSettings.BUILT_IN_INDEX_SETTINGS);
if (settings.length > 0) {
settingSet.addAll(Arrays.asList(settings));
}
return new IndexSettings(metaData, nodeSettings, new IndexScopedSettings(Settings.EMPTY, settingSet));
}
    /**
     * Node-level settings act as a fallback for index-scoped settings: an index-level value
     * wins while present, and removing it on an update falls back to the node-level value
     * (here {@code index.foo.bar=43}) and notifies the consumer accordingly.
     */
    public void testNodeSettingsAreContained() {
        final int numShards = randomIntBetween(1, 10);
        final int numReplicas = randomIntBetween(0, 10);
        Settings theSettings = Settings.builder().
            put("index.foo.bar", 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build();
        Settings nodeSettings = Settings.builder().put("index.foo.bar", 43).build();
        final AtomicInteger indexValue = new AtomicInteger(0);
        Setting<Integer> integerSetting = Setting.intSetting("index.foo.bar", -1, Property.Dynamic, Property.IndexScope);
        IndexSettings settings = newIndexSettings(newIndexMeta("index", theSettings), nodeSettings, integerSetting);
        settings.getScopedSettings().addSettingsUpdateConsumer(integerSetting, indexValue::set);
        assertEquals(numReplicas, settings.getNumberOfReplicas());
        assertEquals(numShards, settings.getNumberOfShards());
        assertEquals(0, indexValue.get());
        // Index-level update to 42: the index value shadows the node-level 43.
        assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().
            put("index.foo.bar", 42)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas + 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build())));
        assertEquals(42, indexValue.get());
        assertSame(nodeSettings, settings.getNodeSettings());
        // Removing the index-level value falls back to the node-level setting (43).
        assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, numReplicas + 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, numShards).build())));
        assertEquals(43, indexValue.get());
    }
public static IndexMetaData newIndexMeta(String name, Settings indexSettings) {
Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(indexSettings)
.build();
return IndexMetaData.builder(name).settings(build).build();
}
public void testUpdateDurability() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), "async")
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(),
"request").build()));
assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability()); // test default
}
public void testIsWarmerEnabled() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(), false)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertFalse(settings.isWarmerEnabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(),
"true").build()));
assertTrue(settings.isWarmerEnabled());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertTrue(settings.isWarmerEnabled());
}
    /**
     * index.refresh_interval is dynamic: the getter must reflect the parsed {@code TimeValue}
     * both for the creation-time value and after a settings update, using randomized time
     * strings (including "-1") from {@link #getRandomTimeString()}.
     */
    public void testRefreshInterval() {
        String refreshInterval = getRandomTimeString();
        IndexMetaData metaData = newIndexMeta("index", Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval)
            .build());
        IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
        // Compare against an independent parse of the same string.
        assertEquals(TimeValue.parseTimeValue(refreshInterval, new TimeValue(1, TimeUnit.DAYS),
            IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval());
        String newRefreshInterval = getRandomTimeString();
        settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(),
            newRefreshInterval).build()));
        assertEquals(TimeValue.parseTimeValue(newRefreshInterval, new TimeValue(1, TimeUnit.DAYS),
            IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval());
    }
private String getRandomTimeString() {
int refreshIntervalInt= randomFrom(-1, Math.abs(randomInt()));
String refreshInterval = Integer.toString(refreshIntervalInt);
if (refreshIntervalInt >= 0) {
refreshInterval += randomFrom("s", "ms", "h");
}
return refreshInterval;
}
public void testMaxResultWindow() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), 15)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(15, settings.getMaxResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(),
42).build()));
assertEquals(42, settings.getMaxResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow());
}
public void testMaxInnerResultWindow() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 200)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(200, settings.getMaxInnerResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(),
50).build()));
assertEquals(50, settings.getMaxInnerResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertEquals(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxInnerResultWindow());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxInnerResultWindow());
}
    /**
     * index.max_adjacency_matrix_filters is dynamic: the explicit value is honored, updates
     * apply, and removing the setting (or never setting it) yields the setting's default.
     */
    public void testMaxAdjacencyMatrixFiltersSetting() {
        IndexMetaData metaData = newIndexMeta("index", Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.getKey(), 15)
            .build());
        IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
        assertEquals(15, settings.getMaxAdjacencyMatrixFilters());
        // Dynamic update.
        settings.updateIndexMetaData(newIndexMeta("index",
            Settings.builder().put(IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.getKey(),
            42).build()));
        assertEquals(42, settings.getMaxAdjacencyMatrixFilters());
        // Removing the setting resets to the default.
        settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
        assertEquals(IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.get(Settings.EMPTY).intValue(),
            settings.getMaxAdjacencyMatrixFilters());
        // A fresh instance without the setting also reports the default.
        metaData = newIndexMeta("index", Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build());
        settings = new IndexSettings(metaData, Settings.EMPTY);
        assertEquals(IndexSettings.MAX_ADJACENCY_MATRIX_FILTERS_SETTING.get(Settings.EMPTY).intValue(),
            settings.getMaxAdjacencyMatrixFilters());
    }
public void testGCDeletesSetting() {
TimeValue gcDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS);
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeleteSetting.getStringRep())
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(TimeValue.parseTimeValue(gcDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis());
TimeValue newGCDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS);
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(),
newGCDeleteSetting.getStringRep()).build()));
assertEquals(TimeValue.parseTimeValue(newGCDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(),
randomBoolean() ? -1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).build()));
assertEquals(-1, settings.getGcDeletesInMillis());
}
public void testIsTTLPurgeDisabled() {
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(), false)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertFalse(settings.isTTLPurgeDisabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(),
"true").build()));
assertTrue(settings.isTTLPurgeDisabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertFalse("reset to default", settings.isTTLPurgeDisabled());
metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.build());
settings = new IndexSettings(metaData, Settings.EMPTY);
assertFalse(settings.isTTLPurgeDisabled());
}
public void testTranslogFlushSizeThreshold() {
ByteSizeValue translogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
ByteSizeValue actualValue = ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(),
IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
IndexMetaData metaData = newIndexMeta("index", Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), translogFlushThresholdSize.toString())
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(actualValue, settings.getFlushThresholdSize());
ByteSizeValue newTranslogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(),
IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), newTranslogFlushThresholdSize.toString()).build()));
assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize());
}
public void testTranslogGenerationSizeThreshold() {
final ByteSizeValue size = new ByteSizeValue(Math.abs(randomInt()));
final String key = IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey();
final ByteSizeValue actualValue =
ByteSizeValue.parseBytesSizeValue(size.toString(), key);
final IndexMetaData metaData =
newIndexMeta(
"index",
Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(key, size.toString())
.build());
final IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(actualValue, settings.getGenerationThresholdSize());
final ByteSizeValue newSize = new ByteSizeValue(Math.abs(randomInt()));
final ByteSizeValue actual = ByteSizeValue.parseBytesSizeValue(newSize.toString(), key);
settings.updateIndexMetaData(
newIndexMeta("index", Settings.builder().put(key, newSize.toString()).build()));
assertEquals(actual, settings.getGenerationThresholdSize());
}
    /**
     * archiveUnknownOrInvalidSettings must: leave clean settings untouched (same instance),
     * move unknown or unparseable keys under the "archived." prefix exactly once (no double
     * archiving), and invoke the matching callback for each archived key.
     */
    public void testArchiveBrokenIndexSettings() {
        // Clean input: nothing to archive, the very same instance is returned.
        Settings settings =
            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
                Settings.EMPTY,
                e -> { assert false : "should not have been invoked, no unknown settings"; },
                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
        assertSame(settings, Settings.EMPTY);
        // Invalid value: the key is archived and the invalid-consumer sees key, value and cause.
        settings =
            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
                Settings.builder().put("index.refresh_interval", "-200").build(),
                e -> { assert false : "should not have been invoked, no invalid settings"; },
                (e, ex) -> {
                    assertThat(e.getKey(), equalTo("index.refresh_interval"));
                    assertThat(e.getValue(), equalTo("-200"));
                    assertThat(ex, hasToString(containsString("failed to parse setting [index.refresh_interval] with value [-200]")));
                });
        assertEquals("-200", settings.get("archived.index.refresh_interval"));
        assertNull(settings.get("index.refresh_interval"));
        Settings prevSettings = settings; // no double archive
        settings =
            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
                prevSettings,
                e -> { assert false : "should not have been invoked, no unknown settings"; },
                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
        assertSame(prevSettings, settings);
        // Unknown key: archived via the unknown-consumer; valid and private keys pass through.
        settings =
            IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrInvalidSettings(
                Settings.builder()
                    .put("index.version.created", Version.CURRENT.id) // private setting
                    .put("index.unknown", "foo")
                    .put("index.refresh_interval", "2s").build(),
                e -> {
                    assertThat(e.getKey(), equalTo("index.unknown"));
                    assertThat(e.getValue(), equalTo("foo"));
                },
                (e, ex) -> { assert false : "should not have been invoked, no invalid settings"; });
        assertEquals("foo", settings.get("archived.index.unknown"));
        assertEquals(Integer.toString(Version.CURRENT.id), settings.get("index.version.created"));
        assertEquals("2s", settings.get("index.refresh_interval"));
    }
public void testSingleTypeSetting() {
{
IndexSettings index = newIndexSettings(newIndexMeta("index", Settings.EMPTY), Settings.EMPTY);
IndexScopedSettings scopedSettings = index.getScopedSettings();
Settings build = Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build();
scopedSettings.archiveUnknownOrInvalidSettings(build, e -> fail("unexpected unknown setting " + e),
(e, ex) -> fail("unexpected illegal setting"));
assertTrue(index.isSingleType());
expectThrows(IllegalArgumentException.class, () -> {
index.getScopedSettings()
.validate(Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build());
});
}
{
boolean single_type = randomBoolean();
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_6_0)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, single_type)
.build();
IndexMetaData meta = IndexMetaData.builder("index").settings(settings).build();
IndexSettings index = newIndexSettings(meta, Settings.EMPTY);
IndexScopedSettings scopedSettings = index.getScopedSettings();
Settings build = Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build();
scopedSettings.archiveUnknownOrInvalidSettings(build, e -> fail("unexpected unknown setting " + e),
(e, ex) -> fail("unexpected illegal setting"));
assertEquals(single_type, index.isSingleType());
}
{
Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, false)
.build();
IndexMetaData meta = IndexMetaData.builder("index").settings(settings).build();
try {
newIndexSettings(meta, Settings.EMPTY);
fail("should fail with assertion error");
} catch (AssertionError e) {
// all is well
}
}
}
    /**
     * index.query.default_field: defaults to ["*"], can be set at the node level (single
     * value), and can be updated dynamically to a list of fields.
     */
    public void testQueryDefaultField() {
        // Default when unset anywhere.
        IndexSettings index = newIndexSettings(
            newIndexMeta("index", Settings.EMPTY), Settings.EMPTY
        );
        assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("*")));
        // Node-level single value.
        index = newIndexSettings(
            newIndexMeta("index", Settings.EMPTY), Settings.builder().put("index.query.default_field", "body").build()
        );
        assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("body")));
        // Dynamic update to a multi-field list.
        index.updateIndexMetaData(
            newIndexMeta("index", Settings.builder().putArray("index.query.default_field", "body", "title").build())
        );
        assertThat(index.getDefaultFields(), equalTo(Arrays.asList("body", "title")));
    }
}
| |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.autofill_assistant;
import static android.support.test.espresso.Espresso.onView;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.action.ViewActions.scrollTo;
import static android.support.test.espresso.assertion.ViewAssertions.doesNotExist;
import static android.support.test.espresso.assertion.ViewAssertions.matches;
import static android.support.test.espresso.intent.Intents.intended;
import static android.support.test.espresso.intent.Intents.intending;
import static android.support.test.espresso.intent.matcher.IntentMatchers.anyIntent;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasAction;
import static android.support.test.espresso.intent.matcher.IntentMatchers.hasData;
import static android.support.test.espresso.matcher.ViewMatchers.Visibility.VISIBLE;
import static android.support.test.espresso.matcher.ViewMatchers.assertThat;
import static android.support.test.espresso.matcher.ViewMatchers.hasDescendant;
import static android.support.test.espresso.matcher.ViewMatchers.hasSibling;
import static android.support.test.espresso.matcher.ViewMatchers.hasTextColor;
import static android.support.test.espresso.matcher.ViewMatchers.isChecked;
import static android.support.test.espresso.matcher.ViewMatchers.isCompletelyDisplayed;
import static android.support.test.espresso.matcher.ViewMatchers.isDisplayed;
import static android.support.test.espresso.matcher.ViewMatchers.isEnabled;
import static android.support.test.espresso.matcher.ViewMatchers.withClassName;
import static android.support.test.espresso.matcher.ViewMatchers.withEffectiveVisibility;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static android.support.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.not;
import static org.chromium.chrome.browser.autofill_assistant.AutofillAssistantUiTestUtil.hasTintColor;
import static org.chromium.chrome.browser.autofill_assistant.AutofillAssistantUiTestUtil.hasTypefaceSpan;
import static org.chromium.chrome.browser.autofill_assistant.AutofillAssistantUiTestUtil.startAutofillAssistant;
import static org.chromium.chrome.browser.autofill_assistant.AutofillAssistantUiTestUtil.waitUntilViewMatchesCondition;
import static org.chromium.chrome.browser.autofill_assistant.AutofillAssistantUiTestUtil.withParentIndex;
import android.app.Activity;
import android.app.Instrumentation.ActivityResult;
import android.content.Intent;
import android.graphics.Typeface;
import android.net.Uri;
import android.support.test.InstrumentationRegistry;
import android.support.test.espresso.intent.Intents;
import android.support.test.filters.MediumTest;
import android.widget.RadioButton;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.DisableIf;
import org.chromium.chrome.autofill_assistant.R;
import org.chromium.chrome.browser.autofill_assistant.proto.ActionProto;
import org.chromium.chrome.browser.autofill_assistant.proto.ChipProto;
import org.chromium.chrome.browser.autofill_assistant.proto.ChipType;
import org.chromium.chrome.browser.autofill_assistant.proto.CounterInputProto;
import org.chromium.chrome.browser.autofill_assistant.proto.CounterInputProto.Counter;
import org.chromium.chrome.browser.autofill_assistant.proto.FormInputProto;
import org.chromium.chrome.browser.autofill_assistant.proto.FormProto;
import org.chromium.chrome.browser.autofill_assistant.proto.InfoPopupProto;
import org.chromium.chrome.browser.autofill_assistant.proto.InfoPopupProto.DialogButton;
import org.chromium.chrome.browser.autofill_assistant.proto.InfoPopupProto.DialogButton.OpenUrlInCCT;
import org.chromium.chrome.browser.autofill_assistant.proto.ProcessedActionProto;
import org.chromium.chrome.browser.autofill_assistant.proto.ProcessedActionStatusProto;
import org.chromium.chrome.browser.autofill_assistant.proto.PromptProto;
import org.chromium.chrome.browser.autofill_assistant.proto.PromptProto.Choice;
import org.chromium.chrome.browser.autofill_assistant.proto.SelectionInputProto;
import org.chromium.chrome.browser.autofill_assistant.proto.ShowFormProto;
import org.chromium.chrome.browser.autofill_assistant.proto.SupportedScriptProto;
import org.chromium.chrome.browser.autofill_assistant.proto.SupportedScriptProto.PresentationProto;
import org.chromium.chrome.browser.customtabs.CustomTabActivityTestRule;
import org.chromium.chrome.browser.customtabs.CustomTabsTestUtils;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Tests autofill assistant bottomsheet.
*/
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
@RunWith(ChromeJUnit4ClassRunner.class)
public class AutofillAssistantFormActionTest {
@Rule
public CustomTabActivityTestRule mTestRule = new CustomTabActivityTestRule();
private static final String TEST_PAGE = "/components/test/data/autofill_assistant/html/"
+ "autofill_assistant_target_website.html";
    @Before
    public void setUp() {
        // Mark the Autofill Assistant onboarding as accepted so tests skip the first-run flow.
        AutofillAssistantPreferencesUtil.setInitialPreferences(true);
        // Launch a CCT on the test server's target page before each test.
        mTestRule.startCustomTabActivityWithIntent(CustomTabsTestUtils.createMinimalCustomTabIntent(
                InstrumentationRegistry.getTargetContext(),
                mTestRule.getTestServer().getURL(TEST_PAGE)));
        // Disable scrim animation so Espresso interactions are not flaky on timing.
        mTestRule.getActivity().getScrim().disableAnimationForTesting(true);
    }
/**
* Creates a close-to-real example of a form action with multiple counters and choices,
* interacts with those widgets, and then checks the response to the server.
*/
@Test
@MediumTest
@DisableIf.Build(sdk_is_less_than = 21)
public void testFormAction() {
ArrayList<ActionProto> list = new ArrayList<>();
// FromProto.Builder, extracted to avoid excessive line widths.
FormProto.Builder formProto =
FormProto.newBuilder()
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder()
.addCounters(CounterInputProto.Counter.newBuilder()
.setMinValue(0)
.setMaxValue(1)
.setLabel("Counter 1")
.setDescriptionLine1("$34.00 per item")
.setDescriptionLine2(
"<link1>Details</link1>"))
.addCounters(
CounterInputProto.Counter.newBuilder()
.setMinValue(1)
.setMaxValue(9)
.setLabel("Counter 2")
.setDescriptionLine1("$387.00 per item"))
.setMinimizedCount(1)
.setMinCountersSum(2)
.setMinimizeText("Minimize")
.setExpandText("Expand")))
.addInputs(FormInputProto.newBuilder().setSelection(
SelectionInputProto.newBuilder()
.addChoices(SelectionInputProto.Choice.newBuilder()
.setLabel("Choice 1")
.setDescriptionLine1("$10.00 option")
.setDescriptionLine2(
"<link1>Details</link1>"))
.addChoices(SelectionInputProto.Choice.newBuilder()
.setLabel("Choice 2")
.setDescriptionLine1("$20.00 option")
.setDescriptionLine2(
"<link1>Details</link1>"))
.setAllowMultiple(false)))
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder().addCounters(
CounterInputProto.Counter.newBuilder()
.setMinValue(1)
.setMaxValue(9)
.setLabel("Counter 3")
.setDescriptionLine1("$20.00 per item")
.setDescriptionLine2("<link1>Details</link1>"))));
// FormAction.
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Continue"))
.setForm(formProto))
.build());
AutofillAssistantTestScript script = new AutofillAssistantTestScript(
(SupportedScriptProto) SupportedScriptProto.newBuilder()
.setPath("autofill_assistant_target_website.html")
.setPresentation(PresentationProto.newBuilder().setAutostart(true).setChip(
ChipProto.newBuilder().setText("Autostart")))
.build(),
list);
AutofillAssistantTestService testService =
new AutofillAssistantTestService(Collections.singletonList(script));
startAutofillAssistant(mTestRule.getActivity(), testService);
waitUntilViewMatchesCondition(withText("Continue"), isCompletelyDisplayed());
// TODO(b/144690738) Remove the isDisplayed() condition.
onView(allOf(withId(R.id.value), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.check(matches(hasTextColor(R.color.modern_grey_800_alpha_38)));
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.check(matches(hasTintColor(R.color.modern_blue_600)));
onView(allOf(withId(R.id.decrease_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.check(matches(hasTintColor(R.color.modern_grey_800_alpha_38)));
// Click on Counter 1 +, increase from 0 to 1.
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.perform(scrollTo(), click());
onView(allOf(withId(R.id.value), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.check(matches(hasTextColor(R.color.modern_blue_600)));
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.check(matches(hasTintColor(R.color.modern_grey_800_alpha_38)));
// Decrease button is still disabled due to the minCountersSum requirement.
// Click expand label to make Counter 2 visible.
onView(allOf(withId(R.id.expand_label), withEffectiveVisibility(VISIBLE)))
.perform(scrollTo(), click());
// Click on Counter 3 +, increase from 0 to 1.
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 3")))))
.perform(scrollTo(), click());
// Click on Choice 1, then Choice 2, then back to Choice 1.
onView(allOf(withClassName(is(RadioButton.class.getName())), withParentIndex(0),
withEffectiveVisibility(VISIBLE)))
.perform(scrollTo(), click());
onView(allOf(withClassName(is(RadioButton.class.getName())), withParentIndex(3),
withEffectiveVisibility(VISIBLE)))
.perform(scrollTo(), click());
onView(allOf(withClassName(is(RadioButton.class.getName())), withParentIndex(0),
withEffectiveVisibility(VISIBLE)))
.perform(scrollTo(), click());
// Check that choice 1 is visually selected and choice 2 is de-selected.
onView(allOf(withClassName(is(RadioButton.class.getName())), withParentIndex(0),
withEffectiveVisibility(VISIBLE)))
.check(matches(isChecked()));
onView(allOf(withClassName(is(RadioButton.class.getName())), withParentIndex(3),
withEffectiveVisibility(VISIBLE)))
.check(matches(not(isChecked())));
// Click on Counter 2 +, increase from 0 to 1.
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 2")))))
.perform(scrollTo(), click());
// Finish form action, wait for response and prepare next set of actions.
List<ActionProto> nextActions = new ArrayList<>();
nextActions.add((ActionProto) ActionProto.newBuilder()
.setPrompt(PromptProto.newBuilder()
.setMessage("Finished")
.addChoices(Choice.newBuilder().setChip(
ChipProto.newBuilder()
.setType(ChipType.DONE_ACTION)
.setText("End"))))
.build());
testService.setNextActions(nextActions);
waitUntilViewMatchesCondition(withText("Continue"), isEnabled());
int numNextActionsCalled = testService.getNextActionsCounter();
onView(withText("Continue")).perform(click());
testService.waitUntilGetNextActions(numNextActionsCalled + 1);
List<ProcessedActionProto> processedActions = testService.getProcessedActions();
assertThat(processedActions, iterableWithSize(1));
assertThat(
processedActions.get(0).getStatus(), is(ProcessedActionStatusProto.ACTION_APPLIED));
assertThat(processedActions.get(0).getResultDataCase(),
is(ProcessedActionProto.ResultDataCase.FORM_RESULT));
List<FormInputProto.Result> formResult =
processedActions.get(0).getFormResult().getInputResultsList();
assertThat(formResult.size(), is(3));
assertThat(formResult.get(0).getInputTypeCase(),
is(FormInputProto.Result.InputTypeCase.COUNTER));
assertThat(formResult.get(0).getCounter().getValuesCount(), is(2));
// Counter 1
assertThat(formResult.get(0).getCounter().getValues(0), is(1));
// Counter 2
assertThat(formResult.get(0).getCounter().getValues(1), is(1));
// Choice 1
assertThat(formResult.get(1).getInputTypeCase(),
is(FormInputProto.Result.InputTypeCase.SELECTION));
assertThat(formResult.get(1).getSelection().getSelectedCount(), is(2));
assertThat(formResult.get(1).getSelection().getSelected(0), is(true));
assertThat(formResult.get(1).getSelection().getSelected(1), is(false));
// Counter 3
assertThat(formResult.get(2).getInputTypeCase(),
is(FormInputProto.Result.InputTypeCase.COUNTER));
assertThat(formResult.get(2).getCounter().getValuesCount(), is(1));
assertThat(formResult.get(2).getCounter().getValues(0), is(1));
waitUntilViewMatchesCondition(withText("End"), isCompletelyDisplayed());
}
@Test
@MediumTest
@DisableIf.Build(sdk_is_less_than = 21)
public void testFormActionClickLink() {
ArrayList<ActionProto> list = new ArrayList<>();
// FromProto.Builder, extracted to avoid excessive line widths.
FormProto.Builder formProto =
FormProto.newBuilder().addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder()
.addCounters(CounterInputProto.Counter.newBuilder()
.setMinValue(1)
.setMaxValue(9)
.setLabel("Counter 1")
.setDescriptionLine1("$34.00 per item")
.setDescriptionLine2("<link4>Details</link4>")
.setInitialValue(1))
.setMinimizedCount(1)
.setMinCountersSum(2)
.setMinimizeText("Minimize")
.setExpandText("Expand")));
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Continue"))
.setForm(formProto))
.build());
AutofillAssistantTestScript script = new AutofillAssistantTestScript(
(SupportedScriptProto) SupportedScriptProto.newBuilder()
.setPath("autofill_assistant_target_website.html")
.setPresentation(PresentationProto.newBuilder().setAutostart(true).setChip(
ChipProto.newBuilder().setText("Autostart")))
.build(),
list);
AutofillAssistantTestService testService =
new AutofillAssistantTestService(Collections.singletonList(script));
startAutofillAssistant(mTestRule.getActivity(), testService);
// Click on Counter 1 +, increase from 1 to 2.
onView(allOf(withId(R.id.increase_button), withEffectiveVisibility(VISIBLE),
hasSibling(hasDescendant(withText("Counter 1")))))
.perform(scrollTo(), click());
int numNextActionsCalled = testService.getNextActionsCounter();
onView(allOf(isDisplayed(), withText("Details"))).perform(click());
testService.waitUntilGetNextActions(numNextActionsCalled + 1);
List<ProcessedActionProto> processedActions = testService.getProcessedActions();
assertThat(processedActions, iterableWithSize(1));
assertThat(
processedActions.get(0).getStatus(), is(ProcessedActionStatusProto.ACTION_APPLIED));
assertThat(processedActions.get(0).getResultDataCase(),
is(ProcessedActionProto.ResultDataCase.FORM_RESULT));
List<FormInputProto.Result> formResult =
processedActions.get(0).getFormResult().getInputResultsList();
assertThat(formResult.size(), is(1));
assertThat(processedActions.get(0).getFormResult().getLink(), is(4));
assertThat(formResult.get(0).getInputTypeCase(),
is(FormInputProto.Result.InputTypeCase.COUNTER));
assertThat(formResult.get(0).getCounter().getValuesCount(), is(1));
// Counter 1
assertThat(formResult.get(0).getCounter().getValues(0), is(2));
}
@Test
@MediumTest
@DisableIf.Build(sdk_is_less_than = 21)
public void testInfoPopup() {
ArrayList<ActionProto> list = new ArrayList<>();
// FromProto.Builder, extracted to avoid excessive line widths.
FormProto.Builder formProto =
FormProto.newBuilder()
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder().addCounters(
Counter.newBuilder()
.setLabel("Counter 1")
.setDescriptionLine1("$20.00 per tick")
.setDescriptionLine2("<link1>Details</link1>"))))
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder().addCounters(
Counter.newBuilder()
.setLabel("Counter 2")
.setDescriptionLine1("$20.00 per tick")
.setDescriptionLine2("<link1>Details</link1>"))))
.setInfoLabel("<b>Info label with bold text</b>")
.setInfoPopup(
InfoPopupProto.newBuilder()
.setTitle("Prompt title")
.setText("Prompt text")
.setNeutralButton(
DialogButton.newBuilder()
.setLabel("Go to url")
.setOpenUrlInCct(
OpenUrlInCCT.newBuilder().setUrl(
"https://www.google.com"))));
// FormAction.
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Continue"))
.setForm(formProto))
.build());
AutofillAssistantTestScript script = new AutofillAssistantTestScript(
(SupportedScriptProto) SupportedScriptProto.newBuilder()
.setPath("autofill_assistant_target_website.html")
.setPresentation(PresentationProto.newBuilder().setAutostart(true).setChip(
ChipProto.newBuilder().setText("Autostart")))
.build(),
list);
AutofillAssistantTestService testService =
new AutofillAssistantTestService(Collections.singletonList(script));
startAutofillAssistant(mTestRule.getActivity(), testService);
waitUntilViewMatchesCondition(withText("Continue"), isCompletelyDisplayed());
onView(withText("Info label with bold text"))
.check(matches(withEffectiveVisibility(VISIBLE)));
onView(withText("Info label with bold text"))
.check(matches(hasTypefaceSpan(
0, "Info label with bold text".length() - 1, Typeface.BOLD)));
onView(withId(R.id.info_button)).perform(click());
waitUntilViewMatchesCondition(withText("Prompt title"), isCompletelyDisplayed());
waitUntilViewMatchesCondition(withText("Prompt text"), isCompletelyDisplayed());
Intent intent = new Intent();
ActivityResult intentResult = new ActivityResult(Activity.RESULT_OK, intent);
Intents.init();
intending(anyIntent()).respondWith(intentResult);
onView(withText("Go to url")).perform(click());
intended(
allOf(hasAction(Intent.ACTION_VIEW), hasData(Uri.parse("https://www.google.com"))));
Intents.release();
}
@Test
@MediumTest
@DisableIf.Build(sdk_is_less_than = 21)
public void testInfoPopupNoButtons() {
ArrayList<ActionProto> list = new ArrayList<>();
// FromProto.Builder, extracted to avoid excessive line widths.
FormProto.Builder formProto =
FormProto.newBuilder()
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder().addCounters(
Counter.newBuilder()
.setLabel("Counter")
.setDescriptionLine1("$20.00 per tick")
.setDescriptionLine2("<link1>Details</link1>"))))
.setInfoLabel("Info label")
.setInfoPopup(InfoPopupProto.newBuilder()
.setTitle("Prompt title")
.setText("Prompt text"));
// FormAction.
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Continue"))
.setForm(formProto))
.build());
AutofillAssistantTestScript script = new AutofillAssistantTestScript(
(SupportedScriptProto) SupportedScriptProto.newBuilder()
.setPath("autofill_assistant_target_website.html")
.setPresentation(PresentationProto.newBuilder().setAutostart(true).setChip(
ChipProto.newBuilder().setText("Autostart")))
.build(),
list);
AutofillAssistantTestService testService =
new AutofillAssistantTestService(Collections.singletonList(script));
startAutofillAssistant(mTestRule.getActivity(), testService);
waitUntilViewMatchesCondition(withText("Continue"), isCompletelyDisplayed());
onView(withId(R.id.info_button)).perform(click());
waitUntilViewMatchesCondition(withText("Prompt title"), isCompletelyDisplayed());
// If no button is set in the proto a "Close" button should be added by default.
onView(withText("Close")).perform(click());
onView(withText("Prompt title")).check(doesNotExist());
}
@Test
@MediumTest
@DisableIf.Build(sdk_is_less_than = 21)
public void testMultipleForms() {
ArrayList<ActionProto> list = new ArrayList<>();
// FromProto.Builder, extracted to avoid excessive line widths.
FormProto.Builder formProtoWithInfo =
FormProto.newBuilder()
.addInputs(FormInputProto.newBuilder().setCounter(
CounterInputProto.newBuilder().addCounters(
Counter.newBuilder()
.setLabel("Counter 1")
.setDescriptionLine1("$20.00 per tick")
.setDescriptionLine2("<link1>Details</link1>"))))
.setInfoLabel("Info label");
// FormAction.
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Continue"))
.setForm(formProtoWithInfo))
.build());
FormProto.Builder formProto = FormProto.newBuilder().addInputs(
FormInputProto.newBuilder().setCounter(CounterInputProto.newBuilder().addCounters(
Counter.newBuilder()
.setLabel("Counter 1")
.setDescriptionLine1("$20.00 per tick")
.setDescriptionLine2("<link1>Details</link1>"))));
// FormAction.
list.add((ActionProto) ActionProto.newBuilder()
.setShowForm(ShowFormProto.newBuilder()
.setChip(ChipProto.newBuilder()
.setType(ChipType.HIGHLIGHTED_ACTION)
.setText("Finish"))
.setForm(formProto))
.build());
AutofillAssistantTestScript script = new AutofillAssistantTestScript(
(SupportedScriptProto) SupportedScriptProto.newBuilder()
.setPath("autofill_assistant_target_website.html")
.setPresentation(PresentationProto.newBuilder().setAutostart(true).setChip(
ChipProto.newBuilder().setText("Autostart")))
.build(),
list);
AutofillAssistantTestService testService =
new AutofillAssistantTestService(Collections.singletonList(script));
startAutofillAssistant(mTestRule.getActivity(), testService);
waitUntilViewMatchesCondition(withText("Continue"), isCompletelyDisplayed());
onView(withText("Info label")).check(matches(isCompletelyDisplayed()));
onView(withId(R.id.info_button)).check(matches(not(isDisplayed())));
onView(withText("Continue")).perform(click());
waitUntilViewMatchesCondition(withText("Finish"), isCompletelyDisplayed());
onView(withText("Info label")).check(matches(not(isDisplayed())));
onView(withId(R.id.info_button)).check(matches(not(isDisplayed())));
}
}
| |
package com.franktan.popularmovies.data.trailer;
import java.util.Date;
import android.content.Context;
import android.content.ContentResolver;
import android.database.Cursor;
import android.net.Uri;
import com.franktan.popularmovies.data.base.AbstractSelection;
import com.franktan.popularmovies.data.movie.*;
/**
 * Selection for the {@code trailer} table.
 *
 * <p>Fluent builder of SQL selections for querying trailers (optionally
 * filtered/ordered by columns of the joined {@code movie} table).
 *
 * <p>NOTE(review): this class follows the android-contentprovider-generator
 * boilerplate pattern — presumably generated; confirm before hand-editing.
 */
public class TrailerSelection extends AbstractSelection<TrailerSelection> {
    @Override
    protected Uri baseUri() {
        return TrailerColumns.CONTENT_URI;
    }
    /**
     * Query the given content resolver using this selection.
     *
     * @param contentResolver The content resolver to query.
     * @param projection A list of which columns to return. Passing null will return all columns, which is inefficient.
     * @return A {@code TrailerCursor} object, which is positioned before the first entry, or null.
     */
    public TrailerCursor query(ContentResolver contentResolver, String[] projection) {
        Cursor cursor = contentResolver.query(uri(), projection, sel(), args(), order());
        if (cursor == null) return null;
        return new TrailerCursor(cursor);
    }
    /**
     * Equivalent of calling {@code query(contentResolver, null)}.
     */
    public TrailerCursor query(ContentResolver contentResolver) {
        return query(contentResolver, null);
    }
    /**
     * Query the given content resolver using this selection.
     *
     * @param context The context to use for the query.
     * @param projection A list of which columns to return. Passing null will return all columns, which is inefficient.
     * @return A {@code TrailerCursor} object, which is positioned before the first entry, or null.
     */
    public TrailerCursor query(Context context, String[] projection) {
        Cursor cursor = context.getContentResolver().query(uri(), projection, sel(), args(), order());
        if (cursor == null) return null;
        return new TrailerCursor(cursor);
    }
    /**
     * Equivalent of calling {@code query(context, null)}.
     */
    public TrailerCursor query(Context context) {
        return query(context, null);
    }
    // ----- trailer._id -----
    // The _id column is table-qualified to disambiguate it from the joined
    // movie table's _id.
    public TrailerSelection id(long... value) {
        addEquals("trailer." + TrailerColumns._ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection idNot(long... value) {
        addNotEquals("trailer." + TrailerColumns._ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection orderById(boolean desc) {
        orderBy("trailer." + TrailerColumns._ID, desc);
        return this;
    }
    public TrailerSelection orderById() {
        return orderById(false);
    }
    // ----- trailer.name -----
    public TrailerSelection name(String... value) {
        addEquals(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection nameNot(String... value) {
        addNotEquals(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection nameLike(String... value) {
        addLike(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection nameContains(String... value) {
        addContains(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection nameStartsWith(String... value) {
        addStartsWith(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection nameEndsWith(String... value) {
        addEndsWith(TrailerColumns.NAME, value);
        return this;
    }
    public TrailerSelection orderByName(boolean desc) {
        orderBy(TrailerColumns.NAME, desc);
        return this;
    }
    public TrailerSelection orderByName() {
        orderBy(TrailerColumns.NAME, false);
        return this;
    }
    // ----- trailer.size -----
    public TrailerSelection size(String... value) {
        addEquals(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection sizeNot(String... value) {
        addNotEquals(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection sizeLike(String... value) {
        addLike(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection sizeContains(String... value) {
        addContains(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection sizeStartsWith(String... value) {
        addStartsWith(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection sizeEndsWith(String... value) {
        addEndsWith(TrailerColumns.SIZE, value);
        return this;
    }
    public TrailerSelection orderBySize(boolean desc) {
        orderBy(TrailerColumns.SIZE, desc);
        return this;
    }
    public TrailerSelection orderBySize() {
        orderBy(TrailerColumns.SIZE, false);
        return this;
    }
    // ----- trailer.source -----
    public TrailerSelection source(String... value) {
        addEquals(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection sourceNot(String... value) {
        addNotEquals(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection sourceLike(String... value) {
        addLike(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection sourceContains(String... value) {
        addContains(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection sourceStartsWith(String... value) {
        addStartsWith(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection sourceEndsWith(String... value) {
        addEndsWith(TrailerColumns.SOURCE, value);
        return this;
    }
    public TrailerSelection orderBySource(boolean desc) {
        orderBy(TrailerColumns.SOURCE, desc);
        return this;
    }
    public TrailerSelection orderBySource() {
        orderBy(TrailerColumns.SOURCE, false);
        return this;
    }
    // ----- trailer.type -----
    public TrailerSelection type(String... value) {
        addEquals(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection typeNot(String... value) {
        addNotEquals(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection typeLike(String... value) {
        addLike(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection typeContains(String... value) {
        addContains(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection typeStartsWith(String... value) {
        addStartsWith(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection typeEndsWith(String... value) {
        addEndsWith(TrailerColumns.TYPE, value);
        return this;
    }
    public TrailerSelection orderByType(boolean desc) {
        orderBy(TrailerColumns.TYPE, desc);
        return this;
    }
    public TrailerSelection orderByType() {
        orderBy(TrailerColumns.TYPE, false);
        return this;
    }
    // ----- trailer.movie_id (foreign key into the movie table) -----
    public TrailerSelection movieId(long... value) {
        addEquals(TrailerColumns.MOVIE_ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection movieIdNot(long... value) {
        addNotEquals(TrailerColumns.MOVIE_ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection movieIdGt(long value) {
        addGreaterThan(TrailerColumns.MOVIE_ID, value);
        return this;
    }
    public TrailerSelection movieIdGtEq(long value) {
        addGreaterThanOrEquals(TrailerColumns.MOVIE_ID, value);
        return this;
    }
    public TrailerSelection movieIdLt(long value) {
        addLessThan(TrailerColumns.MOVIE_ID, value);
        return this;
    }
    public TrailerSelection movieIdLtEq(long value) {
        addLessThanOrEquals(TrailerColumns.MOVIE_ID, value);
        return this;
    }
    public TrailerSelection orderByMovieId(boolean desc) {
        orderBy(TrailerColumns.MOVIE_ID, desc);
        return this;
    }
    public TrailerSelection orderByMovieId() {
        orderBy(TrailerColumns.MOVIE_ID, false);
        return this;
    }
    // ----- joined movie.movie_moviedb_id -----
    public TrailerSelection movieMovieMoviedbId(long... value) {
        addEquals(MovieColumns.MOVIE_MOVIEDB_ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection movieMovieMoviedbIdNot(long... value) {
        addNotEquals(MovieColumns.MOVIE_MOVIEDB_ID, toObjectArray(value));
        return this;
    }
    public TrailerSelection movieMovieMoviedbIdGt(long value) {
        addGreaterThan(MovieColumns.MOVIE_MOVIEDB_ID, value);
        return this;
    }
    public TrailerSelection movieMovieMoviedbIdGtEq(long value) {
        addGreaterThanOrEquals(MovieColumns.MOVIE_MOVIEDB_ID, value);
        return this;
    }
    public TrailerSelection movieMovieMoviedbIdLt(long value) {
        addLessThan(MovieColumns.MOVIE_MOVIEDB_ID, value);
        return this;
    }
    public TrailerSelection movieMovieMoviedbIdLtEq(long value) {
        addLessThanOrEquals(MovieColumns.MOVIE_MOVIEDB_ID, value);
        return this;
    }
    public TrailerSelection orderByMovieMovieMoviedbId(boolean desc) {
        orderBy(MovieColumns.MOVIE_MOVIEDB_ID, desc);
        return this;
    }
    public TrailerSelection orderByMovieMovieMoviedbId() {
        orderBy(MovieColumns.MOVIE_MOVIEDB_ID, false);
        return this;
    }
    // ----- joined movie.title -----
    public TrailerSelection movieTitle(String... value) {
        addEquals(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection movieTitleNot(String... value) {
        addNotEquals(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection movieTitleLike(String... value) {
        addLike(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection movieTitleContains(String... value) {
        addContains(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection movieTitleStartsWith(String... value) {
        addStartsWith(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection movieTitleEndsWith(String... value) {
        addEndsWith(MovieColumns.TITLE, value);
        return this;
    }
    public TrailerSelection orderByMovieTitle(boolean desc) {
        orderBy(MovieColumns.TITLE, desc);
        return this;
    }
    public TrailerSelection orderByMovieTitle() {
        orderBy(MovieColumns.TITLE, false);
        return this;
    }
    // ----- joined movie.backdrop_path -----
    public TrailerSelection movieBackdropPath(String... value) {
        addEquals(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection movieBackdropPathNot(String... value) {
        addNotEquals(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection movieBackdropPathLike(String... value) {
        addLike(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection movieBackdropPathContains(String... value) {
        addContains(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection movieBackdropPathStartsWith(String... value) {
        addStartsWith(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection movieBackdropPathEndsWith(String... value) {
        addEndsWith(MovieColumns.BACKDROP_PATH, value);
        return this;
    }
    public TrailerSelection orderByMovieBackdropPath(boolean desc) {
        orderBy(MovieColumns.BACKDROP_PATH, desc);
        return this;
    }
    public TrailerSelection orderByMovieBackdropPath() {
        orderBy(MovieColumns.BACKDROP_PATH, false);
        return this;
    }
    // ----- joined movie.original_lan -----
    public TrailerSelection movieOriginalLan(String... value) {
        addEquals(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection movieOriginalLanNot(String... value) {
        addNotEquals(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection movieOriginalLanLike(String... value) {
        addLike(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection movieOriginalLanContains(String... value) {
        addContains(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection movieOriginalLanStartsWith(String... value) {
        addStartsWith(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection movieOriginalLanEndsWith(String... value) {
        addEndsWith(MovieColumns.ORIGINAL_LAN, value);
        return this;
    }
    public TrailerSelection orderByMovieOriginalLan(boolean desc) {
        orderBy(MovieColumns.ORIGINAL_LAN, desc);
        return this;
    }
    public TrailerSelection orderByMovieOriginalLan() {
        orderBy(MovieColumns.ORIGINAL_LAN, false);
        return this;
    }
    // ----- joined movie.original_title -----
    public TrailerSelection movieOriginalTitle(String... value) {
        addEquals(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection movieOriginalTitleNot(String... value) {
        addNotEquals(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection movieOriginalTitleLike(String... value) {
        addLike(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection movieOriginalTitleContains(String... value) {
        addContains(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection movieOriginalTitleStartsWith(String... value) {
        addStartsWith(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection movieOriginalTitleEndsWith(String... value) {
        addEndsWith(MovieColumns.ORIGINAL_TITLE, value);
        return this;
    }
    public TrailerSelection orderByMovieOriginalTitle(boolean desc) {
        orderBy(MovieColumns.ORIGINAL_TITLE, desc);
        return this;
    }
    public TrailerSelection orderByMovieOriginalTitle() {
        orderBy(MovieColumns.ORIGINAL_TITLE, false);
        return this;
    }
    // ----- joined movie.overview -----
    public TrailerSelection movieOverview(String... value) {
        addEquals(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection movieOverviewNot(String... value) {
        addNotEquals(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection movieOverviewLike(String... value) {
        addLike(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection movieOverviewContains(String... value) {
        addContains(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection movieOverviewStartsWith(String... value) {
        addStartsWith(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection movieOverviewEndsWith(String... value) {
        addEndsWith(MovieColumns.OVERVIEW, value);
        return this;
    }
    public TrailerSelection orderByMovieOverview(boolean desc) {
        orderBy(MovieColumns.OVERVIEW, desc);
        return this;
    }
    public TrailerSelection orderByMovieOverview() {
        orderBy(MovieColumns.OVERVIEW, false);
        return this;
    }
    // ----- joined movie.release_date (stored as epoch millis in a Long column) -----
    public TrailerSelection movieReleaseDate(Long... value) {
        addEquals(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection movieReleaseDateNot(Long... value) {
        addNotEquals(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection movieReleaseDateGt(long value) {
        addGreaterThan(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection movieReleaseDateGtEq(long value) {
        addGreaterThanOrEquals(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection movieReleaseDateLt(long value) {
        addLessThan(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection movieReleaseDateLtEq(long value) {
        addLessThanOrEquals(MovieColumns.RELEASE_DATE, value);
        return this;
    }
    public TrailerSelection orderByMovieReleaseDate(boolean desc) {
        orderBy(MovieColumns.RELEASE_DATE, desc);
        return this;
    }
    public TrailerSelection orderByMovieReleaseDate() {
        orderBy(MovieColumns.RELEASE_DATE, false);
        return this;
    }
    // ----- joined movie.poster_path -----
    public TrailerSelection moviePosterPath(String... value) {
        addEquals(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection moviePosterPathNot(String... value) {
        addNotEquals(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection moviePosterPathLike(String... value) {
        addLike(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection moviePosterPathContains(String... value) {
        addContains(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection moviePosterPathStartsWith(String... value) {
        addStartsWith(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection moviePosterPathEndsWith(String... value) {
        addEndsWith(MovieColumns.POSTER_PATH, value);
        return this;
    }
    public TrailerSelection orderByMoviePosterPath(boolean desc) {
        orderBy(MovieColumns.POSTER_PATH, desc);
        return this;
    }
    public TrailerSelection orderByMoviePosterPath() {
        orderBy(MovieColumns.POSTER_PATH, false);
        return this;
    }
    // ----- joined movie.popularity -----
    public TrailerSelection moviePopularity(Double... value) {
        addEquals(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection moviePopularityNot(Double... value) {
        addNotEquals(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection moviePopularityGt(double value) {
        addGreaterThan(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection moviePopularityGtEq(double value) {
        addGreaterThanOrEquals(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection moviePopularityLt(double value) {
        addLessThan(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection moviePopularityLtEq(double value) {
        addLessThanOrEquals(MovieColumns.POPULARITY, value);
        return this;
    }
    public TrailerSelection orderByMoviePopularity(boolean desc) {
        orderBy(MovieColumns.POPULARITY, desc);
        return this;
    }
    public TrailerSelection orderByMoviePopularity() {
        orderBy(MovieColumns.POPULARITY, false);
        return this;
    }
    // ----- joined movie.vote_average -----
    public TrailerSelection movieVoteAverage(Double... value) {
        addEquals(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection movieVoteAverageNot(Double... value) {
        addNotEquals(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection movieVoteAverageGt(double value) {
        addGreaterThan(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection movieVoteAverageGtEq(double value) {
        addGreaterThanOrEquals(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection movieVoteAverageLt(double value) {
        addLessThan(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection movieVoteAverageLtEq(double value) {
        addLessThanOrEquals(MovieColumns.VOTE_AVERAGE, value);
        return this;
    }
    public TrailerSelection orderByMovieVoteAverage(boolean desc) {
        orderBy(MovieColumns.VOTE_AVERAGE, desc);
        return this;
    }
    public TrailerSelection orderByMovieVoteAverage() {
        orderBy(MovieColumns.VOTE_AVERAGE, false);
        return this;
    }
    // ----- joined movie.vote_count -----
    public TrailerSelection movieVoteCount(Integer... value) {
        addEquals(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection movieVoteCountNot(Integer... value) {
        addNotEquals(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection movieVoteCountGt(int value) {
        addGreaterThan(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection movieVoteCountGtEq(int value) {
        addGreaterThanOrEquals(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection movieVoteCountLt(int value) {
        addLessThan(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection movieVoteCountLtEq(int value) {
        addLessThanOrEquals(MovieColumns.VOTE_COUNT, value);
        return this;
    }
    public TrailerSelection orderByMovieVoteCount(boolean desc) {
        orderBy(MovieColumns.VOTE_COUNT, desc);
        return this;
    }
    public TrailerSelection orderByMovieVoteCount() {
        orderBy(MovieColumns.VOTE_COUNT, false);
        return this;
    }
}
| |
package me.lyneira.MachinaPlanter;
import java.util.Collection;
import java.util.EnumMap;
import java.util.Map;
import me.lyneira.MachinaCore.BlockLocation;
import me.lyneira.MachinaCore.BlockRotation;
import me.lyneira.MachinaCore.Fuel;
import me.lyneira.MachinaCore.HeartBeatEvent;
import me.lyneira.MachinaCore.Machina;
import me.lyneira.MachinaCore.Tool;
import me.lyneira.MachinaPlanter.crop.CropCarrot;
import me.lyneira.MachinaPlanter.crop.CropCocoa;
import me.lyneira.MachinaPlanter.crop.CropHandler;
import me.lyneira.MachinaPlanter.crop.CropMelon;
import me.lyneira.MachinaPlanter.crop.CropNetherWart;
import me.lyneira.MachinaPlanter.crop.CropPotato;
import me.lyneira.MachinaPlanter.crop.CropPumpkin;
import me.lyneira.MachinaPlanter.crop.CropWheat;
import me.lyneira.util.InventoryManager;
import me.lyneira.util.InventoryTransaction;
import org.bukkit.Material;
import org.bukkit.block.BlockFace;
import org.bukkit.block.Furnace;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.entity.Player;
import org.bukkit.inventory.FurnaceInventory;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import com.google.common.base.Predicate;
public class Planter implements Machina {
private static int delay = 20;
private final static int harvestCost = 20;
private final static int plantingCost = 10;
static int maxLength = 16;
static int maxWidth = 10;
private static boolean useEnergy = false;
private static boolean useTool = true;
private static Map<Material, CropHandler> harvestableMap = new EnumMap<Material, CropHandler>(Material.class);
private static Map<Material, CropHandler> plantableMap = new EnumMap<Material, CropHandler>(Material.class);
private final Rail rail;
private final BlockLocation lever;
private final BlockLocation base;
private final BlockLocation chest;
private final BlockLocation furnace;
private final BlockRotation furnaceYaw;
private final boolean harvest;
private State state;
private int currentEnergy = 0;
    /**
     * Creates a planter from its detected component blocks and lights the
     * furnace to mark the machina as active.
     *
     * @param rail the rail structure the planter operates along
     * @param lever location of the activation lever
     * @param base location of the base block
     * @param chest location of the chest (seeds, tools, harvested items)
     * @param furnace location of the fuel furnace
     * @param furnaceYaw orientation of the furnace
     * @param harvest whether this planter should also harvest ripe crops
     */
    Planter(Rail rail, BlockLocation lever, BlockLocation base, BlockLocation chest, BlockLocation furnace, BlockRotation furnaceYaw, boolean harvest) {
        this.rail = rail;
        this.lever = lever;
        this.base = base;
        this.chest = chest;
        this.furnace = furnace;
        this.furnaceYaw = furnaceYaw;
        this.harvest = harvest;
        // Light the furnace to show the machina is running.
        Fuel.setFurnace(furnace.getBlock(), furnaceYaw, true);
        state = activate;
    }
@Override
public boolean verify(BlockLocation anchor) {
if (!(anchor.checkType(Blueprint.anchorMaterial) //
&& lever.checkType(Material.LEVER) //
&& base.checkType(Blueprint.baseMaterial) //
&& chest.checkType(Material.CHEST) //
&& furnace.checkTypes(Material.BURNING_FURNACE))){
if(furnace.checkType(Material.FURNACE)) {
} else {
return false;
}
}
return rail.verify();
}
@Override
public HeartBeatEvent heartBeat(BlockLocation anchor) {
state = state.run();
if (state == null)
return null;
return new HeartBeatEvent(delay);
}
@Override
public boolean onLever(BlockLocation anchor, Player player, ItemStack itemInHand) {
if (player.hasPermission("machinaplanter.activate")) {
if (state == activate || state == plant)
state = deactivate;
}
return true;
}
    /**
     * Shuts the planter down, returning the furnace to its unlit state.
     */
    @Override
    public void onDeActivate(BlockLocation anchor) {
        Fuel.setFurnace(furnace.getBlock(), furnaceYaw, false);
    }
/**
* Runs all the necessary planter actions for the current tile.
*
* @throws NoEnergyException
*/
private void operateOnTile() throws NoEnergyException {
if (rail.getRowType() == RailType.SKIP)
return;
BlockLocation tile = rail.currentTile();
BlockLocation crop = tile.getRelative(BlockFace.UP);
// Attempt to till to farmland
till(tile, crop);
if (harvest)
harvest(crop);
plant(tile, crop);
}
/**
* Attempts to till the current tile if appropriate.
*
* @param tile
* @param crop
* @throws NoEnergyException
*/
private void till(BlockLocation tile, BlockLocation crop) throws NoEnergyException {
switch (tile.getType()) {
case DIRT:
case GRASS:
switch (crop.getType()) {
case SNOW:
case LONG_GRASS:
crop.setEmpty();
case AIR:
useEnergy(plantingCost);
try {
useTool();
tile.setType(Material.SOIL);
} catch (NoToolException e) {
}
default:
break;
}
break;
default:
break;
}
}
/**
* Attempts to harvest the current crop block.
*
* @param crop
* The block being harvested
* @throws NoEnergyException
*/
private void harvest(BlockLocation crop) throws NoEnergyException {
CropHandler handler = harvestableMap.get(crop.getType());
if (handler == null)
return;
boolean canHarvest = handler.isRipe(crop);
if (!canHarvest) {
if (handler.canUseBonemealAtHarvest()) {
/*
* Use bonemeal, but it may still fail to produce a harvestable
* crop.
*/
InventoryManager manager = new InventoryManager(chestInventory());
if (manager.findItemTypeAndData(Material.INK_SACK.getId(), (byte) 15)
// Attempt to use bonemeal if found
&& handler.useBonemeal(crop)) {
manager.decrement();
canHarvest = handler.isRipe(crop);
}
}
}
if (canHarvest) {
InventoryTransaction transaction = new InventoryTransaction(chestInventory());
Collection<ItemStack> drops = handler.getDrops();
if (drops == null) {
// Ask Bukkit
drops = crop.getBlock().getDrops();
}
transaction.add(drops);
useEnergy(harvestCost);
try {
useTool();
} catch (NoToolException e) {
return;
}
if (!transaction.execute()) {
return;
}
crop.setEmpty();
}
}
/**
* Attempts to plant a crop on the current block.
*
* @param tile
* The ground tile being planted in
* @param crop
* The block above the tile
* @throws NoEnergyException
*/
private void plant(BlockLocation tile, BlockLocation crop) throws NoEnergyException {
if (rail.getRowType() != RailType.PLANT)
return;
// Cannot plant in a non-empty space.
if (!crop.isEmpty())
return;
InventoryManager manager = new InventoryManager(chestInventory());
if (!manager.find(plantable))
return;
ItemStack item = manager.get();
Material seedType = item.getType();
CropHandler handler = plantableMap.get(seedType);
if (handler == null) {
MachinaPlanter.log("SEVERE: Got a null CropHandler after finding a suitable item to plant!");
return;
}
if (!handler.canPlant(tile))
return;
useEnergy(plantingCost);
manager.decrement();
boolean usedBonemeal = false;
if (handler.canUseBonemealWhilePlanting()) {
usedBonemeal = manager.findItemTypeAndData(Material.INK_SACK.getId(), (byte) 15);
if (usedBonemeal) {
manager.decrement();
}
}
handler.plant(crop, usedBonemeal);
}
private void useTool() throws NoToolException {
if (!useTool)
return;
FurnaceInventory furnaceInventory = ((Furnace) furnace.getBlock().getState()).getInventory();
if (!Tool.useInFurnace(furnaceInventory, planterTool, chestInventory())) {
throw new NoToolException();
}
}
/**
* Uses the given amount of energy and returns true if successful.
*
* @param energy
* The amount of energy needed for the next action
* @return True if enough energy could be used up
*/
protected void useEnergy(final int energy) throws NoEnergyException {
if (!useEnergy)
return;
while (currentEnergy < energy) {
int newFuel = Fuel.consume((Furnace) furnace.getBlock().getState());
if (newFuel > 0) {
currentEnergy += newFuel;
} else {
throw new NoEnergyException();
}
}
currentEnergy -= energy;
}
private final Inventory chestInventory() {
return ((InventoryHolder) chest.getBlock().getState()).getInventory();
}
private interface State {
State run();
}
private final State activate = new State() {
@Override
public State run() {
if (rail.activate()) {
try {
operateOnTile();
return plant;
} catch (NoEnergyException e) {
return deactivate;
}
} else
return parkHead.run();
}
};
private final State deactivate = new State() {
@Override
public State run() {
if (rail.deactivate()) {
return parkInit;
} else {
return null;
}
}
};
private final State plant = new State() {
@Override
public State run() {
if (rail.nextTile()) {
try {
operateOnTile();
return this;
} catch (NoEnergyException e) {
return deactivate;
}
} else {
return deactivate.run();
}
}
};
/**
* In this state the planter will retract the moving rail if the head's
* current direction is backward.
*/
private final State parkInit = new State() {
@Override
public State run() {
if (rail.isHeadMovingBackward()) {
rail.retract();
return parkHead;
} else {
return parkHead.run();
}
}
};
/**
* In this state the planter will pull the head back to its parking
* position.
*/
private final State parkHead = new State() {
@Override
public State run() {
switch (rail.retractHead()) {
case OK:
return this;
case RAIL_END:
return parkMovingRail.run();
default:
return null;
}
}
};
/**
* In this state the planter will retract the moving rail back to its
* parking position.
*/
private final State parkMovingRail = new State() {
@Override
public State run() {
if (rail.retract()) {
return this;
} else {
return null;
}
}
};
// Statics
/**
* Allows for adding crop types for the planter to handle.
*
* @param handler
*/
public static void addCrop(CropHandler handler) {
if (harvestableMap.containsKey(handler.getHarvestableMaterial())) {
MachinaPlanter.log("Warning: Crophandler " + handler.getClass().getName() + " is overriding existing harvestable mapping for " + handler.getHarvestableMaterial().toString());
}
harvestableMap.put(handler.getHarvestableMaterial(), handler);
if (plantableMap.containsKey(handler.getPlantableItem())) {
MachinaPlanter.log("Warning: Crophandler " + handler.getClass().getName() + " is overriding existing plantable mapping for " + handler.getPlantableItem().toString());
}
plantableMap.put(handler.getPlantableItem(), handler);
}
private static final Predicate<ItemStack> plantable = new Predicate<ItemStack>() {
@Override
public boolean apply(ItemStack item) {
if (item == null)
return false;
CropHandler handler = plantableMap.get(item.getType());
if (handler == null)
return false;
return handler.checkPlantableItemData(item.getData());
}
};
private static final Predicate<ItemStack> planterTool = new Predicate<ItemStack>() {
@Override
public boolean apply(ItemStack item) {
if (item == null)
return false;
switch (item.getType()) {
case DIAMOND_HOE:
case GOLD_HOE:
case IRON_HOE:
case STONE_HOE:
case WOOD_HOE:
return true;
default:
return false;
}
}
};
/**
* Loads the given configuration.
*
* @param configuration
*/
static void loadConfiguration(ConfigurationSection configuration) {
delay = Math.max(configuration.getInt("action-delay", delay), 1);
maxLength = Math.min(Math.max(configuration.getInt("max-length", maxLength), 1), 64);
maxWidth = Math.min(Math.max(configuration.getInt("max-width", maxWidth), 1), 64);
boolean harvestWheat = configuration.getBoolean("harvest-wheat", true);
boolean harvestWheatSeeds = configuration.getBoolean("harvest-wheat-seeds", true);
boolean harvestPumpkin = configuration.getBoolean("harvest-pumpkin", true);
boolean harvestMelon = configuration.getBoolean("harvest-melon", true);
boolean harvestNetherWart = configuration.getBoolean("harvest-netherwart", true);
boolean harvestCarrot = configuration.getBoolean("harvest-carrot", true);
boolean harvestPotato = configuration.getBoolean("harvest-potato", true);
boolean harvestCocoa = configuration.getBoolean("harvest-cocoa", true);
useEnergy = configuration.getBoolean("use-energy", useEnergy);
useTool = configuration.getBoolean("use-tool", useTool);
// Add all the default crops.
addCrop(new CropWheat(harvestWheat, harvestWheatSeeds));
addCrop(new CropPumpkin(harvestPumpkin));
addCrop(new CropMelon(harvestMelon));
addCrop(new CropNetherWart(harvestNetherWart));
addCrop(new CropCarrot(harvestCarrot));
addCrop(new CropPotato(harvestPotato));
addCrop(new CropCocoa(harvestCocoa));
}
}
| |
package fsb.semantics.empeagerpso;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.WeakHashMap;
import fsb.semantics.AbsSharedResVal;
import fsb.semantics.BufVal;
import fsb.utils.Options;
/**
 * Abstract value of a shared resource under a weak memory model: a precise
 * global value plus one abstract store buffer per process. Buffers are
 * interned in a weak cache so structurally equal buffers share one instance.
 */
public class SharedResVal implements Cloneable, AbsSharedResVal
{
    /** Intern cache: structurally equal buffers map to a canonical instance. */
    private static WeakHashMap<AbstractBuffer, AbstractBuffer> cachedBuffers = new WeakHashMap<AbstractBuffer, AbstractBuffer>();

    /**
     * Per-process store buffer: a bounded precise head (FIFO), an imprecise
     * overflow set, and the last value written.
     * NOTE(review): non-static inner class — it keeps a hidden reference to
     * the enclosing SharedResVal; consider making it static nested (verify no
     * external code relies on the enclosing instance first).
     */
    public class AbstractBuffer implements Cloneable
    {
        /** Last value stored into this buffer. */
        int last;
        /** Imprecise overflow: values whose order is no longer tracked. */
        HashSet<BufVal> set;
        /** Precise FIFO head of the buffer, bounded by headlen. */
        LinkedList<BufVal> head;
        /** True while the buffer content is fully captured by the head. */
        boolean precise;

        public AbstractBuffer()
        {
            precise = true;
            set = new HashSet<BufVal>(2);
            head = new LinkedList<BufVal>();
        }

        /** Copy constructor: deep-copies the head and set containers. */
        @SuppressWarnings("unchecked")
        public AbstractBuffer(AbstractBuffer other)
        {
            precise = other.precise;
            last = other.last;
            set = (HashSet<BufVal>) other.set.clone();
            head = (LinkedList<BufVal>) other.head.clone();
        }

        public Object clone()
        {
            return new AbstractBuffer(this);
        }

        //The hashcode may not refer to the set, since the equals()
        //of SharedResVal does not check set equality!
        //Yes, this is black magic.
        @Override
        public int hashCode()
        {
            final int prime = 31;
            int result = last;
            result = prime * result + head.hashCode();
            //result = prime * result + (precise ? 1 : 0);
            return result;
        }

        /** Full structural equality, including the overflow set. */
        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (!(obj instanceof AbstractBuffer)) return false;
            AbstractBuffer other = (AbstractBuffer) obj;
            return (other.precise == precise && other.last == last && head.equals(other.head) && other.set.equals(set));
        }

        /** Like equals() but ignores the overflow set — used for joining. */
        public boolean isJoinable(Object obj) {
            if (this == obj)
                return true;
            if (!(obj instanceof AbstractBuffer)) return false;
            AbstractBuffer other = (AbstractBuffer) obj;
            return (other.precise == precise && other.last == last && head.equals(other.head));
        }
    }

    /** The precise global (memory) value of the resource. */
    private int value;
    //TODO: Should be private
    AbstractBuffer buffer[];

    /** Creates a value with one empty buffer per process. */
    SharedResVal(int numProcs, int init)
    {
        value = init;
        buffer = new AbstractBuffer[numProcs];
        for (int i = 0; i < numProcs; i++)
            buffer[i] = new AbstractBuffer();
    }

    /**
     * Copy constructor. With LAZY_COPY the buffer array is shared shallowly;
     * mutators clone a buffer before touching it (see writable()).
     */
    public SharedResVal(SharedResVal other) {
        value = other.value;
        //Note this is NOT a deep copy.
        buffer = other.buffer.clone();
        if (!Options.LAZY_COPY)
        {
            for (int i = 0; i < buffer.length; i++)
                buffer[i] = (AbstractBuffer) buffer[i].clone();
        }
    }

    /** Equality over the global value and joinable buffers (sets ignored). */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof SharedResVal)) return false;
        SharedResVal other = (SharedResVal) obj;
        if (other.value != value)
            return false;
        for (int i = 0; i < buffer.length; i++)
        {
            if (!buffer[i].isJoinable(other.buffer[i]))
                return false;
        }
        return true;
    }

    // Consistent with equals(): AbstractBuffer.hashCode() deliberately
    // excludes the set, matching isJoinable().
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = value;
        result = prime * result + Arrays.hashCode(buffer);
        return result;
    }

    @Override
    public String toString()
    {
        //TODO Fix up to use tail
        StringBuffer sb = new StringBuffer();
        sb.append(value);
        sb.append(", [[");
        for (int i = 0; i < buffer.length; i++)
        {
            sb.append(i);
            sb.append(":");
            sb.append(buffer[i].precise);
            sb.append(", ");
            sb.append(buffer[i].last);
            sb.append(", {");
            sb.append(buffer[i].set);
            sb.append("}, (");
            sb.append(buffer[i].head);
            sb.append(") ");
        }
        sb.append("]]");
        return sb.toString();
    }

    @Override
    public AbsSharedResVal clone()
    {
        return new SharedResVal(this);
    }

    /** Last value written by the given process. */
    public int getLast(int pid)
    {
        return buffer[pid].last;
    }

    /** True if the given process's buffer content is fully precise. */
    public boolean isPrecise(int pid)
    {
        return buffer[pid].precise;
    }

    /** The precise global value of the resource. */
    public int getGlobalVal()
    {
        return value;
    }

    /**
     * Ensures buffer[pid] is safe to mutate: under LAZY_COPY it may be shared
     * with other SharedResVal instances (and with the intern cache), so clone
     * it first.
     *
     * @return the now-private buffer for pid
     */
    private AbstractBuffer writable(int pid)
    {
        if (Options.LAZY_COPY)
            buffer[pid] = (AbstractBuffer) buffer[pid].clone();
        return buffer[pid];
    }

    /**
     * Interns buffer[pid] in the weak cache, replacing it with the canonical
     * instance when an equal buffer already exists.
     */
    private void intern(int pid)
    {
        AbstractBuffer cached = cachedBuffers.get(buffer[pid]);
        if (cached == null)
            cachedBuffers.put(buffer[pid], buffer[pid]);
        else
            buffer[pid] = cached;
    }

    //TODO: Store-H should be enabled more often!
    /**
     * Records a store by process pid. Goes into the precise head while it has
     * room; otherwise spills into the imprecise set.
     */
    public void store(int pid, int val, int writingLabel)
    {
        AbstractBuffer buf = writable(pid);
        buf.last = val;
        if (isPrecise(pid) && !isFullHead(pid))
            buf.head.addLast(new BufVal(val, writingLabel));
        else
        {
            buf.set.add(new BufVal(val, writingLabel));
            buf.precise = false;
        }
        intern(pid);
    }

    /** Empties pid's buffer entirely, restoring precision. */
    public void clearBuffer(int pid)
    {
        AbstractBuffer buf = writable(pid);
        buf.set.clear();
        buf.head.clear();
        buf.precise = true;
        intern(pid);
    }

    /** True if pid's precise head is empty. */
    public boolean isEmptyHead(int pid)
    {
        return buffer[pid].head.isEmpty();
    }

    /** True if pid's precise head has reached its configured bound. */
    private boolean isFullHead(int pid)
    {
        return buffer[pid].head.size() == EmpEagerAbsSemantics.headlen;
    }

    /** True if pid's imprecise overflow set is empty. (Currently unused.) */
    private boolean isEmptySet(int pid)
    {
        return buffer[pid].set.isEmpty();
    }

    /** True if pid's buffer holds nothing at all (empty head and precise). */
    public boolean isEmpty(int pid)
    {
        return (isEmptyHead(pid) && isPrecise(pid));
    }

    /** Sets the global value directly, bypassing the buffers. */
    public void directSetValue(int val)
    {
        value = val;
    }

    /**
     * Flushes the oldest head entry of pid's buffer into the global value.
     * Caller must ensure the head is non-empty (see isEmptyHead()).
     */
    public void flushFromHead(int pid)
    {
        value = writable(pid).head.pollFirst().contents;
        intern(pid);
    }

    /**
     * Joins the imprecise sets of another value into this one, per process.
     *
     * @return true if any buffer gained new elements
     */
    public boolean join(SharedResVal other)
    {
        boolean changed = false;
        for (int pid = 0; pid < buffer.length; pid++)
        {
            if (buffer[pid].set.containsAll(other.buffer[pid].set))
                continue;
            writable(pid).set.addAll(other.buffer[pid].set);
            changed = true;
            intern(pid);
        }
        return changed;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.store;
import com.carrotsearch.ant.tasks.junit4.dependencies.com.google.common.collect.Lists;
import com.carrotsearch.randomizedtesting.LifecycleScope;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.google.common.base.Charsets;
import com.google.common.base.Predicate;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.store.*;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthStatus;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.*;
import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.index.engine.EngineConfig;
import org.elasticsearch.index.merge.policy.MergePolicyModule;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardException;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.translog.TranslogService;
import org.elasticsearch.indices.IndicesLifecycle;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryFileChunkRequest;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.indices.recovery.RecoveryTarget;
import org.elasticsearch.monitor.fs.FsStats;
import org.elasticsearch.snapshots.SnapshotState;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.elasticsearch.test.index.merge.NoMergePolicyProvider;
import org.elasticsearch.test.store.MockFSDirectoryService;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.*;
import org.junit.Test;
import java.io.*;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
import static org.hamcrest.Matchers.*;
@ElasticsearchIntegrationTest.ClusterScope(scope = ElasticsearchIntegrationTest.Scope.SUITE)
public class CorruptedFileTest extends ElasticsearchIntegrationTest {
@Override
protected Settings nodeSettings(int nodeOrdinal) {
    // Start from the defaults supplied by the base integration test class.
    ImmutableSettings.Builder settings = ImmutableSettings.builder()
            .put(super.nodeSettings(nodeOrdinal));
    // we really need local GW here since this also checks for corruption etc.
    // and we need to make sure primaries are not just trashed if we don't have replicas
    settings.put("gateway.type", "local");
    settings.put(TransportModule.TRANSPORT_SERVICE_TYPE_KEY, MockTransportService.class.getName());
    // speed up recoveries
    settings.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_STREAMS, 10);
    settings.put(RecoverySettings.INDICES_RECOVERY_CONCURRENT_SMALL_FILE_STREAMS, 10);
    settings.put(ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES, 5);
    return settings.build();
}
/**
 * Tests that we can actually recover from a corruption on the primary given that we have replica shards around.
 * Corrupts a random file of the primary, then raises the replica count and verifies the cluster still
 * reaches green, searches succeed, and a CheckIndex run at shard close finds no remaining corruption.
 */
@Test
public void testCorruptFileAndRecover() throws ExecutionException, InterruptedException, IOException {
    int numDocs = scaledRandomIntBetween(100, 1000);
    assertThat(cluster().numDataNodes(), greaterThanOrEqualTo(2));
    while (cluster().numDataNodes() < 4) {
        /**
         * We need 4 nodes since if we have 2 replicas and only 3 nodes we can't get into green state since
         * the corrupted node will never be used reallocate a replica since it's marked as corrupted
         */
        internalCluster().startNode(ImmutableSettings.builder().put("node.data", true).put("node.client", false).put("node.master", false));
    }
    assertThat(cluster().numDataNodes(), greaterThanOrEqualTo(3));
    final boolean failOnCorruption = randomBoolean();
    assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1")
            .put(MergePolicyModule.MERGE_POLICY_TYPE_KEY, NoMergePolicyProvider.class)
            .put(MockFSDirectoryService.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose
            .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, failOnCorruption)
            .put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files
            .put("indices.recovery.concurrent_streams", 10)
    ));
    if (failOnCorruption == false) { // test the dynamic setting
        client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder()
                .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)).get();
    }
    ensureGreen();
    // Pin shards in place while we index and corrupt.
    disableAllocation("test");
    IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < builders.length; i++) {
        builders[i] = client().prepareIndex("test", "type").setSource("field", "value");
    }
    indexRandom(true, builders);
    ensureGreen();
    assertAllSuccessful(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet());
    // we have to flush at least once here since we don't corrupt the translog
    CountResponse countResponse = client().prepareCount().get();
    assertHitCount(countResponse, numDocs);
    final int numShards = numShards("test");
    ShardRouting corruptedShardRouting = corruptRandomFile();
    enableAllocation("test");
    /*
     * we corrupted the primary shard - now lets make sure we never recover from it successfully
     */
    Settings build = ImmutableSettings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "2").build();
    client().admin().indices().prepareUpdateSettings("test").setSettings(build).get();
    ClusterHealthResponse health = client().admin().cluster()
            .health(Requests.clusterHealthRequest("test").waitForGreenStatus()
                    .timeout("5m") // sometimes due to cluster rebalacing and random settings default timeout is just not enough.
                    .waitForRelocatingShards(0)).actionGet();
    if (health.isTimedOut()) {
        logger.info("cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState().prettyPrint(), client().admin().cluster().preparePendingClusterTasks().get().prettyPrint());
        assertThat("timed out waiting for green state", health.isTimedOut(), equalTo(false));
    }
    assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN));
    // Searches must succeed despite the corrupted copy having been replaced.
    final int numIterations = scaledRandomIntBetween(5, 20);
    for (int i = 0; i < numIterations; i++) {
        SearchResponse response = client().prepareSearch().setSize(numDocs).get();
        assertHitCount(response, numDocs);
    }
    /*
     * now hook into the IndicesService and register a close listener to
     * run the checkindex. if the corruption is still there we will catch it.
     */
    final CountDownLatch latch = new CountDownLatch(numShards * 3); // primary + 2 replicas
    final CopyOnWriteArrayList<Throwable> exception = new CopyOnWriteArrayList<>();
    final IndicesLifecycle.Listener listener = new IndicesLifecycle.Listener() {
        @Override
        public void beforeIndexShardClosed(ShardId sid, @Nullable IndexShard indexShard, @IndexSettings Settings indexSettings) {
            if (indexShard != null) {
                Store store = ((IndexShard) indexShard).store();
                store.incRef();
                try {
                    if (!Lucene.indexExists(store.directory()) && indexShard.state() == IndexShardState.STARTED) {
                        return;
                    }
                    CheckIndex checkIndex = new CheckIndex(store.directory());
                    BytesStreamOutput os = new BytesStreamOutput();
                    PrintStream out = new PrintStream(os, false, Charsets.UTF_8.name());
                    checkIndex.setInfoStream(out);
                    out.flush();
                    CheckIndex.Status status = checkIndex.checkIndex();
                    if (!status.clean) {
                        logger.warn("check index [failure]\n{}", new String(os.bytes().toBytes(), Charsets.UTF_8));
                        throw new IndexShardException(sid, "index check failure");
                    }
                } catch (Throwable t) {
                    // Collected and asserted empty after the index is deleted.
                    exception.add(t);
                } finally {
                    store.decRef();
                    latch.countDown();
                }
            }
        }
    };
    for (IndicesService service : internalCluster().getDataNodeInstances(IndicesService.class)) {
        service.indicesLifecycle().addListener(listener);
    }
    try {
        // Deleting the index closes every shard, triggering the listener.
        client().admin().indices().prepareDelete("test").get();
        latch.await();
        assertThat(exception, empty());
    } finally {
        for (IndicesService service : internalCluster().getDataNodeInstances(IndicesService.class)) {
            service.indicesLifecycle().removeListener(listener);
        }
    }
}
/**
 * Tests corruption that happens on a single shard when no replicas are present. We make sure that the primary
 * stays unassigned while allocation of replicas for the remaining healthy shards still happens, and that the
 * corrupted shard directory is marked with a "corrupted_" marker file.
 */
@Test
public void testCorruptPrimaryNoReplica() throws ExecutionException, InterruptedException, IOException {
    int numDocs = scaledRandomIntBetween(100, 1000);
    assertThat(cluster().numDataNodes(), greaterThanOrEqualTo(2));
    assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
            .put(MergePolicyModule.MERGE_POLICY_TYPE_KEY, NoMergePolicyProvider.class)
            .put(MockFSDirectoryService.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose
            .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)
            .put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files
            .put("indices.recovery.concurrent_streams", 10)
    ));
    ensureGreen();
    IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < builders.length; i++) {
        builders[i] = client().prepareIndex("test", "type").setSource("field", "value");
    }
    indexRandom(true, builders);
    ensureGreen();
    assertAllSuccessful(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet());
    // we have to flush at least once here since we don't corrupt the translog
    CountResponse countResponse = client().prepareCount().get();
    assertHitCount(countResponse, numDocs);
    ShardRouting shardRouting = corruptRandomFile();
    /*
     * we corrupted the primary shard - now lets make sure we never recover from it successfully
     */
    Settings build = ImmutableSettings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1").build();
    client().admin().indices().prepareUpdateSettings("test").setSettings(build).get();
    client().admin().cluster().prepareReroute().get();
    // Wait for the cluster to notice the corrupted primary and turn red.
    boolean didClusterTurnRed = awaitBusy(new Predicate<Object>() {
        @Override
        public boolean apply(Object input) {
            ClusterHealthStatus test = client().admin().cluster()
                    .health(Requests.clusterHealthRequest("test")).actionGet().getStatus();
            return test == ClusterHealthStatus.RED;
        }
    }, 5, TimeUnit.MINUTES);// sometimes on slow nodes the replication / recovery is just dead slow
    final ClusterHealthResponse response = client().admin().cluster()
            .health(Requests.clusterHealthRequest("test")).get();
    if (response.getStatus() != ClusterHealthStatus.RED) {
        logger.info("Cluster turned red in busy loop: {}", didClusterTurnRed);
        logger.info("cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState().prettyPrint(), client().admin().cluster().preparePendingClusterTasks().get().prettyPrint());
    }
    assertThat(response.getStatus(), is(ClusterHealthStatus.RED));
    // The corrupted shard must stay unassigned; all others must be healthy.
    ClusterState state = client().admin().cluster().prepareState().get().getState();
    GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
    for (ShardIterator iterator : shardIterators) {
        ShardRouting routing;
        while ((routing = iterator.nextOrNull()) != null) {
            if (routing.getId() == shardRouting.getId()) {
                assertThat(routing.state(), equalTo(ShardRoutingState.UNASSIGNED));
            } else {
                assertThat(routing.state(), anyOf(equalTo(ShardRoutingState.RELOCATING), equalTo(ShardRoutingState.STARTED)));
            }
        }
    }
    // The store should have flagged the shard on disk with a corruption marker.
    final List<File> files = listShardFiles(shardRouting);
    File corruptedFile = null;
    for (File file : files) {
        if (file.getName().startsWith("corrupted_")) {
            corruptedFile = file;
            break;
        }
    }
    assertThat(corruptedFile, notNullValue());
}
/**
 * This test triggers a corrupt index exception during finalization size if an empty commit point is transferred
 * during recovery we don't know the version of the segments_N file because it has no segments we can take it from.
 * This simulates recoveries from old indices or even without checksums and makes sure if we fail during finalization
 * we also check if the primary is ok. Without the relevant checks this test fails with a RED cluster.
 * NOTE(review): no @Test annotation — presumably picked up via the test-name convention; confirm with the runner.
 */
public void testCorruptionOnNetworkLayerFinalizingRecovery() throws ExecutionException, InterruptedException, IOException {
    internalCluster().ensureAtLeastNumDataNodes(2);
    NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().get();
    List<NodeStats> dataNodeStats = new ArrayList<>();
    for (NodeStats stat : nodeStats.getNodes()) {
        if (stat.getNode().isDataNode()) {
            dataNodeStats.add(stat);
        }
    }
    assertThat(dataNodeStats.size(), greaterThanOrEqualTo(2));
    // Pick one node to host the primary and one to receive corrupted chunks.
    Collections.shuffle(dataNodeStats, getRandom());
    NodeStats primariesNode = dataNodeStats.get(0);
    NodeStats unluckyNode = dataNodeStats.get(1);
    assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)
            .put("index.routing.allocation.include._name", primariesNode.getNode().name())
    ));
    ensureGreen(); // allocated with empty commit
    final AtomicBoolean corrupt = new AtomicBoolean(true);
    final CountDownLatch hasCorrupted = new CountDownLatch(1);
    // Intercept recovery file chunks sent to the unlucky node and flip a byte.
    for (NodeStats dataNode : dataNodeStats) {
        MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().name()));
        mockTransportService.addDelegate(internalCluster().getInstance(Discovery.class, unluckyNode.getNode().name()).localNode(), new MockTransportService.DelegateTransport(mockTransportService.original()) {
            @Override
            public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
                if (corrupt.get() && action.equals(RecoveryTarget.Actions.FILE_CHUNK)) {
                    RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request;
                    byte[] array = req.content().array();
                    int i = randomIntBetween(0, req.content().length() - 1);
                    array[i] = (byte) ~array[i]; // flip one byte in the content
                    hasCorrupted.countDown();
                }
                super.sendRequest(node, requestId, action, request, options);
            }
        });
    }
    // Trigger a replica recovery onto the unlucky node.
    Settings build = ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1")
            .put("index.routing.allocation.include._name", primariesNode.getNode().name() + "," + unluckyNode.getNode().name()).build();
    client().admin().indices().prepareUpdateSettings("test").setSettings(build).get();
    client().admin().cluster().prepareReroute().get();
    hasCorrupted.await();
    // Stop corrupting; the retried recovery should then succeed.
    corrupt.set(false);
    ensureGreen();
}
/**
* Tests corruption that happens on the network layer and that the primary does not get affected by corruption that happens on the way
* to the replica. The file on disk stays uncorrupted
*/
@Test
public void testCorruptionOnNetworkLayer() throws ExecutionException, InterruptedException {
    // Indexes docs into an index pinned to one node, then corrupts every recovery FILE_CHUNK
    // sent to a designated "unlucky" node and verifies the cluster still reaches green with
    // intact primaries while no shard ends up started on the unlucky node.
    int numDocs = scaledRandomIntBetween(100, 1000);
    assertThat(cluster().numDataNodes(), greaterThanOrEqualTo(2));
    if (cluster().numDataNodes() < 3) {
        // make sure there is a third data node so the replica has a healthy target besides
        // the primaries node and the unlucky node
        internalCluster().startNode(ImmutableSettings.builder().put("node.data", true).put("node.client", false).put("node.master", false));
    }
    NodesStatsResponse nodeStats = client().admin().cluster().prepareNodesStats().get();
    List<NodeStats> dataNodeStats = new ArrayList<>();
    for (NodeStats stat : nodeStats.getNodes()) {
        if (stat.getNode().isDataNode()) {
            dataNodeStats.add(stat);
        }
    }
    assertThat(dataNodeStats.size(), greaterThanOrEqualTo(2));
    // pick two distinct random data nodes: one to host primaries, one to receive corrupted chunks
    Collections.shuffle(dataNodeStats, getRandom());
    NodeStats primariesNode = dataNodeStats.get(0);
    NodeStats unluckyNode = dataNodeStats.get(1);
    assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0")
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, between(1, 4)) // don't go crazy here it must recover fast
            .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)
            // This does corrupt files on the replica, so we can't check:
            .put(MockFSDirectoryService.CHECK_INDEX_ON_CLOSE, false)
            .put("index.routing.allocation.include._name", primariesNode.getNode().name())
    ));
    ensureGreen();
    IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < builders.length; i++) {
        builders[i] = client().prepareIndex("test", "type").setSource("field", "value");
    }
    indexRandom(true, builders);
    ensureGreen();
    assertAllSuccessful(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet());
    // we have to flush at least once here since we don't corrupt the translog
    CountResponse countResponse = client().prepareCount().get();
    assertHitCount(countResponse, numDocs);
    // randomly pick one corruption mode for the whole test run: truncate chunks or flip a byte
    final boolean truncate = randomBoolean();
    for (NodeStats dataNode : dataNodeStats) {
        MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getNode().name()));
        mockTransportService.addDelegate(internalCluster().getInstance(Discovery.class, unluckyNode.getNode().name()).localNode(), new MockTransportService.DelegateTransport(mockTransportService.original()) {
            @Override
            public void sendRequest(DiscoveryNode node, long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException {
                if (action.equals(RecoveryTarget.Actions.FILE_CHUNK)) {
                    RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request;
                    if (truncate && req.length() > 1) {
                        // drop the last byte of the chunk
                        BytesArray array = new BytesArray(req.content().array(), req.content().arrayOffset(), (int) req.length() - 1);
                        request = new RecoveryFileChunkRequest(req.recoveryId(), req.shardId(), req.metadata(), req.position(), array, req.lastChunk(), req.totalTranslogOps(), req.sourceThrottleTimeInNanos());
                    } else {
                        byte[] array = req.content().array();
                        int i = randomIntBetween(0, req.content().length() - 1);
                        array[i] = (byte) ~array[i]; // flip one byte in the content
                    }
                }
                super.sendRequest(node, requestId, action, request, options);
            }
        });
    }
    // allow allocation everywhere and add a replica so recovery (and corruption) kicks in
    Settings build = ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "1")
            .put("index.routing.allocation.include._name", "*").build();
    client().admin().indices().prepareUpdateSettings("test").setSettings(build).get();
    client().admin().cluster().prepareReroute().get();
    ClusterHealthResponse actionGet = client().admin().cluster()
            .health(Requests.clusterHealthRequest("test").waitForGreenStatus()).actionGet();
    if (actionGet.isTimedOut()) {
        logger.info("ensureGreen timed out, cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState().prettyPrint(), client().admin().cluster().preparePendingClusterTasks().get().prettyPrint());
        assertThat("timed out waiting for green state", actionGet.isTimedOut(), equalTo(false));
    }
    // we are green so primaries got not corrupted.
    // ensure that no shard is actually allocated on the unlucky node
    ClusterStateResponse clusterStateResponse = client().admin().cluster().prepareState().get();
    for (IndexShardRoutingTable table : clusterStateResponse.getState().routingNodes().getRoutingTable().index("test")) {
        for (ShardRouting routing : table) {
            if (unluckyNode.getNode().getId().equals(routing.currentNodeId())) {
                assertThat(routing.state(), not(equalTo(ShardRoutingState.STARTED)));
                assertThat(routing.state(), not(equalTo(ShardRoutingState.RELOCATING)));
            }
        }
    }
    // sanity: all docs remain searchable from the uncorrupted copies
    final int numIterations = scaledRandomIntBetween(5, 20);
    for (int i = 0; i < numIterations; i++) {
        SearchResponse response = client().prepareSearch().setSize(numDocs).get();
        assertHitCount(response, numDocs);
    }
}
/**
* Tests that restoring of a corrupted shard fails and we get a partial snapshot.
* TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several
* parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard.
*/
@Test
public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException {
    // Corrupts a per-segment file of a primary shard and verifies that a subsequent snapshot
    // of the index ends up PARTIAL and leaves a "corrupted_*" marker file in the shard directory.
    int numDocs = scaledRandomIntBetween(100, 1000);
    assertThat(cluster().numDataNodes(), greaterThanOrEqualTo(2));
    assertAcked(prepareCreate("test").setSettings(ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, "0") // no replicas for this test
            .put(MergePolicyModule.MERGE_POLICY_TYPE_KEY, NoMergePolicyProvider.class)
            .put(MockFSDirectoryService.CHECK_INDEX_ON_CLOSE, false) // no checkindex - we corrupt shards on purpose
            .put(EngineConfig.INDEX_FAIL_ON_CORRUPTION_SETTING, true)
            .put(TranslogService.INDEX_TRANSLOG_DISABLE_FLUSH, true) // no translog based flush - it might change the .liv / segments.N files
            .put("indices.recovery.concurrent_streams", 10)
    ));
    ensureGreen();
    IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
    for (int i = 0; i < builders.length; i++) {
        builders[i] = client().prepareIndex("test", "type").setSource("field", "value");
    }
    indexRandom(true, builders);
    ensureGreen();
    assertAllSuccessful(client().admin().indices().prepareFlush().setForce(true).setWaitIfOngoing(true).execute().actionGet());
    // we have to flush at least once here since we don't corrupt the translog
    CountResponse countResponse = client().prepareCount().get();
    assertHitCount(countResponse, numDocs);
    // false => only corrupt per-segment files, never per-commit ones (see comment below)
    ShardRouting shardRouting = corruptRandomFile(false);
    // we don't corrupt segments.gen since S/R doesn't snapshot this file
    // the other problem here why we can't corrupt segments.X files is that the snapshot flushes again before
    // it snapshots and that will write a new segments.X+1 file
    logger.info("--> creating repository");
    assertAcked(client().admin().cluster().preparePutRepository("test-repo")
            .setType("fs").setSettings(ImmutableSettings.settingsBuilder()
                    .put("location", newTempDir(LifecycleScope.SUITE).getAbsolutePath())
                    .put("compress", randomBoolean())
                    .put("chunk_size", randomIntBetween(100, 1000))));
    logger.info("--> snapshot");
    CreateSnapshotResponse createSnapshotResponse = client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test").get();
    // snapshot must fail on the corrupted shard, hence PARTIAL rather than SUCCESS
    assertThat(createSnapshotResponse.getSnapshotInfo().state(), equalTo(SnapshotState.PARTIAL));
    logger.info("failed during snapshot -- maybe SI file got corrupted");
    // the store should have left a corruption marker file behind
    final List<File> files = listShardFiles(shardRouting);
    File corruptedFile = null;
    for (File file : files) {
        if (file.getName().startsWith("corrupted_")) {
            corruptedFile = file;
            break;
        }
    }
    assertThat(corruptedFile, notNullValue());
}
/** Returns the number of active primary shards across the given indices. */
private int numShards(String... index) {
    final ClusterState clusterState = client().admin().cluster().prepareState().get().getState();
    final GroupShardsIterator primaryGroups =
            clusterState.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(index, false);
    return primaryGroups.size();
}
/**
 * Corrupts one random file of a random active primary shard; per-commit files
 * (segments_N / .del) are eligible targets as well.
 */
private ShardRouting corruptRandomFile() throws IOException {
    return corruptRandomFile(true);
}
/**
 * Picks a random active primary shard of index "test", flips a single byte in one of its
 * on-disk index files and verifies via Lucene checksums that the corruption is observable.
 *
 * @param includePerCommitFiles whether per-commit files (segments_N / .del) may be corrupted
 *                              in addition to per-segment files
 * @return the routing entry of the shard whose file was corrupted
 */
private ShardRouting corruptRandomFile(final boolean includePerCommitFiles) throws IOException {
    ClusterState state = client().admin().cluster().prepareState().get().getState();
    GroupShardsIterator shardIterators = state.getRoutingNodes().getRoutingTable().activePrimaryShardsGrouped(new String[]{"test"}, false);
    List<ShardIterator> iterators = Lists.newArrayList(shardIterators);
    ShardIterator shardIterator = RandomPicks.randomFrom(getRandom(), iterators);
    ShardRouting shardRouting = shardIterator.nextOrNull();
    assertNotNull(shardRouting);
    assertTrue(shardRouting.primary());
    assertTrue(shardRouting.assignedToNode());
    String nodeId = shardRouting.currentNodeId();
    NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get();
    // collect eligible files from every data path of the shard's node
    Set<File> files = new TreeSet<>(); // treeset makes sure iteration order is deterministic
    for (FsStats.Info info : nodeStatses.getNodes()[0].getFs()) {
        String path = info.getPath();
        final String relativeDataLocationPath = "indices/test/" + Integer.toString(shardRouting.getId()) + "/index";
        File file = new File(path, relativeDataLocationPath);
        files.addAll(Arrays.asList(file.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                if (pathname.isFile() && "write.lock".equals(pathname.getName()) == false) {
                    return (includePerCommitFiles || isPerSegmentFile(pathname.getName()));
                }
                return false; // no dirs no write.locks
            }
        })));
    }
    // only the newest .del generation per segment stays a valid corruption target
    pruneOldDeleteGenerations(files);
    File fileToCorrupt = null;
    if (!files.isEmpty()) {
        fileToCorrupt = RandomPicks.randomFrom(getRandom(), files);
        try (Directory dir = FSDirectory.open(fileToCorrupt.getParentFile())) {
            // remember the stored checksum before we touch the file
            long checksumBeforeCorruption;
            try (IndexInput input = dir.openInput(fileToCorrupt.getName(), IOContext.DEFAULT)) {
                checksumBeforeCorruption = CodecUtil.retrieveChecksum(input);
            }
            // flip one byte at a random position (increment modulo 256)
            try (RandomAccessFile raf = new RandomAccessFile(fileToCorrupt, "rw")) {
                raf.seek(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.length() - 1)));
                long filePointer = raf.getFilePointer();
                byte b = raf.readByte();
                raf.seek(filePointer);
                int corruptedValue = (b + 1) & 0xff;
                raf.writeByte(corruptedValue);
                raf.getFD().sync();
                logger.info("Corrupting file for shard {} -- flipping at position {} from {} to {} file: {}", shardRouting, filePointer, Integer.toHexString(b), Integer.toHexString(corruptedValue), fileToCorrupt.getName());
            }
            // re-read the file: either the recomputed checksum or the stored one must now differ
            long checksumAfterCorruption;
            long actualChecksumAfterCorruption;
            try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getName(), IOContext.DEFAULT)) {
                assertThat(input.getFilePointer(), is(0l));
                input.seek(input.length() - 8); // one long is the checksum... 8 bytes
                checksumAfterCorruption = input.getChecksum();
                actualChecksumAfterCorruption = input.readLong();
            }
            // we need to add assumptions here that the checksums actually really don't match there is a small chance to get collisions
            // in the checksum which is ok though....
            StringBuilder msg = new StringBuilder();
            msg.append("Checksum before: [").append(checksumBeforeCorruption).append("]");
            msg.append(" after: [").append(checksumAfterCorruption).append("]");
            msg.append(" checksum value after corruption: ").append(actualChecksumAfterCorruption).append("]");
            msg.append(" file: ").append(fileToCorrupt.getName()).append(" length: ").append(dir.fileLength(fileToCorrupt.getName()));
            logger.info(msg.toString());
            assumeTrue("Checksum collision - " + msg.toString(),
                    checksumAfterCorruption != checksumBeforeCorruption // collision
                            || actualChecksumAfterCorruption != checksumBeforeCorruption); // checksum corrupted
        }
    }
    assertThat("no file corrupted", fileToCorrupt, notNullValue());
    return shardRouting;
}
/**
 * Returns {@code true} for files written once per commit point (segments_N and .del
 * live-docs files) as opposed to per-segment files; per-commit files may legitimately
 * change after a corruption was introduced, so some tests must exclude them.
 */
// note: the redundant 'final' modifier was dropped — private static methods cannot be overridden
private static boolean isPerCommitFile(String fileName) {
    // .del and segments_N are per commit files and might change after corruption
    return fileName.startsWith("segments") || fileName.endsWith(".del");
}
/** Inverse of {@code isPerCommitFile}: true for files that belong to a single segment. */
// note: the redundant 'final' modifier was dropped — private static methods cannot be overridden
private static boolean isPerSegmentFile(String fileName) {
    return isPerCommitFile(fileName) == false;
}
/**
 * prunes the list of index files such that only the latest del generation files are contained.
 */
private void pruneOldDeleteGenerations(Set<File> files) {
    // gather all .del files; TreeSet keeps them sorted by name so same-segment files are adjacent
    final TreeSet<File> delFiles = new TreeSet<>();
    for (File file : files) {
        if (file.getName().endsWith(".del")) {
            delFiles.add(file);
        }
    }
    // compare each .del file with the previous one; when both belong to the same segment,
    // keep only the file with the higher generation (generations are base-36 encoded)
    File last = null;
    for (File current : delFiles) {
        if (last != null) {
            final String newSegmentName = IndexFileNames.parseSegmentName(current.getName());
            final String oldSegmentName = IndexFileNames.parseSegmentName(last.getName());
            if (newSegmentName.equals(oldSegmentName)) {
                int oldGen = Integer.parseInt(IndexFileNames.stripExtension(IndexFileNames.stripSegmentName(last.getName())).replace("_", ""), Character.MAX_RADIX);
                int newGen = Integer.parseInt(IndexFileNames.stripExtension(IndexFileNames.stripSegmentName(current.getName())).replace("_", ""), Character.MAX_RADIX);
                if (newGen > oldGen) {
                    files.remove(last);
                } else {
                    files.remove(current);
                    continue;
                }
            }
        }
        last = current;
    }
}
/** Lists every on-disk index file of the given shard across all data paths of its node. */
public List<File> listShardFiles(ShardRouting routing) {
    NodesStatsResponse stats = client().admin().cluster().prepareNodesStats(routing.currentNodeId()).setFs(true).get();
    // exactly one node must match the shard's current node id
    assertThat(routing.toString(), stats.getNodes().length, equalTo(1));
    List<File> result = new ArrayList<>();
    for (FsStats.Info fsInfo : stats.getNodes()[0].getFs()) {
        File shardIndexDir = new File(fsInfo.getPath(), "indices/test/" + Integer.toString(routing.getId()) + "/index");
        result.addAll(Arrays.asList(shardIndexDir.listFiles()));
    }
    return result;
}
/** Disables shard allocation for the given index via a dynamic settings update. */
private void disableAllocation(String index) {
    client().admin().indices().prepareUpdateSettings(index)
            .setSettings(ImmutableSettings.builder().put("index.routing.allocation.enable", "none"))
            .get();
}
/** Re-enables shard allocation for the given index via a dynamic settings update. */
private void enableAllocation(String index) {
    client().admin().indices().prepareUpdateSettings(index)
            .setSettings(ImmutableSettings.builder().put("index.routing.allocation.enable", "all"))
            .get();
}
}
| |
package org.knowm.xchange.bitfinex.v1;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexAccountFeesResponse;
import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexBalancesResponse;
import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexDepositWithdrawalHistoryResponse;
import org.knowm.xchange.bitfinex.v1.dto.account.BitfinexTradingFeeResponse;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexDepth;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexLendLevel;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexLevel;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexSymbolDetail;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexTicker;
import org.knowm.xchange.bitfinex.v1.dto.marketdata.BitfinexTrade;
import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexAccountInfosResponse;
import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexOrderFlags;
import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexOrderStatusResponse;
import org.knowm.xchange.bitfinex.v1.dto.trade.BitfinexTradeResponse;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.Order.OrderStatus;
import org.knowm.xchange.dto.Order.OrderType;
import org.knowm.xchange.dto.account.Balance;
import org.knowm.xchange.dto.account.Fee;
import org.knowm.xchange.dto.account.FundingRecord;
import org.knowm.xchange.dto.account.Wallet;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trade;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.dto.marketdata.Trades.TradeSortType;
import org.knowm.xchange.dto.meta.CurrencyMetaData;
import org.knowm.xchange.dto.meta.CurrencyPairMetaData;
import org.knowm.xchange.dto.meta.ExchangeMetaData;
import org.knowm.xchange.dto.trade.FixedRateLoanOrder;
import org.knowm.xchange.dto.trade.FloatingRateLoanOrder;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.dto.trade.OpenOrders;
import org.knowm.xchange.dto.trade.StopOrder;
import org.knowm.xchange.dto.trade.UserTrade;
import org.knowm.xchange.dto.trade.UserTrades;
import org.knowm.xchange.utils.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public final class BitfinexAdapters {
public static final Logger log = LoggerFactory.getLogger(BitfinexAdapters.class);
private static final AtomicBoolean warnedStopLimit = new AtomicBoolean();
private BitfinexAdapters() {}
/**
* Each element in the response array contains a set of currencies that are at a given fee tier.
* The API returns the fee per currency in each tier and does not make any promises that they are
* all the same, so this adapter will use the fee per currency instead of the fee per tier.
*/
/**
 * Builds a per-currency-pair fee map from the dynamic trading-fee response.
 *
 * <p>The fee for a pair is the fee reported for the pair's base currency. An
 * {@link IllegalStateException} is thrown if two response rows map a fee to the same pair.
 *
 * @param responses fee tiers as returned by the Bitfinex account API
 * @param currencyPairs the pairs the fees should be resolved for
 * @return map from currency pair to its maker/taker fee (as fractions, not percent)
 */
public static Map<CurrencyPair, Fee> adaptDynamicTradingFees(
    BitfinexTradingFeeResponse[] responses, List<CurrencyPair> currencyPairs) {
  Map<CurrencyPair, Fee> result = new HashMap<>();
  // loop-invariant: 1/100, used to convert API percentages to fractions — hoisted out of the loops
  final BigDecimal percentToFraction = BigDecimal.ONE.divide(BigDecimal.ONE.scaleByPowerOfTen(2));
  for (BitfinexTradingFeeResponse response : responses) {
    BitfinexTradingFeeResponse.BitfinexTradingFeeResponseRow[] responseRows =
        response.getTradingFees();
    for (BitfinexTradingFeeResponse.BitfinexTradingFeeResponseRow responseRow : responseRows) {
      Currency currency = Currency.getInstance(responseRow.getCurrency());
      Fee fee =
          new Fee(
              responseRow.getMakerFee().multiply(percentToFraction),
              responseRow.getTakerFee().multiply(percentToFraction));
      for (CurrencyPair pair : currencyPairs) {
        // Fee to trade for a currency is the fee to trade currency pairs with this base.
        // Fee is typically assessed in units counter.
        if (pair.base.equals(currency)) {
          if (result.put(pair, fee) != null) {
            throw new IllegalStateException(
                "Fee for currency pair " + pair + " is overspecified");
          }
        }
      }
    }
  }
  return result;
}
/**
 * Normalizes a Bitfinex currency symbol to the upper-case form used elsewhere in XChange.
 *
 * <p>Uses {@link Locale#ROOT} so the conversion does not depend on the JVM default locale:
 * with the previous bare {@code toUpperCase()}, a Turkish default locale would map
 * {@code 'i'} to the dotted capital I and corrupt currency codes.
 *
 * @param bitfinexSymbol the raw (typically lower-case) Bitfinex symbol
 * @return the upper-cased symbol
 */
public static String adaptBitfinexCurrency(String bitfinexSymbol) {
  return bitfinexSymbol.toUpperCase(Locale.ROOT);
}
/**
 * Maps an XChange order type to the Bitfinex side string.
 *
 * @param type the order type; BID/EXIT_BID become "buy", ASK/EXIT_ASK become "sell"
 * @throws IllegalArgumentException for any other order type
 */
public static String adaptOrderType(OrderType type) {
  if (type == OrderType.BID || type == OrderType.EXIT_BID) {
    return "buy";
  }
  if (type == OrderType.ASK || type == OrderType.EXIT_ASK) {
    return "sell";
  }
  throw new IllegalArgumentException(String.format("Unexpected type of order: %s", type));
}
/**
 * Resolves the concrete Bitfinex order type from a set of order flags.
 *
 * <p>Flag priority (checked in this order): FILL_OR_KILL, then TRAILING_STOP, then STOP,
 * falling back to LIMIT. The MARGIN flag independently selects the margin variant.
 */
public static BitfinexOrderType adaptOrderFlagsToType(Set<Order.IOrderFlags> flags) {
  final boolean margin = flags.contains(BitfinexOrderFlags.MARGIN);
  if (flags.contains(BitfinexOrderFlags.FILL_OR_KILL)) {
    return margin ? BitfinexOrderType.MARGIN_FILL_OR_KILL : BitfinexOrderType.FILL_OR_KILL;
  }
  if (flags.contains(BitfinexOrderFlags.TRAILING_STOP)) {
    return margin ? BitfinexOrderType.MARGIN_TRAILING_STOP : BitfinexOrderType.TRAILING_STOP;
  }
  if (flags.contains(BitfinexOrderFlags.STOP)) {
    return margin ? BitfinexOrderType.MARGIN_STOP : BitfinexOrderType.STOP;
  }
  return margin ? BitfinexOrderType.MARGIN_LIMIT : BitfinexOrderType.LIMIT;
}
/**
 * Splits a Bitfinex v1 pair symbol (two concatenated codes, e.g. "btcusd") into a
 * {@link CurrencyPair}. The first three characters are taken as the base currency.
 */
public static CurrencyPair adaptCurrencyPair(String bitfinexSymbol) {
  final String base = adaptBitfinexCurrency(bitfinexSymbol.substring(0, 3));
  final String counter = adaptBitfinexCurrency(bitfinexSymbol.substring(3));
  return new CurrencyPair(base, counter);
}
/**
 * Derives an {@link OrderStatus} from a Bitfinex order status response.
 *
 * <p>Returns {@code null} when the executed amount exceeds the original amount,
 * which has no corresponding status.
 */
public static OrderStatus adaptOrderStatus(BitfinexOrderStatusResponse order) {
  if (order.isCancelled()) {
    return OrderStatus.CANCELED;
  }
  if (order.getExecutedAmount().compareTo(BigDecimal.ZERO) == 0) {
    return OrderStatus.NEW;
  }
  final int executedVsOriginal = order.getExecutedAmount().compareTo(order.getOriginalAmount());
  if (executedVsOriginal < 0) {
    return OrderStatus.PARTIALLY_FILLED;
  }
  if (executedVsOriginal == 0) {
    return OrderStatus.FILLED;
  }
  return null; // executed > original: no matching status
}
/** Converts an XChange {@link CurrencyPair} to the Bitfinex v1 pair string via {@code BitfinexUtils}. */
public static String adaptCurrencyPair(CurrencyPair pair) {
  return BitfinexUtils.toPairString(pair);
}
/**
 * Converts a Bitfinex depth snapshot into an XChange {@link OrderBook}.
 * The book's timestamp is the newest level timestamp seen on either side.
 */
public static OrderBook adaptOrderBook(BitfinexDepth btceDepth, CurrencyPair currencyPair) {
  final OrdersContainer asks = adaptOrders(btceDepth.getAsks(), currencyPair, OrderType.ASK);
  final OrdersContainer bids = adaptOrders(btceDepth.getBids(), currencyPair, OrderType.BID);
  final long newestTimestamp = Math.max(asks.getTimestamp(), bids.getTimestamp());
  return new OrderBook(new Date(newestTimestamp), asks.getLimitOrders(), bids.getLimitOrders());
}
/**
 * Converts raw Bitfinex levels into limit orders, tracking the newest level timestamp.
 *
 * @param bitfinexLevels one side of the order book; may be empty
 * @param currencyPair pair the orders belong to
 * @param orderType side of the book (ASK or BID)
 * @return container with the orders and the newest timestamp in milliseconds
 *         (0 when {@code bitfinexLevels} is empty)
 */
public static OrdersContainer adaptOrders(
    BitfinexLevel[] bitfinexLevels, CurrencyPair currencyPair, OrderType orderType) {
  List<LimitOrder> limitOrders = new ArrayList<>(bitfinexLevels.length);
  if (bitfinexLevels.length == 0) {
    // Guard: previously an empty side produced Long.MIN_VALUE * 1000 (an overflowed,
    // meaningless timestamp). Report 0 instead.
    return new OrdersContainer(0L, limitOrders);
  }
  BigDecimal maxTimestamp = new BigDecimal(Long.MIN_VALUE);
  for (BitfinexLevel bitfinexLevel : bitfinexLevels) {
    if (bitfinexLevel.getTimestamp().compareTo(maxTimestamp) > 0) {
      maxTimestamp = bitfinexLevel.getTimestamp();
    }
    Date timestamp = convertBigDecimalTimestampToDate(bitfinexLevel.getTimestamp());
    limitOrders.add(
        adaptOrder(
            bitfinexLevel.getAmount(),
            bitfinexLevel.getPrice(),
            currencyPair,
            orderType,
            timestamp));
  }
  // level timestamps are in seconds; container wants milliseconds
  long maxTimestampInMillis = maxTimestamp.multiply(new BigDecimal(1000L)).longValue();
  return new OrdersContainer(maxTimestampInMillis, limitOrders);
}
/**
 * Creates a {@link LimitOrder} from raw level data. The order id is left empty
 * because order-book levels carry no id.
 */
public static LimitOrder adaptOrder(
    BigDecimal originalAmount,
    BigDecimal price,
    CurrencyPair currencyPair,
    OrderType orderType,
    Date timestamp) {
  return new LimitOrder(orderType, originalAmount, currencyPair, "", timestamp, price);
}
/**
 * Converts Bitfinex lend levels into fixed-rate loan orders, skipping flash-return-rate
 * ("frr" == yes) entries. The "loan" (bid) book arrives in reversed order and is therefore
 * rebuilt by inserting each order at the front.
 */
public static List<FixedRateLoanOrder> adaptFixedRateLoanOrders(
    BitfinexLendLevel[] orders, String currency, String orderType, String id) {
  final boolean isLoanBook = orderType.equalsIgnoreCase("loan");
  List<FixedRateLoanOrder> loanOrders = new ArrayList<>(orders.length);
  for (BitfinexLendLevel order : orders) {
    if ("yes".equalsIgnoreCase(order.getFrr())) {
      continue;
    }
    FixedRateLoanOrder loanOrder =
        adaptFixedRateLoanOrder(
            currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate());
    // Bid orderbook is reversed order. Insert at reversed indices
    if (isLoanBook) {
      loanOrders.add(0, loanOrder);
    } else {
      loanOrders.add(loanOrder);
    }
  }
  return loanOrders;
}
/**
 * Builds a single {@link FixedRateLoanOrder}; direction "loan" maps to BID,
 * anything else to ASK.
 */
public static FixedRateLoanOrder adaptFixedRateLoanOrder(
    String currency,
    BigDecimal amount,
    int dayPeriod,
    String direction,
    String id,
    BigDecimal rate) {
  final OrderType orderType;
  if (direction.equalsIgnoreCase("loan")) {
    orderType = OrderType.BID;
  } else {
    orderType = OrderType.ASK;
  }
  return new FixedRateLoanOrder(orderType, currency, amount, dayPeriod, id, null, rate);
}
/**
 * Converts Bitfinex lend levels into floating-rate loan orders, keeping only
 * flash-return-rate entries (skips rows where frr is exactly "no").
 *
 * <p>NOTE(review): the skip test here is case-sensitive ({@code "no".equals}) while the
 * fixed-rate variant uses {@code equalsIgnoreCase} — preserved as-is; confirm intended.
 */
public static List<FloatingRateLoanOrder> adaptFloatingRateLoanOrders(
    BitfinexLendLevel[] orders, String currency, String orderType, String id) {
  final boolean isLoanBook = orderType.equalsIgnoreCase("loan");
  List<FloatingRateLoanOrder> loanOrders = new ArrayList<>(orders.length);
  for (BitfinexLendLevel order : orders) {
    if ("no".equals(order.getFrr())) {
      continue;
    }
    FloatingRateLoanOrder loanOrder =
        adaptFloatingRateLoanOrder(
            currency, order.getAmount(), order.getPeriod(), orderType, id, order.getRate());
    // Bid orderbook is reversed order. Insert at reversed indices
    if (isLoanBook) {
      loanOrders.add(0, loanOrder);
    } else {
      loanOrders.add(loanOrder);
    }
  }
  return loanOrders;
}
/**
 * Builds a single {@link FloatingRateLoanOrder}; direction "loan" maps to BID,
 * anything else to ASK.
 */
public static FloatingRateLoanOrder adaptFloatingRateLoanOrder(
    String currency,
    BigDecimal amount,
    int dayPeriod,
    String direction,
    String id,
    BigDecimal rate) {
  final OrderType orderType;
  if (direction.equalsIgnoreCase("loan")) {
    orderType = OrderType.BID;
  } else {
    orderType = OrderType.ASK;
  }
  return new FloatingRateLoanOrder(orderType, currency, amount, dayPeriod, id, null, rate);
}
/** Converts a single public Bitfinex trade; the API reports Unix seconds for the timestamp. */
public static Trade adaptTrade(BitfinexTrade trade, CurrencyPair currencyPair) {
  final OrderType side = trade.getType().equals("buy") ? OrderType.BID : OrderType.ASK;
  final Date date = DateUtils.fromMillisUtc(trade.getTimestamp() * 1000L); // Bitfinex uses Unix timestamps
  return new Trade(
      side,
      trade.getAmount(),
      currencyPair,
      trade.getPrice(),
      date,
      String.valueOf(trade.getTradeId()));
}
/**
 * Converts an array of public trades, remembering the highest trade id so callers
 * can page further results.
 */
public static Trades adaptTrades(BitfinexTrade[] trades, CurrencyPair currencyPair) {
  List<Trade> tradesList = new ArrayList<>(trades.length);
  long lastTradeId = 0;
  for (BitfinexTrade trade : trades) {
    lastTradeId = Math.max(lastTradeId, trade.getTradeId());
    tradesList.add(adaptTrade(trade, currencyPair));
  }
  return new Trades(tradesList, lastTradeId, TradeSortType.SortByID);
}
/** Converts a Bitfinex ticker; the API timestamp is in (fractional) seconds. */
public static Ticker adaptTicker(BitfinexTicker bitfinexTicker, CurrencyPair currencyPair) {
  Date timestamp = DateUtils.fromMillisUtc((long) (bitfinexTicker.getTimestamp() * 1000L));
  return new Ticker.Builder()
      .currencyPair(currencyPair)
      .last(bitfinexTicker.getLast_price())
      .bid(bitfinexTicker.getBid())
      .ask(bitfinexTicker.getAsk())
      .high(bitfinexTicker.getHigh())
      .low(bitfinexTicker.getLow())
      .volume(bitfinexTicker.getVolume())
      .timestamp(timestamp)
      .build();
}
/**
 * Aggregates Bitfinex balances into XChange wallets.
 *
 * <p>Each balance row carries a wallet type (e.g. exchange, trading, deposit) and a
 * currency; rows for the same wallet/currency are summed (index 0 = total, 1 = available).
 *
 * @param response the raw balances rows
 * @return one {@link Wallet} per balance type
 */
public static List<Wallet> adaptWallets(BitfinexBalancesResponse[] response) {
  // walletId -> (currency -> {total, available})
  Map<String, Map<String, BigDecimal[]>> walletsBalancesMap = new HashMap<>();
  // for each currency we have multiple balances types: exchange, trading, deposit.
  // each of those may be partially frozen/available
  for (BitfinexBalancesResponse balance : response) {
    Map<String, BigDecimal[]> balancesByCurrency =
        walletsBalancesMap.computeIfAbsent(balance.getType(), k -> new HashMap<>());
    String currencyName = adaptBitfinexCurrency(balance.getCurrency());
    // merge: first row creates the {total, available} pair, later rows are added in place
    balancesByCurrency.merge(
        currencyName,
        new BigDecimal[] {balance.getAmount(), balance.getAvailable()},
        (existing, incoming) -> {
          existing[0] = existing[0].add(incoming[0]);
          existing[1] = existing[1].add(incoming[1]);
          return existing;
        });
  }
  List<Wallet> wallets = new ArrayList<>();
  for (Entry<String, Map<String, BigDecimal[]>> walletData : walletsBalancesMap.entrySet()) {
    Map<String, BigDecimal[]> balancesByCurrency = walletData.getValue();
    List<Balance> balances = new ArrayList<>(balancesByCurrency.size());
    for (Entry<String, BigDecimal[]> entry : balancesByCurrency.entrySet()) {
      String currencyName = entry.getKey();
      BigDecimal[] balanceDetail = entry.getValue();
      balances.add(
          new Balance(Currency.getInstance(currencyName), balanceDetail[0], balanceDetail[1]));
    }
    wallets.add(new Wallet(walletData.getKey(), balances));
  }
  return wallets;
}
/**
 * Converts active Bitfinex orders into an {@link OpenOrders} result.
 *
 * <p>Limit-like orders go into the main list; stop orders are returned via the
 * second (hidden orders) collection. Unknown or market order types fall back to
 * a plain limit order with a warning.
 */
public static OpenOrders adaptOrders(BitfinexOrderStatusResponse[] activeOrders) {
  List<LimitOrder> limitOrders = new ArrayList<>();
  List<Order> hiddenOrders = new ArrayList<>();
  for (BitfinexOrderStatusResponse order : activeOrders) {
    OrderType orderType = order.getSide().equalsIgnoreCase("buy") ? OrderType.BID : OrderType.ASK;
    OrderStatus status = adaptOrderStatus(order);
    CurrencyPair currencyPair = adaptCurrencyPair(order.getSymbol());
    Date timestamp = convertBigDecimalTimestampToDate(order.getTimestamp());
    // lazily built variants; the switch below decides which one is materialized
    Supplier<LimitOrder> limitOrderCreator =
        () ->
            new LimitOrder(
                orderType,
                order.getOriginalAmount(),
                currencyPair,
                String.valueOf(order.getId()),
                timestamp,
                order.getPrice(),
                order.getAvgExecutionPrice(),
                order.getExecutedAmount(),
                null,
                status);
    Supplier<StopOrder> stopOrderCreator =
        () ->
            new StopOrder(
                orderType,
                order.getOriginalAmount(),
                currencyPair,
                String.valueOf(order.getId()),
                timestamp,
                order.getPrice(),
                null,
                order.getAvgExecutionPrice(),
                order.getExecutedAmount(),
                status);
    LimitOrder limitOrder = null;
    StopOrder stopOrder = null;
    // match the raw type string against the known Bitfinex order types
    Optional<BitfinexOrderType> bitfinexOrderType =
        Arrays.stream(BitfinexOrderType.values())
            .filter(v -> v.getValue().equals(order.getType()))
            .findFirst();
    if (bitfinexOrderType.isPresent()) {
      switch (bitfinexOrderType.get()) {
        case FILL_OR_KILL:
          limitOrder = limitOrderCreator.get();
          limitOrder.addOrderFlag(BitfinexOrderFlags.FILL_OR_KILL);
          break;
        case MARGIN_FILL_OR_KILL:
          limitOrder = limitOrderCreator.get();
          limitOrder.addOrderFlag(BitfinexOrderFlags.FILL_OR_KILL);
          limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN);
          break;
        case MARGIN_LIMIT:
          limitOrder = limitOrderCreator.get();
          limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN);
          break;
        case MARGIN_STOP:
          stopOrder = stopOrderCreator.get();
          stopOrder.addOrderFlag(BitfinexOrderFlags.STOP);
          stopOrder.addOrderFlag(BitfinexOrderFlags.MARGIN);
          break;
        case MARGIN_STOP_LIMIT:
          // v1 API gives no limit price for stop-limits -> degraded to stop-at-market (warn once)
          stopLimitWarning();
          stopOrder = stopOrderCreator.get();
          stopOrder.addOrderFlag(BitfinexOrderFlags.STOP);
          stopOrder.addOrderFlag(BitfinexOrderFlags.MARGIN);
          break;
        case MARGIN_TRAILING_STOP:
          limitOrder = limitOrderCreator.get();
          limitOrder.addOrderFlag(BitfinexOrderFlags.TRAILING_STOP);
          limitOrder.addOrderFlag(BitfinexOrderFlags.MARGIN);
          break;
        case STOP:
          stopOrder = stopOrderCreator.get();
          stopOrder.addOrderFlag(BitfinexOrderFlags.STOP);
          break;
        case STOP_LIMIT:
          stopLimitWarning();
          stopOrder = stopOrderCreator.get();
          stopOrder.addOrderFlag(BitfinexOrderFlags.STOP);
          break;
        case TRAILING_STOP:
          limitOrder = limitOrderCreator.get();
          limitOrder.addOrderFlag(BitfinexOrderFlags.TRAILING_STOP);
          break;
        case LIMIT:
          limitOrder = limitOrderCreator.get();
          break;
        case MARGIN_MARKET:
        case MARKET:
          log.warn("Unexpected market order on book. Defaulting to limit order");
          limitOrder = limitOrderCreator.get();
          break;
        default:
          log.warn(
              "Unhandled Bitfinex order type [{}]. Defaulting to limit order", order.getType());
          limitOrder = limitOrderCreator.get();
          break;
      }
    } else {
      log.warn("Unknown Bitfinex order type [{}]. Defaulting to limit order", order.getType());
      limitOrder = limitOrderCreator.get();
    }
    if (limitOrder != null) {
      limitOrders.add(limitOrder);
    } else if (stopOrder != null) {
      hiddenOrders.add(stopOrder);
    }
  }
  return new OpenOrders(limitOrders, hiddenOrders);
}
/**
 * Logs — at most once per JVM (guarded by {@code warnedStopLimit}) — that stop-limit
 * orders are reported as stop-at-market because the v1 API omits their limit price.
 */
private static void stopLimitWarning() {
  if (warnedStopLimit.compareAndSet(false, true)) {
    log.warn(
        "Found a stop-limit order. Bitfinex v1 API does not return limit prices for stop-limit "
            + "orders so these are returned as stop-at-market orders. This warning will only appear "
            + "once.");
  }
}
/**
 * Converts the authenticated trade history for one symbol into {@link UserTrades}.
 * Fees are negated (the API reports a positive fee amount).
 */
public static UserTrades adaptTradeHistory(BitfinexTradeResponse[] trades, String symbol) {
  final CurrencyPair currencyPair = adaptCurrencyPair(symbol);
  final List<UserTrade> pastTrades = new ArrayList<>(trades.length);
  for (BitfinexTradeResponse trade : trades) {
    final OrderType side = trade.getType().equalsIgnoreCase("buy") ? OrderType.BID : OrderType.ASK;
    final Date timestamp = convertBigDecimalTimestampToDate(trade.getTimestamp());
    final BigDecimal feeAmount = trade.getFeeAmount();
    final BigDecimal fee = feeAmount == null ? null : feeAmount.negate();
    pastTrades.add(
        new UserTrade(
            side,
            trade.getAmount(),
            currencyPair,
            trade.getPrice(),
            timestamp,
            trade.getTradeId(),
            trade.getOrderId(),
            fee,
            Currency.getInstance(trade.getFeeCurrency())));
  }
  return new UserTrades(pastTrades, TradeSortType.SortByTimestamp);
}
/** Converts a Unix timestamp in (possibly fractional) seconds to a {@link Date}. */
private static Date convertBigDecimalTimestampToDate(BigDecimal timestamp) {
  // shifting the decimal point three places right == multiplying by 1000
  return new Date(timestamp.movePointRight(3).longValue());
}
/**
 * Reconciles the static exchange metadata with the currently listed currency pairs:
 * removes pairs/currencies no longer listed and adds placeholders for new ones.
 * Mutates and returns the passed-in {@code metaData}.
 */
public static ExchangeMetaData adaptMetaData(
    List<CurrencyPair> currencyPairs, ExchangeMetaData metaData) {
  Map<CurrencyPair, CurrencyPairMetaData> pairsMap = metaData.getCurrencyPairs();
  Map<Currency, CurrencyMetaData> currenciesMap = metaData.getCurrencies();
  // drop pairs the exchange no longer lists
  pairsMap.keySet().retainAll(currencyPairs);
  // drop currencies that no listed pair references anymore
  Set<Currency> activeCurrencies =
      currencyPairs.stream()
          .flatMap(pair -> Stream.of(pair.base, pair.counter))
          .collect(Collectors.toSet());
  currenciesMap.keySet().retainAll(activeCurrencies);
  // add placeholders for newly listed pairs and currencies;
  // missing currencies get a default scale of 2 (Bitfinex's minimal scale)
  for (CurrencyPair pair : currencyPairs) {
    if (!pairsMap.containsKey(pair)) {
      pairsMap.put(pair, null);
    }
    if (!currenciesMap.containsKey(pair.base)) {
      currenciesMap.put(pair.base, new CurrencyMetaData(2, null));
    }
    if (!currenciesMap.containsKey(pair.counter)) {
      currenciesMap.put(pair.counter, new CurrencyMetaData(2, null));
    }
  }
  return metaData;
}
/**
* Flipped order of arguments to avoid type-erasure clash with {@link #adaptMetaData(List,
* ExchangeMetaData)}
*
* @param exchangeMetaData The exchange metadata provided from bitfinex.json.
* @param symbolDetails The symbol data fetched from Bitfinex.
* @return The combined result.
*/
public static ExchangeMetaData adaptMetaData(
ExchangeMetaData exchangeMetaData,
List<BitfinexSymbolDetail> symbolDetails,
Map<CurrencyPair, BigDecimal> lastPrices) {
final Map<CurrencyPair, CurrencyPairMetaData> currencyPairs =
exchangeMetaData.getCurrencyPairs();
symbolDetails
.parallelStream()
.forEach(
bitfinexSymbolDetail -> {
final CurrencyPair currencyPair = adaptCurrencyPair(bitfinexSymbolDetail.getPair());
// Infer price-scale from last and price-precision
BigDecimal last = lastPrices.get(currencyPair);
if (last != null) {
int pricePercision = bitfinexSymbolDetail.getPrice_precision();
int priceScale = last.scale() + (pricePercision - last.precision());
CurrencyPairMetaData newMetaData =
new CurrencyPairMetaData(
currencyPairs.get(currencyPair) == null
? null
: currencyPairs
.get(currencyPair)
.getTradingFee(), // Take tradingFee from static metaData if exists
bitfinexSymbolDetail.getMinimum_order_size(),
bitfinexSymbolDetail.getMaximum_order_size(),
priceScale,
null);
currencyPairs.put(currencyPair, newMetaData);
}
});
return exchangeMetaData;
}
/**
 * Merges Bitfinex withdrawal fees into the currency metadata, widening each currency's scale
 * to at least the scale of its withdrawal fee.
 *
 * @param accountFeesResponse the fee data fetched from Bitfinex
 * @param metaData the metadata to update in place
 * @return the same {@code metaData} instance, updated
 */
public static ExchangeMetaData adaptMetaData(
    BitfinexAccountFeesResponse accountFeesResponse, ExchangeMetaData metaData) {
  Map<Currency, CurrencyMetaData> currencies = metaData.getCurrencies();
  for (Map.Entry<Currency, BigDecimal> feeEntry : accountFeesResponse.getWithdraw().entrySet()) {
    Currency currency = feeEntry.getKey();
    BigDecimal withdrawalFee = feeEntry.getValue();
    CurrencyMetaData existing = currencies.get(currency);
    // Currency should have at least the scale of the withdrawalFee.
    int scale =
        existing == null
            ? withdrawalFee.scale()
            : Math.max(withdrawalFee.scale(), existing.getScale());
    currencies.put(currency, new CurrencyMetaData(scale, withdrawalFee));
  }
  return metaData;
}
/**
 * Applies the account's taker fee as the trading fee for every known currency pair.
 *
 * @param bitfinexAccountInfos account info; only the first entry's taker fee is used
 * @param exchangeMetaData the metadata to update in place
 * @return the same {@code exchangeMetaData} instance, updated
 */
public static ExchangeMetaData adaptMetaData(
    BitfinexAccountInfosResponse[] bitfinexAccountInfos, ExchangeMetaData exchangeMetaData) {
  final Map<CurrencyPair, CurrencyPairMetaData> currencyPairs =
      exchangeMetaData.getCurrencyPairs();
  // lets go with the assumption that the trading fees are common across all trading pairs for
  // now. also setting the taker_fee as the trading_fee for now.
  final CurrencyPairMetaData feeMetaData =
      new CurrencyPairMetaData(
          bitfinexAccountInfos[0].getTakerFees().movePointLeft(2), null, null, null, null);
  // Update sequentially: the previous keySet().parallelStream() + merge() mutated a
  // non-concurrent Map from multiple threads, which is a data race. replaceAll preserves
  // the merge() semantics: a null mapping is replaced by feeMetaData, otherwise the old
  // limits/scale/tiers are kept and only the trading fee is overwritten.
  currencyPairs.replaceAll(
      (currencyPair, oldMetaData) ->
          oldMetaData == null
              ? feeMetaData
              : new CurrencyPairMetaData(
                  feeMetaData.getTradingFee(),
                  oldMetaData.getMinimumAmount(),
                  oldMetaData.getMaximumAmount(),
                  oldMetaData.getPriceScale(),
                  oldMetaData.getFeeTiers()));
  return exchangeMetaData;
}
/**
 * Converts Bitfinex deposit/withdrawal history entries into {@link FundingRecord}s,
 * best-effort extracting the blockchain transaction id from the free-text description.
 */
public static List<FundingRecord> adaptFundingHistory(
    BitfinexDepositWithdrawalHistoryResponse[] bitfinexDepositWithdrawalHistoryResponses) {
  final List<FundingRecord> fundingRecords = new ArrayList<>();
  for (BitfinexDepositWithdrawalHistoryResponse historyEntry :
      bitfinexDepositWithdrawalHistoryResponses) {
    String address = historyEntry.getAddress();
    String description = historyEntry.getDescription();
    Currency currency = Currency.getInstance(historyEntry.getCurrency());
    FundingRecord.Status status =
        FundingRecord.Status.resolveStatus(historyEntry.getStatus());
    // "CANCELED" is a spelling mistake in the protocol; map it manually.
    if (status == null && historyEntry.getStatus().equalsIgnoreCase("CANCELED")) {
      status = FundingRecord.Status.CANCELLED;
    }
    String txnId = null;
    boolean cancelled = status != null && status.equals(FundingRecord.Status.CANCELLED);
    if (!cancelled) {
      /*
      sometimes the description looks like this (with the txn hash in it):
      "description":"a9d387cf5d9df58ff2ac4a338e0f050fd3857cf78d1dbca4f33619dc4ccdac82","address":"1Enx...
      and sometimes like this (with the address in it as well as the txn hash):
      "description":"3AXVnDapuRiAn73pjKe7gukLSx5813oFyn, txid: aa4057486d5f73747167beb9949a0dfe17b5fc630499a66af075abdaf4986987","address":"3AX...
      and sometimes when cancelled
      "description":"3LFVTLFZoDDzLCcLGDDQ7MNkk4YPe26Yva, expired","address":"3LFV...
      */
      String cleanedDescription =
          description.replace(",", "").replace("txid:", "").trim().toLowerCase();
      // Address will only be present for crypto payments. It will be null for all fiat payments
      if (address != null) {
        cleanedDescription = cleanedDescription.replace(address.toLowerCase(), "").trim();
      }
      // check its just some hex characters, and if so lets assume its the txn hash
      if (cleanedDescription.matches("^(0x)?[0-9a-f]+$")) {
        txnId = cleanedDescription;
      }
    }
    fundingRecords.add(
        new FundingRecord(
            address,
            historyEntry.getTimestamp(),
            currency,
            historyEntry.getAmount(),
            String.valueOf(historyEntry.getId()),
            txnId,
            historyEntry.getType(),
            status,
            null,
            null,
            description));
  }
  return fundingRecords;
}
/** Immutable holder pairing an order-book snapshot with the time it was fetched. */
public static class OrdersContainer {
  private final long timestamp;
  private final List<LimitOrder> limitOrders;

  /**
   * Constructor
   *
   * @param timestamp The timestamp for the data fetched.
   * @param limitOrders The orders.
   */
  public OrdersContainer(long timestamp, List<LimitOrder> limitOrders) {
    this.timestamp = timestamp;
    this.limitOrders = limitOrders;
  }

  /** @return the timestamp for when this snapshot was taken */
  public long getTimestamp() {
    return timestamp;
  }

  /** @return the orders contained in this snapshot */
  public List<LimitOrder> getLimitOrders() {
    return limitOrders;
  }
}
}
| |
/**
* Copyright 2010-present Facebook.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook;
import android.Manifest;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.text.TextUtils;
import android.webkit.CookieSyncManager;
import com.facebook.android.R;
import com.facebook.internal.AnalyticsEvents;
import com.facebook.internal.NativeProtocol;
import com.facebook.internal.ServerProtocol;
import com.facebook.internal.Utility;
import com.facebook.model.GraphUser;
import com.facebook.widget.WebDialog;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
class AuthorizationClient implements Serializable {
private static final long serialVersionUID = 1L;
private static final String TAG = "Facebook-AuthorizationClient";
// SharedPreferences store/key used by WebViewAuthHandler to remember the token that was
// active when Facebook cookies were last saved.
private static final String WEB_VIEW_AUTH_HANDLER_STORE =
"com.facebook.AuthorizationClient.WebViewAuthHandler.TOKEN_STORE_KEY";
private static final String WEB_VIEW_AUTH_HANDLER_TOKEN_KEY = "TOKEN";
// Constants for logging login-related data. Some of these are only used by Session, but grouped here for
// maintainability.
private static final String EVENT_NAME_LOGIN_METHOD_START = "fb_mobile_login_method_start";
private static final String EVENT_NAME_LOGIN_METHOD_COMPLETE = "fb_mobile_login_method_complete";
private static final String EVENT_PARAM_METHOD_RESULT_SKIPPED = "skipped";
static final String EVENT_NAME_LOGIN_START = "fb_mobile_login_start";
static final String EVENT_NAME_LOGIN_COMPLETE = "fb_mobile_login_complete";
// Note: to ensure stability of column mappings across the four different event types, we prepend a column
// index to each name, and we log all columns with all events, even if they are empty.
static final String EVENT_PARAM_AUTH_LOGGER_ID = "0_auth_logger_id";
static final String EVENT_PARAM_TIMESTAMP = "1_timestamp_ms";
static final String EVENT_PARAM_LOGIN_RESULT = "2_result";
static final String EVENT_PARAM_METHOD = "3_method";
static final String EVENT_PARAM_ERROR_CODE = "4_error_code";
static final String EVENT_PARAM_ERROR_MESSAGE = "5_error_message";
static final String EVENT_PARAM_EXTRAS = "6_extras";
static final String EVENT_EXTRAS_TRY_LOGIN_ACTIVITY = "try_login_activity";
static final String EVENT_EXTRAS_TRY_LEGACY = "try_legacy";
static final String EVENT_EXTRAS_LOGIN_BEHAVIOR = "login_behavior";
static final String EVENT_EXTRAS_REQUEST_CODE = "request_code";
static final String EVENT_EXTRAS_IS_LEGACY = "is_legacy";
static final String EVENT_EXTRAS_PERMISSIONS = "permissions";
static final String EVENT_EXTRAS_DEFAULT_AUDIENCE = "default_audience";
static final String EVENT_EXTRAS_MISSING_INTERNET_PERMISSION = "no_internet_permission";
static final String EVENT_EXTRAS_NOT_TRIED = "not_tried";
static final String EVENT_EXTRAS_NEW_PERMISSIONS = "new_permissions";
// Ordered list of handlers still to attempt, and the one currently active.
List<AuthHandler> handlersToTry;
AuthHandler currentHandler;
// transient fields are re-bound after deserialization via setContext(...)/setters.
transient Context context;
transient StartActivityDelegate startActivityDelegate;
transient OnCompletedListener onCompletedListener;
transient BackgroundProcessingListener backgroundProcessingListener;
transient boolean checkedInternetPermission;
// State of the authorization in flight; cleared in complete().
AuthorizationRequest pendingRequest;
Map<String, String> loggingExtras;
private transient AppEventsLogger appEventsLogger;
// Receives the terminal Result when an authorization flow finishes.
interface OnCompletedListener {
void onCompleted(Result result);
}
// Observes async (validation) work so callers can show/hide progress UI.
interface BackgroundProcessingListener {
void onBackgroundProcessingStarted();
void onBackgroundProcessingStopped();
}
// Abstraction over how auth activities are started (direct Activity vs. request-supplied).
interface StartActivityDelegate {
public void startActivityForResult(Intent intent, int requestCode);
public Activity getActivityContext();
}
/** Attaches a bare (non-Activity) context; activity launches then come from the request. */
void setContext(final Context context) {
  this.context = context;
  // No activity here, so each request must supply its own start-activity delegate.
  startActivityDelegate = null;
}
/**
 * Attaches an activity context; that activity is then always used for
 * {@code startActivityForResult} calls.
 */
void setContext(final Activity activity) {
  this.context = activity;
  startActivityDelegate = new StartActivityDelegate() {
    @Override
    public Activity getActivityContext() {
      return activity;
    }

    @Override
    public void startActivityForResult(Intent intent, int requestCode) {
      activity.startActivityForResult(intent, requestCode);
    }
  };
}
/** Resumes an in-flight authorization if one exists; otherwise starts a new one. */
void startOrContinueAuth(AuthorizationRequest request) {
  if (!getInProgress()) {
    authorize(request);
  } else {
    continueAuth();
  }
}
/**
 * Begins a fresh authorization for the given request (no-op for a null request).
 *
 * @throws FacebookException if another request is already pending
 */
void authorize(AuthorizationRequest request) {
  if (request == null) {
    return;
  }
  if (pendingRequest != null) {
    throw new FacebookException("Attempted to authorize while a request is pending.");
  }
  // Re-auth requires a validation round-trip later; fail fast if INTERNET is missing.
  if (request.needsNewTokenValidation() && !checkInternetPermission()) {
    return;
  }
  pendingRequest = request;
  handlersToTry = getHandlerTypes(request);
  tryNextHandler();
}
/** Resumes the current handler after a context change, restarting it when required. */
void continueAuth() {
  if (pendingRequest == null || currentHandler == null) {
    throw new FacebookException("Attempted to continue authorization without a pending request.");
  }
  if (currentHandler.needsRestart()) {
    // Handlers that depend on the (recreated) context must be cancelled and retried.
    currentHandler.cancel();
    tryCurrentHandler();
  }
}
/** @return true while a request is pending and a handler is actively working on it */
boolean getInProgress() {
  return (pendingRequest != null) && (currentHandler != null);
}
/** Cancels whichever handler is currently active, if any. */
void cancelCurrentHandler() {
  if (currentHandler != null) {
    currentHandler.cancel();
  }
}
/**
 * Routes an activity result to the active handler.
 *
 * @return true if the result was consumed by the current handler
 */
boolean onActivityResult(int requestCode, int resultCode, Intent data) {
  // Guard against results delivered after the flow completed (or before it started):
  // the original dereferenced pendingRequest/currentHandler unconditionally and could NPE
  // on a stale or unexpected activity result.
  if (pendingRequest != null
      && currentHandler != null
      && requestCode == pendingRequest.getRequestCode()) {
    return currentHandler.onActivityResult(requestCode, resultCode, data);
  }
  return false;
}
/** Builds the ordered list of auth handlers permitted by the request's login behavior. */
private List<AuthHandler> getHandlerTypes(AuthorizationRequest request) {
  SessionLoginBehavior behavior = request.getLoginBehavior();
  ArrayList<AuthHandler> handlers = new ArrayList<AuthHandler>();
  if (behavior.allowsKatanaAuth()) {
    // Legacy requests skip the silent token fetch and go straight to proxy auth.
    if (!request.isLegacy()) {
      handlers.add(new GetTokenAuthHandler());
    }
    handlers.add(new KatanaProxyAuthHandler());
  }
  if (behavior.allowsWebViewAuth()) {
    handlers.add(new WebViewAuthHandler());
  }
  return handlers;
}
/**
 * Verifies the INTERNET permission once per client; on failure the whole flow is
 * completed with an error result.
 */
boolean checkInternetPermission() {
  if (checkedInternetPermission) {
    return true;
  }
  if (checkPermission(Manifest.permission.INTERNET) == PackageManager.PERMISSION_GRANTED) {
    checkedInternetPermission = true;
    return true;
  }
  String errorType = context.getString(R.string.com_facebook_internet_permission_error_title);
  String errorDescription =
      context.getString(R.string.com_facebook_internet_permission_error_message);
  complete(Result.createErrorResult(pendingRequest, errorType, errorDescription));
  return false;
}
/** Advances to the next handler in the list, logging the previous one as skipped. */
void tryNextHandler() {
  if (currentHandler != null) {
    logAuthorizationMethodComplete(
        currentHandler.getNameForLogging(),
        EVENT_PARAM_METHOD_RESULT_SKIPPED,
        null,
        null,
        currentHandler.methodLoggingExtras);
  }
  while (handlersToTry != null && !handlersToTry.isEmpty()) {
    currentHandler = handlersToTry.remove(0);
    if (tryCurrentHandler()) {
      return;
    }
  }
  // Every handler was exhausted without even starting an auth attempt.
  if (pendingRequest != null) {
    completeWithFailure();
  }
}
/** Finishes the flow with a generic error when no handler could start an attempt. */
private void completeWithFailure() {
  complete(Result.createErrorResult(pendingRequest, "Login attempt failed.", null));
}
/**
 * Records a key/value pair for aggregate-level logging; when {@code accumulate} is set,
 * the new value is appended (comma-separated) to any existing one.
 */
private void addLoggingExtra(String key, String value, boolean accumulate) {
  if (loggingExtras == null) {
    loggingExtras = new HashMap<String, String>();
  }
  if (accumulate && loggingExtras.containsKey(key)) {
    value = loggingExtras.get(key) + "," + value;
  }
  loggingExtras.put(key, value);
}
/** Attempts the current handler; returns whether it actually started an auth attempt. */
boolean tryCurrentHandler() {
  if (currentHandler.needsInternetPermission() && !checkInternetPermission()) {
    addLoggingExtra(
        EVENT_EXTRAS_MISSING_INTERNET_PERMISSION,
        AppEventsConstants.EVENT_PARAM_VALUE_YES,
        false);
    return false;
  }
  boolean tried = currentHandler.tryAuthorize(pendingRequest);
  if (tried) {
    logAuthorizationMethodStart(currentHandler.getNameForLogging());
  } else {
    // A skipped handler produces no completion callback, so record the skip here.
    addLoggingExtra(EVENT_EXTRAS_NOT_TRIED, currentHandler.getNameForLogging(), true);
  }
  return tried;
}
/**
 * Finishes the flow; on re-auth, the new token is first validated as belonging to the
 * same Facebook user before the listener is notified.
 */
void completeAndValidate(Result outcome) {
  boolean needsValidation = outcome.token != null && pendingRequest.needsNewTokenValidation();
  if (needsValidation) {
    validateSameFbidAndFinish(outcome);
  } else {
    // We're done, just notify the listener.
    complete(outcome);
  }
}
/** Logs the outcome, tears down all per-request state, and notifies the listener. */
void complete(Result outcome) {
  // currentHandler can be null if no handler was ever successfully tried
  // (that case was already logged when the handler list was exhausted).
  if (currentHandler != null) {
    logAuthorizationMethodComplete(
        currentHandler.getNameForLogging(), outcome, currentHandler.methodLoggingExtras);
  }
  // Hand accumulated extras back to the caller for aggregate-level logging.
  if (loggingExtras != null) {
    outcome.loggingExtras = loggingExtras;
  }
  handlersToTry = null;
  currentHandler = null;
  pendingRequest = null;
  loggingExtras = null;
  notifyOnCompleteListener(outcome);
}
// Listener invoked with the terminal Result when the flow finishes.
OnCompletedListener getOnCompletedListener() {
return onCompletedListener;
}
void setOnCompletedListener(OnCompletedListener onCompletedListener) {
this.onCompletedListener = onCompletedListener;
}
// Listener for background (validation) work start/stop notifications.
BackgroundProcessingListener getBackgroundProcessingListener() {
return backgroundProcessingListener;
}
void setBackgroundProcessingListener(BackgroundProcessingListener backgroundProcessingListener) {
this.backgroundProcessingListener = backgroundProcessingListener;
}
// Returns the delegate used to launch auth activities: the explicit Activity-backed one
// (set via setContext(Activity)) when available, otherwise a wrapper around the pending
// request's own delegate; null when neither exists.
StartActivityDelegate getStartActivityDelegate() {
if (startActivityDelegate != null) {
return startActivityDelegate;
} else if (pendingRequest != null) {
// Wrap the request's delegate in our own.
return new StartActivityDelegate() {
@Override
public void startActivityForResult(Intent intent, int requestCode) {
pendingRequest.getStartActivityDelegate().startActivityForResult(intent, requestCode);
}
@Override
public Activity getActivityContext() {
return pendingRequest.getStartActivityDelegate().getActivityContext();
}
};
}
return null;
}
/** Indirection over the context's permission check (package-visible for overriding). */
int checkPermission(String permission) {
  return context.checkCallingOrSelfPermission(permission);
}
/**
 * Kicks off the async re-auth validation batch; {@link #complete} is invoked from the
 * batch's completion callback.
 */
void validateSameFbidAndFinish(Result pendingResult) {
  if (pendingResult.token == null) {
    throw new FacebookException("Can't validate without a token");
  }
  RequestBatch validationBatch = createReauthValidationBatch(pendingResult);
  notifyBackgroundProcessingStart();
  validationBatch.executeAsync();
}
// Builds a three-request batch that verifies the re-auth token belongs to the same
// Facebook user (fbid) as the previous token; complete() is invoked from the batch
// callback with either a token result or a "different user" error.
RequestBatch createReauthValidationBatch(final Result pendingResult) {
// We need to ensure that the token we got represents the same fbid as the old one. We issue
// a "me" request using the current token, a "me" request using the new token, and a "me/permissions"
// request using the current token to get the permissions of the user.
final ArrayList<String> fbids = new ArrayList<String>();
final ArrayList<String> grantedPermissions = new ArrayList<String>();
final ArrayList<String> declinedPermissions = new ArrayList<String>();
final String newToken = pendingResult.token.getToken();
// Shared callback: collects the fbid from each "me" response (old token and new token).
Request.Callback meCallback = new Request.Callback() {
@Override
public void onCompleted(Response response) {
try {
GraphUser user = response.getGraphObjectAs(GraphUser.class);
if (user != null) {
fbids.add(user.getId());
}
} catch (Exception ex) {
// Best-effort: a malformed response leaves fbids short, which fails the
// same-user check in the batch callback below.
}
}
};
String validateSameFbidAsToken = pendingRequest.getPreviousAccessToken();
Request requestCurrentTokenMe = createGetProfileIdRequest(validateSameFbidAsToken);
requestCurrentTokenMe.setCallback(meCallback);
Request requestNewTokenMe = createGetProfileIdRequest(newToken);
requestNewTokenMe.setCallback(meCallback);
Request requestCurrentTokenPermissions = createGetPermissionsRequest(validateSameFbidAsToken);
requestCurrentTokenPermissions.setCallback(new Request.Callback() {
@Override
public void onCompleted(Response response) {
try {
Session.PermissionsPair permissionsPair = Session.handlePermissionResponse(response);
if (permissionsPair != null) {
grantedPermissions.addAll(permissionsPair.getGrantedPermissions());
declinedPermissions.addAll(permissionsPair.getDeclinedPermissions());
}
} catch (Exception ex) {
// Best-effort: a failed permissions fetch just refreshes the token without them.
}
}
});
RequestBatch batch = new RequestBatch(requestCurrentTokenMe, requestNewTokenMe,
requestCurrentTokenPermissions);
batch.setBatchApplicationId(pendingRequest.getApplicationId());
batch.addCallback(new RequestBatch.Callback() {
@Override
public void onBatchCompleted(RequestBatch batch) {
try {
Result result = null;
// Same user only if both "me" calls returned a non-null, equal fbid.
if (fbids.size() == 2 && fbids.get(0) != null && fbids.get(1) != null &&
fbids.get(0).equals(fbids.get(1))) {
// Modify the token to have the right permission set.
AccessToken tokenWithPermissions = AccessToken
.createFromTokenWithRefreshedPermissions(pendingResult.token,
grantedPermissions, declinedPermissions);
result = Result.createTokenResult(pendingRequest, tokenWithPermissions);
} else {
result = Result
.createErrorResult(pendingRequest, "User logged in as different Facebook user.", null);
}
complete(result);
} catch (Exception ex) {
complete(Result.createErrorResult(pendingRequest, "Caught exception", ex.getMessage()));
} finally {
// Always balance the notifyBackgroundProcessingStart() done by the caller.
notifyBackgroundProcessingStop();
}
}
});
return batch;
}
/** Builds a GET "me/permissions" request authenticated with the given token. */
Request createGetPermissionsRequest(String accessToken) {
  Bundle requestParameters = new Bundle();
  requestParameters.putString("access_token", accessToken);
  return new Request(null, "me/permissions", requestParameters, HttpMethod.GET, null);
}
/** Builds a GET "me" request that fetches only the user id, using the given token. */
Request createGetProfileIdRequest(String accessToken) {
  Bundle requestParameters = new Bundle();
  requestParameters.putString("fields", "id");
  requestParameters.putString("access_token", accessToken);
  return new Request(null, "me", requestParameters, HttpMethod.GET, null);
}
/** Lazily (re)creates the analytics logger so it always matches the pending request's app id. */
private AppEventsLogger getAppEventsLogger() {
  String applicationId = pendingRequest.getApplicationId();
  if (appEventsLogger == null || !appEventsLogger.getApplicationId().equals(applicationId)) {
    appEventsLogger = AppEventsLogger.newLogger(context, applicationId);
  }
  return appEventsLogger;
}
// Forwards the terminal outcome to the registered completion listener, if any.
private void notifyOnCompleteListener(Result outcome) {
if (onCompletedListener != null) {
onCompletedListener.onCompleted(outcome);
}
}
// Signals that async (background) validation work has begun.
private void notifyBackgroundProcessingStart() {
if (backgroundProcessingListener != null) {
backgroundProcessingListener.onBackgroundProcessingStarted();
}
}
// Signals that async (background) validation work has finished.
private void notifyBackgroundProcessingStop() {
if (backgroundProcessingListener != null) {
backgroundProcessingListener.onBackgroundProcessingStopped();
}
}
/** Logs the start of a single login-method attempt. */
private void logAuthorizationMethodStart(String method) {
  Bundle bundle = newAuthorizationLoggingBundle(pendingRequest.getAuthId());
  // Stamp the actual start time (the bundle factory pre-fills a creation time).
  bundle.putLong(EVENT_PARAM_TIMESTAMP, System.currentTimeMillis());
  bundle.putString(EVENT_PARAM_METHOD, method);
  getAppEventsLogger().logSdkEvent(EVENT_NAME_LOGIN_METHOD_START, null, bundle);
}
/** Convenience overload that unpacks a {@link Result} into the detailed logging call. */
private void logAuthorizationMethodComplete(String method, Result result, Map<String, String> loggingExtras) {
  logAuthorizationMethodComplete(
      method,
      result.code.getLoggingValue(),
      result.errorMessage,
      result.errorCode,
      loggingExtras);
}
/** Logs completion of one login method, tolerating a (unexpected) missing pending request. */
private void logAuthorizationMethodComplete(String method, String result, String errorMessage, String errorCode,
    Map<String, String> loggingExtras) {
  Bundle bundle;
  if (pendingRequest == null) {
    // Not expected to happen; emit a diagnostic event rather than dropping the data.
    bundle = newAuthorizationLoggingBundle("");
    bundle.putString(EVENT_PARAM_LOGIN_RESULT, Result.Code.ERROR.getLoggingValue());
    bundle.putString(EVENT_PARAM_ERROR_MESSAGE,
        "Unexpected call to logAuthorizationMethodComplete with null pendingRequest.");
  } else {
    bundle = newAuthorizationLoggingBundle(pendingRequest.getAuthId());
    if (result != null) {
      bundle.putString(EVENT_PARAM_LOGIN_RESULT, result);
    }
    if (errorMessage != null) {
      bundle.putString(EVENT_PARAM_ERROR_MESSAGE, errorMessage);
    }
    if (errorCode != null) {
      bundle.putString(EVENT_PARAM_ERROR_CODE, errorCode);
    }
    if (loggingExtras != null && !loggingExtras.isEmpty()) {
      bundle.putString(EVENT_PARAM_EXTRAS, new JSONObject(loggingExtras).toString());
    }
  }
  bundle.putString(EVENT_PARAM_METHOD, method);
  bundle.putLong(EVENT_PARAM_TIMESTAMP, System.currentTimeMillis());
  getAppEventsLogger().logSdkEvent(EVENT_NAME_LOGIN_METHOD_COMPLETE, null, bundle);
}
/**
 * Creates a logging bundle pre-populated with every column so column mappings stay
 * stable across the different login event types.
 */
static Bundle newAuthorizationLoggingBundle(String authLoggerId) {
  Bundle bundle = new Bundle();
  bundle.putLong(EVENT_PARAM_TIMESTAMP, System.currentTimeMillis());
  bundle.putString(EVENT_PARAM_AUTH_LOGGER_ID, authLoggerId);
  // Empty defaults: all columns are logged with all events, even when unused.
  bundle.putString(EVENT_PARAM_METHOD, "");
  bundle.putString(EVENT_PARAM_LOGIN_RESULT, "");
  bundle.putString(EVENT_PARAM_ERROR_MESSAGE, "");
  bundle.putString(EVENT_PARAM_ERROR_CODE, "");
  bundle.putString(EVENT_PARAM_EXTRAS, "");
  return bundle;
}
/** Base class for a single login strategy; subclasses name themselves and attempt auth. */
abstract class AuthHandler implements Serializable {
  private static final long serialVersionUID = 1L;

  // Per-method logging extras, reported when the method completes or is skipped.
  Map<String, String> methodLoggingExtras;

  /** Attempts this login method; returns true only if an attempt was actually started. */
  abstract boolean tryAuthorize(AuthorizationRequest request);

  /** Short method name used in analytics events. */
  abstract String getNameForLogging();

  boolean onActivityResult(int requestCode, int resultCode, Intent data) {
    return false;
  }

  /** Whether this handler must be restarted after its hosting context is recreated. */
  boolean needsRestart() {
    return false;
  }

  boolean needsInternetPermission() {
    return false;
  }

  void cancel() {
  }

  /** Records a key/value pair in this method's logging extras (null values preserved). */
  protected void addLoggingExtra(String key, Object value) {
    if (methodLoggingExtras == null) {
      methodLoggingExtras = new HashMap<String, String>();
    }
    methodLoggingExtras.put(key, value == null ? null : value.toString());
  }
}
// WebView-based login: presents the OAuth dialog in-process. Requires INTERNET permission
// and must be restarted if the hosting context is recreated.
class WebViewAuthHandler extends AuthHandler {
private static final long serialVersionUID = 1L;
private transient WebDialog loginDialog;
private String applicationId;
private String e2e;
@Override
String getNameForLogging() {
return "web_view";
}
@Override
boolean needsRestart() {
// Because we are presenting WebView UI within the current context, we need to explicitly
// restart the process if the context goes away and is recreated.
return true;
}
@Override
boolean needsInternetPermission() {
return true;
}
@Override
void cancel() {
if (loginDialog != null) {
loginDialog.dismiss();
loginDialog = null;
}
}
// Builds the dialog parameters (scope, audience, optional cached token) and shows the
// login dialog; always returns true since showing the dialog counts as an attempt.
@Override
boolean tryAuthorize(final AuthorizationRequest request) {
applicationId = request.getApplicationId();
Bundle parameters = new Bundle();
if (!Utility.isNullOrEmpty(request.getPermissions())) {
String scope = TextUtils.join(",", request.getPermissions());
parameters.putString(ServerProtocol.DIALOG_PARAM_SCOPE, scope);
addLoggingExtra(ServerProtocol.DIALOG_PARAM_SCOPE, scope);
}
SessionDefaultAudience audience = request.getDefaultAudience();
parameters.putString(ServerProtocol.DIALOG_PARAM_DEFAULT_AUDIENCE, audience.getNativeProtocolAudience());
String previousToken = request.getPreviousAccessToken();
// Reuse the previous token only when it matches the token saved alongside the cookies;
// otherwise clear Facebook cookies so the dialog cannot silently reuse a stale session.
if (!Utility.isNullOrEmpty(previousToken) && (previousToken.equals(loadCookieToken()))) {
parameters.putString(ServerProtocol.DIALOG_PARAM_ACCESS_TOKEN, previousToken);
// Don't log the actual access token, just its presence or absence.
addLoggingExtra(ServerProtocol.DIALOG_PARAM_ACCESS_TOKEN, AppEventsConstants.EVENT_PARAM_VALUE_YES);
} else {
// The call to clear cookies will create the first instance of CookieSyncManager if necessary
Utility.clearFacebookCookies(context);
addLoggingExtra(ServerProtocol.DIALOG_PARAM_ACCESS_TOKEN, AppEventsConstants.EVENT_PARAM_VALUE_NO);
}
WebDialog.OnCompleteListener listener = new WebDialog.OnCompleteListener() {
@Override
public void onComplete(Bundle values, FacebookException error) {
onWebDialogComplete(request, values, error);
}
};
e2e = getE2E();
addLoggingExtra(ServerProtocol.DIALOG_PARAM_E2E, e2e);
WebDialog.Builder builder =
new AuthDialogBuilder(getStartActivityDelegate().getActivityContext(), applicationId, parameters)
.setE2E(e2e)
.setIsRerequest(request.isRerequest())
.setOnCompleteListener(listener);
loginDialog = builder.build();
loginDialog.show();
return true;
}
// Translates the dialog outcome (token bundle, user cancellation, or error) into a
// Result and finishes the flow via completeAndValidate().
void onWebDialogComplete(AuthorizationRequest request, Bundle values,
FacebookException error) {
Result outcome;
if (values != null) {
// Actual e2e we got from the dialog should be used for logging.
if (values.containsKey(ServerProtocol.DIALOG_PARAM_E2E)) {
e2e = values.getString(ServerProtocol.DIALOG_PARAM_E2E);
}
AccessToken token = AccessToken
.createFromWebBundle(request.getPermissions(), values, AccessTokenSource.WEB_VIEW);
outcome = Result.createTokenResult(pendingRequest, token);
// Ensure any cookies set by the dialog are saved
// This is to work around a bug where CookieManager may fail to instantiate if CookieSyncManager
// has never been created.
CookieSyncManager syncManager = CookieSyncManager.createInstance(context);
syncManager.sync();
saveCookieToken(token.getToken());
} else {
if (error instanceof FacebookOperationCanceledException) {
outcome = Result.createCancelResult(pendingRequest, "User canceled log in.");
} else {
// Something went wrong, don't log a completion event since it will skew timing results.
e2e = null;
String errorCode = null;
String errorMessage = error.getMessage();
if (error instanceof FacebookServiceException) {
FacebookRequestError requestError = ((FacebookServiceException)error).getRequestError();
errorCode = String.format("%d", requestError.getErrorCode());
errorMessage = requestError.toString();
}
outcome = Result.createErrorResult(pendingRequest, null, errorMessage, errorCode);
}
}
if (!Utility.isNullOrEmpty(e2e)) {
logWebLoginCompleted(applicationId, e2e);
}
completeAndValidate(outcome);
}
// Persists the token associated with the Facebook cookies just written by the dialog.
private void saveCookieToken(String token) {
Context context = getStartActivityDelegate().getActivityContext();
SharedPreferences sharedPreferences = context.getSharedPreferences(
WEB_VIEW_AUTH_HANDLER_STORE,
Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPreferences.edit();
editor.putString(WEB_VIEW_AUTH_HANDLER_TOKEN_KEY, token);
if (!editor.commit()) {
Utility.logd(TAG, "Could not update saved web view auth handler token.");
}
}
// Loads the token previously saved with the cookies; empty string if none was stored.
private String loadCookieToken() {
Context context = getStartActivityDelegate().getActivityContext();
SharedPreferences sharedPreferences = context.getSharedPreferences(
WEB_VIEW_AUTH_HANDLER_STORE,
Context.MODE_PRIVATE);
return sharedPreferences.getString(WEB_VIEW_AUTH_HANDLER_TOKEN_KEY, "");
}
}
/**
 * Silent login: asks the installed Facebook app's token service for an existing token.
 * If the returned token lacks some requested permissions, the request is narrowed to the
 * missing ones and the next handler is tried.
 */
class GetTokenAuthHandler extends AuthHandler {
  private static final long serialVersionUID = 1L;
  private transient GetTokenClient getTokenClient;

  @Override
  String getNameForLogging() {
    return "get_token";
  }

  @Override
  void cancel() {
    if (getTokenClient != null) {
      getTokenClient.cancel();
      getTokenClient = null;
    }
  }

  @Override
  boolean needsRestart() {
    // getTokenClient is transient: it is null after an orientation change, in which case
    // the handler must be recreated, so return true to indicate we need a restart.
    return getTokenClient == null;
  }

  @Override // was missing: this overrides AuthHandler.tryAuthorize
  boolean tryAuthorize(final AuthorizationRequest request) {
    getTokenClient = new GetTokenClient(context, request.getApplicationId());
    if (!getTokenClient.start()) {
      return false;
    }
    notifyBackgroundProcessingStart();
    GetTokenClient.CompletedListener callback = new GetTokenClient.CompletedListener() {
      @Override
      public void completed(Bundle result) {
        getTokenCompleted(request, result);
      }
    };
    getTokenClient.setCompletedListener(callback);
    return true;
  }

  /** Handles the token-service response; completes on success, else falls through. */
  void getTokenCompleted(AuthorizationRequest request, Bundle result) {
    getTokenClient = null;
    notifyBackgroundProcessingStop();
    if (result != null) {
      ArrayList<String> currentPermissions =
          result.getStringArrayList(NativeProtocol.EXTRA_PERMISSIONS);
      List<String> permissions = request.getPermissions();
      if ((currentPermissions != null)
          && ((permissions == null) || currentPermissions.containsAll(permissions))) {
        // We got all the permissions we needed, so we can complete the auth now.
        AccessToken token = AccessToken
            .createFromNativeLogin(result, AccessTokenSource.FACEBOOK_APPLICATION_SERVICE);
        completeAndValidate(Result.createTokenResult(pendingRequest, token));
        return;
      }
      // We didn't get all the permissions we wanted; update the request with just the
      // permissions still missing. Null-guard both lists: the original NPE'd on this path
      // when the service returned no EXTRA_PERMISSIONS (currentPermissions == null).
      List<String> newPermissions = new ArrayList<String>();
      if (permissions != null) {
        for (String permission : permissions) {
          if (currentPermissions == null || !currentPermissions.contains(permission)) {
            newPermissions.add(permission);
          }
        }
      }
      if (!newPermissions.isEmpty()) {
        addLoggingExtra(EVENT_EXTRAS_NEW_PERMISSIONS, TextUtils.join(",", newPermissions));
      }
      request.setPermissions(newPermissions);
    }
    tryNextHandler();
  }
}
/** Shared base for handlers that launch the Facebook (Katana) app via an intent. */
abstract class KatanaAuthHandler extends AuthHandler {
  private static final long serialVersionUID = 1L;

  /** Launches the intent for a result; false when the intent is null or cannot start. */
  protected boolean tryIntent(Intent intent, int requestCode) {
    if (intent == null) {
      return false;
    }
    try {
      getStartActivityDelegate().startActivityForResult(intent, requestCode);
      return true;
    } catch (ActivityNotFoundException e) {
      // The intent was validated before now, so this should not occur; if it does,
      // report "not tried" so the next handler gets a chance.
      return false;
    }
  }
}
// Login proxied through the installed Facebook (Katana) app via an intent round-trip.
class KatanaProxyAuthHandler extends KatanaAuthHandler {
private static final long serialVersionUID = 1L;
private String applicationId;
@Override
String getNameForLogging() {
return "katana_proxy_auth";
}
@Override
boolean tryAuthorize(AuthorizationRequest request) {
applicationId = request.getApplicationId();
String e2e = getE2E();
Intent intent = NativeProtocol.createProxyAuthIntent(context, request.getApplicationId(),
request.getPermissions(), e2e, request.isRerequest(), request.getDefaultAudience());
addLoggingExtra(ServerProtocol.DIALOG_PARAM_E2E, e2e);
return tryIntent(intent, request.getRequestCode());
}
// Maps the proxy-auth activity result to a Result; a null outcome from handleResultOk
// means proxy auth is disabled server-side, so the next handler is tried instead.
@Override
boolean onActivityResult(int requestCode, int resultCode, Intent data) {
// Handle stuff
Result outcome;
if (data == null) {
// This happens if the user presses 'Back'.
outcome = Result.createCancelResult(pendingRequest, "Operation canceled");
} else if (resultCode == Activity.RESULT_CANCELED) {
outcome = Result.createCancelResult(pendingRequest, data.getStringExtra("error"));
} else if (resultCode != Activity.RESULT_OK) {
outcome = Result.createErrorResult(pendingRequest, "Unexpected resultCode from authorization.", null);
} else {
outcome = handleResultOk(data);
}
if (outcome != null) {
completeAndValidate(outcome);
} else {
tryNextHandler();
}
return true;
}
// Parses a RESULT_OK intent: error fields have two possible keys each (old/new protocol
// variants); returns null only when the server reports proxy auth disabled.
private Result handleResultOk(Intent data) {
Bundle extras = data.getExtras();
String error = extras.getString("error");
if (error == null) {
error = extras.getString("error_type");
}
String errorCode = extras.getString("error_code");
String errorMessage = extras.getString("error_message");
if (errorMessage == null) {
errorMessage = extras.getString("error_description");
}
String e2e = extras.getString(NativeProtocol.FACEBOOK_PROXY_AUTH_E2E_KEY);
if (!Utility.isNullOrEmpty(e2e)) {
logWebLoginCompleted(applicationId, e2e);
}
if (error == null && errorCode == null && errorMessage == null) {
// No error fields at all: success — build the token from the returned extras.
AccessToken token = AccessToken.createFromWebBundle(pendingRequest.getPermissions(), extras,
AccessTokenSource.FACEBOOK_APPLICATION_WEB);
return Result.createTokenResult(pendingRequest, token);
} else if (ServerProtocol.errorsProxyAuthDisabled.contains(error)) {
return null;
} else if (ServerProtocol.errorsUserCanceled.contains(error)) {
return Result.createCancelResult(pendingRequest, null);
} else {
return Result.createErrorResult(pendingRequest, error, errorMessage, errorCode);
}
}
}
/**
 * Builds the end-to-end tracking token: a small JSON object recording
 * the client-side start time of the login flow.
 */
private static String getE2E() {
    final JSONObject payload = new JSONObject();
    try {
        payload.put("init", System.currentTimeMillis());
    } catch (JSONException ignored) {
        // put(String, long) cannot fail for a non-null key; nothing sensible to do here.
    }
    return payload.toString();
}
/**
 * Records the completion of a web-based login, pairing the original e2e
 * token with the switch-back timestamp so the round trip can be correlated.
 *
 * @param applicationId the Facebook application id to log against
 * @param e2e           the end-to-end token originally sent with the request
 */
private void logWebLoginCompleted(String applicationId, String e2e) {
    final AppEventsLogger logger = AppEventsLogger.newLogger(context, applicationId);
    final Bundle params = new Bundle();
    params.putString(AnalyticsEvents.PARAMETER_WEB_LOGIN_E2E, e2e);
    params.putLong(AnalyticsEvents.PARAMETER_WEB_LOGIN_SWITCHBACK_TIME, System.currentTimeMillis());
    params.putString(AnalyticsEvents.PARAMETER_APP_ID, applicationId);
    logger.logSdkEvent(AnalyticsEvents.EVENT_WEB_LOGIN_COMPLETE, null, params);
}
/**
 * Builder for the OAuth {@link WebDialog} used by the web-view login fallback.
 */
static class AuthDialogBuilder extends WebDialog.Builder {
    private static final String OAUTH_DIALOG = "oauth";
    static final String REDIRECT_URI = "fbconnect://success";

    // End-to-end tracking token forwarded to the dialog request.
    private String e2e;
    // Whether this dialog re-requests permissions the user previously declined.
    private boolean isRerequest;

    public AuthDialogBuilder(Context context, String applicationId, Bundle parameters) {
        super(context, applicationId, OAUTH_DIALOG, parameters);
    }

    public AuthDialogBuilder setE2E(String e2e) {
        this.e2e = e2e;
        return this;
    }

    public AuthDialogBuilder setIsRerequest(boolean isRerequest) {
        this.isRerequest = isRerequest;
        return this;
    }

    @Override
    public WebDialog build() {
        final Bundle params = getParameters();
        params.putString(ServerProtocol.DIALOG_PARAM_REDIRECT_URI, REDIRECT_URI);
        params.putString(ServerProtocol.DIALOG_PARAM_CLIENT_ID, getApplicationId());
        params.putString(ServerProtocol.DIALOG_PARAM_E2E, e2e);
        params.putString(ServerProtocol.DIALOG_PARAM_RESPONSE_TYPE, ServerProtocol.DIALOG_RESPONSE_TYPE_TOKEN);
        params.putString(ServerProtocol.DIALOG_PARAM_RETURN_SCOPES, ServerProtocol.DIALOG_RETURN_SCOPES_TRUE);
        // The rerequest auth type is only understood by non-legacy endpoints.
        if (isRerequest && !Settings.getPlatformCompatibilityEnabled()) {
            params.putString(ServerProtocol.DIALOG_PARAM_AUTH_TYPE, ServerProtocol.DIALOG_REREQUEST_AUTH_TYPE);
        }
        return new WebDialog(getContext(), OAUTH_DIALOG, params, getTheme(), getListener());
    }
}
/**
 * Immutable-ish description of a single authorization attempt: login behavior,
 * requested permissions, audience, and the application being authorized.
 * Serializable so a pending request can survive process restarts; the
 * activity delegate is transient and must be re-supplied after deserialization.
 */
static class AuthorizationRequest implements Serializable {
    private static final long serialVersionUID = 1L;

    // Not serializable; lost on round-trip through serialization.
    private transient final StartActivityDelegate startActivityDelegate;
    private final SessionLoginBehavior loginBehavior;
    private final int requestCode;
    // Mutable: legacy mode may be toggled after construction (see setIsLegacy).
    private boolean isLegacy = false;
    // Mutable: handlers may narrow the permission list (see setPermissions).
    private List<String> permissions;
    private final SessionDefaultAudience defaultAudience;
    private final String applicationId;
    // Token whose FBID a newly obtained token must match; null when no validation is needed.
    private final String previousAccessToken;
    private final String authId;
    private boolean isRerequest = false;

    AuthorizationRequest(SessionLoginBehavior loginBehavior, int requestCode, boolean isLegacy,
            List<String> permissions, SessionDefaultAudience defaultAudience, String applicationId,
            String validateSameFbidAsToken, StartActivityDelegate startActivityDelegate, String authId) {
        this.loginBehavior = loginBehavior;
        this.requestCode = requestCode;
        this.isLegacy = isLegacy;
        this.permissions = permissions;
        this.defaultAudience = defaultAudience;
        this.applicationId = applicationId;
        this.previousAccessToken = validateSameFbidAsToken;
        this.startActivityDelegate = startActivityDelegate;
        this.authId = authId;
    }

    StartActivityDelegate getStartActivityDelegate() {
        return startActivityDelegate;
    }

    List<String> getPermissions() {
        return permissions;
    }

    void setPermissions(List<String> permissions) {
        this.permissions = permissions;
    }

    SessionLoginBehavior getLoginBehavior() {
        return loginBehavior;
    }

    int getRequestCode() {
        return requestCode;
    }

    SessionDefaultAudience getDefaultAudience() {
        return defaultAudience;
    }

    String getApplicationId() {
        return applicationId;
    }

    boolean isLegacy() {
        return isLegacy;
    }

    void setIsLegacy(boolean isLegacy) {
        this.isLegacy = isLegacy;
    }

    String getPreviousAccessToken() {
        return previousAccessToken;
    }

    /** True when a previous token exists and this is a non-legacy request, i.e. the new token must be validated. */
    boolean needsNewTokenValidation() {
        return previousAccessToken != null && !isLegacy;
    }

    String getAuthId() {
        return authId;
    }

    boolean isRerequest() {
        return isRerequest;
    }

    void setRerequest(boolean isRerequest) {
        this.isRerequest = isRerequest;
    }
}
/**
 * Outcome of an authorization attempt: SUCCESS (with a token), CANCEL, or
 * ERROR (with message/code). Created only through the static factory methods.
 */
static class Result implements Serializable {
    private static final long serialVersionUID = 1L;

    enum Code {
        SUCCESS("success"),
        CANCEL("cancel"),
        ERROR("error");

        private final String loggingValue;

        Code(String loggingValue) {
            this.loggingValue = loggingValue;
        }

        // For consistency across platforms, we want to use specific string values when logging these results.
        String getLoggingValue() {
            return loggingValue;
        }
    }

    final Code code;
    // Non-null only for SUCCESS results.
    final AccessToken token;
    // Non-null only for CANCEL/ERROR results (and may still be null for CANCEL).
    final String errorMessage;
    final String errorCode;
    // The request this result answers.
    final AuthorizationRequest request;
    Map<String, String> loggingExtras;

    private Result(AuthorizationRequest request, Code code, AccessToken token, String errorMessage,
            String errorCode) {
        this.request = request;
        this.token = token;
        this.errorMessage = errorMessage;
        this.code = code;
        this.errorCode = errorCode;
    }

    /** Creates a SUCCESS result carrying the obtained token. */
    static Result createTokenResult(AuthorizationRequest request, AccessToken token) {
        return new Result(request, Code.SUCCESS, token, null, null);
    }

    /** Creates a CANCEL result; {@code message} may be null. */
    static Result createCancelResult(AuthorizationRequest request, String message) {
        return new Result(request, Code.CANCEL, null, message, null);
    }

    /** Creates an ERROR result without a server error code. */
    static Result createErrorResult(AuthorizationRequest request, String errorType, String errorDescription) {
        return createErrorResult(request, errorType, errorDescription, null);
    }

    /** Creates an ERROR result, joining type and description into one message (nulls skipped). */
    static Result createErrorResult(AuthorizationRequest request, String errorType, String errorDescription,
            String errorCode) {
        String message = TextUtils.join(": ", Utility.asListNoNulls(errorType, errorDescription));
        return new Result(request, Code.ERROR, null, message, errorCode);
    }
}
}
| |
/*
* Copyright 2021-2022 Pavel Ponec, https://github.com/pponec
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ujorm.tools.web.ajax;
import java.time.Duration;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.ujorm.tools.Assert;
import org.ujorm.tools.Check;
import org.ujorm.tools.web.Element;
import org.ujorm.tools.web.Html;
import org.ujorm.tools.web.ao.HttpParameter;
import org.ujorm.tools.web.ao.Injector;
/**
* A prototype of ES6 Vanilla Javascript Writer of the Ujorm framework.
*
* @author Pavel Ponec
*/
/**
 * A prototype of ES6 Vanilla Javascript Writer of the Ujorm framework.
 *
 * <p>Generates an inline script that debounces input events on the selected
 * form fields and submits the form via {@code fetch} as an AJAX POST,
 * patching the DOM from the JSON response.</p>
 *
 * @author Pavel Ponec
 */
public class JavaScriptWriter implements Injector {

    /** Default AJAX request parameter name */
    public static final HttpParameter DEFAULT_AJAX_REQUEST_PARAM = HttpParameter.of("_ajax");
    /** Default sort request parameter name */
    public static final HttpParameter DEFAULT_SORT_REQUEST_PARAM = HttpParameter.of("_sort");
    /** Default idle delay before the AJAX request is fired */
    public static final Duration DEFAULT_DURATION = Duration.ofMillis(250);

    /** Javascript ajax request parameter */
    protected final HttpParameter ajaxRequestParam;
    /** Javascript sort request parameter */
    protected final HttpParameter sortRequestParam;
    /** Input selectors */
    protected final CharSequence[] inputCssSelectors;
    /** Input idle delay */
    @NotNull
    protected Duration idleDelay = DEFAULT_DURATION;
    /** Form selector */
    protected String formSelector = Html.FORM;
    /** On load submit request */
    protected boolean onLoadSubmit = false;
    /** New line characters */
    protected CharSequence newLine = "\n";
    /** A subtitle selector.
     * NOTE(review): if set to null, the generated JS would contain the literal
     * selector 'null' — confirm callers never pass null. */
    @Nullable
    protected CharSequence subtitleSelector="?";
    /** Error message prefix written to the subtitle element when the AJAX call fails */
    @NotNull
    protected CharSequence errorMessage = "AJAX fails due";
    /** Ajax Timeout */
    @NotNull
    protected Duration ajaxTimeout = Duration.ofMillis(30_000);
    /** JavaScript version */
    protected int version = 1;
    /** Javascript ajax request path (used by script version 2) */
    protected String ajaxRequestPath = "_ajax";
    /** Function order of name */
    protected int fceOrder = 1;
    /** Ajax support */
    protected boolean isAjax = true;

    public JavaScriptWriter() {
        this("form input:not([type=\"button\"])");
    }

    public JavaScriptWriter(@NotNull CharSequence... inputSelectors) {
        this(DEFAULT_DURATION,
                DEFAULT_AJAX_REQUEST_PARAM,
                DEFAULT_SORT_REQUEST_PARAM,
                inputSelectors);
    }

    public JavaScriptWriter(
            @NotNull Duration idleDelay,
            @NotNull HttpParameter ajaxRequestParam,
            @NotNull HttpParameter sortRequestParam,
            @NotNull CharSequence... inputSelectors) {
        this.idleDelay = Assert.notNull(idleDelay, "idleDelay");
        this.ajaxRequestParam = Assert.notNull(ajaxRequestParam, "ajaxRequestParam");
        this.sortRequestParam = Assert.notNull(sortRequestParam, "sortRequestParam");
        this.inputCssSelectors = Assert.hasLength(inputSelectors, "inputSelectors");
    }

    /** Assign the CSS selector of the form to submit */
    public JavaScriptWriter setFormSelector(String formSelector) {
        this.formSelector = Assert.notNull(formSelector, "formSelector");
        return this;
    }

    /** Enable or disable an automatic submit when the page is loaded */
    public JavaScriptWriter setOnLoadSubmit(boolean onLoadSubmit) {
        this.onLoadSubmit = onLoadSubmit;
        return this;
    }

    /** Assign the new-line character sequence used in the generated script */
    public JavaScriptWriter setNewLine(@NotNull CharSequence newLine) {
        this.newLine = Assert.notNull(newLine, "newLine");
        return this;
    }

    /** Assign a subtitle CSS selector */
    public JavaScriptWriter setSubtitleSelector(CharSequence subtitleSelector) {
        this.subtitleSelector = subtitleSelector;
        return this;
    }

    /** Assign an AJAX error message.
     * The argument must be non-null and non-empty: {@code Assert.hasLength}
     * rejects null, so the former {@code @Nullable} annotation was misleading. */
    public JavaScriptWriter setErrorMessage(@NotNull CharSequence errorMessage) {
        this.errorMessage = Assert.hasLength(errorMessage, "errorMessage");
        return this;
    }

    /** Assign an AJAX timeout */
    public JavaScriptWriter setAjaxTimeout(@NotNull Duration ajaxTimeout) {
        this.ajaxTimeout = Assert.notNull(ajaxTimeout, "ajaxTimeout");
        return this;
    }

    /** Assign an AJAX request path; implicitly switches the script to version 2 */
    public JavaScriptWriter setAjaxRequestPath(@NotNull String ajaxRequestPath) {
        this.ajaxRequestPath = ajaxRequestPath;
        setVersion(2);
        return this;
    }

    /** Assign the JavaScript version (1 = query parameter, 2 = dedicated request path) */
    public JavaScriptWriter setVersion(int version) {
        this.version = version;
        return this;
    }

    /** Set a function order */
    public JavaScriptWriter setSortable(int fceOrder) {
        this.fceOrder = fceOrder;
        return this;
    }

    /** Get the function order */
    public int getFceOrder() {
        return fceOrder;
    }

    /** Enable or disable the AJAX script generation */
    public JavaScriptWriter setAjax(boolean ajax) {
        this.isAjax = ajax;
        return this;
    }

    /** Is the AJAX script generation enabled? */
    public boolean isAjax() {
        return isAjax;
    }

    /**
     * Generate a Javascript
     */
    @Override
    public void write(@NotNull final Element parent) {
        // Join all input selectors; "#!@" is a deliberately non-matching selector
        // used when no input selectors are configured.
        final String inpSelectors = Check.hasLength(inputCssSelectors)
                ? Stream.of(inputCssSelectors).collect(Collectors.joining(", "))
                : "#!@";
        try (Element js = parent.addElement(Html.SCRIPT)) {
            js.addRawText(newLine, "/* Script of ujorm.org *//* jshint esversion:6 */");
            if (isAjax) {
                js.addRawText(newLine, "const f", fceOrder, "={");
                js.addRawTexts(newLine, ""
                        , "timeout:null, ajaxRun:false, submitReq:false, millis:" + idleDelay.toMillis() + ","
                        , "init(e){"
                        , " document.querySelector('" + formSelector + "').addEventListener('submit',this.process,false);"
                        , " document.querySelectorAll('" + inpSelectors + "').forEach(i=>{"
                        , "  i.addEventListener('keyup',e=>this.timeEvent(e),false);"
                        , " });},"
                );
                js.addRawTexts(newLine, ""
                        , "timeEvent(e){"
                        , " if(this.timeout)clearTimeout(this.timeout);"
                        , " this.timeout=setTimeout(()=>{"
                        , "  this.timeout=null;"
                        , "  if(this.ajaxRun)this.submitReq=true;"
                        , "  else this.process(null);"
                        , " },this.millis);},"
                );
                js.addRawTexts(newLine, ""
                        , "process(e){"
                        , " let pars=new URLSearchParams(new FormData(document.querySelector('" + formSelector + "')));"
                        , " if(e!==null){e.preventDefault();pars.append(e.submitter.name,e.submitter.value);}"
                        , " fetch('" + (version == 2
                                ? ajaxRequestPath
                                : ("?" + ajaxRequestPath + "=true")) + "', {"
                        , "  method:'POST',"
                        , "  body:pars,"
                        , "  headers:{'Content-Type':'application/x-www-form-urlencoded;charset=UTF-8'},"
                        , " })"
                        , " .then(response=>response.json())"
                        , " .then(data=>{"
                        , "  for(const key of Object.keys(data))"
                        , "   if(key=='')eval(data[key]);"
                        , "   else document.querySelectorAll(key).forEach(i=>{i.innerHTML=data[key];});"
                        , "  if(this.submitReq){this.submitReq=false;this.process(e);}" // Next submit the form
                        , "  else{this.ajaxRun=false;}"
                        , " }).catch(err=>{"
                        , "  this.ajaxRun=false;"
                        , "  document.querySelector('" + subtitleSelector + "').innerHTML='" + errorMessage + ": ' + err;"
                        , " });"
                        , "}"
                );
                js.addRawTexts(newLine, "};");
                js.addRawText(newLine, "document.addEventListener('DOMContentLoaded',e=>f", fceOrder, ".init(e));");
                if (onLoadSubmit) {
                    js.addRawText(newLine, "f", fceOrder, ".process(null);");
                }
            }
        }
    }
}
| |
/*
*
* Copyright 2012-2014 Eurocommercial Properties NV
*
*
* Licensed under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.estatio.module.lease.dom.invoicing;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import javax.inject.Inject;
import org.joda.time.LocalDate;
import org.apache.isis.applib.annotation.DomainService;
import org.apache.isis.applib.annotation.NatureOfService;
import org.apache.isis.applib.services.factory.FactoryService;
import org.isisaddons.module.security.dom.tenancy.ApplicationTenancy;
import org.incode.module.communications.dom.impl.commchannel.CommunicationChannel;
import org.estatio.module.agreement.dom.Agreement;
import org.estatio.module.asset.dom.FixedAsset;
import org.estatio.module.base.dom.UdoDomainRepositoryAndFactory;
import org.estatio.module.currency.dom.Currency;
import org.estatio.module.invoice.dom.Invoice;
import org.estatio.module.invoice.dom.InvoiceRepository;
import org.estatio.module.invoice.dom.InvoiceStatus;
import org.estatio.module.invoice.dom.PaymentMethod;
import org.estatio.module.lease.dom.AgreementRoleCommunicationChannelTypeEnum;
import org.estatio.module.lease.dom.Lease;
import org.estatio.module.lease.dom.LeaseAgreementRoleTypeEnum;
import org.estatio.module.lease.dom.settings.LeaseInvoicingSettingsService;
import org.estatio.module.party.dom.Party;
@DomainService(repositoryFor = InvoiceForLease.class, nature = NatureOfService.DOMAIN)
/**
 * Repository for {@link InvoiceForLease}: lookup by lease / fixed asset /
 * application tenancy / run id / invoice number, plus creation of new invoices.
 */
@DomainService(repositoryFor = InvoiceForLease.class, nature = NatureOfService.DOMAIN)
public class InvoiceForLeaseRepository extends UdoDomainRepositoryAndFactory<InvoiceForLease> {

    public InvoiceForLeaseRepository() {
        super(InvoiceForLeaseRepository.class, InvoiceForLease.class);
    }

    /** Returns all invoices matching the given seller/buyer/payment/lease/status/due-date combination. */
    public List<InvoiceForLease> findMatchingInvoices(
            final Party seller,
            final Party buyer,
            final PaymentMethod paymentMethod,
            final Lease lease,
            final InvoiceStatus invoiceStatus,
            final LocalDate dueDate) {
        return allMatches(
                "findMatchingInvoices",
                "seller", seller,
                "buyer", buyer,
                "paymentMethod", paymentMethod,
                "lease", lease,
                "status", invoiceStatus,
                "dueDate", dueDate);
    }

    /**
     * Finds or creates a matching invoice, deriving seller and buyer from the
     * lease's primary/secondary parties as of the due date.
     */
    public InvoiceForLease findOrCreateMatchingInvoice(
            final ApplicationTenancy applicationTenancy,
            final PaymentMethod paymentMethod,
            final Lease lease,
            final InvoiceStatus invoiceStatus,
            final LocalDate dueDate,
            final String interactionId) {
        Party buyer = lease.secondaryPartyAsOfElseCurrent(dueDate);
        Party seller = lease.primaryPartyAsOfElseCurrent(dueDate);
        return findOrCreateMatchingInvoice(
                applicationTenancy, seller, buyer, paymentMethod, lease, invoiceStatus, dueDate, interactionId);
    }

    /** Returns the first matching invoice, or null when none exists. */
    public InvoiceForLease findMatchingInvoice(
            final Party seller,
            final Party buyer,
            final PaymentMethod paymentMethod,
            final Lease lease,
            final InvoiceStatus invoiceStatus,
            final LocalDate dueDate) {
        final List<InvoiceForLease> invoices = findMatchingInvoices(
                seller, buyer, paymentMethod, lease, invoiceStatus, dueDate);
        if (invoices == null || invoices.isEmpty()) {
            return null;
        }
        return invoices.get(0);
    }

    /**
     * Finds the first matching invoice, creating a new one (in the system
     * currency) when no match exists.
     * Reuses {@link #findMatchingInvoice} rather than duplicating its lookup logic.
     */
    public InvoiceForLease findOrCreateMatchingInvoice(
            final ApplicationTenancy applicationTenancy,
            final Party seller,
            final Party buyer,
            final PaymentMethod paymentMethod,
            final Lease lease,
            final InvoiceStatus invoiceStatus,
            final LocalDate dueDate,
            final String interactionId) {
        final InvoiceForLease existing = findMatchingInvoice(
                seller, buyer, paymentMethod, lease, invoiceStatus, dueDate);
        if (existing != null) {
            return existing;
        }
        return newInvoice(applicationTenancy, seller, buyer, paymentMethod, settingsService.systemCurrency(), dueDate, lease, interactionId);
    }

    public List<InvoiceForLease> findByLease(final Lease lease) {
        return allMatches("findByLease", "lease", lease);
    }

    /**
     * Creates and persists a new invoice in status {@link InvoiceStatus#NEW}.
     *
     * @param interactionId stored as the invoice's run id, correlating it to the calculation run
     */
    public InvoiceForLease newInvoice(
            final ApplicationTenancy applicationTenancy,
            final Party seller,
            final Party buyer,
            final PaymentMethod paymentMethod,
            final Currency currency,
            final LocalDate dueDate,
            final Lease lease,
            final String interactionId
    ) {
        InvoiceForLease invoice = newTransientInstance();
        invoice.setApplicationTenancyPath(applicationTenancy.getPath());
        invoice.setBuyer(buyer);
        invoice.setSeller(seller);
        invoice.setPaymentMethod(paymentMethod);
        invoice.setStatus(InvoiceStatus.NEW);
        invoice.setCurrency(currency);
        invoice.setLease(lease);
        invoice.setDueDate(dueDate);
        invoice.setRunId(interactionId);
        // copy down from the agreement, we require all invoice items to relate
        // back to this (root) fixed asset
        invoice.setPaidBy(lease.getPaidBy());
        invoice.setFixedAsset(lease.getProperty());
        // copy over the current invoice address (if any)
        final CommunicationChannel sendTo = firstCurrentTenantInvoiceAddress(lease);
        invoice.setSendTo(sendTo);
        invoice.updateDescriptions();
        persistIfNotAlready(invoice);
        getContainer().flush();
        return invoice;
    }

    /** Returns the tenant's first current invoice address, or null when none is registered. */
    CommunicationChannel firstCurrentTenantInvoiceAddress(final Agreement agreement) {
        final List<CommunicationChannel> channels = currentTenantInvoiceAddresses(agreement);
        return channels.isEmpty() ? null : channels.get(0);
    }

    List<CommunicationChannel> currentTenantInvoiceAddresses(final Agreement agreement) {
        return locator.current(agreement, LeaseAgreementRoleTypeEnum.TENANT.getTitle(), AgreementRoleCommunicationChannelTypeEnum.INVOICE_ADDRESS.getTitle());
    }

    public List<InvoiceForLease> findByFixedAssetAndStatus(
            final FixedAsset fixedAsset,
            final InvoiceStatus status) {
        return allMatches("findByFixedAssetAndStatus",
                "fixedAsset", fixedAsset,
                "status", status);
    }

    public List<InvoiceForLease> findByFixedAssetAndDueDate(
            final FixedAsset fixedAsset,
            final LocalDate dueDate) {
        return allMatches("findByFixedAssetAndDueDate",
                "fixedAsset", fixedAsset,
                "dueDate", dueDate);
    }

    public List<InvoiceForLease> findByFixedAssetAndDueDateAndStatus(
            final FixedAsset fixedAsset,
            final LocalDate dueDate,
            final InvoiceStatus status) {
        return allMatches("findByFixedAssetAndDueDateAndStatus",
                "fixedAsset", fixedAsset,
                "dueDate", dueDate,
                "status", status);
    }

    public List<InvoiceForLease> findByApplicationTenancyPathAndSellerAndDueDateAndStatus(
            final String applicationTenancyPath,
            final Party seller,
            final LocalDate dueDate,
            final InvoiceStatus status) {
        return allMatches("findByApplicationTenancyPathAndSellerAndDueDateAndStatus",
                "applicationTenancyPath", applicationTenancyPath,
                "seller", seller,
                "dueDate", dueDate,
                "status", status);
    }

    public List<InvoiceForLease> findByApplicationTenancyPathAndSellerAndInvoiceDate(
            final String applicationTenancyPath,
            final Party seller,
            final LocalDate invoiceDate) {
        return allMatches("findByApplicationTenancyPathAndSellerAndInvoiceDate",
                "applicationTenancyPath", applicationTenancyPath,
                "seller", seller,
                "invoiceDate", invoiceDate);
    }

    public List<InvoiceForLease> findInvoicesByRunId(final String runId) {
        return allMatches("findByRunId",
                "runId", runId);
    }

    public List<InvoiceForLease> findByRunIdAndApplicationTenancyPath(final String runId, final String applicationTenancyPath) {
        return allMatches("findByRunIdAndApplicationTenancyPath",
                "runId", runId,
                "applicationTenancyPath", applicationTenancyPath);
    }

    /** Removes all NEW invoices of the run's property/due-date via the {@code Invoice._remove} mixin. */
    public void removeRuns(InvoiceCalculationParameters parameters) {
        List<InvoiceForLease> invoices = findByFixedAssetAndDueDateAndStatus(parameters.property(), parameters.invoiceDueDate(), InvoiceStatus.NEW);
        for (Invoice invoice : invoices) {
            factoryService.mixin(Invoice._remove.class, invoice).exec();
        }
    }

    /**
     * Finds invoices by invoice number, optionally restricted to a CODA value-date year.
     * When {@code yearIfAny} is null, or an invoice has no CODA value date, the year filter is skipped.
     */
    public List<InvoiceForLease> findInvoicesByInvoiceNumber(
            final String invoiceNumber,
            final Integer yearIfAny) {
        return invoiceRepository.findMatchingInvoiceNumber(invoiceNumber).stream()
                .filter(InvoiceForLease.class::isInstance)
                .map(InvoiceForLease.class::cast)
                .filter(i -> {
                    final LocalDate codaValDate = i.getCodaValDate();
                    return yearIfAny == null || codaValDate == null || codaValDate.getYear() == yearIfAny;
                })
                .collect(Collectors.toList());
    }

    /**
     * Finds the first invoice with the given number whose CODA value date falls in {@code year}.
     * Unlike {@link #findInvoicesByInvoiceNumber}, a missing CODA value date excludes the invoice.
     */
    public Optional<InvoiceForLease> findInvoiceByInvoiceNumber(
            final String invoiceNumber,
            final Integer year) {
        return invoiceRepository.findMatchingInvoiceNumber(invoiceNumber).stream()
                .filter(InvoiceForLease.class::isInstance)
                .map(InvoiceForLease.class::cast)
                .filter(i -> {
                    final LocalDate codaValDate = i.getCodaValDate();
                    return codaValDate != null && codaValDate.getYear() == year;
                })
                .findFirst();
    }

    @Inject
    FactoryService factoryService;
    @Inject
    AgreementCommunicationChannelLocator locator;
    @Inject
    LeaseInvoicingSettingsService settingsService;
    @Inject
    InvoiceRepository invoiceRepository;
}
| |
/* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.activitydiagram.command;
import net.sourceforge.plantuml.Direction;
import net.sourceforge.plantuml.LineLocation;
import net.sourceforge.plantuml.StringUtils;
import net.sourceforge.plantuml.Url;
import net.sourceforge.plantuml.UrlBuilder;
import net.sourceforge.plantuml.UrlBuilder.ModeUrl;
import net.sourceforge.plantuml.activitydiagram.ActivityDiagram;
import net.sourceforge.plantuml.classdiagram.command.CommandLinkClass;
import net.sourceforge.plantuml.command.CommandExecutionResult;
import net.sourceforge.plantuml.command.SingleLineCommand2;
import net.sourceforge.plantuml.command.regex.IRegex;
import net.sourceforge.plantuml.command.regex.RegexConcat;
import net.sourceforge.plantuml.command.regex.RegexLeaf;
import net.sourceforge.plantuml.command.regex.RegexOptional;
import net.sourceforge.plantuml.command.regex.RegexOr;
import net.sourceforge.plantuml.command.regex.RegexPartialMatch;
import net.sourceforge.plantuml.command.regex.RegexResult;
import net.sourceforge.plantuml.cucadiagram.Code;
import net.sourceforge.plantuml.cucadiagram.Display;
import net.sourceforge.plantuml.cucadiagram.GroupType;
import net.sourceforge.plantuml.cucadiagram.IEntity;
import net.sourceforge.plantuml.cucadiagram.ILeaf;
import net.sourceforge.plantuml.cucadiagram.Ident;
import net.sourceforge.plantuml.cucadiagram.LeafType;
import net.sourceforge.plantuml.cucadiagram.Link;
import net.sourceforge.plantuml.cucadiagram.LinkDecor;
import net.sourceforge.plantuml.cucadiagram.LinkType;
import net.sourceforge.plantuml.cucadiagram.NamespaceStrategy;
import net.sourceforge.plantuml.cucadiagram.Stereotype;
import net.sourceforge.plantuml.descdiagram.command.CommandLinkElement;
import net.sourceforge.plantuml.graphic.color.ColorParser;
import net.sourceforge.plantuml.graphic.color.ColorType;
import net.sourceforge.plantuml.ugraphic.color.NoSuchColorException;
public class CommandLinkActivity extends SingleLineCommand2<ActivityDiagram> {
public CommandLinkActivity() {
super(getRegexConcat());
}
private static IRegex getRegexConcat() {
return RegexConcat.build(CommandLinkActivity.class.getName(), RegexLeaf.start(), //
new RegexOptional(//
new RegexOr("FIRST", //
new RegexLeaf("STAR", "(\\(\\*(top)?\\))"), //
new RegexLeaf("CODE", "([%pLN][%pLN_.]*)"), //
new RegexLeaf("BAR", "(?:==+)[%s]*([%pLN_.]+)[%s]*(?:==+)"), //
new RegexLeaf("QUOTED", "[%g]([^%g]+)[%g](?:[%s]+as[%s]+([%pLN_.]+))?"))), //
RegexLeaf.spaceZeroOrMore(), //
new RegexLeaf("STEREOTYPE", "(\\<\\<.*\\>\\>)?"), //
RegexLeaf.spaceZeroOrMore(), //
ColorParser.exp2(), //
RegexLeaf.spaceZeroOrMore(), //
new RegexLeaf("URL", "(" + UrlBuilder.getRegexp() + ")?"), //
new RegexLeaf("ARROW_BODY1", "([-.]+)"), //
new RegexLeaf("ARROW_STYLE1", "(?:\\[(" + CommandLinkElement.LINE_STYLE + ")\\])?"), //
new RegexLeaf("ARROW_DIRECTION", "(\\*|left|right|up|down|le?|ri?|up?|do?)?"), //
new RegexLeaf("ARROW_STYLE2", "(?:\\[(" + CommandLinkElement.LINE_STYLE + ")\\])?"), //
new RegexLeaf("ARROW_BODY2", "([-.]*)"), //
new RegexLeaf("\\>"), //
RegexLeaf.spaceZeroOrMore(), //
new RegexOptional(new RegexLeaf("BRACKET", "\\[([^\\]*]+[^\\]]*)\\]")), //
RegexLeaf.spaceZeroOrMore(), //
new RegexOr("FIRST2", //
new RegexLeaf("STAR2", "(\\(\\*(top|\\d+)?\\))"), //
new RegexLeaf("OPENBRACKET2", "(\\{)"), //
new RegexLeaf("CODE2", "([%pLN][%pLN_.]*)"), //
new RegexLeaf("BAR2", "(?:==+)[%s]*([%pLN_.]+)[%s]*(?:==+)"), //
new RegexLeaf("QUOTED2", "[%g]([^%g]+)[%g](?:[%s]+as[%s]+([%pLN][%pLN_.]*))?"), //
new RegexLeaf("QUOTED_INVISIBLE2", "(\\w.*?)")), //
RegexLeaf.spaceZeroOrMore(), //
new RegexLeaf("STEREOTYPE2", "(\\<\\<.*\\>\\>)?"), //
RegexLeaf.spaceZeroOrMore(), //
new RegexOptional( //
new RegexConcat( //
new RegexLeaf("in"), //
RegexLeaf.spaceOneOrMore(), //
new RegexLeaf("PARTITION2", "([%g][^%g]+[%g]|\\S+)") //
)), //
RegexLeaf.spaceZeroOrMore(), //
ColorParser.exp3(), //
RegexLeaf.end());
}
@Override
protected CommandExecutionResult executeArg(ActivityDiagram diagram, LineLocation location, RegexResult arg)
throws NoSuchColorException {
final IEntity entity1 = getEntity(diagram, arg, true);
if (entity1 == null) {
return CommandExecutionResult.error("No such activity");
}
if (arg.get("STEREOTYPE", 0) != null) {
entity1.setStereotype(Stereotype.build(arg.get("STEREOTYPE", 0)));
}
if (arg.get("BACKCOLOR", 0) != null) {
String s = arg.get("BACKCOLOR", 0);
entity1.setSpecificColorTOBEREMOVED(ColorType.BACK,
diagram.getSkinParam().getIHtmlColorSet().getColor(diagram.getSkinParam().getThemeStyle(), s));
}
final IEntity entity2 = getEntity(diagram, arg, false);
if (entity2 == null) {
return CommandExecutionResult.error("No such activity");
}
if (arg.get("BACKCOLOR2", 0) != null) {
String s = arg.get("BACKCOLOR2", 0);
entity2.setSpecificColorTOBEREMOVED(ColorType.BACK,
diagram.getSkinParam().getIHtmlColorSet().getColor(diagram.getSkinParam().getThemeStyle(), s));
}
if (arg.get("STEREOTYPE2", 0) != null) {
entity2.setStereotype(Stereotype.build(arg.get("STEREOTYPE2", 0)));
}
final Display linkLabel = Display.getWithNewlines(arg.get("BRACKET", 0));
final String arrowBody1 = CommandLinkClass.notNull(arg.get("ARROW_BODY1", 0));
final String arrowBody2 = CommandLinkClass.notNull(arg.get("ARROW_BODY2", 0));
final String arrowDirection = CommandLinkClass.notNull(arg.get("ARROW_DIRECTION", 0));
final String arrow = StringUtils.manageArrowForCuca(arrowBody1 + arrowDirection + arrowBody2 + ">");
int lenght = arrow.length() - 1;
if (arrowDirection.contains("*")) {
lenght = 2;
}
LinkType type = new LinkType(LinkDecor.ARROW, LinkDecor.NONE);
if ((arrowBody1 + arrowBody2).contains(".")) {
type = type.goDotted();
}
Link link = new Link(entity1, entity2, type, linkLabel, lenght,
diagram.getSkinParam().getCurrentStyleBuilder());
if (arrowDirection.contains("*")) {
link.setConstraint(false);
}
final Direction direction = StringUtils.getArrowDirection(arrowBody1 + arrowDirection + arrowBody2 + ">");
if (direction == Direction.LEFT || direction == Direction.UP) {
link = link.getInv();
}
if (arg.get("URL", 0) != null) {
final UrlBuilder urlBuilder = new UrlBuilder(diagram.getSkinParam().getValue("topurl"), ModeUrl.STRICT);
final Url urlLink = urlBuilder.getUrl(arg.get("URL", 0));
link.setUrl(urlLink);
}
link.applyStyle(diagram.getSkinParam().getThemeStyle(), arg.getLazzy("ARROW_STYLE", 0));
diagram.addLink(link);
return CommandExecutionResult.ok();
}
static IEntity getEntity(ActivityDiagram diagram, RegexResult arg, final boolean start) {
final String suf = start ? "" : "2";
final String openBracket2 = arg.get("OPENBRACKET" + suf, 0);
if (openBracket2 != null) {
return diagram.createInnerActivity();
}
if (arg.get("STAR" + suf, 0) != null) {
final String suppId = arg.get("STAR" + suf, 1);
if (start) {
if (suppId != null) {
diagram.getStart().setTop(true);
}
return diagram.getStart();
}
return diagram.getEnd(suppId);
}
String partition = arg.get("PARTITION" + suf, 0);
if (partition != null) {
partition = StringUtils.eventuallyRemoveStartingAndEndingDoubleQuote(partition);
}
final String idShort = arg.get("CODE" + suf, 0);
if (idShort != null) {
if (partition != null) {
final Ident idNewLong = diagram.buildLeafIdent(partition);
final Code codeP = diagram.V1972() ? idNewLong : diagram.buildCode(partition);
diagram.gotoGroup(idNewLong, codeP, Display.getWithNewlines(partition), GroupType.PACKAGE,
diagram.getRootGroup(), NamespaceStrategy.SINGLE);
}
final Ident ident = diagram.buildLeafIdent(idShort);
final Code code = diagram.V1972() ? ident : diagram.buildCode(idShort);
final LeafType type = diagram.V1972() ? getTypeIfExistingSmart(diagram, ident)
: getTypeIfExisting(diagram, code);
IEntity result;
if (diagram.V1972()) {
result = diagram.getLeafVerySmart(ident);
if (result == null)
result = diagram.getOrCreate(ident, code, Display.getWithNewlines(code), type);
} else
result = diagram.getOrCreate(ident, code, Display.getWithNewlines(code), type);
if (partition != null) {
diagram.endGroup();
}
return result;
}
final String bar = arg.get("BAR" + suf, 0);
if (bar != null) {
final Ident identBar = diagram.buildLeafIdent(bar);
final Code codeBar = diagram.V1972() ? identBar : diagram.buildCode(bar);
if (diagram.V1972()) {
final ILeaf result = diagram.getLeafVerySmart(identBar);
if (result != null) {
return result;
}
}
return diagram.getOrCreate(identBar, codeBar, Display.getWithNewlines(bar), LeafType.SYNCHRO_BAR);
}
final RegexPartialMatch quoted = arg.get("QUOTED" + suf);
if (quoted.get(0) != null) {
final String quotedString = quoted.get(1) == null ? quoted.get(0) : quoted.get(1);
if (partition != null) {
final Ident idNewLong = diagram.buildLeafIdent(partition);
final Code codeP = diagram.V1972() ? idNewLong : diagram.buildCode(partition);
diagram.gotoGroup(idNewLong, codeP, Display.getWithNewlines(partition), GroupType.PACKAGE,
diagram.getRootGroup(), NamespaceStrategy.SINGLE);
}
final Ident quotedIdent = diagram.buildLeafIdent(quotedString);
final Code quotedCode = diagram.V1972() ? quotedIdent : diagram.buildCode(quotedString);
final LeafType type = diagram.V1972() ? getTypeIfExistingSmart(diagram, quotedIdent)
: getTypeIfExisting(diagram, quotedCode);
final IEntity result = diagram.getOrCreate(quotedIdent, quotedCode, Display.getWithNewlines(quoted.get(0)),
type);
if (partition != null) {
diagram.endGroup();
}
return result;
}
final String quoteInvisibleString = arg.get("QUOTED_INVISIBLE" + suf, 0);
if (quoteInvisibleString != null) {
if (partition != null) {
final Ident idNewLong = diagram.buildLeafIdent(partition);
final Code codeP = diagram.V1972() ? idNewLong : diagram.buildCode(partition);
diagram.gotoGroup(idNewLong, codeP, Display.getWithNewlines(partition), GroupType.PACKAGE,
diagram.getRootGroup(), NamespaceStrategy.SINGLE);
}
final Ident identInvisible = diagram.buildLeafIdent(quoteInvisibleString);
final Code quotedInvisible = diagram.V1972() ? identInvisible : diagram.buildCode(quoteInvisibleString);
final IEntity result = diagram.getOrCreate(identInvisible, quotedInvisible,
Display.getWithNewlines(quotedInvisible), LeafType.ACTIVITY);
if (partition != null) {
diagram.endGroup();
}
return result;
}
final String first = arg.get("FIRST" + suf, 0);
if (first == null) {
return diagram.getLastEntityConsulted();
}
return null;
}
private static LeafType getTypeIfExistingSmart(ActivityDiagram system, Ident ident) {
    // If the diagram already holds this leaf as a BRANCH, keep that type;
    // any other (or missing) leaf is treated as a plain ACTIVITY.
    final IEntity existing = system.getLeafSmart(ident);
    if (existing != null && existing.getLeafType() == LeafType.BRANCH) {
        return LeafType.BRANCH;
    }
    return LeafType.ACTIVITY;
}
private static LeafType getTypeIfExisting(ActivityDiagram system, Code code) {
    // Preserve an existing BRANCH leaf's type; everything else maps to ACTIVITY.
    if (system.leafExist(code) && system.getLeaf(code).getLeafType() == LeafType.BRANCH) {
        return LeafType.BRANCH;
    }
    return LeafType.ACTIVITY;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.commandline;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
import java.util.UUID;
import java.util.function.Consumer;
import java.util.logging.Logger;
import org.apache.ignite.internal.client.GridClient;
import org.apache.ignite.internal.client.GridClientConfiguration;
import org.apache.ignite.internal.client.GridClientNode;
import org.apache.ignite.internal.commandline.argument.CommandArgUtils;
import org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationArguments;
import org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationCommandArg;
import org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationSubcommand;
import org.apache.ignite.internal.visor.tracing.configuration.VisorTracingConfigurationTask;
import org.apache.ignite.internal.visor.tracing.configuration.VisorTracingConfigurationTaskArg;
import org.apache.ignite.internal.visor.tracing.configuration.VisorTracingConfigurationTaskResult;
import org.apache.ignite.spi.tracing.Scope;
import static org.apache.ignite.internal.commandline.CommandList.TRACING_CONFIGURATION;
import static org.apache.ignite.internal.commandline.CommandLogger.grouped;
import static org.apache.ignite.internal.commandline.CommandLogger.join;
import static org.apache.ignite.internal.commandline.CommandLogger.optional;
import static org.apache.ignite.internal.commandline.TaskExecutor.executeTaskByNameOnNode;
import static org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationSubcommand.GET_ALL;
import static org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationSubcommand.RESET_ALL;
import static org.apache.ignite.internal.commandline.tracing.configuration.TracingConfigurationSubcommand.of;
import static org.apache.ignite.spi.tracing.TracingConfigurationParameters.SAMPLING_RATE_ALWAYS;
import static org.apache.ignite.spi.tracing.TracingConfigurationParameters.SAMPLING_RATE_NEVER;
/**
* Commands associated with tracing configuration functionality.
*/
public class TracingConfigurationCommand implements Command<TracingConfigurationArguments> {
    /** Parsed arguments of the tracing-configuration command. */
    private TracingConfigurationArguments args;

    /** {@inheritDoc} */
    @Override public void printUsage(Logger log) {
        Command.usage(
            log,
            "Print tracing configuration: ",
            TRACING_CONFIGURATION);

        Command.usage(
            log,
            "Print tracing configuration: ",
            TRACING_CONFIGURATION,
            TracingConfigurationSubcommand.GET_ALL.text(),
            optional(TracingConfigurationCommandArg.SCOPE.argName(), join("|", Scope.values())));

        Command.usage(
            log,
            "Print specific tracing configuration based on specified " +
                TracingConfigurationCommandArg.SCOPE.argName() + " and " +
                TracingConfigurationCommandArg.LABEL.argName() + ": ",
            TRACING_CONFIGURATION,
            TracingConfigurationSubcommand.GET.text(),
            grouped(TracingConfigurationCommandArg.SCOPE.argName(), join("|", Scope.values())),
            optional(TracingConfigurationCommandArg.LABEL.argName()));

        Command.usage(
            log,
            "Reset all specific tracing configuration to the default. If " +
                TracingConfigurationCommandArg.SCOPE.argName() +
                " is specified, then remove all label specific configuration for the given scope and reset given scope" +
                " specific configuration to the default, if " + TracingConfigurationCommandArg.SCOPE.argName() +
                " is skipped then reset all tracing configurations to the default. Print tracing configuration.",
            TRACING_CONFIGURATION,
            RESET_ALL.text(),
            optional(TracingConfigurationCommandArg.SCOPE.argName(), join("|", Scope.values())));

        Command.usage(
            log,
            "Reset specific tracing configuration to the default. If both " +
                TracingConfigurationCommandArg.SCOPE.argName() + " and " +
                TracingConfigurationCommandArg.LABEL.argName() + " are specified then remove given configuration," +
                " if only " + TracingConfigurationCommandArg.SCOPE.argName() +
                " is specified then reset given configuration to the default." +
                " Print reset configuration.",
            TRACING_CONFIGURATION,
            TracingConfigurationSubcommand.RESET.text(),
            grouped(TracingConfigurationCommandArg.SCOPE.argName(), join("|", Scope.values())),
            optional(TracingConfigurationCommandArg.LABEL.argName()));

        Command.usage(
            log,
            "Set new tracing configuration. If both " +
                TracingConfigurationCommandArg.SCOPE.argName() + " and " +
                TracingConfigurationCommandArg.LABEL.argName() + " are specified then add or override label" +
                " specific configuration, if only " + TracingConfigurationCommandArg.SCOPE.argName() +
                " is specified, then override scope specific configuration. Print applied configuration.",
            TRACING_CONFIGURATION,
            TracingConfigurationSubcommand.SET.text(),
            grouped(TracingConfigurationCommandArg.SCOPE.argName(), join("|", Scope.values()),
                optional(TracingConfigurationCommandArg.LABEL.argName()),
                optional(TracingConfigurationCommandArg.SAMPLING_RATE.argName(),
                    "Decimal value between 0 and 1, " +
                        "where 0 means never and 1 means always. " +
                        "More or less reflects the probability of sampling specific trace."),
                optional(TracingConfigurationCommandArg.INCLUDED_SCOPES.argName(),
                    "Set of scopes with comma as separator ",
                    join("|", Scope.values()))));
    }

    /**
     * Execute tracing-configuration command.
     *
     * @param clientCfg Client configuration.
     * @throws Exception If failed to execute tracing-configuration action.
     */
    @Override public Object execute(GridClientConfiguration clientCfg, Logger log) throws Exception {
        try (GridClient client = Command.startClient(clientCfg)) {
            // Run the task on the coordinator: the oldest (smallest order) server node.
            UUID crdId = client.compute()
                // Only non client node can be coordinator.
                .nodes(node -> !node.isClient())
                .stream()
                .min(Comparator.comparingLong(GridClientNode::order))
                .map(GridClientNode::nodeId)
                .orElse(null);

            VisorTracingConfigurationTaskResult res = executeTaskByNameOnNode(
                client,
                VisorTracingConfigurationTask.class.getName(),
                toVisorArguments(args),
                crdId,
                clientCfg
            );

            printResult(res, log::info);

            return res;
        }
        catch (Throwable e) {
            log.severe("Failed to execute tracing-configuration command='" + args.command().text() + "'");

            throw e;
        }
    }

    /** {@inheritDoc} */
    @Override public void parseArguments(CommandArgIterator argIter) {
        // If there is no arguments, use list command.
        if (!argIter.hasNextSubArg()) {
            args = new TracingConfigurationArguments.Builder(TracingConfigurationSubcommand.GET_ALL).build();

            return;
        }

        TracingConfigurationSubcommand cmd = of(argIter.nextArg("Expected tracing configuration action."));

        if (cmd == null)
            throw new IllegalArgumentException("Expected correct tracing configuration action.");

        TracingConfigurationArguments.Builder tracingConfigurationArgs = new TracingConfigurationArguments.Builder(cmd);

        Scope scope = null;

        String lb = null;

        double samplingRate = SAMPLING_RATE_NEVER;

        Set<Scope> includedScopes = new HashSet<>();

        while (argIter.hasNextSubArg()) {
            String argStr = argIter.nextArg("");

            TracingConfigurationCommandArg arg =
                CommandArgUtils.of(argStr, TracingConfigurationCommandArg.class);

            // Fail fast with a meaningful message instead of an NPE when an unknown flag is given.
            if (arg == null)
                throw new IllegalArgumentException("Unexpected tracing configuration argument '" + argStr + "'.");

            String strVal;

            switch (arg) {
                case SCOPE: {
                    String peekedNextArg = argIter.peekNextArg();

                    // The scope value is optional for some sub-commands: only consume the next
                    // token if it is not itself another known argument name.
                    if (!TracingConfigurationCommandArg.args().contains(peekedNextArg)) {
                        strVal = argIter.nextArg(
                            "The scope should be specified. The following " +
                                "values can be used: " + Arrays.toString(Scope.values()) + '.');

                        try {
                            scope = Scope.valueOf(strVal);
                        }
                        catch (IllegalArgumentException e) {
                            throw new IllegalArgumentException(
                                "Invalid scope '" + strVal + "'. The following " +
                                    "values can be used: " + Arrays.toString(Scope.values()) + '.');
                        }
                    }

                    break;
                }

                case LABEL: {
                    lb = argIter.nextArg(
                        "The label should be specified.");

                    break;
                }

                case SAMPLING_RATE: {
                    strVal = argIter.nextArg(
                        "The sampling rate should be specified. Decimal value between 0 and 1 should be used.");

                    try {
                        samplingRate = Double.parseDouble(strVal);
                    }
                    catch (NumberFormatException e) {
                        throw new IllegalArgumentException(
                            "Invalid sampling-rate '" + strVal + "'. Decimal value between 0 and 1 should be used.");
                    }

                    if (samplingRate < SAMPLING_RATE_NEVER || samplingRate > SAMPLING_RATE_ALWAYS)
                        throw new IllegalArgumentException(
                            "Invalid sampling-rate '" + strVal + "'. Decimal value between 0 and 1 should be used.");

                    break;
                }

                case INCLUDED_SCOPES: {
                    Set<String> setStrVals = argIter.nextStringSet(
                        "At least one supported scope should be specified.");

                    for (String scopeStrVal : setStrVals) {
                        try {
                            includedScopes.add(Scope.valueOf(scopeStrVal));
                        }
                        catch (IllegalArgumentException e) {
                            throw new IllegalArgumentException(
                                "Invalid supported scope '" + scopeStrVal + "'. The following " +
                                    "values can be used: " + Arrays.toString(Scope.values()) + '.');
                        }
                    }

                    break;
                }
            }
        }

        // Scope is a mandatory attribute for all sub-commands except get_all and reset_all.
        if ((cmd != GET_ALL && cmd != RESET_ALL) && scope == null) {
            throw new IllegalArgumentException(
                "Scope attribute is missing. Following values can be used: " + Arrays.toString(Scope.values()) + '.');
        }

        // Each sub-command only consumes the subset of arguments it understands.
        switch (cmd) {
            case GET_ALL:
            case RESET_ALL: {
                tracingConfigurationArgs.withScope(scope);

                break;
            }

            case RESET:
            case GET: {
                tracingConfigurationArgs.withScope(scope).withLabel(lb);

                break;
            }

            case SET: {
                tracingConfigurationArgs.withScope(scope).withLabel(lb).withSamplingRate(samplingRate).
                    withIncludedScopes(includedScopes);

                break;
            }

            default: {
                // We should never get here.
                assert false : "Unexpected tracing configuration argument [arg= " + cmd + ']';
            }
        }

        args = tracingConfigurationArgs.build();
    }

    /** {@inheritDoc} */
    @Override public TracingConfigurationArguments arg() {
        return args;
    }

    /** {@inheritDoc} */
    @Override public String name() {
        return TRACING_CONFIGURATION.toCommandName();
    }

    /** {@inheritDoc} */
    @Override public boolean experimental() {
        return true;
    }

    /**
     * Print result.
     *
     * @param res Visor tracing configuration result.
     * @param printer Printer.
     */
    private void printResult(VisorTracingConfigurationTaskResult res, Consumer<String> printer) {
        res.print(printer);
    }

    /**
     * Prepare task argument.
     *
     * @param args Argument from command line.
     * @return Task argument.
     */
    private VisorTracingConfigurationTaskArg toVisorArguments(TracingConfigurationArguments args) {
        return new VisorTracingConfigurationTaskArg(
            args.command().visorBaselineOperation(),
            args.scope(),
            args.label(),
            args.samplingRate(),
            args.includedScopes()
        );
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package de.janquadflieg.mrracer.lut;
import de.janquadflieg.mrracer.Utils;
import java.awt.*;
import java.io.*;
import java.util.*;
/**
* A d-dimensional lookup table.
*
* @author Jan Quadflieg
*/
public class LookUpTable {

    /** Flattened cell values in row-major order; {@code Double.NaN} marks an empty cell. */
    private double[] values;
    /** Per-cell read-access counter. */
    private int[] readCtr;
    /** Per-cell write-access counter. */
    private int[] writeCtr;
    /** The table's dimensions. */
    private Dimension[] dimensions;
    /** Row-major strides: offsets[i] is the flat-index step for dimension i. */
    private int[] offsets;

    /** Used only by {@link #load(BufferedReader)}, which fills all fields afterwards. */
    private LookUpTable() {
    }

    /**
     * Creates an empty table spanning the given dimensions. All cells start as NaN.
     *
     * @param dims the dimensions; defensively copied.
     */
    public LookUpTable(Dimension[] dims) {
        this.dimensions = new Dimension[dims.length];
        this.offsets = new int[dims.length];
        int size = 1;
        for (int i = 0; i < dims.length; ++i) {
            dimensions[i] = new Dimension(dims[i]);
            size *= dims[i].getSize();
        }
        values = new double[size];
        readCtr = new int[size];
        writeCtr = new int[size];
        // Row-major layout: the last dimension varies fastest.
        offsets[dimensions.length - 1] = 1;
        for (int i = dimensions.length - 2; i >= 0; --i) {
            offsets[i] = dimensions[i + 1].getSize() * offsets[i + 1];
        }
        reset();
    }

    /** Returns true if every coordinate of d lies inside its dimension (old-style lookup). */
    public boolean contains(double[] d) {
        return oldIndexOf(d) != -1;
    }

    /**
     * Returns the value stored at flat index i and counts the read.
     *
     * @param i flat cell index.
     */
    public double get(int i) {
        ++readCtr[i];
        return values[i];
    }

    /**
     * Returns the value at the cell containing d, or NaN if d is outside the table
     * (old-style lookup). Counts the read on a hit.
     *
     * @param d one coordinate per dimension.
     */
    public double get(double[] d) {
        int index = oldIndexOf(d);
        if (index != -1) {
            ++readCtr[index];
            return values[index];
        }
        return Double.NaN;
    }

    /**
     * Returns the multi-linearly interpolated value at d.
     *
     * @param d one coordinate per dimension.
     */
    public double getLinInt(double[] d) {
        return getLinInt(d, indicesOf(d), 0);
    }

    /**
     * Recursive multi-linear interpolation across dimensions starting at {@code dim}.
     * Within each dimension the point is blended between the containing cell and its
     * nearest neighbour, weighted by the distance from the cell center.
     */
    private double getLinInt(double[] d, int[] indices, int dim) {
        double center = dimensions[dim].getCenter(indices[dim]);
        double[] bounds = dimensions[dim].getBoundaries(indices[dim]);

        // At the outer edges there is no neighbour to blend with, so we cannot interpolate
        // along this dimension; either return the cell value or recurse into the next one.
        if ((indices[dim] == 0 && d[dim] <= center) ||
            (indices[dim] == dimensions[dim].getSize() - 1 && d[dim] >= center)) {
            if (dim + 1 == dimensions.length) {
                return values[indexOf(indices)];
            } else {
                return getLinInt(d, indices, dim + 1);
            }
        }

        int[] lower = Arrays.copyOf(indices, indices.length);
        int[] upper = Arrays.copyOf(indices, indices.length);
        double wl, wu;

        if (d[dim] <= center) {
            // Blend with the cell below; weight grows towards 1 as d approaches the center.
            lower[dim] = Math.max(0, lower[dim] - 1);
            wu = 0.5 + (((d[dim] - bounds[0]) / (center - bounds[0])) * 0.5);
            wl = 1 - wu;
        } else {
            // Blend with the cell above.
            upper[dim] = Math.min(dimensions[dim].getSize() - 1, upper[dim] + 1);
            wl = 1 - (((d[dim] - center) / (bounds[1] - center)) * 0.5);
            wu = 1 - wl;
        }

        if (dim + 1 == dimensions.length) {
            return (values[indexOf(lower)] * wl) + (values[indexOf(upper)] * wu);
        } else {
            return (getLinInt(d, lower, dim + 1) * wl) + (getLinInt(d, upper, dim + 1) * wu);
        }
    }

    /** Flattens per-dimension indices into a flat array index using the row-major strides. */
    private int indexOf(int[] indices) {
        int result = 0;
        for (int i = 0; i < indices.length; ++i) {
            result += (indices[i] * offsets[i]);
        }
        return result;
    }

    /** Returns the flat index of the cell containing d (clamped by Dimension.indexOf). */
    public int indexOf(double[] d) {
        return indexOf(indicesOf(d));
    }

    /**
     * Maps each coordinate to its per-dimension cell index.
     *
     * @throws RuntimeException if d does not have one entry per dimension.
     */
    private int[] indicesOf(double[] d) {
        if (d.length != dimensions.length) {
            throw new RuntimeException();
        }
        int[] result = new int[dimensions.length];
        for (int i = 0; i < dimensions.length; ++i) {
            result[i] = dimensions[i].indexOf(d[i]);
        }
        return result;
    }

    /**
     * Stores v at the cell containing d (old-style lookup).
     *
     * @return true if d lies inside the table and the value was written.
     */
    public boolean insert(double[] d, double v) {
        int index = oldIndexOf(d);
        if (index != -1) {
            insert(index, v);
            return true;
        }
        return false;
    }

    /**
     * Stores v at flat index i and counts the write.
     *
     * @throws RuntimeException if i is out of range.
     */
    public void insert(int i, double v) {
        if (i < 0 || i >= values.length) {
            throw new RuntimeException();
        }
        ++writeCtr[i];
        values[i] = v;
    }

    /**
     * Old-style index lookup: returns per-dimension indices, or null if any
     * coordinate lies outside its dimension.
     *
     * @throws RuntimeException if d does not have one entry per dimension.
     */
    private int[] oldIndicesOf(double[] d) {
        if (d.length != dimensions.length) {
            throw new RuntimeException();
        }
        int[] result = new int[dimensions.length];
        for (int i = 0; i < dimensions.length; ++i) {
            int index = dimensions[i].oldIndexOf(d[i]);
            if (index == -1) {
                return null;
            } else {
                result[i] = index;
            }
        }
        return result;
    }

    /** Returns the flat index of the cell containing d, or -1 if d is outside the table. */
    public int oldIndexOf(double[] d) {
        int[] i = oldIndicesOf(d);
        if (i != null) {
            return oldIndexOf(i);
        }
        return -1;
    }

    /** Flattens per-dimension indices into a flat array index (old-style variant). */
    private int oldIndexOf(int[] indices) {
        int result = 0;
        for (int i = 0; i < indices.length; ++i) {
            result += (indices[i] * offsets[i]);
        }
        return result;
    }

    /** Old-style multi-linear interpolation at d (no edge handling, strict bounds). */
    public double oldGetLinInt(double[] d) {
        return oldGetLinInt(d, oldIndicesOf(d), 0);
    }

    /** Recursive helper for {@link #oldGetLinInt(double[])}. */
    private double oldGetLinInt(double[] d, int[] indices, int dim) {
        int[] lower = Arrays.copyOf(indices, indices.length);
        int[] upper = Arrays.copyOf(indices, indices.length);
        double center = dimensions[dim].getCenter(indices[dim]);
        double[] bounds = dimensions[dim].getBoundaries(indices[dim]);
        double wl, wu;

        if (d[dim] < center) {
            lower[dim] = Math.max(0, lower[dim] - 1);
            wu = 0.5 + ((d[dim] - bounds[0]) / (center - bounds[0]) * 0.5);
            wl = 1 - wu;
        } else {
            upper[dim] = Math.min(dimensions[dim].getSize() - 1, upper[dim] + 1);
            wl = 1 - (((d[dim] - center) / (bounds[1] - center)) * 0.5);
            wu = 1 - wl;
        }

        if (dim + 1 == dimensions.length) {
            return values[oldIndexOf(lower)] * wl + values[oldIndexOf(upper)] * wu;
        } else {
            // BUG FIX: the upper recursive branch previously lacked its weight (* wu),
            // which skewed every interpolated value towards the upper neighbour.
            return oldGetLinInt(d, lower, dim + 1) * wl + oldGetLinInt(d, upper, dim + 1) * wu;
        }
    }

    /** Clears all values (to NaN) and both access counters. */
    public void reset() {
        Arrays.fill(values, Double.NaN);
        Arrays.fill(readCtr, 0);
        Arrays.fill(writeCtr, 0);
    }

    /** Clears only the read/write counters, keeping stored values. */
    public void resetCounters() {
        Arrays.fill(readCtr, 0);
        Arrays.fill(writeCtr, 0);
    }

    /** Returns the total number of cells. */
    public int getSize() {
        return values.length;
    }

    /**
     * Returns the per-dimension [lower, upper] boundaries of the cell at the given flat index.
     *
     * @throws RuntimeException if the index is out of range.
     */
    public double[][] getBoundaries(int index) {
        if (index < 0 || index >= values.length) {
            throw new RuntimeException();
        }
        double[][] result = new double[dimensions.length][2];
        for (int i = 0; i < this.dimensions.length; ++i) {
            // Decompose the flat index back into per-dimension indices.
            int k = index / offsets[i];
            index -= k * offsets[i];
            double[] d = dimensions[i].getBoundaries(k);
            result[i][0] = d[0];
            result[i][1] = d[1];
        }
        return result;
    }

    /** Returns the fraction of cells holding a real (non-NaN) value. */
    public double usage() {
        int ctr = 0;
        for (int i = 0; i < values.length; ++i) {
            if (!Double.isNaN(values[i])) {
                ++ctr;
            }
        }
        return (double) ctr / (double) values.length;
    }

    /**
     * Loads a table from a UTF-8 stream.
     *
     * @throws RuntimeException wrapping any I/O or parse error.
     */
    public static LookUpTable load(InputStream in) {
        try {
            return LookUpTable.load(new BufferedReader(new InputStreamReader(in, "UTF-8")));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Loads a table from a reader in the format produced by {@link #write}.
     * Closes the reader when done.
     *
     * @throws RuntimeException wrapping any I/O or parse error.
     */
    public static LookUpTable load(BufferedReader reader) {
        try {
            LookUpTable result = new LookUpTable();
            String line;
            // number of dimensions (first line) and dimensions (following lines)
            line = reader.readLine();
            result.dimensions = new Dimension[Integer.parseInt(line)];
            result.offsets = new int[result.dimensions.length];
            int size = 1;
            for (int i = 0; i < result.dimensions.length; ++i) {
                line = reader.readLine();
                result.dimensions[i] = new Dimension(line);
                size *= result.dimensions[i].getSize();
            }
            result.values = new double[size];
            result.readCtr = new int[size];
            result.writeCtr = new int[size];
            // offsets (one space-separated line)
            line = reader.readLine();
            StringTokenizer tokenizer = new StringTokenizer(line, " ");
            for (int i = 0; i < result.offsets.length; ++i) {
                result.offsets[i] = Integer.parseInt(tokenizer.nextToken());
            }
            // cell values
            line = reader.readLine();
            tokenizer = new StringTokenizer(line, " ");
            for (int i = 0; i < result.values.length; ++i) {
                result.values[i] = Double.parseDouble(tokenizer.nextToken());
            }
            // read counters
            line = reader.readLine();
            tokenizer = new StringTokenizer(line, " ");
            for (int i = 0; i < result.readCtr.length; ++i) {
                result.readCtr[i] = Integer.parseInt(tokenizer.nextToken());
            }
            // write counters
            line = reader.readLine();
            tokenizer = new StringTokenizer(line, " ");
            for (int i = 0; i < result.writeCtr.length; ++i) {
                result.writeCtr[i] = Integer.parseInt(tokenizer.nextToken());
            }
            reader.close();
            return result;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Loads a table from the given file path.
     *
     * @throws RuntimeException wrapping any I/O or parse error.
     */
    public static LookUpTable load(String file) {
        try {
            return LookUpTable.load(new FileInputStream(file));
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Serializes this table to the writer in the line-oriented format read by
     * {@link #load(BufferedReader)}. Does not close the writer.
     */
    public void write(java.io.OutputStreamWriter w)
            throws Exception {
        w.write(String.valueOf(dimensions.length));
        w.write("\n");
        for (int i = 0; i < dimensions.length; ++i) {
            dimensions[i].write(w);
            w.write("\n");
        }
        for (int i = 0; i < offsets.length; ++i) {
            w.write(String.valueOf(offsets[i]));
            w.write(" ");
        }
        w.write("\n");
        for (int i = 0; i < values.length; ++i) {
            w.write(String.valueOf(values[i]));
            w.write(" ");
        }
        w.write("\n");
        for (int i = 0; i < readCtr.length; ++i) {
            w.write(String.valueOf(readCtr[i]));
            w.write(" ");
        }
        w.write("\n");
        for (int i = 0; i < writeCtr.length; ++i) {
            w.write(String.valueOf(writeCtr[i]));
            w.write(" ");
        }
    }

    /** Saves this table to the given file path as UTF-8. */
    public void save(String file)
            throws Exception {
        FileOutputStream fo = new FileOutputStream(file);
        OutputStreamWriter w = new OutputStreamWriter(fo, "UTF-8");
        write(w);
        w.flush();
        fo.flush();
        w.close();
        fo.close();
    }

    /** Formats per-dimension half-open boundary pairs as "[lo, hi[ [lo, hi[ ...". */
    public static String boundsToString(double[][] d) {
        StringBuilder result = new StringBuilder();
        for (int k = 0; k < d.length; ++k) {
            result.append("[");
            result.append(Utils.dTS(d[k][0]));
            result.append(", ");
            result.append(Utils.dTS(d[k][1]));
            result.append("[");
            if (k < d.length - 1) {
                result.append(" ");
            }
        }
        return result.toString();
    }

    /** Dumps the dimensions and every cell (boundaries plus value) to stdout. */
    public void print() {
        for (int i = 0; i < this.dimensions.length; ++i) {
            System.out.println((i + 1) + " " + dimensions[i]);
        }
        for (int i = 0; i < values.length; ++i) {
            double[][] d = getBoundaries(i);
            System.out.print(boundsToString(d));
            System.out.print(" " + Utils.dTS(values[i]));
            System.out.println("");
        }
    }

    /**
     * Paints the table as a 1-d curve over a speed axis (0..350 km/h, y in [-1, 1]).
     * Only meaningful for tables whose first dimension is a speed with 6 cells.
     *
     * @param g       target graphics context.
     * @param c       curve color.
     * @param paintBG whether to draw the white background and axes first.
     */
    public void paint(Graphics2D g, Color c, boolean paintBG) {
        java.awt.Dimension size = new java.awt.Dimension(400, 100);
        final int OFFSET = 30;
        final int SPACE = size.width - (OFFSET + OFFSET);
        int x;
        if (paintBG) {
            g.setColor(Color.WHITE);
            g.fillRect(0, 0, size.width, size.height);
            g.setColor(Color.BLACK);
            g.drawLine(OFFSET - 5, size.height / 2, size.width - OFFSET, size.height / 2);
            g.drawString("0", 15, (size.height / 2) + 6);
            g.drawLine(OFFSET, 10, OFFSET, size.height - 10);
            g.drawLine(OFFSET - 5, 10, OFFSET + 5, 10);
            g.drawString("1", 15, 15);
            g.drawLine(OFFSET - 5, size.height - 10, OFFSET + 5, size.height - 10);
            g.drawString("-1", 11, size.height - 7);
            x = OFFSET + (int) Math.round(100.0 / 350.0 * SPACE);
            g.drawLine(x, (size.height / 2) - 5, x, (size.height / 2) + 5);
            g.drawString("100km/h", x - 30, (size.height / 2) + 20);
            x = OFFSET + (int) Math.round(200.0 / 350.0 * SPACE);
            g.drawLine(x, (size.height / 2) - 5, x, (size.height / 2) + 5);
            g.drawString("200km/h", x - 30, (size.height / 2) + 20);
            x = OFFSET + (int) Math.round(300.0 / 350.0 * SPACE);
            g.drawLine(x, (size.height / 2) - 5, x, (size.height / 2) + 5);
            g.drawString("300km/h", x - 30, (size.height / 2) + 20);
            x = OFFSET + (int) Math.round(350.0 / 350.0 * SPACE);
            g.drawLine(x, (size.height / 2) - 5, x, (size.height / 2) + 5);
            g.drawString("350km/h", x - 20, (size.height / 2) + 20);
        }
        int x1, x2, y1, y2;
        g.setColor(c);
        // First segment is flat up to the first cell center (34 km/h).
        x1 = OFFSET;
        x2 = OFFSET + (int) Math.round(34.0 / 350.0 * SPACE);
        y2 = (int) Math.round((size.height / 2) - (get(0) * ((size.height - 20) / 2)));
        g.drawLine(x1, y2, x2, y2);
        // Connect the cell centers (spaced 68 km/h apart).
        for (int i = 1; i < 5; ++i) {
            x1 = x2;
            y1 = y2;
            x2 = OFFSET + (int) Math.round((34.0 + (i * 68.0)) / 350.0 * SPACE);
            y2 = (int) Math.round((size.height / 2) - (get(i) * ((size.height - 20) / 2)));
            g.drawLine(x1, y1, x2, y2);
        }
        // Final flat segment out to 340 km/h.
        x1 = x2;
        y1 = y2;
        x2 = OFFSET + (int) Math.round(340.0 / 350.0 * SPACE);
        g.drawLine(x1, y1, x2, y2);
    }

    /** Ad-hoc demo: loads a bundled steering table and prints an interpolated grid. */
    public static void main(String[] args) {
        // Load a table that ships with the application resources.
        java.io.InputStream in = LookUpTable.class.getResourceAsStream(
                "/de/janquadflieg/mrracer/data/cigtables/ALL_FAST_steering");
        LookUpTable lut = LookUpTable.load(in);
        // quadratisch
        for (int i = 0; i < 40; ++i) {
            double trackPos = -2 + (i * 0.1);
            if (i == 0) {
                // Header row with the track angles.
                System.out.println(" Trackangle");
                System.out.printf("%6s", "");
                for (int j = 0; j < 40; ++j) {
                    double trackAngle = -20 + (j * 1.0);
                    System.out.printf("%7s", Utils.dTS(trackAngle));
                }
                System.out.println("");
            }
            System.out.printf("%6s", Utils.dTS(trackPos) + " ");
            for (int j = 0; j < 40; ++j) {
                double trackAngle = -20 + (j * 1.0);
                double[] v = new double[2];
                v[0] = trackPos;
                v[1] = trackAngle;
                System.out.printf("%7s", Utils.dTS(lut.getLinInt(v)) + " ");
            }
            System.out.println("");
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.text.DecimalFormat;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.apache.pig.PigServer;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.parser.ParserException;
import org.apache.pig.test.utils.TestHelper;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Test;
public class TestForEachNestedPlan {
    /** Shared mini cluster for all tests; torn down once after the class. */
    static MiniGenericCluster cluster = MiniGenericCluster.buildCluster();

    private PigServer pig;

    public TestForEachNestedPlan() throws Throwable {
        pig = new PigServer(cluster.getExecType(), cluster.getProperties());
    }

    /** Each data-driven test runs once without and once with injected nulls. */
    Boolean[] nullFlags = new Boolean[]{ false, true };

    @AfterClass
    public static void oneTimeTearDown() throws Exception {
        cluster.shutDown();
    }

    /** ORDER BY * inside a nested FOREACH must emit every input row. */
    @Test
    public void testInnerOrderBy() throws Exception {
        for (int i = 0; i < nullFlags.length; i++) {
            System.err.println("Running testInnerOrderBy with nullFlags set to :"
                    + nullFlags[i]);
            File tmpFile = genDataSetFile1(nullFlags[i]);
            pig.registerQuery("a = load '"
                    + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "'; ");
            pig.registerQuery("b = group a by $0; ");
            pig.registerQuery("c = foreach b { " + " c1 = order $1 by *; "
                    + " generate flatten(c1); " + "};");
            Iterator<Tuple> it = pig.openIterator("c");
            Tuple t = null;
            int count = 0;
            while (it.hasNext()) {
                t = it.next();
                System.out.println(count + ":" + t);
                count++;
            }
            // JUnit convention: expected value first.
            assertEquals(30, count);
        }
    }

    /** Nested ORDER BY * when the relation has a declared schema. */
    @Test
    public void testInnerOrderByStarWithSchema() throws Exception {
        File tmpFile = genDataSetFile1(false);
        pig.registerQuery("a = load '" + Util.generateURI(tmpFile.toString(),
                pig.getPigContext()) + "' as (a0, a1);");
        pig.registerQuery("b = group a by a0; ");
        pig.registerQuery("c = foreach b { d = order a by *; "
                + " generate group, d; };");
        Iterator<Tuple> it = pig.openIterator("c");
        Tuple t = null;
        int count = 0;
        while (it.hasNext()) {
            t = it.next();
            System.out.println(count + ":" + t);
            count++;
        }
        assertEquals(10, count);
    }

    /**
     * Projecting multiple columns via an intermediate alias inside a nested
     * FOREACH must produce the same bag as projecting them inline.
     */
    @Test
    public void testMultiColInAlias() throws Exception {
        pig.getPigContext().getProperties().setProperty("pig.exec.nosecondarykey", "true");
        String INPUT_FILE = "test-multi-alias.txt";
        PrintWriter w = new PrintWriter(new FileWriter(INPUT_FILE));
        w.println("10\tnrai01\t01");
        w.println("20\tnrai02\t02");
        w.close();

        try {
            Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
            pig.registerQuery("A = load '" + INPUT_FILE + "' "
                    + "as (a:int, b:chararray, c:int);");
            pig.registerQuery("B = GROUP A BY (a, b);");
            DataBag dbfrj = BagFactory.getInstance().newDefaultBag(), dbshj = BagFactory.getInstance().newDefaultBag();
            {
                pig.registerQuery("C = FOREACH B { bg = A.($1,$2); GENERATE group, bg; } ;");
                Iterator<Tuple> iter1 = pig.openIterator("C");
                while (iter1.hasNext()) {
                    dbfrj.add(iter1.next());
                }
            }
            {
                pig.registerQuery("D = FOREACH B { GENERATE group, A.($1,$2);};");
                Iterator<Tuple> iter2 = pig.openIterator("D");
                while (iter2.hasNext()) {
                    dbshj.add(iter2.next());
                }
            }
            assertEquals(dbfrj.size(), dbshj.size());
            Assert.assertTrue(TestHelper.compareBags(dbfrj, dbshj));
        } finally {
            new File(INPUT_FILE).delete();
            try {
                Util.deleteFile(cluster, INPUT_FILE);
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }

    /** SUM over a pre-grouped bag column, i.e. without an explicit GROUP BY. */
    @Test
    public void testAlgebricFuncWithoutGroupBy()
            throws IOException, ParserException {
        String INPUT_FILE = "test-sum.txt";
        PrintWriter w = new PrintWriter(new FileWriter(INPUT_FILE));
        w.println("10\t{(1),(2),(3)}");
        w.println("20\t{(4),(5),(6),(7)}");
        w.println("30\t{(8),(9)}");
        w.close();

        try {
            Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
            pig.registerQuery("a = load '" + INPUT_FILE + "' "
                    + "as (id:int, g:bag{t:tuple(u:int)});");
            pig.registerQuery("b = foreach a generate id, SUM(g);");
            Iterator<Tuple> iter = pig.openIterator("b");
            List<Tuple> expectedResults =
                    Util.getTuplesFromConstantTupleStrings(
                            new String[] {
                                    "(10,6L)",
                                    "(20,22L)",
                                    "(30,17L)"
                            });
            int counter = 0;
            while (iter.hasNext()) {
                assertEquals(expectedResults.get(counter++).toString(),
                        iter.next().toString());
            }
            assertEquals(expectedResults.size(), counter);
        } finally {
            new File(INPUT_FILE).delete();
            try {
                Util.deleteFile(cluster, INPUT_FILE);
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }

    /** DISTINCT inside a nested FOREACH feeding MIN. */
    @Test
    public void testInnerDistinct()
            throws IOException, ParserException {
        String INPUT_FILE = "test-distinct.txt";
        PrintWriter w = new PrintWriter(new FileWriter(INPUT_FILE));
        w.println("10\t89");
        w.println("20\t78");
        w.println("10\t68");
        w.println("10\t89");
        w.println("20\t92");
        w.close();

        try {
            Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
            pig.registerQuery("A = load '" + INPUT_FILE
                    + "' as (age:int, gpa:int);");
            pig.registerQuery("B = group A by age;");
            pig.registerQuery("C = foreach B { D = A.gpa; E = distinct D; " +
                    "generate group, MIN(E); };");
            Iterator<Tuple> iter = pig.openIterator("C");
            List<Tuple> expectedResults =
                    Util.getTuplesFromConstantTupleStrings(
                            new String[] {"(10,68)", "(20,78)"});
            int counter = 0;
            while (iter.hasNext()) {
                assertEquals(expectedResults.get(counter++).toString(),
                        iter.next().toString());
            }
            assertEquals(expectedResults.size(), counter);
        } finally {
            new File(INPUT_FILE).delete();
            try {
                Util.deleteFile(cluster, INPUT_FILE);
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }

    /** Re-assigning an alias (X = A; X = order X ...) inside a nested FOREACH. */
    @Test
    public void testInnerOrderByAliasReuse()
            throws IOException, ParserException {
        String INPUT_FILE = "test-innerorderbyaliasreuse.txt";
        PrintWriter w = new PrintWriter(new FileWriter(INPUT_FILE));
        w.println("1\t4");
        w.println("1\t3");
        w.println("2\t3");
        w.println("2\t4");
        w.close();

        try {
            Util.copyFromLocalToCluster(cluster, INPUT_FILE, INPUT_FILE);
            pig.registerQuery("A = load '" + INPUT_FILE
                    + "' as (v1:int, v2:int);");
            pig.registerQuery("B = group A by v1;");
            pig.registerQuery("C = foreach B { X = A; X = order X by v2 asc; " +
                    "generate flatten(X); };");
            Iterator<Tuple> iter = pig.openIterator("C");
            List<Tuple> expectedResults =
                    Util.getTuplesFromConstantTupleStrings(
                            new String[] {"(1,3)", "(1,4)", "(2,3)", "(2,4)"});
            int counter = 0;
            while (iter.hasNext()) {
                assertEquals(expectedResults.get(counter++).toString(),
                        iter.next().toString());
            }
            assertEquals(expectedResults.size(), counter);
        } finally {
            new File(INPUT_FILE).delete();
            try {
                Util.deleteFile(cluster, INPUT_FILE);
            } catch (IOException e) {
                e.printStackTrace();
                Assert.fail();
            }
        }
    }

    /***
     * For generating a sample dataset: 30 rows of two zero-padded numeric
     * columns; when withNulls is set, roughly 30% of the first column is empty.
     */
    private File genDataSetFile1(boolean withNulls) throws IOException {
        int dataLength = 30;
        String[][] data = new String[dataLength][];
        DecimalFormat formatter = new DecimalFormat("0000000");
        Random r = new Random();
        for (int i = 0; i < dataLength; i++) {
            data[i] = new String[2];
            // inject nulls randomly
            if (withNulls && r.nextInt(dataLength) < 0.3 * dataLength) {
                data[i][0] = "";
            } else {
                data[i][0] = formatter.format(i % 10);
            }
            data[i][1] = formatter.format((dataLength - i) / 2);
        }
        return TestHelper.createTempFile(data);
    }
}
| |
/*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
package com.spectralogic.ds3autogen.utils;
import com.google.common.collect.ImmutableList;
import com.spectralogic.ds3autogen.api.models.enums.Action;
import com.spectralogic.ds3autogen.api.models.Arguments;
import com.spectralogic.ds3autogen.api.models.apispec.Ds3ResponseCode;
import com.spectralogic.ds3autogen.api.models.enums.HttpVerb;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link Helper}.
 *
 * <p>Note on assertion style: JUnit's {@code assertEquals} takes the EXPECTED
 * value first and the ACTUAL value second; several tests previously had these
 * swapped, which produces misleading failure messages. Likewise,
 * {@code assertTrue(a.equals(b))} is replaced with {@code assertEquals} so a
 * failure reports both values instead of just "expected true".
 */
public class Helper_Test {
    @Test
    public void camelCaseToUnderscore_Test() {
        assertEquals("bumpy_case_word", Helper.camelToUnderscore("BumpyCaseWord"));
    }
    @Test
    public void removeTrailingRequestHandler_Test() {
        assertEquals("Some", Helper.removeTrailingRequestHandler("SomeRequestHandler"));
    }
    @Test
    public void underscoreToCamel_Test() {
        assertEquals("DegradedDataPersistenceRule", Helper.underscoreToCamel("DEGRADED_DATA_PERSISTENCE_RULE"));
    }
    @Test
    public void removeTrailingRequestHandlerWithDollarSign_Test() {
        assertEquals("Some", Helper.removeTrailingRequestHandler("SomeRequestHandler$SomeExtensionApiBeanBlah"));
    }
    @Test
    public void unqualifiedName_Test() {
        assertEquals("name", Helper.unqualifiedName("some.qualified.name"));
    }
    @Test
    public void getHttpVerb() {
        // An explicit verb always wins over the action-derived verb.
        assertThat(Helper.getHttpVerb(HttpVerb.DELETE, null), is("DELETE"));
        assertThat(Helper.getHttpVerb(HttpVerb.DELETE, Action.CREATE), is("DELETE"));
        // With no explicit verb, the verb is derived from the action.
        assertThat(Helper.getHttpVerb(null, Action.BULK_MODIFY), is("PUT"));
        assertThat(Helper.getHttpVerb(null, Action.CREATE), is("PUT"));
        assertThat(Helper.getHttpVerb(null, Action.DELETE), is("DELETE"));
        assertThat(Helper.getHttpVerb(null, Action.LIST), is("GET"));
        assertThat(Helper.getHttpVerb(null, Action.MODIFY), is("PUT"));
        assertThat(Helper.getHttpVerb(null, Action.SHOW), is("GET"));
        assertThat(Helper.getHttpVerb(null, Action.BULK_DELETE), is("DELETE"));
    }
    @Test
    public void sortConstructorArgs() {
        // BucketName and ObjectName are expected to sort first, the rest by name.
        final ImmutableList<Arguments> expectedResult = ImmutableList.of(
                new Arguments("String", "BucketName"),
                new Arguments("String", "ObjectName"),
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"),
                new Arguments("Type3", "Arg3"));
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("Type2", "Arg2"),
                new Arguments("String", "ObjectName"),
                new Arguments("Type1", "Arg1"),
                new Arguments("Type3", "Arg3"),
                new Arguments("String", "BucketName"));
        final ImmutableList<Arguments> result = Helper.sortConstructorArgs(arguments);
        for (int i = 0; i < arguments.size(); i++) {
            assertEquals(expectedResult.get(i).getName(), result.get(i).getName());
        }
    }
    @Test
    public void containsArgument_NullList_Test() {
        assertFalse(Helper.containsArgument(null, "ArgName"));
        assertFalse(Helper.containsArgument(null, ""));
        assertFalse(Helper.containsArgument(null, null));
    }
    @Test
    public void containsArgument_EmptyList_Test() {
        assertFalse(Helper.containsArgument(ImmutableList.of(), "ArgName"));
        assertFalse(Helper.containsArgument(ImmutableList.of(), ""));
        assertFalse(Helper.containsArgument(ImmutableList.of(), null));
    }
    @Test
    public void containsArgument_FullList_Test() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"));
        assertTrue(Helper.containsArgument(arguments, "Arg1"));
        assertTrue(Helper.containsArgument(arguments, "Arg2"));
        assertFalse(Helper.containsArgument(arguments, "Arg3"));
        assertFalse(Helper.containsArgument(arguments, ""));
        assertFalse(Helper.containsArgument(arguments, null));
    }
    @Test
    public void addArgumentListsNull() {
        // A null list on either side is treated as empty.
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"));
        final ImmutableList<Arguments> resultAddLists1 = Helper.addArgument(arguments, null);
        assertThat(resultAddLists1.size(), CoreMatchers.is(2));
        assertTrue(Helper.containsArgument(resultAddLists1, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddLists1, "Arg2"));
        final ImmutableList<Arguments> resultAddLists2 = Helper.addArgument(null, arguments);
        assertThat(resultAddLists2.size(), CoreMatchers.is(2));
        assertTrue(Helper.containsArgument(resultAddLists2, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddLists2, "Arg2"));
        final ImmutableList<Arguments> resultAddLists3 = Helper.addArgument(null, null);
        assertThat(resultAddLists3.size(), CoreMatchers.is(0));
    }
    @Test
    public void addArgumentListsEmpty() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"));
        final ImmutableList<Arguments> resultAddLists1 = Helper.addArgument(arguments, ImmutableList.of());
        assertThat(resultAddLists1.size(), CoreMatchers.is(2));
        assertTrue(Helper.containsArgument(resultAddLists1, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddLists1, "Arg2"));
        final ImmutableList<Arguments> resultAddLists2 = Helper.addArgument(ImmutableList.of(), arguments);
        assertThat(resultAddLists2.size(), CoreMatchers.is(2));
        assertTrue(Helper.containsArgument(resultAddLists2, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddLists2, "Arg2"));
        final ImmutableList<Arguments> resultAddLists3 = Helper.addArgument(ImmutableList.of(), ImmutableList.of());
        assertThat(resultAddLists3.size(), CoreMatchers.is(0));
    }
    @Test
    public void addArgumentElementNullOrEmpty() {
        final ImmutableList<Arguments> resultAddElementNull = Helper.addArgument(null, "ArgName", "ArgType");
        assertThat(resultAddElementNull.size(), CoreMatchers.is(1));
        assertTrue(Helper.containsArgument(resultAddElementNull, "ArgName"));
        final ImmutableList<Arguments> resultAddElementEmpty = Helper.addArgument(ImmutableList.of(), "ArgName", "ArgType");
        assertThat(resultAddElementEmpty.size(), CoreMatchers.is(1));
        assertTrue(Helper.containsArgument(resultAddElementEmpty, "ArgName"));
    }
    @Test
    public void addArgumentFull() {
        final ImmutableList<Arguments> arguments1 = ImmutableList.of(
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"));
        final ImmutableList<Arguments> arguments2 = ImmutableList.of(
                new Arguments("Type3", "Arg3"),
                new Arguments("Type4", "Arg4"));
        final ImmutableList<Arguments> resultAddLists = Helper.addArgument(arguments1, arguments2);
        assertTrue(Helper.containsArgument(resultAddLists, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddLists, "Arg2"));
        assertTrue(Helper.containsArgument(resultAddLists, "Arg3"));
        assertTrue(Helper.containsArgument(resultAddLists, "Arg4"));
        final ImmutableList<Arguments> resultAddSingle = Helper.addArgument(arguments1, "Arg5", "Type5");
        assertTrue(Helper.containsArgument(resultAddSingle, "Arg1"));
        assertTrue(Helper.containsArgument(resultAddSingle, "Arg2"));
        assertTrue(Helper.containsArgument(resultAddSingle, "Arg5"));
    }
    @Test
    public void removeArgument() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("Type1", "Arg1"),
                new Arguments("Type2", "Arg2"),
                new Arguments("Type3", "Arg3"));
        final ImmutableList<Arguments> result = Helper.removeArgument(arguments, "Arg2");
        assertFalse(Helper.containsArgument(result, "Arg2"));
        assertTrue(Helper.containsArgument(result, "Arg1"));
        assertTrue(Helper.containsArgument(result, "Arg3"));
    }
    @Test
    public void getResponseCodes() {
        // Codes are rendered sorted ascending, comma separated.
        final String expectedResult = "200, 206, 307, 400";
        final ImmutableList<Ds3ResponseCode> responseCodes = ImmutableList.of(
                new Ds3ResponseCode(307, null),
                new Ds3ResponseCode(206, null),
                new Ds3ResponseCode(200, null),
                new Ds3ResponseCode(400, null));
        final String result = Helper.getResponseCodes(responseCodes);
        assertThat(result, CoreMatchers.is(expectedResult));
    }
    @Test
    public void stripPath() {
        final String expectedResult = "BlobApiBean";
        final String result = Helper.stripPath("com.spectralogic.s3.common.platform.domain.BlobApiBean");
        assertThat(result, CoreMatchers.is(expectedResult));
        // Already-unqualified names pass through unchanged.
        final String result2 = Helper.stripPath("BlobApiBean");
        assertThat(result2, CoreMatchers.is(expectedResult));
    }
    @Test
    public void addVoidArgument() {
        final Arguments voidArg = new Arguments("void", "ArgName");
        assertTrue(Helper.addVoidArgument(voidArg, Helper.SelectRemoveVoidType.SELECT_VOID));
        assertFalse(Helper.addVoidArgument(voidArg, Helper.SelectRemoveVoidType.REMOVE_VOID));
        final Arguments intArg = new Arguments("int", "ArgName");
        assertFalse(Helper.addVoidArgument(intArg, Helper.SelectRemoveVoidType.SELECT_VOID));
        assertTrue(Helper.addVoidArgument(intArg, Helper.SelectRemoveVoidType.REMOVE_VOID));
    }
    @Test
    public void adjustVoidArguments() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("void", "VoidArg1"),
                new Arguments("void", "VoidArg2"),
                new Arguments("int", "IntArg"),
                new Arguments("double", "DoubleArg"));
        final ImmutableList<Arguments> voidArgs = Helper
                .selectOrRemoveVoidArguments(arguments, Helper.SelectRemoveVoidType.SELECT_VOID);
        assertThat(voidArgs.size(), CoreMatchers.is(2));
        assertEquals("void", voidArgs.get(0).getType());
        assertEquals("void", voidArgs.get(1).getType());
        final ImmutableList<Arguments> nonVoidArgs = Helper
                .selectOrRemoveVoidArguments(arguments, Helper.SelectRemoveVoidType.REMOVE_VOID);
        assertThat(nonVoidArgs.size(), CoreMatchers.is(2));
        assertNotEquals("void", nonVoidArgs.get(0).getType());
        assertNotEquals("void", nonVoidArgs.get(1).getType());
    }
    @Test
    public void getVoidArguments() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("void", "VoidArg1"),
                new Arguments("void", "VoidArg2"),
                new Arguments("int", "IntArg"),
                new Arguments("double", "DoubleArg"));
        final ImmutableList<Arguments> result = Helper.getVoidArguments(arguments);
        assertThat(result.size(), CoreMatchers.is(2));
        assertEquals("void", result.get(0).getType());
        assertEquals("void", result.get(1).getType());
    }
    @Test
    public void removeVoidArguments() {
        final ImmutableList<Arguments> arguments = ImmutableList.of(
                new Arguments("void", "VoidArg1"),
                new Arguments("void", "VoidArg2"),
                new Arguments("int", "IntArg"),
                new Arguments("double", "DoubleArg"));
        final ImmutableList<Arguments> result = Helper.removeVoidArguments(arguments);
        assertThat(result.size(), CoreMatchers.is(2));
        assertNotEquals("void", result.get(0).getType());
        assertNotEquals("void", result.get(1).getType());
    }
    @Test
    public void testIsBasicType() {
        final String testString = "boolean";
        assertThat(Helper.isPrimitiveType(testString), is(true));
    }
    @Test
    public void testIsBasicTypeNegative() {
        final String testString = "com.spectralogic.s3.server.domain.UserApiBean";
        assertThat(Helper.isPrimitiveType(testString), is(false));
    }
    @Test
    public void capFirstTest() {
        // Only a leading ASCII letter is affected; other leading chars pass through.
        assertThat(Helper.capFirst(""), is(""));
        assertThat(Helper.capFirst("helloWorld"), is("HelloWorld"));
        assertThat(Helper.capFirst("HelloWorld"), is("HelloWorld"));
        assertThat(Helper.capFirst(".HelloWorld"), is(".HelloWorld"));
        assertThat(Helper.capFirst(".helloWorld"), is(".helloWorld"));
        assertThat(Helper.capFirst(" helloWorld"), is(" helloWorld"));
        assertThat(Helper.capFirst(" HelloWorld"), is(" HelloWorld"));
    }
    @Test
    public void uncapFirstTest() {
        assertThat(Helper.uncapFirst(""), is(""));
        assertThat(Helper.uncapFirst("helloWorld"), is("helloWorld"));
        assertThat(Helper.uncapFirst("HelloWorld"), is("helloWorld"));
        assertThat(Helper.uncapFirst(".HelloWorld"), is(".HelloWorld"));
        assertThat(Helper.uncapFirst(".helloWorld"), is(".helloWorld"));
        assertThat(Helper.uncapFirst(" helloWorld"), is(" helloWorld"));
        assertThat(Helper.uncapFirst(" HelloWorld"), is(" HelloWorld"));
    }
}
| |
package io.coding.me.m2p2.plugin.nexus2x;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.sonatype.nexus.proxy.*;
import org.sonatype.nexus.proxy.attributes.Attributes;
import org.sonatype.nexus.proxy.item.DefaultStorageCollectionItem;
import org.sonatype.nexus.proxy.item.RepositoryItemUid;
import org.sonatype.nexus.proxy.item.StorageCollectionItem;
import org.sonatype.nexus.proxy.item.StorageItem;
/**
 * A {@link StorageCollectionItem} facade that adds virtual files to a
 * collection of storage items.
 *
 * <p>All metadata calls are delegated to the wrapped collection item; only
 * {@link #list()} differs, returning the real children captured at
 * construction time plus any items added via
 * {@link #addVirtualStorageItem(StorageItem)}.
 */
@SuppressWarnings ("deprecation")
class VirtualStorageCollectionItemFacade implements StorageCollectionItem {
    // The real collection item all calls are delegated to.
    final StorageCollectionItem masterCollectionItem;
    // Snapshot of the master's children taken at construction time,
    // plus any virtual items added afterwards.
    final List<StorageItem> itemList = new ArrayList<>();
    /**
     * Creates a new instance
     *
     * @param masterCollectionItem The real collection
     * @throws AccessDeniedException In case of a Nexus error
     * @throws StorageException In case of a Nexus error
     * @throws NoSuchResourceStoreException In case of a Nexus error
     * @throws IllegalOperationException In case of a Nexus error
     * @throws ItemNotFoundException In case of a Nexus error
     */
    public VirtualStorageCollectionItemFacade(StorageCollectionItem masterCollectionItem)
            throws AccessDeniedException, StorageException, NoSuchResourceStoreException, IllegalOperationException,
            ItemNotFoundException {
        this.masterCollectionItem = masterCollectionItem;
        // Eagerly capture the master's children; later changes to the master
        // collection are NOT reflected in this facade.
        itemList.addAll(masterCollectionItem.list());
    }
    /**
     * Adds an item to the internal list
     *
     * @param item The item
     */
    public void addVirtualStorageItem(StorageItem item) {
        itemList.add(item);
    }
    // ---------------------------------------------------------------------
    // Plain delegation to the wrapped collection item from here on.
    // ---------------------------------------------------------------------
    @Override
    public ResourceStoreRequest getResourceStoreRequest() {
        return masterCollectionItem.getResourceStoreRequest();
    }
    @Override
    public RepositoryItemUid getRepositoryItemUid() {
        return masterCollectionItem.getRepositoryItemUid();
    }
    @Override
    public void setRepositoryItemUid(RepositoryItemUid repositoryItemUid) {
        masterCollectionItem.setRepositoryItemUid(repositoryItemUid);
    }
    @Override
    public String getRepositoryId() {
        return masterCollectionItem.getRepositoryId();
    }
    @Override
    public long getCreated() {
        return masterCollectionItem.getCreated();
    }
    @Override
    public long getModified() {
        return masterCollectionItem.getModified();
    }
    @Override
    public long getStoredLocally() {
        return masterCollectionItem.getStoredLocally();
    }
    @Override
    public void setStoredLocally(long ts) {
        masterCollectionItem.setStoredLocally(ts);
    }
    @Override
    public long getRemoteChecked() {
        return masterCollectionItem.getRemoteChecked();
    }
    @Override
    public void setRemoteChecked(long ts) {
        masterCollectionItem.setRemoteChecked(ts);
    }
    @Override
    public long getLastRequested() {
        return masterCollectionItem.getLastRequested();
    }
    @Override
    public void setLastRequested(long ts) {
        masterCollectionItem.setLastRequested(ts);
    }
    @Override
    public boolean isVirtual() {
        return masterCollectionItem.isVirtual();
    }
    @Override
    public boolean isReadable() {
        return masterCollectionItem.isReadable();
    }
    @Override
    public boolean isWritable() {
        return masterCollectionItem.isWritable();
    }
    @Override
    public boolean isExpired() {
        return masterCollectionItem.isExpired();
    }
    @Override
    public void setExpired(boolean expired) {
        masterCollectionItem.setExpired(expired);
    }
    @Override
    public String getPath() {
        return masterCollectionItem.getPath();
    }
    @Override
    public String getName() {
        return masterCollectionItem.getName();
    }
    @Override
    public String getParentPath() {
        return masterCollectionItem.getParentPath();
    }
    @Override
    public int getPathDepth() {
        return masterCollectionItem.getPathDepth();
    }
    @Override
    public String getRemoteUrl() {
        return masterCollectionItem.getRemoteUrl();
    }
    @Override
    public Attributes getRepositoryItemAttributes() {
        return masterCollectionItem.getRepositoryItemAttributes();
    }
    @Override
    public RequestContext getItemContext() {
        return masterCollectionItem.getItemContext();
    }
    @Override
    public Collection<StorageItem> list() throws AccessDeniedException, NoSuchResourceStoreException,
            IllegalOperationException, ItemNotFoundException, StorageException {
        // NOTE(review): returns the internal mutable list directly (no defensive
        // copy) — callers could mutate it; confirm this is intentional.
        return itemList;
    }
}
| |
package edu.uga.cs.pcf.base;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.List;
import models.pcf.pcl.BinaryExpression;
import models.pcf.pcl.Func;
import models.pcf.pcl.PCLCondition;
import models.pcf.pcl.PCLConstraint;
import models.pcf.pcl.PCLConstraintConstantAttribute;
import models.pcf.pcl.PCLConstraintOperation;
import models.pcf.pcl.PCLContext;
import models.pcf.pcl.PCLExceptionHandler;
import models.pcf.pcl.PCLExpression;
import models.pcf.pcl.PCLPreCondition;
import models.pcf.pcl.PCLProcessElement;
import models.pcf.pcl.UnaryExpression;
import org.apache.log4j.Logger;
import org.switchyard.component.camel.model.v1.V1CamelImplementationModel;
import org.switchyard.config.model.composite.ComponentImplementationModel;
import org.switchyard.config.model.composite.ComponentModel;
import org.switchyard.config.model.composite.ComponentServiceModel;
import org.switchyard.config.model.composite.CompositeReferenceModel;
import org.switchyard.config.model.composite.CompositeServiceModel;
import org.switchyard.config.model.composite.InterfaceModel;
import org.switchyard.config.model.composite.v1.V1ComponentModel;
import org.switchyard.config.model.composite.v1.V1ComponentReferenceModel;
import org.switchyard.config.model.composite.v1.V1InterfaceModel;
import pcf.pcl.PCLParseResult;
import pcf.pcl.PCLParser;
import scala.Option;
import scala.collection.Iterator;
import com.google.common.base.Charsets;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
import com.google.common.io.Closeables;
import edu.uga.cs.pcf.base.ServiceRegistry.ServiceRegistryInformation;
/**
 * Takes pcl and switchyard.xml as input, generates the backbone of constraint
 * operations and service integration template that are compatible with SCA
 * specifications.
 */
public class PcfLinker {
    // Default configuration resource names.
    private static final String SWITCHYARD_XML = "switchyard.xml";
    private static final String NEPOTISM_PCL = "nepotism.pcl";
    private static final Logger logger = Logger.getLogger(PcfLinker.class);
    // Input streams for the pcl constraint text and the switchyard config.
    // NOTE(review): neither is assigned in the visible code before initialize()
    // reads them — presumably injected/set elsewhere; confirm.
    private InputStream pcl;
    private InputStream switchyard;
    // Parse results, populated by initialize().
    private PCLParseResult pclResult;
    private SwitchyardConfigParser switchyardParser;
/**
* Parses pcl file and switchyard configuration
*/
public void initialize() {
// Parses pcl
PCLParser parser = new PCLParser();
String content = null;
try {
content = CharStreams.toString(new InputStreamReader(pcl, Charsets.UTF_8));
this.pclResult = parser.pclParse(content);
logger.info("------------------ parsed pcl --------------------");
logger.info(this.pclResult);
} catch (IOException e) {
e.printStackTrace();
} finally {
Closeables.closeQuietly(pcl);
}
// Parses switchyard configuration
this.switchyard = getDefaultSwitchyard();
try {
this.switchyardParser = new SwitchyardConfigParser(this.switchyard);
logger.info("------------------ parsed switchyard configuration --------------------");
logger.info(this.switchyardParser);
} catch (Exception ex) {
logger.error("failed to parse switchard configuration xml.", ex);
} finally {
Closeables.closeQuietly(this.switchyard);
}
}
/**
* Connects pcl and switchyard and links the two by generating the service
* integration template
*/
public void link() {
logger.info("--------------begin linking pcl and switchyard----------------");
// Gets all the constraints
Option<PCLConstraint> constraint = this.pclResult.constraint();
PCLConstraint pclConstraint = null;
if (!constraint.isEmpty()) {
pclConstraint = constraint.get();
}
PCLContext context = pclConstraint.context();
scala.collection.immutable.List<PCLProcessElement> processElements = context.elements();
Iterator<PCLProcessElement> iter = processElements.toIterator();
logger.info("--------------process elements----------------");
List<PCLProcessElement> activities = Lists.newArrayList();
while (iter.hasNext()) {
PCLProcessElement processElement = iter.next();
logger.info(processElement);
activities.add(processElement);
}
// constraint operations
logger.info("--------------constraint operations----------------");
scala.collection.immutable.List<PCLCondition> conditions = pclConstraint.conditions();
Iterator<PCLCondition> condIter = conditions.toIterator();
List<PCLCondition> pclConditions = Lists.newArrayList();
while (condIter.hasNext()) {
PCLCondition cond = condIter.next();
logger.info(cond);
pclConditions.add(cond);
}
// pcl exceptions
logger.info("--------------pcl exceptions----------------");
Option<PCLExceptionHandler> exceptionHandlerOption = pclConstraint.exceptionHandler();
PCLExceptionHandler exceptionHandler = null;
if (exceptionHandlerOption.isDefined()) {
exceptionHandler = exceptionHandlerOption.get();
}
logger.info(exceptionHandler);
// Gets all the switchyard configuration
java.util.List<CompositeServiceModel> services = this.switchyardParser.getServices();
java.util.List<CompositeReferenceModel> references = this.switchyardParser.getReferences();
java.util.List<ComponentModel> components = this.switchyardParser.getComponents();
for (CompositeServiceModel service : services) {
logger.info("-----------------------------------------------------------------------");
logger.info(String.format("service name %s", service.getName()));
}
for (ComponentModel component : components) {
logger.info("-----------------------------------------------------------------------");
logger.info(String.format("component name %s\tservice name %s", component.getName(), getServiceName(component)));
}
for (CompositeReferenceModel reference : references) {
logger.info("-----------------------------------------------------------------------");
logger.info(String.format("reference name %s", reference.getName()));
}
/*
* Links pcl and switchyard configuration and follows service integration
* template
* Step 1: match the services with an sca component associated to an sca service
*/
List<PclSwitchyardMatcher> matches = findMatched(activities, pclConditions, exceptionHandler, services, components);
logger.info("-----------------------------------------------------------------------");
logger.info("found matched activity and sca component");
logger.info(Joiner.on("\n").join(matches));
/*
* Step 2: for these matched services, creates a new camel route following
* service integration template.
* The new component should have the exact implementation with the old one.
*/
for (PclSwitchyardMatcher match : matches) {
ComponentModel newComponent = buildCamelRouteComponent(match);
match.setNewComponent(newComponent);
}
}
    /**
     * Creates a camel route component and creates the skeleton camel route in Java.
     * The new component keeps the old component's name and services, adds
     * references to the original service, the context monitor, each matched
     * constraint operation and (if present) the exception handler, and replaces
     * the old implementation with a generated Camel route implementation.
     * @param match the matched pcl activity / sca component pair; must not be null
     * @return the newly assembled sca component
     */
    private ComponentModel buildCamelRouteComponent(PclSwitchyardMatcher match) {
        Preconditions.checkArgument(match != null);
        // sca component
        ComponentModel old = match.getOldComponent();
        logger.info("------------old component--------------");
        logger.info(old);
        // sca service
        CompositeServiceModel service = match.getService();
        logger.info("------------sca service--------------");
        logger.info(service);
        // Constraint pcl
        // This target name space is kept the same
        String namespace = old.getTargetNamespace();
        ComponentModel result = new V1ComponentModel();
        // Set up service name
        result.setName(old.getName());
        // Sets up the service and uses the same old ones
        List<ComponentServiceModel> services = old.getServices();
        for (ComponentServiceModel s : services) {
            result.addService(s);
        }
        /*
         * sample sca references and services:
         * <sca:service name="JobInterview">
         * <sca:interface.wsdl interface="JobInterview.wsdl#wsdl.porttype(JobInterviewPortType)"/>
         * </sca:service>
         * <sca:reference name="JobInterviewService">
         * <sca:interface.wsdl interface="JobInterview.wsdl#wsdl.porttype(JobInterviewPortType)"/>
         * </sca:reference>
         * <sca:reference name="NepotismCheckerInvoker">
         * <sca:interface.java interface="edu.uga.cs.pcf.services.jobinterview.NepotismCheckerInvoker"/>
         * </sca:reference>
         * <sca:reference name="FailedConstraintOperationLoggerInvoker">
         * <sca:interface.java interface="edu.uga.cs.pcf.process.emergencyreaction.FailedConstraintOperationLoggerInvoker"/>
         * </sca:reference>
         * <sca:reference name="ContextMonitorInvoker">
         * <sca:interface.java interface="edu.uga.cs.pcf.process.monitor.ContextMonitorInvoker"/>
         * </sca:reference>
         */
        // Adds a references to the existing service, because the original service
        // is now integrated within and referenced by the new camel route.
        // Sets up name and interface properly.
        logger.info("-------------------add original service reference-------------------");
        V1ComponentReferenceModel serviceRef = new V1ComponentReferenceModel(namespace);
        serviceRef.setName(service.getName());
        serviceRef.setInterface(service.getInterface());
        result.addReference(serviceRef);
        // Continues to add the following references that are referenced by the camel route:
        // 1. monitor
        // 2. constraint operation
        // 3. exception handler if an exception handler is specified in pcl
        // Adds context monitor
        logger.info("-------------------add context monitor reference-------------------");
        ServiceRegistryInformation monitorServiceRegistry = ServiceRegistry.getRegistry(ServiceRegistry.MONITOR_OPERATION);
        V1ComponentReferenceModel monitorRef = new V1ComponentReferenceModel(namespace);
        monitorRef.setName(monitorServiceRegistry.getServiceName());
        V1InterfaceModel monitorJavaInterface = new V1InterfaceModel(InterfaceModel.JAVA, namespace);
        monitorJavaInterface.setInterface(monitorServiceRegistry.getJavaImplementation());
        monitorRef.setInterface(monitorJavaInterface);
        result.addReference(monitorRef);
        // Adds constraint operations (one reference per condition that resolves
        // to a registered constraint operation; unresolved ones are skipped)
        List<PCLCondition> conditions = match.getConditions();
        for (PCLCondition condition : conditions) {
            ConstraintOperationAttributeEntry constraintOperationAttributeEntry = getConstraintOperation(condition);
            if (constraintOperationAttributeEntry != null) {
                logger.info("-------------------add constraint operation reference-------------------");
                String operationName = constraintOperationAttributeEntry.constraintOperation.operation();
                logger.info(String.format("operation name = %s", operationName));
                ServiceRegistryInformation constraintOperationServiceRegistry = ServiceRegistry.getRegistry(operationName);
                if (constraintOperationServiceRegistry != null) {
                    V1ComponentReferenceModel constraintOperationRef = new V1ComponentReferenceModel(namespace);
                    constraintOperationRef.setName(constraintOperationServiceRegistry.getServiceName());
                    V1InterfaceModel constraintOperationJavaInterface = new V1InterfaceModel(InterfaceModel.JAVA, namespace);
                    constraintOperationJavaInterface.setInterface(constraintOperationServiceRegistry.getJavaImplementation());
                    constraintOperationRef.setInterface(constraintOperationJavaInterface);
                    result.addReference(constraintOperationRef);
                }
            }
        }
        // Adds exception reference if necessary
        if (match.getExceptionHandler() != null) {
            PCLExceptionHandler exceptionHandler = match.getExceptionHandler();
            logger.info("------------add exception handler--------------");
            String operationName = exceptionHandler.handler();
            logger.info(String.format("operation name = %s", operationName));
            ServiceRegistryInformation exceptionHandlerServiceRegistry = ServiceRegistry.getRegistry(operationName);
            if (exceptionHandlerServiceRegistry != null) {
                V1ComponentReferenceModel exceptionHandlerRef = new V1ComponentReferenceModel(namespace);
                exceptionHandlerRef.setName(exceptionHandlerServiceRegistry.getServiceName());
                V1InterfaceModel exceptionHandlerJavaInterface = new V1InterfaceModel(InterfaceModel.JAVA, namespace);
                exceptionHandlerJavaInterface.setInterface(exceptionHandlerServiceRegistry.getJavaImplementation());
                exceptionHandlerRef.setInterface(exceptionHandlerJavaInterface);
                result.addReference(exceptionHandlerRef);
            }
        }
        // After all the references have been created, generates a Camel route skeleton Java class
        // Replaces the existing implementation with a Camel route implementation
        ComponentImplementationModel existingImplementation = old.getImplementation();
        logger.info(String.format("Existing implementation is\n%s", existingImplementation));
        logger.info("This implementation will be replaced with a Camel route implementation.");
        String javaFileName = String.format("edu.uga.cs.pcf.camel.%sCamelRoute.java", result.getName());
        generateCamelRouteSkeleton(javaFileName, match);
        V1CamelImplementationModel camelRoute = new V1CamelImplementationModel(namespace);
        camelRoute.setJavaClass(javaFileName);
        // Replaces the existing implementation with the Camel route
        result.setImplementation(camelRoute);
        // Prints out the new component
        logger.info("------------new sca component--------------");
        logger.info(String.format("\n%s", result));
        return result;
    }
    /** Pairs a PCL constraint operation with the constant attribute it is compared against. */
    private static class ConstraintOperationAttributeEntry {
        // the operation (e.g. isRelative(a,b)) extracted from the condition expression
        PCLConstraintOperation constraintOperation;
        // the constant value of the comparison (e.g. "false"); may be null
        PCLConstraintConstantAttribute attribute;
    }
/**
* As of now, only handles pre condition and the expression
* 1. isRelative(a,b) == false
* 2. not isRelative(a,b)
* @param condition
* @return
*/
private ConstraintOperationAttributeEntry getConstraintOperation(PCLCondition condition) {
if (condition instanceof PCLPreCondition) {
PCLPreCondition pre = (PCLPreCondition)condition;
PCLExpression expression = pre.expression();
if (expression instanceof BinaryExpression) {
BinaryExpression binary = (BinaryExpression)expression;
if (binary.left() instanceof Func) {
Func function = (Func)binary.left();
logger.info(binary.right());
models.pcf.pcl.Constant constant = (models.pcf.pcl.Constant)binary.right();
ConstraintOperationAttributeEntry result = new ConstraintOperationAttributeEntry();
result.constraintOperation = function.operation();
result.attribute = constant.constant();
return result;
}
} else if (expression instanceof UnaryExpression) {
// UnaryExpression(not,Func(isRelative(List(Var(PCLConstraintComplexAttribute(List(t1, ., interviewer))), Var(PCLConstraintComplexAttribute(List(t1, ., interviewee)))))))
UnaryExpression unary = (UnaryExpression)expression;
System.out.print(unary);
String operator = unary.operator();
Func function = (Func)unary.arg();
logger.info(function);
ConstraintOperationAttributeEntry result = new ConstraintOperationAttributeEntry();
result.constraintOperation = function.operation();
if ("not".equals(operator)) {
result.attribute = new PCLConstraintConstantAttribute<>("false");
}
return result;
}
}
return null;
}
/**
 * Generates the Java source of a Camel route that chains the context monitor
 * (wiretap), the constraint-operation service (content-based router), the
 * original service, and the exception-handler service.
 * <p>
 * NOTE(review): despite the name and the "creates a java file" log message,
 * this method only builds and LOGS the source text — no file named
 * {@code javaFileName} is written here. Confirm whether the write step lives
 * elsewhere.
 *
 * @param javaFileName intended name of the generated Java file (only logged)
 * @param match the matched PCL process element / SwitchYard component pair
 */
private void generateCamelRouteSkeleton(String javaFileName,
PclSwitchyardMatcher match) {
logger.info(String.format("creates a java file %s.", javaFileName));
// Template placeholders, in order: class-name prefix, from-service, log label,
// monitor service (wiretap), constraint-operation service, expected attribute
// value, original service, exception-handler service. The String.format call
// below must supply its arguments in exactly this order.
String template =
"package edu.uga.cs.pcf.process.glycomics;\n" +
"\n" +
"import org.apache.camel.builder.RouteBuilder;\n" +
"\n" +
"public class %sCamelServiceRoute extends RouteBuilder {\n" +
" /**\n" +
" * The Camel route is configured via this method. \n" +
" * The from endpoint is required to be a SwitchYard service.\n" +
" */\n" +
" public void configure() {\n" +
" from(\"switchyard://%s\")\n" +
" .log(\"Received message for '%s' : ${body}\")\n" +
" // Send context information to context monitor using wiretap pattern\n" +
" .log(\"wiretap to context monitor\")\n" +
" .wireTap(\"switchyard://%s\")\n" +
" // Apply content based router pattern and invoke constraint operation\n" +
" .log(\"Invoke constraint operation...\")\n" +
" .to(\"switchyard://%s\")\n" +
" .choice()\n" +
" .when(body().contains(\"%s\"))\n" +
" .log(\"go to original service.....\")\n" +
" .to(\"switchyard://%s\")\n" +
" .otherwise()\n" +
" .log(\"go to exception handler.....\")\n" +
" .to(\"switchyard://%s\");\n" +
" }\n" +
"}";
// sca component
ComponentModel old = match.getOldComponent();
// sca service
CompositeServiceModel service = match.getService();
String originalServiceName = old.getName();
// NOTE(review): getRegistry may return null (see the null checks below for
// the other registries); a missing monitor registry would NPE at the
// String.format call. TODO confirm the monitor registration is guaranteed.
ServiceRegistryInformation monitorServiceRegistry = ServiceRegistry.getRegistry(ServiceRegistry.MONITOR_OPERATION);
List<PCLCondition> conditions = match.getConditions();
String constraintOperation = "";
String attribute = "";
// Resolve the constraint-operation service from the last condition that
// yields a recognized operation (later conditions overwrite earlier ones).
for (PCLCondition condition : conditions) {
ConstraintOperationAttributeEntry constraintOperationAttributeEntry = getConstraintOperation(condition);
if (constraintOperationAttributeEntry != null) {
String operationName = constraintOperationAttributeEntry.constraintOperation.operation();
attribute = constraintOperationAttributeEntry.attribute.value().toString();
ServiceRegistryInformation constraintOperationServiceRegistry = ServiceRegistry.getRegistry(operationName);
if (constraintOperationServiceRegistry != null) {
constraintOperation = constraintOperationServiceRegistry.getServiceName();
}
}
}
// Exception handler: resolve its service name, if one is registered.
String exceptionHandlerService = "";
if (match.getExceptionHandler() != null) {
PCLExceptionHandler exceptionHandler = match.getExceptionHandler();
logger.info("------------add exception handler--------------");
String operationName = exceptionHandler.handler();
logger.info(String.format("operation name = %s", operationName));
ServiceRegistryInformation exceptionHandlerServiceRegistry = ServiceRegistry.getRegistry(operationName);
if (exceptionHandlerServiceRegistry != null) {
exceptionHandlerService = exceptionHandlerServiceRegistry.getServiceName();
}
}
// Fill the template; argument order matches the placeholder order above.
String content = String.format(template,
service.getName(),
service.getName(),
service.getName(),
monitorServiceRegistry.getServiceName(),
constraintOperation,
attribute,
originalServiceName,
exceptionHandlerService);
logger.info(String.format("generated camel route following service integration template.\n%s", content));
}
/**
 * Finds, for every PCL process element, the SwitchYard artifacts it maps to:
 * the sca component whose promoted service has the element's name, and the
 * composite service of the same name. Every element produces a matcher entry,
 * even when no component or service is found (the corresponding fields stay
 * {@code null}).
 *
 * @param activities the PCL process elements to match
 * @param conditions conditions attached to every produced matcher
 * @param exceptionHandler exception handler attached to every produced matcher
 * @param services the composite services declared by the SwitchYard descriptor
 * @param components the sca components declared by the SwitchYard descriptor
 * @return one matcher per process element, in input order
 */
private List<PclSwitchyardMatcher> findMatched(
        List<PCLProcessElement> activities,
        List<PCLCondition> conditions,
        PCLExceptionHandler exceptionHandler,
        java.util.List<CompositeServiceModel> services,
        List<ComponentModel> components) {
    List<PclSwitchyardMatcher> matchers = Lists.newArrayList();
    for (PCLProcessElement element : activities) {
        PclSwitchyardMatcher matcher = new PclSwitchyardMatcher();
        matcher.setConditions(conditions);
        matcher.setExceptionHandler(exceptionHandler);
        String elementName = element.name();
        // A sca component is linked to an sca service via its service
        // attribute; take the first component whose service name matches.
        for (ComponentModel candidate : components) {
            if (elementName.equals(getServiceName(candidate))) {
                matcher.setOldComponent(candidate);
                matcher.setProcessElement(element);
                break;
            }
        }
        // Take the first composite service with the same name, if any.
        for (CompositeServiceModel candidate : services) {
            if (elementName.equals(candidate.getName())) {
                matcher.setService(candidate);
                break;
            }
        }
        matchers.add(matcher);
    }
    return matchers;
}
/**
 * Value object tying a PCL process element to its SwitchYard counterparts:
 * the composite service, the original sca component, the replacement sca
 * component (once generated), plus the conditions and exception handler
 * that apply to the element. Any field may be {@code null} when no match
 * was found (see findMatched).
 */
private static class PclSwitchyardMatcher {
    private PCLProcessElement processElement;
    private CompositeServiceModel service;
    private ComponentModel oldComponent;
    private ComponentModel newComponent;
    private List<PCLCondition> conditions;
    private PCLExceptionHandler exceptionHandler;

    public PclSwitchyardMatcher() {
        super();
    }

    public PCLProcessElement getProcessElement() {
        return processElement;
    }

    public void setProcessElement(PCLProcessElement processElement) {
        this.processElement = processElement;
    }

    public ComponentModel getOldComponent() {
        return oldComponent;
    }

    public void setOldComponent(ComponentModel oldComponent) {
        this.oldComponent = oldComponent;
    }

    public ComponentModel getNewComponent() {
        return newComponent;
    }

    public void setNewComponent(ComponentModel newComponent) {
        this.newComponent = newComponent;
    }

    public CompositeServiceModel getService() {
        return service;
    }

    public void setService(CompositeServiceModel service) {
        this.service = service;
    }

    public List<PCLCondition> getConditions() {
        return conditions;
    }

    public void setConditions(List<PCLCondition> conditions) {
        this.conditions = conditions;
    }

    public PCLExceptionHandler getExceptionHandler() {
        return exceptionHandler;
    }

    public void setExceptionHandler(PCLExceptionHandler exceptionHandler) {
        this.exceptionHandler = exceptionHandler;
    }

    @Override
    public String toString() {
        return String.format(
            "PclSwitchyardMatcher [processElement=%s, conditions=%s, exceptionHandler=%s, service=%s, oldComponent=%s, newComponent=%s]",
            // Guard processElement like the other fields: findMatched may
            // leave it unset, and toString must never throw.
            processElement == null ? "null" : processElement.name(),
            // Bug fix: the opening "[" around the joined conditions was
            // never closed, producing unbalanced output.
            conditions == null || conditions.isEmpty() ? "null" : "[" + Joiner.on(", ").join(conditions) + "]",
            exceptionHandler == null ? "null" : exceptionHandler,
            service == null ? "null" : service.getName(),
            oldComponent == null ? "null" : oldComponent.getName(),
            newComponent == null ? "null" : newComponent.getName());
    }
}
/**
 * Resolves the service name a sca component is published under.
 * Assumes the component declares at least one service; an empty service
 * list would raise IndexOutOfBoundsException here.
 *
 * @param component the sca component to inspect
 * @return the name of the component's first declared service
 */
private String getServiceName(ComponentModel component) {
return component.getServices().get(0).getName();
}
/**
 * Creates a linker that reads the PCL process model and the SwitchYard
 * composite descriptor from the given streams. The streams are stored
 * as-is; they are presumably consumed by initialize()/link() — not visible
 * in this chunk, TODO confirm.
 *
 * @param pcl input stream of the PCL process definition
 * @param switchyard input stream of the switchyard.xml descriptor
 */
public PcfLinker(InputStream pcl, InputStream switchyard) {
super();
this.pcl = pcl;
this.switchyard = switchyard;
}
/**
 * Opens the bundled default PCL definition ({@code NEPOTISM_PCL}) from the
 * classpath.
 *
 * @return the resource stream, or {@code null} when the resource is absent
 */
public static InputStream getDefaultPcl() {
    return PcfLinker.class.getClassLoader().getResourceAsStream(NEPOTISM_PCL);
}
/**
 * Opens the bundled default SwitchYard descriptor ({@code SWITCHYARD_XML})
 * from the classpath.
 *
 * @return the resource stream, or {@code null} when the resource is absent
 */
public static InputStream getDefaultSwitchyard() {
    return PcfLinker.class.getClassLoader().getResourceAsStream(SWITCHYARD_XML);
}
/**
 * Command-line entry point: links the bundled default PCL definition
 * against the bundled default switchyard.xml.
 */
public static void main(String[] args) {
    PcfLinker linker = new PcfLinker(getDefaultPcl(), getDefaultSwitchyard());
    linker.initialize();
    linker.link();
}
}
| |
package org.nnsoft.commons.bspi;
/*
* Copyright 2010-2011 The 99 Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.net.URL;
import java.nio.charset.Charset;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.NoSuchElementException;
/**
 * Lazily resolves provider classes for a service, following the JDK
 * {@code ServiceLoader} protocol: provider class names are read one resource
 * at a time from the supplied service-resource URLs and loaded on demand
 * through the given class loader. Successfully loaded providers are cached
 * in {@code providerTypes} in resolution order.
 *
 * @param <S> The type of the service to be loaded by this loader.
 * @since 1.0.1
 */
final class ServiceClassIterator<S>
    implements Iterator<Class<? extends S>>
{
    /**
     * The default <code>UTF-8</code> character encoding.
     */
    private static final Charset UTF_8 = Charset.forName( "UTF-8" );
    /**
     * The class or interface representing the service being loaded.
     */
    private final Class<S> service;
    /**
     * The class loader used to locate, load, and instantiate providers.
     */
    private final ClassLoader classLoader;
    // The provider-configuration resources still to be parsed.
    private final Enumeration<URL> serviceResources;
    /**
     * Cached providers types, in instantiation order.
     */
    private final LinkedHashMap<String, Class<? extends S>> providerTypes;
    // Names parsed from the current resource but not yet consumed;
    // null until the first resource has been parsed.
    private Iterator<String> pending = null;
    // The next provider class name, buffered by hasNext() and consumed by
    // next(); null when nothing is buffered.
    private String nextName = null;
    /**
     * @param service the class or interface representing the service being loaded.
     * @param classLoader the class loader used to locate, load, and instantiate providers.
     * @param serviceResources the provider-configuration resources to parse.
     * @param providerTypes cached providers types, in instantiation order.
     */
    public ServiceClassIterator( Class<S> service, ClassLoader classLoader, Enumeration<URL> serviceResources,
                                 LinkedHashMap<String, Class<? extends S>> providerTypes )
    {
        this.service = service;
        this.classLoader = classLoader;
        this.serviceResources = serviceResources;
        this.providerTypes = providerTypes;
    }
    /**
     * {@inheritDoc}
     */
    public boolean hasNext()
    {
        // A name is already buffered from a previous call.
        if ( nextName != null )
        {
            return true;
        }
        // Advance through the resources until one yields a pending name,
        // or every resource is exhausted.
        while ( ( pending == null ) || !pending.hasNext() )
        {
            if ( !serviceResources.hasMoreElements() )
            {
                return false;
            }
            pending = parseServiceFile( serviceResources.nextElement() );
        }
        // Buffer the name so next() can consume it.
        nextName = pending.next();
        return true;
    }
    /**
     * {@inheritDoc}
     *
     * Loads the next provider class, verifies it is assignable to the
     * service type, and records it in the provider cache.
     */
    public Class<? extends S> next()
    {
        if ( !hasNext() )
        {
            throw new NoSuchElementException();
        }
        // Consume the buffered name.
        String className = nextName;
        nextName = null;
        try
        {
            Class<?> clazz = classLoader.loadClass( className );
            if ( !service.isAssignableFrom( clazz ) )
            {
                throw new ServiceConfigurationError( "Provider '" + className + "' is not assignable to Service '"
                    + service.getName() + "'" );
            }
            Class<? extends S> serviceClass = clazz.asSubclass( service );
            providerTypes.put( className, serviceClass );
            return serviceClass;
        }
        catch ( ClassNotFoundException e )
        {
            throw new ServiceConfigurationError( "Provider '" + className + "' not found", e );
        }
        catch ( ClassCastException e )
        {
            throw new ServiceConfigurationError( "Provider '"
                + className
                + "' is not assignable to Service '"
                + service.getName()
                + "'", e );
        }
        // Deliberately broad: any linkage/initialization failure is wrapped
        // so callers only ever see ServiceConfigurationError.
        catch ( Throwable e )
        {
            throw new ServiceConfigurationError( "Provider '" + className + "' could not be instantiated", e );
        }
    }
    /**
     * {@inheritDoc}
     *
     * Always unsupported: providers cannot be removed through this iterator.
     */
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
    /**
     * Parse the content of the given URL as a provider-configuration file.
     *
     * @param url the URL naming the configuration file to be parsed.
     * @return a (possibly empty) iterator that will yield the provider-class names in the given configuration file that
     *         are not yet members of the returned set
     */
    private Iterator<String> parseServiceFile( URL url )
    {
        InputStream inputStream = null;
        Reader reader = null;
        try
        {
            inputStream = url.openStream();
            reader = new InputStreamReader( inputStream, UTF_8 );
            ServiceFileParser<S> serviceFileParser = new ServiceFileParser<S>( reader );
            serviceFileParser.setProviderTypes( providerTypes );
            serviceFileParser.parse();
            return serviceFileParser.iterator();
        }
        catch ( Exception e )
        {
            throw new ServiceConfigurationError( "An error occurred while reading service resource '"
                + url
                + "' for service class '"
                + service.getName()
                + "'", e );
        }
        finally
        {
            // Close the reader before the stream it wraps.
            closeQuietly( reader );
            closeQuietly( inputStream );
        }
    }
    /**
     * Unconditionally close a {@link Closeable} element.
     *
     * @param closeable the {@link Closeable} element.
     */
    private static void closeQuietly( Closeable closeable )
    {
        if ( closeable != null )
        {
            try
            {
                closeable.close();
            }
            catch ( IOException e )
            {
                // close quietly
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.netty.handlers;
import org.apache.camel.AsyncCallback;
import org.apache.camel.CamelExchangeException;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.component.netty.NettyCamelState;
import org.apache.camel.component.netty.NettyConstants;
import org.apache.camel.component.netty.NettyHelper;
import org.apache.camel.component.netty.NettyPayloadHelper;
import org.apache.camel.component.netty.NettyProducer;
import org.apache.camel.util.ExchangeHelper;
import org.jboss.netty.channel.ChannelHandler;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Client handler which cannot be shared.
 * <p/>
 * Bridges Netty channel events back onto the Camel {@link Exchange} /
 * {@link AsyncCallback} pair stored per channel in the {@link NettyProducer}
 * state.
 */
public class ClientChannelHandler extends SimpleChannelUpstreamHandler {
    // use NettyProducer as logger to make it easier to read the logs as this is part of the producer
    private static final Logger LOG = LoggerFactory.getLogger(NettyProducer.class);
    private final NettyProducer producer;
    // whether a reply message has been received on this channel
    private volatile boolean messageReceived;
    // guards against signalling the callback more than once on failure paths
    private volatile boolean exceptionHandled;

    public ClientChannelHandler(NettyProducer producer) {
        this.producer = producer;
    }

    @Override
    public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent channelStateEvent) throws Exception {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Channel open: {}", ctx.getChannel());
        }
        // to keep track of open sockets
        producer.getAllChannels().add(channelStateEvent.getChannel());
        // make sure the event can be processed by other handlers
        super.channelOpen(ctx, channelStateEvent);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent exceptionEvent) throws Exception {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Exception caught at Channel: " + ctx.getChannel(), exceptionEvent.getCause());
        }
        if (exceptionHandled) {
            // ignore subsequent exceptions being thrown
            return;
        }
        exceptionHandled = true;
        Throwable cause = exceptionEvent.getCause();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Closing channel as an exception was thrown from Netty", cause);
        }
        Exchange exchange = getExchange(ctx);
        AsyncCallback callback = getAsyncCallback(ctx);
        // the state may not be set
        if (exchange != null && callback != null) {
            // set the cause on the exchange
            exchange.setException(cause);
            // close channel in case an exception was thrown
            NettyHelper.close(exceptionEvent.getChannel());
            // signal callback
            callback.done(false);
        }
    }

    @Override
    public void channelClosed(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
        if (LOG.isTraceEnabled()) {
            LOG.trace("Channel closed: {}", ctx.getChannel());
        }
        Exchange exchange = getExchange(ctx);
        AsyncCallback callback = getAsyncCallback(ctx);
        // remove state
        producer.removeState(ctx.getChannel());
        // to keep track of open sockets
        producer.getAllChannels().remove(ctx.getChannel());
        if (producer.getConfiguration().isSync() && !messageReceived && !exceptionHandled) {
            // To avoid call the callback.done twice
            exceptionHandled = true;
            // session was closed but no message received. This could be because the remote server had an internal error
            // and could not return a response. We should count down to stop waiting for a response
            if (LOG.isDebugEnabled()) {
                LOG.debug("Channel closed but no message received from address: {}", producer.getConfiguration().getAddress());
            }
            // Fix: the state may not be set (the channel can close before an
            // exchange was associated) — guard like exceptionCaught does,
            // otherwise this NPEs instead of completing the exchange.
            if (exchange != null && callback != null) {
                exchange.setException(new CamelExchangeException("No response received from remote server: " + producer.getConfiguration().getAddress(), exchange));
                // signal callback
                callback.done(false);
            }
        }
        // make sure the event can be processed by other handlers
        super.channelClosed(ctx, e);
    }

    @Override
    public void messageReceived(ChannelHandlerContext ctx, MessageEvent messageEvent) throws Exception {
        messageReceived = true;
        if (LOG.isTraceEnabled()) {
            LOG.trace("Message received: {}", messageEvent);
        }
        // a reply arrived, so the request-timeout handler is no longer needed
        if (producer.getConfiguration().getRequestTimeout() > 0) {
            ChannelHandler handler = ctx.getPipeline().get("timeout");
            if (handler != null) {
                LOG.trace("Removing timeout channel as we received message");
                ctx.getPipeline().remove(handler);
            }
        }
        Exchange exchange = getExchange(ctx);
        if (exchange == null) {
            // we just ignore the received message as the channel is closed
            return;
        }
        AsyncCallback callback = getAsyncCallback(ctx);
        Message message;
        try {
            message = getResponseMessage(exchange, messageEvent);
        } catch (Exception e) {
            exchange.setException(e);
            callback.done(false);
            return;
        }
        // set the result on either IN or OUT on the original exchange depending on its pattern
        if (ExchangeHelper.isOutCapable(exchange)) {
            exchange.setOut(message);
        } else {
            exchange.setIn(message);
        }
        try {
            // should channel be closed after complete?
            Boolean close;
            if (ExchangeHelper.isOutCapable(exchange)) {
                close = exchange.getOut().getHeader(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class);
            } else {
                close = exchange.getIn().getHeader(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class);
            }
            // check the setting on the exchange property
            if (close == null) {
                close = exchange.getProperty(NettyConstants.NETTY_CLOSE_CHANNEL_WHEN_COMPLETE, Boolean.class);
            }
            // should we disconnect, the header can override the configuration
            boolean disconnect = producer.getConfiguration().isDisconnect();
            if (close != null) {
                disconnect = close;
            }
            if (disconnect) {
                if (LOG.isTraceEnabled()) {
                    LOG.trace("Closing channel when complete at address: {}", producer.getConfiguration().getAddress());
                }
                NettyHelper.close(ctx.getChannel());
            }
        } finally {
            // signal callback
            callback.done(false);
        }
    }

    /**
     * Gets the Camel {@link Message} to use as the message to be set on the current {@link Exchange} when
     * we have received a reply message.
     * <p/>
     *
     * @param exchange the current exchange
     * @param messageEvent the incoming event which has the response message from Netty.
     * @return the Camel {@link Message} to set on the current {@link Exchange} as the response message.
     * @throws Exception is thrown if error getting the response message
     */
    protected Message getResponseMessage(Exchange exchange, MessageEvent messageEvent) throws Exception {
        Object body = messageEvent.getMessage();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Channel: {} received body: {}", new Object[]{messageEvent.getChannel(), body});
        }
        // if textline enabled then covert to a String which must be used for textline
        if (producer.getConfiguration().isTextline()) {
            body = producer.getContext().getTypeConverter().mandatoryConvertTo(String.class, exchange, body);
        }
        // set the result on either IN or OUT on the original exchange depending on its pattern
        if (ExchangeHelper.isOutCapable(exchange)) {
            NettyPayloadHelper.setOut(exchange, body);
            return exchange.getOut();
        } else {
            NettyPayloadHelper.setIn(exchange, body);
            return exchange.getIn();
        }
    }

    /**
     * Looks up the exchange associated with the channel, or {@code null}
     * when no state is stored for it.
     */
    protected Exchange getExchange(ChannelHandlerContext ctx) {
        NettyCamelState state = producer.getState(ctx.getChannel());
        return state != null ? state.getExchange() : null;
    }

    /**
     * Looks up the async callback associated with the channel, or
     * {@code null} when no state is stored for it.
     */
    private AsyncCallback getAsyncCallback(ChannelHandlerContext ctx) {
        NettyCamelState state = producer.getState(ctx.getChannel());
        return state != null ? state.getCallback() : null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.linq4j.tree;
import org.apache.calcite.linq4j.Linq4j;
import java.util.List;
/**
 * Node visitor.
 *
 * <p>Each {@code preVisit} hook returns the visitor to apply to a node's
 * children. Each {@code visit} method rebuilds its node only when at least
 * one child was replaced; otherwise the original node is returned so
 * untouched subtrees are shared.</p>
 */
public class Visitor {
  public Visitor preVisit(WhileStatement whileStatement) {
    return this;
  }

  public Statement visit(WhileStatement whileStatement, Expression condition,
      Statement body) {
    if (condition == whileStatement.condition
        && body == whileStatement.body) {
      return whileStatement;
    }
    return Expressions.while_(condition, body);
  }

  public Visitor preVisit(ConditionalStatement conditionalStatement) {
    return this;
  }

  public Statement visit(ConditionalStatement conditionalStatement,
      List<Node> list) {
    if (list.equals(conditionalStatement.expressionList)) {
      return conditionalStatement;
    }
    return Expressions.ifThenElse(list);
  }

  public Visitor preVisit(BlockStatement blockStatement) {
    return this;
  }

  public BlockStatement visit(BlockStatement blockStatement,
      List<Statement> statements) {
    if (statements.equals(blockStatement.statements)) {
      return blockStatement;
    }
    return Expressions.block(statements);
  }

  public Visitor preVisit(GotoStatement gotoStatement) {
    return this;
  }

  public Statement visit(GotoStatement gotoStatement, Expression expression) {
    if (expression == gotoStatement.expression) {
      return gotoStatement;
    }
    return Expressions.makeGoto(
        gotoStatement.kind, gotoStatement.labelTarget, expression);
  }

  public LabelStatement visit(LabelStatement labelStatement) {
    return labelStatement;
  }

  public Visitor preVisit(ForStatement forStatement) {
    return this;
  }

  public ForStatement visit(ForStatement forStatement,
      List<DeclarationStatement> declarations, Expression condition,
      Expression post, Statement body) {
    if (declarations.equals(forStatement.declarations)
        && condition == forStatement.condition
        && post == forStatement.post
        && body == forStatement.body) {
      return forStatement;
    }
    return Expressions.for_(declarations, condition, post, body);
  }

  public Visitor preVisit(ThrowStatement throwStatement) {
    return this;
  }

  public Statement visit(ThrowStatement throwStatement, Expression expression) {
    if (expression == throwStatement.expression) {
      return throwStatement;
    }
    return Expressions.throw_(expression);
  }

  public Visitor preVisit(DeclarationStatement declarationStatement) {
    return this;
  }

  public DeclarationStatement visit(DeclarationStatement declarationStatement,
      Expression initializer) {
    if (declarationStatement.initializer == initializer) {
      return declarationStatement;
    }
    return Expressions.declare(
        declarationStatement.modifiers, declarationStatement.parameter,
        initializer);
  }

  public Expression visit(LambdaExpression lambdaExpression) {
    return lambdaExpression;
  }

  public Visitor preVisit(FunctionExpression functionExpression) {
    return this;
  }

  public Expression visit(FunctionExpression functionExpression,
      BlockStatement body) {
    if (functionExpression.body.equals(body)) {
      return functionExpression;
    }
    return Expressions.lambda(body, functionExpression.parameterList);
  }

  public Visitor preVisit(BinaryExpression binaryExpression) {
    return this;
  }

  public Expression visit(BinaryExpression binaryExpression,
      Expression expression0, Expression expression1) {
    if (binaryExpression.expression0 == expression0
        && binaryExpression.expression1 == expression1) {
      return binaryExpression;
    }
    return Expressions.makeBinary(binaryExpression.nodeType, expression0,
        expression1);
  }

  public Visitor preVisit(TernaryExpression ternaryExpression) {
    return this;
  }

  public Expression visit(TernaryExpression ternaryExpression,
      Expression expression0, Expression expression1, Expression expression2) {
    if (ternaryExpression.expression0 == expression0
        && ternaryExpression.expression1 == expression1
        && ternaryExpression.expression2 == expression2) {
      return ternaryExpression;
    }
    return Expressions.makeTernary(ternaryExpression.nodeType, expression0,
        expression1, expression2);
  }

  public Visitor preVisit(IndexExpression indexExpression) {
    return this;
  }

  public Expression visit(IndexExpression indexExpression, Expression array,
      List<Expression> indexExpressions) {
    if (indexExpression.array == array
        && indexExpression.indexExpressions.equals(indexExpressions)) {
      return indexExpression;
    }
    return new IndexExpression(array, indexExpressions);
  }

  public Visitor preVisit(UnaryExpression unaryExpression) {
    return this;
  }

  public Expression visit(UnaryExpression unaryExpression,
      Expression expression) {
    if (unaryExpression.expression == expression) {
      return unaryExpression;
    }
    return Expressions.makeUnary(unaryExpression.nodeType, expression,
        unaryExpression.type, null);
  }

  public Visitor preVisit(MethodCallExpression methodCallExpression) {
    return this;
  }

  public Expression visit(MethodCallExpression methodCallExpression,
      Expression targetExpression, List<Expression> expressions) {
    if (methodCallExpression.targetExpression == targetExpression
        && methodCallExpression.expressions.equals(expressions)) {
      return methodCallExpression;
    }
    return Expressions.call(targetExpression, methodCallExpression.method,
        expressions);
  }

  public Expression visit(DefaultExpression defaultExpression) {
    return defaultExpression;
  }

  public Expression visit(DynamicExpression dynamicExpression) {
    return dynamicExpression;
  }

  public Visitor preVisit(MemberExpression memberExpression) {
    return this;
  }

  public Expression visit(MemberExpression memberExpression,
      Expression expression) {
    if (memberExpression.expression == expression) {
      return memberExpression;
    }
    return Expressions.field(expression, memberExpression.field);
  }

  public Expression visit(InvocationExpression invocationExpression) {
    return invocationExpression;
  }

  /** Null-safe equality: identical, or both non-null and equal. */
  static <T> boolean eq(T t0, T t1) {
    if (t0 == t1) {
      return true;
    }
    return t0 != null && t1 != null && t0.equals(t1);
  }

  public Visitor preVisit(NewArrayExpression newArrayExpression) {
    return this;
  }

  public Expression visit(NewArrayExpression newArrayExpression, int dimension,
      Expression bound, List<Expression> expressions) {
    if (eq(expressions, newArrayExpression.expressions)
        && eq(bound, newArrayExpression.bound)) {
      return newArrayExpression;
    }
    // No initializer expressions means this is a bounds-style array creation.
    if (expressions == null) {
      return Expressions.newArrayBounds(
          Types.getComponentTypeN(newArrayExpression.type), dimension, bound);
    }
    return Expressions.newArrayInit(
        Types.getComponentTypeN(newArrayExpression.type),
        dimension, expressions);
  }

  public Expression visit(ListInitExpression listInitExpression) {
    return listInitExpression;
  }

  public Visitor preVisit(NewExpression newExpression) {
    return this;
  }

  public Expression visit(NewExpression newExpression,
      List<Expression> arguments, List<MemberDeclaration> memberDeclarations) {
    if (arguments.equals(newExpression.arguments)
        && Linq4j.equals(memberDeclarations, newExpression.memberDeclarations)) {
      return newExpression;
    }
    return Expressions.new_(newExpression.type, arguments, memberDeclarations);
  }

  public Statement visit(SwitchStatement switchStatement) {
    return switchStatement;
  }

  public Statement visit(TryStatement tryStatement) {
    return tryStatement;
  }

  public Expression visit(MemberInitExpression memberInitExpression) {
    return memberInitExpression;
  }

  public Visitor preVisit(TypeBinaryExpression typeBinaryExpression) {
    return this;
  }

  public Expression visit(TypeBinaryExpression typeBinaryExpression,
      Expression expression) {
    if (typeBinaryExpression.expression == expression) {
      return typeBinaryExpression;
    }
    return new TypeBinaryExpression(expression.getNodeType(), expression,
        expression.type);
  }

  public Visitor preVisit(MethodDeclaration methodDeclaration) {
    return this;
  }

  public MemberDeclaration visit(MethodDeclaration methodDeclaration,
      BlockStatement body) {
    if (body.equals(methodDeclaration.body)) {
      return methodDeclaration;
    }
    return Expressions.methodDecl(methodDeclaration.modifier,
        methodDeclaration.resultType, methodDeclaration.name,
        methodDeclaration.parameters, body);
  }

  public Visitor preVisit(FieldDeclaration fieldDeclaration) {
    return this;
  }

  public MemberDeclaration visit(FieldDeclaration fieldDeclaration,
      Expression initializer) {
    if (eq(initializer, fieldDeclaration.initializer)) {
      return fieldDeclaration;
    }
    return Expressions.fieldDecl(fieldDeclaration.modifier,
        fieldDeclaration.parameter, initializer);
  }

  public Expression visit(ParameterExpression parameterExpression) {
    return parameterExpression;
  }

  public ConstantExpression visit(ConstantExpression constantExpression) {
    return constantExpression;
  }

  public Visitor preVisit(ClassDeclaration classDeclaration) {
    return this;
  }

  public ClassDeclaration visit(ClassDeclaration classDeclaration,
      List<MemberDeclaration> memberDeclarations) {
    if (Linq4j.equals(memberDeclarations,
        classDeclaration.memberDeclarations)) {
      return classDeclaration;
    }
    return Expressions.classDecl(classDeclaration.modifier,
        classDeclaration.name, classDeclaration.extended,
        classDeclaration.implemented, memberDeclarations);
  }

  public Visitor preVisit(ConstructorDeclaration constructorDeclaration) {
    return this;
  }

  public MemberDeclaration visit(ConstructorDeclaration constructorDeclaration,
      BlockStatement body) {
    if (body.equals(constructorDeclaration.body)) {
      return constructorDeclaration;
    }
    return Expressions.constructorDecl(constructorDeclaration.modifier,
        constructorDeclaration.resultType,
        constructorDeclaration.parameters,
        body);
  }
}
// End Visitor.java
| |
package experimental;
import java.util.Arrays;
import java.util.function.DoublePredicate;
import java.util.function.IntPredicate;
public class BinarySearchFull {
public static int binarySearchFirstTrueInteger(IntPredicate p, int from, int to) {
int lo = from - 1;
int hi = to + 1;
while (hi - lo > 1) {
int mid = (lo + hi) / 2;
if (!p.test(mid)) {
lo = mid;
} else {
hi = mid;
}
}
return hi;
}
public static double binarySearchFirstTrueDouble(DoublePredicate p, double lo, double hi) {
for (int step = 0; step < 1000; step++) {
double mid = (lo + hi) / 2;
if (!p.test(mid)) {
lo = mid;
} else {
hi = mid;
}
}
return hi;
}
/**
* Returns min(p|a[p]==true) 000[1]11
* <p/>
* invariant: a[lo]==false, a[hi]==true, hence can't set lo=mid+1 or hi=mid-1
* <p/>
* boundary case: lo1=p,hi1=p+1,lo2=(lo1+hi1)/2=p,hi2=p+1
*/
public static int binarySearchFirst(boolean[] a) {
int lo = -1;
int hi = a.length;
while (hi - lo > 1) {
int mid = (lo + hi) / 2;
if (!a[mid]) {
lo = mid;
} else {
hi = mid;
}
}
return hi;
}
// Returns max(p|a[p]==false) 00[0]111
public static int binarySearchLast(boolean[] a) {
int lo = -1;
int hi = a.length;
while (hi - lo > 1) {
int mid = (lo + hi) / 2;
if (!a[mid]) {
lo = mid;
} else {
hi = mid;
}
}
return lo;
}
/**
* Returns min(p|a[p]==true) 000[1]11
* <p/>
* invariant: lo<=min(p|a[p]=true), a[hi]==true, hence can set lo = mid+1
* <p/>
* boundary case: lo1=p,hi1=p,lo2=p,hi2=(lo1+hi1)/2=p
*/
public static int binarySearchFirst2(boolean[] a) {
int lo = 0;
int hi = a.length;
while (lo < hi) {
int mid = (lo + hi) / 2;
if (!a[mid]) {
lo = mid + 1;
} else {
hi = mid;
}
}
return lo;
}
/**
* Returns max(p|a[p]==false) 00[0]111
* <p/>
* invariant: a[lo]=false, hi>=max(p|a[p]=false), hence can set hi=mid-1
* <p/>
* boundary case: lo1=p,hi1=p,lo2=p,hi2=(lo1+hi1+1)/2=p
*/
public static int binarySearchLast2(boolean[] a) {
int lo = -1;
int hi = a.length - 1;
while (lo < hi) {
int mid = (lo + hi + 1) / 2;
if (!a[mid]) {
lo = mid;
} else {
hi = mid - 1;
}
}
return lo;
}
/**
* Returns (p|a[p]==key), if it is contained in the array; otherwise, -(insertion point+1).
* <p/>
* invariant: lo<=min(p|a[p]>=key), hi>=max(p|a[p]<=key), hence can set lo=mid+1 and hi=mid-1
* <p/>
* boundary case: lo1=p+1,hi1=p,lo2=(lo1+hi1)/2+1=p+1,hi2=p
*/
public static int binarySearch(int[] a, int key) {
int lo = 0;
int hi = a.length - 1;
while (lo <= hi) {
int mid = (lo + hi) / 2;
int midVal = a[mid];
if (midVal < key) {
lo = mid + 1;
} else if (midVal > key) {
hi = mid - 1;
} else {
return mid;
}
}
return -(lo + 1);
}
/**
* Returns min(p|a[p]>=key)
* <p/>
* invariant: a[lo]<key, a[hi]>=key, hence can't set lo=mid+1 or hi=mid-1
* <p/>
* boundary case: lo1=p,hi1=p+1,lo2=(lo1+hi1)/2=p,hi2=p+1
*/
public static int stl_lower_bound(int[] a, int key) {
// return binarySearchFirstTrueInteger(i -> a[i] >= key, 0, a.length - 1);
int lo = -1;
int hi = a.length;
while (hi - lo > 1) {
int mid = (lo + hi) / 2;
int midVal = a[mid];
if (midVal < key) {
lo = mid;
} else {
hi = mid;
}
}
return hi;
}
/**
* Returns min(p|a[p]>key)
* <p/>
* invariant: a[lo]<=key, a[hi]>key, hence can't set lo=mid+1 or hi=mid-1
* <p/>
* boundary case: lo1=p,hi1=p+1,lo2=(lo1+hi1)/2=p,hi2=p+1
*/
public static int stl_upper_bound(int[] a, int key) {
// return binarySearchFirstTrueInteger(i -> a[i] > key, 0, a.length - 1);
int lo = -1;
int hi = a.length;
while (hi - lo > 1) {
int mid = (lo + hi) / 2;
int midVal = a[mid];
if (midVal <= key) {
lo = mid;
} else {
hi = mid;
}
}
return hi;
}
/**
* Returns min(p|a[p]>=key)
*/
public static int stl_lower_bound2(int[] a, int key) {
int len = a.length;
int from = 0;
while (len > 0) {
int half = len / 2;
int mid = from + half;
if (a[mid] < key) {
from = mid + 1;
len -= half + 1;
} else {
len = half;
}
}
return from;
}
/**
* Returns min(p|a[p]>key)
*/
public static int stl_upper_bound2(int[] a, int key) {
int len = a.length;
int from = 0;
while (len > 0) {
int half = len / 2;
int mid = from + half;
if (a[mid] <= key) {
from = mid + 1;
len -= half + 1;
} else {
len = half;
}
}
return from;
}
// Usage example
public static void main(String[] args) {
boolean[] b = {false, false, true};
System.out.println(2 == binarySearchFirst(b));
System.out.println(1 == binarySearchLast(b));
int[] a = {1, 3, 7, 10, 15};
System.out.println(4 == stl_upper_bound(a, 11));
System.out.println(4 == stl_upper_bound2(a, 11));
System.out.println(binarySearchFirstTrueInteger(i -> b[i], 0, b.length - 1));
System.out.println(binarySearchFirstTrueInteger(i -> a[i] >= 6, 0, a.length - 1));
System.out.println(Arrays.binarySearch(new int[5], 0));
}
}
| |
/**
* Copyright 2014 Unicon (R) Licensed under the
* Educational Community License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*
*/
package org.apereo.openlrs.model.xapi;
import java.io.Serializable;
import java.net.URI;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
/**
* @author ggilbert
*
*/
@JsonInclude(Include.NON_NULL)
public class XApiObjectDefinition implements Serializable {
    /**
     * serialVersionUID — declared with the conventional long literal.
     */
    private static final long serialVersionUID = 1L;
    // Language-map (RFC 5646 tag -> text) fields per the xAPI specification.
    private Map<String, String> name;
    private Map<String, String> description;
    private String type;
    private String moreInfo;
    // Interaction-activity fields (only populated for interaction definitions).
    private String interactionType;
    private List<String> correctResponsesPattern;
    private List<XApiInteractionComponent> choices;
    private List<XApiInteractionComponent> scale;
    private List<XApiInteractionComponent> source;
    private List<XApiInteractionComponent> target;
    private List<XApiInteractionComponent> steps;
    private Map<URI, Object> extensions;
    /**
     * @return the name
     */
    public Map<String, String> getName() {
        return name;
    }
    /**
     * @param name the name to set
     */
    public void setName(Map<String, String> name) {
        this.name = name;
    }
    /**
     * @return the description
     */
    public Map<String, String> getDescription() {
        return description;
    }
    /**
     * @param description the description to set
     */
    public void setDescription(Map<String, String> description) {
        this.description = description;
    }
    /**
     * @return the type
     */
    public String getType() {
        return type;
    }
    /**
     * @param type the type to set
     */
    public void setType(String type) {
        this.type = type;
    }
    /**
     * @return the moreInfo
     */
    public String getMoreInfo() {
        return moreInfo;
    }
    /**
     * @param moreInfo the moreInfo to set
     */
    public void setMoreInfo(String moreInfo) {
        this.moreInfo = moreInfo;
    }
    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     *
     * Includes every field so log output reflects the full definition
     * (the previous version silently omitted the interaction fields).
     */
    @Override
    public String toString() {
        return "XApiObjectDefinition [name=" + name + ", description="
                + description + ", type=" + type + ", moreInfo=" + moreInfo
                + ", interactionType=" + interactionType
                + ", correctResponsesPattern=" + correctResponsesPattern
                + ", choices=" + choices + ", scale=" + scale
                + ", source=" + source + ", target=" + target
                + ", steps=" + steps + ", extensions=" + extensions
                + "]";
    }
    /**
     * @return the interactionType
     */
    public String getInteractionType() {
        return interactionType;
    }
    /**
     * @param interactionType the interactionType to set
     */
    public void setInteractionType(String interactionType) {
        this.interactionType = interactionType;
    }
    /**
     * @return the correctResponsesPattern
     */
    public List<String> getCorrectResponsesPattern() {
        return correctResponsesPattern;
    }
    /**
     * @param correctResponsesPattern the correctResponsesPattern to set
     */
    public void setCorrectResponsesPattern(List<String> correctResponsesPattern) {
        this.correctResponsesPattern = correctResponsesPattern;
    }
    /**
     * @return the choices
     */
    public List<XApiInteractionComponent> getChoices() {
        return choices;
    }
    /**
     * @param choices the choices to set
     */
    public void setChoices(List<XApiInteractionComponent> choices) {
        this.choices = choices;
    }
    /**
     * @return the scale
     */
    public List<XApiInteractionComponent> getScale() {
        return scale;
    }
    /**
     * @param scale the scale to set
     */
    public void setScale(List<XApiInteractionComponent> scale) {
        this.scale = scale;
    }
    /**
     * @return the source
     */
    public List<XApiInteractionComponent> getSource() {
        return source;
    }
    /**
     * @param source the source to set
     */
    public void setSource(List<XApiInteractionComponent> source) {
        this.source = source;
    }
    /**
     * @return the target
     */
    public List<XApiInteractionComponent> getTarget() {
        return target;
    }
    /**
     * @param target the target to set
     */
    public void setTarget(List<XApiInteractionComponent> target) {
        this.target = target;
    }
    /**
     * @return the steps
     */
    public List<XApiInteractionComponent> getSteps() {
        return steps;
    }
    /**
     * @param steps the steps to set
     */
    public void setSteps(List<XApiInteractionComponent> steps) {
        this.steps = steps;
    }
    /**
     * @return the extensions
     */
    public Map<URI, Object> getExtensions() {
        return extensions;
    }
    /**
     * @param extensions the extensions to set
     */
    public void setExtensions(Map<URI, Object> extensions) {
        this.extensions = extensions;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.IOException;
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This program executes a specified operation that applies load to
* the NameNode. Possible operations include create/writing files,
* opening/reading files, renaming files, and deleting files.
*
* When run simultaneously on multiple nodes, this program functions
* as a stress-test and benchmark for namenode, especially when
* the number of bytes written to each file is small.
*
* This version does not use the map reduce framework
*
*/
public class NNBenchWithoutMR {
  private static final Logger LOG =
          LoggerFactory.getLogger(NNBenchWithoutMR.class);
  // variables initialized from command line arguments
  private static long startTime = 0;
  private static int numFiles = 0;
  private static long bytesPerBlock = 1;
  private static long blocksPerFile = 0;
  private static long bytesPerFile = 1;
  private static short replicationFactorPerFile = 1; // default is 1
  private static Path baseDir = null;
  // variables initialized in main()
  private static FileSystem fileSys = null;
  private static Path taskDir = null;
  private static byte[] buffer;
  private static long maxExceptionsPerFile = 200;
  /**
   * Returns when the current number of seconds from the epoch equals
   * the command line argument given by <code>-startTime</code>.
   * This allows multiple instances of this program, running on clock
   * synchronized nodes, to start at roughly the same time.
   */
  static void barrier() {
    long sleepTime;
    while ((sleepTime = startTime - System.currentTimeMillis()) > 0) {
      try {
        Thread.sleep(sleepTime);
      } catch (InterruptedException ex) {
        // Deliberately ignored: loop re-checks the remaining sleep time.
      }
    }
  }
  /**
   * Logs an exception for the given operation and aborts the benchmark when
   * a single file has accumulated more than {@code maxExceptionsPerFile}
   * failures (prevents an unreachable NameNode from looping forever).
   *
   * @param operation description of the failing operation, used in messages
   * @param e the exception that was caught
   * @param singleFileExceptions exceptions seen so far for the current file
   */
  static private void handleException(String operation, Throwable e,
                                      int singleFileExceptions) {
    LOG.warn("Exception while " + operation + ": " +
            StringUtils.stringifyException(e));
    if (singleFileExceptions >= maxExceptionsPerFile) {
      throw new RuntimeException(singleFileExceptions +
              " exceptions for a single file exceeds threshold. Aborting");
    }
  }
  /**
   * Create and write to a given number of files.  Repeat each remote
   * operation until it succeeds (does not throw an exception).
   *
   * @return the number of exceptions caught
   */
  static int createWrite() {
    int totalExceptions = 0;
    FSDataOutputStream out = null;
    boolean success;
    for (int index = 0; index < numFiles; index++) {
      int singleFileExceptions = 0;
      do { // create file until it succeeds or max exceptions reached
        try {
          out = fileSys.create(
                  new Path(taskDir, "" + index), false, 512,
                  (short)replicationFactorPerFile, bytesPerBlock);
          success = true;
        } catch (IOException ioe) {
          success = false;
          totalExceptions++;
          handleException("creating file #" + index, ioe,
                  ++singleFileExceptions);
        }
      } while (!success);
      long toBeWritten = bytesPerFile;
      while (toBeWritten > 0) {
        int nbytes = (int) Math.min(buffer.length, toBeWritten);
        toBeWritten -= nbytes;
        try { // only try once per chunk; a lost write is just counted
          out.write(buffer, 0, nbytes);
        } catch (IOException ioe) {
          totalExceptions++;
          handleException("writing to file #" + index, ioe,
                  ++singleFileExceptions);
        }
      }
      do { // close file until it succeeds
        try {
          out.close();
          success = true;
        } catch (IOException ioe) {
          success = false;
          totalExceptions++;
          handleException("closing file #" + index, ioe,
                  ++singleFileExceptions);
        }
      } while (!success);
    }
    return totalExceptions;
  }
  /**
   * Open and read a given number of files.
   *
   * @return the number of exceptions caught
   */
  static int openRead() {
    int totalExceptions = 0;
    FSDataInputStream in;
    for (int index = 0; index < numFiles; index++) {
      int singleFileExceptions = 0;
      try {
        in = fileSys.open(new Path(taskDir, "" + index), 512);
        long toBeRead = bytesPerFile;
        while (toBeRead > 0) {
          int nbytes = (int) Math.min(buffer.length, toBeRead);
          toBeRead -= nbytes;
          try { // only try once && we don't care about the number of bytes read
            in.read(buffer, 0, nbytes);
          } catch (IOException ioe) {
            totalExceptions++;
            handleException("reading from file #" + index, ioe,
                    ++singleFileExceptions);
          }
        }
        in.close();
      } catch (IOException ioe) {
        totalExceptions++;
        handleException("opening file #" + index, ioe, ++singleFileExceptions);
      }
    }
    return totalExceptions;
  }
  /**
   * Rename a given number of files.  Repeat each remote
   * operation until it succeeds (does not throw an exception).
   *
   * @return the number of exceptions caught
   */
  static int rename() {
    int totalExceptions = 0;
    boolean success;
    for (int index = 0; index < numFiles; index++) {
      int singleFileExceptions = 0;
      do { // rename file until it succeeds
        try {
          // The boolean result of this operation is of no interest to us:
          // it can return false only if the namesystem could not rename the
          // path in the name space (i.e. no Exception has been thrown).
          fileSys.rename(new Path(taskDir, "" + index),
                  new Path(taskDir, "A" + index));
          success = true;
        } catch (IOException ioe) {
          success = false;
          totalExceptions++;
          // Fixed: previously mis-reported as "creating file #".
          handleException("renaming file #" + index, ioe, ++singleFileExceptions);
        }
      } while (!success);
    }
    return totalExceptions;
  }
  /**
   * Delete a given number of files.  Repeat each remote
   * operation until it succeeds (does not throw an exception).
   *
   * @return the number of exceptions caught
   */
  static int delete() {
    int totalExceptions = 0;
    boolean success;
    for (int index = 0; index < numFiles; index++) {
      int singleFileExceptions = 0;
      do { // delete file until it succeeds
        try {
          // The boolean result of this operation is of no interest to us:
          // it can return false only if the namesystem could not remove the
          // path from the name space (i.e. no Exception has been thrown).
          fileSys.delete(new Path(taskDir, "A" + index), true);
          success = true;
        } catch (IOException ioe) {
          success = false;
          totalExceptions++;
          // Fixed: previously mis-reported as "creating file #".
          handleException("deleting file #" + index, ioe, ++singleFileExceptions);
        }
      } while (!success);
    }
    return totalExceptions;
  }
  /**
   * This launches a given namenode operation (<code>-operation</code>),
   * starting at a given time (<code>-startTime</code>).  The files used
   * by the openRead, rename, and delete operations are the same files
   * created by the createWrite operation.  Typically, the program
   * would be run four times, once for each operation in this order:
   * createWrite, openRead, rename, delete.
   *
   * <pre>
   * Usage: nnbench
   *          -operation <one of createWrite, openRead, rename, or delete>
   *          -baseDir <base output/input DFS path>
   *          -startTime <time to start, given in seconds from the epoch>
   *          -numFiles <number of files to create, read, rename, or delete>
   *          -blocksPerFile <number of blocks to create per file>
   *         [-bytesPerBlock <number of bytes to write to each block, default is 1>]
   *         [-bytesPerChecksum <value for io.bytes.per.checksum>]
   * </pre>
   *
   * @param args is an array of the program command line arguments
   * @throws IOException indicates a problem with test startup
   */
  public static void main(String[] args) throws IOException {
    String version = "NameNodeBenchmark.0.3";
    System.out.println(version);
    int bytesPerChecksum = -1;
    String usage =
            "Usage: nnbench " +
                    "  -operation <one of createWrite, openRead, rename, or delete>\n " +
                    "  -baseDir <base output/input DFS path>\n " +
                    "  -startTime <time to start, given in seconds from the epoch>\n" +
                    "  -numFiles <number of files to create>\n " +
                    "  -replicationFactorPerFile <Replication factor for the files, default is 1>\n" +
                    "  -blocksPerFile <number of blocks to create per file>\n" +
                    " [-bytesPerBlock <number of bytes to write to each block, default is 1>]\n" +
                    " [-bytesPerChecksum <value for io.bytes.per.checksum>]\n" +
                    "Note: bytesPerBlock MUST be a multiple of bytesPerChecksum\n";
    String operation = null;
    for (int i = 0; i < args.length; i++) { // parse command line
      if (args[i].equals("-baseDir")) {
        baseDir = new Path(args[++i]);
      } else if (args[i].equals("-numFiles")) {
        numFiles = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-blocksPerFile")) {
        blocksPerFile = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-bytesPerBlock")) {
        bytesPerBlock = Long.parseLong(args[++i]);
      } else if (args[i].equals("-bytesPerChecksum")) {
        bytesPerChecksum = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-replicationFactorPerFile")) {
        replicationFactorPerFile = Short.parseShort(args[++i]);
      } else if (args[i].equals("-startTime")) {
        startTime = Long.parseLong(args[++i]) * 1000;
      } else if (args[i].equals("-operation")) {
        operation = args[++i];
      } else {
        System.out.println(usage);
        System.exit(-1);
      }
    }
    bytesPerFile = bytesPerBlock * blocksPerFile;
    JobConf jobConf = new JobConf(new Configuration(), NNBench.class);
    if ( bytesPerChecksum < 0 ) { // if it is not set in cmdline
      bytesPerChecksum = jobConf.getInt("io.bytes.per.checksum", 512);
    }
    jobConf.set("io.bytes.per.checksum", Integer.toString(bytesPerChecksum));
    System.out.println("Inputs: ");
    System.out.println("   operation: " + operation);
    System.out.println("     baseDir: " + baseDir);
    System.out.println("   startTime: " + startTime);
    System.out.println("    numFiles: " + numFiles);
    System.out.println("    replicationFactorPerFile: " + replicationFactorPerFile);
    System.out.println("    blocksPerFile: " + blocksPerFile);
    System.out.println("    bytesPerBlock: " + bytesPerBlock);
    System.out.println("    bytesPerChecksum: " + bytesPerChecksum);
    if (operation == null ||  // verify args
            baseDir == null ||
            numFiles < 1 ||
            blocksPerFile < 1 ||
            bytesPerBlock < 0 ||
            bytesPerBlock % bytesPerChecksum != 0)
    {
      System.err.println(usage);
      System.exit(-1);
    }
    fileSys = FileSystem.get(jobConf);
    // One sub-directory per host so concurrent runs on different nodes
    // do not collide in the shared baseDir.
    String uniqueId = java.net.InetAddress.getLocalHost().getHostName();
    taskDir = new Path(baseDir, uniqueId);
    // initialize buffer used for writing/reading file
    buffer = new byte[(int) Math.min(bytesPerFile, 32768L)];
    Date execTime;
    Date endTime;
    long duration;
    int exceptions = 0;
    barrier(); // wait for coordinated start time
    execTime = new Date();
    System.out.println("Job started: " + startTime);
    if (operation.equals("createWrite")) {
      if (!fileSys.mkdirs(taskDir)) {
        throw new IOException("Mkdirs failed to create " + taskDir.toString());
      }
      exceptions = createWrite();
    } else if (operation.equals("openRead")) {
      exceptions = openRead();
    } else if (operation.equals("rename")) {
      exceptions = rename();
    } else if (operation.equals("delete")) {
      exceptions = delete();
    } else {
      System.err.println(usage); // protected against unknown operation
      System.exit(-1);
    }
    endTime = new Date();
    System.out.println("Job ended: " + endTime);
    duration = (endTime.getTime() - execTime.getTime()) /1000;
    System.out.println("The " + operation + " job took " + duration + " seconds.");
    System.out.println("The job recorded " + exceptions + " exceptions.");
  }
}
| |
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) frentix GmbH<br>
* http://www.frentix.com<br>
* <p>
*/
package org.olat.presentation.webfeed.podcast;
/* TODO: ORID-1007 'File' */
import java.io.File;
import java.util.Date;
import org.olat.data.commons.fileutil.FileNameValidator;
import org.olat.data.commons.vfs.VFSContainer;
import org.olat.lms.webfeed.Feed;
import org.olat.lms.webfeed.FeedManager;
import org.olat.lms.webfeed.Item;
import org.olat.presentation.framework.core.UserRequest;
import org.olat.presentation.framework.core.components.form.flexible.FormItem;
import org.olat.presentation.framework.core.components.form.flexible.FormItemContainer;
import org.olat.presentation.framework.core.components.form.flexible.elements.FileElement;
import org.olat.presentation.framework.core.components.form.flexible.elements.FormLink;
import org.olat.presentation.framework.core.components.form.flexible.elements.RichTextElement;
import org.olat.presentation.framework.core.components.form.flexible.elements.TextElement;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormBasicController;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormEvent;
import org.olat.presentation.framework.core.components.form.flexible.impl.FormLayoutContainer;
import org.olat.presentation.framework.core.components.form.flexible.impl.elements.richText.RichTextConfiguration;
import org.olat.presentation.framework.core.components.link.Link;
import org.olat.presentation.framework.core.control.Controller;
import org.olat.presentation.framework.core.control.WindowControl;
import org.olat.presentation.framework.core.translator.Translator;
import org.olat.system.event.Event;
/**
* Provides a form for editing episode data (title, description, file ...)
* <p>
* Events fired by this controller:
* <ul>
* <li>CANCELLED_EVENT</li>
* <li>DONE_EVENT</li>
* </ul>
* Initial Date: Mar 2, 2009 <br>
*
* @author gwassmann
*/
public class EpisodeFormController extends FormBasicController {
    /** Regex of permitted media-file endings (lower-cased name is matched against it). */
    public static final String MIME_TYPES_ALLOWED = ".*[.](flv|mp3|mp4|m4v|m4a|aac)";
    private final Item episode;
    private final Feed podcast;
    private TextElement title;
    private RichTextElement desc;
    private final VFSContainer baseDir;
    private FileElement file;
    private FormLink cancelButton;
    /**
     * @param ureq
     * @param control
     */
    public EpisodeFormController(final UserRequest ureq, final WindowControl control, final Item episode, final Feed podcast, final Translator translator) {
        super(ureq, control);
        this.episode = episode;
        this.podcast = podcast;
        this.baseDir = FeedManager.getInstance().getItemContainer(episode, podcast);
        setTranslator(translator);
        initForm(ureq);
    }
    /**
     */
    @Override
    protected void doDispose() {
        // nothing to do
    }
    /**
     */
    @Override
    protected void formOK(final UserRequest ureq) {
        // Update episode. It is saved by the manager.
        episode.setTitle(title.getValue());
        episode.setDescription(desc.getValue());
        episode.setLastModified(new Date());
        FileElement fileElement = getFile();
        if (fileElement != null) {
            fileElement.logUpload();
        }
        // May be null when no (new) file was uploaded successfully.
        episode.setMediaFile(fileElement);
        // Set episode as published (no draft feature for podcast)
        episode.setDraft(false);
        this.fireEvent(ureq, Event.CHANGED_EVENT);
    }
    /**
     * org.olat.presentation.framework.components.form.flexible.FormItem, org.olat.presentation.framework.components.form.flexible.impl.FormEvent)
     */
    @Override
    protected void formInnerEvent(final UserRequest ureq, final FormItem source, final FormEvent event) {
        if (source == cancelButton && event.wasTriggerdBy(FormEvent.ONCLICK)) {
            fireEvent(ureq, Event.CANCELLED_EVENT);
        } else if (source == file && event.wasTriggerdBy(FormEvent.ONCHANGE)) {
            // Validate eagerly so the user gets feedback right after the upload.
            if (file.isUploadSuccess()) {
                validateAndMarkFile(file.getUploadFileName());
            }
        }
    }
    /**
     */
    @Override
    protected boolean validateFormLogic(final UserRequest ureq) {
        // Since mimetype restrictions have been proved to be problematic, let us
        // validate the file ending instead as a pragmatic solution.
        final String name = file.getUploadFileName();
        if (name != null) {
            if (!validateAndMarkFile(name)) {
                return false;
            }
            flc.setDirty(true);
        }
        return super.validateFormLogic(ureq);
    }
    /**
     * Validates the uploaded file name (allowed type + legal file name) and sets or
     * clears the error marker on the file element accordingly.  Shared by the
     * ONCHANGE handler and the form validation so both report identical errors.
     *
     * @param filename the uploaded file's name, never null
     * @return true if the file name is acceptable
     */
    private boolean validateAndMarkFile(final String filename) {
        final boolean isValidFileType = filename.toLowerCase().matches(MIME_TYPES_ALLOWED);
        final boolean isFilenameValid = validateFilename(filename);
        if (!isValidFileType) {
            file.setErrorKey("feed.form.file.type.error", null);
            return false;
        }
        if (!isFilenameValid) {
            file.setErrorKey("podcastfile.name.notvalid", null);
            return false;
        }
        file.clearError();
        return true;
    }
    private boolean validateFilename(final String filename) {
        final boolean valid = FileNameValidator.validate(filename);
        // the Flash Player has some problem with spaces too
        if (valid) {
            return filename.indexOf(' ') < 0;
        }
        return valid;
    }
    /**
     * org.olat.presentation.framework.control.Controller, org.olat.presentation.framework.UserRequest)
     */
    @Override
    @SuppressWarnings("unused")
    protected void initForm(final FormItemContainer formLayout, final Controller listener, final UserRequest ureq) {
        this.setFormTitle("feed.edit.item");
        this.setFormContextHelp(this.getClass().getPackage().getName(), "episode_form_help.html", "chelp.hover.episode");
        title = uifactory.addTextElement("title", "feed.title.label", 256, episode.getTitle(), this.flc);
        title.setMandatory(true);
        title.setNotEmptyCheck("feed.form.field.is_mandatory");
        boolean fullProfileDescription = false;
        desc = uifactory.addRichTextElementForStringData("desc", "feed.form.description", episode.getDescription(), 12, -1, false, fullProfileDescription, baseDir, null,
                formLayout, ureq.getUserSession(), getWindowControl());
        final RichTextConfiguration richTextConfig = desc.getEditorConfiguration();
        // set upload dir to the media dir
        richTextConfig.setFileBrowserUploadRelPath("media");
        richTextConfig.disableMediaAndOlatMovieViewer(baseDir, fullProfileDescription, 2);
        file = uifactory.addFileElement("file", this.flc);
        file.setLabel("podcast.episode.file.label", null);
        file.setMandatory(true, "podcast.episode.mandatory");
        final File mediaFile = FeedManager.getInstance().getItemEnclosureFile(episode, podcast);
        file.setInitialFile(mediaFile);
        file.addActionListener(this, FormEvent.ONCHANGE);
        // Submit and cancel buttons
        final FormLayoutContainer buttonLayout = FormLayoutContainer.createButtonLayout("button_layout", getTranslator());
        this.flc.add(buttonLayout);
        uifactory.addFormSubmitButton("feed.publish", buttonLayout);
        cancelButton = uifactory.addFormLink("cancel", buttonLayout, Link.BUTTON);
    }
    /**
     * @return The file element of this form, or null if no file was uploaded successfully
     */
    private FileElement getFile() {
        FileElement fileElement = null;
        if (file.isUploadSuccess()) {
            fileElement = file;
        }
        return fileElement;
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.assertHasSingletonFrameworksPhaseWithFrameworkEntries;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.assertHasSingletonPhaseWithEntries;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.assertTargetExistsAndReturnTarget;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.createDescriptionArgWithDefaults;
import static com.facebook.buck.apple.ProjectGeneratorTestUtils.getSingletonPhaseByType;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.startsWith;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.hamcrest.collection.IsEmptyIterable.emptyIterable;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import com.facebook.buck.apple.xcode.xcodeproj.CopyFilePhaseDestinationSpec;
import com.facebook.buck.apple.xcode.xcodeproj.PBXBuildPhase;
import com.facebook.buck.apple.xcode.xcodeproj.PBXCopyFilesBuildPhase;
import com.facebook.buck.apple.xcode.xcodeproj.PBXFileReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXGroup;
import com.facebook.buck.apple.xcode.xcodeproj.PBXNativeTarget;
import com.facebook.buck.apple.xcode.xcodeproj.PBXProject;
import com.facebook.buck.apple.xcode.xcodeproj.PBXReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXResourcesBuildPhase;
import com.facebook.buck.apple.xcode.xcodeproj.PBXShellScriptBuildPhase;
import com.facebook.buck.apple.xcode.xcodeproj.ProductType;
import com.facebook.buck.apple.xcode.xcodeproj.SourceTreePath;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.cli.FakeBuckConfig;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.js.IosReactNativeLibraryBuilder;
import com.facebook.buck.js.ReactNativeBuckConfig;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.TargetNode;
import com.facebook.buck.rules.coercer.FrameworkPath;
import com.facebook.buck.rules.coercer.SourceWithFlags;
import com.facebook.buck.testutil.AllExistingProjectFilesystem;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.junit.Before;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.List;
public class NewNativeTargetProjectMutatorTest {
private PBXProject generatedProject;
private PathRelativizer pathRelativizer;
private SourcePathResolver sourcePathResolver;
@Before
public void setUp() {
generatedProject = new PBXProject("TestProject");
sourcePathResolver =
new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer()));
pathRelativizer = new PathRelativizer(
Paths.get("_output"),
sourcePathResolver.deprecatedPathFunction());
}
@Test
public void shouldCreateTargetAndTargetGroup() throws NoSuchBuildTargetException {
NewNativeTargetProjectMutator mutator = new NewNativeTargetProjectMutator(
pathRelativizer,
sourcePathResolver.deprecatedPathFunction());
mutator
.setTargetName("TestTarget")
.setProduct(
ProductType.BUNDLE,
"TestTargetProduct",
Paths.get("TestTargetProduct.bundle"))
.buildTargetAndAddToProject(generatedProject);
assertTargetExistsAndReturnTarget(generatedProject, "TestTarget");
assertHasTargetGroupWithName(generatedProject, "TestTarget");
}
@Test
public void shouldCreateTargetAndCustomTargetGroup() throws NoSuchBuildTargetException {
NewNativeTargetProjectMutator mutator = new NewNativeTargetProjectMutator(
pathRelativizer,
sourcePathResolver.deprecatedPathFunction());
mutator
.setTargetName("TestTarget")
.setTargetGroupPath(ImmutableList.of("Grandparent", "Parent"))
.setProduct(
ProductType.BUNDLE,
"TestTargetProduct",
Paths.get("TestTargetProduct.bundle"))
.buildTargetAndAddToProject(generatedProject);
assertTargetExistsAndReturnTarget(generatedProject, "TestTarget");
PBXGroup grandparentGroup =
assertHasSubgroupAndReturnIt(generatedProject.getMainGroup(), "Grandparent");
assertHasSubgroupAndReturnIt(grandparentGroup, "Parent");
}
  @Test
  public void testSourceGroups() throws NoSuchBuildTargetException {
    // Sources spread over two directories should produce two child groups
    // under "Sources", with files sorted by name within each group.
    NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
    SourcePath foo = new FakeSourcePath("Group1/foo.m");
    SourcePath bar = new FakeSourcePath("Group1/bar.m");
    SourcePath baz = new FakeSourcePath("Group2/baz.m");
    mutator.setSourcesWithFlags(
        ImmutableSet.of(
            SourceWithFlags.of(foo),
            SourceWithFlags.of(bar, ImmutableList.of("-Wall")),
            SourceWithFlags.of(baz)));
    NewNativeTargetProjectMutator.Result result = mutator.buildTargetAndAddToProject(
        generatedProject);
    PBXGroup sourcesGroup = result.targetGroup.getOrCreateChildGroupByName("Sources");
    // Group1 contains bar.m and foo.m (alphabetical order).
    PBXGroup group1 = (PBXGroup) Iterables.get(sourcesGroup.getChildren(), 0);
    assertEquals("Group1", group1.getName());
    assertThat(group1.getChildren(), hasSize(2));
    PBXFileReference fileRefBar = (PBXFileReference) Iterables.get(group1.getChildren(), 0);
    assertEquals("bar.m", fileRefBar.getName());
    PBXFileReference fileRefFoo = (PBXFileReference) Iterables.get(group1.getChildren(), 1);
    assertEquals("foo.m", fileRefFoo.getName());
    // Group2 contains the single baz.m.
    PBXGroup group2 = (PBXGroup) Iterables.get(sourcesGroup.getChildren(), 1);
    assertEquals("Group2", group2.getName());
    assertThat(group2.getChildren(), hasSize(1));
    PBXFileReference fileRefBaz = (PBXFileReference) Iterables.get(group2.getChildren(), 0);
    assertEquals("baz.m", fileRefBaz.getName());
  }
@Test
public void testLibraryHeaderGroups() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  // Public and private headers are registered through separate setters but end up
  // in the same per-directory groups under "Sources".
  SourcePath foo = new FakeSourcePath("HeaderGroup1/foo.h");
  SourcePath bar = new FakeSourcePath("HeaderGroup1/bar.h");
  SourcePath baz = new FakeSourcePath("HeaderGroup2/baz.h");
  mutator.setPublicHeaders(ImmutableSet.of(bar, baz));
  mutator.setPrivateHeaders(ImmutableSet.of(foo));
  NewNativeTargetProjectMutator.Result result = mutator.buildTargetAndAddToProject(
      generatedProject);
  PBXGroup sourcesGroup = result.targetGroup.getOrCreateChildGroupByName("Sources");
  assertThat(sourcesGroup.getChildren(), hasSize(2));
  // HeaderGroup1 holds both of its headers regardless of public/private split.
  PBXGroup group1 = (PBXGroup) Iterables.get(sourcesGroup.getChildren(), 0);
  assertEquals("HeaderGroup1", group1.getName());
  assertThat(group1.getChildren(), hasSize(2));
  PBXFileReference fileRefBar = (PBXFileReference) Iterables.get(group1.getChildren(), 0);
  assertEquals("bar.h", fileRefBar.getName());
  PBXFileReference fileRefFoo = (PBXFileReference) Iterables.get(group1.getChildren(), 1);
  assertEquals("foo.h", fileRefFoo.getName());
  PBXGroup group2 = (PBXGroup) Iterables.get(sourcesGroup.getChildren(), 1);
  assertEquals("HeaderGroup2", group2.getName());
  assertThat(group2.getChildren(), hasSize(1));
  PBXFileReference fileRefBaz = (PBXFileReference) Iterables.get(group2.getChildren(), 0);
  assertEquals("baz.h", fileRefBaz.getName());
}
/**
 * The prefix header must always be placed directly inside the "Sources" group, even
 * when its path nests it inside some other directory.
 */
@Test
public void testPrefixHeaderInSourceGroup() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  mutator.setPrefixHeader(
      Optional.<SourcePath>of(new FakeSourcePath("Group1/prefix.pch")));
  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  PBXGroup sourcesGroup = result.targetGroup.getOrCreateChildGroupByName("Sources");
  assertThat(sourcesGroup.getChildren(), hasSize(1));
  PBXFileReference prefixHeaderRef =
      (PBXFileReference) Iterables.get(sourcesGroup.getChildren(), 0);
  assertEquals("prefix.pch", prefixHeaderRef.getName());
}
/**
 * Frameworks and static archives registered on the mutator should both appear in a
 * single frameworks build phase, with source-tree-relative paths.
 */
@Test
public void testFrameworkBuildPhase() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();

  // One SDK-rooted framework dependency.
  FrameworkPath sdkFramework =
      FrameworkPath.ofSourceTreePath(
          new SourceTreePath(
              PBXReference.SourceTree.SDKROOT,
              Paths.get("Foo.framework"),
              Optional.<String>absent()));
  mutator.setFrameworks(ImmutableSet.of(sdkFramework));

  // One prebuilt archive from the build products directory.
  PBXFileReference archiveRef =
      new PBXFileReference(
          "libdep.a",
          "libdep.a",
          PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
          Optional.<String>absent());
  mutator.setArchives(ImmutableSet.of(archiveRef));

  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  assertHasSingletonFrameworksPhaseWithFrameworkEntries(
      result.target,
      ImmutableList.of(
          "$SDKROOT/Foo.framework",
          "$BUILT_PRODUCTS_DIR/libdep.a"));
}
/**
 * A recursive resource (foo.png) should land in a singleton resources build phase,
 * with its path expressed relative to $SOURCE_ROOT.
 */
@Test
public void testResourcesBuildPhase() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  AppleResourceDescription.Arg resourceArg =
      createDescriptionArgWithDefaults(new AppleResourceDescription());
  resourceArg.files = ImmutableSet.<SourcePath>of(new FakeSourcePath("foo.png"));
  mutator.setRecursiveResources(ImmutableSet.of(resourceArg));
  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  assertHasSingletonPhaseWithEntries(
      result.target,
      PBXResourcesBuildPhase.class,
      ImmutableList.of("$SOURCE_ROOT/../foo.png"));
}
/**
 * A copy-files phase handed to the mutator should be attached to the target verbatim:
 * the singleton PBXCopyFilesBuildPhase retrieved from the result equals the one set.
 */
@Test
public void testCopyFilesBuildPhase() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  CopyFilePhaseDestinationSpec.Builder specBuilder = CopyFilePhaseDestinationSpec.builder();
  specBuilder.setDestination(PBXCopyFilesBuildPhase.Destination.FRAMEWORKS);
  specBuilder.setPath("foo.png");

  PBXBuildPhase copyPhase = new PBXCopyFilesBuildPhase(specBuilder.build());
  mutator.setCopyFilesPhases(ImmutableList.of(copyPhase));

  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  PBXBuildPhase actualPhase =
      getSingletonPhaseByType(result.target, PBXCopyFilesBuildPhase.class);
  assertThat(copyPhase, equalTo(actualPhase));
}
@Test
public void testCopyFilesBuildPhaseIsBeforePostBuildScriptBuildPhase()
    throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  // A copy-files phase targeting the Frameworks destination...
  CopyFilePhaseDestinationSpec.Builder specBuilder = CopyFilePhaseDestinationSpec.builder();
  specBuilder.setDestination(PBXCopyFilesBuildPhase.Destination.FRAMEWORKS);
  specBuilder.setPath("script/input.png");
  PBXBuildPhase copyFilesPhase = new PBXCopyFilesBuildPhase(specBuilder.build());
  mutator.setCopyFilesPhases(ImmutableList.of(copyFilesPhase));
  // ...plus a post-build shell script phase on the same target.
  TargetNode<?> postbuildNode = XcodePostbuildScriptBuilder
      .createBuilder(BuildTargetFactory.newInstance("//foo:script"))
      .setCmd("echo \"hello world!\"")
      .build();
  mutator.setPostBuildRunScriptPhasesFromTargetNodes(
      ImmutableList.<TargetNode<?>>of(postbuildNode));
  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  PBXNativeTarget target = result.target;
  List<PBXBuildPhase> buildPhases = target.getBuildPhases();
  PBXBuildPhase copyBuildPhaseToTest = getSingletonPhaseByType(
      target,
      PBXCopyFilesBuildPhase.class);
  PBXBuildPhase postBuildScriptPhase = getSingletonPhaseByType(
      target,
      PBXShellScriptBuildPhase.class);
  // Ordering contract: the copy-files phase must run before the post-build script.
  assertThat(
      buildPhases.indexOf(copyBuildPhaseToTest),
      lessThan(buildPhases.indexOf(postBuildScriptPhase)));
}
/**
 * Recursive asset catalogs should be added to the resources build phase with a
 * $SOURCE_ROOT-relative path, just like ordinary resources.
 */
@Test
public void assetCatalogsBuildPhaseBuildsAssetCatalogs()
    throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();

  AppleAssetCatalogDescription.Arg assetCatalogArg = new AppleAssetCatalogDescription.Arg();
  assetCatalogArg.dirs =
      ImmutableSortedSet.<SourcePath>of(new FakeSourcePath("AssetCatalog1.xcassets"));
  mutator.setRecursiveAssetCatalogs(ImmutableSet.of(assetCatalogArg));

  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  assertHasSingletonPhaseWithEntries(
      result.target,
      PBXResourcesBuildPhase.class,
      ImmutableList.of("$SOURCE_ROOT/../AssetCatalog1.xcassets"));
}
@Test
public void testScriptBuildPhase() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  // NOTE(review): a *prebuild* script node is registered through the *post*-build
  // setter (setPostBuildRunScriptPhasesFromTargetNodes). The assertions below only
  // check the generated shell-script phase's contents, not its position, so this
  // passes either way — confirm whether the pre-build setter was intended.
  TargetNode<?> prebuildNode = XcodePrebuildScriptBuilder
      .createBuilder(BuildTargetFactory.newInstance("//foo:script"))
      .setSrcs(ImmutableSortedSet.<SourcePath>of(new FakeSourcePath("script/input.png")))
      .setOutputs(ImmutableSortedSet.of("helloworld.txt"))
      .setCmd("echo \"hello world!\"")
      .build();
  mutator.setPostBuildRunScriptPhasesFromTargetNodes(
      ImmutableList.<TargetNode<?>>of(prebuildNode));
  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  PBXShellScriptBuildPhase phase =
      getSingletonPhaseByType(result.target, PBXShellScriptBuildPhase.class);
  // Input paths are rewritten relative to the project; outputs are kept verbatim.
  assertThat(
      "Should set input paths correctly",
      "../script/input.png",
      is(equalTo(Iterables.getOnlyElement(phase.getInputPaths()))));
  assertThat(
      "Should set output paths correctly",
      "helloworld.txt",
      is(equalTo(Iterables.getOnlyElement(phase.getOutputPaths()))));
  assertEquals(
      "should set script correctly",
      "echo \"hello world!\"",
      phase.getShellScript());
}
@Test
public void testScriptBuildPhaseWithReactNative() throws NoSuchBuildTargetException {
  NewNativeTargetProjectMutator mutator = mutatorWithCommonDefaults();
  BuildTarget depBuildTarget = BuildTargetFactory.newInstance("//foo:dep");
  ProjectFilesystem filesystem = new AllExistingProjectFilesystem();
  // Buck config pointing at the React Native packager script.
  ReactNativeBuckConfig buckConfig = new ReactNativeBuckConfig(
      FakeBuckConfig.builder()
          .setSections(
              ImmutableMap.of(
                  "react-native",
                  ImmutableMap.of("packager", "react-native/packager.sh")))
          .setFilesystem(filesystem)
          .build());
  // React Native library node with a bundle name and JS entry point.
  TargetNode<?> reactNativeNode =
      IosReactNativeLibraryBuilder.builder(depBuildTarget, buckConfig)
          .setBundleName("Apps/Foo/FooBundle.js")
          .setEntryPath(new PathSourcePath(filesystem, Paths.get("js/FooApp.js")))
          .build();
  mutator.setPostBuildRunScriptPhasesFromTargetNodes(
      ImmutableList.<TargetNode<?>>of(reactNativeNode));
  NewNativeTargetProjectMutator.Result result =
      mutator.buildTargetAndAddToProject(generatedProject);
  PBXShellScriptBuildPhase phase =
      getSingletonPhaseByType(result.target, PBXShellScriptBuildPhase.class);
  String shellScript = phase.getShellScript();
  // The generated script must export the bundle/source-map locations derived from
  // the configured bundle name...
  assertThat(
      shellScript,
      startsWith("BASE_DIR=${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}\n" +
          "JS_OUT=${BASE_DIR}/Apps/Foo/FooBundle.js\n" +
          "SOURCE_MAP=${TEMP_DIR}/rn_source_map/Apps/Foo/FooBundle.js.map\n"));
  // ...and contain a disabled conditional ("if false") — presumably a dev-server
  // branch that is off in this configuration; TODO confirm against the script
  // generation code.
  assertThat(shellScript, containsString("if false"));
}
/**
 * Creates a mutator preconfigured with the target name and bundle product shared by
 * most tests in this class.
 */
private NewNativeTargetProjectMutator mutatorWithCommonDefaults() {
  NewNativeTargetProjectMutator mutator =
      new NewNativeTargetProjectMutator(
          pathRelativizer,
          sourcePathResolver.deprecatedPathFunction());
  mutator.setTargetName("TestTarget");
  mutator.setProduct(
      ProductType.BUNDLE,
      "TestTargetProduct",
      Paths.get("TestTargetProduct.bundle"));
  return mutator;
}
/**
 * Asserts that the project's main group has at least one child whose name equals
 * {@code name}; fails with a descriptive message otherwise.
 */
private static void assertHasTargetGroupWithName(PBXProject project, final String name) {
  for (PBXReference child : project.getMainGroup().getChildren()) {
    if (child.getName().equals(name)) {
      return;
    }
  }
  fail("Should contain a target group named: " + name);
}
/**
 * Asserts that {@code group} contains exactly one child {@link PBXGroup} named
 * {@code subgroupName} and returns it; fails otherwise.
 */
private static PBXGroup assertHasSubgroupAndReturnIt(PBXGroup group, final String subgroupName) {
  PBXGroup match = null;
  int matchCount = 0;
  for (PBXReference child : group.getChildren()) {
    if (child instanceof PBXGroup && child.getName().equals(subgroupName)) {
      match = (PBXGroup) child;
      matchCount++;
    }
  }
  if (matchCount != 1) {
    fail("Could not find a unique subgroup by its name");
  }
  return match;
}
}
| |
package org.openapitools.model;
import java.util.Objects;
import java.util.ArrayList;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import org.openapitools.model.FreeStyleBuild;
import org.openapitools.model.FreeStyleProjectactions;
import org.openapitools.model.FreeStyleProjecthealthReport;
import org.openapitools.model.NullSCM;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaResteasyServerCodegen", date = "2022-02-13T02:21:30.640855Z[Etc/UTC]")
/**
 * Generated OpenAPI model for a free-style project resource (appears to model a
 * Jenkins free-style job, judging by the field names and related types such as
 * {@code FreeStyleBuild} and {@code NullSCM} — TODO confirm against the spec).
 *
 * <p>Plain mutable bean: Jackson binds each JSON property via the {@code @JsonProperty}
 * annotation on the getter. {@code equals}/{@code hashCode}/{@code toString} cover all
 * fields. Generated code — prefer regenerating over hand-editing.
 */
public class FreeStyleProject {

  // Serialized as "_class" (see getter annotation); carries the server-side type
  // discriminator string.
  private String propertyClass;
  private String name;
  private String url;
  private String color;
  private List<FreeStyleProjectactions> actions = new ArrayList<FreeStyleProjectactions>();
  private String description;
  private String displayName;
  private String displayNameOrNull;
  private String fullDisplayName;
  private String fullName;
  private Boolean buildable;
  private List<FreeStyleBuild> builds = new ArrayList<FreeStyleBuild>();
  private FreeStyleBuild firstBuild;
  private List<FreeStyleProjecthealthReport> healthReport = new ArrayList<FreeStyleProjecthealthReport>();
  private Boolean inQueue;
  private Boolean keepDependencies;
  private FreeStyleBuild lastBuild;
  private FreeStyleBuild lastCompletedBuild;
  // NOTE(review): typed String by the generator while lastBuild/lastCompletedBuild/
  // lastStableBuild/lastSuccessfulBuild are FreeStyleBuild — verify against the
  // OpenAPI schema before relying on this field's contents.
  private String lastFailedBuild;
  private FreeStyleBuild lastStableBuild;
  private FreeStyleBuild lastSuccessfulBuild;
  // NOTE(review): also String, unlike the FreeStyleBuild-typed siblings — see above.
  private String lastUnstableBuild;
  private String lastUnsuccessfulBuild;
  private Integer nextBuildNumber;
  private String queueItem;
  private Boolean concurrentBuild;
  private NullSCM scm;

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("_class")
  public String getPropertyClass() {
    return propertyClass;
  }
  public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("name")
  public String getName() {
    return name;
  }
  public void setName(String name) {
    this.name = name;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("url")
  public String getUrl() {
    return url;
  }
  public void setUrl(String url) {
    this.url = url;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("color")
  public String getColor() {
    return color;
  }
  public void setColor(String color) {
    this.color = color;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("actions")
  public List<FreeStyleProjectactions> getActions() {
    return actions;
  }
  public void setActions(List<FreeStyleProjectactions> actions) {
    this.actions = actions;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("description")
  public String getDescription() {
    return description;
  }
  public void setDescription(String description) {
    this.description = description;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("displayName")
  public String getDisplayName() {
    return displayName;
  }
  public void setDisplayName(String displayName) {
    this.displayName = displayName;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("displayNameOrNull")
  public String getDisplayNameOrNull() {
    return displayNameOrNull;
  }
  public void setDisplayNameOrNull(String displayNameOrNull) {
    this.displayNameOrNull = displayNameOrNull;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("fullDisplayName")
  public String getFullDisplayName() {
    return fullDisplayName;
  }
  public void setFullDisplayName(String fullDisplayName) {
    this.fullDisplayName = fullDisplayName;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("fullName")
  public String getFullName() {
    return fullName;
  }
  public void setFullName(String fullName) {
    this.fullName = fullName;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("buildable")
  public Boolean getBuildable() {
    return buildable;
  }
  public void setBuildable(Boolean buildable) {
    this.buildable = buildable;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("builds")
  public List<FreeStyleBuild> getBuilds() {
    return builds;
  }
  public void setBuilds(List<FreeStyleBuild> builds) {
    this.builds = builds;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("firstBuild")
  public FreeStyleBuild getFirstBuild() {
    return firstBuild;
  }
  public void setFirstBuild(FreeStyleBuild firstBuild) {
    this.firstBuild = firstBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("healthReport")
  public List<FreeStyleProjecthealthReport> getHealthReport() {
    return healthReport;
  }
  public void setHealthReport(List<FreeStyleProjecthealthReport> healthReport) {
    this.healthReport = healthReport;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("inQueue")
  public Boolean getInQueue() {
    return inQueue;
  }
  public void setInQueue(Boolean inQueue) {
    this.inQueue = inQueue;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("keepDependencies")
  public Boolean getKeepDependencies() {
    return keepDependencies;
  }
  public void setKeepDependencies(Boolean keepDependencies) {
    this.keepDependencies = keepDependencies;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastBuild")
  public FreeStyleBuild getLastBuild() {
    return lastBuild;
  }
  public void setLastBuild(FreeStyleBuild lastBuild) {
    this.lastBuild = lastBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastCompletedBuild")
  public FreeStyleBuild getLastCompletedBuild() {
    return lastCompletedBuild;
  }
  public void setLastCompletedBuild(FreeStyleBuild lastCompletedBuild) {
    this.lastCompletedBuild = lastCompletedBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastFailedBuild")
  public String getLastFailedBuild() {
    return lastFailedBuild;
  }
  public void setLastFailedBuild(String lastFailedBuild) {
    this.lastFailedBuild = lastFailedBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastStableBuild")
  public FreeStyleBuild getLastStableBuild() {
    return lastStableBuild;
  }
  public void setLastStableBuild(FreeStyleBuild lastStableBuild) {
    this.lastStableBuild = lastStableBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastSuccessfulBuild")
  public FreeStyleBuild getLastSuccessfulBuild() {
    return lastSuccessfulBuild;
  }
  public void setLastSuccessfulBuild(FreeStyleBuild lastSuccessfulBuild) {
    this.lastSuccessfulBuild = lastSuccessfulBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastUnstableBuild")
  public String getLastUnstableBuild() {
    return lastUnstableBuild;
  }
  public void setLastUnstableBuild(String lastUnstableBuild) {
    this.lastUnstableBuild = lastUnstableBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("lastUnsuccessfulBuild")
  public String getLastUnsuccessfulBuild() {
    return lastUnsuccessfulBuild;
  }
  public void setLastUnsuccessfulBuild(String lastUnsuccessfulBuild) {
    this.lastUnsuccessfulBuild = lastUnsuccessfulBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("nextBuildNumber")
  public Integer getNextBuildNumber() {
    return nextBuildNumber;
  }
  public void setNextBuildNumber(Integer nextBuildNumber) {
    this.nextBuildNumber = nextBuildNumber;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("queueItem")
  public String getQueueItem() {
    return queueItem;
  }
  public void setQueueItem(String queueItem) {
    this.queueItem = queueItem;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("concurrentBuild")
  public Boolean getConcurrentBuild() {
    return concurrentBuild;
  }
  public void setConcurrentBuild(Boolean concurrentBuild) {
    this.concurrentBuild = concurrentBuild;
  }

  /**
   **/
  @ApiModelProperty(value = "")
  @JsonProperty("scm")
  public NullSCM getScm() {
    return scm;
  }
  public void setScm(NullSCM scm) {
    this.scm = scm;
  }

  // Field-wise equality over all 27 properties; kept in sync with hashCode below.
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FreeStyleProject freeStyleProject = (FreeStyleProject) o;
    return Objects.equals(propertyClass, freeStyleProject.propertyClass) &&
        Objects.equals(name, freeStyleProject.name) &&
        Objects.equals(url, freeStyleProject.url) &&
        Objects.equals(color, freeStyleProject.color) &&
        Objects.equals(actions, freeStyleProject.actions) &&
        Objects.equals(description, freeStyleProject.description) &&
        Objects.equals(displayName, freeStyleProject.displayName) &&
        Objects.equals(displayNameOrNull, freeStyleProject.displayNameOrNull) &&
        Objects.equals(fullDisplayName, freeStyleProject.fullDisplayName) &&
        Objects.equals(fullName, freeStyleProject.fullName) &&
        Objects.equals(buildable, freeStyleProject.buildable) &&
        Objects.equals(builds, freeStyleProject.builds) &&
        Objects.equals(firstBuild, freeStyleProject.firstBuild) &&
        Objects.equals(healthReport, freeStyleProject.healthReport) &&
        Objects.equals(inQueue, freeStyleProject.inQueue) &&
        Objects.equals(keepDependencies, freeStyleProject.keepDependencies) &&
        Objects.equals(lastBuild, freeStyleProject.lastBuild) &&
        Objects.equals(lastCompletedBuild, freeStyleProject.lastCompletedBuild) &&
        Objects.equals(lastFailedBuild, freeStyleProject.lastFailedBuild) &&
        Objects.equals(lastStableBuild, freeStyleProject.lastStableBuild) &&
        Objects.equals(lastSuccessfulBuild, freeStyleProject.lastSuccessfulBuild) &&
        Objects.equals(lastUnstableBuild, freeStyleProject.lastUnstableBuild) &&
        Objects.equals(lastUnsuccessfulBuild, freeStyleProject.lastUnsuccessfulBuild) &&
        Objects.equals(nextBuildNumber, freeStyleProject.nextBuildNumber) &&
        Objects.equals(queueItem, freeStyleProject.queueItem) &&
        Objects.equals(concurrentBuild, freeStyleProject.concurrentBuild) &&
        Objects.equals(scm, freeStyleProject.scm);
  }

  @Override
  public int hashCode() {
    return Objects.hash(propertyClass, name, url, color, actions, description, displayName, displayNameOrNull, fullDisplayName, fullName, buildable, builds, firstBuild, healthReport, inQueue, keepDependencies, lastBuild, lastCompletedBuild, lastFailedBuild, lastStableBuild, lastSuccessfulBuild, lastUnstableBuild, lastUnsuccessfulBuild, nextBuildNumber, queueItem, concurrentBuild, scm);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class FreeStyleProject {\n");
    sb.append("    propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    sb.append("    name: ").append(toIndentedString(name)).append("\n");
    sb.append("    url: ").append(toIndentedString(url)).append("\n");
    sb.append("    color: ").append(toIndentedString(color)).append("\n");
    sb.append("    actions: ").append(toIndentedString(actions)).append("\n");
    sb.append("    description: ").append(toIndentedString(description)).append("\n");
    sb.append("    displayName: ").append(toIndentedString(displayName)).append("\n");
    sb.append("    displayNameOrNull: ").append(toIndentedString(displayNameOrNull)).append("\n");
    sb.append("    fullDisplayName: ").append(toIndentedString(fullDisplayName)).append("\n");
    sb.append("    fullName: ").append(toIndentedString(fullName)).append("\n");
    sb.append("    buildable: ").append(toIndentedString(buildable)).append("\n");
    sb.append("    builds: ").append(toIndentedString(builds)).append("\n");
    sb.append("    firstBuild: ").append(toIndentedString(firstBuild)).append("\n");
    sb.append("    healthReport: ").append(toIndentedString(healthReport)).append("\n");
    sb.append("    inQueue: ").append(toIndentedString(inQueue)).append("\n");
    sb.append("    keepDependencies: ").append(toIndentedString(keepDependencies)).append("\n");
    sb.append("    lastBuild: ").append(toIndentedString(lastBuild)).append("\n");
    sb.append("    lastCompletedBuild: ").append(toIndentedString(lastCompletedBuild)).append("\n");
    sb.append("    lastFailedBuild: ").append(toIndentedString(lastFailedBuild)).append("\n");
    sb.append("    lastStableBuild: ").append(toIndentedString(lastStableBuild)).append("\n");
    sb.append("    lastSuccessfulBuild: ").append(toIndentedString(lastSuccessfulBuild)).append("\n");
    sb.append("    lastUnstableBuild: ").append(toIndentedString(lastUnstableBuild)).append("\n");
    sb.append("    lastUnsuccessfulBuild: ").append(toIndentedString(lastUnsuccessfulBuild)).append("\n");
    sb.append("    nextBuildNumber: ").append(toIndentedString(nextBuildNumber)).append("\n");
    sb.append("    queueItem: ").append(toIndentedString(queueItem)).append("\n");
    sb.append("    concurrentBuild: ").append(toIndentedString(concurrentBuild)).append("\n");
    sb.append("    scm: ").append(toIndentedString(scm)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.ha;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.junit.Assert.assertNotNull;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionAttributes;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache30.ClientServerTestCase;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.AvailablePort;
import org.apache.geode.internal.cache.CacheServerImpl;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Assert;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.NetworkUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
/**
* This test verifies that all entry operations (create,put,destroy,invalidate) which propagate from
* one server1 to another server2 do get delivered to the client connected to server2 (server2 is
* primary for client)
*/
@Category({ClientSubscriptionTest.class})
public class OperationsPropagationDUnitTest extends JUnit4DistributedTestCase {
VM server1 = null;
VM server2 = null;
VM client1 = null;
public int PORT1;
public int PORT2;
private static final String REGION_NAME =
OperationsPropagationDUnitTest.class.getSimpleName() + "_Region";
private static Cache cache = null;
/**
* Create the server1, server2 (in the same DS) and client1 (which is connected only to server2
*/
@Override
public final void postSetUp() throws Exception {
final Host host = Host.getHost(0);
// Server1 VM
server1 = host.getVM(0);
// Server2 VM
server2 = host.getVM(1);
// Client 1 VM
client1 = host.getVM(2);
PORT1 = ((Integer) server1.invoke(() -> OperationsPropagationDUnitTest.createServerCache()))
.intValue();
PORT2 = ((Integer) server2.invoke(() -> OperationsPropagationDUnitTest.createServerCache()))
.intValue();
client1.invoke(() -> OperationsPropagationDUnitTest
.createClientCache(NetworkUtils.getServerHostName(host), new Integer(PORT2)));
}
/**
* close the caches of the client and the servers
*/
@Override
public final void preTearDown() throws Exception {
client1.invoke(() -> OperationsPropagationDUnitTest.closeCache());
server1.invoke(() -> OperationsPropagationDUnitTest.closeCache());
server2.invoke(() -> OperationsPropagationDUnitTest.closeCache());
}
/**
* closes the cache and disconnects the vm from the distributed system
*/
public static void closeCache() {
if (cache != null && !cache.isClosed()) {
cache.close();
cache.getDistributedSystem().disconnect();
}
}
/**
* connect to the DS and create a cache
*/
private void createCache(Properties props) throws Exception {
DistributedSystem ds = getSystem(props);
assertNotNull(ds);
ds.disconnect();
ds = getSystem(props);
cache = CacheFactory.create(ds);
assertNotNull(cache);
}
/**
* region to be created for the test
*/
protected static Region region = null;
/**
* Create the server
*/
public static Integer createServerCache() throws Exception {
new OperationsPropagationDUnitTest().createCache(new Properties());
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
RegionAttributes attrs = factory.create();
region = cache.createRegion(REGION_NAME, attrs);
CacheServerImpl server = (CacheServerImpl) cache.addCacheServer();
assertNotNull(server);
int port = AvailablePort.getRandomAvailablePort(AvailablePort.SOCKET);
server.setPort(port);
server.setNotifyBySubscription(true);
server.start();
return new Integer(server.getPort());
}
/**
* create the client and connect it to the server with the given port
*/
public static void createClientCache(String host, Integer port2) throws Exception {
int PORT2 = port2.intValue();
Properties props = new Properties();
props.setProperty(MCAST_PORT, "0");
props.setProperty(LOCATORS, "");
new OperationsPropagationDUnitTest().createCache(props);
props.setProperty("retryAttempts", "2");
props.setProperty("endpoints", "ep1=" + host + ":" + PORT2);
props.setProperty("redundancyLevel", "-1");
props.setProperty("establishCallbackConnection", "true");
props.setProperty("LBPolicy", "Sticky");
props.setProperty("readTimeout", "2000");
props.setProperty("socketBufferSize", "1000");
props.setProperty("retryInterval", "250");
props.setProperty("connectionsPerServer", "2");
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
ClientServerTestCase.configureConnectionPool(factory, host, PORT2, -1, true, -1, 2, null);
RegionAttributes attrs = factory.create();
region = cache.createRegion(REGION_NAME, attrs);
assertNotNull(region);
region.registerInterest("ALL_KEYS");
}
public static final String CREATE_KEY = "createKey";
public static final String CREATE_VALUE = "createValue";
public static final String UPDATE_KEY = "updateKey";
public static final String UPDATE_VALUE1 = "updateValue1";
public static final String UPDATE_VALUE2 = "updateValue2";
public static final String INVALIDATE_KEY = "invalidateKey";
public static final String INVALIDATE_VALUE = "invalidateValue";
public static final String DESTROY_KEY = "destroyKey";
public static final String DESTROY_VALUE = "destroyValue";
public static final String PUTALL_KEY = "putAllKey";
public static final String PUTALL_VALUE = "putAllValue";
public static final String PUTALL_KEY2 = "putAllKey2";
public static final String PUTALL_VALUE2 = "putAllValue2";
/**
* This test: 1) First the initial keys and values 2) Verify that the initial keys and values have
* reached the client 3) Do the operations which we want to propagate (create, update, invalidate
* and destroy) 4) Verify the operations reached the client 5) Do a removeAll 6) Verify it reached
* the client
*/
@Test
public void testOperationsPropagation() throws Exception {
server1.invoke(() -> OperationsPropagationDUnitTest.initialPutKeyValue());
client1.invoke(() -> OperationsPropagationDUnitTest.assertKeyValuePresent());
server1.invoke(() -> OperationsPropagationDUnitTest.doOperations());
client1.invoke(() -> OperationsPropagationDUnitTest.assertOperationsSucceeded());
server1.invoke(() -> OperationsPropagationDUnitTest.doRemoveAll());
client1.invoke(() -> OperationsPropagationDUnitTest.assertRemoveAllSucceeded());
}
/**
* put the initial keys and values
*/
public static void initialPutKeyValue() {
try {
region.put(UPDATE_KEY, UPDATE_VALUE1);
region.put(INVALIDATE_KEY, INVALIDATE_VALUE);
region.put(DESTROY_KEY, DESTROY_VALUE);
} catch (Exception e) {
Assert.fail(" Test failed due to " + e, e);
}
}
/**
* do the operations which you want to propagate
*/
public static void doOperations() {
try {
region.create(CREATE_KEY, CREATE_VALUE);
region.put(UPDATE_KEY, UPDATE_VALUE2);
region.invalidate(INVALIDATE_KEY);
region.destroy(DESTROY_KEY);
Map map = new HashMap();
map.put(PUTALL_KEY, PUTALL_VALUE);
map.put(PUTALL_KEY2, PUTALL_VALUE2);
region.putAll(map);
} catch (Exception e) {
Assert.fail(" Test failed due to " + e, e);
}
}
/**
* assert the initial key values are present
*/
public static void assertKeyValuePresent() {
try {
WaitCriterion wc = new WaitCriterion() {
String excuse;
@Override
public boolean done() {
Object val = region.get(UPDATE_KEY);
return UPDATE_VALUE1.equals(val);
}
@Override
public String description() {
return excuse;
}
};
GeodeAwaitility.await().untilAsserted(wc);
wc = new WaitCriterion() {
String excuse;
@Override
public boolean done() {
Object val = region.get(INVALIDATE_KEY);
return INVALIDATE_VALUE.equals(val);
}
@Override
public String description() {
return excuse;
}
};
GeodeAwaitility.await().untilAsserted(wc);
wc = new WaitCriterion() {
String excuse;
@Override
public boolean done() {
Object val = region.get(DESTROY_KEY);
return DESTROY_VALUE.equals(val);
}
@Override
public String description() {
return excuse;
}
};
GeodeAwaitility.await().untilAsserted(wc);
} catch (Exception e) {
Assert.fail(" Test failed due to " + e, e);
}
}
/**
 * Asserts that every operation performed by {@code doOperations} eventually
 * reaches this VM's region: the created and updated entries hold their new
 * values, the destroyed key is gone, the invalidated key maps to null, and
 * both putAll entries are present.
 * <p>
 * Fixes: removed the commented-out {@code Thread.sleep(5000)} (superseded by
 * the await loops), and made each criterion's {@code description()} report the
 * actual mismatch - the original {@code excuse} field was never assigned, so
 * timeout diagnostics were always {@code null}.
 */
public static void assertOperationsSucceeded() {
try {
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return CREATE_VALUE.equals(region.get(CREATE_KEY));
}
@Override
public String description() {
return "expected " + CREATE_KEY + " -> " + CREATE_VALUE
+ " but found " + region.get(CREATE_KEY);
}
});
GeodaitilityGuard: // no-op label removed; see note below
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return UPDATE_VALUE2.equals(region.get(UPDATE_KEY));
}
@Override
public String description() {
return "expected " + UPDATE_KEY + " -> " + UPDATE_VALUE2
+ " but found " + region.get(UPDATE_KEY);
}
});
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return !region.containsKey(DESTROY_KEY);
}
@Override
public String description() {
return "expected " + DESTROY_KEY + " to be destroyed but it is still present";
}
});
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
// invalidate leaves the key present with a null value
return region.get(INVALIDATE_KEY) == null;
}
@Override
public String description() {
return "expected " + INVALIDATE_KEY + " to be invalidated but found "
+ region.get(INVALIDATE_KEY);
}
});
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return PUTALL_VALUE.equals(region.get(PUTALL_KEY));
}
@Override
public String description() {
return "expected " + PUTALL_KEY + " -> " + PUTALL_VALUE
+ " but found " + region.get(PUTALL_KEY);
}
});
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return PUTALL_VALUE2.equals(region.get(PUTALL_KEY2));
}
@Override
public String description() {
return "expected " + PUTALL_KEY2 + " -> " + PUTALL_VALUE2
+ " but found " + region.get(PUTALL_KEY2);
}
});
} catch (Exception e) {
// Preserve the original exception as the cause of the test failure.
Assert.fail(" Test failed due to " + e, e);
}
}
/**
 * Removes both putAll keys from the region in a single bulk removeAll call.
 */
public static void doRemoveAll() {
region.removeAll(Arrays.asList(PUTALL_KEY, PUTALL_KEY2));
}
/**
 * Asserts that the removeAll performed by {@code doRemoveAll} eventually
 * propagates to this VM: neither putAll key remains in the region.
 * <p>
 * Fixes: dropped the unused local {@code val} (the original fetched
 * {@code region.get(PUTALL_KEY)} and never read it), and made
 * {@code description()} report a useful message - the original returned a
 * never-assigned {@code excuse} field, i.e. always {@code null}.
 */
public static void assertRemoveAllSucceeded() {
GeodeAwaitility.await().untilAsserted(new WaitCriterion() {
@Override
public boolean done() {
return !region.containsKey(PUTALL_KEY) && !region.containsKey(PUTALL_KEY2);
}
@Override
public String description() {
return "expected " + PUTALL_KEY + " and " + PUTALL_KEY2
+ " to be removed; containsKey=" + region.containsKey(PUTALL_KEY)
+ "/" + region.containsKey(PUTALL_KEY2);
}
});
}
}
| |
package io.dropwizard.metrics;
import org.junit.Test;
import io.dropwizard.metrics.Snapshot;
import io.dropwizard.metrics.WeightedSnapshot;
import io.dropwizard.metrics.WeightedSnapshot.WeightedSample;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.offset;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
/**
 * Tests for {@link WeightedSnapshot}: quantile lookup, summary statistics,
 * value dumping, and tolerance of collections whose {@code size()} disagrees
 * with the actual element count.
 */
public class WeightedSnapshotTest {

    /**
     * Builds a list of weighted samples from parallel arrays.
     * <p>
     * Kept with its original (non-conventional) name because it is public and
     * may be referenced by other tests; modifier order fixed to the canonical
     * {@code public static}, and the list is presized and uses the diamond
     * operator instead of repeating the type argument.
     *
     * @param values  the sample values
     * @param weights  the sample weights, one per value
     * @return the weighted samples
     * @throws IllegalArgumentException if the arrays differ in length
     */
    public static ArrayList<WeightedSample> WeightedArray(long[] values, double[] weights) {
        if (values.length != weights.length) {
            throw new IllegalArgumentException("Mismatched lengths: " + values.length + " vs " + weights.length);
        }
        final ArrayList<WeightedSample> samples = new ArrayList<>(values.length);
        for (int i = 0; i < values.length; i++) {
            samples.add(new WeightedSnapshot.WeightedSample(values[i], weights[i]));
        }
        return samples;
    }

    // Fixture: five values with uneven weights; the weighting shifts the
    // quantiles relative to an unweighted snapshot of the same values.
    private final Snapshot snapshot = new WeightedSnapshot(
            WeightedArray(new long[]{5, 1, 2, 3, 4}, new double[]{1, 2, 3, 2, 2}));

    @Test
    public void smallQuantilesAreTheFirstValue() throws Exception {
        assertThat(snapshot.getValue(0.0))
                .isEqualTo(1.0, offset(0.1));
    }

    @Test
    public void bigQuantilesAreTheLastValue() throws Exception {
        assertThat(snapshot.getValue(1.0))
                .isEqualTo(5.0, offset(0.1));
    }

    @Test(expected = IllegalArgumentException.class)
    public void disallowsNotANumberQuantile() {
        snapshot.getValue(Double.NaN);
    }

    @Test(expected = IllegalArgumentException.class)
    public void disallowsNegativeQuantile() {
        snapshot.getValue(-0.5);
    }

    @Test(expected = IllegalArgumentException.class)
    public void disallowsQuantileOverOne() {
        snapshot.getValue(1.5);
    }

    @Test
    public void hasAMedian() throws Exception {
        assertThat(snapshot.getMedian()).isEqualTo(3.0, offset(0.1));
    }

    @Test
    public void hasAp75() throws Exception {
        assertThat(snapshot.get75thPercentile()).isEqualTo(4.0, offset(0.1));
    }

    @Test
    public void hasAp95() throws Exception {
        assertThat(snapshot.get95thPercentile()).isEqualTo(5.0, offset(0.1));
    }

    @Test
    public void hasAp98() throws Exception {
        assertThat(snapshot.get98thPercentile()).isEqualTo(5.0, offset(0.1));
    }

    @Test
    public void hasAp99() throws Exception {
        assertThat(snapshot.get99thPercentile()).isEqualTo(5.0, offset(0.1));
    }

    @Test
    public void hasAp999() throws Exception {
        assertThat(snapshot.get999thPercentile()).isEqualTo(5.0, offset(0.1));
    }

    @Test
    public void hasValues() throws Exception {
        assertThat(snapshot.getValues())
                .containsOnly(1, 2, 3, 4, 5);
    }

    @Test
    public void hasASize() throws Exception {
        assertThat(snapshot.size())
                .isEqualTo(5);
    }

    // size() first reports one fewer element than the collection holds;
    // the snapshot must still capture all elements.
    @Test
    public void worksWithUnderestimatedCollections() throws Exception {
        final List<WeightedSample> items = spy(WeightedArray(new long[]{5, 1, 2, 3, 4}, new double[]{1, 2, 3, 2, 2}));
        when(items.size()).thenReturn(4, 5);
        final Snapshot other = new WeightedSnapshot(items);
        assertThat(other.getValues())
                .containsOnly(1, 2, 3, 4, 5);
    }

    // size() first reports one more element than the collection holds;
    // the snapshot must not pad with spurious entries.
    @Test
    public void worksWithOverestimatedCollections() throws Exception {
        final List<WeightedSample> items = spy(WeightedArray(new long[]{5, 1, 2, 3, 4}, new double[]{1, 2, 3, 2, 2}));
        when(items.size()).thenReturn(6, 5);
        final Snapshot other = new WeightedSnapshot(items);
        assertThat(other.getValues())
                .containsOnly(1, 2, 3, 4, 5);
    }

    @Test
    public void dumpsToAStream() throws Exception {
        final ByteArrayOutputStream output = new ByteArrayOutputStream();
        snapshot.dump(output);
        assertThat(output.toString())
                .isEqualTo(String.format("1%n2%n3%n4%n5%n"));
    }

    @Test
    public void calculatesTheMinimumValue() throws Exception {
        assertThat(snapshot.getMin())
                .isEqualTo(1);
    }

    @Test
    public void calculatesTheMaximumValue() throws Exception {
        assertThat(snapshot.getMax())
                .isEqualTo(5);
    }

    @Test
    public void calculatesTheMeanValue() throws Exception {
        assertThat(snapshot.getMean())
                .isEqualTo(2.7);
    }

    @Test
    public void calculatesTheStdDev() throws Exception {
        assertThat(snapshot.getStdDev())
                .isEqualTo(1.2688, offset(0.0001));
    }

    @Test
    public void calculatesAMinOfZeroForAnEmptySnapshot() throws Exception {
        final Snapshot emptySnapshot = new WeightedSnapshot(
                WeightedArray(new long[]{}, new double[]{}));
        assertThat(emptySnapshot.getMin())
                .isZero();
    }

    @Test
    public void calculatesAMaxOfZeroForAnEmptySnapshot() throws Exception {
        final Snapshot emptySnapshot = new WeightedSnapshot(
                WeightedArray(new long[]{}, new double[]{}));
        assertThat(emptySnapshot.getMax())
                .isZero();
    }

    @Test
    public void calculatesAMeanOfZeroForAnEmptySnapshot() throws Exception {
        final Snapshot emptySnapshot = new WeightedSnapshot(
                WeightedArray(new long[]{}, new double[]{}));
        assertThat(emptySnapshot.getMean())
                .isZero();
    }

    @Test
    public void calculatesAStdDevOfZeroForAnEmptySnapshot() throws Exception {
        final Snapshot emptySnapshot = new WeightedSnapshot(
                WeightedArray(new long[]{}, new double[]{}));
        assertThat(emptySnapshot.getStdDev())
                .isZero();
    }

    @Test
    public void calculatesAStdDevOfZeroForASingletonSnapshot() throws Exception {
        final Snapshot singleItemSnapshot = new WeightedSnapshot(
                WeightedArray(new long[]{1}, new double[]{1.0}));
        assertThat(singleItemSnapshot.getStdDev())
                .isZero();
    }

    // Denormal-small weights must not underflow the normalization to NaN/0.
    @Test
    public void expectNoOverflowForLowWeights() throws Exception {
        final Snapshot scatteredSnapshot = new WeightedSnapshot(
                WeightedArray(
                        new long[]{1, 2, 3},
                        new double[]{Double.MIN_VALUE, Double.MIN_VALUE, Double.MIN_VALUE}
                )
        );
        assertThat(scatteredSnapshot.getMean())
                .isEqualTo(2);
    }
}
| |
/*
* Copyright 2001-present Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.beans.ser;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
/**
* A factory used to create wrappers around collection-like objects.
*/
public class SerIteratorFactory {

    /**
     * Singleton instance.
     */
    public static final SerIteratorFactory INSTANCE = getInstance();

    // Selects the richest factory the classpath supports: Joda-Collect aware,
    // then Guava aware, then this basic implementation. LinkageError is caught
    // as well as Exception because a probed class may be present but fail to link.
    private static SerIteratorFactory getInstance() {
        try {
            Class.forName("org.joda.collect.grid.Grid");
            return new CollectSerIteratorFactory();
        } catch (Exception | LinkageError ex) {
            try {
                Class.forName("com.google.common.collect.Multimap");
                return new GuavaSerIteratorFactory();
            } catch (Exception | LinkageError ex2) {
                return new SerIteratorFactory();
            }
        }
    }

    /**
     * An empty list of classes.
     */
    public static final List<Class<?>> EMPTY_VALUE_TYPES = Collections.emptyList();

    /**
     * Map of array types.
     * Maps a meta-type description string (one or two dimensional) to the
     * component class of the corresponding array.
     */
    private static final Map<String, Class<?>> META_TYPE_MAP = new HashMap<>();
    static {
        META_TYPE_MAP.put("Object[]", Object.class);
        META_TYPE_MAP.put("String[]", String.class);
        META_TYPE_MAP.put("boolean[]", boolean.class);
        META_TYPE_MAP.put("char[]", char.class);
        META_TYPE_MAP.put("byte[]", byte.class);
        META_TYPE_MAP.put("short[]", short.class);
        META_TYPE_MAP.put("int[]", int.class);
        META_TYPE_MAP.put("long[]", long.class);
        META_TYPE_MAP.put("float[]", float.class);
        META_TYPE_MAP.put("double[]", double.class);
        META_TYPE_MAP.put("Object[][]", Object[].class);
        META_TYPE_MAP.put("String[][]", String[].class);
        META_TYPE_MAP.put("boolean[][]", boolean[].class);
        META_TYPE_MAP.put("char[][]", char[].class);
        META_TYPE_MAP.put("byte[][]", byte[].class);
        META_TYPE_MAP.put("short[][]", short[].class);
        META_TYPE_MAP.put("int[][]", int[].class);
        META_TYPE_MAP.put("long[][]", long[].class);
        META_TYPE_MAP.put("float[][]", float[].class);
        META_TYPE_MAP.put("double[][]", double[].class);
    }

    //-----------------------------------------------------------------------
    /**
     * Creates an iterator wrapper for a meta-property value.
     *
     * @param value  the possible collection-like object, not null
     * @param prop  the meta-property defining the value, not null
     * @param beanClass  the class of the bean, not the meta-property, for better generics, not null
     * @param allowPrimitiveArrays  whether to allow primitive arrays
     * @return the iterator, null if not a collection-like type
     */
    public SerIterator create(Object value, MetaProperty<?> prop, Class<?> beanClass, boolean allowPrimitiveArrays) {
        // byte[] is excluded from the primitive-array path - presumably
        // serialized as a special case elsewhere (NOTE(review): confirm)
        if (allowPrimitiveArrays &&
                value.getClass().isArray() &&
                value.getClass().getComponentType().isPrimitive() &&
                value.getClass().getComponentType() != byte.class) {
            return arrayPrimitive(value, prop.propertyType(), value.getClass().getComponentType());
        }
        return create(value, prop, beanClass);
    }

    /**
     * Creates an iterator wrapper for a meta-property value.
     *
     * @param value  the possible collection-like object, not null
     * @param prop  the meta-property defining the value, not null
     * @param beanClass  the class of the bean, not the meta-property, for better generics, not null
     * @return the iterator, null if not a collection-like type
     */
    public SerIterator create(Object value, MetaProperty<?> prop, Class<?> beanClass) {
        Class<?> declaredType = prop.propertyType();
        if (value instanceof Collection) {
            // resolve element generics from the meta-property, defaulting to Object
            Class<?> valueType = defaultToObjectClass(JodaBeanUtils.collectionType(prop, beanClass));
            List<Class<?>> valueTypeTypes = JodaBeanUtils.collectionTypeTypes(prop, beanClass);
            return collection((Collection<?>) value, declaredType, valueType, valueTypeTypes);
        }
        if (value instanceof Map) {
            Class<?> keyType = defaultToObjectClass(JodaBeanUtils.mapKeyType(prop, beanClass));
            Class<?> valueType = defaultToObjectClass(JodaBeanUtils.mapValueType(prop, beanClass));
            List<Class<?>> valueTypeTypes = JodaBeanUtils.mapValueTypeTypes(prop, beanClass);
            return map((Map<?, ?>) value, declaredType, keyType, valueType, valueTypeTypes);
        }
        // object arrays only; primitive arrays are handled by the overload above
        if (value.getClass().isArray() && value.getClass().getComponentType().isPrimitive() == false) {
            Object[] array = (Object[]) value;
            return array(array, declaredType, array.getClass().getComponentType());
        }
        return null;
    }

    /**
     * Creates an iterator wrapper for a value retrieved from a parent iterator.
     * <p>
     * Allows the parent iterator to define the child iterator using generic type information.
     * This handles cases such as a {@code List} as the value in a {@code Map}.
     *
     * @param value  the possible collection-like object, not null
     * @param parent  the parent iterator, not null
     * @return the iterator, null if not a collection-like type
     */
    public SerIterator createChild(Object value, SerIterator parent) {
        Class<?> declaredType = parent.valueType();
        List<Class<?>> childGenericTypes = parent.valueTypeTypes();
        if (value instanceof Collection) {
            if (childGenericTypes.size() == 1) {
                return collection((Collection<?>) value, declaredType, childGenericTypes.get(0), EMPTY_VALUE_TYPES);
            }
            // generics unknown: fall back to Object
            return collection((Collection<?>) value, Object.class, Object.class, EMPTY_VALUE_TYPES);
        }
        if (value instanceof Map) {
            if (childGenericTypes.size() == 2) {
                return map((Map<?, ?>) value, declaredType, childGenericTypes.get(0), childGenericTypes.get(1), EMPTY_VALUE_TYPES);
            }
            return map((Map<?, ?>) value, Object.class, Object.class, Object.class, EMPTY_VALUE_TYPES);
        }
        if (value.getClass().isArray() && value.getClass().getComponentType().isPrimitive() == false) {
            Object[] array = (Object[]) value;
            return array(array, Object.class, value.getClass().getComponentType());
        }
        return null;
    }

    /**
     * Defaults input class to Object class.
     *
     * @param type  the type, may be null
     * @return the type, not null
     */
    protected Class<?> defaultToObjectClass(Class<?> type) {
        return (type != null ? type : Object.class);
    }

    //-----------------------------------------------------------------------
    /**
     * Creates an iterator wrapper for a meta-type description.
     *
     * @param metaTypeDescription  the description of the collection type, not null
     * @param settings  the settings object, not null
     * @param knownTypes  the known types map, null if not using known type shortening
     * @return the iterable, null if not a collection-like type
     */
    public SerIterable createIterable(String metaTypeDescription, JodaBeanSer settings, Map<String, Class<?>> knownTypes) {
        if (metaTypeDescription.equals("Set")) {
            return set(Object.class, EMPTY_VALUE_TYPES);
        }
        if (metaTypeDescription.equals("List")) {
            return list(Object.class, EMPTY_VALUE_TYPES);
        }
        if (metaTypeDescription.equals("Collection")) {
            // plain Collection is materialized as a List
            return list(Object.class, EMPTY_VALUE_TYPES);
        }
        if (metaTypeDescription.equals("Map")) {
            return map(Object.class, Object.class, EMPTY_VALUE_TYPES);
        }
        if (metaTypeDescription.endsWith("[][][]")) {
            throw new IllegalArgumentException("Three-dimensional arrays cannot be parsed");
        }
        if (metaTypeDescription.endsWith("[][]")) {
            // two-dimensional: check the well-known map first, then decode the class name
            Class<?> type = META_TYPE_MAP.get(metaTypeDescription);
            if (type != null) {
                return array(type);
            }
            String clsStr = metaTypeDescription.substring(0, metaTypeDescription.length() - 4);
            try {
                Class<?> cls = SerTypeMapper.decodeType(clsStr, settings, null, knownTypes);
                // build the JVM binary name of the object-array class, e.g. "[Lcom.foo.Bar;"
                String compound = "[L" + cls.getName() + ";";
                return array(Class.forName(compound)); // needs to be Class.forName
            } catch (ClassNotFoundException ex) {
                throw new RuntimeException(ex);
            }
        }
        if (metaTypeDescription.endsWith("[]")) {
            Class<?> type = META_TYPE_MAP.get(metaTypeDescription);
            if (type == null) {
                String clsStr = metaTypeDescription.substring(0, metaTypeDescription.length() - 2);
                try {
                    type = SerTypeMapper.decodeType(clsStr, settings, null, knownTypes);
                } catch (ClassNotFoundException ex) {
                    throw new RuntimeException(ex);
                }
            }
            return type.isPrimitive() ? arrayPrimitive(type) : array(type);
        }
        return null;
    }

    /**
     * Creates an iterator wrapper for a child where there are second level generic parameters.
     *
     * @param iterable  the parent iterable, not null
     * @return the iterable, null if not a collection-like type
     */
    public SerIterable createIterable(SerIterable iterable) {
        List<Class<?>> valueTypeTypes = iterable.valueTypeTypes();
        if (valueTypeTypes.size() > 0) {
            Class<?> valueType = iterable.valueType();
            // check most-specific interfaces first so e.g. a NavigableSet is not
            // matched by the plain Set branch
            if (NavigableSet.class.isAssignableFrom(valueType)) {
                return navigableSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES);
            }
            if (SortedSet.class.isAssignableFrom(valueType)) {
                return sortedSet(valueTypeTypes.get(0), EMPTY_VALUE_TYPES);
            }
            if (Set.class.isAssignableFrom(valueType)) {
                return set(valueTypeTypes.get(0), EMPTY_VALUE_TYPES);
            }
            if (Collection.class.isAssignableFrom(valueType)) {  // includes List
                return list(valueTypeTypes.get(0), EMPTY_VALUE_TYPES);
            }
            if (NavigableMap.class.isAssignableFrom(valueType)) {
                if (valueTypeTypes.size() == 2) {
                    return navigableMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES);
                }
                return navigableMap(Object.class, Object.class, EMPTY_VALUE_TYPES);
            }
            if (SortedMap.class.isAssignableFrom(valueType)) {
                if (valueTypeTypes.size() == 2) {
                    return sortedMap(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES);
                }
                return sortedMap(Object.class, Object.class, EMPTY_VALUE_TYPES);
            }
            if (Map.class.isAssignableFrom(valueType)) {
                if (valueTypeTypes.size() == 2) {
                    return map(valueTypeTypes.get(0), valueTypeTypes.get(1), EMPTY_VALUE_TYPES);
                }
                return map(Object.class, Object.class, EMPTY_VALUE_TYPES);
            }
            if (valueType.isArray()) {
                if (valueType.getComponentType().isPrimitive()) {
                    return arrayPrimitive(valueType.getComponentType());
                } else {
                    return array(valueType.getComponentType());
                }
            }
        }
        return null;
    }

    /**
     * Creates an iterator wrapper for a meta-property value.
     *
     * @param prop  the meta-property defining the value, not null
     * @param beanClass  the class of the bean, not the meta-property, for better generics, not null
     * @param allowPrimitiveArrays  whether to allow primitive arrays
     * @return the iterable, null if not a collection-like type
     */
    public SerIterable createIterable(MetaProperty<?> prop, Class<?> beanClass, boolean allowPrimitiveArrays) {
        // mirrors create(Object, ...): byte[] is excluded from the primitive path
        if (allowPrimitiveArrays &&
                prop.propertyType().isArray() &&
                prop.propertyType().getComponentType().isPrimitive() &&
                prop.propertyType().getComponentType() != byte.class) {
            return arrayPrimitive(prop.propertyType().getComponentType());
        }
        return createIterable(prop, beanClass);
    }

    /**
     * Creates an iterator wrapper for a meta-property value.
     *
     * @param prop  the meta-property defining the value, not null
     * @param beanClass  the class of the bean, not the meta-property, for better generics, not null
     * @return the iterable, null if not a collection-like type
     */
    public SerIterable createIterable(MetaProperty<?> prop, Class<?> beanClass) {
        // ordered from most-specific to least-specific collection interface
        if (NavigableSet.class.isAssignableFrom(prop.propertyType())) {
            Class<?> valueType = JodaBeanUtils.collectionType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.collectionTypeTypes(prop, beanClass);
            return navigableSet(valueType, valueTypeTypes);
        }
        if (SortedSet.class.isAssignableFrom(prop.propertyType())) {
            Class<?> valueType = JodaBeanUtils.collectionType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.collectionTypeTypes(prop, beanClass);
            return sortedSet(valueType, valueTypeTypes);
        }
        if (Set.class.isAssignableFrom(prop.propertyType())) {
            Class<?> valueType = JodaBeanUtils.collectionType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.collectionTypeTypes(prop, beanClass);
            return set(valueType, valueTypeTypes);
        }
        if (Collection.class.isAssignableFrom(prop.propertyType())) {  // includes List
            Class<?> valueType = JodaBeanUtils.collectionType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.collectionTypeTypes(prop, beanClass);
            return list(valueType, valueTypeTypes);
        }
        if (NavigableMap.class.isAssignableFrom(prop.propertyType())) {
            Class<?> keyType = JodaBeanUtils.mapKeyType(prop, beanClass);
            Class<?> valueType = JodaBeanUtils.mapValueType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.mapValueTypeTypes(prop, beanClass);
            return navigableMap(keyType, valueType, valueTypeTypes);
        }
        if (SortedMap.class.isAssignableFrom(prop.propertyType())) {
            Class<?> keyType = JodaBeanUtils.mapKeyType(prop, beanClass);
            Class<?> valueType = JodaBeanUtils.mapValueType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.mapValueTypeTypes(prop, beanClass);
            return sortedMap(keyType, valueType, valueTypeTypes);
        }
        if (Map.class.isAssignableFrom(prop.propertyType())) {
            Class<?> keyType = JodaBeanUtils.mapKeyType(prop, beanClass);
            Class<?> valueType = JodaBeanUtils.mapValueType(prop, beanClass);
            List<Class<?>> valueTypeTypes = JodaBeanUtils.mapValueTypeTypes(prop, beanClass);
            return map(keyType, valueType, valueTypeTypes);
        }
        if (prop.propertyType().isArray() && prop.propertyType().getComponentType().isPrimitive() == false) {
            return array(prop.propertyType().getComponentType());
        }
        return null;
    }

    //-----------------------------------------------------------------------
    /**
     * Gets an iterable wrapper for {@code List}.
     *
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable list(
            final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final List<Object> coll = new ArrayList<>();
        return new SerIterable() {
            @Override
            public SerIterator iterator() {
                return collection(coll, Object.class, valueType, valueTypeTypes);
            }
            @Override
            public void add(Object key, Object column, Object value, int count) {
                if (key != null) {
                    throw new IllegalArgumentException("Unexpected key");
                }
                // lists may contain the same value multiple times in a row
                for (int i = 0; i < count; i++) {
                    coll.add(value);
                }
            }
            @Override
            public Object build() {
                return coll;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return valueTypeTypes;
            }
        };
    }

    /**
     * Gets an iterable wrapper for {@code Set}.
     *
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable set(final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final Set<Object> coll = new HashSet<>();
        return set(valueType, valueTypeTypes, coll);
    }

    /**
     * Gets an iterable wrapper for {@code SortedSet}.
     *
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable sortedSet(final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final SortedSet<Object> coll = new TreeSet<>();
        return set(valueType, valueTypeTypes, coll);
    }

    /**
     * Gets an iterable wrapper for {@code NavigableSet}.
     *
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable navigableSet(final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final NavigableSet<Object> coll = new TreeSet<>();
        return set(valueType, valueTypeTypes, coll);
    }

    // shared implementation: the caller supplies the concrete Set to populate
    private static SerIterable set(
            final Class<?> valueType, final List<Class<?>> valueTypeTypes, final Set<Object> coll) {
        return new SerIterable() {
            @Override
            public SerIterator iterator() {
                return collection(coll, Object.class, valueType, valueTypeTypes);
            }
            @Override
            public void add(Object key, Object column, Object value, int count) {
                if (key != null) {
                    throw new IllegalArgumentException("Unexpected key");
                }
                for (int i = 0; i < count; i++) {
                    coll.add(value);
                }
            }
            @Override
            public Object build() {
                return coll;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return valueTypeTypes;
            }
        };
    }

    /**
     * Gets an iterator wrapper for {@code Collection}.
     *
     * @param coll  the collection, not null
     * @param declaredType  the declared type, not null
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterator, not null
     */
    @SuppressWarnings("rawtypes")
    public static final SerIterator collection(
            final Collection<?> coll, final Class<?> declaredType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        return new SerIterator() {
            private final Iterator it = coll.iterator();
            private Object current;
            @Override
            public String metaTypeName() {
                // most specific name first
                if (coll instanceof Set) {
                    return "Set";
                }
                if (coll instanceof List) {
                    return "List";
                }
                return "Collection";
            }
            @Override
            public boolean metaTypeRequired() {
                // the meta-type must be written only when the declared type
                // does not already imply the runtime collection kind
                if (coll instanceof Set) {
                    return Set.class.isAssignableFrom(declaredType) == false;
                }
                if (coll instanceof List) {
                    return List.class.isAssignableFrom(declaredType) == false;
                }
                return Collection.class.isAssignableFrom(declaredType) == false;
            }
            @Override
            public int size() {
                return coll.size();
            }
            @Override
            public boolean hasNext() {
                return it.hasNext();
            }
            @Override
            public void next() {
                current = it.next();
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return valueTypeTypes;
            }
            @Override
            public Object value() {
                return current;
            }
        };
    }

    //-----------------------------------------------------------------------
    /**
     * Gets an iterable wrapper for {@code Map}.
     *
     * @param keyType  the key type, not null
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable map(final Class<?> keyType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final Map<Object, Object> map = new HashMap<>();
        return map(keyType, valueType, valueTypeTypes, map);
    }

    /**
     * Gets an iterable wrapper for {@code SortedMap}.
     *
     * @param keyType  the key type, not null
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable sortedMap(final Class<?> keyType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final SortedMap<Object, Object> map = new TreeMap<>();
        return map(keyType, valueType, valueTypeTypes, map);
    }

    /**
     * Gets an iterable wrapper for {@code NavigableMap}.
     *
     * @param keyType  the key type, not null
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterable, not null
     */
    public static final SerIterable navigableMap(final Class<?> keyType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        final NavigableMap<Object, Object> map = new TreeMap<>();
        return map(keyType, valueType, valueTypeTypes, map);
    }

    // shared implementation: the caller supplies the concrete Map to populate
    static SerIterable map(
            final Class<?> keyType, final Class<?> valueType,
            final List<Class<?>> valueTypeTypes, final Map<Object, Object> map) {
        return new SerIterable() {
            @Override
            public SerIterator iterator() {
                return map(map, Object.class, keyType, valueType, valueTypeTypes);
            }
            @Override
            public void add(Object key, Object column, Object value, int count) {
                if (key == null) {
                    throw new IllegalArgumentException("Missing key");
                }
                // a plain map cannot hold a key more than once
                if (count != 1) {
                    throw new IllegalArgumentException("Unexpected count");
                }
                map.put(key, value);
            }
            @Override
            public Object build() {
                return map;
            }
            @Override
            public SerCategory category() {
                return SerCategory.MAP;
            }
            @Override
            public Class<?> keyType() {
                return keyType;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return valueTypeTypes;
            }
        };
    }

    /**
     * Gets an iterator wrapper for {@code Map}.
     *
     * @param map  the collection, not null
     * @param declaredType  the declared type, not null
     * @param keyType  the key type, not null
     * @param valueType  the value type, not null
     * @param valueTypeTypes  the generic parameters of the value type
     * @return the iterator, not null
     */
    @SuppressWarnings("rawtypes")
    public static final SerIterator map(
            final Map<?, ?> map, final Class<?> declaredType,
            final Class<?> keyType, final Class<?> valueType, final List<Class<?>> valueTypeTypes) {
        return new SerIterator() {
            private final Iterator it = map.entrySet().iterator();
            private Entry current;
            @Override
            public String metaTypeName() {
                return "Map";
            }
            @Override
            public boolean metaTypeRequired() {
                // only write the meta-type when the declared type does not imply Map
                return Map.class.isAssignableFrom(declaredType) == false;
            }
            @Override
            public SerCategory category() {
                return SerCategory.MAP;
            }
            @Override
            public int size() {
                return map.size();
            }
            @Override
            public boolean hasNext() {
                return it.hasNext();
            }
            @Override
            public void next() {
                current = (Entry) it.next();
            }
            @Override
            public Class<?> keyType() {
                return keyType;
            }
            @Override
            public Object key() {
                return current.getKey();
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return valueTypeTypes;
            }
            @Override
            public Object value() {
                return current.getValue();
            }
        };
    }

    //-----------------------------------------------------------------------
    /**
     * Gets an iterable wrapper for an object array.
     *
     * @param valueType  the value type, not null
     * @return the iterable, not null
     */
    public static final SerIterable array(final Class<?> valueType) {
        final List<Object> list = new ArrayList<>();
        return new SerIterable() {
            @Override
            public SerIterator iterator() {
                return array(build(), Object.class, valueType);
            }
            @Override
            public void add(Object key, Object column, Object value, int count) {
                if (key != null) {
                    throw new IllegalArgumentException("Unexpected key");
                }
                if (count != 1) {
                    throw new IllegalArgumentException("Unexpected count");
                }
                // loop runs at most once given the count check above
                for (int i = 0; i < count; i++) {
                    list.add(value);
                }
            }
            @Override
            public Object[] build() {
                // reflective creation so the array has the correct component type
                Object[] array = (Object[]) Array.newInstance(valueType, list.size());
                return list.toArray(array);
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return EMPTY_VALUE_TYPES;
            }
        };
    }

    /**
     * Gets an iterable wrapper for a primitive array.
     *
     * @param valueType  the value type, not null
     * @return the iterable, not null
     */
    static final SerIterable arrayPrimitive(final Class<?> valueType) {
        final List<Object> list = new ArrayList<>();
        return new SerIterable() {
            @Override
            public SerIterator iterator() {
                return arrayPrimitive(build(), Object.class, valueType);
            }
            @Override
            public void add(Object key, Object column, Object value, int count) {
                if (key != null) {
                    throw new IllegalArgumentException("Unexpected key");
                }
                if (count != 1) {
                    throw new IllegalArgumentException("Unexpected count");
                }
                for (int i = 0; i < count; i++) {
                    list.add(value);
                }
            }
            @Override
            public Object build() {
                // java.lang.reflect.Array handles unboxing into the primitive array
                Object array = Array.newInstance(valueType, list.size());
                for (int i = 0; i < list.size(); i++) {
                    Array.set(array, i, list.get(i));
                }
                return array;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return EMPTY_VALUE_TYPES;
            }
        };
    }

    /**
     * Gets an iterator wrapper for an object array.
     *
     * @param array  the array, not null
     * @param declaredType  the declared type, not null
     * @param valueType  the value type, not null
     * @return the iterator, not null
     */
    public static final SerIterator array(
            final Object[] array, final Class<?> declaredType, final Class<?> valueType) {
        return new SerIterator() {
            private int index = -1;
            @Override
            public String metaTypeName() {
                return metaTypeNameBase(valueType);
            }
            // builds the meta-type description, recursing to append one "[]"
            // per array dimension
            private String metaTypeNameBase(Class<?> arrayType) {
                if (arrayType.isArray()) {
                    return metaTypeNameBase(arrayType.getComponentType()) + "[]";
                }
                if (arrayType == Object.class) {
                    return "Object[]";
                }
                if (arrayType == String.class) {
                    return "String[]";
                }
                return arrayType.getName() + "[]";
            }
            @Override
            public boolean metaTypeRequired() {
                // only Object[] and String[] can be implied by the declared type;
                // all other component types always need the meta-type written
                if (valueType == Object.class) {
                    return Object[].class.isAssignableFrom(declaredType) == false;
                }
                if (valueType == String.class) {
                    return String[].class.isAssignableFrom(declaredType) == false;
                }
                return true;
            }
            @Override
            public int size() {
                return array.length;
            }
            @Override
            public boolean hasNext() {
                return (index + 1) < array.length;
            }
            @Override
            public void next() {
                index++;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return Collections.emptyList();
            }
            @Override
            public Object value() {
                return array[index];
            }
        };
    }

    /**
     * Gets an iterator wrapper for a primitive array.
     *
     * @param array  the array, not null
     * @param declaredType  the declared type, not null
     * @param valueType  the value type, not null
     * @return the iterator, not null
     */
    static final SerIterator arrayPrimitive(
            final Object array, final Class<?> declaredType, final Class<?> valueType) {
        final int arrayLength = Array.getLength(array);
        return new SerIterator() {
            private int index = -1;
            @Override
            public String metaTypeName() {
                return metaTypeNameBase(valueType);
            }
            private String metaTypeNameBase(Class<?> arrayType) {
                if (arrayType.isArray()) {
                    return metaTypeNameBase(arrayType.getComponentType()) + "[]";
                }
                if (arrayType == Object.class) {
                    return "Object[]";
                }
                if (arrayType == String.class) {
                    return "String[]";
                }
                return arrayType.getName() + "[]";
            }
            @Override
            public boolean metaTypeRequired() {
                if (valueType == Object.class) {
                    return Object[].class.isAssignableFrom(declaredType) == false;
                }
                if (valueType == String.class) {
                    return String[].class.isAssignableFrom(declaredType) == false;
                }
                return true;
            }
            @Override
            public int size() {
                return arrayLength;
            }
            @Override
            public boolean hasNext() {
                return (index + 1) < arrayLength;
            }
            @Override
            public void next() {
                index++;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return Collections.emptyList();
            }
            @Override
            public Object value() {
                // reflective access boxes the primitive element
                return Array.get(array, index);
            }
        };
    }

}
| |
package org.openforis.collect.manager;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.lang3.StringUtils;
import org.openforis.collect.model.CollectRecord;
import org.openforis.collect.model.CollectRecord.Step;
import org.openforis.collect.model.CollectSurvey;
import org.openforis.collect.model.EntityAddChange;
import org.openforis.collect.model.NodeChange;
import org.openforis.collect.model.NodeChangeSet;
import org.openforis.collect.model.RecordUpdater;
import org.openforis.collect.model.SamplingDesignItem;
import org.openforis.collect.model.User;
import org.openforis.idm.metamodel.AttributeDefinition;
import org.openforis.idm.metamodel.CodeAttributeDefinition;
import org.openforis.idm.metamodel.EntityDefinition;
import org.openforis.idm.metamodel.EntityDefinition.TraversalType;
import org.openforis.idm.metamodel.ModelVersion;
import org.openforis.idm.metamodel.NodeDefinition;
import org.openforis.idm.metamodel.NodeDefinitionVisitor;
import org.openforis.idm.model.Attribute;
import org.openforis.idm.model.Code;
import org.openforis.idm.model.CodeAttribute;
import org.openforis.idm.model.Entity;
import org.openforis.idm.model.Value;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
@Component
public class RecordGenerator {

    @Autowired
    RecordManager recordManager;
    @Autowired
    SurveyManager surveyManager;
    @Autowired
    UserManager userManager;
    @Autowired
    SamplingDesignManager samplingDesignManager;

    RecordUpdater recordUpdater = new RecordUpdater();

    /**
     * Generates (and, unless in preview mode, saves) a new record whose key values are taken
     * positionally from {@code parameters.getRecordKey()}, matched against the survey's
     * key attribute definitions.
     *
     * @param survey the survey to create the record in
     * @param parameters the creation parameters (user, root entity, version, step, key values)
     * @return the newly created record
     */
    @Transactional
    public CollectRecord generate(CollectSurvey survey, NewRecordParameters parameters) {
        List<AttributeDefinition> keyDefs = getKeyAttributeDefs(survey);
        RecordKey recordKey = new RecordKey(keyDefs, parameters.getRecordKey());
        return generate(survey, parameters, recordKey);
    }

    /**
     * Generates a new record with an explicit key. The record is persisted only when it is
     * not a preview record.
     *
     * @param survey the survey to create the record in
     * @param parameters the creation parameters
     * @param recordKey the key values, mapped by key attribute path
     * @return the newly created record
     */
    @Transactional
    public CollectRecord generate(CollectSurvey survey, NewRecordParameters parameters, RecordKey recordKey) {
        User user = loadUser(parameters.getUserId(), parameters.getUsername());
        // fall back to the first root entity when no explicit root entity name is provided
        EntityDefinition rootEntityDef = StringUtils.isBlank(parameters.getRootEntityName()) ?
                survey.getSchema().getFirstRootEntityDefinition()
                : survey.getSchema().getRootEntityDefinition(parameters.getRootEntityName());
        CollectRecord record = createRecord(survey, rootEntityDef, parameters.getVersionId(),
                parameters.getStep(), user, recordKey);
        record.setPreview(parameters.isPreview());
        if (parameters.isAddSecondLevelEntities()) {
            addSecondLevelEntities(record, recordKey);
        }
        if (!record.isPreview()) {
            recordManager.save(record);
        }
        return record;
    }

    // Creates a new record (optionally bound to a model version) and applies the key values.
    private CollectRecord createRecord(CollectSurvey survey, EntityDefinition rootEntityDef,
            Integer versionId, Step step, User user, RecordKey recordKey) {
        String rootEntityName = rootEntityDef.getName();
        ModelVersion version = versionId == null ? null : survey.getVersionById(versionId);
        String versionName = version == null ? null : version.getName();
        CollectRecord record = recordManager.create(survey, rootEntityName, user, versionName, null, step);
        if (recordKey.isNotEmpty()) {
            setRecordKeyValues(record, recordKey);
        }
        return record;
    }

    // Adds one child entity per second-level sampling point item found under the record key,
    // setting each entity's key code attribute from the item's level code.
    private void addSecondLevelEntities(CollectRecord record, RecordKey recordKey) {
        CollectSurvey survey = (CollectSurvey) record.getSurvey();
        List<AttributeDefinition> nonMeasurementKeyDefs = getNonMeasurementKeyDefs(survey);
        List<String> keyValues = recordKey.getValues(nonMeasurementKeyDefs);
        List<SamplingDesignItem> secondLevelSamplingPointItems = samplingDesignManager.loadChildItems(survey.getId(), keyValues);
        List<CodeAttributeDefinition> samplingPointDataCodeAttributeDefs = findSamplingPointCodeAttributes(survey);
        if (! secondLevelSamplingPointItems.isEmpty() && samplingPointDataCodeAttributeDefs.size() > 1) {
            // NOTE(review): only level-2 entities are created (levelIndex is fixed at 1 and the
            // parent is always the root entity); confirm deeper levels are intentionally unsupported
            int levelIndex = 1;
            for (SamplingDesignItem samplingDesignItem : secondLevelSamplingPointItems) {
                CodeAttributeDefinition levelKeyDef = samplingPointDataCodeAttributeDefs.get(levelIndex);
                EntityDefinition levelEntityDef = levelKeyDef.getParentEntityDefinition();
                Entity parentLevelEntity = record.getRootEntity();
                NodeChangeSet addEntityChangeSet = recordUpdater.addEntity(parentLevelEntity, levelEntityDef);
                Entity entity = getAddedEntity(addEntityChangeSet);
                CodeAttribute keyAttr = entity.getChild(levelKeyDef);
                recordUpdater.updateAttribute(keyAttr, new Code(samplingDesignItem.getLevelCode(levelIndex + 1)));
            }
        }
    }

    // Writes each key value into the corresponding key attribute node of the record.
    private void setRecordKeyValues(CollectRecord record, RecordKey recordKey) {
        CollectSurvey survey = (CollectSurvey) record.getSurvey();
        List<AttributeDefinition> keyAttributeDefs = survey.getSchema().getFirstRootEntityDefinition()
                .getKeyAttributeDefinitions();
        for (AttributeDefinition keyAttrDef : keyAttributeDefs) {
            String keyPart = recordKey.getValue(keyAttrDef.getPath());
            Attribute<?,Value> keyAttribute = record.findNodeByPath(keyAttrDef.getPath());
            Value value = keyAttrDef.createValue(keyPart);
            recordUpdater.updateAttribute(keyAttribute, value);
        }
    }

    // Collects, breadth-first, every code attribute definition bound to the survey's
    // sampling design code list.
    private List<CodeAttributeDefinition> findSamplingPointCodeAttributes(final CollectSurvey survey) {
        EntityDefinition rootEntityDef = survey.getSchema().getFirstRootEntityDefinition();
        final List<CodeAttributeDefinition> samplingPointDataCodeAttributeDefs = new ArrayList<CodeAttributeDefinition>();
        rootEntityDef.traverse(new NodeDefinitionVisitor() {
            public void visit(NodeDefinition def) {
                if (def instanceof CodeAttributeDefinition
                        && ((CodeAttributeDefinition) def).getList().equals(survey.getSamplingDesignCodeList())) {
                    samplingPointDataCodeAttributeDefs.add((CodeAttributeDefinition) def);
                }
            }
        }, TraversalType.BFS);
        return samplingPointDataCodeAttributeDefs;
    }

    // Extracts the entity created by an add-entity change set.
    private Entity getAddedEntity(NodeChangeSet changeSet) {
        List<NodeChange<?>> changes = changeSet.getChanges();
        for (NodeChange<?> nodeChange : changes) {
            if (nodeChange instanceof EntityAddChange) {
                return (Entity) nodeChange.getNode();
            }
        }
        throw new IllegalArgumentException("Cannot find added entity in node change set");
    }

    // Key attribute definitions of the survey's first root entity.
    protected List<AttributeDefinition> getKeyAttributeDefs(CollectSurvey survey) {
        EntityDefinition rootEntityDef = survey.getSchema().getFirstRootEntityDefinition();
        return rootEntityDef.getKeyAttributeDefinitions();
    }

    // Key attribute definitions annotated as measurement attributes.
    protected List<AttributeDefinition> getMeasurementKeyDefs(CollectSurvey survey) {
        List<AttributeDefinition> keyAttrDefs = getKeyAttributeDefs(survey);
        List<AttributeDefinition> measurementKeyAttrDefs = new ArrayList<AttributeDefinition>();
        for (AttributeDefinition keyAttrDef : keyAttrDefs) {
            if (survey.getAnnotations().isMeasurementAttribute(keyAttrDef)) {
                measurementKeyAttrDefs.add(keyAttrDef);
            }
        }
        return measurementKeyAttrDefs;
    }

    // Key attribute definitions that are NOT measurement attributes.
    protected List<AttributeDefinition> getNonMeasurementKeyDefs(CollectSurvey survey) {
        List<AttributeDefinition> result = new ArrayList<AttributeDefinition>(getKeyAttributeDefs(survey));
        // removeAll replaces the previous manual Iterator.remove loop; same semantics
        result.removeAll(getMeasurementKeyDefs(survey));
        return result;
    }

    // Resolves the user by id first, then by username; null when neither is given.
    protected User loadUser(Integer userId, String username) {
        if (userId != null) {
            return userManager.loadById(userId);
        } else if (username != null) {
            return userManager.loadByUserName(username);
        } else {
            return null;
        }
    }

    /**
     * Parameter object describing how a new record should be created.
     */
    public static class NewRecordParameters {

        private String username;
        private Integer userId;
        private String rootEntityName;
        private Integer versionId;
        private Step step = Step.ENTRY;
        private boolean preview;
        private boolean addSecondLevelEntities = false;
        private boolean onlyUnanalyzedSamplingPoints = false;
        private List<String> recordKey = new ArrayList<String>();

        public String getUsername() {
            return username;
        }

        public void setUsername(String username) {
            this.username = username;
        }

        public Integer getUserId() {
            return userId;
        }

        public void setUserId(Integer userId) {
            this.userId = userId;
        }

        public String getRootEntityName() {
            return rootEntityName;
        }

        public void setRootEntityName(String rootEntityName) {
            this.rootEntityName = rootEntityName;
        }

        public Integer getVersionId() {
            return versionId;
        }

        public void setVersionId(Integer versionId) {
            this.versionId = versionId;
        }

        public Step getStep() {
            return step;
        }

        public void setStep(Step step) {
            this.step = step;
        }

        public boolean isPreview() {
            return preview;
        }

        public void setPreview(boolean preview) {
            this.preview = preview;
        }

        public boolean isAddSecondLevelEntities() {
            return addSecondLevelEntities;
        }

        public void setAddSecondLevelEntities(boolean addSecondLevelEntities) {
            this.addSecondLevelEntities = addSecondLevelEntities;
        }

        public boolean isOnlyUnanalyzedSamplingPoints() {
            return onlyUnanalyzedSamplingPoints;
        }

        public void setOnlyUnanalyzedSamplingPoints(boolean onlyUnanalyzedSamplingPoints) {
            this.onlyUnanalyzedSamplingPoints = onlyUnanalyzedSamplingPoints;
        }

        public List<String> getRecordKey() {
            return recordKey;
        }

        public void setRecordKey(List<String> recordKey) {
            this.recordKey = recordKey;
        }
    }

    /**
     * Record key: maps key attribute definition paths to their string values.
     */
    public static class RecordKey {

        private Map<String,String> valueByPath = new HashMap<String,String>();

        public RecordKey() {
        }

        /**
         * Pairs key definitions with values positionally; extra entries on either side are ignored.
         */
        public RecordKey(List<AttributeDefinition> keyDefs, List<String> keys) {
            for (int i = 0; i < keyDefs.size() && i < keys.size(); i++) {
                AttributeDefinition def = keyDefs.get(i);
                putValue(def.getPath(), keys.get(i));
            }
        }

        /**
         * Returns the values for the given key definitions, in order; null for missing paths.
         */
        public List<String> getValues(List<AttributeDefinition> keyDefs) {
            List<String> values = keyDefs.stream()
                .map(keyDef -> valueByPath.get(keyDef.getPath()))
                .collect(Collectors.toList());
            return values;
        }

        public String getValue(String path) {
            return valueByPath.get(path);
        }

        public void putValue(String path, String value) {
            valueByPath.put(path, value);
        }

        public boolean isNotEmpty() {
            return ! valueByPath.isEmpty();
        }

        public Map<String, String> getValueByPath() {
            return valueByPath;
        }

        @Override
        public int hashCode() {
            final int prime = 31;
            int result = 1;
            result = prime * result + ((valueByPath == null) ? 0 : valueByPath.hashCode());
            return result;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj)
                return true;
            if (obj == null)
                return false;
            if (getClass() != obj.getClass())
                return false;
            RecordKey other = (RecordKey) obj;
            if (valueByPath == null) {
                if (other.valueByPath != null)
                    return false;
            } else if (!valueByPath.equals(other.valueByPath))
                return false;
            return true;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.shindig.auth;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.apache.shindig.common.crypto.BasicBlobCrypter;
import org.apache.shindig.common.crypto.BlobCrypter;
import org.apache.shindig.common.util.CharsetUtil;
import org.apache.shindig.common.util.FakeTimeSource;
import org.apache.shindig.config.AbstractContainerConfig;
import org.apache.shindig.config.ContainerConfig;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
/**
* Tests for BlobCrypterSecurityTokenDecoder
*/
public class BlobCrypterSecurityTokenDecoderTest {

  private BlobCrypterSecurityTokenDecoder decoder;
  private final FakeTimeSource timeSource = new FakeTimeSource();

  @Before
  public void setUp() throws Exception {
    decoder = new DecoderWithLoadStubbedOut(makeConfig("container", "example"));
  }

  /**
   * Builds a mock config that serves a per-container key-file name and a
   * "<container>.com" signed-fetch domain for each of the given containers.
   */
  private ContainerConfig makeConfig(final String... containers) {
    return new AbstractContainerConfig() {
      @Override
      public Object getProperty(String container, String name) {
        if (BlobCrypterSecurityTokenDecoder.SECURITY_TOKEN_KEY_FILE.equals(name)) {
          return getContainerKey(container);
        }
        if (BlobCrypterSecurityTokenDecoder.SIGNED_FETCH_DOMAIN.equals(name)) {
          return container + ".com";
        }
        throw new RuntimeException("Mock not smart enough, unknown name " + name);
      }
      @Override
      public Collection<String> getContainers() {
        return Lists.newArrayList(containers);
      }
    };
  }

  protected String getContainerKey(String container) {
    return "KEY FOR CONTAINER " + container;
  }

  protected BlobCrypter getBlobCrypter(String fileName) {
    BasicBlobCrypter c = new BasicBlobCrypter(CharsetUtil.getUtf8Bytes(fileName));
    c.timeSource = timeSource;
    return c;
  }

  /**
   * Creates and encrypts a fully populated token for the "container" container.
   * Shared by the tests that decode (or tamper with) a valid token.
   */
  private String encryptStandardToken() throws Exception {
    BlobCrypterSecurityToken t = new BlobCrypterSecurityToken(
        getBlobCrypter(getContainerKey("container")), "container", null);
    t.setAppUrl("http://www.example.com/gadget.xml");
    t.setModuleId(12345L);
    t.setOwnerId("owner");
    t.setViewerId("viewer");
    t.setTrustedJson("trusted");
    return t.encrypt();
  }

  /**
   * Stubs out loading the key file.
   */
  private class DecoderWithLoadStubbedOut extends BlobCrypterSecurityTokenDecoder {
    public DecoderWithLoadStubbedOut(ContainerConfig config) {
      super(config);
    }
    /**
     * @return a crypter based on the name of the file passed in, rather than the contents
     */
    @Override
    protected BlobCrypter loadCrypterFromFile(File file) throws IOException {
      if (file.getPath().contains("fail")) {
        throw new IOException("Load failed: " + file);
      }
      return getBlobCrypter(file.getPath());
    }
  }

  @Test
  public void testCreateToken() throws Exception {
    String encrypted = encryptStandardToken();
    SecurityToken t2 = decoder.createToken(
        ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, encrypted));
    assertEquals("http://www.example.com/gadget.xml", t2.getAppId());
    assertEquals("http://www.example.com/gadget.xml", t2.getAppUrl());
    assertEquals("container.com", t2.getDomain());
    assertEquals(12345L, t2.getModuleId());
    assertEquals("owner", t2.getOwnerId());
    assertEquals("viewer", t2.getViewerId());
    assertEquals("trusted", t2.getTrustedJson());
  }

  @Test
  public void testUnknownContainer() throws Exception {
    // retarget the token at a container the decoder has never heard of
    String encrypted = encryptStandardToken().replace("container:", "other:");
    try {
      decoder.createToken(ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, encrypted));
      fail("should have reported that container was unknown");
    } catch (SecurityTokenException e) {
      assertTrue(e.getMessage(), e.getMessage().contains("Unknown container"));
    }
  }

  @Test
  public void testWrongContainer() throws Exception {
    // retarget the token at a known container whose key differs
    String encrypted = encryptStandardToken().replace("container:", "example:");
    try {
      decoder.createToken(ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, encrypted));
      fail("should have tried to decrypt with wrong key");
    } catch (SecurityTokenException e) {
      assertTrue(e.getMessage(), e.getMessage().contains("Invalid token signature"));
    }
  }

  @Test
  public void testExpired() throws Exception {
    String encrypted = encryptStandardToken();
    timeSource.incrementSeconds(3600 + 181); // one hour plus clock skew
    try {
      decoder.createToken(ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, encrypted));
      fail("should have expired");
    } catch (SecurityTokenException e) {
      assertTrue(e.getMessage(), e.getMessage().contains("Blob expired"));
    }
  }

  @Test
  public void testMalformed() throws Exception {
    try {
      decoder.createToken(ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, "foo"));
      fail("should have tried to decrypt with wrong key");
    } catch (SecurityTokenException e) {
      assertTrue(e.getMessage(), e.getMessage().contains("Invalid security token foo"));
    }
  }

  @Test
  public void testAnonymous() throws Exception {
    // blank and absent tokens both decode to the anonymous token
    SecurityToken t = decoder.createToken(
        ImmutableMap.of(SecurityTokenDecoder.SECURITY_TOKEN_NAME, " "));
    assertTrue(t.isAnonymous());
    Map<String, String> empty = ImmutableMap.of();
    t = decoder.createToken(empty);
    assertTrue(t.isAnonymous());
  }

  @Test
  public void testLoadFailure() throws Exception {
    // the stubbed loader throws for any container whose key-file name contains "fail"
    ContainerConfig config = makeConfig("container", "example", "failure");
    try {
      new DecoderWithLoadStubbedOut(config);
      fail("Should have failed to load crypter");
    } catch (RuntimeException e) {
      assertTrue(e.getMessage(), e.getMessage().contains("Load failed"));
    }
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.bootstrap;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.util.Constants;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.process.ProcessProbe;
import org.elasticsearch.node.NodeValidationException;
import java.io.BufferedReader;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.AllPermission;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.function.Predicate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* We enforce bootstrap checks once a node has the transport protocol bound to a non-loopback interface or if the system property {@code
 * es.enforce.bootstrap.checks} is set to {@code true}. In this case we assume the node is running in production and all bootstrap checks must
* pass.
*/
public final class BootstrapChecks {
    // Static utility holder; no instances.
    private BootstrapChecks() {
    }

    // System property that forces bootstrap checks to run even when bound only to loopback.
    static final String ES_ENFORCE_BOOTSTRAP_CHECKS = "es.enforce.bootstrap.checks";
/**
* Executes the bootstrap checks if the node has the transport protocol bound to a non-loopback interface. If the system property
* {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether or not
* the transport protocol is bound to a non-loopback interface.
*
* @param context the current node bootstrap context
* @param boundTransportAddress the node network bindings
*/
public static void check(final BootstrapContext context, final BoundTransportAddress boundTransportAddress,
List<BootstrapCheck> additionalChecks) throws NodeValidationException {
final List<BootstrapCheck> builtInChecks = checks();
final List<BootstrapCheck> combinedChecks = new ArrayList<>(builtInChecks);
combinedChecks.addAll(additionalChecks);
check( context,
enforceLimits(boundTransportAddress, DiscoveryModule.DISCOVERY_TYPE_SETTING.get(context.settings())),
Collections.unmodifiableList(combinedChecks));
}
/**
* Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
* property {@code es.enforce.bootstrap.checks} is set to {@code true} then the bootstrap checks will be enforced regardless of whether
* or not the transport protocol is bound to a non-loopback interface.
*
 * @param context the current node bootstrap context
* @param enforceLimits {@code true} if the checks should be enforced or otherwise warned
* @param checks the checks to execute
*/
static void check(
final BootstrapContext context,
final boolean enforceLimits,
final List<BootstrapCheck> checks) throws NodeValidationException {
check(context, enforceLimits, checks, LogManager.getLogger(BootstrapChecks.class));
}
/**
* Executes the provided checks and fails the node if {@code enforceLimits} is {@code true}, otherwise logs warnings. If the system
* property {@code es.enforce.bootstrap.checks }is set to {@code true} then the bootstrap checks will be enforced regardless of whether
* or not the transport protocol is bound to a non-loopback interface.
*
* @param context the current node boostrap context
* @param enforceLimits {@code true} if the checks should be enforced or otherwise warned
* @param checks the checks to execute
* @param logger the logger to
*/
static void check(
final BootstrapContext context,
final boolean enforceLimits,
final List<BootstrapCheck> checks,
final Logger logger) throws NodeValidationException {
final List<String> errors = new ArrayList<>();
final List<String> ignoredErrors = new ArrayList<>();
final String esEnforceBootstrapChecks = System.getProperty(ES_ENFORCE_BOOTSTRAP_CHECKS);
final boolean enforceBootstrapChecks;
if (esEnforceBootstrapChecks == null) {
enforceBootstrapChecks = false;
} else if (Boolean.TRUE.toString().equals(esEnforceBootstrapChecks)) {
enforceBootstrapChecks = true;
} else {
final String message =
String.format(
Locale.ROOT,
"[%s] must be [true] but was [%s]",
ES_ENFORCE_BOOTSTRAP_CHECKS,
esEnforceBootstrapChecks);
throw new IllegalArgumentException(message);
}
if (enforceLimits) {
logger.info("bound or publishing to a non-loopback address, enforcing bootstrap checks");
} else if (enforceBootstrapChecks) {
logger.info("explicitly enforcing bootstrap checks");
}
for (final BootstrapCheck check : checks) {
final BootstrapCheck.BootstrapCheckResult result = check.check(context);
if (result.isFailure()) {
if (!(enforceLimits || enforceBootstrapChecks) && !check.alwaysEnforce()) {
ignoredErrors.add(result.getMessage());
} else {
errors.add(result.getMessage());
}
}
}
if (!ignoredErrors.isEmpty()) {
ignoredErrors.forEach(error -> log(logger, error));
}
if (!errors.isEmpty()) {
final List<String> messages = new ArrayList<>(1 + errors.size());
messages.add("[" + errors.size() + "] bootstrap checks failed");
for (int i = 0; i < errors.size(); i++) {
messages.add("[" + (i + 1) + "]: " + errors.get(i));
}
final NodeValidationException ne = new NodeValidationException(String.join("\n", messages));
errors.stream().map(IllegalStateException::new).forEach(ne::addSuppressed);
throw ne;
}
}
    // Indirection over Logger.warn so tests can intercept ignored-check warnings.
    static void log(final Logger logger, final String error) {
        logger.warn(error);
    }
/**
* Tests if the checks should be enforced.
*
* @param boundTransportAddress the node network bindings
* @param discoveryType the discovery type
* @return {@code true} if the checks should be enforced
*/
static boolean enforceLimits(final BoundTransportAddress boundTransportAddress, final String discoveryType) {
final Predicate<TransportAddress> isLoopbackAddress = t -> t.address().getAddress().isLoopbackAddress();
final boolean bound =
!(Arrays.stream(boundTransportAddress.boundAddresses()).allMatch(isLoopbackAddress) &&
isLoopbackAddress.test(boundTransportAddress.publishAddress()));
return bound && !"single-node".equals(discoveryType);
}
// the list of checks to execute
static List<BootstrapCheck> checks() {
final List<BootstrapCheck> checks = new ArrayList<>();
checks.add(new HeapSizeCheck());
final FileDescriptorCheck fileDescriptorCheck
= Constants.MAC_OS_X ? new OsXFileDescriptorCheck() : new FileDescriptorCheck();
checks.add(fileDescriptorCheck);
checks.add(new MlockallCheck());
if (Constants.LINUX) {
checks.add(new MaxNumberOfThreadsCheck());
}
if (Constants.LINUX || Constants.MAC_OS_X) {
checks.add(new MaxSizeVirtualMemoryCheck());
}
if (Constants.LINUX || Constants.MAC_OS_X) {
checks.add(new MaxFileSizeCheck());
}
if (Constants.LINUX) {
checks.add(new MaxMapCountCheck());
}
checks.add(new ClientJvmCheck());
checks.add(new UseSerialGCCheck());
//checks.add(new SystemCallFilterCheck());
checks.add(new OnErrorCheck());
checks.add(new OnOutOfMemoryErrorCheck());
checks.add(new EarlyAccessCheck());
checks.add(new G1GCCheck());
//checks.add(new AllPermissionCheck());
return Collections.unmodifiableList(checks);
}
static class HeapSizeCheck implements BootstrapCheck {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
final long initialHeapSize = getInitialHeapSize();
final long maxHeapSize = getMaxHeapSize();
if (initialHeapSize != 0 && maxHeapSize != 0 && initialHeapSize != maxHeapSize) {
final String message = String.format(
Locale.ROOT,
"initial heap size [%d] not equal to maximum heap size [%d]; " +
"this can cause resize pauses and prevents mlockall from locking the entire heap",
getInitialHeapSize(),
getMaxHeapSize());
return BootstrapCheckResult.failure(message);
} else {
return BootstrapCheckResult.success();
}
}
// visible for testing
long getInitialHeapSize() {
return JvmInfo.jvmInfo().getConfiguredInitialHeapSize();
}
// visible for testing
long getMaxHeapSize() {
return JvmInfo.jvmInfo().getConfiguredMaxHeapSize();
}
}
    // File descriptor check with a lower threshold suitable for OS X's OPEN_MAX ceiling.
    static class OsXFileDescriptorCheck extends FileDescriptorCheck {

        OsXFileDescriptorCheck() {
            // see constant OPEN_MAX defined in
            // /usr/include/sys/syslimits.h on OS X and its use in JVM
            // initialization in int os:init_2(void) defined in the JVM
            // code for BSD (contains OS X)
            super(10240);
        }
    }
static class FileDescriptorCheck implements BootstrapCheck {
private final int limit;
FileDescriptorCheck() {
this(65535);
}
protected FileDescriptorCheck(final int limit) {
if (limit <= 0) {
throw new IllegalArgumentException("limit must be positive but was [" + limit + "]");
}
this.limit = limit;
}
public final BootstrapCheckResult check(BootstrapContext context) {
final long maxFileDescriptorCount = getMaxFileDescriptorCount();
if (maxFileDescriptorCount != -1 && maxFileDescriptorCount < limit) {
final String message = String.format(
Locale.ROOT,
"max file descriptors [%d] for elasticsearch process is too low, increase to at least [%d]",
getMaxFileDescriptorCount(),
limit);
return BootstrapCheckResult.failure(message);
} else {
return BootstrapCheckResult.success();
}
}
// visible for testing
long getMaxFileDescriptorCount() {
return ProcessProbe.getInstance().getMaxFileDescriptorCount();
}
}
static class MlockallCheck implements BootstrapCheck {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
if (BootstrapSettings.MEMORY_LOCK_SETTING.get(context.settings()) && !isMemoryLocked()) {
return BootstrapCheckResult.failure("memory locking requested for elasticsearch process but memory is not locked");
} else {
return BootstrapCheckResult.success();
}
}
// visible for testing
boolean isMemoryLocked() {
return Natives.isMemoryLocked();
}
}
static class MaxNumberOfThreadsCheck implements BootstrapCheck {
// this should be plenty for machines up to 256 cores
private static final long MAX_NUMBER_OF_THREADS_THRESHOLD = 1 << 12;
@Override
public BootstrapCheckResult check(BootstrapContext context) {
if (getMaxNumberOfThreads() != -1 && getMaxNumberOfThreads() < MAX_NUMBER_OF_THREADS_THRESHOLD) {
final String message = String.format(
Locale.ROOT,
"max number of threads [%d] for user [%s] is too low, increase to at least [%d]",
getMaxNumberOfThreads(),
BootstrapInfo.getSystemProperties().get("user.name"),
MAX_NUMBER_OF_THREADS_THRESHOLD);
return BootstrapCheckResult.failure(message);
} else {
return BootstrapCheckResult.success();
}
}
// visible for testing
long getMaxNumberOfThreads() {
return JNANatives.MAX_NUMBER_OF_THREADS;
}
}
static class MaxSizeVirtualMemoryCheck implements BootstrapCheck {
@Override
public BootstrapCheckResult check(BootstrapContext context) {
if (getMaxSizeVirtualMemory() != Long.MIN_VALUE && getMaxSizeVirtualMemory() != getRlimInfinity()) {
final String message = String.format(
Locale.ROOT,
"max size virtual memory [%d] for user [%s] is too low, increase to [unlimited]",
getMaxSizeVirtualMemory(),
BootstrapInfo.getSystemProperties().get("user.name"));
return BootstrapCheckResult.failure(message);
} else {
return BootstrapCheckResult.success();
}
}
// visible for testing
long getRlimInfinity() {
return JNACLibrary.RLIM_INFINITY;
}
// visible for testing
long getMaxSizeVirtualMemory() {
return JNANatives.MAX_SIZE_VIRTUAL_MEMORY;
}
}
/**
 * Bootstrap check that the maximum file size is unlimited (otherwise Elasticsearch could run in to an I/O exception writing files).
 */
static class MaxFileSizeCheck implements BootstrapCheck {

    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        // Long.MIN_VALUE is the sentinel compared against — presumably "limit unknown"; confirm against JNANatives
        final long maxFileSize = getMaxFileSize();
        if (maxFileSize != Long.MIN_VALUE && maxFileSize != getRlimInfinity()) {
            final String message = String.format(
                Locale.ROOT,
                "max file size [%d] for user [%s] is too low, increase to [unlimited]",
                // use the already-fetched value instead of re-invoking the (overridable) getter
                maxFileSize,
                BootstrapInfo.getSystemProperties().get("user.name"));
            return BootstrapCheckResult.failure(message);
        } else {
            return BootstrapCheckResult.success();
        }
    }

    // visible for testing
    long getRlimInfinity() {
        return JNACLibrary.RLIM_INFINITY;
    }

    // visible for testing
    long getMaxFileSize() {
        return JNANatives.MAX_FILE_SIZE;
    }

}
// Bootstrap check that vm.max_map_count is large enough; enforced only when a store may use mmap (see check()).
static class MaxMapCountCheck implements BootstrapCheck {

    // minimum acceptable vm.max_map_count: 1 << 18 == 262144
    static final long LIMIT = 1 << 18;

    @Override
    public BootstrapCheckResult check(final BootstrapContext context) {
        // we only enforce the check if a store is allowed to use mmap at all
        if (IndexModule.NODE_STORE_ALLOW_MMAP.get(context.settings())) {
            // -1 signals the value could not be read (see getMaxMapCount(Logger)); treated as "do not enforce"
            if (getMaxMapCount() != -1 && getMaxMapCount() < LIMIT) {
                final String message = String.format(
                    Locale.ROOT,
                    "max virtual memory areas vm.max_map_count [%d] is too low, increase to at least [%d]",
                    getMaxMapCount(),
                    LIMIT);
                return BootstrapCheckResult.failure(message);
            } else {
                return BootstrapCheckResult.success();
            }
        } else {
            return BootstrapCheckResult.success();
        }
    }

    // visible for testing
    long getMaxMapCount() {
        return getMaxMapCount(LogManager.getLogger(BootstrapChecks.class));
    }

    // visible for testing
    // reads /proc/sys/vm/max_map_count; returns -1 when the file is unreadable or unparsable (both are logged, not thrown)
    long getMaxMapCount(Logger logger) {
        final Path path = getProcSysVmMaxMapCountPath();
        try (BufferedReader bufferedReader = getBufferedReader(path)) {
            final String rawProcSysVmMaxMapCount = readProcSysVmMaxMapCount(bufferedReader);
            if (rawProcSysVmMaxMapCount != null) {
                try {
                    return parseProcSysVmMaxMapCount(rawProcSysVmMaxMapCount);
                } catch (final NumberFormatException e) {
                    logger.warn(() -> new ParameterizedMessage("unable to parse vm.max_map_count [{}]", rawProcSysVmMaxMapCount), e);
                }
            }
        } catch (final IOException e) {
            logger.warn(() -> new ParameterizedMessage("I/O exception while trying to read [{}]", path), e);
        }
        return -1;
    }

    @SuppressForbidden(reason = "access /proc/sys/vm/max_map_count")
    private Path getProcSysVmMaxMapCountPath() {
        return PathUtils.get("/proc/sys/vm/max_map_count");
    }

    // visible for testing
    BufferedReader getBufferedReader(final Path path) throws IOException {
        return Files.newBufferedReader(path);
    }

    // visible for testing
    // returns the first line of the file, or null if the file is empty
    String readProcSysVmMaxMapCount(final BufferedReader bufferedReader) throws IOException {
        return bufferedReader.readLine();
    }

    // visible for testing
    long parseProcSysVmMaxMapCount(final String procSysVmMaxMapCount) throws NumberFormatException {
        return Long.parseLong(procSysVmMaxMapCount);
    }

}
static class ClientJvmCheck implements BootstrapCheck {

    /**
     * Fails when the JVM self-reports as a client VM (name contains "client", case-insensitively).
     */
    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        final String vmName = getVmName();
        if (!vmName.toLowerCase(Locale.ROOT).contains("client")) {
            return BootstrapCheckResult.success();
        }
        final String message = String.format(
            Locale.ROOT,
            "JVM is using the client VM [%s] but should be using a server VM for the best performance",
            getVmName());
        return BootstrapCheckResult.failure(message);
    }

    // visible for testing
    String getVmName() {
        return JvmInfo.jvmInfo().getVmName();
    }

}
/**
 * Checks if the serial collector is in use. This collector is single-threaded and devastating
 * for performance and should not be used for a server application like Elasticsearch.
 */
static class UseSerialGCCheck implements BootstrapCheck {

    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        // pass unless the JVM reports the serial collector as enabled
        if (!getUseSerialGC().equals("true")) {
            return BootstrapCheckResult.success();
        }
        final String message = String.format(
            Locale.ROOT,
            "JVM is using the serial collector but should not be for the best performance; " +
                "either it's the default for the VM [%s] or -XX:+UseSerialGC was explicitly specified",
            JvmInfo.jvmInfo().getVmName());
        return BootstrapCheckResult.failure(message);
    }

    // visible for testing
    String getUseSerialGC() {
        return JvmInfo.jvmInfo().useSerialGC();
    }

}
/**
 * Bootstrap check that if system call filters are enabled, then system call filters must have installed successfully.
 */
static class SystemCallFilterCheck implements BootstrapCheck {

    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        final boolean filtersRequested = BootstrapSettings.SYSTEM_CALL_FILTER_SETTING.get(context.settings());
        if (filtersRequested && !isSystemCallFilterInstalled()) {
            return BootstrapCheckResult.failure(
                "system call filters failed to install; " +
                    "check the logs and fix your configuration or disable system call filters at your own risk");
        }
        return BootstrapCheckResult.success();
    }

    // visible for testing
    boolean isSystemCallFilterInstalled() {
        return Natives.isSystemCallFilterInstalled();
    }

}
abstract static class MightForkCheck implements BootstrapCheck {

    /**
     * Fails when system call filters are installed and the concrete subclass reports that the JVM
     * is configured in a way that might fork (forking would be blocked by the filters).
     */
    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        final boolean filtersInstalled = isSystemCallFilterInstalled();
        return filtersInstalled && mightFork()
            ? BootstrapCheckResult.failure(message(context))
            : BootstrapCheckResult.success();
    }

    /** Failure message describing the fork-requiring configuration. */
    abstract String message(BootstrapContext context);

    // visible for testing
    boolean isSystemCallFilterInstalled() {
        return Natives.isSystemCallFilterInstalled();
    }

    // visible for testing
    abstract boolean mightFork();

    /** Always enforced, regardless of any other bootstrap-check gating. */
    @Override
    public final boolean alwaysEnforce() {
        return true;
    }

}
static class OnErrorCheck extends MightForkCheck {

    /** The JVM might fork if a non-empty OnError command is configured. */
    @Override
    boolean mightFork() {
        final String onError = onError();
        // isEmpty() over equals("") for clarity; behavior identical
        return onError != null && !onError.isEmpty();
    }

    // visible for testing
    String onError() {
        return JvmInfo.jvmInfo().onError();
    }

    @Override
    String message(BootstrapContext context) {
        return String.format(
            Locale.ROOT,
            "OnError [%s] requires forking but is prevented by system call filters ([%s=true]);" +
                " upgrade to at least Java 8u92 and use ExitOnOutOfMemoryError",
            onError(),
            BootstrapSettings.SYSTEM_CALL_FILTER_SETTING.getKey());
    }

}
static class OnOutOfMemoryErrorCheck extends MightForkCheck {

    /** The JVM might fork if a non-empty OnOutOfMemoryError command is configured. */
    @Override
    boolean mightFork() {
        final String onOutOfMemoryError = onOutOfMemoryError();
        // isEmpty() over equals("") for clarity; behavior identical
        return onOutOfMemoryError != null && !onOutOfMemoryError.isEmpty();
    }

    // visible for testing
    String onOutOfMemoryError() {
        return JvmInfo.jvmInfo().onOutOfMemoryError();
    }

    // @Override was missing; added for consistency with OnErrorCheck (this overrides MightForkCheck#message)
    @Override
    String message(BootstrapContext context) {
        return String.format(
            Locale.ROOT,
            "OnOutOfMemoryError [%s] requires forking but is prevented by system call filters ([%s=true]);" +
                " upgrade to at least Java 8u92 and use ExitOnOutOfMemoryError",
            onOutOfMemoryError(),
            BootstrapSettings.SYSTEM_CALL_FILTER_SETTING.getKey());
    }

}
/**
 * Bootstrap check for early-access builds from OpenJDK.
 */
static class EarlyAccessCheck implements BootstrapCheck {

    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        final String javaVersion = javaVersion();
        final boolean oracleVendor = "Oracle Corporation".equals(jvmVendor());
        if (oracleVendor && javaVersion.endsWith("-ea")) {
            // early-access builds carry an "-ea" suffix in the version string
            return BootstrapCheckResult.failure(
                String.format(
                    Locale.ROOT,
                    "Java version [%s] is an early-access build, only use release builds",
                    javaVersion));
        }
        return BootstrapCheckResult.success();
    }

    String jvmVendor() {
        return Constants.JVM_VENDOR;
    }

    String javaVersion() {
        return Constants.JAVA_VERSION;
    }

}
/**
 * Bootstrap check for versions of HotSpot that are known to have issues that can lead to index corruption when G1GC is enabled.
 */
static class G1GCCheck implements BootstrapCheck {

    @Override
    public BootstrapCheckResult check(BootstrapContext context) {
        // only applicable to Oracle HotSpot on Java 8 with G1GC enabled; all other configurations pass trivially
        if ("Oracle Corporation".equals(jvmVendor()) && isJava8() && isG1GCEnabled()) {
            final String jvmVersion = jvmVersion();
            // HotSpot versions on Java 8 match this regular expression; note that this changes with Java 9 after JEP-223
            final Pattern pattern = Pattern.compile("(\\d+)\\.(\\d+)-b\\d+");
            final Matcher matcher = pattern.matcher(jvmVersion);
            final boolean matches = matcher.matches();
            // intentionally an assert (not a hard failure): an unexpected version format aborts only when -ea is set
            assert matches : jvmVersion;
            final int major = Integer.parseInt(matcher.group(1));
            final int update = Integer.parseInt(matcher.group(2));
            // HotSpot versions for Java 8 have major version 25, the bad versions are all versions prior to update 40
            if (major == 25 && update < 40) {
                final String message = String.format(
                    Locale.ROOT,
                    "JVM version [%s] can cause data corruption when used with G1GC; upgrade to at least Java 8u40", jvmVersion);
                return BootstrapCheckResult.failure(message);
            }
        }
        return BootstrapCheckResult.success();
    }

    // visible for testing
    String jvmVendor() {
        return Constants.JVM_VENDOR;
    }

    // visible for testing
    // the asserts in the helpers below document that they are only meaningful on Oracle JVMs (check() gates on jvmVendor())
    boolean isG1GCEnabled() {
        assert "Oracle Corporation".equals(jvmVendor());
        return JvmInfo.jvmInfo().useG1GC().equals("true");
    }

    // visible for testing
    String jvmVersion() {
        assert "Oracle Corporation".equals(jvmVendor());
        return Constants.JVM_VERSION;
    }

    // visible for testing
    boolean isJava8() {
        assert "Oracle Corporation".equals(jvmVendor());
        return JavaVersion.current().equals(JavaVersion.parse("1.8"));
    }

}
static class AllPermissionCheck implements BootstrapCheck {

    /**
     * Fails when the security policy effectively grants {@code AllPermission}, since that disables security.
     */
    @Override
    public final BootstrapCheckResult check(BootstrapContext context) {
        if (isAllPermissionGranted()) {
            return BootstrapCheck.BootstrapCheckResult.failure("granting the all permission effectively disables security");
        }
        return BootstrapCheckResult.success();
    }

    boolean isAllPermissionGranted() {
        final SecurityManager sm = System.getSecurityManager();
        if (sm == null) {
            // previously this dereferenced sm unconditionally and would NPE when no security manager is
            // installed; with no security manager nothing restricts permissions, so the all permission
            // is effectively granted
            return true;
        }
        try {
            sm.checkPermission(new AllPermission());
        } catch (final SecurityException e) {
            // the permission was denied, so the all permission is not granted
            return false;
        }
        return true;
    }

}
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package JMeter.plugins.functional.samplers.websocket;
import java.awt.Color;
import org.apache.jmeter.config.gui.ArgumentsPanel;
import org.apache.jmeter.protocol.http.gui.HTTPArgumentsPanel;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
*
* @author Maciej Zaleski
*/
public class WebSocketSamplerPanel extends javax.swing.JPanel {
private static final Logger log = LoggingManager.getLoggerForClass();
private HTTPArgumentsPanel attributePanel;
/**
* Creates new form WebSocketSamplerPanel
*/
public WebSocketSamplerPanel() {
initComponents();
attributePanel = new HTTPArgumentsPanel();
querystringAttributesPanel.add(attributePanel);
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jPanel1 = new javax.swing.JPanel();
jLabel1 = new javax.swing.JLabel();
serverAddressTextField = new javax.swing.JTextField();
jLabel2 = new javax.swing.JLabel();
serverPortTextField = new javax.swing.JTextField();
jPanel2 = new javax.swing.JPanel();
jLabel3 = new javax.swing.JLabel();
connectionTimeoutTextField = new javax.swing.JTextField();
jLabel17 = new javax.swing.JLabel();
responseTimeoutTextField = new javax.swing.JTextField();
jPanel3 = new javax.swing.JPanel();
jLabel4 = new javax.swing.JLabel();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
contextPathTextField = new javax.swing.JTextField();
protocolTextField = new javax.swing.JTextField();
contentEncodingTextField = new javax.swing.JTextField();
jLabel8 = new javax.swing.JLabel();
connectionIdTextField = new javax.swing.JTextField();
querystringAttributesPanel = new javax.swing.JPanel();
ignoreSslErrorsCheckBox = new javax.swing.JCheckBox();
jScrollPane1 = new javax.swing.JScrollPane();
requestPayloadEditorPane = new javax.swing.JEditorPane();
jLabel14 = new javax.swing.JLabel();
jLabel15 = new javax.swing.JLabel();
implementationComboBox = new javax.swing.JComboBox();
streamingConnectionCheckBox = new javax.swing.JCheckBox();
jPanel5 = new javax.swing.JPanel();
jLabel7 = new javax.swing.JLabel();
responsePatternTextField = new javax.swing.JTextField();
jLabel9 = new javax.swing.JLabel();
closeConncectionPatternTextField = new javax.swing.JTextField();
jLabel16 = new javax.swing.JLabel();
messageBacklogTextField = new javax.swing.JTextField();
jPanel6 = new javax.swing.JPanel();
jLabel10 = new javax.swing.JLabel();
proxyAddressTextField = new javax.swing.JTextField();
jLabel11 = new javax.swing.JLabel();
proxyPortTextField = new javax.swing.JTextField();
jLabel12 = new javax.swing.JLabel();
proxyUsernameTextField = new javax.swing.JTextField();
jLabel13 = new javax.swing.JLabel();
proxyPasswordTextField = new javax.swing.JTextField();
jPanel1.setBorder(javax.swing.BorderFactory.createTitledBorder("Web Server"));
jLabel1.setText("Server Name or IP:");
jLabel2.setText("Port Number:");
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
jPanel1.setLayout(jPanel1Layout);
jPanel1Layout.setHorizontalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel1)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(serverAddressTextField)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(serverPortTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 43, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel1Layout.setVerticalGroup(
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel1Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel1)
.addComponent(serverAddressTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel2)
.addComponent(serverPortTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel2.setBorder(javax.swing.BorderFactory.createTitledBorder("Timeout (milliseconds)"));
jLabel3.setText("Connection:");
jLabel17.setText("Response:");
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(connectionTimeoutTextField)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel17)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(responseTimeoutTextField)
.addContainerGap())
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel2Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel3)
.addComponent(connectionTimeoutTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel17)
.addComponent(responseTimeoutTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel3.setBorder(javax.swing.BorderFactory.createTitledBorder("WebSocket Request"));
jLabel4.setText("Protocol [ws/wss]:");
jLabel5.setText("Path:");
jLabel6.setText("Content encoding:");
protocolTextField.setToolTipText("");
jLabel8.setText("Connection Id:");
querystringAttributesPanel.setLayout(new javax.swing.BoxLayout(querystringAttributesPanel, javax.swing.BoxLayout.LINE_AXIS));
ignoreSslErrorsCheckBox.setText("Ignore SSL certificate errors");
jScrollPane1.setViewportView(requestPayloadEditorPane);
jLabel14.setText("Request data");
jLabel15.setText("Implementation:");
implementationComboBox.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "RFC6455 (v13)" }));
streamingConnectionCheckBox.setText("Streaming connection");
javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3);
jPanel3.setLayout(jPanel3Layout);
jPanel3Layout.setHorizontalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(querystringAttributesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jScrollPane1)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jLabel15)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(implementationComboBox, 0, 1, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel4)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(protocolTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel6)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(contentEncodingTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel8)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(connectionIdTextField))
.addGroup(jPanel3Layout.createSequentialGroup()
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel14)
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(ignoreSslErrorsCheckBox)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(streamingConnectionCheckBox)))
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(jPanel3Layout.createSequentialGroup()
.addComponent(jLabel5)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(contextPathTextField)))
.addContainerGap())
);
jPanel3Layout.setVerticalGroup(
jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel3Layout.createSequentialGroup()
.addGap(10, 10, 10)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel4)
.addComponent(protocolTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel6)
.addComponent(contentEncodingTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel8)
.addComponent(connectionIdTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel15)
.addComponent(implementationComboBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel5)
.addComponent(contextPathTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(ignoreSslErrorsCheckBox)
.addComponent(streamingConnectionCheckBox))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(querystringAttributesPanel, javax.swing.GroupLayout.DEFAULT_SIZE, 102, Short.MAX_VALUE)
.addGap(8, 8, 8)
.addComponent(jLabel14)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 118, Short.MAX_VALUE)
.addContainerGap())
);
jPanel5.setBorder(javax.swing.BorderFactory.createTitledBorder("WebSocket Response"));
jLabel7.setText("Response pattern:");
jLabel9.setText("Close connection pattern:");
jLabel16.setText("Message backlog:");
javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5);
jPanel5.setLayout(jPanel5Layout);
jPanel5Layout.setHorizontalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addComponent(jLabel7)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(responsePatternTextField)
.addGap(18, 18, 18)
.addComponent(jLabel16)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(messageBacklogTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel5Layout.createSequentialGroup()
.addComponent(jLabel9)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(closeConncectionPatternTextField)))
.addContainerGap())
);
jPanel5Layout.setVerticalGroup(
jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel16)
.addComponent(messageBacklogTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel7)
.addComponent(responsePatternTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel9)
.addComponent(closeConncectionPatternTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
jPanel6.setBorder(javax.swing.BorderFactory.createTitledBorder("Proxy Server (currently not supported by Jetty)"));
jLabel10.setText("Server Name or IP:");
proxyAddressTextField.setEnabled(false);
jLabel11.setText("Port Number:");
proxyPortTextField.setEnabled(false);
jLabel12.setText("Username:");
proxyUsernameTextField.setEnabled(false);
jLabel13.setText("Password:");
proxyPasswordTextField.setEnabled(false);
javax.swing.GroupLayout jPanel6Layout = new javax.swing.GroupLayout(jPanel6);
jPanel6.setLayout(jPanel6Layout);
jPanel6Layout.setHorizontalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel6Layout.createSequentialGroup()
.addContainerGap()
.addComponent(jLabel10)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(proxyAddressTextField)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jLabel11)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(proxyPortTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addComponent(jLabel12)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(proxyUsernameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 64, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(18, 18, 18)
.addComponent(jLabel13)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(proxyPasswordTextField, javax.swing.GroupLayout.PREFERRED_SIZE, 64, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
jPanel6Layout.setVerticalGroup(
jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel6Layout.createSequentialGroup()
.addContainerGap()
.addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(proxyUsernameTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel12))
.addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel11)
.addComponent(proxyPortTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(jPanel6Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel10)
.addComponent(proxyAddressTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel13)
.addComponent(proxyPasswordTextField, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
);
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addComponent(jPanel6, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel3, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTextField closeConncectionPatternTextField;
private javax.swing.JTextField connectionIdTextField;
private javax.swing.JTextField connectionTimeoutTextField;
private javax.swing.JTextField contentEncodingTextField;
private javax.swing.JTextField contextPathTextField;
private javax.swing.JCheckBox ignoreSslErrorsCheckBox;
private javax.swing.JComboBox implementationComboBox;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel10;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel14;
private javax.swing.JLabel jLabel15;
private javax.swing.JLabel jLabel16;
private javax.swing.JLabel jLabel17;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel3;
private javax.swing.JPanel jPanel5;
private javax.swing.JPanel jPanel6;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JTextField messageBacklogTextField;
private javax.swing.JTextField protocolTextField;
private javax.swing.JTextField proxyAddressTextField;
private javax.swing.JTextField proxyPasswordTextField;
private javax.swing.JTextField proxyPortTextField;
private javax.swing.JTextField proxyUsernameTextField;
private javax.swing.JPanel querystringAttributesPanel;
private javax.swing.JEditorPane requestPayloadEditorPane;
private javax.swing.JTextField responsePatternTextField;
private javax.swing.JTextField responseTimeoutTextField;
private javax.swing.JTextField serverAddressTextField;
private javax.swing.JTextField serverPortTextField;
private javax.swing.JCheckBox streamingConnectionCheckBox;
// End of variables declaration//GEN-END:variables
public void initFields() {
}
// ---------------------------------------------------------------------
// Configuration accessors. Each pair below simply delegates to a backing
// Swing component; values are passed through as raw strings with no
// validation or parsing here.
//
// NOTE(review): "Conncection" is a typo, but these names are the public
// API of this panel — renaming would break callers.
// ---------------------------------------------------------------------
public void setCloseConncectionPattern(String closeConncectionPattern) {
closeConncectionPatternTextField.setText(closeConncectionPattern);
}
public String getCloseConncectionPattern() {
return closeConncectionPatternTextField.getText();
}
public void setConnectionId(String connectionId) {
connectionIdTextField.setText(connectionId);
}
public String getConnectionId() {
return connectionIdTextField.getText();
}
public void setContentEncoding(String contentEncoding) {
contentEncodingTextField.setText(contentEncoding);
}
public String getContentEncoding() {
return contentEncodingTextField.getText();
}
public void setContextPath(String contextPath) {
contextPathTextField.setText(contextPath);
}
public String getContextPath() {
return contextPathTextField.getText();
}
public void setProtocol(String protocol) {
protocolTextField.setText(protocol);
}
public String getProtocol() {
return protocolTextField.getText();
}
// Proxy settings (address/port/credentials) — stored as plain text,
// including the password field's value.
public void setProxyAddress(String proxyAddress) {
proxyAddressTextField.setText(proxyAddress);
}
public String getProxyAddress() {
return proxyAddressTextField.getText();
}
public void setProxyPassword(String proxyPassword) {
proxyPasswordTextField.setText(proxyPassword);
}
public String getProxyPassword() {
return proxyPasswordTextField.getText();
}
public void setProxyPort(String proxyPort) {
proxyPortTextField.setText(proxyPort);
}
public String getProxyPort() {
return proxyPortTextField.getText();
}
public void setProxyUsername(String proxyUsername) {
proxyUsernameTextField.setText(proxyUsername);
}
public String getProxyUsername() {
return proxyUsernameTextField.getText();
}
public void setResponsePattern(String responsePattern) {
responsePatternTextField.setText(responsePattern);
}
public String getResponsePattern() {
return responsePatternTextField.getText();
}
// Timeouts are kept as strings; parsing happens elsewhere — TODO confirm.
public void setResponseTimeout(String responseTimeout) {
responseTimeoutTextField.setText(responseTimeout);
}
public String getResponseTimeout() {
return responseTimeoutTextField.getText();
}
public void setConnectionTimeout(String connectionTimeout) {
connectionTimeoutTextField.setText(connectionTimeout);
}
public String getConnectionTimeout() {
return connectionTimeoutTextField.getText();
}
public void setServerAddress(String serverAddress) {
serverAddressTextField.setText(serverAddress);
}
public String getServerAddress() {
return serverAddressTextField.getText();
}
public void setServerPort(String serverPort) {
serverPortTextField.setText(serverPort);
}
public String getServerPort() {
return serverPortTextField.getText();
}
// The request payload lives in an editor pane rather than a text field.
public void setRequestPayload(String requestPayload) {
requestPayloadEditorPane.setText(requestPayload);
}
public String getRequestPayload() {
return requestPayloadEditorPane.getText();
}
/**
 * Sets the streaming-connection checkbox.
 *
 * Fix: the original passed the boxed {@code Boolean} straight to
 * {@code setSelected(boolean)}, which auto-unboxes and throws a
 * NullPointerException when the value is null. A null argument is now
 * treated as {@code false}; non-null behavior is unchanged.
 */
public void setStreamingConnection(Boolean streamingConnection) {
    streamingConnectionCheckBox.setSelected(Boolean.TRUE.equals(streamingConnection));
}

/** @return whether the streaming-connection checkbox is selected (never null). */
public Boolean isStreamingConnection() {
    return streamingConnectionCheckBox.isSelected();
}

/**
 * Sets the ignore-SSL-errors checkbox. Null is treated as {@code false}
 * instead of throwing a NullPointerException on unboxing (see above).
 */
public void setIgnoreSslErrors(Boolean ignoreSslErrors) {
    ignoreSslErrorsCheckBox.setSelected(Boolean.TRUE.equals(ignoreSslErrors));
}

/** @return whether the ignore-SSL-errors checkbox is selected (never null). */
public Boolean isIgnoreSslErrors() {
    return ignoreSslErrorsCheckBox.isSelected();
}
// Selects/reads the sampler implementation from the combo box; the cast is
// safe only if the combo box model contains Strings — TODO confirm against
// the form initialization (not visible in this file section).
public void setImplementation(String implementation) {
implementationComboBox.setSelectedItem(implementation);
}
public String getImplementation() {
return (String) implementationComboBox.getSelectedItem();
}
// Message backlog size, stored as a raw string like the other fields.
public void setMessageBacklog(String messageBacklog) {
messageBacklogTextField.setText(messageBacklog);
}
public String getMessageBacklog() {
return messageBacklogTextField.getText();
}
/**
* @return the attributePanel holding the request arguments
*/
public ArgumentsPanel getAttributePanel() {
return attributePanel;
}
}
| |
package com.graphlib.graph.layout;
/**
 * A node participating in the layered graph layout. Identity is determined
 * solely by {@link #id}; all other fields are mutable layout state filled in
 * by the ranking, ordering and coordinate-assignment phases.
 */
public class LayoutNode {

    /** Unique identifier; the sole basis for equals()/hashCode(). */
    int id;

    /** Display label; also returned by toString(). */
    String label;

    /** Layer (rank) assigned to this node. */
    int rank;

    /** Position of this node within its rank. */
    int order;

    double xCoordinate;
    double yCoordinate;

    /** True for virtual (dummy) nodes. */
    boolean isVirtual;

    /** True for nodes that represent edge labels. */
    boolean isLabel;

    boolean isSlackNode;

    double height;
    double leftWidth;
    double rightWidth;

    /*
     * Post-order traversal number.
     */
    int lim;

    /*
     * Lowest post-order traversal number of any descendant.
     */
    int low;

    /*
     * Edge that lead to the discovery of this node in post-order traversal on
     * the feasible tree selected by the network simplex algorithm.
     */
    LayoutEdge parent;

    public LayoutNode(int id, String label) {
        this.id = id;
        this.label = label;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public int getRank() {
        return rank;
    }

    public void setRank(int rank) {
        this.rank = rank;
    }

    public int getOrder() {
        return order;
    }

    public void setOrder(int order) {
        this.order = order;
    }

    public boolean isVirtual() {
        return isVirtual;
    }

    public void setVirtual(boolean isVirtual) {
        this.isVirtual = isVirtual;
    }

    public boolean isLabel() {
        return isLabel;
    }

    // NOTE: overloads setLabel(String) with an unrelated meaning; kept as-is
    // because the name is part of the public API.
    public void setLabel(boolean isLabel) {
        this.isLabel = isLabel;
    }

    public boolean isSlackNode() {
        return isSlackNode;
    }

    public void setSlackNode(boolean isSlackNode) {
        this.isSlackNode = isSlackNode;
    }

    public int getId() {
        return id;
    }

    public double getxCoordinate() {
        return xCoordinate;
    }

    public void setxCoordinate(double xCoordinate) {
        this.xCoordinate = xCoordinate;
    }

    public double getyCoordinate() {
        return yCoordinate;
    }

    public void setyCoordinate(double yCoordinate) {
        this.yCoordinate = yCoordinate;
    }

    public double getHeight() {
        return height;
    }

    public double getHeightAboveCenter() {
        //TODO: Height above and below center may be different for different nodes.
        return height / 2;
    }

    public double getHeightBelowCenter() {
        //TODO: Height above and below center may be different for different nodes.
        return height / 2;
    }

    public void setHeight(double height) {
        this.height = height;
    }

    public double getLeftWidth() {
        return leftWidth;
    }

    public void setLeftWidth(double leftWidth) {
        this.leftWidth = leftWidth;
    }

    public double getRightWidth() {
        return rightWidth;
    }

    public void setRightWidth(double rightWidth) {
        this.rightWidth = rightWidth;
    }

    public int getLim() {
        return lim;
    }

    public void setLim(int lim) {
        this.lim = lim;
    }

    public int getLow() {
        return low;
    }

    public void setLow(int low) {
        this.low = low;
    }

    public LayoutEdge getParent() {
        return parent;
    }

    public void setParent(LayoutEdge parent) {
        this.parent = parent;
    }

    // Equivalent to 31 * 1 + id from the textbook expansion.
    @Override
    public int hashCode() {
        return 31 + id;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return id == ((LayoutNode) obj).id;
    }

    // NOTE(review): returns null if label is null (e.g. constructed with a
    // null label), which violates the Object.toString() contract — confirm
    // callers before changing.
    @Override
    public String toString() {
        return label;
    }
}
| |
/*L
* Copyright Ekagra Software Technologies Ltd.
* Copyright SAIC, SAIC-Frederick
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/cacore-sdk/LICENSE.txt for details.
*/
package test.gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation;
import gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation.Child;
import javax.ws.rs.core.Response;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.FileWriter;
import java.io.File;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.entity.FileEntity;
import org.apache.cxf.jaxrs.client.WebClient;
import org.apache.cxf.common.util.Base64Utility;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;
import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Iterator;
import test.gov.nih.nci.cacoresdk.SDKRESTfulTestBase;
import gov.nih.nci.system.applicationservice.ApplicationException;
/**
 * RESTful resource tests for the Child domain object.
 *
 * Fixes over the original: (1) FileWriter/BufferedReader were leaked when an
 * exception occurred mid-copy (and the reader was never closed at all) — now
 * closed via try-with-resources; (2) testSearch() swallowed exceptions so the
 * test passed even on failure — it now rethrows like testGet(); (3) six
 * near-identical request/verify/write sequences are factored into private
 * helpers, with all public test method signatures unchanged.
 */
public class ChildResourceTest extends SDKRESTfulTestBase
{
	public static String getTestCaseName()
	{
		return "Child RESTful Resource Test Case";
	}

	/**
	 * Issues an HTTP GET against {@code url}, sending and accepting
	 * application/xml.
	 */
	private Response invokeGet(String url)
	{
		WebClient client = WebClient.create(url);
		client.type("application/xml").accept("application/xml");
		return client.get();
	}

	/**
	 * Looks up the first Child via the application service and returns its
	 * id extension, or null when no Child instances exist.
	 */
	private String getFirstChildId() throws Exception
	{
		Child searchObject = new Child();
		Collection results = getApplicationService().search(
				"gov.nih.nci.cacoresdk.domain.onetoone.multipleassociation.Child", searchObject);
		if (results == null || results.size() == 0) {
			return null;
		}
		Child obj = (Child) ((List) results).get(0);
		return obj.getId().getExtension();
	}

	/**
	 * Asserts that an error payload is an XML document whose root element is
	 * named "response".
	 */
	private void assertErrorDocument(Response response) throws Exception
	{
		InputStream is = (InputStream) response.getEntity();
		org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
		org.jdom.Document jDoc = builder.build(is);
		assertEquals(jDoc.getRootElement().getName(), "response");
	}

	/**
	 * Validates the response status (asserting the error document for
	 * NOT_ACCEPTABLE/NOT_FOUND, failing on any other non-200 status) and then
	 * copies the response body to {@code <filePrefix>XML.xml}, echoing each
	 * line to stdout — mirroring the original per-test sequence.
	 *
	 * @throws RuntimeException on an unexpected HTTP status code
	 */
	private void verifyAndWrite(Response response, String filePrefix) throws Exception
	{
		int status = response.getStatus();
		if (status == Status.NOT_ACCEPTABLE.getStatusCode()
				|| status == Status.NOT_FOUND.getStatusCode()) {
			assertErrorDocument(response);
		}
		else if (status != 200) {
			throw new RuntimeException("Failed : HTTP error code : " + status);
		}
		File myFile = new File(filePrefix + "XML.xml");
		System.out.println("writing data to file " + myFile.getAbsolutePath());
		// try-with-resources: the original leaked both streams on exception
		// and never closed the reader.
		try (FileWriter myWriter = new FileWriter(myFile);
				BufferedReader br = new BufferedReader(
						new InputStreamReader((InputStream) response.getEntity()))) {
			String output;
			System.out.println("Output from Server .... \n");
			while ((output = br.readLine()) != null) {
				myWriter.write(output);
				System.out.println(output);
			}
			myWriter.flush();
		}
	}

	/**
	 * Wraps an exception into a 500 WebApplicationException carrying the XML
	 * error document format used by these association tests.
	 */
	private WebApplicationException toWebApplicationException(Exception e)
	{
		ResponseBuilder builder = Response.status(Status.INTERNAL_SERVER_ERROR);
		builder.type("application/xml");
		StringBuffer buffer = new StringBuffer();
		buffer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
		buffer.append("<response>");
		buffer.append("<type>ERROR</type>");
		buffer.append("<code>INTERNAL_ERROR_4</code>");
		buffer.append("<message>Failed to Query due to: " + e.getMessage() + "</message>");
		buffer.append("</response>");
		builder.entity(buffer.toString());
		return new WebApplicationException(builder.build());
	}

	/**
	 * Fetches a single Child by id and writes the XML response to a file.
	 * Skips silently when no Child data is present.
	 *
	 * @throws Exception
	 */
	public void testGet() throws Exception
	{
		try {
			String id = getFirstChildId();
			if (id == null || id.equals("")) {
				return;
			}
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/" + id), "Child");
		} catch (Exception e) {
			e.printStackTrace();
			throw e;
		}
	}

	/** Wildcard search over all Child resources. */
	public void testSearch() throws Exception
	{
		try {
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/search;id=*"), "Child_Search");
		} catch (Exception e) {
			e.printStackTrace();
			// Originally swallowed, which made the test pass on failure;
			// rethrow for consistency with testGet().
			throw e;
		}
	}

	//***************************************************
	/** Wildcard search traversing the father association. */
	public void testChild1()
	{
		try {
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/search;id=*/father"), "Child_Search");
		}
		catch (Exception e) {
			e.printStackTrace();
			throw toWebApplicationException(e);
		}
	}

	/** Fetches the father of the first available Child. */
	public void testgetFather()
	{
		try {
			String id = getFirstChildId();
			if (id == null || id.equals("")) {
				return;
			}
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/" + id + "/father"), "Child_Search");
		}
		catch (Exception e) {
			e.printStackTrace();
			throw toWebApplicationException(e);
		}
	}

	/** Wildcard search traversing the mother association. */
	public void testChild2()
	{
		try {
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/search;id=*/mother"), "Child_Search");
		}
		catch (Exception e) {
			e.printStackTrace();
			throw toWebApplicationException(e);
		}
	}

	/** Fetches the mother of the first available Child. */
	public void testgetMother()
	{
		try {
			String id = getFirstChildId();
			if (id == null || id.equals("")) {
				return;
			}
			verifyAndWrite(invokeGet(baseURL + "/rest/Child/" + id + "/mother"), "Child_Search");
		}
		catch (Exception e) {
			e.printStackTrace();
			throw toWebApplicationException(e);
		}
	}
	//********************************************************End
}
| |
/*
* Copyright (c) 2004-2013 Regents of the University of California.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the University nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* Copyright (c) 2014 Martin Stockhammer
*/
package prefux.action.layout.graph;
import java.util.Iterator;
import javafx.geometry.Rectangle2D;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import prefux.action.layout.Layout;
import prefux.data.Graph;
import prefux.data.Schema;
import prefux.data.tuple.TupleSet;
import prefux.data.util.Point2D;
import prefux.util.PrefuseLib;
import prefux.util.force.DragForce;
import prefux.util.force.ForceItem;
import prefux.util.force.ForceSimulator;
import prefux.util.force.NBodyForce;
import prefux.util.force.SpringForce;
import prefux.visual.EdgeItem;
import prefux.visual.NodeItem;
import prefux.visual.VisualItem;
/**
* <p>
* Layout that positions graph elements based on a physics simulation of
* interacting forces; by default, nodes repel each other, edges act as springs,
* and drag forces (similar to air resistance) are applied. This algorithm can
* be run for multiple iterations for a run-once layout computation or
* repeatedly run in an animated fashion for a dynamic and interactive layout.
* </p>
*
* <p>
* The running time of this layout algorithm is the greater of O(N log N) and
* O(E), where N is the number of nodes and E the number of edges. The addition
* of custom force calculation modules may, however, increase this value.
* </p>
*
* <p>
* The {@link prefux.util.force.ForceSimulator} used to drive this layout can be
* set explicitly, allowing any number of custom force directed layouts to be
* created through the user's selection of included
* {@link prefux.util.force.Force} components. Each node in the layout is mapped
* to a {@link prefux.util.force.ForceItem} instance and each edge to a
* {@link prefux.util.force.Spring} instance for storing the state of the
* simulation. See the {@link prefux.util.force} package for more.
* </p>
*
* @author <a href="http://jheer.org">jeffrey heer</a>
*/
public class ForceDirectedLayout extends Layout {
// Force simulator that integrates node positions between frames.
private ForceSimulator m_fsim;
// Wall-clock time of the previous animated run(); -1 means "not started".
private long m_lasttime = -1L;
// Upper bound (ms) on the integration timestep between animated frames.
private long m_maxstep = 50L;
// True = run m_iterations steps once; false = one step per run() call.
private boolean m_runonce;
// Iteration count used only in run-once mode.
private int m_iterations = 100;
// When true, node positions are clamped to the layout bounds.
private boolean m_enforceBounds;
// Item whose coordinates seed nodes with NaN positions; may be null.
protected transient VisualItem referrer;
// Resolved data group names for nodes and edges.
protected String m_nodeGroup;
protected String m_edgeGroup;
private static final Logger log = LogManager.getLogger(ForceDirectedLayout.class);
/**
* Create a new ForceDirectedLayout. By default, this layout will not
* restrict the layout to the layout bounds and will assume it is being run
* in animated (rather than run-once) fashion. Delegates to the three-arg
* constructor with enforceBounds=false and runonce=false.
*
* @param graph
* the data group to layout. Must resolve to a Graph instance.
*/
public ForceDirectedLayout(String graph) {
this(graph, false, false);
}
/**
* Create a new ForceDirectedLayout. The layout will assume it is being run
* in animated (rather than run-once) fashion. Delegates to the three-arg
* constructor with runonce=false.
*
* @param group
* the data group to layout. Must resolve to a Graph instance.
* @param enforceBounds
* indicates whether or not the layout should require that all
* node placements stay within the layout bounds.
*/
public ForceDirectedLayout(String group, boolean enforceBounds) {
this(group, enforceBounds, false);
}
/**
* Create a new ForceDirectedLayout.
*
* @param group
* the data group to layout. Must resolve to a Graph instance.
* @param enforceBounds
* indicates whether or not the layout should require that all
* node placements stay within the layout bounds.
* @param runonce
* indicates if the layout will be run in a run-once or animated
* fashion. In run-once mode, the layout will run for a set
* number of iterations when invoked. In animation mode, only one
* iteration of the layout is computed.
*/
public ForceDirectedLayout(String group, boolean enforceBounds,
        boolean runonce) {
    // Delegate to the (group, fsim, enforceBounds, runonce) constructor
    // instead of duplicating its field initialization inline.
    this(group, defaultSimulator(), enforceBounds, runonce);
}

/**
 * Builds the default force simulator used when none is supplied:
 * n-body repulsion between nodes, spring forces on edges, and drag.
 */
private static ForceSimulator defaultSimulator() {
    ForceSimulator fsim = new ForceSimulator();
    fsim.addForce(new NBodyForce());
    fsim.addForce(new SpringForce());
    fsim.addForce(new DragForce());
    return fsim;
}
/**
* Create a new ForceDirectedLayout. The layout will assume it is being run
* in animated (rather than run-once) fashion. Delegates to the four-arg
* constructor with runonce=false.
*
* @param group
* the data group to layout. Must resolve to a Graph instance.
* @param fsim
* the force simulator used to drive the layout computation
* @param enforceBounds
* indicates whether or not the layout should require that all
* node placements stay within the layout bounds.
*/
public ForceDirectedLayout(String group, ForceSimulator fsim,
boolean enforceBounds) {
this(group, fsim, enforceBounds, false);
}
/**
* Create a new ForceDirectedLayout. This is the most general constructor;
* the others delegate here (directly or indirectly).
*
* @param group
* the data group to layout. Must resolve to a Graph instance.
* @param fsim
* the force simulator used to drive the layout computation
* @param enforceBounds
* indicates whether or not the layout should require that all
* node placements stay within the layout bounds.
* @param runonce
* indicates if the layout will be run in a run-once or animated
* fashion. In run-once mode, the layout will run for a set
* number of iterations when invoked. In animation mode, only one
* iteration of the layout is computed.
*/
public ForceDirectedLayout(String group, ForceSimulator fsim,
boolean enforceBounds, boolean runonce) {
super(group);
m_nodeGroup = PrefuseLib.getGroupName(group, Graph.NODES);
m_edgeGroup = PrefuseLib.getGroupName(group, Graph.EDGES);
m_enforceBounds = enforceBounds;
m_runonce = runonce;
m_fsim = fsim;
}
// ------------------------------------------------------------------------
/**
* Get the maximum timestep allowed for integrating node settings between
* runs of this layout. When computation times are longer than desired, and
* node positions are changing dramatically between animated frames, the max
* step time can be lowered to suppress node movement.
*
* @return the maximum timestep allowed for integrating between two layout
* steps.
*/
public long getMaxTimeStep() {
return m_maxstep;
}
/**
* Set the maximum timestep allowed for integrating node settings between
* runs of this layout. When computation times are longer than desired, and
* node positions are changing dramatically between animated frames, the max
* step time can be lowered to suppress node movement.
*
* @param maxstep
* the maximum timestep allowed for integrating between two
* layout steps
*/
public void setMaxTimeStep(long maxstep) {
this.m_maxstep = maxstep;
}
/**
* Get the force simulator driving this layout.
*
* @return the force simulator
*/
public ForceSimulator getForceSimulator() {
return m_fsim;
}
/**
* Set the force simulator driving this layout.
*
* @param fsim
* the force simulator
*/
public void setForceSimulator(ForceSimulator fsim) {
m_fsim = fsim;
}
/**
* Get the number of iterations to use when computing a layout in run-once
* mode.
*
* @return the number of layout iterations to run
*/
public int getIterations() {
return m_iterations;
}
/**
* Set the number of iterations to use when computing a layout in run-once
* mode.
*
* @param iter
* the number of layout iterations to run
* @throws IllegalArgumentException
* if iter is less than 1
*/
public void setIterations(int iter) {
if (iter < 1)
throw new IllegalArgumentException(
"Iterations must be a positive number!");
m_iterations = iter;
}
/**
* Explicitly sets the node and edge groups to use for this layout,
* overriding the group setting passed to the constructor.
*
* @param nodeGroup
* the node data group
* @param edgeGroup
* the edge data group
*/
public void setDataGroups(String nodeGroup, String edgeGroup) {
m_nodeGroup = nodeGroup;
m_edgeGroup = edgeGroup;
}
// ------------------------------------------------------------------------
/**
* Runs one pass of the layout. In run-once mode all nodes are first stacked
* on the layout anchor and the simulator is stepped m_iterations times with
* an annealing (shrinking) timestep; in animated mode a single simulator
* step is taken using the wall-clock time since the previous call, capped
* at m_maxstep. When frac reaches 1.0 the simulation state is reset.
*
* @see prefux.action.Action#run(double)
*/
public void run(double frac) {
// perform different actions if this is a run-once or
// run-continuously layout
if (m_runonce) {
Point2D anchor = getLayoutAnchor();
Iterator<VisualItem> iter = m_vis.visibleItems(m_nodeGroup);
while (iter.hasNext()) {
VisualItem item = iter.next();
item.setX(anchor.getX());
item.setY(anchor.getY());
}
m_fsim.clear();
long timestep = 1000L;
initSimulator(m_fsim);
for (int i = 0; i < m_iterations; i++) {
// use an annealing schedule to set time step
// NOTE: timestep is a long, so this compound multiply truncates
// toward zero on every iteration.
timestep *= (1.0 - i / (double) m_iterations);
long step = timestep + 50;
// run simulator
m_fsim.runSimulator(step);
// debugging output
// if (i % 10 == 0 ) {
// System.out.println("iter: "+i);
// }
}
updateNodePositions();
} else {
// get timestep; seed m_lasttime 20ms in the past on the first call
if (m_lasttime == -1)
m_lasttime = System.currentTimeMillis() - 20;
long time = System.currentTimeMillis();
long timestep = Math.min(m_maxstep, time - m_lasttime);
m_lasttime = time;
// run force simulator
m_fsim.clear();
initSimulator(m_fsim);
long newstep=m_fsim.runSimulator(timestep);
// the simulator may report a different step; adopt it as the new cap
if (newstep!=timestep) {
m_maxstep=newstep;
}
updateNodePositions();
}
if (frac == 1.0) {
reset();
}
}
// Copies simulated positions back onto the visual items. Fixed items are
// handled specially: their force/velocity state is zeroed and the simulator
// location is synced FROM the item instead of the other way around. When
// bounds enforcement is on, positions are clamped so each item's bounding
// box stays inside the layout bounds.
private synchronized void updateNodePositions() {
Rectangle2D bounds = getLayoutBounds();
double x1 = 0, x2 = 0, y1 = 0, y2 = 0;
if (bounds != null) {
x1 = bounds.getMinX();
y1 = bounds.getMinY();
x2 = bounds.getMaxX();
y2 = bounds.getMaxY();
}
// update positions
Iterator<VisualItem> iter = m_vis.visibleItems(m_nodeGroup);
while (iter.hasNext()) {
VisualItem item = iter.next();
ForceItem fitem = (ForceItem) item.get(FORCEITEM);
if (item.isFixed()) {
// clear any force computations
fitem.force[0] = 0.0f;
fitem.force[1] = 0.0f;
fitem.velocity[0] = 0.0f;
fitem.velocity[1] = 0.0f;
// fixed items with no position yet are placed via the referrer
if (Double.isNaN(item.getX())) {
setX(item, referrer, 0.0);
setY(item, referrer, 0.0);
}
fitem.location[0]=item.getX();
fitem.location[1]=item.getY();
continue;
}
double x = fitem.location[0];
double y = fitem.location[1];
if (m_enforceBounds && bounds != null) {
Rectangle2D b = item.getBounds();
double hw = b.getWidth() / 2;
double hh = b.getHeight() / 2;
// clamp the item's center so its half-extents stay in bounds
if (x + hw > x2)
x = x2 - hw;
if (x - hw < x1)
x = x1 + hw;
if (y + hh > y2)
y = y2 - hh;
if (y - hh < y1)
y = y1 + hh;
}
// set the actual position
setX(item, referrer, x);
setY(item, referrer, y);
}
}
/**
 * Resets the force-simulation state of every visible node handled by this
 * layout: positions are re-synced from the items' end coordinates, forces
 * and velocities are zeroed, and the animation clock is restarted.
 */
public void reset() {
    Iterator<VisualItem> nodes = m_vis.visibleItems(m_nodeGroup);
    while (nodes.hasNext()) {
        VisualItem node = nodes.next();
        ForceItem state = (ForceItem) node.get(FORCEITEM);
        if (state != null) {
            state.location[0] = node.getEndX();
            state.location[1] = node.getEndY();
            state.force[0] = state.force[1] = 0;
            state.velocity[0] = state.velocity[1] = 0;
        }
    }
    // force the next animated run() to re-seed its timestep
    m_lasttime = -1L;
}
/**
* Loads the simulator with all relevant force items and springs. Nodes with
* NaN end coordinates are seeded from the referrer's position (or the
* origin when no referrer is set). Edges become springs; a negative spring
* coefficient or length means "use the simulator's global default".
*
* @param fsim
* the force simulator driving this layout
*/
protected synchronized void initSimulator(ForceSimulator fsim) {
// make sure we have force items to work with
TupleSet ts = m_vis.getGroup(m_nodeGroup);
if (ts == null)
return;
try {
ts.addColumns(FORCEITEM_SCHEMA);
} catch (IllegalArgumentException iae) { /* ignored: columns already exist */
}
double startX = (referrer == null ? 0f : referrer.getX());
double startY = (referrer == null ? 0f : referrer.getY());
startX = Double.isNaN(startX) ? 0f : startX;
startY = Double.isNaN(startY) ? 0f : startY;
Iterator<VisualItem> iter = m_vis.visibleItems(m_nodeGroup);
while (iter.hasNext()) {
VisualItem item = (VisualItem) iter.next();
ForceItem fitem = (ForceItem) item.get(FORCEITEM);
fitem.mass = getMassValue(item);
double x = item.getEndX();
double y = item.getEndY();
fitem.location[0] = (Double.isNaN(x) ? startX : x);
fitem.location[1] = (Double.isNaN(y) ? startY : y);
fsim.addItem(fitem);
}
if (m_edgeGroup != null) {
iter = m_vis.visibleItems(m_edgeGroup);
while (iter.hasNext()) {
EdgeItem e = (EdgeItem) iter.next();
NodeItem n1 = e.getSourceItem();
ForceItem f1 = (ForceItem) n1.get(FORCEITEM);
NodeItem n2 = e.getTargetItem();
ForceItem f2 = (ForceItem) n2.get(FORCEITEM);
double coeff = getSpringCoefficient(e);
double slen = getSpringLength(e);
fsim.addSpring(f1, f2, (coeff >= 0 ? coeff : -1.),
(slen >= 0 ? slen : -1.));
}
}
}
/**
 * Get the mass value associated with the given node. Subclasses should
 * override this method to perform custom mass assignment.
 *
 * @param n
 *            the node for which to compute the mass value
 * @return the mass value for the node. By default, all items are given a
 *         mass value of 1.0.
 */
protected double getMassValue(VisualItem n) {
    // use a double literal; the previous 1.0f was a float literal implicitly
    // widened to double on return
    return 1.0;
}
/**
 * Get the spring length for the given edge. Subclasses should override this
 * method to perform custom spring length assignment.
 *
 * @param e
 *            the edge for which to compute the spring length
 * @return the spring length for the edge; returning -1 means "ignore this
 *         method and fall back to the global default".
 */
protected double getSpringLength(EdgeItem e) {
    return -1.0;
}
/**
 * Get the spring coefficient for the given edge, which controls the tension
 * or strength of the spring. Subclasses should override this method to
 * perform custom spring tension assignment.
 *
 * @param e
 *            the edge for which to compute the spring coefficient
 * @return the spring coefficient for the edge; returning -1 means "ignore
 *         this method and fall back to the global default".
 */
protected double getSpringCoefficient(EdgeItem e) {
    return -1.0;
}
/**
 * Get the referrer item used to seed x or y coordinates that are
 * initialized to NaN.
 *
 * @return the referrer item, possibly {@code null} when none is set
 * @see prefux.util.PrefuseLib#setX(VisualItem, VisualItem, double)
 * @see prefux.util.PrefuseLib#setY(VisualItem, VisualItem, double)
 */
public VisualItem getReferrer() {
    return this.referrer;
}
/**
 * Set the referrer item used to seed x or y coordinates that are
 * initialized to NaN.
 *
 * @param referrer
 *            the referrer item to use; may be {@code null} to clear it
 * @see prefux.util.PrefuseLib#setX(VisualItem, VisualItem, double)
 * @see prefux.util.PrefuseLib#setY(VisualItem, VisualItem, double)
 */
public void setReferrer(VisualItem referrer) {
    this.referrer = referrer;
}
// ------------------------------------------------------------------------
// ForceItem Schema Addition
/**
 * The data field in which the parameters used by this layout are stored.
 * initSimulator() adds this column to the node group's tuple set.
 */
public static final String FORCEITEM = "_forceItem";
/**
 * The schema for the parameters used by this layout: a single
 * ForceItem-valued column keyed by FORCEITEM.
 */
public static final Schema FORCEITEM_SCHEMA = new Schema();
static {
// NOTE(review): a single ForceItem instance is passed as the column
// default — confirm the Schema clones it per row rather than sharing it.
FORCEITEM_SCHEMA.addColumn(FORCEITEM, ForceItem.class, new ForceItem());
}
} // end of class ForceDirectedLayout
| |
/*
* Copyright 2021-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.kubevirtnetworking.impl;
import org.onlab.packet.ARP;
import org.onlab.packet.EthType;
import org.onlab.packet.Ethernet;
import org.onlab.packet.Ip4Address;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.packet.VlanId;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.LeadershipService;
import org.onosproject.cluster.NodeId;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.CoreService;
import org.onosproject.kubevirtnetworking.api.KubevirtFlowRuleService;
import org.onosproject.kubevirtnetworking.api.KubevirtNetworkAdminService;
import org.onosproject.kubevirtnetworking.api.KubevirtPeerRouter;
import org.onosproject.kubevirtnetworking.api.KubevirtRouter;
import org.onosproject.kubevirtnetworking.api.KubevirtRouterAdminService;
import org.onosproject.kubevirtnetworking.api.KubevirtRouterEvent;
import org.onosproject.kubevirtnetworking.api.KubevirtRouterListener;
import org.onosproject.kubevirtnetworking.util.KubevirtNetworkingUtil;
import org.onosproject.kubevirtnode.api.KubevirtNode;
import org.onosproject.kubevirtnode.api.KubevirtNodeService;
import org.onosproject.net.PortNumber;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.packet.DefaultOutboundPacket;
import org.onosproject.net.packet.InboundPacket;
import org.onosproject.net.packet.PacketContext;
import org.onosproject.net.packet.PacketProcessor;
import org.onosproject.net.packet.PacketService;
import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Deactivate;
import org.osgi.service.component.annotations.Reference;
import org.osgi.service.component.annotations.ReferenceCardinality;
import org.slf4j.Logger;
import java.nio.ByteBuffer;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import static java.util.concurrent.Executors.newSingleThreadExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.kubevirtnetworking.api.Constants.DEFAULT_GATEWAY_MAC;
import static org.onosproject.kubevirtnetworking.api.Constants.GW_ENTRY_TABLE;
import static org.onosproject.kubevirtnetworking.api.Constants.KUBEVIRT_NETWORKING_APP_ID;
import static org.onosproject.kubevirtnetworking.api.Constants.PRIORITY_ARP_GATEWAY_RULE;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Handles ARP packets exchanged with the external peer router. The handler
 * learns the peer router's MAC address by emitting an ARP request from the
 * elected gateway node's external patch port and punting the matching ARP
 * reply to the controller, where the learned MAC is stored on the router.
 */
@Component(immediate = true)
public class KubevirtRoutingArpHandler {

    protected final Logger log = getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected CoreService coreService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected ClusterService clusterService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected LeadershipService leadershipService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected PacketService packetService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected KubevirtRouterAdminService kubevirtRouterService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected KubevirtNetworkAdminService kubevirtNetworkService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected KubevirtNodeService kubevirtNodeService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY)
    protected KubevirtFlowRuleService kubevirtFlowRuleService;

    // serializes all router event processing onto one background thread
    private final ExecutorService eventExecutor = newSingleThreadExecutor(
            groupedThreads(this.getClass().getSimpleName(), "event-handler"));

    private final PacketProcessor packetProcessor = new InternalPacketProcessor();
    private final InternalRouterEventListener kubevirtRouterlistener =
            new InternalRouterEventListener();

    private ApplicationId appId;
    private NodeId localNodeId;

    @Activate
    protected void activate() {
        appId = coreService.registerApplication(KUBEVIRT_NETWORKING_APP_ID);
        localNodeId = clusterService.getLocalNode().id();
        leadershipService.runForLeadership(appId.name());
        packetService.addProcessor(packetProcessor, PacketProcessor.director(1));
        kubevirtRouterService.addListener(kubevirtRouterlistener);
        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        leadershipService.withdraw(appId.name());
        packetService.removeProcessor(packetProcessor);
        kubevirtRouterService.removeListener(kubevirtRouterlistener);
        eventExecutor.shutdown();
        log.info("Stopped");
    }

    /**
     * Triggers ARP request to retrieve the peer router MAC address.
     *
     * @param router kubevirt router
     * @param peerRouterIp peer router IP address
     */
    private void retrievePeerRouterMac(KubevirtRouter router, IpAddress peerRouterIp) {
        log.info("Sending ARP request to the peer router {} to retrieve the MAC address.",
                peerRouterIp.getIp4Address().toString());
        String routerSnatIp = router.external().keySet().stream().findAny().orElse(null);
        if (routerSnatIp == null) {
            return;
        }
        IpAddress sourceIp = IpAddress.valueOf(routerSnatIp);
        MacAddress sourceMac = DEFAULT_GATEWAY_MAC;
        Ethernet ethRequest = ARP.buildArpRequest(sourceMac.toBytes(),
                sourceIp.toOctets(),
                peerRouterIp.toOctets(), VlanId.NO_VID);

        KubevirtNode gatewayNode = kubevirtNodeService.node(router.electedGateway());
        if (gatewayNode == null) {
            return;
        }
        PortNumber externalPatchPortNum =
                KubevirtNetworkingUtil.externalPatchPortNum(deviceService, gatewayNode);
        if (externalPatchPortNum == null) {
            return;
        }
        // emit the ARP request out of the gateway's external patch port
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .setOutput(externalPatchPortNum)
                .build();
        packetService.emit(new DefaultOutboundPacket(
                gatewayNode.intgBridge(),
                treatment,
                ByteBuffer.wrap(ethRequest.serialize())));
    }

    /**
     * Sets default ARP flow rule to retrieve peer router MAC address.
     *
     * @param routerSnatIp router SNAT IP
     * @param peerRouterIp peer router IP
     * @param gatewayNodeId gateway node
     * @param install install if true, uninstall otherwise
     */
    private void setRuleArpRequestToController(IpAddress routerSnatIp,
                                               IpAddress peerRouterIp,
                                               String gatewayNodeId,
                                               boolean install) {
        KubevirtNode gatewayNode = kubevirtNodeService.node(gatewayNodeId);
        if (gatewayNode == null) {
            return;
        }
        if (routerSnatIp == null) {
            return;
        }
        // match ARP replies from the peer router addressed to the SNAT IP
        TrafficSelector selector = DefaultTrafficSelector.builder()
                .matchEthType(EthType.EtherType.ARP.ethType().toShort())
                .matchArpOp(ARP.OP_REPLY)
                .matchArpSpa(peerRouterIp.getIp4Address())
                .matchArpTpa(routerSnatIp.getIp4Address())
                .build();
        TrafficTreatment treatment = DefaultTrafficTreatment.builder()
                .punt()
                .build();
        kubevirtFlowRuleService.setRule(
                appId,
                gatewayNode.intgBridge(),
                selector,
                treatment,
                PRIORITY_ARP_GATEWAY_RULE,
                GW_ENTRY_TABLE,
                install
        );
    }

    /**
     * Reacts to router lifecycle events by installing or removing the
     * ARP-punt rule and (re-)triggering peer router MAC discovery.
     */
    private class InternalRouterEventListener implements KubevirtRouterListener {

        // only the cluster leader for this app handles events
        private boolean isRelevantHelper() {
            return Objects.equals(localNodeId, leadershipService.getLeader(appId.name()));
        }

        @Override
        public void event(KubevirtRouterEvent event) {
            switch (event.type()) {
                case KUBEVIRT_GATEWAY_NODE_ATTACHED:
                case KUBEVIRT_ROUTER_EXTERNAL_NETWORK_ATTACHED:
                    eventExecutor.execute(() -> processRouterExternalNetAttachedOrGwAttached(event.subject()));
                    break;
                case KUBEVIRT_ROUTER_REMOVED:
                    eventExecutor.execute(() -> processRouterRemoved(event.subject()));
                    break;
                case KUBEVIRT_ROUTER_EXTERNAL_NETWORK_DETACHED:
                    eventExecutor.execute(() -> processRouterExternalNetDetached(event.subject(),
                            event.externalIp(), event.externalPeerRouterIp()));
                    break;
                case KUBEVIRT_GATEWAY_NODE_DETACHED:
                    eventExecutor.execute(() -> processRouterGatewayNodeDetached(event.subject(), event.gateway()));
                    break;
                case KUBEVIRT_GATEWAY_NODE_CHANGED:
                    eventExecutor.execute(() -> processRouterGatewayNodeChanged(event.subject(),
                            event.gateway()));
                    break;
                default:
                    // do nothing
                    break;
            }
        }

        // a gateway change is treated as detach from the old node + attach to the new one
        private void processRouterGatewayNodeChanged(KubevirtRouter router, String oldGateway) {
            if (!isRelevantHelper()) {
                return;
            }
            processRouterGatewayNodeDetached(router, oldGateway);
            processRouterExternalNetAttachedOrGwAttached(router);
        }

        private void processRouterExternalNetAttachedOrGwAttached(KubevirtRouter router) {
            if (!isRelevantHelper()) {
                return;
            }
            KubevirtNode gatewayNode = kubevirtNodeService.node(router.electedGateway());
            if (gatewayNode == null) {
                return;
            }
            String routerSnatIp = router.external().keySet().stream().findAny().orElse(null);
            if (routerSnatIp == null) {
                return;
            }
            // discover the peer MAC only when it is not yet known
            if (router.peerRouter() != null &&
                    router.peerRouter().macAddress() == null &&
                    router.peerRouter().ipAddress() != null) {
                setRuleArpRequestToController(IpAddress.valueOf(routerSnatIp),
                        router.peerRouter().ipAddress(), gatewayNode.hostname(), true);
                retrievePeerRouterMac(router, router.peerRouter().ipAddress());
            }
        }

        private void processRouterExternalNetDetached(KubevirtRouter router, String routerSnatIp,
                                                      String peerRouterIp) {
            // FIX: log message previously referenced a nonexistent method name
            log.info("processRouterExternalNetDetached called");
            if (!isRelevantHelper()) {
                return;
            }
            if (router.electedGateway() == null) {
                return;
            }
            KubevirtNode gatewayNode = kubevirtNodeService.node(router.electedGateway());
            if (gatewayNode == null) {
                return;
            }
            if (routerSnatIp == null || peerRouterIp == null) {
                return;
            }
            setRuleArpRequestToController(IpAddress.valueOf(routerSnatIp),
                    IpAddress.valueOf(peerRouterIp), gatewayNode.hostname(), false);
        }

        private void processRouterRemoved(KubevirtRouter router) {
            if (!isRelevantHelper()) {
                return;
            }
            if (router.electedGateway() == null) {
                return;
            }
            KubevirtNode gatewayNode = kubevirtNodeService.node(router.electedGateway());
            if (gatewayNode == null) {
                return;
            }
            String routerSnatIp = router.external().keySet().stream().findAny().orElse(null);
            if (routerSnatIp == null) {
                return;
            }
            // FIX: guard against routers without a configured peer router; the
            // previous code dereferenced peerRouter() unconditionally and could
            // throw an NPE (sibling handlers all perform this null check).
            if (router.peerRouter() == null) {
                return;
            }
            IpAddress peerRouterIp = router.peerRouter().ipAddress();
            if (peerRouterIp == null) {
                return;
            }
            setRuleArpRequestToController(IpAddress.valueOf(routerSnatIp),
                    peerRouterIp, gatewayNode.hostname(), false);
        }

        private void processRouterGatewayNodeDetached(KubevirtRouter router, String detachedGatewayNode) {
            if (!isRelevantHelper()) {
                return;
            }
            if (detachedGatewayNode == null) {
                return;
            }
            String routerSnatIp = router.external().keySet().stream().findAny().orElse(null);
            if (routerSnatIp == null) {
                return;
            }
            if (router.peerRouter() != null && router.peerRouter().ipAddress() != null) {
                setRuleArpRequestToController(IpAddress.valueOf(routerSnatIp),
                        router.peerRouter().ipAddress(), detachedGatewayNode, false);
            }
        }
    }

    /**
     * Inspects punted ARP replies; when a reply comes from a known peer router
     * and targets a router's SNAT IP, stores the sender's MAC on that router.
     */
    private class InternalPacketProcessor implements PacketProcessor {

        @Override
        public void process(PacketContext context) {
            if (context.isHandled()) {
                return;
            }
            InboundPacket pkt = context.inPacket();
            Ethernet ethernet = pkt.parsed();
            if (ethernet != null && ethernet.getEtherType() == Ethernet.TYPE_ARP) {
                processArpPacket(ethernet);
            }
        }

        private void processArpPacket(Ethernet ethernet) {
            ARP arp = (ARP) ethernet.getPayload();
            // only ARP replies carry the MAC we are waiting for
            if (arp.getOpCode() == ARP.OP_REQUEST) {
                return;
            }
            IpAddress spa = Ip4Address.valueOf(arp.getSenderProtocolAddress());
            MacAddress sha = MacAddress.valueOf(arp.getSenderHardwareAddress());
            IpAddress tpa = Ip4Address.valueOf(arp.getTargetProtocolAddress());

            // find the router whose peer sent this reply to its SNAT IP
            KubevirtRouter router = kubevirtRouterService.routers().stream()
                    .filter(r -> r.peerRouter() != null && r.peerRouter().ipAddress().equals(spa))
                    .filter(r -> {
                        String routerSnatIp = r.external().keySet().stream().findAny().orElse(null);
                        if (routerSnatIp == null) {
                            return false;
                        }
                        return IpAddress.valueOf(routerSnatIp).equals(tpa);
                    })
                    .findAny().orElse(null);
            if (router == null) {
                return;
            }
            KubevirtPeerRouter peerRouter = new KubevirtPeerRouter(spa, sha);
            // FIX: corrected "mac adress" typo in the log message
            log.info("Update peer router MAC address {} to router {}", peerRouter.macAddress(), router.name());
            kubevirtRouterService.updatePeerRouterMac(router.name(), sha);
        }
    }
}
| |
package org.archboy.clobaframe.web.theme;
import com.fasterxml.jackson.databind.util.ISO8601DateFormat;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import javax.inject.Inject;
import org.archboy.clobaframe.resource.ResourceManager;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import static org.junit.Assert.*;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.util.Assert;
/**
 * Integration tests for {@code ThemeManager}: verifies the built-in "base"
 * theme package and the locally installed packages, covering package
 * metadata, resource listing, resource lookup and web-resource exposure.
 *
 * @author yang
 */
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration("src/test/resources/webapp")
@ContextConfiguration(locations = { "/webapp/WEB-INF/servlet.xml"})
public class ThemeManagerTest {

    @Inject
    private ThemeManager themeManager;

    @Inject
    private ResourceManager resourceManager;

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }

    @Test
    public void testListBase() {
        // the base catalog must contain exactly the built-in base package
        Collection<ThemePackage> packages1 = themeManager.list(ThemeManager.PACKAGE_CATALOG_BASE);
        assertEquals(1, packages1.size());

        ThemePackage basePackage = packages1.iterator().next();
        assertEquals(ThemeManager.PACKAGE_ID_BASE, basePackage.getId());

        // the base package carries no optional metadata
        assertNull(basePackage.getAuthorName());
        assertEquals(ThemeManager.PACKAGE_CATALOG_BASE, basePackage.getCatalog());
        assertNull(basePackage.getDescription());
        assertNull(basePackage.getLastModified());
        assertEquals(ThemeManager.PACKAGE_ID_BASE, basePackage.getId());
        assertNull(basePackage.getVersion());
        assertNull(basePackage.getWebsite());

        // expected resources, split by content type
        List<String> imageCssJs1 = Arrays.asList("resource/css/common.css", "resource/css/index.css",
                "resource/image/loading-16x16.gif", "resource/js/index.js",
                "resource/js/jquery-1.11.1.js", "resource/js/jquery-1.11.1.min.js",
                "resource/js/i18n/messages.js", "resource/js/i18n/messages_zh_CN.js");
        List<String> templates1 = Arrays.asList("template/index.vm", "template/layout/default.vm",
                "template/share/footer.vm", "template/share/header.vm");

        // listResource() must report every expected item with the right type
        Collection<ThemeResourceInfo> themeResourceInfos1 = basePackage.listResource();
        for (String name : imageCssJs1) {
            boolean found = false;
            for (ThemeResourceInfo info : themeResourceInfos1) {
                if (info.getName().equals(name) && info.getContentType() == ThemeResourceInfo.TYPE_RESOURCE) {
                    found = true;
                    break;
                }
            }
            assertTrue(found);
        }
        for (String name : templates1) {
            boolean found = false;
            for (ThemeResourceInfo info : themeResourceInfos1) {
                if (info.getName().equals(name) && info.getContentType() == ThemeResourceInfo.TYPE_TEMPLATE) {
                    found = true;
                    break;
                }
            }
            assertTrue(found);
        }

        // every listed resource must be retrievable by name
        for (String name : imageCssJs1) {
            assertNotNull(basePackage.getResource(name));
        }
        for (String name : templates1) {
            assertNotNull(basePackage.getResource(name));
        }

        // getting a non-existent resource returns null
        assertNull(basePackage.getResource("none-exists"));

        // static resources are also exposed through the ResourceManager
        for (String name : imageCssJs1) {
            String webResourceName = name.substring("resource/".length());
            assertNotNull(resourceManager.get(webResourceName));
        }

        // get package by catalog and id
        ThemePackage themePackage1 = themeManager.get(ThemeManager.PACKAGE_CATALOG_BASE,
                ThemeManager.PACKAGE_ID_BASE);
        assertEquals(basePackage, themePackage1);

        // getting a non-existent package returns null.
        // FIX: use JUnit's assertNull instead of Spring's Assert.isNull — the
        // latter is a production precondition check (throws
        // IllegalArgumentException), not a test assertion.
        assertNull(themeManager.get(
                ThemeManager.PACKAGE_CATALOG_BASE,
                "none-exists"));
    }

    @Test
    public void testListLocal() throws ParseException {
        // the local catalog must contain the two sample packages
        Collection<ThemePackage> packages1 = themeManager.list(ThemeManager.PACKAGE_CATALOG_LOCAL);
        assertEquals(2, packages1.size());

        List<String> packageNames1 = Arrays.asList("dark", "flat");
        for (ThemePackage themePackage : packages1) {
            assertTrue(packageNames1.contains(themePackage.getId()));
        }

        // the "dark" package carries full metadata from its info.json
        String dateString1 = "2015-06-21T18:35:30.555Z";
        DateFormat dateFormat1 = new ISO8601DateFormat();
        Date date1 = dateFormat1.parse(dateString1);

        ThemePackage themePackage1 = themeManager.get(ThemeManager.PACKAGE_CATALOG_LOCAL, "dark");
        assertEquals("yang", themePackage1.getAuthorName());
        assertEquals(ThemeManager.PACKAGE_CATALOG_LOCAL, themePackage1.getCatalog());
        assertEquals("a test theme", themePackage1.getDescription());
        assertDateEquals(date1, themePackage1.getLastModified());
        assertEquals("The Dark Theme", themePackage1.getTitle());
        assertEquals("dark", themePackage1.getId());
        assertEquals("1.0.1", themePackage1.getVersion());
        assertEquals("http://archboy.org", themePackage1.getWebsite());

        // every expected resource must be listed and retrievable
        List<String> resources1 = Arrays.asList(
                "info.json",
                "resource/css/dark.css",
                "resource/css/index.css",
                "resource/image/dark.png",
                "resource/js/dark.js",
                "template/index.vm");

        Collection<ThemeResourceInfo> themeResourceInfos1 = themePackage1.listResource();
        for (String name : resources1) {
            boolean found = false;
            for (ThemeResourceInfo info : themeResourceInfos1) {
                if (info.getName().equals(name)) {
                    found = true;
                    break;
                }
            }
            assertTrue(found);
        }
        for (String name : resources1) {
            assertNotNull(themePackage1.getResource(name));
        }

        // getting a non-existent resource returns null
        assertNull(themePackage1.getResource("none-exists"));

        // local theme resources are exposed under the "theme/<id>/" prefix
        for (String name : resources1) {
            String resourceName = "theme/dark/" + name;
            assertNotNull(resourceManager.get(resourceName));
        }
    }

    /**
     * Asserts two dates are equal within a one-second tolerance; a pair of
     * nulls is considered equal, a single null fails.
     */
    private static void assertDateEquals(Date expected, Date actual) {
        if (expected == null && actual == null) {
            return;
        }
        if (expected == null || actual == null) {
            fail("date not equals");
        }
        assertTrue(Math.abs(expected.getTime() - actual.getTime()) < 1000);
    }
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.net;
import static android.system.OsConstants.AF_INET;
import static android.system.OsConstants.AF_INET6;
import android.annotation.SystemApi;
import android.app.Activity;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.content.pm.IPackageManager;
import android.content.pm.PackageManager;
import android.net.Network;
import android.net.NetworkUtils;
import android.os.Binder;
import android.os.IBinder;
import android.os.Parcel;
import android.os.ParcelFileDescriptor;
import android.os.RemoteException;
import android.os.ServiceManager;
import android.os.UserHandle;
import com.android.internal.net.VpnConfig;
import java.net.DatagramSocket;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
/**
* VpnService is a base class for applications to extend and build their
* own VPN solutions. In general, it creates a virtual network interface,
* configures addresses and routing rules, and returns a file descriptor
* to the application. Each read from the descriptor retrieves an outgoing
* packet which was routed to the interface. Each write to the descriptor
* injects an incoming packet just like it was received from the interface.
* The interface is running on Internet Protocol (IP), so packets are
* always started with IP headers. The application then completes a VPN
* connection by processing and exchanging packets with the remote server
* over a tunnel.
*
* <p>Letting applications intercept packets raises huge security concerns.
* A VPN application can easily break the network. Besides, two of them may
* conflict with each other. The system takes several actions to address
* these issues. Here are some key points:
* <ul>
* <li>User action is required the first time an application creates a VPN
* connection.</li>
* <li>There can be only one VPN connection running at the same time. The
* existing interface is deactivated when a new one is created.</li>
* <li>A system-managed notification is shown during the lifetime of a
* VPN connection.</li>
* <li>A system-managed dialog gives the information of the current VPN
* connection. It also provides a button to disconnect.</li>
* <li>The network is restored automatically when the file descriptor is
* closed. It also covers the cases when a VPN application is crashed
* or killed by the system.</li>
* </ul>
*
* <p>There are two primary methods in this class: {@link #prepare} and
* {@link Builder#establish}. The former deals with user action and stops
* the VPN connection created by another application. The latter creates
* a VPN interface using the parameters supplied to the {@link Builder}.
* An application must call {@link #prepare} to grant the right to use
* other methods in this class, and the right can be revoked at any time.
* Here are the general steps to create a VPN connection:
* <ol>
* <li>When the user presses the button to connect, call {@link #prepare}
* and launch the returned intent, if non-null.</li>
* <li>When the application becomes prepared, start the service.</li>
* <li>Create a tunnel to the remote server and negotiate the network
* parameters for the VPN connection.</li>
* <li>Supply those parameters to a {@link Builder} and create a VPN
* interface by calling {@link Builder#establish}.</li>
* <li>Process and exchange packets between the tunnel and the returned
* file descriptor.</li>
* <li>When {@link #onRevoke} is invoked, close the file descriptor and
* shut down the tunnel gracefully.</li>
* </ol>
*
* <p>Services extended this class need to be declared with appropriate
* permission and intent filter. Their access must be secured by
* {@link android.Manifest.permission#BIND_VPN_SERVICE} permission, and
* their intent filter must match {@link #SERVICE_INTERFACE} action. Here
* is an example of declaring a VPN service in {@code AndroidManifest.xml}:
* <pre>
* <service android:name=".ExampleVpnService"
* android:permission="android.permission.BIND_VPN_SERVICE">
* <intent-filter>
* <action android:name="android.net.VpnService"/>
* </intent-filter>
* </service></pre>
*
* @see Builder
*/
public class VpnService extends Service {
/**
* The action must be matched by the intent filter of this service. It also
* needs to require {@link android.Manifest.permission#BIND_VPN_SERVICE}
* permission so that other applications cannot abuse it.
*/
public static final String SERVICE_INTERFACE = VpnConfig.SERVICE_INTERFACE;
/**
 * Looks up the connectivity service binder directly and wraps it in an
 * IConnectivityManager proxy: the VPN-related calls used by this class are
 * hidden and therefore not available through ConnectivityManager.
 */
private static IConnectivityManager getService() {
    IBinder binder = ServiceManager.getService(Context.CONNECTIVITY_SERVICE);
    return IConnectivityManager.Stub.asInterface(binder);
}
/**
* Prepare to establish a VPN connection. This method returns {@code null}
* if the VPN application is already prepared or if the user has previously
* consented to the VPN application. Otherwise, it returns an
* {@link Intent} to a system activity. The application should launch the
* activity using {@link Activity#startActivityForResult} to get itself
* prepared. The activity may pop up a dialog to require user action, and
* the result will come back via its {@link Activity#onActivityResult}.
* If the result is {@link Activity#RESULT_OK}, the application becomes
* prepared and is granted to use other methods in this class.
*
* <p>Only one application can be granted at the same time. The right
* is revoked when another application is granted. The application
* losing the right will be notified via its {@link #onRevoke}. Unless
* it becomes prepared again, subsequent calls to other methods in this
* class will fail.
*
* <p>The user may disable the VPN at any time while it is activated, in
* which case this method will return an intent the next time it is
* executed to obtain the user's consent again.
*
* @see #onRevoke
*/
public static Intent prepare(Context context) {
    boolean prepared = false;
    try {
        prepared = getService().prepareVpn(
                context.getPackageName(), null, UserHandle.myUserId());
    } catch (RemoteException ignored) {
        // system server unreachable; fall through and ask for confirmation
    }
    // null means the caller is already prepared; otherwise the caller must
    // launch the returned intent to obtain the user's consent
    return prepared ? null : VpnConfig.getIntentForConfirmation();
}
/**
* Version of {@link #prepare(Context)} which does not require user consent.
*
* <p>Requires {@link android.Manifest.permission#CONTROL_VPN} and should generally not be
* used. Only acceptable in situations where user consent has been obtained through other means.
*
* <p>Once this is run, future preparations may be done with the standard prepare method as this
* will authorize the package to prepare the VPN without consent in the future.
*
* @hide
*/
@SystemApi
public static void prepareAndAuthorize(Context context) {
    IConnectivityManager cm = getService();
    String packageName = context.getPackageName();
    int userId = UserHandle.myUserId();
    try {
        // prepare only when this package is not already the prepared VPN app
        boolean alreadyPrepared = cm.prepareVpn(packageName, null, userId);
        if (!alreadyPrepared) {
            cm.prepareVpn(null, packageName, userId);
        }
        // authorize future consent-free preparations for this package
        cm.setVpnPackageAuthorization(packageName, userId, true);
    } catch (RemoteException ignored) {
        // system server unreachable; silently give up, matching prepare()
    }
}
/**
* Protect a socket from VPN connections. After protecting, data sent
* through this socket will go directly to the underlying network,
* so its traffic will not be forwarded through the VPN.
* This method is useful if some connections need to be kept
* outside of VPN. For example, a VPN tunnel should protect itself if its
* destination is covered by VPN routes. Otherwise its outgoing packets
* will be sent back to the VPN interface and cause an infinite loop. This
* method will fail if the application is not prepared or is revoked.
*
* <p class="note">The socket is NOT closed by this method.
*
* @return {@code true} on success.
*/
public boolean protect(int socket) {
    // mark the descriptor so its traffic goes directly to the underlying
    // network instead of being routed through the VPN interface
    return NetworkUtils.protectFromVpn(socket);
}
/**
* Convenience method to protect a {@link Socket} from VPN connections.
*
* @return {@code true} on success.
* @see #protect(int)
*/
public boolean protect(Socket socket) {
    // unwrap the raw file descriptor and delegate to protect(int)
    int fd = socket.getFileDescriptor$().getInt$();
    return protect(fd);
}
/**
* Convenience method to protect a {@link DatagramSocket} from VPN
* connections.
*
* @return {@code true} on success.
* @see #protect(int)
*/
public boolean protect(DatagramSocket socket) {
    // unwrap the raw file descriptor and delegate to protect(int)
    int fd = socket.getFileDescriptor$().getInt$();
    return protect(fd);
}
/**
* Adds a network address to the VPN interface.
*
* Both IPv4 and IPv6 addresses are supported. The VPN must already be established. Fails if the
* address is already in use or cannot be assigned to the interface for any other reason.
*
* Adding an address implicitly allows traffic from that address family (i.e., IPv4 or IPv6) to
* be routed over the VPN. @see Builder#allowFamily
*
* @throws IllegalArgumentException if the address is invalid.
*
* @param address The IP address (IPv4 or IPv6) to assign to the VPN interface.
* @param prefixLength The prefix length of the address.
*
* @return {@code true} on success.
* @see Builder#addAddress
*
* @hide
*/
public boolean addAddress(InetAddress address, int prefixLength) {
    // validate the address/prefix pair before touching the service
    check(address, prefixLength);
    try {
        String host = address.getHostAddress();
        return getService().addVpnAddress(host, prefixLength);
    } catch (RemoteException e) {
        // connectivity service is gone; surface as an unchecked failure
        throw new IllegalStateException(e);
    }
}
/**
 * Removes a network address from the VPN interface.
 *
 * <p>Both IPv4 and IPv6 addresses are supported. The VPN must already be
 * established. Fails if the address is not assigned to the VPN interface, if
 * it is the only assigned address (thus cannot be removed), or if it cannot
 * be removed for any other reason.
 *
 * <p>After removing an address, even if no addresses, routes or DNS servers
 * of a particular family (i.e., IPv4 or IPv6) remain configured, that family
 * is <b>NOT</b> blocked: once allowed, a family stays allowed for the rest of
 * the VPN's session. @see Builder#allowFamily
 *
 * @param address The IP address (IPv4 or IPv6) to remove from the VPN interface.
 * @param prefixLength The prefix length of the address.
 *
 * @return {@code true} on success.
 * @throws IllegalArgumentException if the address is invalid.
 *
 * @hide
 */
public boolean removeAddress(InetAddress address, int prefixLength) {
    // Same validation as addAddress: reject bad families/prefixes up front.
    check(address, prefixLength);
    final String host = address.getHostAddress();
    try {
        return getService().removeVpnAddress(host, prefixLength);
    } catch (RemoteException e) {
        // The system connectivity service is unreachable; fail fast.
        throw new IllegalStateException(e);
    }
}
/**
 * Sets the underlying networks used by the VPN for its upstream connections.
 *
 * <p>Used by the system to know the actual networks that carry traffic for
 * apps affected by this VPN, e.g. for status bar icons. Only needed when the
 * VPN explicitly binds its communication channels — such as sockets passed to
 * {@link #protect(int)} — to a {@code Network} via
 * {@link Network#bindSocket(Socket)} or {@link Network#bindSocket(DatagramSocket)}.
 * Call it again every time the set of networks in use changes.
 *
 * <p>{@code networks} may be: a non-empty array (networks in decreasing
 * preference order), an empty array (the VPN has no underlying connection, so
 * app traffic is not sent or received), or {@code null} (default — the VPN
 * rides on whatever the system default network is).
 *
 * <p>Succeeds only while the VPN is established; before that, use
 * {@link Builder#setUnderlyingNetworks} instead.
 *
 * @param networks An array of networks the VPN uses to tunnel traffic to/from
 *                 its servers.
 *
 * @return {@code true} on success.
 */
public boolean setUnderlyingNetworks(Network[] networks) {
    try {
        return getService().setUnderlyingNetworksForVpn(networks);
    } catch (RemoteException e) {
        // The system connectivity service is unreachable; fail fast.
        throw new IllegalStateException(e);
    }
}
/**
 * Return the communication interface to the service. This method returns
 * {@code null} for {@link Intent}s other than the {@link #SERVICE_INTERFACE}
 * action. Applications overriding this method must identify the intent
 * and return the corresponding interface accordingly.
 *
 * @see Service#onBind
 */
@Override
public IBinder onBind(Intent intent) {
    // Guard: only the system's SERVICE_INTERFACE action gets the control binder.
    if (intent == null || !SERVICE_INTERFACE.equals(intent.getAction())) {
        return null;
    }
    return new Callback();
}
/**
 * Invoked when the application is revoked. At this moment, the VPN
 * interface is already deactivated by the system. The application should
 * close the file descriptor and shut down gracefully. The default
 * implementation of this method is calling {@link Service#stopSelf()}.
 *
 * <p class="note">Calls to this method may not happen on the main thread
 * of the process.
 *
 * @see #prepare
 */
public void onRevoke() {
    // Default policy: just stop the service. Subclasses override to close
    // the tunnel fd and clean up before (or instead of) stopping.
    stopSelf();
}
/**
 * Control channel handed to the system from {@link #onBind}. Uses a raw
 * Binder instead of AIDL since there is only a single transaction: the
 * revoke notification.
 */
private class Callback extends Binder {
    @Override
    protected boolean onTransact(int code, Parcel data, Parcel reply, int flags) {
        // The system signals revocation with LAST_CALL_TRANSACTION; every
        // other code is unhandled.
        final boolean isRevoke = (code == IBinder.LAST_CALL_TRANSACTION);
        if (isRevoke) {
            onRevoke();
        }
        return isRevoke;
    }
}
/**
 * Validates an address/prefixLength pair: rejects loopback addresses,
 * prefix lengths outside the family's valid range (0-32 for IPv4,
 * 0-128 for IPv6), and any address family other than IPv4/IPv6.
 */
private static void check(InetAddress address, int prefixLength) {
    if (address.isLoopbackAddress()) {
        throw new IllegalArgumentException("Bad address");
    }
    // Determine the family's maximum prefix length, then range-check once.
    final int maxPrefix;
    if (address instanceof Inet4Address) {
        maxPrefix = 32;
    } else if (address instanceof Inet6Address) {
        maxPrefix = 128;
    } else {
        throw new IllegalArgumentException("Unsupported family");
    }
    if (prefixLength < 0 || prefixLength > maxPrefix) {
        throw new IllegalArgumentException("Bad prefixLength");
    }
}
/**
 * Helper class to create a VPN interface. This class should be always
 * used within the scope of the outer {@link VpnService}.
 *
 * @see VpnService
 */
public class Builder {
    private final VpnConfig mConfig = new VpnConfig();
    private final List<LinkAddress> mAddresses = new ArrayList<LinkAddress>();
    private final List<RouteInfo> mRoutes = new ArrayList<RouteInfo>();

    public Builder() {
        // Attribute the VPN to the concrete service subclass building it.
        mConfig.user = VpnService.this.getClass().getName();
    }

    /**
     * Set the name of this session. It will be displayed in
     * system-managed dialogs and notifications. This is recommended
     * not required.
     */
    public Builder setSession(String session) {
        mConfig.session = session;
        return this;
    }

    /**
     * Set the {@link PendingIntent} to an activity for users to
     * configure the VPN connection. If it is not set, the button
     * to configure will not be shown in system-managed dialogs.
     */
    public Builder setConfigureIntent(PendingIntent intent) {
        mConfig.configureIntent = intent;
        return this;
    }

    /**
     * Set the maximum transmission unit (MTU) of the VPN interface. If
     * it is not set, the default value in the operating system will be
     * used.
     *
     * @throws IllegalArgumentException if the value is not positive.
     */
    public Builder setMtu(int mtu) {
        if (mtu <= 0) {
            throw new IllegalArgumentException("Bad mtu");
        }
        mConfig.mtu = mtu;
        return this;
    }

    /**
     * Add a network address to the VPN interface. Both IPv4 and IPv6
     * addresses are supported. At least one address must be set before
     * calling {@link #establish}.
     *
     * Adding an address implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the address is invalid.
     */
    public Builder addAddress(InetAddress address, int prefixLength) {
        check(address, prefixLength);
        // Wildcard ("any") addresses cannot be assigned to an interface.
        if (address.isAnyLocalAddress()) {
            throw new IllegalArgumentException("Bad address");
        }
        mAddresses.add(new LinkAddress(address, prefixLength));
        mConfig.updateAllowedFamilies(address);
        return this;
    }

    /**
     * Convenience method to add a network address to the VPN interface
     * using a numeric address string. See {@link InetAddress} for the
     * definitions of numeric address formats.
     *
     * Adding an address implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the address is invalid.
     * @see #addAddress(InetAddress, int)
     */
    public Builder addAddress(String address, int prefixLength) {
        return addAddress(InetAddress.parseNumericAddress(address), prefixLength);
    }

    /**
     * Add a network route to the VPN interface. Both IPv4 and IPv6
     * routes are supported.
     *
     * Adding a route implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the route is invalid.
     */
    public Builder addRoute(InetAddress address, int prefixLength) {
        check(address, prefixLength);
        // Require a proper network address: every host bit beyond the
        // prefix must be zero. The shift clears the in-prefix bits of the
        // first partially-covered byte, then all remaining bytes are
        // checked for zero.
        int offset = prefixLength / 8;
        byte[] bytes = address.getAddress();
        if (offset < bytes.length) {
            for (bytes[offset] <<= prefixLength % 8; offset < bytes.length; ++offset) {
                if (bytes[offset] != 0) {
                    throw new IllegalArgumentException("Bad address");
                }
            }
        }
        mRoutes.add(new RouteInfo(new IpPrefix(address, prefixLength), null));
        mConfig.updateAllowedFamilies(address);
        return this;
    }

    /**
     * Convenience method to add a network route to the VPN interface
     * using a numeric address string. See {@link InetAddress} for the
     * definitions of numeric address formats.
     *
     * Adding a route implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the route is invalid.
     * @see #addRoute(InetAddress, int)
     */
    public Builder addRoute(String address, int prefixLength) {
        return addRoute(InetAddress.parseNumericAddress(address), prefixLength);
    }

    /**
     * Add a DNS server to the VPN connection. Both IPv4 and IPv6
     * addresses are supported. If none is set, the DNS servers of
     * the default network will be used.
     *
     * Adding a server implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the address is invalid.
     */
    public Builder addDnsServer(InetAddress address) {
        if (address.isLoopbackAddress() || address.isAnyLocalAddress()) {
            throw new IllegalArgumentException("Bad address");
        }
        if (mConfig.dnsServers == null) {
            mConfig.dnsServers = new ArrayList<String>();
        }
        mConfig.dnsServers.add(address.getHostAddress());
        // Fix: honor the documented contract above (and match addAddress/
        // addRoute) — adding a DNS server of a family implicitly allows
        // that family on the VPN. This call was missing.
        mConfig.updateAllowedFamilies(address);
        return this;
    }

    /**
     * Convenience method to add a DNS server to the VPN connection
     * using a numeric address string. See {@link InetAddress} for the
     * definitions of numeric address formats.
     *
     * Adding a server implicitly allows traffic from that address family
     * (i.e., IPv4 or IPv6) to be routed over the VPN. @see #allowFamily
     *
     * @throws IllegalArgumentException if the address is invalid.
     * @see #addDnsServer(InetAddress)
     */
    public Builder addDnsServer(String address) {
        return addDnsServer(InetAddress.parseNumericAddress(address));
    }

    /**
     * Add a search domain to the DNS resolver.
     */
    public Builder addSearchDomain(String domain) {
        if (mConfig.searchDomains == null) {
            mConfig.searchDomains = new ArrayList<String>();
        }
        mConfig.searchDomains.add(domain);
        return this;
    }

    /**
     * Allows traffic from the specified address family.
     *
     * By default, if no address, route or DNS server of a specific family
     * (IPv4 or IPv6) is added to this VPN, then all outgoing traffic of that
     * family is blocked. If any address, route or DNS server is added, that
     * family is allowed.
     *
     * This method allows an address family to be unblocked even without
     * adding an address, route or DNS server of that family. Traffic of that
     * family will then typically fall-through to the underlying network if
     * it's supported.
     *
     * @param family The address family ({@code AF_INET} or {@code AF_INET6})
     *               to allow.
     *
     * @return this {@link Builder} object to facilitate chaining of method calls.
     * @throws IllegalArgumentException if family is neither {@code AF_INET}
     *         nor {@code AF_INET6}.
     */
    public Builder allowFamily(int family) {
        if (family == AF_INET) {
            mConfig.allowIPv4 = true;
        } else if (family == AF_INET6) {
            mConfig.allowIPv6 = true;
        } else {
            throw new IllegalArgumentException(family + " is neither " + AF_INET + " nor " +
                    AF_INET6);
        }
        return this;
    }

    // Throws NameNotFoundException if the package is not installed for the
    // calling user; rethrows binder failures as IllegalStateException.
    private void verifyApp(String packageName) throws PackageManager.NameNotFoundException {
        IPackageManager pm = IPackageManager.Stub.asInterface(
                ServiceManager.getService("package"));
        try {
            pm.getApplicationInfo(packageName, 0, UserHandle.getCallingUserId());
        } catch (RemoteException e) {
            throw new IllegalStateException(e);
        }
    }

    /**
     * Adds an application that's allowed to access the VPN connection.
     *
     * If this method is called at least once, only applications added through
     * this method (and no others) are allowed access. Else (if this method is
     * never called), all applications are allowed by default. If some
     * applications are added, other, un-added applications will use networking
     * as if the VPN wasn't running.
     *
     * A {@link Builder} may have only a set of allowed applications OR a set
     * of disallowed ones, but not both. Calling this method after
     * {@link #addDisallowedApplication} has already been called, or vice
     * versa, will throw an {@link UnsupportedOperationException}.
     *
     * @param packageName The full name (e.g.: "com.google.apps.contacts") of
     *                    a currently installed application.
     *
     * @return this {@link Builder} object to facilitate chaining method calls.
     * @throws PackageManager.NameNotFoundException If the application isn't installed.
     */
    public Builder addAllowedApplication(String packageName)
            throws PackageManager.NameNotFoundException {
        if (mConfig.disallowedApplications != null) {
            throw new UnsupportedOperationException("addDisallowedApplication already called");
        }
        verifyApp(packageName);
        if (mConfig.allowedApplications == null) {
            mConfig.allowedApplications = new ArrayList<String>();
        }
        mConfig.allowedApplications.add(packageName);
        return this;
    }

    /**
     * Adds an application that's denied access to the VPN connection.
     *
     * By default, all applications are allowed access, except for those denied
     * through this method. Denied applications will use networking as if the
     * VPN wasn't running.
     *
     * A {@link Builder} may have only a set of allowed applications OR a set
     * of disallowed ones, but not both. Calling this method after
     * {@link #addAllowedApplication} has already been called, or vice versa,
     * will throw an {@link UnsupportedOperationException}.
     *
     * @param packageName The full name (e.g.: "com.google.apps.contacts") of
     *                    a currently installed application.
     *
     * @return this {@link Builder} object to facilitate chaining method calls.
     * @throws PackageManager.NameNotFoundException If the application isn't installed.
     */
    public Builder addDisallowedApplication(String packageName)
            throws PackageManager.NameNotFoundException {
        if (mConfig.allowedApplications != null) {
            throw new UnsupportedOperationException("addAllowedApplication already called");
        }
        verifyApp(packageName);
        if (mConfig.disallowedApplications == null) {
            mConfig.disallowedApplications = new ArrayList<String>();
        }
        mConfig.disallowedApplications.add(packageName);
        return this;
    }

    /**
     * Allows all apps to bypass this VPN connection.
     *
     * By default, all traffic from apps is forwarded through the VPN interface
     * and it is not possible for apps to side-step the VPN. If this method is
     * called, apps may use methods such as
     * {@link ConnectivityManager#bindProcessToNetwork} to instead send/receive
     * directly over the underlying network or any other network they have
     * permissions for.
     *
     * @return this {@link Builder} object to facilitate chaining of method calls.
     */
    public Builder allowBypass() {
        mConfig.allowBypass = true;
        return this;
    }

    /**
     * Sets the VPN interface's file descriptor to be in blocking/non-blocking
     * mode.
     *
     * By default, the file descriptor returned by {@link #establish} is
     * non-blocking.
     *
     * @param blocking True to put the descriptor into blocking mode; false
     *                 for non-blocking.
     *
     * @return this {@link Builder} object to facilitate chaining method calls.
     */
    public Builder setBlocking(boolean blocking) {
        mConfig.blocking = blocking;
        return this;
    }

    /**
     * Sets the underlying networks used by the VPN for its upstream
     * connections.
     *
     * @see VpnService#setUnderlyingNetworks
     *
     * @param networks An array of networks the VPN uses to tunnel traffic
     *                 to/from its servers.
     *
     * @return this {@link Builder} object to facilitate chaining method calls.
     */
    public Builder setUnderlyingNetworks(Network[] networks) {
        // Defensive copy so later caller-side mutation can't change the config.
        mConfig.underlyingNetworks = networks != null ? networks.clone() : null;
        return this;
    }

    /**
     * Create a VPN interface using the parameters supplied to this builder.
     * The interface works on IP packets, and a file descriptor is returned
     * for the application to access them. Each read retrieves an outgoing
     * packet which was routed to the interface. Each write injects an
     * incoming packet just like it was received from the interface. The file
     * descriptor is put into non-blocking mode by default to avoid blocking
     * Java threads. To use the file descriptor completely in native space,
     * see {@link ParcelFileDescriptor#detachFd()}. The application MUST close
     * the file descriptor when the VPN connection is terminated. The VPN
     * interface will be removed and the network will be restored by the
     * system automatically.
     *
     * <p>To avoid conflicts, there can be only one active VPN interface at
     * the same time. Usually network parameters are never changed during the
     * lifetime of a VPN connection. It is also common for an application to
     * create a new file descriptor after closing the previous one. However,
     * it is rare but not impossible to have two interfaces while performing a
     * seamless handover. In this case, the old interface will be deactivated
     * when the new one is created successfully. Both file descriptors are
     * valid but now outgoing packets will be routed to the new interface.
     * Therefore, after draining the old file descriptor, the application MUST
     * close it and start using the new file descriptor. If the new interface
     * cannot be created, the existing interface and its file descriptor
     * remain untouched.
     *
     * <p>An exception will be thrown if the interface cannot be created for
     * any reason. However, this method returns {@code null} if the
     * application is not prepared or is revoked. This helps solve possible
     * race conditions between other VPN applications.
     *
     * @return {@link ParcelFileDescriptor} of the VPN interface, or
     *         {@code null} if the application is not prepared.
     * @throws IllegalArgumentException if a parameter is not accepted
     *         by the operating system.
     * @throws IllegalStateException if a parameter cannot be applied
     *         by the operating system.
     * @throws SecurityException if the service is not properly declared
     *         in {@code AndroidManifest.xml}.
     * @see VpnService
     */
    public ParcelFileDescriptor establish() {
        mConfig.addresses = mAddresses;
        mConfig.routes = mRoutes;
        try {
            return getService().establishVpn(mConfig);
        } catch (RemoteException e) {
            throw new IllegalStateException(e);
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.xsort.managed;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Random;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.logical.data.Order.Ordering;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.ops.OperExecContext;
import org.apache.drill.exec.physical.config.Sort;
import org.apache.drill.exec.record.BatchSchema;
import org.apache.drill.test.DrillTest;
import org.apache.drill.test.OperatorFixture;
import org.apache.drill.test.rowSet.RowSet;
import org.apache.drill.test.rowSet.RowSet.ExtendableRowSet;
import org.apache.drill.test.rowSet.RowSet.RowSetReader;
import org.apache.drill.test.rowSet.RowSet.RowSetWriter;
import org.apache.drill.test.rowSet.RowSet.SingleRowSet;
import org.apache.drill.test.rowSet.RowSetBuilder;
import org.apache.drill.test.rowSet.RowSetComparison;
import org.apache.drill.test.rowSet.RowSetUtilities;
import org.apache.drill.test.rowSet.SchemaBuilder;
import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.Lists;
/**
* Tests the generated per-batch sort code via its wrapper layer.
*/
public class TestSorter extends DrillTest {
public static OperatorFixture fixture;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
fixture = OperatorFixture.builder().build();
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
fixture.close();
}
public static Sort makeSortConfig(String key, String sortOrder, String nullOrder) {
FieldReference expr = FieldReference.getWithQuotedRef(key);
Ordering ordering = new Ordering(sortOrder, expr, nullOrder);
return new Sort(null, Lists.newArrayList(ordering), false);
}
public void runSorterTest(SingleRowSet rowSet, SingleRowSet expected) throws Exception {
runSorterTest(makeSortConfig("key", Ordering.ORDER_ASC, Ordering.NULLS_LAST), rowSet, expected);
}
public void runSorterTest(Sort popConfig, SingleRowSet rowSet, SingleRowSet expected) throws Exception {
OperExecContext opContext = fixture.newOperExecContext(popConfig);
SorterWrapper sorter = new SorterWrapper(opContext);
sorter.sortBatch(rowSet.container(), rowSet.getSv2());
new RowSetComparison(expected)
.verifyAndClearAll(rowSet);
sorter.close();
}
// Test degenerate case: no rows
@Test
public void testEmptyRowSet() throws Exception {
BatchSchema schema = SortTestUtilities.nonNullSchema();
SingleRowSet rowSet = new RowSetBuilder(fixture.allocator(), schema)
.withSv2()
.build();
SingleRowSet expected = new RowSetBuilder(fixture.allocator(), schema)
.build();
runSorterTest(rowSet, expected);
}
// Sanity test: single row
@Test
public void testSingleRow() throws Exception {
BatchSchema schema = SortTestUtilities.nonNullSchema();
SingleRowSet rowSet = new RowSetBuilder(fixture.allocator(), schema)
.add(0, "0")
.withSv2()
.build();
SingleRowSet expected = new RowSetBuilder(fixture.allocator(), schema)
.add(0, "0")
.build();
runSorterTest(rowSet, expected);
}
// Paranoia: sort with two rows.
@Test
public void testTwoRows() throws Exception {
BatchSchema schema = SortTestUtilities.nonNullSchema();
SingleRowSet rowSet = new RowSetBuilder(fixture.allocator(), schema)
.add(1, "1")
.add(0, "0")
.withSv2()
.build();
SingleRowSet expected = new RowSetBuilder(fixture.allocator(), schema)
.add(0, "0")
.add(1, "1")
.build();
runSorterTest(rowSet, expected);
}
private abstract static class BaseSortTester {
protected final OperatorFixture fixture;
protected final SorterWrapper sorter;
protected final boolean nullable;
public BaseSortTester(OperatorFixture fixture, String sortOrder, String nullOrder, boolean nullable) {
this.fixture = fixture;
Sort popConfig = makeSortConfig("key", sortOrder, nullOrder);
this.nullable = nullable;
OperExecContext opContext = fixture.newOperExecContext(popConfig);
sorter = new SorterWrapper(opContext);
}
}
private abstract static class SortTester extends BaseSortTester {
protected DataItem data[];
public SortTester(OperatorFixture fixture, String sortOrder, String nullOrder, boolean nullable) {
super(fixture, sortOrder, nullOrder, nullable);
}
public void test(MinorType type) throws SchemaChangeException {
data = makeDataArray(20);
BatchSchema schema = SortTestUtilities.makeSchema(type, nullable);
SingleRowSet input = makeDataSet(fixture.allocator(), schema, data);
input = input.toIndirect();
sorter.sortBatch(input.container(), input.getSv2());
sorter.close();
verify(input);
}
public static class DataItem {
public final int key;
public final int value;
public final boolean isNull;
public DataItem(int key, int value, boolean isNull) {
this.key = key;
this.value = value;
this.isNull = isNull;
}
@Override
public String toString() {
return "(" + key + ", \"" + value + "\", " +
(isNull ? "null" : "set") + ")";
}
}
public DataItem[] makeDataArray(int size) {
DataItem values[] = new DataItem[size];
int key = 11;
int delta = 3;
for (int i = 0; i < size; i++) {
values[i] = new DataItem(key, i, key % 5 == 0);
key = (key + delta) % size;
}
return values;
}
public SingleRowSet makeDataSet(BufferAllocator allocator, BatchSchema schema, DataItem[] items) {
ExtendableRowSet rowSet = fixture.rowSet(schema);
RowSetWriter writer = rowSet.writer(items.length);
for (int i = 0; i < items.length; i++) {
DataItem item = items[i];
if (nullable && item.isNull) {
writer.column(0).setNull();
} else {
RowSetUtilities.setFromInt(writer, 0, item.key);
}
writer.column(1).setString(Integer.toString(item.value));
writer.save();
}
writer.done();
return rowSet;
}
private void verify(RowSet actual) {
DataItem expected[] = Arrays.copyOf(data, data.length);
doSort(expected);
RowSet expectedRows = makeDataSet(actual.allocator(), actual.schema().batch(), expected);
doVerify(expected, expectedRows, actual);
}
protected void doVerify(DataItem[] expected, RowSet expectedRows, RowSet actual) {
new RowSetComparison(expectedRows)
.verifyAndClearAll(actual);
}
protected abstract void doSort(DataItem[] expected);
}
private static class TestSorterNumeric extends SortTester {
private final int sign;
public TestSorterNumeric(OperatorFixture fixture, boolean asc) {
super(fixture,
asc ? Ordering.ORDER_ASC : Ordering.ORDER_DESC,
Ordering.NULLS_UNSPECIFIED, false);
sign = asc ? 1 : -1;
}
@Override
protected void doSort(DataItem[] expected) {
Arrays.sort(expected, new Comparator<DataItem>(){
@Override
public int compare(DataItem o1, DataItem o2) {
return sign * Integer.compare(o1.key, o2.key);
}
});
}
}
private static class TestSorterNullableNumeric extends SortTester {
private final int sign;
private final int nullSign;
public TestSorterNullableNumeric(OperatorFixture fixture, boolean asc, boolean nullsLast) {
super(fixture,
asc ? Ordering.ORDER_ASC : Ordering.ORDER_DESC,
nullsLast ? Ordering.NULLS_LAST : Ordering.NULLS_FIRST,
true);
sign = asc ? 1 : -1;
nullSign = nullsLast ? 1 : -1;
}
@Override
protected void doSort(DataItem[] expected) {
Arrays.sort(expected, new Comparator<DataItem>(){
@Override
public int compare(DataItem o1, DataItem o2) {
if (o1.isNull && o2.isNull) { return 0; }
if (o1.isNull) { return nullSign; }
if (o2.isNull) { return -nullSign; }
return sign * Integer.compare(o1.key, o2.key);
}
});
}
@Override
protected void doVerify(DataItem[] expected, RowSet expectedRows, RowSet actual) {
int nullCount = 0;
for (DataItem item : expected) {
if (item.isNull) { nullCount++; }
}
int length = expected.length - nullCount;
int offset = (nullSign == 1) ? 0 : nullCount;
new RowSetComparison(expectedRows)
.offset(offset)
.span(length)
.verify(actual);
offset = length - offset;
new RowSetComparison(expectedRows)
.offset(offset)
.span(nullCount)
.withMask(true, false)
.verifyAndClearAll(actual);
}
}
private static class TestSorterStringAsc extends SortTester {
public TestSorterStringAsc(OperatorFixture fixture) {
super(fixture, Ordering.ORDER_ASC, Ordering.NULLS_UNSPECIFIED, false);
}
@Override
protected void doSort(DataItem[] expected) {
Arrays.sort(expected, new Comparator<DataItem>(){
@Override
public int compare(DataItem o1, DataItem o2) {
return Integer.toString(o1.key).compareTo(Integer.toString(o2.key));
}
});
}
}
private static class TestSorterBinaryAsc extends SortTester {
public TestSorterBinaryAsc(OperatorFixture fixture) {
super(fixture, Ordering.ORDER_ASC, Ordering.NULLS_UNSPECIFIED, false);
}
@Override
protected void doSort(DataItem[] expected) {
Arrays.sort(expected, new Comparator<DataItem>(){
@Override
public int compare(DataItem o1, DataItem o2) {
return Integer.toHexString(o1.key).compareTo(Integer.toHexString(o2.key));
}
});
}
}
private abstract static class BaseTestSorterIntervalAsc extends BaseSortTester {
public BaseTestSorterIntervalAsc(OperatorFixture fixture) {
super(fixture, Ordering.ORDER_ASC, Ordering.NULLS_UNSPECIFIED, false);
}
public void test(MinorType type) throws SchemaChangeException {
BatchSchema schema = new SchemaBuilder()
.add("key", type)
.build();
SingleRowSet input = makeInputData(fixture.allocator(), schema);
input = input.toIndirect();
sorter.sortBatch(input.container(), input.getSv2());
sorter.close();
verify(input);
input.clear();
}
protected SingleRowSet makeInputData(BufferAllocator allocator,
BatchSchema schema) {
RowSetBuilder builder = fixture.rowSetBuilder(schema);
int rowCount = 100;
Random rand = new Random();
for (int i = 0; i < rowCount; i++) {
int ms = rand.nextInt(1000);
int sec = rand.nextInt(60);
int min = rand.nextInt(60);
int hr = rand.nextInt(24);
int day = rand.nextInt(28);
int mo = rand.nextInt(12);
int yr = rand.nextInt(10);
Period period = makePeriod(yr, mo, day, hr, min, sec, ms);
builder.add(period);
}
return builder.build();
}
protected abstract Period makePeriod(int yr, int mo, int day, int hr, int min, int sec,
int ms);
private void verify(SingleRowSet output) {
RowSetReader reader = output.reader();
int prevYears = 0;
int prevMonths = 0;
long prevMs = 0;
while (reader.next()) {
Period period = reader.column(0).getPeriod().normalizedStandard();
int years = period.getYears();
assertTrue(prevYears <= years);
if (prevYears != years) {
prevMonths = 0;
prevMs = 0;
}
prevYears = years;
int months = period.getMonths();
assertTrue(prevMonths <= months);
if (prevMonths != months) {
prevMs = 0;
}
prevMonths = months;
Period remainder = period
.withYears(0)
.withMonths(0);
long ms = remainder.toStandardDuration().getMillis();
assertTrue(prevMs <= ms);
prevMs = ms;
}
}
}
private static class TestSorterIntervalAsc extends BaseTestSorterIntervalAsc {
public TestSorterIntervalAsc(OperatorFixture fixture) {
super(fixture);
}
public void test() throws SchemaChangeException {
test(MinorType.INTERVAL);
}
@Override
protected Period makePeriod(int yr, int mo, int day, int hr, int min,
int sec, int ms) {
return Period.years(yr)
.withMonths(mo)
.withDays(day)
.withHours(hr)
.withMinutes(min)
.withSeconds(sec)
.withMillis(ms);
}
}
private static class TestSorterIntervalYearAsc extends BaseTestSorterIntervalAsc {
public TestSorterIntervalYearAsc(OperatorFixture fixture) {
super(fixture);
}
public void test() throws SchemaChangeException {
test(MinorType.INTERVALYEAR);
}
@Override
protected Period makePeriod(int yr, int mo, int day, int hr, int min,
int sec, int ms) {
return Period.years(yr)
.withMonths(mo);
}
}
private static class TestSorterIntervalDayAsc extends BaseTestSorterIntervalAsc {
public TestSorterIntervalDayAsc(OperatorFixture fixture) {
super(fixture);
}
public void test() throws SchemaChangeException {
test(MinorType.INTERVALDAY);
}
@Override
protected Period makePeriod(int yr, int mo, int day, int hr, int min,
int sec, int ms) {
return Period.days(day)
.withHours(hr)
.withMinutes(min)
.withSeconds(sec)
.withMillis(ms);
}
}
@Test
public void testNumericTypes() throws Exception {
TestSorterNumeric tester1 = new TestSorterNumeric(fixture, true);
// tester1.test(MinorType.TINYINT); // DRILL-5329
// tester1.test(MinorType.UINT1); DRILL-5329
// tester1.test(MinorType.SMALLINT); DRILL-5329
// tester1.test(MinorType.UINT2); DRILL-5329
tester1.test(MinorType.INT);
// tester1.test(MinorType.UINT4); DRILL-5329
tester1.test(MinorType.BIGINT);
// tester1.test(MinorType.UINT8); DRILL-5329
tester1.test(MinorType.FLOAT4);
tester1.test(MinorType.FLOAT8);
tester1.test(MinorType.DECIMAL9);
tester1.test(MinorType.DECIMAL18);
// tester1.test(MinorType.DECIMAL28SPARSE); DRILL-5329
// tester1.test(MinorType.DECIMAL38SPARSE); DRILL-5329
// tester1.test(MinorType.DECIMAL28DENSE); No writer
// tester1.test(MinorType.DECIMAL38DENSE); No writer
tester1.test(MinorType.DATE);
tester1.test(MinorType.TIME);
tester1.test(MinorType.TIMESTAMP);
}
@Test
public void testVarCharTypes() throws Exception {
TestSorterStringAsc tester = new TestSorterStringAsc(fixture);
tester.test(MinorType.VARCHAR);
// tester.test(MinorType.VAR16CHAR); DRILL-5329
}
/**
* Test the VARBINARY data type as a sort key.
*
* @throws Exception for internal errors
*/
@Test
public void testVarBinary() throws Exception {
TestSorterBinaryAsc tester = new TestSorterBinaryAsc(fixture);
tester.test(MinorType.VARBINARY);
}
/**
* Test the INTERVAL data type as a sort key.
*
* @throws Exception for internal errors
*/
@Test
public void testInterval() throws Exception {
TestSorterIntervalAsc tester = new TestSorterIntervalAsc(fixture);
tester.test();
}
/**
 * Verify that the INTERVALYEAR data type works as a sort key.
 *
 * @throws Exception for internal errors
 */
@Test
public void testIntervalYear() throws Exception {
  new TestSorterIntervalYearAsc(fixture).test();
}
/**
 * Verify that the INTERVALDAY data type works as a sort key.
 *
 * @throws Exception for internal errors
 */
@Test
public void testIntervalDay() throws Exception {
  new TestSorterIntervalDayAsc(fixture).test();
}
/** Verify descending sort order, using INT as a representative type. */
@Test
public void testDesc() throws Exception {
  new TestSorterNumeric(fixture, false).test(MinorType.INT);
}
/**
 * Verify that nulls sort in the requested position (high or low) for
 * every combination of the two boolean options accepted by
 * {@code TestSorterNullableNumeric}. The loops reproduce the original
 * order: (true,true), (true,false), (false,true), (false,false).
 * NOTE(review): flag semantics are defined by the tester's constructor —
 * presumably (ascending, nulls position); confirm there.
 */
@Test
public void testNullable() throws Exception {
  for (boolean firstFlag : new boolean[] {true, false}) {
    for (boolean secondFlag : new boolean[] {true, false}) {
      new TestSorterNullableNumeric(fixture, firstFlag, secondFlag).test(MinorType.INT);
    }
  }
}
// Sort by a key nested inside a map column ("map.key"). Disabled:
// map-key ordering is broken, tracked by DRILL-5384.
@Test
@Ignore("DRILL-5384")
public void testMapKey() throws Exception {
// Schema: map { key: INT, value: VARCHAR }
BatchSchema schema = new SchemaBuilder()
.addMap("map")
.add("key", MinorType.INT)
.add("value", MinorType.VARCHAR)
.buildMap()
.build();
// Input rows deliberately out of order; sorted via an SV2.
SingleRowSet input = fixture.rowSetBuilder(schema)
.add(3, "third")
.add(1, "first")
.add(2, "second")
.withSv2()
.build();
// Expected output: ascending by map.key.
SingleRowSet output = fixture.rowSetBuilder(schema)
.add(1, "first")
.add(2, "second")
.add(3, "third")
.build();
Sort popConfig = makeSortConfig("map.key", Ordering.ORDER_ASC, Ordering.NULLS_LAST);
runSorterTest(popConfig, input, output);
}
}
| |
package com.xmomen.module.sms.util;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import org.apache.commons.lang.StringUtils;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
/**
 * Thin helper around Apache HttpClient 4.x for issuing REST calls
 * (GET, POST as form / string / bytes, PUT as string / bytes, DELETE).
 *
 * <p>NOTE(security): for hosts starting with {@code "https://"} this class
 * installs a trust-all {@link X509TrustManager} and ALLOW_ALL hostname
 * verification (see {@link #sslClient}), so server certificates are NOT
 * validated. This is vulnerable to man-in-the-middle attacks; replace with
 * proper certificate validation before use on untrusted networks.
 */
public class HttpUtils {

    /** Utility class; not instantiable. */
    private HttpUtils() {
    }

    /**
     * Issues an HTTP GET.
     *
     * @param host    scheme plus authority, e.g. {@code "https://host"}
     * @param path    path appended verbatim to {@code host}; may be blank
     * @param method  unused; retained for signature compatibility
     * @param headers request headers; may be {@code null}
     * @param querys  query parameters (values URL-encoded as UTF-8); may be {@code null}
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doGet(String host, String path, String method,
                                     Map<String, String> headers,
                                     Map<String, String> querys)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpGet request = new HttpGet(buildUrl(host, path, querys));
        addHeaders(request, headers);
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP POST with an {@code application/x-www-form-urlencoded}
     * body built from {@code bodys}.
     *
     * @param host    scheme plus authority
     * @param path    request path; may be blank
     * @param method  unused; retained for signature compatibility
     * @param headers request headers; may be {@code null}
     * @param querys  query parameters; may be {@code null}
     * @param bodys   form fields; skipped entirely when {@code null}
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doPost(String host, String path, String method,
                                      Map<String, String> headers,
                                      Map<String, String> querys,
                                      Map<String, String> bodys)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpPost request = new HttpPost(buildUrl(host, path, querys));
        addHeaders(request, headers);
        if (bodys != null) {
            List<NameValuePair> nameValuePairList = new ArrayList<NameValuePair>();
            for (Map.Entry<String, String> entry : bodys.entrySet()) {
                nameValuePairList.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
            }
            UrlEncodedFormEntity formEntity = new UrlEncodedFormEntity(nameValuePairList, "utf-8");
            formEntity.setContentType("application/x-www-form-urlencoded; charset=UTF-8");
            request.setEntity(formEntity);
        }
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP POST with a raw string body (UTF-8).
     *
     * @param body request body; skipped when blank
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doPost(String host, String path, String method,
                                      Map<String, String> headers,
                                      Map<String, String> querys,
                                      String body)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpPost request = new HttpPost(buildUrl(host, path, querys));
        addHeaders(request, headers);
        if (StringUtils.isNotBlank(body)) {
            request.setEntity(new StringEntity(body, "utf-8"));
        }
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP POST with a raw byte-array body.
     *
     * @param body request body; skipped when {@code null}
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doPost(String host, String path, String method,
                                      Map<String, String> headers,
                                      Map<String, String> querys,
                                      byte[] body)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpPost request = new HttpPost(buildUrl(host, path, querys));
        addHeaders(request, headers);
        if (body != null) {
            request.setEntity(new ByteArrayEntity(body));
        }
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP PUT with a raw string body (UTF-8).
     *
     * @param body request body; skipped when blank
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doPut(String host, String path, String method,
                                     Map<String, String> headers,
                                     Map<String, String> querys,
                                     String body)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpPut request = new HttpPut(buildUrl(host, path, querys));
        addHeaders(request, headers);
        if (StringUtils.isNotBlank(body)) {
            request.setEntity(new StringEntity(body, "utf-8"));
        }
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP PUT with a raw byte-array body.
     *
     * @param body request body; skipped when {@code null}
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doPut(String host, String path, String method,
                                     Map<String, String> headers,
                                     Map<String, String> querys,
                                     byte[] body)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpPut request = new HttpPut(buildUrl(host, path, querys));
        addHeaders(request, headers);
        if (body != null) {
            request.setEntity(new ByteArrayEntity(body));
        }
        return httpClient.execute(request);
    }

    /**
     * Issues an HTTP DELETE.
     *
     * @return the raw {@link HttpResponse}
     * @throws Exception on I/O or encoding failure
     */
    public static HttpResponse doDelete(String host, String path, String method,
                                        Map<String, String> headers,
                                        Map<String, String> querys)
            throws Exception {
        HttpClient httpClient = wrapClient(host);
        HttpDelete request = new HttpDelete(buildUrl(host, path, querys));
        addHeaders(request, headers);
        return httpClient.execute(request);
    }

    /**
     * Adds every entry of {@code headers} to {@code request}.
     * Tolerates a {@code null} map (previously this caused an NPE).
     */
    private static void addHeaders(HttpRequestBase request, Map<String, String> headers) {
        if (headers == null) {
            return;
        }
        for (Map.Entry<String, String> e : headers.entrySet()) {
            request.addHeader(e.getKey(), e.getValue());
        }
    }

    /**
     * Builds {@code host + path + "?" + query}. Entries with a blank key but
     * a non-blank value are appended as raw (unencoded) text; values of keyed
     * entries are URL-encoded as UTF-8. A {@code null} query map yields no
     * query string.
     */
    private static String buildUrl(String host, String path, Map<String, String> querys) throws UnsupportedEncodingException {
        StringBuilder sbUrl = new StringBuilder();
        sbUrl.append(host);
        if (!StringUtils.isBlank(path)) {
            sbUrl.append(path);
        }
        if (null != querys) {
            StringBuilder sbQuery = new StringBuilder();
            for (Map.Entry<String, String> query : querys.entrySet()) {
                if (0 < sbQuery.length()) {
                    sbQuery.append("&");
                }
                if (StringUtils.isBlank(query.getKey()) && !StringUtils.isBlank(query.getValue())) {
                    // Blank key: append the value verbatim (no encoding).
                    sbQuery.append(query.getValue());
                }
                if (!StringUtils.isBlank(query.getKey())) {
                    sbQuery.append(query.getKey());
                    if (!StringUtils.isBlank(query.getValue())) {
                        sbQuery.append("=");
                        sbQuery.append(URLEncoder.encode(query.getValue(), "utf-8"));
                    }
                }
            }
            if (0 < sbQuery.length()) {
                sbUrl.append("?").append(sbQuery);
            }
        }
        return sbUrl.toString();
    }

    /**
     * Returns a default client; when {@code host} is HTTPS the client is
     * patched to accept any certificate (see class-level security note).
     */
    private static HttpClient wrapClient(String host) {
        HttpClient httpClient = new DefaultHttpClient();
        if (host.startsWith("https://")) {
            sslClient(httpClient);
        }
        return httpClient;
    }

    /**
     * Registers a trust-all TLS scheme on port 443.
     * SECURITY: disables certificate chain validation AND hostname
     * verification — do not use against untrusted networks.
     */
    private static void sslClient(HttpClient httpClient) {
        try {
            SSLContext ctx = SSLContext.getInstance("TLS");
            X509TrustManager tm = new X509TrustManager() {
                public X509Certificate[] getAcceptedIssuers() {
                    return null;
                }
                public void checkClientTrusted(X509Certificate[] xcs, String str) {
                }
                public void checkServerTrusted(X509Certificate[] xcs, String str) {
                }
            };
            ctx.init(null, new TrustManager[] { tm }, null);
            SSLSocketFactory ssf = new SSLSocketFactory(ctx);
            ssf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            ClientConnectionManager ccm = httpClient.getConnectionManager();
            SchemeRegistry registry = ccm.getSchemeRegistry();
            registry.register(new Scheme("https", 443, ssf));
        } catch (KeyManagementException ex) {
            throw new RuntimeException(ex);
        } catch (NoSuchAlgorithmException ex) {
            throw new RuntimeException(ex);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.overlord.http;
import com.google.common.base.Function;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteSource;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ResourceFilters;
import org.apache.druid.audit.AuditEntry;
import org.apache.druid.audit.AuditInfo;
import org.apache.druid.audit.AuditManager;
import org.apache.druid.client.indexing.ClientTaskQuery;
import org.apache.druid.common.config.ConfigManager.SetResult;
import org.apache.druid.common.config.JacksonConfigManager;
import org.apache.druid.indexer.RunnerTaskState;
import org.apache.druid.indexer.TaskInfo;
import org.apache.druid.indexer.TaskLocation;
import org.apache.druid.indexer.TaskState;
import org.apache.druid.indexer.TaskStatus;
import org.apache.druid.indexer.TaskStatusPlus;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.actions.TaskActionHolder;
import org.apache.druid.indexing.common.task.Task;
import org.apache.druid.indexing.overlord.IndexerMetadataStorageAdapter;
import org.apache.druid.indexing.overlord.TaskMaster;
import org.apache.druid.indexing.overlord.TaskQueue;
import org.apache.druid.indexing.overlord.TaskRunner;
import org.apache.druid.indexing.overlord.TaskRunnerWorkItem;
import org.apache.druid.indexing.overlord.TaskStorageQueryAdapter;
import org.apache.druid.indexing.overlord.WorkerTaskRunner;
import org.apache.druid.indexing.overlord.WorkerTaskRunnerQueryAdapter;
import org.apache.druid.indexing.overlord.autoscaling.ScalingStats;
import org.apache.druid.indexing.overlord.http.security.TaskResourceFilter;
import org.apache.druid.indexing.overlord.setup.WorkerBehaviorConfig;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.metadata.EntryExistsException;
import org.apache.druid.server.http.HttpMediaType;
import org.apache.druid.server.http.security.ConfigResourceFilter;
import org.apache.druid.server.http.security.DatasourceResourceFilter;
import org.apache.druid.server.http.security.StateResourceFilter;
import org.apache.druid.server.security.Access;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.ForbiddenException;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.apache.druid.tasklogs.TaskLogStreamer;
import org.apache.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Duration;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * Overlord HTTP resource under {@code /druid/indexer/v1}: task submission,
 * status and shutdown, worker configuration and enable/disable, audit
 * history, and pending-segment cleanup.
 */
@Path("/druid/indexer/v1")
public class OverlordResource
{
private static final Logger log = new Logger(OverlordResource.class);
private final TaskMaster taskMaster;
private final TaskStorageQueryAdapter taskStorageQueryAdapter;
private final IndexerMetadataStorageAdapter indexerMetadataStorageAdapter;
private final TaskLogStreamer taskLogStreamer;
private final JacksonConfigManager configManager;
private final AuditManager auditManager;
private final AuthorizerMapper authorizerMapper;
private final WorkerTaskRunnerQueryAdapter workerTaskRunnerQueryAdapter;
// Lazily initialized in getWorkerConfig(); volatile so the benign
// check-then-set race publishes the reference safely across request threads.
private volatile AtomicReference<WorkerBehaviorConfig> workerConfigRef = null;
// Valid values for the "state" query parameter of getTasks().
// Typed List<String> — the original declaration used a raw List.
private static final List<String> API_TASK_STATES = ImmutableList.of("pending", "waiting", "running", "complete");
// Guice-injected wiring; all collaborators are shared singletons used by
// the endpoint methods below.
@Inject
public OverlordResource(
TaskMaster taskMaster,
TaskStorageQueryAdapter taskStorageQueryAdapter,
IndexerMetadataStorageAdapter indexerMetadataStorageAdapter,
TaskLogStreamer taskLogStreamer,
JacksonConfigManager configManager,
AuditManager auditManager,
AuthorizerMapper authorizerMapper,
WorkerTaskRunnerQueryAdapter workerTaskRunnerQueryAdapter
)
{
this.taskMaster = taskMaster;
this.taskStorageQueryAdapter = taskStorageQueryAdapter;
this.indexerMetadataStorageAdapter = indexerMetadataStorageAdapter;
this.taskLogStreamer = taskLogStreamer;
this.configManager = configManager;
this.auditManager = auditManager;
this.authorizerMapper = authorizerMapper;
this.workerTaskRunnerQueryAdapter = workerTaskRunnerQueryAdapter;
}
/**
 * Submits a new task to the task queue on the leader Overlord.
 *
 * <p>Warning, magic: {@link org.apache.druid.client.indexing.HttpIndexingServiceClient#runTask} may call this method
 * remotely with {@link ClientTaskQuery} objects, but we deserialize {@link Task} objects. See the comment for {@link
 * ClientTaskQuery} for details.
 */
@POST
@Path("/task")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response taskPost(final Task task, @Context final HttpServletRequest req)
{
  // The submitter needs WRITE access on the task's datasource.
  final ResourceAction resourceAction = new ResourceAction(
      new Resource(task.getDataSource(), ResourceType.DATASOURCE),
      Action.WRITE
  );
  final Access authResult = AuthorizationUtils.authorizeResourceAction(
      req,
      resourceAction,
      authorizerMapper
  );
  if (!authResult.isAllowed()) {
    throw new ForbiddenException(authResult.getMessage());
  }
  return asLeaderWith(
      taskMaster.getTaskQueue(),
      taskQueue -> {
        try {
          taskQueue.add(task);
          return Response.ok(ImmutableMap.of("task", task.getId())).build();
        }
        catch (EntryExistsException e) {
          // Duplicate submission of the same task id.
          return Response.status(Response.Status.BAD_REQUEST)
                         .entity(
                             ImmutableMap.of(
                                 "error",
                                 StringUtils.format("Task[%s] already exists!", task.getId())
                             )
                         )
                         .build();
        }
      }
  );
}
@GET
@Path("/leader")
@ResourceFilters(StateResourceFilter.class)
@Produces(MediaType.APPLICATION_JSON)
// Returns the current leader as reported by the task master.
public Response getLeader()
{
return Response.ok(taskMaster.getCurrentLeader()).build();
}
/**
 * Reports whether this Overlord is the leader. Responds 200 when leading,
 * 404 otherwise (same body either way). This is an unsecured endpoint,
 * defined as such in UNSECURED_PATHS in CliOverlord.
 */
@GET
@Path("/isLeader")
@Produces(MediaType.APPLICATION_JSON)
public Response isLeader()
{
  final boolean leading = taskMaster.isLeader();
  final Map<String, Boolean> response = ImmutableMap.of("leader", leading);
  return leading
         ? Response.ok(response).build()
         : Response.status(Response.Status.NOT_FOUND).entity(response).build();
}
/**
 * Returns the stored payload of a task: 200 with the payload when the
 * task id is known, 404 (with a null payload in the body) otherwise.
 */
@GET
@Path("/task/{taskid}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(TaskResourceFilter.class)
public Response getTaskPayload(@PathParam("taskid") String taskid)
{
  final Task task = taskStorageQueryAdapter.getTask(taskid).orNull();
  final TaskPayloadResponse response = new TaskPayloadResponse(taskid, task);
  if (response.getPayload() == null) {
    return Response.status(Response.Status.NOT_FOUND).entity(response).build();
  }
  return Response.status(Response.Status.OK).entity(response).build();
}
@GET
@Path("/task/{taskid}/status")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(TaskResourceFilter.class)
// Returns the status of a single task. Prefers the live runner state and
// location when the runner currently knows the task; otherwise falls back
// to the stored status. Responds 404 when the task id is unknown.
public Response getTaskStatus(@PathParam("taskid") String taskid)
{
final TaskInfo<Task, TaskStatus> taskInfo = taskStorageQueryAdapter.getTaskInfo(taskid);
TaskStatusResponse response = null;
if (taskInfo != null) {
if (taskMaster.getTaskRunner().isPresent()) {
final TaskRunner taskRunner = taskMaster.getTaskRunner().get();
final TaskRunnerWorkItem workItem = taskRunner
.getKnownTasks()
.stream()
.filter(item -> item.getTaskId().equals(taskid))
.findAny()
.orElse(null);
if (workItem != null) {
// Runner knows the task: report the live runner state and location.
response = new TaskStatusResponse(
workItem.getTaskId(),
new TaskStatusPlus(
taskInfo.getId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getGroupId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getType(),
taskInfo.getCreatedTime(),
// Would be nice to include the real queue insertion time, but the
// TaskStorage API doesn't yet allow it.
DateTimes.EPOCH,
taskInfo.getStatus().getStatusCode(),
taskRunner.getRunnerTaskState(workItem.getTaskId()),
taskInfo.getStatus().getDuration(),
workItem.getLocation(),
taskInfo.getDataSource(),
taskInfo.getStatus().getErrorMsg()
)
);
}
}
if (response == null) {
// Fallback: runner absent or task not currently known to it.
response = new TaskStatusResponse(
taskid,
new TaskStatusPlus(
taskInfo.getId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getGroupId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getType(),
taskInfo.getCreatedTime(),
// Would be nice to include the real queue insertion time, but the
// TaskStorage API doesn't yet allow it.
DateTimes.EPOCH,
taskInfo.getStatus().getStatusCode(),
RunnerTaskState.WAITING,
taskInfo.getStatus().getDuration(),
taskInfo.getStatus().getLocation() == null ? TaskLocation.unknown() : taskInfo.getStatus().getLocation(),
taskInfo.getDataSource(),
taskInfo.getStatus().getErrorMsg()
)
);
}
} else {
response = new TaskStatusResponse(taskid, null);
}
final Response.Status status = response.getStatus() == null
? Response.Status.NOT_FOUND
: Response.Status.OK;
return Response.status(status).entity(response).build();
}
/**
 * Returns the segments inserted by the given task, as recorded in
 * metadata storage.
 *
 * @deprecated retained for compatibility with older clients
 */
@Deprecated
@GET
@Path("/task/{taskid}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(TaskResourceFilter.class)
public Response getTaskSegments(@PathParam("taskid") String taskid)
{
  return Response.ok().entity(taskStorageQueryAdapter.getInsertedSegments(taskid)).build();
}
/**
 * Requests shutdown of a single task on the leader Overlord.
 */
@POST
@Path("/task/{taskid}/shutdown")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(TaskResourceFilter.class)
public Response doShutdown(@PathParam("taskid") final String taskid)
{
  return asLeaderWith(
      taskMaster.getTaskQueue(),
      taskQueue -> {
        taskQueue.shutdown(taskid, "Shutdown request from user");
        return Response.ok(ImmutableMap.of("task", taskid)).build();
      }
  );
}
/**
 * Requests shutdown of every active task of a datasource. Responds 404
 * when the datasource has no active tasks.
 */
@POST
@Path("/datasources/{dataSource}/shutdownAllTasks")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response shutdownTasksForDataSource(@PathParam("dataSource") final String dataSource)
{
  return asLeaderWith(
      taskMaster.getTaskQueue(),
      taskQueue -> {
        final List<TaskInfo<Task, TaskStatus>> tasks = taskStorageQueryAdapter.getActiveTaskInfo(dataSource);
        if (tasks.isEmpty()) {
          return Response.status(Status.NOT_FOUND).build();
        }
        for (final TaskInfo<Task, TaskStatus> task : tasks) {
          taskQueue.shutdown(task.getId(), "Shutdown request from user");
        }
        return Response.ok(ImmutableMap.of("dataSource", dataSource)).build();
      }
  );
}
/**
 * Bulk status lookup: returns a map of task id to status for every
 * requested id that is known; unknown ids are silently omitted.
 * Responds 400 when no ids are supplied.
 */
@POST
@Path("/taskStatus")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response getMultipleTaskStatuses(Set<String> taskIds)
{
  // isEmpty() instead of size() == 0 (idiomatic, same behavior).
  if (taskIds == null || taskIds.isEmpty()) {
    return Response.status(Response.Status.BAD_REQUEST).entity("No TaskIds provided.").build();
  }
  final Map<String, TaskStatus> result = Maps.newHashMapWithExpectedSize(taskIds.size());
  for (String taskId : taskIds) {
    final Optional<TaskStatus> optional = taskStorageQueryAdapter.getStatus(taskId);
    if (optional.isPresent()) {
      result.put(taskId, optional.get());
    }
  }
  return Response.ok().entity(result).build();
}
@GET
@Path("/worker")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(ConfigResourceFilter.class)
// Returns the current worker behavior config via the config manager.
public Response getWorkerConfig()
{
// NOTE(review): unsynchronized check-then-set on a shared non-volatile
// field; concurrent requests may each call configManager.watch() —
// presumably idempotent, but confirm, and consider making the field
// volatile or initializing it eagerly.
if (workerConfigRef == null) {
workerConfigRef = configManager.watch(WorkerBehaviorConfig.CONFIG_KEY, WorkerBehaviorConfig.class);
}
return Response.ok(workerConfigRef.get()).build();
}
/**
 * Replaces the worker behavior config, recording author/comment headers
 * as audit info. Default values on the headers preserve backwards
 * compatibility with clients that do not send them. Responds 400 when
 * the config manager rejects the update.
 */
@POST
@Path("/worker")
@Consumes(MediaType.APPLICATION_JSON)
@ResourceFilters(ConfigResourceFilter.class)
public Response setWorkerConfig(
    final WorkerBehaviorConfig workerBehaviorConfig,
    @HeaderParam(AuditManager.X_DRUID_AUTHOR) @DefaultValue("") final String author,
    @HeaderParam(AuditManager.X_DRUID_COMMENT) @DefaultValue("") final String comment,
    @Context final HttpServletRequest req
)
{
  final SetResult setResult = configManager.set(
      WorkerBehaviorConfig.CONFIG_KEY,
      workerBehaviorConfig,
      new AuditInfo(author, comment, req.getRemoteAddr())
  );
  if (!setResult.isOk()) {
    return Response.status(Response.Status.BAD_REQUEST).build();
  }
  log.info("Updating Worker configs: %s", workerBehaviorConfig);
  return Response.ok().build();
}
/**
 * Returns the audit history of worker-config changes, filtered either by
 * an interval or — when only "count" is supplied — by a number of most
 * recent entries. An invalid count yields 400.
 */
@GET
@Path("/worker/history")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(ConfigResourceFilter.class)
public Response getWorkerConfigHistory(
    @QueryParam("interval") final String interval,
    @QueryParam("count") final Integer count
)
{
  final Interval theInterval = interval == null ? null : Intervals.of(interval);
  // Interval-based lookup (theInterval may be null => unbounded) applies
  // unless ONLY a count was provided.
  if (theInterval != null || count == null) {
    final List<AuditEntry> workerEntryList = auditManager.fetchAuditHistory(
        WorkerBehaviorConfig.CONFIG_KEY,
        WorkerBehaviorConfig.CONFIG_KEY,
        theInterval
    );
    return Response.ok(workerEntryList).build();
  }
  try {
    final List<AuditEntry> workerEntryList = auditManager.fetchAuditHistory(
        WorkerBehaviorConfig.CONFIG_KEY,
        WorkerBehaviorConfig.CONFIG_KEY,
        count
    );
    return Response.ok(workerEntryList).build();
  }
  catch (IllegalArgumentException e) {
    return Response.status(Response.Status.BAD_REQUEST)
                   .entity(ImmutableMap.<String, Object>of("error", e.getMessage()))
                   .build();
  }
}
/**
 * Submits a task action through the leader's task-action client and
 * returns its result under the "result" key; failures yield 500 with the
 * error message.
 */
@POST
@Path("/action")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response doAction(final TaskActionHolder holder)
{
  return asLeaderWith(
      taskMaster.getTaskActionClient(holder.getTask()),
      taskActionClient -> {
        // It would be great to verify that this worker is actually supposed to be running the task before
        // actually doing the action. Some ideas for how that could be done would be using some sort of attempt_id
        // or token that gets passed around.
        final Map<String, Object> retMap;
        try {
          final Object ret = taskActionClient.submit(holder.getAction());
          retMap = new HashMap<>();
          retMap.put("result", ret);
        }
        catch (Exception e) {
          log.warn(e, "Failed to perform task action");
          return Response.serverError().entity(ImmutableMap.of("error", e.getMessage())).build();
        }
        return Response.ok().entity(retMap).build();
      }
  );
}
@GET
@Path("/waitingTasks")
@Produces(MediaType.APPLICATION_JSON)
// Convenience endpoint: delegates to getTasks() with state "waiting".
public Response getWaitingTasks(@Context final HttpServletRequest req)
{
return getTasks("waiting", null, null, null, null, req);
}
@GET
@Path("/pendingTasks")
@Produces(MediaType.APPLICATION_JSON)
// Convenience endpoint: delegates to getTasks() with state "pending".
public Response getPendingTasks(@Context final HttpServletRequest req)
{
return getTasks("pending", null, null, null, null, req);
}
@GET
@Path("/runningTasks")
@Produces(MediaType.APPLICATION_JSON)
// Convenience endpoint: delegates to getTasks() with state "running",
// optionally filtered by task type.
public Response getRunningTasks(
@QueryParam("type") String taskType,
@Context final HttpServletRequest req
)
{
return getTasks("running", null, null, null, taskType, req);
}
@GET
@Path("/completeTasks")
@Produces(MediaType.APPLICATION_JSON)
// Convenience endpoint: delegates to getTasks() with state "complete",
// optionally capping the number of returned statuses via "n".
public Response getCompleteTasks(
@QueryParam("n") final Integer maxTaskStatuses,
@Context final HttpServletRequest req
)
{
return getTasks("complete", null, null, maxTaskStatuses, null, req);
}
// Central task-listing endpoint. Validates the requested state, performs
// an early datasource authorization check, then assembles completed
// and/or active (waiting/pending/running) task statuses and filters the
// combined list through per-task authorization before returning it.
@GET
@Path("/tasks")
@Produces(MediaType.APPLICATION_JSON)
public Response getTasks(
@QueryParam("state") final String state,
@QueryParam("datasource") final String dataSource,
@QueryParam("createdTimeInterval") final String createdTimeInterval,
@QueryParam("max") final Integer maxCompletedTasks,
@QueryParam("type") final String type,
@Context final HttpServletRequest req
)
{
//check for valid state
if (state != null) {
if (!API_TASK_STATES.contains(StringUtils.toLowerCase(state))) {
return Response.status(Status.BAD_REQUEST)
.entity(StringUtils.format("Invalid state : %s, valid values are: %s", state, API_TASK_STATES))
.build();
}
}
// early authorization check if datasource != null
// fail fast if user not authorized to access datasource
if (dataSource != null) {
final ResourceAction resourceAction = new ResourceAction(
new Resource(dataSource, ResourceType.DATASOURCE),
Action.READ
);
final Access authResult = AuthorizationUtils.authorizeResourceAction(
req,
resourceAction,
authorizerMapper
);
if (!authResult.isAllowed()) {
throw new WebApplicationException(
Response.status(Response.Status.FORBIDDEN)
.entity(StringUtils.format("Access-Check-Result: %s", authResult.toString()))
.build()
);
}
}
List<TaskStatusPlus> finalTaskList = new ArrayList<>();
// Converts an active-task work item into the API status shape.
Function<AnyTask, TaskStatusPlus> activeTaskTransformFunc = workItem -> new TaskStatusPlus(
workItem.getTaskId(),
workItem.getTaskGroupId(),
workItem.getTaskType(),
workItem.getCreatedTime(),
workItem.getQueueInsertionTime(),
workItem.getTaskState(),
workItem.getRunnerTaskState(),
null,
workItem.getLocation(),
workItem.getDataSource(),
null
);
// Converts a stored (completed) task record into the API status shape.
Function<TaskInfo<Task, TaskStatus>, TaskStatusPlus> completeTaskTransformFunc = taskInfo -> new TaskStatusPlus(
taskInfo.getId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getGroupId(),
taskInfo.getTask() == null ? null : taskInfo.getTask().getType(),
taskInfo.getCreatedTime(),
// Would be nice to include the real queue insertion time, but the
// TaskStorage API doesn't yet allow it.
DateTimes.EPOCH,
taskInfo.getStatus().getStatusCode(),
RunnerTaskState.NONE,
taskInfo.getStatus().getDuration(),
taskInfo.getStatus().getLocation() == null ? TaskLocation.unknown() : taskInfo.getStatus().getLocation(),
taskInfo.getDataSource(),
taskInfo.getStatus().getErrorMsg()
);
//checking for complete tasks first to avoid querying active tasks if user only wants complete tasks
if (state == null || "complete".equals(StringUtils.toLowerCase(state))) {
Duration createdTimeDuration = null;
if (createdTimeInterval != null) {
final Interval theInterval = Intervals.of(StringUtils.replace(createdTimeInterval, "_", "/"));
createdTimeDuration = theInterval.toDuration();
}
final List<TaskInfo<Task, TaskStatus>> taskInfoList =
taskStorageQueryAdapter.getCompletedTaskInfoByCreatedTimeDuration(maxCompletedTasks, createdTimeDuration, dataSource);
final List<TaskStatusPlus> completedTasks = taskInfoList.stream()
.map(completeTaskTransformFunc::apply)
.collect(Collectors.toList());
finalTaskList.addAll(completedTasks);
}
final List<TaskInfo<Task, TaskStatus>> allActiveTaskInfo;
final List<AnyTask> allActiveTasks = new ArrayList<>();
if (state == null || !"complete".equals(StringUtils.toLowerCase(state))) {
allActiveTaskInfo = taskStorageQueryAdapter.getActiveTaskInfo(dataSource);
for (final TaskInfo<Task, TaskStatus> task : allActiveTaskInfo) {
allActiveTasks.add(
new AnyTask(
task.getId(),
task.getTask() == null ? null : task.getTask().getGroupId(),
task.getTask() == null ? null : task.getTask().getType(),
SettableFuture.create(),
task.getDataSource(),
null,
null,
task.getCreatedTime(),
DateTimes.EPOCH,
TaskLocation.unknown()
));
}
}
if (state == null || "waiting".equals(StringUtils.toLowerCase(state))) {
final List<AnyTask> waitingWorkItems = filterActiveTasks(RunnerTaskState.WAITING, allActiveTasks);
List<TaskStatusPlus> transformedWaitingList = waitingWorkItems.stream()
.map(activeTaskTransformFunc::apply)
.collect(Collectors.toList());
finalTaskList.addAll(transformedWaitingList);
}
if (state == null || "pending".equals(StringUtils.toLowerCase(state))) {
final List<AnyTask> pendingWorkItems = filterActiveTasks(RunnerTaskState.PENDING, allActiveTasks);
List<TaskStatusPlus> transformedPendingList = pendingWorkItems.stream()
.map(activeTaskTransformFunc::apply)
.collect(Collectors.toList());
finalTaskList.addAll(transformedPendingList);
}
if (state == null || "running".equals(StringUtils.toLowerCase(state))) {
final List<AnyTask> runningWorkItems = filterActiveTasks(RunnerTaskState.RUNNING, allActiveTasks);
List<TaskStatusPlus> transformedRunningList = runningWorkItems.stream()
.map(activeTaskTransformFunc::apply)
.collect(Collectors.toList());
finalTaskList.addAll(transformedRunningList);
}
// Per-task authorization filter (also applies the optional type filter).
final List<TaskStatusPlus> authorizedList = securedTaskStatusPlus(
finalTaskList,
dataSource,
type,
req
);
return Response.ok(authorizedList).build();
}
// Deletes pending segments of a datasource within the given interval.
// Requires both READ and WRITE access on the datasource; only the leader
// performs the deletion (non-leaders respond 503).
@DELETE
@Path("/pendingSegments/{dataSource}")
@Produces(MediaType.APPLICATION_JSON)
public Response killPendingSegments(
@PathParam("dataSource") String dataSource,
@QueryParam("interval") String deleteIntervalString,
@Context HttpServletRequest request
)
{
final Interval deleteInterval = Intervals.of(deleteIntervalString);
// check auth for dataSource
final Access authResult = AuthorizationUtils.authorizeAllResourceActions(
request,
ImmutableList.of(
new ResourceAction(new Resource(dataSource, ResourceType.DATASOURCE), Action.READ),
new ResourceAction(new Resource(dataSource, ResourceType.DATASOURCE), Action.WRITE)
),
authorizerMapper
);
if (!authResult.isAllowed()) {
throw new ForbiddenException(authResult.getMessage());
}
if (taskMaster.isLeader()) {
final int numDeleted = indexerMetadataStorageAdapter.deletePendingSegments(dataSource, deleteInterval);
return Response.ok().entity(ImmutableMap.of("numDeleted", numDeleted)).build();
} else {
return Response.status(Status.SERVICE_UNAVAILABLE).build();
}
}
/**
 * Lists the middle-manager workers known to the runner.
 * Returns 503 when not leader, and a server error when the configured runner
 * is not a {@link WorkerTaskRunner} (e.g. a local runner with no workers).
 */
@GET
@Path("/workers")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response getWorkers()
{
  return asLeaderWith(
      taskMaster.getTaskRunner(),
      taskRunner -> {
        if (!(taskRunner instanceof WorkerTaskRunner)) {
          log.debug(
              "Task runner [%s] of type [%s] does not support listing workers",
              taskRunner,
              taskRunner.getClass().getName()
          );
          return Response.serverError()
                         .entity(ImmutableMap.of("error", "Task Runner does not support worker listing"))
                         .build();
        }
        return Response.ok(((WorkerTaskRunner) taskRunner).getWorkers()).build();
      }
  );
}
/** Re-enables task assignment to the given worker host. */
@POST
@Path("/worker/{host}/enable")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response enableWorker(@PathParam("host") final String host)
{
  return changeWorkerStatus(host, WorkerTaskRunner.ActionType.ENABLE);
}
/** Stops task assignment to the given worker host (existing tasks keep running). */
@POST
@Path("/worker/{host}/disable")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response disableWorker(@PathParam("host") final String host)
{
  return changeWorkerStatus(host, WorkerTaskRunner.ActionType.DISABLE);
}
/**
 * Applies an enable/disable action to a worker host via the query adapter.
 *
 * @return 200 with {host: "enabled"/"disabled"}, or a server error for an
 *         unsupported action or any adapter failure
 */
private Response changeWorkerStatus(String host, WorkerTaskRunner.ActionType action)
{
  try {
    // Enum identity comparison is null-safe: a null action falls through to
    // the unsupported-action branch, exactly like the original equals() form.
    if (action == WorkerTaskRunner.ActionType.DISABLE) {
      workerTaskRunnerQueryAdapter.disableWorker(host);
      return Response.ok(ImmutableMap.of(host, "disabled")).build();
    }
    if (action == WorkerTaskRunner.ActionType.ENABLE) {
      workerTaskRunnerQueryAdapter.enableWorker(host);
      return Response.ok(ImmutableMap.of(host, "enabled")).build();
    }
    return Response.serverError()
                   .entity(ImmutableMap.of("error", "Worker does not support " + action + " action!"))
                   .build();
  }
  catch (Exception e) {
    log.error(e, "Error in posting [%s] action to [%s]", action, host);
    return Response.serverError()
                   .entity(ImmutableMap.of("error", e.getMessage()))
                   .build();
  }
}
/** Returns autoscaler statistics, or an empty 200 when no autoscaler is configured. */
@GET
@Path("/scaling")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(StateResourceFilter.class)
public Response getScalingState()
{
  // Don't use asLeaderWith, since we want to return 200 instead of 503 when missing an autoscaler.
  final Optional<ScalingStats> stats = taskMaster.getScalingStats();
  return stats.isPresent() ? Response.ok(stats.get()).build() : Response.ok().build();
}
/**
 * Streams the log of a task starting at {@code offset} bytes.
 *
 * @return 200 with the log stream, 404 when no log exists yet, 500 on stream failure
 */
@GET
@Path("/task/{taskid}/log")
@Produces(HttpMediaType.TEXT_PLAIN_UTF8)
@ResourceFilters(TaskResourceFilter.class)
public Response doGetLog(
    @PathParam("taskid") final String taskid,
    @QueryParam("offset") @DefaultValue("0") final long offset
)
{
  try {
    final Optional<ByteSource> logStream = taskLogStreamer.streamTaskLog(taskid, offset);
    if (!logStream.isPresent()) {
      return Response.status(Response.Status.NOT_FOUND)
                     .entity(
                         "No log was found for this task. "
                         + "The task may not exist, or it may not have begun running yet."
                     )
                     .build();
    }
    return Response.ok(logStream.get().openStream()).build();
  }
  catch (Exception e) {
    log.warn(e, "Failed to stream log for task %s", taskid);
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
  }
}
/**
 * Streams the JSON reports produced by a task.
 *
 * @return 200 with the report stream, 404 when no reports exist, 500 on stream failure
 */
@GET
@Path("/task/{taskid}/reports")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(TaskResourceFilter.class)
public Response doGetReports(
    @PathParam("taskid") final String taskid
)
{
  try {
    final Optional<ByteSource> reportStream = taskLogStreamer.streamTaskReports(taskid);
    if (!reportStream.isPresent()) {
      return Response.status(Response.Status.NOT_FOUND)
                     .entity(
                         "No task reports were found for this task. "
                         + "The task may not exist, or it may not have completed yet."
                     )
                     .build();
    }
    return Response.ok(reportStream.get().openStream()).build();
  }
  catch (Exception e) {
    log.warn(e, "Failed to stream task reports for task %s", taskid);
    return Response.status(Response.Status.INTERNAL_SERVER_ERROR).build();
  }
}
/**
 * Applies {@code f} to the value when present (i.e. when this node is leader),
 * otherwise returns 503 so the client retries once a redirect is established.
 */
private <T> Response asLeaderWith(Optional<T> x, Function<T, Response> f)
{
  // Encourage client to try again soon, when we'll likely have a redirect set up
  return x.isPresent()
         ? f.apply(x.get())
         : Response.status(Response.Status.SERVICE_UNAVAILABLE).build();
}
/**
 * Returns the subset of {@code allTasks} currently in the given runner state.
 * An unknown/null state returns the list unchanged.
 *
 * @throws WebApplicationException (500) when no task runner is available
 */
private List<AnyTask> filterActiveTasks(
    RunnerTaskState state,
    List<AnyTask> allTasks
)
{
  //divide active tasks into 3 lists : running, pending, waiting
  Optional<TaskRunner> taskRunnerOpt = taskMaster.getTaskRunner();
  if (!taskRunnerOpt.isPresent()) {
    throw new WebApplicationException(
        Response.serverError().entity("No task runner found").build()
    );
  }
  TaskRunner runner = taskRunnerOpt.get();
  // the order of tasks below is waiting, pending, running to prevent
  // skipping a task, it's the order in which tasks will change state
  // if they do while this is code is executing, so a task might be
  // counted twice but never skipped
  if (RunnerTaskState.WAITING.equals(state)) {
    // Waiting == active but not yet known to the runner at all.
    Set<String> runnerKnownTaskIds = runner.getKnownTasks()
                                           .stream()
                                           .map(TaskRunnerWorkItem::getTaskId)
                                           .collect(Collectors.toSet());
    final List<AnyTask> waitingTasks = new ArrayList<>();
    for (TaskRunnerWorkItem task : allTasks) {
      if (!runnerKnownTaskIds.contains(task.getTaskId())) {
        waitingTasks.add(((AnyTask) task).withTaskState(
            TaskState.RUNNING,
            RunnerTaskState.WAITING,
            task.getCreatedTime(),
            task.getQueueInsertionTime(),
            task.getLocation()
        ));
      }
    }
    return waitingTasks;
  }
  // PENDING and RUNNING shared identical selection logic; it now lives in
  // restampTasksKnownToRunner() instead of being duplicated here.
  if (RunnerTaskState.PENDING.equals(state)) {
    return restampTasksKnownToRunner(allTasks, runner.getPendingTasks(), RunnerTaskState.PENDING);
  }
  if (RunnerTaskState.RUNNING.equals(state)) {
    return restampTasksKnownToRunner(allTasks, runner.getRunningTasks(), RunnerTaskState.RUNNING);
  }
  return allTasks;
}

/**
 * Returns the tasks from {@code allTasks} whose ids the runner reports in
 * {@code knownWorkItems}, re-stamped with {@code runnerState} and the
 * creation/queue/location data from the runner's own work item.
 */
private List<AnyTask> restampTasksKnownToRunner(
    List<AnyTask> allTasks,
    Collection<? extends TaskRunnerWorkItem> knownWorkItems,
    RunnerTaskState runnerState
)
{
  // Last-writer-wins merge mirrors the original behavior for duplicate ids.
  Map<String, TaskRunnerWorkItem> workItemIdMap = knownWorkItems
      .stream()
      .collect(Collectors.toMap(
          TaskRunnerWorkItem::getTaskId,
          java.util.function.Function.identity(),
          (previousWorkItem, newWorkItem) -> newWorkItem
      ));
  final List<AnyTask> matched = new ArrayList<>();
  for (TaskRunnerWorkItem task : allTasks) {
    // Single lookup replaces the original's separate Set membership test
    // plus three repeated map gets per matching task.
    TaskRunnerWorkItem workItem = workItemIdMap.get(task.getTaskId());
    if (workItem != null) {
      matched.add(((AnyTask) task).withTaskState(
          TaskState.RUNNING,
          runnerState,
          workItem.getCreatedTime(),
          workItem.getQueueInsertionTime(),
          workItem.getLocation()
      ));
    }
  }
  return matched;
}
/**
 * Filters task statuses to those the caller may see: first narrows by task
 * type (when given), then either skips auth (datasource-scoped requests were
 * already authorized upstream) or keeps only tasks whose datasource the
 * caller can READ.
 */
private List<TaskStatusPlus> securedTaskStatusPlus(
    List<TaskStatusPlus> collectionToFilter,
    @Nullable String dataSource,
    @Nullable String type,
    HttpServletRequest req
)
{
  List<TaskStatusPlus> typeFiltered = collectionToFilter;
  if (type != null) {
    typeFiltered = collectionToFilter
        .stream()
        .filter(task -> type.equals(task.getType()))
        .collect(Collectors.toList());
  }

  if (dataSource != null) {
    //skip auth check here, as it's already done in getTasks
    return typeFiltered;
  }

  // Maps each task to the READ action on its datasource; a task without a
  // datasource is a server-side inconsistency and aborts the request.
  Function<TaskStatusPlus, Iterable<ResourceAction>> resourceActionGenerator = taskStatusPlus -> {
    final String taskId = taskStatusPlus.getId();
    final String taskDatasource = taskStatusPlus.getDataSource();
    if (taskDatasource == null) {
      throw new WebApplicationException(
          Response.serverError().entity(
              StringUtils.format("No task information found for task with id: [%s]", taskId)
          ).build()
      );
    }
    return Collections.singletonList(
        new ResourceAction(new Resource(taskDatasource, ResourceType.DATASOURCE), Action.READ)
    );
  };
  return Lists.newArrayList(
      AuthorizationUtils.filterAuthorizedResources(
          req,
          typeFiltered,
          resourceActionGenerator,
          authorizerMapper
      )
  );
}
/**
 * Immutable snapshot of any active task's metadata (type, datasource, states,
 * timing, location) used to build task-status listings. The superclass is
 * constructed with EPOCH timestamps because this class overrides
 * getCreatedTime()/getQueueInsertionTime() with its own values.
 */
private static class AnyTask extends TaskRunnerWorkItem
{
  private final String taskGroupId;
  private final String taskType;
  private final String dataSource;
  private final TaskState taskState;
  private final RunnerTaskState runnerTaskState;
  private final DateTime createdTime;
  private final DateTime queueInsertionTime;
  private final TaskLocation taskLocation;

  AnyTask(
      String taskId,
      String taskGroupId,
      String taskType,
      ListenableFuture<TaskStatus> result,
      String dataSource,
      TaskState state,
      RunnerTaskState runnerState,
      DateTime createdTime,
      DateTime queueInsertionTime,
      TaskLocation taskLocation
  )
  {
    // Dummy EPOCH values: the real timestamps live in this class's fields.
    super(taskId, result, DateTimes.EPOCH, DateTimes.EPOCH);
    this.taskGroupId = taskGroupId;
    this.taskType = taskType;
    this.dataSource = dataSource;
    this.taskState = state;
    this.runnerTaskState = runnerState;
    this.createdTime = createdTime;
    this.queueInsertionTime = queueInsertionTime;
    this.taskLocation = taskLocation;
  }

  @Override
  public TaskLocation getLocation()
  {
    return taskLocation;
  }

  @Override
  public String getTaskType()
  {
    return taskType;
  }

  @Override
  public String getDataSource()
  {
    return dataSource;
  }

  public String getTaskGroupId()
  {
    return taskGroupId;
  }

  public TaskState getTaskState()
  {
    return taskState;
  }

  public RunnerTaskState getRunnerTaskState()
  {
    return runnerTaskState;
  }

  @Override
  public DateTime getCreatedTime()
  {
    return createdTime;
  }

  @Override
  public DateTime getQueueInsertionTime()
  {
    return queueInsertionTime;
  }

  /**
   * Copy-with: returns a new AnyTask identical to this one except for the
   * given state, runner state, timing, and location values.
   */
  public AnyTask withTaskState(
      TaskState newTaskState,
      RunnerTaskState runnerState,
      DateTime createdTime,
      DateTime queueInsertionTime,
      TaskLocation taskLocation
  )
  {
    return new AnyTask(
        getTaskId(),
        getTaskGroupId(),
        getTaskType(),
        getResult(),
        getDataSource(),
        newTaskState,
        runnerState,
        createdTime,
        queueInsertionTime,
        taskLocation
    );
  }
}
}
| |
/*
* Copyright (c) 2013 Mozilla Foundation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
/*
* THIS IS A GENERATED FILE. PLEASE DO NOT EDIT.
* Instead, please regenerate using generate-encoding-data.py
*/
package org.whattf.datatype.data;
import java.util.Arrays;
public class CharsetData {
private static final String[] LABELS = {
"866",
"ansi_x3.4-1968",
"arabic",
"ascii",
"asmo-708",
"big5",
"big5-hkscs",
"chinese",
"cn-big5",
"cp1250",
"cp1251",
"cp1252",
"cp1253",
"cp1254",
"cp1255",
"cp1256",
"cp1257",
"cp1258",
"cp819",
"cp866",
"csbig5",
"cseuckr",
"cseucpkdfmtjapanese",
"csgb2312",
"csibm866",
"csiso2022jp",
"csiso2022kr",
"csiso58gb231280",
"csiso88596e",
"csiso88596i",
"csiso88598e",
"csiso88598i",
"csisolatin1",
"csisolatin2",
"csisolatin3",
"csisolatin4",
"csisolatin5",
"csisolatin6",
"csisolatin9",
"csisolatinarabic",
"csisolatincyrillic",
"csisolatingreek",
"csisolatinhebrew",
"cskoi8r",
"csksc56011987",
"csmacintosh",
"csshiftjis",
"cyrillic",
"dos-874",
"ecma-114",
"ecma-118",
"elot_928",
"euc-jp",
"euc-kr",
"gb18030",
"gb2312",
"gb_2312",
"gb_2312-80",
"gbk",
"greek",
"greek8",
"hebrew",
"hz-gb-2312",
"ibm819",
"ibm866",
"iso-2022-cn",
"iso-2022-cn-ext",
"iso-2022-jp",
"iso-2022-kr",
"iso-8859-1",
"iso-8859-10",
"iso-8859-11",
"iso-8859-13",
"iso-8859-14",
"iso-8859-15",
"iso-8859-16",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-5",
"iso-8859-6",
"iso-8859-6-e",
"iso-8859-6-i",
"iso-8859-7",
"iso-8859-8",
"iso-8859-8-e",
"iso-8859-8-i",
"iso-8859-9",
"iso-ir-100",
"iso-ir-101",
"iso-ir-109",
"iso-ir-110",
"iso-ir-126",
"iso-ir-127",
"iso-ir-138",
"iso-ir-144",
"iso-ir-148",
"iso-ir-149",
"iso-ir-157",
"iso-ir-58",
"iso8859-1",
"iso8859-10",
"iso8859-11",
"iso8859-13",
"iso8859-14",
"iso8859-15",
"iso8859-2",
"iso8859-3",
"iso8859-4",
"iso8859-5",
"iso8859-6",
"iso8859-7",
"iso8859-8",
"iso8859-9",
"iso88591",
"iso885910",
"iso885911",
"iso885913",
"iso885914",
"iso885915",
"iso88592",
"iso88593",
"iso88594",
"iso88595",
"iso88596",
"iso88597",
"iso88598",
"iso88599",
"iso_8859-1",
"iso_8859-15",
"iso_8859-1:1987",
"iso_8859-2",
"iso_8859-2:1987",
"iso_8859-3",
"iso_8859-3:1988",
"iso_8859-4",
"iso_8859-4:1988",
"iso_8859-5",
"iso_8859-5:1988",
"iso_8859-6",
"iso_8859-6:1987",
"iso_8859-7",
"iso_8859-7:1987",
"iso_8859-8",
"iso_8859-8:1988",
"iso_8859-9",
"iso_8859-9:1989",
"koi",
"koi8",
"koi8-r",
"koi8-u",
"koi8_r",
"korean",
"ks_c_5601-1987",
"ks_c_5601-1989",
"ksc5601",
"ksc_5601",
"l1",
"l2",
"l3",
"l4",
"l5",
"l6",
"l9",
"latin1",
"latin2",
"latin3",
"latin4",
"latin5",
"latin6",
"logical",
"mac",
"macintosh",
"ms_kanji",
"shift-jis",
"shift_jis",
"sjis",
"sun_eu_greek",
"tis-620",
"unicode-1-1-utf-8",
"us-ascii",
"utf-16",
"utf-16be",
"utf-16le",
"utf-8",
"utf8",
"visual",
"windows-1250",
"windows-1251",
"windows-1252",
"windows-1253",
"windows-1254",
"windows-1255",
"windows-1256",
"windows-1257",
"windows-1258",
"windows-31j",
"windows-874",
"windows-949",
"x-cp1250",
"x-cp1251",
"x-cp1252",
"x-cp1253",
"x-cp1254",
"x-cp1255",
"x-cp1256",
"x-cp1257",
"x-cp1258",
"x-euc-jp",
"x-gbk",
"x-mac-cyrillic",
"x-mac-roman",
"x-mac-ukrainian",
"x-sjis",
"x-user-defined",
"x-x-big5",
};
private static final String[] PREFERRED_FOR_LABELS = {
"ibm866",
"windows-1252",
"iso-8859-6",
"windows-1252",
"iso-8859-6",
"big5",
"big5",
"gbk",
"big5",
"windows-1250",
"windows-1251",
"windows-1252",
"windows-1253",
"windows-1254",
"windows-1255",
"windows-1256",
"windows-1257",
"windows-1258",
"windows-1252",
"ibm866",
"big5",
"euc-kr",
"euc-jp",
"gbk",
"ibm866",
"iso-2022-jp",
"iso-2022-kr",
"gbk",
"iso-8859-6",
"iso-8859-6",
"iso-8859-8",
"iso-8859-8-i",
"windows-1252",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"windows-1254",
"iso-8859-10",
"iso-8859-15",
"iso-8859-6",
"iso-8859-5",
"iso-8859-7",
"iso-8859-8",
"koi8-r",
"euc-kr",
"macintosh",
"shift_jis",
"iso-8859-5",
"windows-874",
"iso-8859-6",
"iso-8859-7",
"iso-8859-7",
"euc-jp",
"euc-kr",
"gb18030",
"gbk",
"gbk",
"gbk",
"gbk",
"iso-8859-7",
"iso-8859-7",
"iso-8859-8",
"hz-gb-2312",
"windows-1252",
"ibm866",
"replacement",
"replacement",
"iso-2022-jp",
"iso-2022-kr",
"windows-1252",
"iso-8859-10",
"windows-874",
"iso-8859-13",
"iso-8859-14",
"iso-8859-15",
"iso-8859-16",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-5",
"iso-8859-6",
"iso-8859-6",
"iso-8859-6",
"iso-8859-7",
"iso-8859-8",
"iso-8859-8",
"iso-8859-8-i",
"windows-1254",
"windows-1252",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-7",
"iso-8859-6",
"iso-8859-8",
"iso-8859-5",
"windows-1254",
"euc-kr",
"iso-8859-10",
"gbk",
"windows-1252",
"iso-8859-10",
"windows-874",
"iso-8859-13",
"iso-8859-14",
"iso-8859-15",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-5",
"iso-8859-6",
"iso-8859-7",
"iso-8859-8",
"windows-1254",
"windows-1252",
"iso-8859-10",
"windows-874",
"iso-8859-13",
"iso-8859-14",
"iso-8859-15",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-5",
"iso-8859-6",
"iso-8859-7",
"iso-8859-8",
"windows-1254",
"windows-1252",
"iso-8859-15",
"windows-1252",
"iso-8859-2",
"iso-8859-2",
"iso-8859-3",
"iso-8859-3",
"iso-8859-4",
"iso-8859-4",
"iso-8859-5",
"iso-8859-5",
"iso-8859-6",
"iso-8859-6",
"iso-8859-7",
"iso-8859-7",
"iso-8859-8",
"iso-8859-8",
"windows-1254",
"windows-1254",
"koi8-r",
"koi8-r",
"koi8-r",
"koi8-u",
"koi8-r",
"euc-kr",
"euc-kr",
"euc-kr",
"euc-kr",
"euc-kr",
"windows-1252",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"windows-1254",
"iso-8859-10",
"iso-8859-15",
"windows-1252",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"windows-1254",
"iso-8859-10",
"iso-8859-8-i",
"macintosh",
"macintosh",
"shift_jis",
"shift_jis",
"shift_jis",
"shift_jis",
"iso-8859-7",
"windows-874",
"utf-8",
"windows-1252",
"utf-16le",
"utf-16be",
"utf-16le",
"utf-8",
"utf-8",
"iso-8859-8",
"windows-1250",
"windows-1251",
"windows-1252",
"windows-1253",
"windows-1254",
"windows-1255",
"windows-1256",
"windows-1257",
"windows-1258",
"shift_jis",
"windows-874",
"euc-kr",
"windows-1250",
"windows-1251",
"windows-1252",
"windows-1253",
"windows-1254",
"windows-1255",
"windows-1256",
"windows-1257",
"windows-1258",
"euc-jp",
"gbk",
"x-mac-cyrillic",
"macintosh",
"x-mac-cyrillic",
"shift_jis",
"x-user-defined",
"big5",
};
private static final String[] PREFERRED = {
"big5",
"euc-jp",
"euc-kr",
"gb18030",
"gbk",
"hz-gb-2312",
"ibm866",
"iso-2022-jp",
"iso-2022-kr",
"iso-8859-10",
"iso-8859-13",
"iso-8859-14",
"iso-8859-15",
"iso-8859-16",
"iso-8859-2",
"iso-8859-3",
"iso-8859-4",
"iso-8859-5",
"iso-8859-6",
"iso-8859-7",
"iso-8859-8",
"iso-8859-8-i",
"koi8-r",
"koi8-u",
"macintosh",
"replacement",
"shift_jis",
"utf-16be",
"utf-16le",
"utf-8",
"windows-1250",
"windows-1251",
"windows-1252",
"windows-1253",
"windows-1254",
"windows-1255",
"windows-1256",
"windows-1257",
"windows-1258",
"windows-874",
"x-mac-cyrillic",
"x-user-defined",
};
/**
 * Returns whether {@code label} is itself a preferred encoding name.
 * PREFERRED is sorted, so membership is a binary-search hit.
 */
public static boolean isPreferred(String label) {
    return Arrays.binarySearch(PREFERRED, label) >= 0;
}
/**
 * Maps an encoding label to its preferred encoding name, or null when the
 * label is unknown. LABELS and PREFERRED_FOR_LABELS are parallel arrays, so
 * a binary-search hit in LABELS indexes directly into the preferred table.
 * Assumes the caller has already lowercased/trimmed the label -- TODO confirm.
 */
public static String preferredForLabel(String label) {
    int index = Arrays.binarySearch(LABELS, label);
    if (index < 0) {
        return null;
    }
    return PREFERRED_FOR_LABELS[index];
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Random;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.math.random.RandomData;
import org.apache.commons.math.random.RandomDataImpl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.util.Bytes;
/**
* <p>
* This class runs performance benchmarks for {@link HFile}.
* </p>
*/
public class HFilePerformanceEvaluation {
private static final int ROW_LENGTH = 10;
private static final int ROW_COUNT = 1000000;
private static final int RFILE_BLOCKSIZE = 8 * 1024;
static final Log LOG =
LogFactory.getLog(HFilePerformanceEvaluation.class.getName());
/**
 * Builds a fixed-width 10-character, zero-padded row key for {@code i}.
 * Assumes {@code i} is non-negative (negative values would produce keys like
 * "000000-123" and sort incorrectly) -- callers here only pass row indices.
 */
static byte [] format(final int i) {
  String v = Integer.toString(i);
  return Bytes.toBytes("0000000000".substring(v.length()) + v);
}
/** Writes the row key for {@code i} into the reusable writable and returns it. */
static ImmutableBytesWritable format(final int i, ImmutableBytesWritable w) {
  w.set(format(i));
  return w;
}
/**
 * Deletes any previous output file, writes the test HFile once, then runs
 * each read benchmark through the concurrent-read harness.
 *
 * NOTE(review): each read benchmark is constructed INSIDE its Runnable, so
 * every concurrent invocation presumably gets its own reader/scanner
 * instance. Do not hoist the construction out of run() without confirming
 * the benchmarks are safe to share across threads.
 */
private void runBenchmarks() throws Exception {
  final Configuration conf = new Configuration();
  final FileSystem fs = FileSystem.get(conf);
  final Path mf = fs.makeQualified(new Path("performanceevaluation.mapfile"));
  if (fs.exists(mf)) {
    fs.delete(mf, true);
  }
  // Write phase runs alone: the read benchmarks below depend on its output.
  runBenchmark(new SequentialWriteBenchmark(conf, fs, mf, ROW_COUNT),
      ROW_COUNT);
  PerformanceEvaluationCommons.concurrentReads(new Runnable() {
    public void run() {
      try {
        runBenchmark(new UniformRandomSmallScan(conf, fs, mf, ROW_COUNT),
            ROW_COUNT);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  });
  PerformanceEvaluationCommons.concurrentReads(new Runnable() {
    public void run() {
      try {
        runBenchmark(new UniformRandomReadBenchmark(conf, fs, mf, ROW_COUNT),
            ROW_COUNT);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  });
  PerformanceEvaluationCommons.concurrentReads(new Runnable() {
    public void run() {
      try {
        runBenchmark(new GaussianRandomReadBenchmark(conf, fs, mf, ROW_COUNT),
            ROW_COUNT);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  });
  PerformanceEvaluationCommons.concurrentReads(new Runnable() {
    public void run() {
      try {
        runBenchmark(new SequentialReadBenchmark(conf, fs, mf, ROW_COUNT),
            ROW_COUNT);
      } catch (Exception e) {
        e.printStackTrace();
      }
    }
  });
}
/** Logs start/finish around a single benchmark run and reports elapsed time. */
protected void runBenchmark(RowOrientedBenchmark benchmark, int rowCount)
    throws Exception {
  final String benchmarkName = benchmark.getClass().getSimpleName();
  LOG.info("Running " + benchmarkName + " for " + rowCount + " rows.");
  long elapsedTime = benchmark.run();
  LOG.info("Running " + benchmarkName + " for " + rowCount + " rows took " +
      elapsedTime + "ms.");
}
/**
 * Base class for row-oriented benchmarks: owns the shared
 * configuration/filesystem/path, and times a pass that calls
 * {@link #doRow(int)} once per row between setUp() and tearDown().
 */
static abstract class RowOrientedBenchmark {
  protected final Configuration conf;
  protected final FileSystem fs;
  protected final Path mf;
  protected final int totalRows;
  public RowOrientedBenchmark(Configuration conf, FileSystem fs, Path mf,
      int totalRows) {
    this.conf = conf;
    this.fs = fs;
    this.mf = mf;
    this.totalRows = totalRows;
  }
  void setUp() throws Exception {
    // do nothing
  }
  /** Processes a single row; invoked totalRows times by {@link #run()}. */
  abstract void doRow(int i) throws Exception;
  /** Rows between progress log lines; subclasses override to disable reporting. */
  protected int getReportingPeriod() {
    return this.totalRows / 10;
  }
  void tearDown() throws Exception {
    // do nothing
  }
  /**
   * Run benchmark
   * @return elapsed time in milliseconds.
   * @throws Exception
   */
  long run() throws Exception {
    long elapsedTime;
    setUp();
    long startTime = System.currentTimeMillis();
    try {
      // Hoisted out of the loop, and guarded against zero: the default
      // implementation returns totalRows / 10, which is 0 for totalRows < 10
      // and previously made "i % period" throw ArithmeticException.
      final int reportingPeriod = getReportingPeriod();
      for (int i = 0; i < totalRows; i++) {
        if (reportingPeriod > 0 && i > 0 && i % reportingPeriod == 0) {
          LOG.info("Processed " + i + " rows.");
        }
        doRow(i);
      }
      elapsedTime = System.currentTimeMillis() - startTime;
    } finally {
      tearDown();
    }
    return elapsedTime;
  }
}
/**
 * Writes totalRows sequential keys with random ROW_LENGTH-byte values into a
 * fresh HFile; the read benchmarks then run against this file.
 */
static class SequentialWriteBenchmark extends RowOrientedBenchmark {
  protected HFile.Writer writer;
  private Random random = new Random();
  // Reusable value buffer; generateValue() overwrites it in place each row.
  private byte[] bytes = new byte[ROW_LENGTH];
  public SequentialWriteBenchmark(Configuration conf, FileSystem fs, Path mf,
      int totalRows) {
    super(conf, fs, mf, totalRows);
  }
  @Override
  void setUp() throws Exception {
    HFileContext hFileContext = new HFileContextBuilder().withBlockSize(RFILE_BLOCKSIZE).build();
    // No-cache writer factory: benchmark writes should not pollute the block cache.
    writer =
      HFile.getWriterFactoryNoCache(conf)
          .withPath(fs, mf)
          .withFileContext(hFileContext)
          .withComparator(new KeyValue.RawBytesComparator())
          .create();
  }
  @Override
  void doRow(int i) throws Exception {
    writer.append(format(i), generateValue());
  }
  // Refills the shared buffer with random bytes and returns it (no copy).
  private byte[] generateValue() {
    random.nextBytes(bytes);
    return bytes;
  }
  @Override
  protected int getReportingPeriod() {
    return this.totalRows; // don't report progress
  }
  @Override
  void tearDown() throws Exception {
    writer.close();
  }
}
/** Base for the read benchmarks: opens an HFile reader in setUp, closes it in tearDown. */
static abstract class ReadBenchmark extends RowOrientedBenchmark {
  protected HFile.Reader reader;
  public ReadBenchmark(Configuration conf, FileSystem fs, Path mf,
      int totalRows) {
    super(conf, fs, mf, totalRows);
  }
  @Override
  void setUp() throws Exception {
    reader = HFile.createReader(this.fs, this.mf, new CacheConfig(this.conf), this.conf);
    // Load trailer/file-info metadata before any scans.
    this.reader.loadFileInfo();
  }
  @Override
  void tearDown() throws Exception {
    reader.close();
  }
}
/**
 * Scans the whole file in key order with a single scanner, verifying each key
 * and value size.
 */
static class SequentialReadBenchmark extends ReadBenchmark {
  private HFileScanner scanner;
  public SequentialReadBenchmark(Configuration conf, FileSystem fs,
      Path mf, int totalRows) {
    super(conf, fs, mf, totalRows);
  }
  @Override
  void setUp() throws Exception {
    super.setUp();
    this.scanner = this.reader.getScanner(false, false);
    // Position on the first row; doRow() then advances with next().
    this.scanner.seekTo();
  }
  @Override
  void doRow(int i) throws Exception {
    if (this.scanner.next()) {
      ByteBuffer k = this.scanner.getKey();
      // seekTo() consumed row 0, so iteration i sees row i + 1.
      PerformanceEvaluationCommons.assertKey(format(i + 1), k);
      ByteBuffer v = scanner.getValue();
      PerformanceEvaluationCommons.assertValueSize(v.limit(), ROW_LENGTH);
    }
  }
  @Override
  protected int getReportingPeriod() {
    return this.totalRows; // don't report progress
  }
}
/** Point-reads uniformly random rows, verifying the key and value size of each. */
static class UniformRandomReadBenchmark extends ReadBenchmark {
  private Random random = new Random();
  public UniformRandomReadBenchmark(Configuration conf, FileSystem fs,
      Path mf, int totalRows) {
    super(conf, fs, mf, totalRows);
  }
  @Override
  void doRow(int i) throws Exception {
    // Fresh positional-read scanner per row (cacheBlocks=false, pread=true).
    HFileScanner scanner = this.reader.getScanner(false, true);
    byte [] b = getRandomRow();
    // seekTo < 0 means the key precedes the first row; every generated key
    // exists in the file, so this is not expected to trigger.
    if (scanner.seekTo(b) < 0) {
      LOG.info("Not able to seekTo " + new String(b));
      return;
    }
    ByteBuffer k = scanner.getKey();
    PerformanceEvaluationCommons.assertKey(b, k);
    ByteBuffer v = scanner.getValue();
    PerformanceEvaluationCommons.assertValueSize(v.limit(), ROW_LENGTH);
  }
  private byte [] getRandomRow() {
    return format(random.nextInt(totalRows));
  }
}
/**
 * Seeks to a uniformly random row and scans the following 30 rows.
 *
 * NOTE(review): the constructor passes totalRows/10 to super, which both
 * reduces the iteration count AND restricts getRandomRow() to the first tenth
 * of the key space; it is unclear whether the second effect is intentional.
 */
static class UniformRandomSmallScan extends ReadBenchmark {
  private Random random = new Random();
  public UniformRandomSmallScan(Configuration conf, FileSystem fs,
      Path mf, int totalRows) {
    super(conf, fs, mf, totalRows/10);
  }
  @Override
  void doRow(int i) throws Exception {
    HFileScanner scanner = this.reader.getScanner(false, false);
    byte [] b = getRandomRow();
    // seekTo == 0 means an exact match; anything else is treated as a miss.
    if (scanner.seekTo(b) != 0) {
      LOG.info("Nonexistent row: " + new String(b));
      return;
    }
    ByteBuffer k = scanner.getKey();
    PerformanceEvaluationCommons.assertKey(b, k);
    // System.out.println("Found row: " + new String(b));
    for (int ii = 0; ii < 30; ii++) {
      if (!scanner.next()) {
        LOG.info("NOTHING FOLLOWS");
        return;
      }
      ByteBuffer v = scanner.getValue();
      PerformanceEvaluationCommons.assertValueSize(v.limit(), ROW_LENGTH);
    }
  }
  private byte [] getRandomRow() {
    return format(random.nextInt(totalRows));
  }
}
/**
 * Seeks to rows drawn from a Gaussian centered on the middle of the key space
 * (sigma = 10% of the range) and short-scans 30 rows from each seek point.
 */
static class GaussianRandomReadBenchmark extends ReadBenchmark {
  private RandomData randomData = new RandomDataImpl();
  public GaussianRandomReadBenchmark(Configuration conf, FileSystem fs,
      Path mf, int totalRows) {
    super(conf, fs, mf, totalRows);
  }
  @Override
  void doRow(int i) throws Exception {
    HFileScanner scanner = this.reader.getScanner(false, true);
    byte[] gaussianRandomRowBytes = getGaussianRandomRowBytes();
    scanner.seekTo(gaussianRandomRowBytes);
    for (int ii = 0; ii < 30; ii++) {
      if (!scanner.next()) {
        LOG.info("NOTHING FOLLOWS");
        return;
      }
      scanner.getKey();
      scanner.getValue();
    }
  }
  private byte [] getGaussianRandomRowBytes() {
    int r = (int) randomData.nextGaussian((double)totalRows / 2.0,
        (double)totalRows / 10.0);
    // make sure r falls into [0,totalRows): the previous upper clamp of
    // totalRows was off by one and could produce a key one past the last
    // written row (rows are keyed 0..totalRows-1).
    return format(Math.min(totalRows - 1, Math.max(r, 0)));
  }
}
/**
 * Command-line entry point: runs the full write-then-read benchmark suite.
 *
 * @param args unused
 * @throws Exception if any benchmark fails
 */
public static void main(String[] args) throws Exception {
  new HFilePerformanceEvaluation().runBenchmarks();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.EnvelopeBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.test.geo.RandomShapeGenerator;
import org.junit.After;
import org.junit.Test;
import java.io.IOException;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
public class GeoShapeQueryBuilderTests extends AbstractQueryTestCase<GeoShapeQueryBuilder> {
private static String indexedShapeId;
private static String indexedShapeType;
private static String indexedShapePath;
private static String indexedShapeIndex;
private static ShapeBuilder indexedShapeToReturn;
/**
 * Builds a randomized geo_shape query: either with the shape inlined, or as
 * an "indexed shape" reference whose GET is answered by executeGet() below.
 */
@Override
protected GeoShapeQueryBuilder doCreateTestQueryBuilder() {
  ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
  GeoShapeQueryBuilder builder;
  if (randomBoolean()) {
    try {
      builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  } else {
    // Stash the shape and ids in static fields so executeGet() can serve them.
    indexedShapeToReturn = shape;
    indexedShapeId = randomAsciiOfLengthBetween(3, 20);
    indexedShapeType = randomAsciiOfLengthBetween(3, 20);
    builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, indexedShapeId, indexedShapeType);
    if (randomBoolean()) {
      indexedShapeIndex = randomAsciiOfLengthBetween(3, 20);
      builder.indexedShapeIndex(indexedShapeIndex);
    }
    if (randomBoolean()) {
      indexedShapePath = randomAsciiOfLengthBetween(3, 20);
      builder.indexedShapePath(indexedShapePath);
    }
  }
  SpatialStrategy strategy = randomFrom(SpatialStrategy.values());
  builder.strategy(strategy);
  // Relation is not randomized for TERM -- presumably TERM supports only one
  // relation; verify against GeoShapeQueryBuilder before changing.
  if (strategy != SpatialStrategy.TERM) {
    builder.relation(randomFrom(ShapeRelation.values()));
  }
  return builder;
}
/**
 * Fakes the GET that resolves an indexed shape: verifies the request matches
 * what doCreateTestQueryBuilder() registered, then returns the stored shape
 * serialized under the expected path.
 *
 * NOTE(review): the GetResult is built with indexedShapeIndex (which may be
 * null) rather than expectedShapeIndex -- confirm this mismatch is intended.
 */
@Override
protected GetResponse executeGet(GetRequest getRequest) {
  assertThat(indexedShapeToReturn, notNullValue());
  assertThat(indexedShapeId, notNullValue());
  assertThat(indexedShapeType, notNullValue());
  assertThat(getRequest.id(), equalTo(indexedShapeId));
  assertThat(getRequest.type(), equalTo(indexedShapeType));
  String expectedShapeIndex = indexedShapeIndex == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_INDEX_NAME : indexedShapeIndex;
  assertThat(getRequest.index(), equalTo(expectedShapeIndex));
  String expectedShapePath = indexedShapePath == null ? GeoShapeQueryBuilder.DEFAULT_SHAPE_FIELD_NAME : indexedShapePath;
  String json;
  try {
    XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
    builder.startObject();
    builder.field(expectedShapePath, indexedShapeToReturn);
    builder.endObject();
    json = builder.string();
  } catch (IOException ex) {
    throw new ElasticsearchException("boom", ex);
  }
  GetResponse response = new GetResponse(new GetResult(indexedShapeIndex, indexedShapeType, indexedShapeId, 0, true, new BytesArray(
      json), null));
  return response;
}
/** Resets the static indexed-shape fields between tests to avoid cross-test leakage. */
@After
public void clearShapeFields() {
  indexedShapeToReturn = null;
  indexedShapeId = null;
  indexedShapeType = null;
  indexedShapePath = null;
  indexedShapeIndex = null;
}
@Override
protected void doAssertLuceneQuery(GeoShapeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
    // Logic for doToQuery is complex and is hard to test here. Need to rely
    // on Integration tests to determine if created query is correct
    // TODO improve GeoShapeQueryBuilder.doToQuery() method to make it
    // easier to test here
    // Only verify the general shape of the produced Lucene query.
    assertThat(query, anyOf(instanceOf(BooleanQuery.class), instanceOf(ConstantScoreQuery.class)));
}
/**
 * Overridden here to ensure the test is only run if at least one type is
 * present in the mappings. Geo queries do not execute if the field is not
 * explicitly mapped.
 */
@Override
public void testToQuery() throws IOException {
    //TODO figure out why this test might take up to 10 seconds once in a while
    assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
    super.testToQuery();
}
// A null field name must be rejected eagerly by the constructor.
@Test(expected = IllegalArgumentException.class)
public void testNoFieldName() throws Exception {
    ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
    new GeoShapeQueryBuilder(null, shape);
}
// A null shape must be rejected eagerly by the constructor.
@Test
public void testNoShape() throws IOException {
    try {
        // Drop the unused local assignment: the constructor is expected to
        // throw, so the result would never be observable anyway. This also
        // matches the style of the sibling testNoIndexedShape* tests.
        new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, (ShapeBuilder) null);
        fail("exception expected");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
// A null indexed-shape id must be rejected eagerly by the constructor.
@Test(expected = IllegalArgumentException.class)
public void testNoIndexedShape() throws IOException {
    new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, (String) null, "type");
}
// A null indexed-shape type must be rejected eagerly by the constructor.
@Test(expected = IllegalArgumentException.class)
public void testNoIndexedShapeType() throws IOException {
    new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, "id", (String) null);
}
// Setting a null relation must be rejected by the setter.
@Test(expected=IllegalArgumentException.class)
public void testNoRelation() throws IOException {
    ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
    GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
    builder.relation(null);
}
/**
 * The TERM strategy only supports the INTERSECTS relation; verifies the
 * illegal combination is rejected regardless of which setter is called first.
 */
@Test
public void testInvalidRelation() throws IOException {
    ShapeBuilder shape = RandomShapeGenerator.createShapeWithin(getRandom(), null);
    GeoShapeQueryBuilder builder = new GeoShapeQueryBuilder(GEO_SHAPE_FIELD_NAME, shape);
    // strategy first, then an incompatible relation
    try {
        builder.strategy(SpatialStrategy.TERM);
        builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
        fail("Illegal combination of strategy and relation setting");
    } catch (IllegalArgumentException e) {
        // okay
    }
    // relation first, then the incompatible strategy
    try {
        builder.relation(randomFrom(ShapeRelation.DISJOINT, ShapeRelation.WITHIN));
        builder.strategy(SpatialStrategy.TERM);
        fail("Illegal combination of strategy and relation setting");
    } catch (IllegalArgumentException e) {
        // okay
    }
}
// Regression test: serializing a geo_shape query as an array element must not
// throw. See issue #3878.
@Test // see #3878
public void testThatXContentSerializationInsideOfArrayWorks() throws Exception {
    EnvelopeBuilder envelopeBuilder = ShapeBuilder.newEnvelope().topLeft(0, 0).bottomRight(10, 10);
    GeoShapeQueryBuilder geoQuery = QueryBuilders.geoShapeQuery("searchGeometry", envelopeBuilder);
    JsonXContent.contentBuilder().startArray().value(geoQuery).endArray();
}
}
| |
/*
Copyright 2015, 2017, 2020 Nationale-Nederlanden
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package nl.nn.adapterframework.extensions.svn;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.URL;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Vector;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.xpath.XPathExpressionException;
import org.apache.commons.lang3.StringUtils;
import nl.nn.adapterframework.configuration.ConfigurationException;
import nl.nn.adapterframework.core.PipeLineSession;
import nl.nn.adapterframework.core.PipeRunException;
import nl.nn.adapterframework.core.PipeRunResult;
import nl.nn.adapterframework.core.SenderException;
import nl.nn.adapterframework.core.TimeoutException;
import nl.nn.adapterframework.http.HttpSender;
import nl.nn.adapterframework.pipes.FixedForwardPipe;
import nl.nn.adapterframework.stream.Message;
import nl.nn.adapterframework.util.DomBuilderException;
import nl.nn.adapterframework.util.XmlUtils;
/**
 * Pipe which scans TIBCO sources in Subversion and creates a report in xml.
 * <p>
 * Starting from the configured {@code url}, the pipe walks the SVN HTML
 * directory listings recursively, pruning directories that are irrelevant at
 * each depth (only {@code BW/} and {@code SOA/} at the root, skipping
 * {@code branches/}/{@code tags/} when a {@code trunk/} exists, etc.), and
 * extracts configuration details from selected TIBCO artifact files.
 * Errors are reported inline in the generated XML rather than aborting the run.
 *
 * @author Peter Leeuwenburgh
 */
public class ScanTibcoSolutionPipe extends FixedForwardPipe {

    // Root SVN URL to scan (required).
    private String url;
    // Depth at which scanning starts; influences the pruning rules in process().
    private int level = 0;

    /**
     * Runs the scan and returns the collected report as an XML string.
     * The incoming message is not used; all input comes from configuration.
     */
    @Override
    public PipeRunResult doPipe(Message message, PipeLineSession session) throws PipeRunException {
        StringWriter stringWriter = new StringWriter();
        XMLStreamWriter xmlStreamWriter;
        try {
            xmlStreamWriter = XmlUtils.OUTPUT_FACTORY.createXMLStreamWriter(stringWriter);
            xmlStreamWriter.writeStartDocument();
            xmlStreamWriter.writeStartElement("root");
            xmlStreamWriter.writeAttribute("url", getUrl());
            // xmlStreamWriter.writeAttribute("level",
            // String.valueOf(getLevel()));
            process(xmlStreamWriter, getUrl(), getLevel());
            // writeEndDocument implicitly closes the still-open "root" element.
            xmlStreamWriter.writeEndDocument();
            xmlStreamWriter.flush();
            xmlStreamWriter.close();
        } catch (XMLStreamException e) {
            throw new PipeRunException(this, "XMLStreamException", e);
        } catch (DomBuilderException e) {
            throw new PipeRunException(this, "DomBuilderException", e);
        } catch (XPathExpressionException e) {
            throw new PipeRunException(this, "XPathExpressionException", e);
        }
        return new PipeRunResult(getSuccessForward(), stringWriter.getBuffer().toString());
    }

    /**
     * Recursively processes one SVN directory listing.
     * Fetches the HTML listing at {@code cUrl}, iterates its links, prunes
     * irrelevant entries depending on {@code cLevel}, and writes
     * {@code <dir>}/{@code <file>} elements. Fetch errors are written as
     * {@code <errorMessage>} elements and the entry is skipped.
     */
    public void process(XMLStreamWriter xmlStreamWriter, String cUrl, int cLevel) throws XMLStreamException, DomBuilderException, XPathExpressionException {
        String html;
        try {
            html = getHtml(cUrl);
        } catch (Exception e) {
            error(xmlStreamWriter, "error occured during getting html", e, true);
            html = null;
        }
        if (html != null) {
            // Links in the SVN-generated HTML directory index.
            Collection<String> c = XmlUtils.evaluateXPathNodeSet(html,
                    "html/body/ul/li/a/@href");
            if (c != null) {
                for (Iterator<String> it = c.iterator(); it.hasNext();) {
                    String token = it.next();
                    if (token.equals("../")) {
                        // skip reference to parent directory
                    } else if (cLevel == 0 && !token.equals("BW/")
                            && !token.equals("SOA/")) {
                        // at the root only BW/ and SOA/ are of interest
                        skipDir(xmlStreamWriter, token);
                        // } else if (cLevel == 1 &&
                        // !token.startsWith("Customer")) {
                        // skipDir(xmlStreamWriter, token);
                    } else if (cLevel == 2
                            && (token.equals("branches/") || token
                                    .equals("tags/")) && c.contains("trunk/")) {
                        // prefer trunk/ over branches/ and tags/ when present
                        skipDir(xmlStreamWriter, token);
                    } else if (cLevel == 3 && !token.equals("src/")
                            && c.contains("src/") && !token.equals("release/")) {
                        // when src/ exists, only src/ and release/ are scanned
                        skipDir(xmlStreamWriter, token);
                        // } else if (cLevel == 5 && token.endsWith("/")) {
                        // skipDir(xmlStreamWriter, token);
                    } else {
                        String newUrl = cUrl + token;
                        // a trailing slash in the link marks a directory
                        boolean dir = false;
                        if (token.endsWith("/")) {
                            dir = true;
                        }
                        if (dir) {
                            xmlStreamWriter.writeStartElement("dir");
                            xmlStreamWriter.writeAttribute("name",
                                    skipLastCharacter(token));
                            // xmlStreamWriter.writeAttribute("level",
                            // String.valueOf(cLevel + 1));
                            if (cLevel == 1 || cLevel == 4) {
                                // project / solution level: record last commit info
                                addCommit(xmlStreamWriter, newUrl);
                            }
                            process(xmlStreamWriter, newUrl, cLevel + 1);
                        } else {
                            xmlStreamWriter.writeStartElement("file");
                            xmlStreamWriter.writeAttribute("name", token);
                            if (cLevel > 5) {
                                // deep enough to be inside an artifact tree:
                                // extract content from known TIBCO file types
                                if (token.endsWith(".jmsDest")) {
                                    addFileContent(xmlStreamWriter, newUrl,
                                            "jmsDest");
                                }
                                if (token.endsWith(".jmsDestConf")) {
                                    addFileContent(xmlStreamWriter, newUrl,
                                            "jmsDestConf");
                                }
                                if (token.endsWith(".composite")) {
                                    addFileContent(xmlStreamWriter, newUrl,
                                            "composite");
                                }
                                if (token.endsWith(".process")) {
                                    addFileContent(xmlStreamWriter, newUrl,
                                            "process");
                                }
                                if (token.equals("defaultVars.substvar")) {
                                    addFileContent(xmlStreamWriter, newUrl,
                                            "substVar");
                                }
                            }
                        }
                        // closes the <dir> or <file> element opened above
                        xmlStreamWriter.writeEndElement();
                    }
                }
            }
        }
    }

    /** Writes an empty {@code <dir name="..." skip="true"/>} marker element. */
    private void skipDir(XMLStreamWriter xmlStreamWriter, String token)
            throws XMLStreamException {
        xmlStreamWriter.writeStartElement("dir");
        xmlStreamWriter.writeAttribute("name", skipLastCharacter(token));
        xmlStreamWriter.writeAttribute("skip", "true");
        xmlStreamWriter.writeEndElement();
    }

    /** Strips the trailing character (the directory slash) from a link token. */
    private String skipLastCharacter(String str) {
        return StringUtils.left(str, str.length() - 1);
    }

    /**
     * Writes a {@code <commit>} element with creator and date taken from the
     * SVN log report for {@code urlString}; failures are reported inline.
     */
    private void addCommit(XMLStreamWriter xmlStreamWriter, String urlString)
            throws XMLStreamException {
        xmlStreamWriter.writeStartElement("commit");
        try {
            String logReport = SvnUtils.getLogReport(urlString);
            String creator = XmlUtils.evaluateXPathNodeSetFirstElement(
                    logReport, "log-report/log-item/creator-displayname");
            xmlStreamWriter.writeAttribute("creator", creator);
            String date = XmlUtils.evaluateXPathNodeSetFirstElement(logReport,
                    "log-report/log-item/date");
            xmlStreamWriter.writeAttribute("date", date);
        } catch (Exception e) {
            error(xmlStreamWriter, "error occured during adding commit info",
                    e, false);
        }
        xmlStreamWriter.writeEndElement();
    }

    /**
     * Fetches the file at {@code urlString} and writes a {@code <content>}
     * element with type-specific details extracted via XPath:
     * <ul>
     * <li>{@code jmsDest}/{@code jmsDestConf}: resource name and JNDI name</li>
     * <li>{@code composite}: inbound JMS destinations and, via the referenced
     * WSDLs, outbound target addresses</li>
     * <li>{@code process}: JMSTo destinations of SOAP event sources and
     * send/receive activities</li>
     * <li>{@code substVar}: global variables with their reference paths</li>
     * </ul>
     * Anything unexpected is collected into {@code <warnMessages>}.
     */
    private void addFileContent(XMLStreamWriter xmlStreamWriter,
            String urlString, String type) throws XMLStreamException {
        xmlStreamWriter.writeStartElement("content");
        xmlStreamWriter.writeAttribute("type", type);
        String content;
        try {
            content = getHtml(urlString);
        } catch (Exception e) {
            error(xmlStreamWriter, "error occured during getting file content",
                    e, true);
            content = null;
        }
        if (content != null) {
            // collected warnings are emitted together at the end of <content>
            Vector<String> warnMessage = new Vector<String>();
            try {
                if (type.equals("jmsDest") || type.equals("jmsDestConf")) {
                    // AMX - receive (for jmsInboundDest)
                    Collection<String> c1 = XmlUtils.evaluateXPathNodeSet(
                            content, "namedResource/@name");
                    if (c1 != null && c1.size() > 0) {
                        if (c1.size() > 1) {
                            warnMessage.add("more then one resourceName found");
                        }
                        String resourceName = (String) c1.iterator().next();
                        xmlStreamWriter.writeStartElement("resourceName");
                        xmlStreamWriter.writeCharacters(resourceName);
                        xmlStreamWriter.writeEndElement();
                    } else {
                        warnMessage.add("no resourceName found");
                    }
                    Collection<String> c2 = XmlUtils.evaluateXPathNodeSet(
                            content, "namedResource/configuration/@jndiName");
                    if (c2 != null && c2.size() > 0) {
                        if (c2.size() > 1) {
                            warnMessage
                                    .add("more then one resourceJndiName found");
                        }
                        String resourceJndiName = (String) c2.iterator().next();
                        xmlStreamWriter.writeStartElement("resourceJndiName");
                        xmlStreamWriter.writeCharacters(resourceJndiName);
                        xmlStreamWriter.writeEndElement();
                    } else {
                        warnMessage.add("no resourceJndiName found");
                    }
                } else if (type.equals("composite")) {
                    // AMX - receive
                    Collection<String> c1 = XmlUtils
                            .evaluateXPathNodeSet(
                                    content,
                                    "composite/service/bindingAdjunct/property[@name='JmsInboundDestinationConfig']/@simpleValue");
                    if (c1 != null && c1.size() > 0) {
                        for (Iterator<String> c1it = c1.iterator(); c1it
                                .hasNext();) {
                            xmlStreamWriter.writeStartElement("jmsInboundDest");
                            xmlStreamWriter.writeCharacters(c1it.next());
                            xmlStreamWriter.writeEndElement();
                        }
                    } else {
                        warnMessage.add("no jmsInboundDest found");
                    }
                    // AMX - send
                    Collection<String> c2 = XmlUtils.evaluateXPathNodeSet(
                            content,
                            "composite/reference/interface.wsdl/@wsdlLocation");
                    if (c2 != null && c2.size() > 0) {
                        for (Iterator<String> c2it = c2.iterator(); c2it
                                .hasNext();) {
                            String itn = c2it.next();
                            String wsdl = null;
                            try {
                                // wsdlLocation may be relative to the composite's URL
                                URL url = new URL(urlString);
                                URL wsdlUrl = new URL(url, itn);
                                wsdl = getHtml(wsdlUrl.toString());
                            } catch (Exception e) {
                                error(xmlStreamWriter,
                                        "error occured during getting wsdl file content",
                                        e, true);
                                wsdl = null;
                            }
                            if (wsdl != null) {
                                Collection<String> c3 = XmlUtils
                                        .evaluateXPathNodeSet(wsdl,
                                                // "definitions/service/port/targetAddress",
                                                // "concat(.,';',../../@name)");
                                                "definitions/service/port/targetAddress");
                                if (c3 != null && c3.size() > 0) {
                                    for (Iterator<String> c3it = c3.iterator(); c3it
                                            .hasNext();) {
                                        xmlStreamWriter
                                                .writeStartElement("targetAddr");
                                        xmlStreamWriter.writeCharacters(c3it
                                                .next());
                                        xmlStreamWriter.writeEndElement();
                                    }
                                } else {
                                    warnMessage.add("no targetAddr found");
                                }
                            } else {
                                warnMessage.add("wsdl [" + itn + "] not found");
                            }
                        }
                    } else {
                        warnMessage.add("no wsdlLocation found");
                    }
                } else if (type.equals("process")) {
                    // BW - receive
                    Double d1 = XmlUtils
                            .evaluateXPathNumber(
                                    content,
                                    "count(ProcessDefinition/starter[type='com.tibco.plugin.soap.SOAPEventSource']/config)");
                    if (d1 > 0) {
                        Collection<String> c1 = XmlUtils
                                .evaluateXPathNodeSet(
                                        content,
                                        "ProcessDefinition/starter[type='com.tibco.plugin.soap.SOAPEventSource']/config/sharedChannels/jmsChannel/JMSTo");
                        if (c1 != null && c1.size() > 0) {
                            for (Iterator<String> c1it = c1.iterator(); c1it
                                    .hasNext();) {
                                xmlStreamWriter.writeStartElement("jmsTo");
                                xmlStreamWriter.writeAttribute("type",
                                        "soapEventSource");
                                xmlStreamWriter.writeCharacters(c1it.next());
                                xmlStreamWriter.writeEndElement();
                            }
                        } else {
                            warnMessage
                                    .add("no jmsTo found for soapEventSource");
                        }
                    } else {
                        warnMessage.add("no soapEventSource found");
                    }
                    // BW - send
                    Double d2 = XmlUtils
                            .evaluateXPathNumber(
                                    content,
                                    "count(ProcessDefinition/activity[type='com.tibco.plugin.soap.SOAPSendReceiveActivity']/config)");
                    if (d2 > 0) {
                        Collection<String> c2 = XmlUtils
                                .evaluateXPathNodeSet(
                                        content,
                                        "ProcessDefinition/activity[type='com.tibco.plugin.soap.SOAPSendReceiveActivity']/config/sharedChannels/jmsChannel/JMSTo");
                        if (c2 != null && c2.size() > 0) {
                            for (Iterator<String> c2it = c2.iterator(); c2it
                                    .hasNext();) {
                                xmlStreamWriter.writeStartElement("jmsTo");
                                xmlStreamWriter.writeAttribute("type",
                                        "soapSendReceiveActivity");
                                xmlStreamWriter.writeCharacters(c2it.next());
                                xmlStreamWriter.writeEndElement();
                            }
                        } else {
                            warnMessage
                                    .add("no jmsTo found for soapSendReceiveActivity");
                        }
                    } else {
                        warnMessage.add("no soapSendReceiveActivity found");
                    }
                } else if (type.equals("substVar")) {
                    // path between ".../defaultVars/" and the file name, used
                    // to build the %%path/name%% reference notation
                    String path = StringUtils.substringBeforeLast(StringUtils
                            .substringAfterLast(urlString, "/defaultVars/"),
                            "/");
                    Map<String, String> m1 = XmlUtils.evaluateXPathNodeSet(
                            content,
                            "repository/globalVariables/globalVariable",
                            "name", "value");
                    if (m1 != null && m1.size() > 0) {
                        for (Iterator<String> m1it = m1.keySet().iterator(); m1it
                                .hasNext();) {
                            Object key = m1it.next();
                            Object value = m1.get(key);
                            xmlStreamWriter.writeStartElement("globalVariable");
                            xmlStreamWriter
                                    .writeAttribute("name", (String) key);
                            xmlStreamWriter.writeAttribute("ref", "%%" + path
                                    + "/" + key + "%%");
                            xmlStreamWriter.writeCharacters((String) value);
                            xmlStreamWriter.writeEndElement();
                        }
                    } else {
                        warnMessage.add("no globalVariable found");
                    }
                    /*
                     * } else { content = XmlUtils.removeNamespaces(content);
                     * xmlStreamWriter.writeCharacters(content);
                     */
                }
            } catch (Exception e) {
                error(xmlStreamWriter, "error occured during processing "
                        + type + " file", e, true);
            }
            if (warnMessage.size() > 0) {
                xmlStreamWriter.writeStartElement("warnMessages");
                for (int i = 0; i < warnMessage.size(); i++) {
                    xmlStreamWriter.writeStartElement("warnMessage");
                    xmlStreamWriter.writeCharacters(warnMessage.elementAt(i));
                    xmlStreamWriter.writeEndElement();
                }
                xmlStreamWriter.writeEndElement();
            }
        }
        xmlStreamWriter.writeEndElement();
    }

    /**
     * Logs a warning and writes an {@code <errorMessage>} element; optionally
     * includes the full stack trace in the element text.
     */
    private void error(XMLStreamWriter xmlStreamWriter, String msg,
            Throwable t, boolean printStackTrace) throws XMLStreamException {
        log.warn(msg, t);
        xmlStreamWriter.writeStartElement("errorMessage");
        String errorMsg;
        if (printStackTrace) {
            StringWriter trace = new StringWriter();
            t.printStackTrace(new PrintWriter(trace));
            errorMsg = msg + ": " + trace;
        } else {
            errorMsg = msg + ": " + t.getMessage();
        }
        xmlStreamWriter.writeCharacters(errorMsg);
        xmlStreamWriter.writeEndElement();
    }

    /**
     * Retrieves the content at {@code urlString} as XHTML using a one-shot
     * {@link HttpSender}; certificate checks are relaxed because internal SVN
     * servers often use self-signed certificates. The sender is always closed.
     */
    private String getHtml(String urlString) throws ConfigurationException, SenderException, TimeoutException, IOException {
        HttpSender httpSender = null;
        try {
            httpSender = new HttpSender();
            httpSender.setUrl(urlString);
            httpSender.setAllowSelfSignedCertificates(true);
            httpSender.setVerifyHostname(false);
            httpSender.setIgnoreCertificateExpiredException(true);
            httpSender.setXhtml(true);
            httpSender.configure();
            httpSender.open();
            String result = httpSender.sendMessage(new Message(""), null).asString();
            return result;
        } finally {
            if (httpSender != null) {
                httpSender.close();
            }
        }
    }

    /** Returns the configured root SVN URL. */
    public String getUrl() {
        return url;
    }

    /** Sets the root SVN URL to scan. */
    public void setUrl(String s) {
        url = s;
    }

    /** Sets the starting depth for the pruning rules in process(). */
    public void setLevel(int i) {
        level = i;
    }

    /** Returns the starting depth. */
    public int getLevel() {
        return level;
    }
}
| |
/*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.mixin.core.world.storage;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.nbt.CompressedStreamTools;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.datafix.FixTypes;
import net.minecraft.world.storage.SaveHandler;
import net.minecraft.world.storage.WorldInfo;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.Sponge;
import org.spongepowered.api.util.annotation.NonnullByDefault;
import org.spongepowered.api.world.SerializationBehaviors;
import org.spongepowered.api.world.storage.WorldProperties;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.ModifyArg;
import org.spongepowered.asm.mixin.injection.Redirect;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.asm.util.PrettyPrinter;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.bridge.world.WorldInfoBridge;
import org.spongepowered.common.bridge.world.storage.SaveHandlerBridge;
import org.spongepowered.common.data.util.DataUtil;
import org.spongepowered.common.event.tracking.IPhaseState;
import org.spongepowered.common.event.tracking.PhaseTracker;
import org.spongepowered.common.util.Constants;
import org.spongepowered.common.world.WorldManager;
import org.spongepowered.common.world.storage.SpongePlayerDataHandler;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.time.Instant;
import java.time.format.DateTimeFormatter;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import javax.annotation.Nullable;
@NonnullByDefault
@Mixin(SaveHandler.class)
public abstract class SaveHandlerMixin implements SaveHandlerBridge {
@Shadow @Final private File worldDirectory;
@Shadow @Final private File playersDirectory;
@Shadow protected abstract void shadow$setSessionLock();
@Nullable private Exception impl$capturedException;
// player join stuff
@Nullable private Path impl$file;
private Set<File> impl$directoriesToCreate = new HashSet<>();
/**
 * Redirects {@code File.mkdirs()} calls in the constructor so directories are
 * only created when the current phase permits writing world files; otherwise
 * the directory is remembered for deferred creation.
 */
@Redirect(method = "<init>", at = @At(value = "INVOKE", target = "Ljava/io/File;mkdirs()Z", remap = false))
@SuppressWarnings({"rawtypes", "unchecked"})
private boolean impl$createDirectoryIfSavingFiles(final File dir) {
    final PhaseTracker tracker = PhaseTracker.getInstance();
    final IPhaseState currentState = tracker.getCurrentState();
    if (currentState.shouldCreateWorldDirectories(tracker.getCurrentContext())) {
        // Saving is allowed: create the directory immediately, as vanilla would.
        return dir.mkdirs();
    }
    // Defer: record the directory so it can be created later (see checkSessionLock).
    this.impl$directoriesToCreate.add(dir);
    return false;
}
/**
 * Redirects the constructor's {@code setSessionLock()} call so the session
 * lock is only written when the current phase permits creating world files.
 */
@Redirect(method = "<init>",
    at = @At(value = "INVOKE",
        target = "Lnet/minecraft/world/storage/SaveHandler;setSessionLock()V"
    )
)
@SuppressWarnings({"rawtypes", "unchecked"})
private void impl$setSessionLockIfCreatingFiles(final SaveHandler self) {
    final PhaseTracker tracker = PhaseTracker.getInstance();
    final IPhaseState state = tracker.getCurrentState();
    if (!state.shouldCreateWorldDirectories(tracker.getCurrentContext())) {
        // Directory creation was deferred, so the lock must be deferred too.
        return;
    }
    this.shadow$setSessionLock();
}
/**
 * Redirects the session.lock FileInputStream construction in checkSessionLock:
 * if the lock file does not exist yet, any deferred directories are created
 * and the session lock is written first, so the subsequent read succeeds.
 */
@Redirect(method = "checkSessionLock",
    at = @At(value = "NEW", target = "java/io/FileInputStream", remap = false))
private FileInputStream impl$createSessionLockAndCreateDirectories(final File file) throws FileNotFoundException {
    if (!file.exists()) {
        // NOTE(review): unchecked Optional.get() — presumably the world is always
        // registered by the time the session lock is checked; confirm.
        final WorldProperties props = Sponge.getServer().getWorldProperties(this.worldDirectory.getName()).get();
        if (props.getSerializationBehavior() == SerializationBehaviors.NONE) {
            throw new IllegalStateException("Should not be saving with SerializationBehaviors.NONE");
        }
        // Create the directories whose mkdirs() calls were deferred in the constructor.
        for (File dir : this.impl$directoriesToCreate) {
            dir.mkdirs();
        }
        this.impl$directoriesToCreate.clear();
        this.shadow$setSessionLock();
    }
    return new FileInputStream(file);
}
/**
 * Replaces the first MinecraftException message in checkSessionLock with one
 * that names the affected world directory.
 */
@ModifyArg(method = "checkSessionLock",
    at = @At(value = "INVOKE", target = "Lnet/minecraft/world/MinecraftException;<init>(Ljava/lang/String;)V", ordinal = 0, remap = false))
private String modifyMinecraftExceptionOutputIfNotInitializationTime(final String message) {
    return "The save folder for world " + this.worldDirectory + " is being accessed from another location, aborting";
}
/**
 * Replaces the second MinecraftException message in checkSessionLock (the
 * IOException path) with one that names the affected world directory.
 */
@ModifyArg(method = "checkSessionLock",
    at = @At(value = "INVOKE", target = "Lnet/minecraft/world/MinecraftException;<init>(Ljava/lang/String;)V", ordinal = 1, remap = false))
private String modifyMinecraftExceptionOutputIfIOException(final String message) {
    return "Failed to check session lock for world " + this.worldDirectory + ", aborting";
}
/**
 * After vanilla saves the world info, writes Sponge's own level data to
 * level_sponge.dat using a write-new / rotate-old scheme:
 * new file -> optional sanity checks (non-empty, gzip magic header) ->
 * current file rotated to the .old backup -> new file renamed into place.
 * Bad output is reported via PrettyPrinter and discarded rather than
 * replacing the previous good file. All exceptions are caught so a failure
 * here never aborts the vanilla save.
 */
@Inject(method = "saveWorldInfoWithPlayer", at = @At("RETURN"))
private void impl$saveLevelSpongeDataFile(final WorldInfo worldInformation, final NBTTagCompound tagCompound, final CallbackInfo ci) {
    // extra integrity checks are opt-in via the global config
    boolean performChecks = SpongeImpl.getGlobalConfigAdapter().getConfig().getGeneral().isCheckFileWhenSavingSpongeDataFile();
    try {
        // If the returned NBT is empty, then we should warn the user.
        NBTTagCompound spongeRootLevelNBT = ((WorldInfoBridge) worldInformation).bridge$getSpongeRootLevelNbt();
        if (spongeRootLevelNBT.isEmpty()) {
            Integer dimensionId = ((WorldInfoBridge) worldInformation).bridge$getDimensionId();
            String dimensionIdString = dimensionId == null ? "unknown" : String.valueOf(dimensionId);
            // We should warn the user about the NBT being empty, but not saving it.
            new PrettyPrinter().add("Sponge Root Level NBT for world %s is empty!", worldInformation.getWorldName()).centre().hr()
                .add("When trying to save Sponge data for the world %s, an empty NBT compound was provided. The old Sponge data file was "
                        + "left intact.",
                    worldInformation.getWorldName())
                .add()
                .add("The following information may be useful in debugging:")
                .add()
                .add("UUID: ", ((WorldInfoBridge) worldInformation).bridge$getAssignedId())
                .add("Dimension ID: ", dimensionIdString)
                .add("Is Modded: ", ((WorldInfoBridge) worldInformation).bridge$getIsMod())
                .add("Valid flag: ", ((WorldInfoBridge) worldInformation).bridge$isValid())
                .add()
                .add("Stack trace:")
                .add(new Exception())
                .print(System.err);
            return;
        }
        // level_sponge.dat_new (staging), _old (backup), and the live file
        final File newDataFile = new File(this.worldDirectory, Constants.Sponge.World.LEVEL_SPONGE_DAT_NEW);
        final File oldDataFile = new File(this.worldDirectory, Constants.Sponge.World.LEVEL_SPONGE_DAT_OLD);
        final File dataFile = new File(this.worldDirectory, Constants.Sponge.World.LEVEL_SPONGE_DAT);
        try (final FileOutputStream stream = new FileOutputStream(newDataFile)) {
            CompressedStreamTools.writeCompressed(spongeRootLevelNBT, stream);
        }
        if (performChecks) {
            // Before we continue, is the file zero length?
            if (newDataFile.length() == 0) {
                Integer dimensionId = ((WorldInfoBridge) worldInformation).bridge$getDimensionId();
                String dimensionIdString = dimensionId == null ? "unknown" : String.valueOf(dimensionId);
                // Then we just delete the file and tell the user that we didn't save properly.
                new PrettyPrinter().add("Zero length level_sponge.dat file was created for %s!", worldInformation.getWorldName()).centre().hr()
                    .add("When saving the data file for the world %s, a zero length file was written. Sponge has discarded this file.",
                        worldInformation.getWorldName())
                    .add()
                    .add("The following information may be useful in debugging:")
                    .add()
                    .add("UUID: ", ((WorldInfoBridge) worldInformation).bridge$getAssignedId())
                    .add("Dimension ID: ", dimensionIdString)
                    .add("Is Modded: ", ((WorldInfoBridge) worldInformation).bridge$getIsMod())
                    .add("Valid flag: ", ((WorldInfoBridge) worldInformation).bridge$isValid())
                    .add()
                    .add("Stack trace:")
                    .add(new Exception())
                    .print(System.err);
                newDataFile.delete();
                return;
            }
            // Check the file starts with 0x1F 0x8B - it must be gzipped
            boolean shouldDelete = false;
            try (final InputStream reader = Files.newInputStream(newDataFile.toPath())) {
                int byte1 = reader.read();
                int byte2 = reader.read();
                if (byte1 != Constants.GZip.GZIP_BYTE_1 || byte2 != Constants.GZip.GZIP_BYTE_2) {
                    // The file is not a gzip file, and is therefore not a valid file.
                    shouldDelete = true;
                    int next = 1;
                    if (byte1 == 0 && byte2 == 0) {
                        // We might as well check to see if it's zero filled.
                        do {
                            next = reader.read();
                        } while (next == 0);
                    }
                    // If we get next = -1, the entire file is zeroes. Else, we encountered a non-zero entry. We'll keep that as a corrupted
                    // file and if someone reports it, we can ask for it.
                    //
                    // If it's zero filled, we'll just delete it - we have no use for it.
                    Integer dimensionId = ((WorldInfoBridge) worldInformation).bridge$getDimensionId();
                    String dimensionIdString = dimensionId == null ? "unknown" : String.valueOf(dimensionId);
                    String copyText = null;
                    if (next != -1) {
                        try {
                            // Keep a timestamped copy of the corrupted file for later analysis.
                            Path dataFilePath = newDataFile.toPath();
                            Path corrupted = dataFilePath.resolveSibling(newDataFile.getName() + ".corrupted-" +
                                    DateTimeFormatter.ISO_INSTANT.format(Instant.now()).replaceAll(":", "") + ".dat");
                            Files.copy(dataFilePath, corrupted);
                            copyText =
                                String.format(
                                    "We have backed up the corrupted file to %s. Please keep hold of this, it may be useful to Sponge "
                                            + "developers.", corrupted.getFileName());
                        } catch (IOException e) {
                            // could not copy, that's okay
                        }
                    }
                    // Then we just delete the file and tell the user that we didn't save properly.
                    PrettyPrinter prettyPrinter = new PrettyPrinter(100)
                        .add("Badly formatted level_sponge.dat file was created for %s!", worldInformation.getWorldName())
                        .centre()
                        .hr()
                        .addWrapped(
                            "When saving the data file for the world %s, the file was not saved with the correct magic header. Sponge "
                                    + "has discarded this file.", worldInformation.getWorldName())
                        .add();
                    if (copyText != null) {
                        prettyPrinter.addWrapped(copyText).add();
                    }
                    prettyPrinter.add("The following information may be useful in debugging:")
                        .add()
                        .add("Magic header: %x %x (expected %x %x)", byte1, byte2, Constants.GZip.GZIP_BYTE_1, Constants.GZip.GZIP_BYTE_2)
                        .add("File size: %d bytes", newDataFile.length())
                        .add("UUID: ", ((WorldInfoBridge) worldInformation).bridge$getAssignedId())
                        .add("Dimension ID: ", dimensionIdString)
                        .add("Is Modded: ", ((WorldInfoBridge) worldInformation).bridge$getIsMod())
                        .add("Valid flag: ", ((WorldInfoBridge) worldInformation).bridge$isValid())
                        .add()
                        .add("Stack trace:")
                        .add(new Exception())
                        .print(System.err);
                }
            }
            // The delete call is here because we've closed the file stream
            // for said file by this point.
            if (shouldDelete) {
                newDataFile.delete();
                return;
            }
        }
        // Rotate: current file becomes the .old backup, then the staged file
        // becomes current. NOTE(review): the renameTo()/delete() return values
        // are ignored throughout, and the delete() calls after a successful
        // rename are no-ops — TODO confirm whether failed renames should be
        // reported instead of silently dropped.
        if (dataFile.exists()) {
            if (oldDataFile.exists()) {
                oldDataFile.delete();
            }
            dataFile.renameTo(oldDataFile);
            dataFile.delete();
        }
        newDataFile.renameTo(dataFile);
        if (newDataFile.exists()) {
            newDataFile.delete();
        }
    } catch (Exception exception) {
        exception.printStackTrace();
    }
}
/**
 * Loads Sponge's level data into the given WorldInfo, preferring the live
 * level_sponge.dat and falling back to the .old backup if the live file is
 * unreadable. Only throws when neither file could be loaded; a missing file
 * is not an error.
 */
@Override
public void bridge$loadSpongeDatData(final WorldInfo info) {
    final File spongeFile = new File(this.worldDirectory, Constants.Sponge.World.LEVEL_SPONGE_DAT);
    final File spongeOldFile = new File(this.worldDirectory, Constants.Sponge.World.LEVEL_SPONGE_DAT_OLD);
    // tracks whether an existing file failed to load (vs. simply not existing)
    boolean exceptionRaised = false;
    if (spongeFile.exists()) {
        if (impl$loadSpongeDatFile(info, spongeFile, true)) {
            return;
        }
        exceptionRaised = true;
    }
    if (spongeOldFile.exists()) {
        if (impl$loadSpongeDatFile(info, spongeOldFile, false)) {
            if (exceptionRaised) {
                // Tell the user we successfully loaded a backup
                SpongeImpl.getLogger().warn("Successfully loaded backup data file {} for world {}.", spongeFile.getName(), info.getWorldName());
                // Delete the "current" file so we don't accidentally make it the backup file.
                spongeFile.delete();
            }
            return;
        }
        exceptionRaised = true;
    }
    if (exceptionRaised) {
        throw new RuntimeException("Unable to load sponge data for world [" + info.getWorldName() + "]");
    }
}
/**
 * Redirects the {@link File#isFile()} check so that, when the player data
 * file already exists, its path is stashed for later use (reading the file's
 * creation time for pre-existing data).
 *
 * @param localfile The local player data file
 * @return True if the file is a regular file
 */
@Redirect(method = "readPlayerData(Lnet/minecraft/entity/player/EntityPlayer;)Lnet/minecraft/nbt/NBTTagCompound;",
    at = @At(value = "INVOKE", target = "Ljava/io/File;isFile()Z", remap = false))
private boolean impl$grabFileToField(final File localfile) {
    final boolean isFile = localfile.isFile();
    // Stash the path only when the file exists; consumed and cleared by
    // impl$readLegacyDataAndOrSpongeData.
    this.impl$file = isFile ? localfile.toPath() : null;
    return isFile;
}
/**
 * Redirects the compressed NBT read of the player data file. Since the file
 * existed already (see impl$grabFileToField), its creation time can be used
 * as the default first-joined instant; Bukkit and Canary join data embedded
 * in the compound are migrated, and any instants already recorded by
 * SpongePlayerDataHandler take precedence. The result is stored back into
 * the handler keyed by the player's UUID.
 *
 * @param inputStream The input stream to direct to compressed stream tools
 * @return The compound that may be modified
 * @throws IOException for reasons
 */
@Redirect(method = "readPlayerData(Lnet/minecraft/entity/player/EntityPlayer;)Lnet/minecraft/nbt/NBTTagCompound;", at = @At(value = "INVOKE", target =
    "Lnet/minecraft/nbt/CompressedStreamTools;readCompressed(Ljava/io/InputStream;)"
            + "Lnet/minecraft/nbt/NBTTagCompound;"))
private NBTTagCompound impl$readLegacyDataAndOrSpongeData(final InputStream inputStream) throws IOException {
    // Default first-join: the file's creation time, or "now" if the path wasn't captured.
    Instant creation = this.impl$file == null ? Instant.now() : Files.readAttributes(this.impl$file, BasicFileAttributes.class).creationTime().toInstant();
    final NBTTagCompound compound = CompressedStreamTools.readCompressed(inputStream);
    Instant lastPlayed = Instant.now();
    // first try to migrate bukkit join data stuff
    if (compound.hasKey(Constants.Bukkit.BUKKIT, Constants.NBT.TAG_COMPOUND)) {
        final NBTTagCompound bukkitCompound = compound.getCompoundTag(Constants.Bukkit.BUKKIT);
        creation = Instant.ofEpochMilli(bukkitCompound.getLong(Constants.Bukkit.BUKKIT_FIRST_PLAYED));
        lastPlayed = Instant.ofEpochMilli(bukkitCompound.getLong(Constants.Bukkit.BUKKIT_LAST_PLAYED));
    }
    // migrate canary join data
    if (compound.hasKey(Constants.Canary.ROOT, Constants.NBT.TAG_COMPOUND)) {
        final NBTTagCompound canaryCompound = compound.getCompoundTag(Constants.Canary.ROOT);
        creation = Instant.ofEpochMilli(canaryCompound.getLong(Constants.Canary.FIRST_JOINED));
        lastPlayed = Instant.ofEpochMilli(canaryCompound.getLong(Constants.Canary.LAST_JOINED));
    }
    UUID playerId = null;
    if (compound.hasUniqueId(Constants.UUID)) {
        playerId = compound.getUniqueId(Constants.UUID);
    }
    if (playerId != null) {
        // Sponge's own stored instants win over anything derived above.
        final Optional<Instant> savedFirst = SpongePlayerDataHandler.getFirstJoined(playerId);
        if (savedFirst.isPresent()) {
            creation = savedFirst.get();
        }
        final Optional<Instant> savedJoined = SpongePlayerDataHandler.getLastPlayed(playerId);
        if (savedJoined.isPresent()) {
            lastPlayed = savedJoined.get();
        }
        SpongePlayerDataHandler.setPlayerInfo(playerId, creation, lastPlayed);
    }
    // Clear the stashed path so a later read can't reuse a stale file reference.
    this.impl$file = null;
    return compound;
}
    // Persists Sponge's extra per-player data right after vanilla finishes writing
    // the compressed player NBT file.
    @Inject(method = "writePlayerData",
        at = @At(value = "INVOKE",
            target = "Lnet/minecraft/nbt/CompressedStreamTools;writeCompressed(Lnet/minecraft/nbt/NBTTagCompound;Ljava/io/OutputStream;)V",
            shift = At.Shift.AFTER))
    private void impl$saveSpongePlayerData(final EntityPlayer player, final CallbackInfo callbackInfo) {
        SpongePlayerDataHandler.savePlayer(player.getUniqueID());
    }
    // Captures the locally-caught exception just before vanilla logs its warning, so
    // that impl$useStoredException can attach the full stack trace to the log call.
    @Inject(
        method = "writePlayerData",
        at = @At(value = "INVOKE",
            target = "Lorg/apache/logging/log4j/Logger;warn(Ljava/lang/String;Ljava/lang/Object;)V",
            remap = false),
        locals = LocalCapture.CAPTURE_FAILHARD)
    private void impl$trackExceptionForLogging(final EntityPlayer player, final CallbackInfo ci, final Exception exception) {
        this.impl$capturedException = exception;
    }
    // Replaces vanilla's warn(String, Object) call with a variant that also passes the
    // exception captured by impl$trackExceptionForLogging, so the stack trace is logged.
    @Redirect(
        method = "writePlayerData",
        at = @At(
            value = "INVOKE",
            target = "Lorg/apache/logging/log4j/Logger;warn(Ljava/lang/String;Ljava/lang/Object;)V",
            remap = false
        )
    )
    private void impl$useStoredException(final Logger logger, final String message, final Object param) {
        logger.warn(message, param, this.impl$capturedException);
        // Clear the field so a later, unrelated warn cannot log a stale exception.
        this.impl$capturedException = null;
    }
    // SF overrides getWorldDirectory for mod compatibility.
    // In order to avoid conflicts, we simply use another method to guarantee
    // the sponge world directory is returned for the corresponding save handler.
    // AnvilSaveHandlerMixin#getChunkLoader is one example where we must use this method.
    @Override
    public File bridge$getSpongeWorldDirectory() {
        return this.worldDirectory;
    }
    // Exposes this save handler's players directory to Sponge code via the bridge interface.
    @Override
    public File bridge$getPlayersDirectory() {
        return this.playersDirectory;
    }
private boolean impl$loadSpongeDatFile(final WorldInfo info, final File file, boolean isCurrent) {
final NBTTagCompound compound;
try (final FileInputStream stream = new FileInputStream(file)) {
compound = CompressedStreamTools.readCompressed(stream);
} catch (Exception ex) {
PrettyPrinter errorPrinter = new PrettyPrinter()
.add("Unable to load level data from world [%s] for file [%s]!", info.getWorldName(), file.getName())
.centre()
.hr();
// We can't read it - but let's copy the file so we can ask for it to inspect what it looks like later.
Path corrupted = file.toPath().getParent().resolve(file.getName() + ".corrupted-" +
DateTimeFormatter.ISO_INSTANT.format(Instant.now()).replaceAll(":", "") + ".dat");
try {
Files.copy(file.toPath(), corrupted);
errorPrinter.add("We have backed up the corrupted file to %s. Please keep hold of this, it may be useful to Sponge developers.",
corrupted.getFileName());
} catch (IOException e) {
errorPrinter.add("We were unable to copy the corrupted file.");
}
if (isCurrent) {
errorPrinter.add("We will try to load the backup file (if it exists)");
}
errorPrinter
.hr()
.add("Exception:")
.add(ex)
.print(System.err);
return false;
}
((WorldInfoBridge) info).bridge$setSpongeRootLevelNBT(compound);
if (compound.hasKey(Constants.Sponge.SPONGE_DATA)) {
final NBTTagCompound spongeCompound = compound.getCompoundTag(Constants.Sponge.SPONGE_DATA);
DataUtil.spongeDataFixer.process(FixTypes.LEVEL, spongeCompound);
((WorldInfoBridge) info).bridge$readSpongeNbt(spongeCompound);
}
return true;
}
}
| |
package elektra;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.Date;
import java.net.IDN;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.activation.FileDataSource;
import javax.mail.BodyPart;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.util.ByteArrayDataSource;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
 * Singleton mail sender: mails handed to {@link #sendMail(Mail)} are queued and
 * delivered asynchronously by a background {@code Mailer} thread.
 *
 * <p>Fixes over the previous revision: the CC and BCC loops used to read tokens
 * from the already-exhausted TO tokenizer ({@code st.nextToken()}), which threw
 * {@code NoSuchElementException} whenever CC/BCC recipients were set; queue access
 * is now synchronized (producer and consumer run on different threads); addresses
 * without an {@code '@'} no longer cause an {@code ArrayIndexOutOfBoundsException}
 * during punycode conversion.</p>
 */
public class MailSender
{
    private static final Log log = LogFactory.getLog(MailSender.class);

    private InternetAddress addressFrom;
    private MimeMessage msg;
    private MimeMultipart content;
    private Session session;
    private Transport transport;
    private Properties props = new Properties();
    // NOTE(review): host and credentials are hard-coded defaults; the host is replaced
    // per mail via Mail#getServer(), user/password are not - confirm this is intended.
    private String smtpHost = "192.168.71.66";
    private String smtpBenutzer = "benutzer";
    private String smtpPasswort = "passwort";
    // Number of successfully sent mails (used for debug logging only).
    private int counter = 0;

    /**
     * The mail queue: mails are parked here before being sent asynchronously.
     * Always accessed inside synchronized(mailQueue) - producer (sendMail) and
     * consumer (Mailer thread) run on different threads.
     */
    private final LinkedList mailQueue;

    /** The single instance of this class: one central sender for the whole web application. */
    private static final MailSender INSTANCE = new MailSender();

    /**
     * Returns the single shared {@code MailSender} instance.
     */
    public static MailSender getInstance()
    {
        return INSTANCE;
    }

    /** Constructor is private; the class must not be instantiated from outside. */
    private MailSender()
    {
        mailQueue = new LinkedList();
        this.counter = 0;
        session = Session.getDefaultInstance(props, null);
        try
        {
            transport = session.getTransport("smtp");
        }
        catch (Exception e)
        {
            log.error("Fehler beim initialisieren der Klasse : " + e, e);
        }
        // Mailer already is a Thread; start it directly instead of wrapping it in
        // another Thread as the previous revision did.
        new Mailer().start();
    }

    /**
     * Hands a mail over to the queue; it will be sent asynchronously by the Mailer thread.
     *
     * @param m the mail to enqueue
     * @return {@code true} if the mail was accepted, {@code false} otherwise
     */
    public boolean sendMail(Mail m)
    {
        try
        {
            synchronized (mailQueue)
            {
                mailQueue.add(m);
            }
            return true;
        }
        catch (Exception e)
        {
            return false;
        }
    }

    /**
     * Converts the domain part of an e-mail address to its ASCII (punycode) form.
     * Addresses without a usable {@code '@'} separator are returned unchanged rather
     * than triggering an exception.
     */
    private static String toPunycode(String address)
    {
        int at = address.lastIndexOf('@');
        if (at < 0 || at == address.length() - 1)
        {
            return address;
        }
        return address.substring(0, at) + "@" + IDN.toASCII(address.substring(at + 1));
    }

    /** The Mailer delivers the queued mails asynchronously in a background thread. */
    private class Mailer extends Thread
    {
        int threadnummer;

        Mailer()
        {
            super();
        }

        @Override
        public void run()
        {
            // Endless loop, periodically checking whether new mails are waiting.
            while (true)
            {
                try
                {
                    // Atomically take the first mail from the queue, if any.
                    Mail m = null;
                    synchronized (mailQueue)
                    {
                        if (!mailQueue.isEmpty())
                        {
                            m = (Mail) mailQueue.removeFirst();
                        }
                    }
                    if (m != null)
                    {
                        try
                        {
                            props.put("mail.smtp.host", m.getServer());
                            smtpHost = m.getServer();
                            content = new MimeMultipart("alternativ");
                            msg = new MimeMessage(session);
                            // Sender address
                            addressFrom = new InternetAddress(m.getSender());
                            msg.setFrom(addressFrom);
                            // Recipient addresses (TO), domain part converted to punycode
                            StringTokenizer st = new StringTokenizer(m.getTo(), " ,;");
                            int length = st.countTokens();
                            InternetAddress[] toAddresses = new InternetAddress[length];
                            for (int i = 0; st.hasMoreTokens(); i++)
                            {
                                toAddresses[i] = new InternetAddress(toPunycode(st.nextToken()));
                            }
                            msg.setRecipients(Message.RecipientType.TO, toAddresses);
                            // Carbon-copy addresses (CC)
                            if (m.getCc() != null && m.getCc().length() > 3)
                            {
                                StringTokenizer stcc = new StringTokenizer(m.getCc(), " ,;");
                                length = stcc.countTokens();
                                InternetAddress[] ccAddresses = new InternetAddress[length];
                                for (int i = 0; stcc.hasMoreTokens(); i++)
                                {
                                    // BUGFIX: previously read from the exhausted TO tokenizer (st)
                                    ccAddresses[i] = new InternetAddress(toPunycode(stcc.nextToken()));
                                }
                                msg.setRecipients(Message.RecipientType.CC, ccAddresses);
                            }
                            // Blind-carbon-copy addresses (BCC)
                            if (m.getBcc() != null && m.getBcc().length() > 3)
                            {
                                StringTokenizer stbcc = new StringTokenizer(m.getBcc(), " ,;");
                                length = stbcc.countTokens();
                                InternetAddress[] bccAddresses = new InternetAddress[length];
                                for (int i = 0; stbcc.hasMoreTokens(); i++)
                                {
                                    // BUGFIX: previously read from the exhausted TO tokenizer (st)
                                    bccAddresses[i] = new InternetAddress(toPunycode(stbcc.nextToken()));
                                }
                                msg.setRecipients(Message.RecipientType.BCC, bccAddresses);
                            }
                            // Reply-to addresses (no punycode conversion, matching previous behaviour)
                            if (null != m.getReply())
                            {
                                StringTokenizer streply = new StringTokenizer(m.getReply(), " ,;");
                                length = streply.countTokens();
                                InternetAddress[] replyAddresses = new InternetAddress[length];
                                for (int i = 0; streply.hasMoreTokens(); i++)
                                {
                                    replyAddresses[i] = new InternetAddress(streply.nextToken());
                                }
                                msg.setReplyTo(replyAddresses);
                            }
                            // Subject, plain-text body and headers
                            msg.setSubject(m.getSubject());
                            msg.setContent(m.getText(), "text/plain;charset=\"ISO-8859-1\"");
                            msg.setHeader("X-Mailer", "Elektra-Mailer");
                            msg.setSentDate(new Date()); // Date: header
                            // TODO(review): attachment support (Mail#getAnhaenge() assembled into a
                            // MimeMultipart) was commented out in the original and is still not implemented.
                            transport = session.getTransport("smtp");
                            // Connect to the mail server and send
                            transport.connect(smtpHost, smtpBenutzer, smtpPasswort);
                            transport.sendMessage(msg, msg.getAllRecipients());
                            transport.close();
                            counter++;
                            if (log.isDebugEnabled()) log.debug("Mail " + counter + " " + m.getSubject() + " erfolgreich gesendet");
                        }
                        catch (MessagingException me)
                        {
                            log.error("Fehler beim Versenden der Mail : " + me, me);
                        }
                        catch (Exception e)
                        {
                            log.error("Fehler beim Versenden der Mail : " + e, e);
                        }
                    }
                    else
                    {
                        // Queue is empty: sleep 600 ms before polling again
                        // (the old comment claimed minutes; the value is milliseconds).
                        Thread.sleep(600l);
                    }
                }
                catch (InterruptedException ie)
                {
                    // Restore the interrupt flag and stop the mailer thread cleanly.
                    Thread.currentThread().interrupt();
                    return;
                }
                catch (Exception e)
                {
                    log.error("Fehler beim Versenden der Mail : " + e, e);
                }
            }
        }
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.utils;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesAction;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexAction;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.ClusterAdminClient;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasMetadata;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants;
import org.elasticsearch.xpack.core.template.IndexTemplateConfig;
import org.junit.After;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.stubbing.Answer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toMap;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code MlIndexAndAlias}: installing index templates and creating the
 * first concrete ML index plus its hidden write alias, driven entirely through mocked
 * admin clients (no real cluster).
 */
public class MlIndexAndAliasTests extends ESTestCase {

    private static final String TEST_INDEX_PREFIX = "test";
    private static final String TEST_INDEX_ALIAS = "test-alias";
    // An index named exactly like the prefix, i.e. without the -NNNNNN suffix (pre-rollover layout).
    private static final String LEGACY_INDEX_WITHOUT_SUFFIX = TEST_INDEX_PREFIX;
    private static final String FIRST_CONCRETE_INDEX = "test-000001";

    private ThreadPool threadPool;
    private IndicesAdminClient indicesAdminClient;
    private ClusterAdminClient clusterAdminClient;
    private AdminClient adminClient;
    private Client client;
    private ActionListener<Boolean> listener;
    private ArgumentCaptor<CreateIndexRequest> createRequestCaptor;
    private ArgumentCaptor<IndicesAliasesRequest> aliasesRequestCaptor;

    // Wires up a mock client whose create/aliases/putTemplate/health calls all answer
    // immediately with successful responses, and captors for the requests under test.
    @Before
    public void setUpMocks() {
        threadPool = mock(ThreadPool.class);
        when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
        indicesAdminClient = mock(IndicesAdminClient.class);
        when(indicesAdminClient.prepareCreate(FIRST_CONCRETE_INDEX))
            .thenReturn(new CreateIndexRequestBuilder(client, CreateIndexAction.INSTANCE, FIRST_CONCRETE_INDEX));
        doAnswer(withResponse(new CreateIndexResponse(true, true, FIRST_CONCRETE_INDEX))).when(indicesAdminClient).create(any(), any());
        when(indicesAdminClient.prepareAliases()).thenReturn(new IndicesAliasesRequestBuilder(client, IndicesAliasesAction.INSTANCE));
        doAnswer(withResponse(new AcknowledgedResponse(true))).when(indicesAdminClient).aliases(any(), any());
        doAnswer(withResponse(new AcknowledgedResponse(true))).when(indicesAdminClient).putTemplate(any(), any());
        clusterAdminClient = mock(ClusterAdminClient.class);
        doAnswer(invocationOnMock -> {
            @SuppressWarnings("unchecked")
            ActionListener<ClusterHealthResponse> listener = (ActionListener<ClusterHealthResponse>) invocationOnMock.getArguments()[1];
            listener.onResponse(new ClusterHealthResponse());
            return null;
        }).when(clusterAdminClient).health(any(ClusterHealthRequest.class), any(ActionListener.class));
        adminClient = mock(AdminClient.class);
        when(adminClient.indices()).thenReturn(indicesAdminClient);
        when(adminClient.cluster()).thenReturn(clusterAdminClient);
        client = mock(Client.class);
        when(client.threadPool()).thenReturn(threadPool);
        when(client.admin()).thenReturn(adminClient);
        listener = mock(ActionListener.class);
        createRequestCaptor = ArgumentCaptor.forClass(CreateIndexRequest.class);
        aliasesRequestCaptor = ArgumentCaptor.forClass(IndicesAliasesRequest.class);
    }

    // Every test must account for all interactions it triggers on these mocks.
    @After
    public void verifyNoMoreInteractionsWithMocks() {
        verifyNoMoreInteractions(indicesAdminClient, listener);
    }

    // Template already installed: no client calls, listener completes with true.
    public void testInstallIndexTemplateIfRequired_GivenTemplateExists() {
        ClusterState clusterState = createClusterState(Collections.emptyMap(),
            Collections.singletonMap(InferenceIndexConstants.LATEST_INDEX_NAME,
                createIndexTemplateMetaData(InferenceIndexConstants.LATEST_INDEX_NAME,
                    Collections.singletonList(InferenceIndexConstants.LATEST_INDEX_NAME))));
        IndexTemplateConfig inferenceTemplate = new IndexTemplateConfig(InferenceIndexConstants.LATEST_INDEX_NAME,
            "not_a_real_file.json", Version.CURRENT.id, "xpack.ml.version",
            Collections.singletonMap("xpack.ml.version.id", String.valueOf(Version.CURRENT.id)));
        MlIndexAndAlias.installIndexTemplateIfRequired(clusterState, client, inferenceTemplate, listener);
        verify(listener).onResponse(true);
        verifyNoMoreInteractions(client);
    }

    // Template missing: a putTemplate call is issued before the listener completes.
    public void testInstallIndexTemplateIfRequired() {
        ClusterState clusterState = createClusterState(Collections.emptyMap());
        IndexTemplateConfig inferenceTemplate = new IndexTemplateConfig(InferenceIndexConstants.LATEST_INDEX_NAME,
            "/org/elasticsearch/xpack/core/ml/inference_index_template.json", Version.CURRENT.id, "xpack.ml.version",
            Collections.singletonMap("xpack.ml.version.id", String.valueOf(Version.CURRENT.id)));
        MlIndexAndAlias.installIndexTemplateIfRequired(clusterState, client, inferenceTemplate, listener);
        InOrder inOrder = inOrder(indicesAdminClient, listener);
        inOrder.verify(indicesAdminClient).putTemplate(any(), any());
        inOrder.verify(listener).onResponse(true);
    }

    // Empty cluster: the first concrete index is created with the hidden alias attached directly.
    public void testCreateStateIndexAndAliasIfNecessary_CleanState() {
        ClusterState clusterState = createClusterState(Collections.emptyMap());
        createIndexAndAliasIfNecessary(clusterState);
        InOrder inOrder = inOrder(indicesAdminClient, listener);
        inOrder.verify(indicesAdminClient).prepareCreate(FIRST_CONCRETE_INDEX);
        inOrder.verify(indicesAdminClient).create(createRequestCaptor.capture(), any());
        inOrder.verify(listener).onResponse(true);
        CreateIndexRequest createRequest = createRequestCaptor.getValue();
        assertThat(createRequest.index(), equalTo(FIRST_CONCRETE_INDEX));
        assertThat(createRequest.aliases(), equalTo(Collections.singleton(new Alias(TEST_INDEX_ALIAS).isHidden(true))));
    }

    // If the write alias already points at indexName, nothing is created and the listener gets false.
    private void assertNoClientInteractionsWhenWriteAliasAlreadyExists(String indexName) {
        ClusterState clusterState = createClusterState(Collections.singletonMap(indexName, createIndexMetadataWithAlias(indexName)));
        createIndexAndAliasIfNecessary(clusterState);
        verify(listener).onResponse(false);
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasAlreadyExistsAndPointsAtInitialStateIndex() {
        assertNoClientInteractionsWhenWriteAliasAlreadyExists(FIRST_CONCRETE_INDEX);
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasAlreadyExistsAndPointsAtSubsequentStateIndex() {
        assertNoClientInteractionsWhenWriteAliasAlreadyExists("test-000007");
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasAlreadyExistsAndPointsAtDummyIndex() {
        assertNoClientInteractionsWhenWriteAliasAlreadyExists("dummy-index");
    }

    // Alias points at the legacy (unsuffixed) index: create the new index without an inline
    // alias, then atomically move the alias from the legacy index to the new one.
    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasAlreadyExistsAndPointsAtLegacyStateIndex() {
        ClusterState clusterState =
            createClusterState(
                Collections.singletonMap(LEGACY_INDEX_WITHOUT_SUFFIX, createIndexMetadataWithAlias(LEGACY_INDEX_WITHOUT_SUFFIX)));
        createIndexAndAliasIfNecessary(clusterState);
        InOrder inOrder = inOrder(indicesAdminClient, listener);
        inOrder.verify(indicesAdminClient).prepareCreate(FIRST_CONCRETE_INDEX);
        inOrder.verify(indicesAdminClient).create(createRequestCaptor.capture(), any());
        inOrder.verify(indicesAdminClient).prepareAliases();
        inOrder.verify(indicesAdminClient).aliases(aliasesRequestCaptor.capture(), any());
        inOrder.verify(listener).onResponse(true);
        CreateIndexRequest createRequest = createRequestCaptor.getValue();
        assertThat(createRequest.index(), equalTo(FIRST_CONCRETE_INDEX));
        assertThat(createRequest.aliases(), empty());
        IndicesAliasesRequest indicesAliasesRequest = aliasesRequestCaptor.getValue();
        assertThat(
            indicesAliasesRequest.getAliasActions(),
            contains(
                AliasActions.add().alias(TEST_INDEX_ALIAS).index(FIRST_CONCRETE_INDEX).isHidden(true),
                AliasActions.remove().alias(TEST_INDEX_ALIAS).index(LEGACY_INDEX_WITHOUT_SUFFIX)));
    }

    // No alias yet but state indices exist: the alias is added to the most recent one.
    private void assertMlStateWriteAliasAddedToMostRecentMlStateIndex(List<String> existingIndexNames, String expectedWriteIndexName) {
        ClusterState clusterState =
            createClusterState(
                existingIndexNames.stream().collect(toMap(Function.identity(), MlIndexAndAliasTests::createIndexMetadata)));
        createIndexAndAliasIfNecessary(clusterState);
        InOrder inOrder = inOrder(indicesAdminClient, listener);
        inOrder.verify(indicesAdminClient).prepareAliases();
        inOrder.verify(indicesAdminClient).aliases(aliasesRequestCaptor.capture(), any());
        inOrder.verify(listener).onResponse(true);
        IndicesAliasesRequest indicesAliasesRequest = aliasesRequestCaptor.getValue();
        assertThat(
            indicesAliasesRequest.getAliasActions(),
            contains(AliasActions.add().alias(TEST_INDEX_ALIAS).index(expectedWriteIndexName).isHidden(true)));
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasDoesNotExistButInitialStateIndexExists() {
        assertMlStateWriteAliasAddedToMostRecentMlStateIndex(
            Arrays.asList(FIRST_CONCRETE_INDEX), FIRST_CONCRETE_INDEX);
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasDoesNotExistButSubsequentStateIndicesExist() {
        assertMlStateWriteAliasAddedToMostRecentMlStateIndex(
            Arrays.asList("test-000003", "test-000040", "test-000500"), "test-000500");
    }

    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasDoesNotExistButBothLegacyAndNewIndicesExist() {
        assertMlStateWriteAliasAddedToMostRecentMlStateIndex(
            Arrays.asList(LEGACY_INDEX_WITHOUT_SUFFIX, "test-000003", "test-000040", "test-000500"), "test-000500");
    }

    // Only the legacy index exists and there is no alias: a fresh first concrete index is
    // created with the alias inline (the legacy index is left untouched).
    public void testCreateStateIndexAndAliasIfNecessary_WriteAliasDoesNotExistButLegacyStateIndexExists() {
        ClusterState clusterState =
            createClusterState(Collections.singletonMap(LEGACY_INDEX_WITHOUT_SUFFIX, createIndexMetadata(LEGACY_INDEX_WITHOUT_SUFFIX)));
        createIndexAndAliasIfNecessary(clusterState);
        InOrder inOrder = inOrder(indicesAdminClient, listener);
        inOrder.verify(indicesAdminClient).prepareCreate(FIRST_CONCRETE_INDEX);
        inOrder.verify(indicesAdminClient).create(createRequestCaptor.capture(), any());
        inOrder.verify(listener).onResponse(true);
        CreateIndexRequest createRequest = createRequestCaptor.getValue();
        assertThat(createRequest.index(), equalTo(FIRST_CONCRETE_INDEX));
        assertThat(createRequest.aliases(), equalTo(Collections.singleton(new Alias(TEST_INDEX_ALIAS).isHidden(true))));
    }

    // Spot-checks the comparator used to pick the "most recent" state index by its numeric suffix.
    public void testIndexNameComparator() {
        Comparator<String> comparator = MlIndexAndAlias.INDEX_NAME_COMPARATOR;
        assertThat(
            Stream.of("test-000001").max(comparator).get(),
            equalTo("test-000001"));
        assertThat(
            Stream.of("test-000002", "test-000001").max(comparator).get(),
            equalTo("test-000002"));
        assertThat(
            Stream.of("test-000003", "test-000040", "test-000500").max(comparator).get(),
            equalTo("test-000500"));
        assertThat(
            Stream.of("test-000042", "test-000049", "test-000038").max(comparator).get(),
            equalTo("test-000049"));
        assertThat(
            Stream.of("test", "test-000003", "test-000040", "test-000500").max(comparator).get(),
            equalTo("test-000500"));
        assertThat(
            Stream.of(".reindexed-6-test", "test-000042").max(comparator).get(),
            equalTo("test-000042"));
        assertThat(
            Stream.of(".a-000002", ".b-000001").max(comparator).get(),
            equalTo(".a-000002"));
    }

    // Invokes the production code under test with the standard prefix/alias pair.
    private void createIndexAndAliasIfNecessary(ClusterState clusterState) {
        MlIndexAndAlias.createIndexAndAliasIfNecessary(
            client, clusterState, new IndexNameExpressionResolver(), TEST_INDEX_PREFIX, TEST_INDEX_ALIAS, listener);
    }

    // Mockito Answer that immediately completes the ActionListener argument (arg 1) with the response.
    @SuppressWarnings("unchecked")
    private static <Response> Answer<Response> withResponse(Response response) {
        return invocationOnMock -> {
            ActionListener<Response> listener = (ActionListener<Response>) invocationOnMock.getArguments()[1];
            listener.onResponse(response);
            return null;
        };
    }

    private static ClusterState createClusterState(Map<String, IndexMetadata> indices) {
        return createClusterState(indices, Collections.emptyMap());
    }

    // Builds a minimal cluster state containing only the given indices and templates.
    private static ClusterState createClusterState(Map<String, IndexMetadata> indices, Map<String, IndexTemplateMetadata> templates) {
        return ClusterState.builder(ClusterName.DEFAULT)
            .metadata(Metadata.builder()
                .indices(ImmutableOpenMap.<String, IndexMetadata>builder().putAll(indices).build())
                .templates(ImmutableOpenMap.<String, IndexTemplateMetadata>builder().putAll(templates).build())
                .build())
            .build();
    }

    private static IndexMetadata createIndexMetadata(String indexName) {
        return createIndexMetadata(indexName, false);
    }

    private static IndexMetadata createIndexMetadataWithAlias(String indexName) {
        return createIndexMetadata(indexName, true);
    }

    private static IndexTemplateMetadata createIndexTemplateMetaData(String templateName, List<String> patterns) {
        return IndexTemplateMetadata.builder(templateName).patterns(patterns).build();
    }

    // 1 shard / 0 replicas keeps the metadata valid while staying minimal for tests.
    private static IndexMetadata createIndexMetadata(String indexName, boolean withAlias) {
        Settings settings =
            Settings.builder()
                .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
                .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT)
                .build();
        IndexMetadata.Builder builder = IndexMetadata.builder(indexName)
            .settings(settings);
        if (withAlias) {
            builder.putAlias(AliasMetadata.builder(TEST_INDEX_ALIAS).build());
        }
        return builder.build();
    }
}
| |
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2000-2016. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
package com.ericsson.otp.erlang;
/**
* <p>
* Provides a carrier for Erlang messages.
* </p>
*
* <p>
* Instances of this class are created to package header and payload information
* in received Erlang messages so that the recipient can obtain both parts with
* a single call to {@link OtpMbox#receiveMsg receiveMsg()}.
* </p>
*
* <p>
* The header information that is available is as follows:
* <ul>
* <li>a tag indicating the type of message
* <li>the intended recipient of the message, either as a {@link OtpErlangPid
* pid} or as a String, but never both.
* <li>(sometimes) the sender of the message. Due to some eccentric
* characteristics of the Erlang distribution protocol, not all messages have
* information about the sending process. In particular, only messages whose tag
* is {@link OtpMsg#regSendTag regSendTag} contain sender information.
* </ul>
*
* <p>
 * Messages are sent using the Erlang external format (see separate
* documentation). When a message is received and delivered to the recipient
* {@link OtpMbox mailbox}, the body of the message is still in this external
* representation until {@link #getMsg getMsg()} is called, at which point the
* message is decoded. A copy of the decoded message is stored in the OtpMsg so
* that subsequent calls to {@link #getMsg getMsg()} do not require that the
* message be decoded a second time.
* </p>
*/
public class OtpMsg {
    // Message type tags, mirroring the Erlang distribution protocol control-message tags.
    public static final int linkTag = 1;
    public static final int sendTag = 2;
    public static final int exitTag = 3;
    public static final int unlinkTag = 4;
    public static final int regSendTag = 6;
    /* public static final int groupLeaderTag = 7; */
    public static final int exit2Tag = 8;

    protected int tag; // what type of message is this (send, link, exit etc)
    protected OtpInputStream paybuf; // serialized payload; decoded lazily by getMsg()
    protected OtpErlangObject payload; // decoded payload, cached after the first getMsg()
    protected OtpErlangPid from; // sender pid, when the protocol supplies one
    protected OtpErlangPid to; // recipient pid, or null when sent to a registered name
    protected String toName; // recipient registered name, or null when sent to a pid
    protected long unlink_id; // id carried by unlink/unlink-ack messages, otherwise 0
    // send has receiver pid but no sender information; payload arrives still serialized
    OtpMsg(final OtpErlangPid to, final OtpInputStream paybuf) {
        tag = sendTag;
        from = null;
        this.to = to;
        toName = null;
        this.paybuf = paybuf;
        payload = null; // decoded on demand by getMsg()
        this.unlink_id = 0;
    }
    // send has receiver pid but no sender information; payload already decoded
    OtpMsg(final OtpErlangPid to, final OtpErlangObject payload) {
        tag = sendTag;
        from = null;
        this.to = to;
        toName = null;
        paybuf = null;
        this.payload = payload;
        this.unlink_id = 0;
    }
    // send_reg has sender pid and receiver name; payload arrives still serialized
    OtpMsg(final OtpErlangPid from, final String toName,
           final OtpInputStream paybuf) {
        tag = regSendTag;
        this.from = from;
        this.toName = toName;
        to = null;
        this.paybuf = paybuf;
        payload = null; // decoded on demand by getMsg()
        this.unlink_id = 0;
    }
    // send_reg has sender pid and receiver name; payload already decoded
    OtpMsg(final OtpErlangPid from, final String toName,
           final OtpErlangObject payload) {
        tag = regSendTag;
        this.from = from;
        this.toName = toName;
        to = null;
        paybuf = null;
        this.payload = payload;
        this.unlink_id = 0;
    }
// exit (etc) has from, to, reason
OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to,
final OtpErlangObject reason) {
this.tag = tag;
this.from = from;
this.to = to;
this.unlink_id = 0;
paybuf = null;
payload = reason;
this.unlink_id = 0;
}
    // special case when reason is an atom (i.e. most of the time); wraps it for the caller
    OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to,
            final String reason) {
        this.tag = tag;
        this.from = from;
        this.to = to;
        paybuf = null;
        payload = new OtpErlangAtom(reason);
        this.unlink_id = 0;
    }
    // other message types (link and old unlink); these carry no payload
    OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to) {
        // convert TT-tags to equiv non-TT versions
        this.tag = drop_tt_tag(tag);
        this.from = from;
        this.to = to;
        this.unlink_id = 0;
    }
    // unlink (new protocol): additionally carries the unlink id for the acknowledgment
    OtpMsg(final int tag, final OtpErlangPid from, final OtpErlangPid to,
            final long unlink_id) {
        // convert TT-tags to equiv non-TT versions
        this.tag = drop_tt_tag(tag);
        this.from = from;
        this.to = to;
        this.unlink_id = unlink_id;
    }
private int drop_tt_tag(final int tag) {
switch (tag) {
case AbstractConnection.sendTTTag:
return OtpMsg.sendTag;
case AbstractConnection.exitTTTag:
return OtpMsg.exitTag;
case AbstractConnection.regSendTTTag:
return OtpMsg.regSendTag;
case AbstractConnection.exit2TTTag:
return OtpMsg.exit2Tag;
default:
return tag;
}
}
    /**
     * Get the unlink identifier of an unlink or unlink acknowledgment
     * message. For package internal use only.
     *
     * @return the unlink id carried by this message, or 0 for message types
     *         that do not carry one.
     */
    long getUnlinkId() {
        return this.unlink_id;
    }
    /**
     * Get the payload from this message without deserializing it.
     *
     * @return the serialized Erlang term contained in this message, or null if
     *         the message was constructed from an already-decoded term.
     */
    OtpInputStream getMsgBuf() {
        return paybuf;
    }
    /**
     * <p>
     * Get the type marker from this message. The type marker identifies the
     * type of message. Valid values are the ``tag'' constants defined in this
     * class.
     * </p>
     *
     * <p>
     * The tag identifies not only the type of message but also the content of
     * the OtpMsg object, since different messages have different components, as
     * follows:
     * </p>
     *
     * <ul>
     * <li>sendTag identifies a "normal" message. The recipient is a
     * {@link OtpErlangPid Pid} and it is available through
     * {@link #getRecipientPid getRecipientPid()}. Sender information is not
     * available. The message body can be retrieved with {@link #getMsg
     * getMsg()}.</li>
     *
     * <li>regSendTag also identifies a "normal" message. The recipient here is
     * a String and it is available through {@link #getRecipientName
     * getRecipientName()}. Sender information is available through
     * {@link #getSenderPid getSenderPid()}. The message body can be retrieved
     * with {@link #getMsg getMsg()}.</li>
     *
     * <li>linkTag identifies a link request. The Pid of the sender is
     * available, as well as the Pid to which the link should be made.</li>
     *
     * <li>exitTag and exit2Tag messages are sent as a result of broken links.
     * Both sender and recipient Pids and are available through the
     * corresponding methods, and the "reason" is available through
     * {@link #getMsg getMsg()}.</li>
     * </ul>
     */
    public int type() {
        return tag;
    }
/**
 * <p>
 * Deserialize and return a new copy of the message contained in this
 * OtpMsg.
 * </p>
 *
 * <p>
 * The first time this method is called the actual payload is deserialized
 * and the Erlang term is created. Calling this method subsequent times will
 * not cause the message to be deserialized additional times, instead the
 * same Erlang term object will be returned.
 * </p>
 *
 * <p>
 * Note that the lazy caching below is not synchronized; concurrent first
 * calls could each deserialize the payload.
 * </p>
 *
 * @return an Erlang term.
 *
 * @exception OtpErlangDecodeException
 *                if the byte stream could not be deserialized.
 *
 */
public OtpErlangObject getMsg() throws OtpErlangDecodeException {
    // Deserialize on first access only; later calls reuse the cached term.
    if (payload == null) {
        payload = paybuf.read_any();
    }
    return payload;
}
/**
 * <p>
 * Get the name of the recipient for this message.
 * </p>
 *
 * <p>
 * Messages are addressed either to a Pid or to a registered name. If this
 * message was addressed to a name, that name is returned here.
 * </p>
 *
 * @return the recipient name, or null if the message was addressed to a
 *         Pid instead.
 */
public String getRecipientName() {
    return this.toName;
}
/**
 * <p>
 * Get the Pid of the recipient for this message, if it is a sendTag
 * message.
 * </p>
 *
 * <p>
 * Messages are addressed either to a Pid or to a registered name. If this
 * message was addressed to a Pid, that Pid is returned here. The recipient
 * Pid is also available for link, unlink and exit messages.
 * </p>
 *
 * @return the recipient Pid, or null if the message was addressed to a
 *         name instead.
 */
public OtpErlangPid getRecipientPid() {
    return this.to;
}
/**
 * <p>
 * Get the recipient of this message, whichever form it takes.
 * </p>
 *
 * <p>
 * Messages are addressed either to a Pid or to a registered name. This
 * method returns the recipient name when one was set, otherwise the
 * recipient Pid.
 * </p>
 *
 * @return the recipient name (a String) if the message was addressed to a
 *         name, otherwise the recipient Pid.
 */
public Object getRecipient() {
    return toName != null ? toName : to;
}
/**
 * <p>
 * Get the Pid of the sender of this message.
 * </p>
 *
 * <p>
 * For messages sent to names, the sender's Pid accompanies the message.
 * The sender Pid is also present for link, unlink and exit messages, but
 * not for sendTag messages addressed directly to a Pid.
 * </p>
 *
 * @return the sender's Pid, or null when it was not available.
 */
public OtpErlangPid getSenderPid() {
    return this.from;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.connect.mirror;
import java.util.Map.Entry;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.provider.ConfigProvider;
import org.apache.kafka.common.config.ConfigTransformer;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.connect.runtime.WorkerConfig;
import org.apache.kafka.connect.runtime.distributed.DistributedConfig;
import org.apache.kafka.connect.runtime.isolation.Plugins;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.HashSet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.stream.Collectors;
/** Top-level config describing replication flows between multiple Kafka clusters.
*
* Supports cluster-level properties of the form cluster.x.y.z, and replication-level
* properties of the form source->target.x.y.z.
* e.g.
*
* clusters = A, B, C
* A.bootstrap.servers = aaa:9092
* A.security.protocol = SSL
* --->%---
* A->B.enabled = true
* A->B.producer.client.id = "A-B-producer"
* --->%---
*
*/
public class MirrorMakerConfig extends AbstractConfig {

    public static final String CLUSTERS_CONFIG = "clusters";
    private static final String CLUSTERS_DOC = "List of cluster aliases.";
    public static final String CONFIG_PROVIDERS_CONFIG = WorkerConfig.CONFIG_PROVIDERS_CONFIG;
    private static final String CONFIG_PROVIDERS_DOC = "Names of ConfigProviders to use.";

    // Connector-level property names filled in by connectorBaseConfig().
    private static final String NAME = "name";
    private static final String CONNECTOR_CLASS = "connector.class";
    private static final String SOURCE_CLUSTER_ALIAS = "source.cluster.alias";
    private static final String TARGET_CLUSTER_ALIAS = "target.cluster.alias";
    private static final String GROUP_ID_CONFIG = "group.id";
    private static final String KEY_CONVERTER_CLASS_CONFIG = "key.converter";
    private static final String VALUE_CONVERTER_CLASS_CONFIG = "value.converter";
    private static final String HEADER_CONVERTER_CLASS_CONFIG = "header.converter";
    // MM2 replicates raw bytes by default, so converters default to pass-through.
    private static final String BYTE_ARRAY_CONVERTER_CLASS =
        "org.apache.kafka.connect.converters.ByteArrayConverter";

    // Prefixes used when projecting per-cluster props into connector configs.
    static final String SOURCE_CLUSTER_PREFIX = "source.cluster.";
    static final String TARGET_CLUSTER_PREFIX = "target.cluster.";
    static final String SOURCE_PREFIX = "source.";
    static final String TARGET_PREFIX = "target.";

    // Used to instantiate ConfigProviders declared via config.providers.
    private final Plugins plugins;

    public MirrorMakerConfig(Map<?, ?> props) {
        // 'true' allows unknown keys (cluster.x.y.z, source->target.x.y.z).
        super(CONFIG_DEF, props, true);
        plugins = new Plugins(originalsStrings());
    }

    /** Set of cluster aliases declared via the 'clusters' property. */
    public Set<String> clusters() {
        return new HashSet<>(getList(CLUSTERS_CONFIG));
    }

    /**
     * All ordered (source, target) pairs of distinct cluster aliases for which
     * a Herder should be created, based on x->y.enabled and heartbeat settings.
     */
    public List<SourceAndTarget> clusterPairs() {
        List<SourceAndTarget> pairs = new ArrayList<>();
        Set<String> clusters = clusters();
        Map<String, String> originalStrings = originalsStrings();
        // Global emit.heartbeats.enabled, falling back to the connector default.
        boolean globalHeartbeatsEnabled = MirrorConnectorConfig.EMIT_HEARTBEATS_ENABLED_DEFAULT;
        if (originalStrings.containsKey(MirrorConnectorConfig.EMIT_HEARTBEATS_ENABLED)) {
            globalHeartbeatsEnabled = Boolean.valueOf(originalStrings.get(MirrorConnectorConfig.EMIT_HEARTBEATS_ENABLED));
        }
        for (String source : clusters) {
            for (String target : clusters) {
                if (!source.equals(target)) {
                    String clusterPairConfigPrefix = source + "->" + target + ".";
                    boolean clusterPairEnabled = Boolean.valueOf(originalStrings.getOrDefault(clusterPairConfigPrefix + "enabled", "false"));
                    // Pair-level heartbeat flag overrides the global one when present.
                    boolean clusterPairHeartbeatsEnabled = globalHeartbeatsEnabled;
                    if (originalStrings.containsKey(clusterPairConfigPrefix + MirrorConnectorConfig.EMIT_HEARTBEATS_ENABLED)) {
                        clusterPairHeartbeatsEnabled = Boolean.valueOf(originalStrings.get(clusterPairConfigPrefix + MirrorConnectorConfig.EMIT_HEARTBEATS_ENABLED));
                    }
                    // By default, all source->target Herder combinations are created even if `x->y.enabled=false`
                    // Unless `emit.heartbeats.enabled=false` or `x->y.emit.heartbeats.enabled=false`
                    // Reason for this behavior: for a given replication flow A->B with heartbeats, 2 herders are required :
                    // B->A for the MirrorHeartbeatConnector (emits heartbeats into A for monitoring replication health)
                    // A->B for the MirrorSourceConnector (actual replication flow)
                    if (clusterPairEnabled || clusterPairHeartbeatsEnabled) {
                        pairs.add(new SourceAndTarget(source, target));
                    }
                }
            }
        }
        return pairs;
    }

    /** Construct a MirrorClientConfig from properties of the form cluster.x.y.z.
     * Use to connect to a cluster based on the MirrorMaker top-level config file.
     */
    public MirrorClientConfig clientConfig(String cluster) {
        Map<String, String> props = new HashMap<>();
        props.putAll(originalsStrings());
        // Cluster-specific values take precedence over top-level ones.
        props.putAll(clusterProps(cluster));
        return new MirrorClientConfig(transform(props));
    }

    // loads properties of the form cluster.x.y.z
    Map<String, String> clusterProps(String cluster) {
        Map<String, String> props = new HashMap<>();
        Map<String, String> strings = originalsStrings();
        // Strip the "<cluster>." prefix so values apply directly.
        props.putAll(stringsWithPrefixStripped(cluster + "."));
        // Fan cluster-level client configs out to producer./consumer./admin.
        // variants, without overriding explicitly-set prefixed values.
        for (String k : MirrorClientConfig.CLIENT_CONFIG_DEF.names()) {
            String v = props.get(k);
            if (v != null) {
                props.putIfAbsent("producer." + k, v);
                props.putIfAbsent("consumer." + k, v);
                props.putIfAbsent("admin." + k, v);
            }
        }
        // Top-level (un-prefixed) client configs act as last-resort defaults.
        for (String k : MirrorClientConfig.CLIENT_CONFIG_DEF.names()) {
            String v = strings.get(k);
            if (v != null) {
                props.putIfAbsent("producer." + k, v);
                props.putIfAbsent("consumer." + k, v);
                props.putIfAbsent("admin." + k, v);
                props.putIfAbsent(k, v);
            }
        }
        return props;
    }

    // loads worker configs based on properties of the form x.y.z and cluster.x.y.z
    public Map<String, String> workerConfig(SourceAndTarget sourceAndTarget) {
        Map<String, String> props = new HashMap<>();
        props.putAll(clusterProps(sourceAndTarget.target()));

        // Accept common top-level configs that are otherwise ignored by MM2.
        // N.B. all other worker properties should be configured for specific herders,
        // e.g. primary->backup.client.id
        props.putAll(stringsWithPrefix("offset.storage"));
        props.putAll(stringsWithPrefix("config.storage"));
        props.putAll(stringsWithPrefix("status.storage"));
        props.putAll(stringsWithPrefix("key.converter"));
        props.putAll(stringsWithPrefix("value.converter"));
        props.putAll(stringsWithPrefix("header.converter"));
        props.putAll(stringsWithPrefix("task"));
        props.putAll(stringsWithPrefix("worker"));

        // transform any expression like ${provider:path:key}, since the worker doesn't do so
        props = transform(props);
        // Re-add provider configs AFTER transform so they reach the worker untransformed.
        props.putAll(stringsWithPrefix(CONFIG_PROVIDERS_CONFIG));

        // fill in reasonable defaults
        props.putIfAbsent(GROUP_ID_CONFIG, sourceAndTarget.source() + "-mm2");
        props.putIfAbsent(DistributedConfig.OFFSET_STORAGE_TOPIC_CONFIG, "mm2-offsets."
                + sourceAndTarget.source() + ".internal");
        props.putIfAbsent(DistributedConfig.STATUS_STORAGE_TOPIC_CONFIG, "mm2-status."
                + sourceAndTarget.source() + ".internal");
        props.putIfAbsent(DistributedConfig.CONFIG_TOPIC_CONFIG, "mm2-configs."
                + sourceAndTarget.source() + ".internal");
        props.putIfAbsent(KEY_CONVERTER_CLASS_CONFIG, BYTE_ARRAY_CONVERTER_CLASS);
        props.putIfAbsent(VALUE_CONVERTER_CLASS_CONFIG, BYTE_ARRAY_CONVERTER_CLASS);
        props.putIfAbsent(HEADER_CONVERTER_CLASS_CONFIG, BYTE_ARRAY_CONVERTER_CLASS);
        return props;
    }

    // loads properties of the form cluster.x.y.z and source->target.x.y.z
    public Map<String, String> connectorBaseConfig(SourceAndTarget sourceAndTarget, Class<?> connectorClass) {
        Map<String, String> props = new HashMap<>();

        // Start from top-level props limited to known connector config names.
        props.putAll(originalsStrings());
        props.keySet().retainAll(MirrorConnectorConfig.CONNECTOR_CONFIG_DEF.names());

        props.putAll(stringsWithPrefix(CONFIG_PROVIDERS_CONFIG));

        Map<String, String> sourceClusterProps = clusterProps(sourceAndTarget.source());
        // attrs non prefixed with producer|consumer|admin
        props.putAll(clusterConfigsWithPrefix(SOURCE_CLUSTER_PREFIX, sourceClusterProps));
        // attrs prefixed with producer|consumer|admin
        props.putAll(clientConfigsWithPrefix(SOURCE_PREFIX, sourceClusterProps));

        Map<String, String> targetClusterProps = clusterProps(sourceAndTarget.target());
        props.putAll(clusterConfigsWithPrefix(TARGET_CLUSTER_PREFIX, targetClusterProps));
        props.putAll(clientConfigsWithPrefix(TARGET_PREFIX, targetClusterProps));

        props.putIfAbsent(NAME, connectorClass.getSimpleName());
        props.putIfAbsent(CONNECTOR_CLASS, connectorClass.getName());
        props.putIfAbsent(SOURCE_CLUSTER_ALIAS, sourceAndTarget.source());
        props.putIfAbsent(TARGET_CLUSTER_ALIAS, sourceAndTarget.target());

        // override with connector-level properties
        props.putAll(stringsWithPrefixStripped(sourceAndTarget.source() + "->"
            + sourceAndTarget.target() + "."));

        // disabled by default
        props.putIfAbsent(MirrorConnectorConfig.ENABLED, "false");

        // don't transform -- the worker will handle transformation of Connector and Task configs
        return props;
    }

    // Names of ConfigProviders declared via config.providers.
    List<String> configProviders() {
        return getList(CONFIG_PROVIDERS_CONFIG);
    }

    /**
     * Resolve ${provider:path:key} expressions in the given props using the
     * configured ConfigProviders. Providers are closed before returning.
     */
    Map<String, String> transform(Map<String, String> props) {
        // transform worker config according to config.providers
        List<String> providerNames = configProviders();
        Map<String, ConfigProvider> providers = new HashMap<>();
        for (String name : providerNames) {
            ConfigProvider configProvider = plugins.newConfigProvider(
                    this,
                    CONFIG_PROVIDERS_CONFIG + "." + name,
                    Plugins.ClassLoaderUsage.PLUGINS
            );
            providers.put(name, configProvider);
        }
        ConfigTransformer transformer = new ConfigTransformer(providers);
        Map<String, String> transformed = transformer.transform(props).data();
        providers.values().forEach(x -> Utils.closeQuietly(x, "config provider"));
        return transformed;
    }

    protected static final ConfigDef CONFIG_DEF = new ConfigDef()
            .define(CLUSTERS_CONFIG, Type.LIST, Importance.HIGH, CLUSTERS_DOC)
            .define(CONFIG_PROVIDERS_CONFIG, Type.LIST, Collections.emptyList(), Importance.LOW, CONFIG_PROVIDERS_DOC)
            // security support
            .define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
                    Type.STRING,
                    CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL,
                    Importance.MEDIUM,
                    CommonClientConfigs.SECURITY_PROTOCOL_DOC)
            .withClientSslSupport()
            .withClientSaslSupport();

    // Entries whose key starts with 'prefix', with the prefix removed.
    private Map<String, String> stringsWithPrefixStripped(String prefix) {
        return originalsStrings().entrySet().stream()
            .filter(x -> x.getKey().startsWith(prefix))
            .collect(Collectors.toMap(x -> x.getKey().substring(prefix.length()), Entry::getValue));
    }

    // Entries whose key starts with 'prefix', keys unchanged.
    // NOTE(review): mutates the map returned by originalsStrings() — assumes
    // that is a fresh copy per call; confirm against AbstractConfig.
    private Map<String, String> stringsWithPrefix(String prefix) {
        Map<String, String> strings = originalsStrings();
        strings.keySet().removeIf(x -> !x.startsWith(prefix));
        return strings;
    }

    // Cluster-level props (NOT producer/consumer/admin prefixed), re-prefixed.
    static Map<String, String> clusterConfigsWithPrefix(String prefix, Map<String, String> props) {
        return props.entrySet().stream()
            .filter(x -> !x.getKey().matches("(^consumer.*|^producer.*|^admin.*)"))
            .collect(Collectors.toMap(x -> prefix + x.getKey(), Entry::getValue));
    }

    // Client-level props (producer/consumer/admin prefixed), re-prefixed.
    static Map<String, String> clientConfigsWithPrefix(String prefix, Map<String, String> props) {
        return props.entrySet().stream()
            .filter(x -> x.getKey().matches("(^consumer.*|^producer.*|^admin.*)"))
            .collect(Collectors.toMap(x -> prefix + x.getKey(), Entry::getValue));
    }
}
| |
/**
*
*/
package io.quarkus.bootstrap.resolver.maven;
import io.quarkus.bootstrap.model.AppArtifactKey;
import io.quarkus.bootstrap.resolver.maven.workspace.ProjectModuleResolver;
import io.quarkus.bootstrap.util.PropertyUtils;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.eclipse.aether.DefaultRepositorySystemSession;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.collection.CollectResult;
import org.eclipse.aether.collection.DependencyCollectionException;
import org.eclipse.aether.graph.DefaultDependencyNode;
import org.eclipse.aether.graph.Dependency;
import org.eclipse.aether.graph.Exclusion;
import org.eclipse.aether.impl.RemoteRepositoryManager;
import org.eclipse.aether.installation.InstallRequest;
import org.eclipse.aether.installation.InstallationException;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.resolution.ArtifactDescriptorException;
import org.eclipse.aether.resolution.ArtifactDescriptorRequest;
import org.eclipse.aether.resolution.ArtifactDescriptorResult;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.DependencyRequest;
import org.eclipse.aether.resolution.DependencyResolutionException;
import org.eclipse.aether.resolution.DependencyResult;
import org.eclipse.aether.resolution.VersionRangeRequest;
import org.eclipse.aether.resolution.VersionRangeResolutionException;
import org.eclipse.aether.resolution.VersionRangeResult;
import org.eclipse.aether.util.artifact.JavaScopes;
import org.eclipse.aether.util.version.GenericVersionScheme;
import org.eclipse.aether.version.InvalidVersionSpecificationException;
import org.eclipse.aether.version.Version;
/**
*
* @author Alexey Loubyansky
*/
public class MavenArtifactResolver {

    // System property that forces a secondary local repository (see constructor).
    private static final String SECONDARY_LOCAL_REPO_PROP = "io.quarkus.maven.secondary-local-repo";

    /** Builder for {@link MavenArtifactResolver}; extends the shared Maven context config. */
    public static class Builder extends BootstrapMavenContextConfig<Builder> {

        private Path secondaryLocalRepo;

        private Builder() {
            super();
        }

        public Builder setSecondaryLocalRepo(Path secondaryLocalRepo) {
            this.secondaryLocalRepo = secondaryLocalRepo;
            return this;
        }

        public MavenArtifactResolver build() throws BootstrapMavenException {
            return new MavenArtifactResolver(this);
        }
    }

    public static Builder builder() {
        return new Builder();
    }

    protected final BootstrapMavenContext context;
    protected final RepositorySystem repoSystem;
    protected final RepositorySystemSession repoSession;
    protected final List<RemoteRepository> remoteRepos;
    // Non-null only when a secondary local repo is configured.
    protected final MavenLocalRepositoryManager localRepoManager;
    protected final RemoteRepositoryManager remoteRepoManager;

    private MavenArtifactResolver(Builder builder) throws BootstrapMavenException {
        this.context = new BootstrapMavenContext(builder);
        this.repoSystem = context.getRepositorySystem();
        final RepositorySystemSession session = context.getRepositorySystemSession();
        // The system property overrides any builder-supplied secondary repo path.
        final String secondaryRepo = PropertyUtils.getProperty(SECONDARY_LOCAL_REPO_PROP);
        if (secondaryRepo != null) {
            builder.secondaryLocalRepo = Paths.get(secondaryRepo);
        }
        if (builder.secondaryLocalRepo != null) {
            // Wrap the default local repo manager so the secondary repo is consulted too.
            localRepoManager = new MavenLocalRepositoryManager(
                    session.getLocalRepositoryManager(),
                    builder.secondaryLocalRepo);
            this.repoSession = new DefaultRepositorySystemSession(session).setLocalRepositoryManager(localRepoManager);
        } else {
            this.repoSession = session;
            localRepoManager = null;
        }
        this.remoteRepos = context.getRemoteRepositories();
        this.remoteRepoManager = context.getRemoteRepositoryManager();
    }

    /** Create a resolver on top of an already-initialized Maven context (no secondary repo). */
    public MavenArtifactResolver(BootstrapMavenContext mvnSettings) throws BootstrapMavenException {
        this.context = mvnSettings;
        this.repoSystem = mvnSettings.getRepositorySystem();
        this.repoSession = mvnSettings.getRepositorySystemSession();
        localRepoManager = null;
        this.remoteRepos = mvnSettings.getRemoteRepositories();
        this.remoteRepoManager = mvnSettings.getRemoteRepositoryManager();
    }

    // The workspace doubles as the project module resolver, when present.
    public ProjectModuleResolver getProjectModuleResolver() {
        return context.getWorkspace() == null ? null : context.getWorkspace();
    }

    public BootstrapMavenContext getMavenContext() {
        return context;
    }

    public RemoteRepositoryManager getRemoteRepositoryManager() {
        return remoteRepoManager;
    }

    public MavenLocalRepositoryManager getLocalRepositoryManager() {
        return localRepoManager;
    }

    public RepositorySystem getSystem() {
        return repoSystem;
    }

    public RepositorySystemSession getSession() {
        return repoSession;
    }

    public List<RemoteRepository> getRepositories() {
        return remoteRepos;
    }

    // NOTE(review): mutates the list obtained from the context — assumes it is
    // modifiable; confirm against BootstrapMavenContext.getRemoteRepositories().
    public void addRemoteRepositories(List<RemoteRepository> repos) {
        remoteRepos.addAll(repos);
    }

    /** Resolve a single artifact against the context's remote repositories. */
    public ArtifactResult resolve(Artifact artifact) throws BootstrapMavenException {
        return resolveInternal(artifact, remoteRepos);
    }

    /** Resolve a single artifact, consulting {@code mainRepos} before the context repositories. */
    public ArtifactResult resolve(Artifact artifact, List<RemoteRepository> mainRepos) throws BootstrapMavenException {
        return resolveInternal(artifact, aggregateRepositories(mainRepos, remoteRepos));
    }

    private ArtifactResult resolveInternal(Artifact artifact, List<RemoteRepository> aggregatedRepos)
            throws BootstrapMavenException {
        try {
            return repoSystem.resolveArtifact(repoSession,
                    new ArtifactRequest()
                            .setArtifact(artifact)
                            .setRepositories(aggregatedRepos));
        } catch (ArtifactResolutionException e) {
            throw new BootstrapMavenException("Failed to resolve artifact " + artifact, e);
        }
    }

    /** Resolve a batch of pre-built artifact requests. */
    public List<ArtifactResult> resolve(List<ArtifactRequest> artifacts) throws BootstrapMavenException {
        try {
            return repoSystem.resolveArtifacts(repoSession, artifacts);
        } catch (ArtifactResolutionException e) {
            throw new BootstrapMavenException("Failed to resolve artifacts", e);
        }
    }

    /** Read the descriptor (effective POM information) of an artifact. */
    public ArtifactDescriptorResult resolveDescriptor(final Artifact artifact)
            throws BootstrapMavenException {
        return resolveDescriptorInternal(artifact, remoteRepos);
    }

    public ArtifactDescriptorResult resolveDescriptor(final Artifact artifact, List<RemoteRepository> mainRepos)
            throws BootstrapMavenException {
        return resolveDescriptorInternal(artifact, aggregateRepositories(mainRepos, remoteRepos));
    }

    private ArtifactDescriptorResult resolveDescriptorInternal(final Artifact artifact, List<RemoteRepository> aggregatedRepos)
            throws BootstrapMavenException {
        try {
            return repoSystem.readArtifactDescriptor(repoSession,
                    new ArtifactDescriptorRequest()
                            .setArtifact(artifact)
                            .setRepositories(
                                    aggregatedRepos));
        } catch (ArtifactDescriptorException e) {
            throw new BootstrapMavenException("Failed to read descriptor of " + artifact, e);
        }
    }

    /** Resolve the set of versions matching the artifact's version range. */
    public VersionRangeResult resolveVersionRange(Artifact artifact) throws BootstrapMavenException {
        try {
            return repoSystem.resolveVersionRange(repoSession,
                    new VersionRangeRequest()
                            .setArtifact(artifact)
                            .setRepositories(remoteRepos));
        } catch (VersionRangeResolutionException ex) {
            throw new BootstrapMavenException("Failed to resolve version range for " + artifact, ex);
        }
    }

    /** Highest version of the artifact within {@code range}, or null if none match. */
    public String getLatestVersionFromRange(Artifact artifact, String range) throws BootstrapMavenException {
        return getLatest(resolveVersionRange(new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(),
                artifact.getClassifier(), artifact.getExtension(), range)));
    }

    // Linear scan for the maximum version; null for an empty result.
    private String getLatest(final VersionRangeResult rangeResult) {
        final List<Version> versions = rangeResult.getVersions();
        if (versions.isEmpty()) {
            return null;
        }
        Version next = versions.get(0);
        for (int i = 1; i < versions.size(); ++i) {
            final Version candidate = versions.get(i);
            if (candidate.compareTo(next) > 0) {
                next = candidate;
            }
        }
        return next.toString();
    }

    /** Collect (but do not download) the dependency graph of an artifact. */
    public CollectResult collectDependencies(Artifact artifact, List<Dependency> deps) throws BootstrapMavenException {
        return collectDependencies(artifact, deps, Collections.emptyList());
    }

    public CollectResult collectDependencies(Artifact artifact, List<Dependency> deps, List<RemoteRepository> mainRepos)
            throws BootstrapMavenException {
        return collectDependencies(artifact, deps, mainRepos, Collections.emptyList());
    }

    public CollectResult collectDependencies(Artifact artifact, List<Dependency> deps, List<RemoteRepository> mainRepos,
            Collection<Exclusion> exclusions) throws BootstrapMavenException {
        final CollectRequest request = newCollectRequest(artifact, mainRepos, exclusions);
        request.setDependencies(deps);
        try {
            return repoSystem.collectDependencies(repoSession, request);
        } catch (DependencyCollectionException e) {
            throw new BootstrapMavenException("Failed to collect dependencies for " + artifact, e);
        }
    }

    /** Collect and download the dependency graph of an artifact. */
    public DependencyResult resolveDependencies(Artifact artifact, List<Dependency> deps) throws BootstrapMavenException {
        return resolveDependencies(artifact, deps, Collections.emptyList());
    }

    public DependencyResult resolveDependencies(Artifact artifact, List<Dependency> deps, List<RemoteRepository> mainRepos)
            throws BootstrapMavenException {
        final CollectRequest request = newCollectRequest(artifact, mainRepos);
        request.setDependencies(deps);
        try {
            return repoSystem.resolveDependencies(repoSession,
                    new DependencyRequest().setCollectRequest(request));
        } catch (DependencyResolutionException e) {
            throw new BootstrapMavenException("Failed to resolve dependencies for " + artifact, e);
        }
    }

    /** Resolve dependencies honoring dependency management and excluded scopes. */
    public DependencyResult resolveManagedDependencies(Artifact artifact, List<Dependency> deps, List<Dependency> managedDeps,
            List<RemoteRepository> mainRepos, String... excludedScopes) throws BootstrapMavenException {
        try {
            return repoSystem.resolveDependencies(repoSession,
                    new DependencyRequest().setCollectRequest(
                            newCollectManagedRequest(artifact, deps, managedDeps, mainRepos, excludedScopes)));
        } catch (DependencyResolutionException e) {
            throw new BootstrapMavenException("Failed to resolve dependencies for " + artifact, e);
        }
    }

    /** Resolve a Maven plugin's dependencies against the plugin repositories. */
    public DependencyResult resolvePluginDependencies(Artifact pluginArtifact) throws BootstrapMavenException {
        try {
            return repoSystem.resolveDependencies(repoSession, new DependencyRequest().setCollectRequest(new CollectRequest()
                    .setRoot(new Dependency(pluginArtifact, null)).setRepositories(context.getRemotePluginRepositories())));
        } catch (DependencyResolutionException e) {
            throw new BootstrapMavenException("Failed to resolve dependencies for Maven plugin " + pluginArtifact, e);
        }
    }

    /**
     * Turns the list of dependencies into a simple dependency tree
     */
    public DependencyResult toDependencyTree(List<Dependency> deps, List<RemoteRepository> mainRepos)
            throws BootstrapMavenException {
        DependencyResult result = new DependencyResult(
                new DependencyRequest().setCollectRequest(new CollectRequest(deps, Collections.emptyList(), mainRepos)));
        // Synthetic root with each dependency attached as a direct child.
        DefaultDependencyNode root = new DefaultDependencyNode((Dependency) null);
        result.setRoot(root);
        GenericVersionScheme vs = new GenericVersionScheme();
        for (Dependency i : deps) {
            DefaultDependencyNode node = new DefaultDependencyNode(i);
            try {
                node.setVersionConstraint(vs.parseVersionConstraint(i.getArtifact().getVersion()));
                node.setVersion(vs.parseVersion(i.getArtifact().getVersion()));
            } catch (InvalidVersionSpecificationException e) {
                throw new RuntimeException(e);
            }
            root.getChildren().add(node);
        }
        return result;
    }

    public CollectResult collectManagedDependencies(Artifact artifact, List<Dependency> deps, List<Dependency> managedDeps,
            List<RemoteRepository> mainRepos, Collection<Exclusion> exclusions, String... excludedScopes)
            throws BootstrapMavenException {
        try {
            return repoSystem.collectDependencies(repoSession,
                    newCollectManagedRequest(artifact, deps, managedDeps, mainRepos, exclusions, excludedScopes));
        } catch (DependencyCollectionException e) {
            throw new BootstrapMavenException("Failed to collect dependencies for " + artifact, e);
        }
    }

    private CollectRequest newCollectManagedRequest(Artifact artifact, List<Dependency> deps, List<Dependency> managedDeps,
            List<RemoteRepository> mainRepos, String... excludedScopes) throws BootstrapMavenException {
        return newCollectManagedRequest(artifact, deps, managedDeps, mainRepos, Collections.emptyList(), excludedScopes);
    }

    /**
     * Builds a CollectRequest for the artifact: merges the caller's dependencies
     * and managed dependencies with those from the artifact's descriptor
     * (caller-supplied entries win), filters out excluded scopes, and combines
     * the caller's, context's, and descriptor's repositories.
     */
    private CollectRequest newCollectManagedRequest(Artifact artifact, List<Dependency> deps, List<Dependency> managedDeps,
            List<RemoteRepository> mainRepos, Collection<Exclusion> exclusions, String... excludedScopes)
            throws BootstrapMavenException {
        final List<RemoteRepository> aggregatedRepos = aggregateRepositories(mainRepos, remoteRepos);
        final ArtifactDescriptorResult descr = resolveDescriptorInternal(artifact, aggregatedRepos);
        // Pick a containment structure sized to the number of excluded scopes.
        Collection<String> excluded;
        if (excludedScopes.length == 0) {
            excluded = Collections.emptyList();
        } else if (excludedScopes.length == 1) {
            excluded = Collections.singleton(excludedScopes[0]);
        } else {
            excluded = Arrays.asList(excludedScopes);
            if (excludedScopes.length > 3) {
                excluded = new HashSet<>(Arrays.asList(excludedScopes));
            }
        }
        // Descriptor dependencies minus the excluded scopes.
        final List<Dependency> originalDeps = new ArrayList<>(descr.getDependencies().size());
        for (Dependency dep : descr.getDependencies()) {
            if (excluded.contains(dep.getScope())) {
                continue;
            }
            originalDeps.add(dep);
        }
        // Caller-supplied managed deps first; descriptor-managed deps only for
        // keys the caller did not manage.
        final List<Dependency> mergedManagedDeps = new ArrayList<Dependency>(
                managedDeps.size() + descr.getManagedDependencies().size());
        Map<AppArtifactKey, String> managedVersions = Collections.emptyMap();
        if (!managedDeps.isEmpty()) {
            managedVersions = new HashMap<>(managedDeps.size());
            for (Dependency dep : managedDeps) {
                managedVersions.put(getId(dep.getArtifact()), dep.getArtifact().getVersion());
                mergedManagedDeps.add(dep);
            }
        }
        if (!descr.getManagedDependencies().isEmpty()) {
            for (Dependency dep : descr.getManagedDependencies()) {
                final AppArtifactKey key = getId(dep.getArtifact());
                if (!managedVersions.containsKey(key)) {
                    mergedManagedDeps.add(dep);
                }
            }
        }
        final CollectRequest request = new CollectRequest()
                .setDependencies(mergeDeps(deps, originalDeps, managedVersions))
                .setManagedDependencies(mergedManagedDeps)
                .setRepositories(aggregateRepositories(aggregatedRepos, newResolutionRepositories(descr.getRepositories())));
        // With exclusions the root must be a full Dependency, not just an artifact.
        if (exclusions.isEmpty()) {
            request.setRootArtifact(artifact);
        } else {
            request.setRoot(new Dependency(artifact, JavaScopes.COMPILE, false, exclusions));
        }
        return request;
    }

    public List<RemoteRepository> newResolutionRepositories(List<RemoteRepository> repos) {
        return repos.isEmpty() ? Collections.emptyList() : repoSystem.newResolutionRepositories(repoSession, repos);
    }

    /** Merge two repository lists; {@code dominant} entries take precedence. */
    public List<RemoteRepository> aggregateRepositories(List<RemoteRepository> dominant, List<RemoteRepository> recessive) {
        return dominant.isEmpty() ? recessive
                : remoteRepoManager.aggregateRepositories(repoSession, dominant, recessive, false);
    }

    /** Install an artifact into the local repository. */
    public void install(Artifact artifact) throws BootstrapMavenException {
        try {
            repoSystem.install(repoSession, new InstallRequest().addArtifact(artifact));
        } catch (InstallationException ex) {
            throw new BootstrapMavenException("Failed to install " + artifact, ex);
        }
    }

    private CollectRequest newCollectRequest(Artifact artifact, List<RemoteRepository> mainRepos) {
        return newCollectRequest(artifact, mainRepos, Collections.emptyList());
    }

    private CollectRequest newCollectRequest(Artifact artifact, List<RemoteRepository> mainRepos,
            Collection<Exclusion> exclusions) {
        return new CollectRequest()
                .setRoot(new Dependency(artifact, JavaScopes.RUNTIME, false, exclusions))
                .setRepositories(aggregateRepositories(mainRepos, remoteRepos));
    }

    /**
     * Merge dependency lists: all dominant entries are kept; recessive entries
     * are added only when their key is absent from dominant. Versions found in
     * {@code managedVersions} override each dependency's own version.
     */
    private List<Dependency> mergeDeps(List<Dependency> dominant, List<Dependency> recessive,
            Map<AppArtifactKey, String> managedVersions) {
        final int initialCapacity = dominant.size() + recessive.size();
        if (initialCapacity == 0) {
            return Collections.emptyList();
        }
        final List<Dependency> result = new ArrayList<Dependency>(initialCapacity);
        final Set<AppArtifactKey> ids = new HashSet<AppArtifactKey>(initialCapacity, 1.0f);
        for (Dependency dependency : dominant) {
            final AppArtifactKey id = getId(dependency.getArtifact());
            ids.add(id);
            final String managedVersion = managedVersions.get(id);
            if (managedVersion != null) {
                dependency = dependency.setArtifact(dependency.getArtifact().setVersion(managedVersion));
            }
            result.add(dependency);
        }
        for (Dependency dependency : recessive) {
            final AppArtifactKey id = getId(dependency.getArtifact());
            if (!ids.contains(id)) {
                final String managedVersion = managedVersions.get(id);
                if (managedVersion != null) {
                    dependency = dependency.setArtifact(dependency.getArtifact().setVersion(managedVersion));
                }
                result.add(dependency);
            }
        }
        return result;
    }

    // Identity key: groupId:artifactId:classifier:extension (version excluded).
    private static AppArtifactKey getId(Artifact a) {
        return new AppArtifactKey(a.getGroupId(), a.getArtifactId(), a.getClassifier(), a.getExtension());
    }
}
| |
/*
* Copyright 2013 Haulmont
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.haulmont.yarg.formatters.impl;
import com.google.common.collect.BiMap;
import com.google.common.collect.HashBiMap;
import com.haulmont.yarg.exception.ReportingException;
import com.haulmont.yarg.exception.UnsupportedFormatException;
import com.haulmont.yarg.formatters.factory.FormatterFactoryInput;
import com.haulmont.yarg.formatters.impl.xls.Area;
import com.haulmont.yarg.formatters.impl.xls.AreaDependencyManager;
import com.haulmont.yarg.formatters.impl.xls.Cell;
import com.haulmont.yarg.formatters.impl.xls.DocumentConverter;
import com.haulmont.yarg.formatters.impl.xls.caches.XlsFontCache;
import com.haulmont.yarg.formatters.impl.xls.caches.XlsStyleCache;
import com.haulmont.yarg.formatters.impl.xls.caches.XslStyleHelper;
import com.haulmont.yarg.formatters.impl.xls.hints.*;
import com.haulmont.yarg.formatters.impl.xlsx.Range;
import com.haulmont.yarg.structure.BandData;
import com.haulmont.yarg.structure.BandOrientation;
import com.haulmont.yarg.structure.ReportOutputType;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.poi.hssf.model.HSSFFormulaParser;
import org.apache.poi.hssf.record.EscherAggregate;
import org.apache.poi.hssf.usermodel.*;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.formula.ptg.AreaPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.RefPtg;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.util.AreaReference;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.ss.util.CellReference;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.*;
import static com.haulmont.yarg.formatters.impl.xls.HSSFCellHelper.getCellFromReference;
import static com.haulmont.yarg.formatters.impl.xls.HSSFPicturesHelper.getAllAnchors;
import static com.haulmont.yarg.formatters.impl.xls.HSSFRangeHelper.*;
/**
* Document formatter for '.xls' file types
*/
//todo : we need to rewrite logic in the way similar to XlsxFormatter (store rendered ranges in memory) - use bandsToResultRanges etc.
public class XLSFormatter extends AbstractFormatter {
    /** Name of the user-defined cell style that marks cells whose row height must NOT be copied from the template. */
    protected static final String DYNAMIC_HEIGHT_STYLE = "styleWithoutHeight";

    /** Workbook loaded from the report template; read-only source of ranges, styles and pictures. */
    protected HSSFWorkbook templateWorkbook;
    /** Workbook being rendered; starts as a copy of the template with cells/merge-regions cleaned out. */
    protected HSSFWorkbook resultWorkbook;

    /** Template sheet of the band currently being written; when it changes, {@link #rownum} is reset. */
    protected HSSFSheet currentTemplateSheet = null;

    /** Cache of template fonts -> result fonts to avoid duplicating fonts in the result workbook. */
    protected XlsFontCache fontCache = new XlsFontCache();
    /** Cache of template styles -> result styles (plus named styles) to avoid duplicating cell styles. */
    protected XlsStyleCache styleCache = new XlsStyleCache();

    /** Current output row pointer in the result sheet (0-based). */
    protected int rownum = 0;
    /** Current output column pointer, used by vertical bands. */
    protected int colnum = 0;
    /** Rows appended by the vertical band currently being written; folded into {@link #rownum} afterwards. */
    protected int rowsAddedByVerticalBand = 0;
    /** Rows appended by the horizontal band currently being written; folded into {@link #rownum} afterwards. */
    protected int rowsAddedByHorizontalBand = 0;

    /** rangeName -> merge regions of the template that intersect that named range (built by initMergeRegions). */
    protected Map<String, List<SheetRange>> mergeRegionsForRangeNames = new HashMap<>();
    /** template sheet -> result sheet, paired by sheet index. */
    protected Map<HSSFSheet, HSSFSheet> templateToResultSheetsMapping = new HashMap<>();
    /** bandName -> bounds of that band's named range in the template (cached by addRangeBounds). */
    protected Map<String, Bounds> templateBounds = new HashMap<>();

    /** Tracks template-area -> result-area dependencies; used to shift formulas and pictures. */
    protected AreaDependencyManager areaDependencyManager = new AreaDependencyManager();
    /** Live view of the dependency map owned by {@link #areaDependencyManager}. */
    protected Map<Area, List<Area>> areasDependency = areaDependencyManager.getAreasDependency();

    /** Picture indexes in the result workbook, in the same order the pictures appear in the template. */
    protected List<Integer> orderedPicturesId = new ArrayList<>();
    /** sheetName -> cached EscherAggregate (drawing container) for that sheet. */
    protected Map<String, EscherAggregate> sheetToEscherAggregate = new HashMap<>();
    /** result sheet -> its drawing patriarch, created up-front in copyCharts for chart-free sheets. */
    protected Map<HSSFSheet, HSSFPatriarch> drawingPatriarchsMap = new HashMap<>();

    /** Style/width hints recognized in template cell text; collected during rendering, applied at the end. */
    protected List<XlsHint> hints = new ArrayList<>();

    /** Converter used to produce PDF output; optional — PDF rendering fails without it. */
    protected DocumentConverter documentConverter;

    /** band -> 1-based cell range it occupied in the result; lets nested vertical bands reuse parent rows. */
    protected BiMap<BandData, Range> bandsToResultRanges = HashBiMap.create();

    public XLSFormatter(FormatterFactoryInput formatterFactoryInput) {
        super(formatterFactoryInput);
        supportedOutputTypes.add(ReportOutputType.xls);
        supportedOutputTypes.add(ReportOutputType.pdf);
        // Order matters only in that each hint strips its own marker from the cell text when matched.
        hints.add(new CustomCellStyleHint(fontCache, styleCache));
        hints.add(new CopyColumnWidthHint());
        hints.add(new AutoWidthHint());
        hints.add(new CustomWidthHint());
    }

    public void setDocumentConverter(DocumentConverter documentConverter) {
        this.documentConverter = documentConverter;
    }

    /** Template method: prepare workbooks, write bands, apply collected hints, then emit xls or pdf. */
    @Override
    public void renderDocument() {
        initWorkbook();
        processDocument();
        applyHints();
        outputDocument();
    }

    /**
     * Loads the template twice (once as read-only template, once as the mutable result),
     * pairs sheets, records template merge regions, prepares drawing patriarchs, and
     * strips the result sheets of merge regions and cell contents so bands can be written fresh.
     */
    protected void initWorkbook() {
        try {
            templateWorkbook = new HSSFWorkbook(reportTemplate.getDocumentContent());
            resultWorkbook = new HSSFWorkbook(reportTemplate.getDocumentContent());
        } catch (IOException e) {
            throw wrapWithReportingException("An error occurred while parsing xls template " + reportTemplate.getDocumentName(), e);
        }

        for (int sheetNumber = 0; sheetNumber < templateWorkbook.getNumberOfSheets(); sheetNumber++) {
            HSSFSheet templateSheet = templateWorkbook.getSheetAt(sheetNumber);
            HSSFSheet resultSheet = resultWorkbook.getSheetAt(sheetNumber);
            templateToResultSheetsMapping.put(templateSheet, resultSheet);
            initMergeRegions(templateSheet);
            copyCharts(resultSheet);
            removeMergedRegions(resultSheet);
            cleanupCells(resultSheet);
        }

        copyPicturesToResultWorkbook();

        initNamedStyleCache();
    }

    /**
     * Registers all user-named cell styles of the result workbook in the style cache
     * so style hints can look them up by name.
     */
    protected void initNamedStyleCache() {
        // NOTE(review): loop counter is short while getNumCellStyles() returns int —
        // would wrap around for workbooks with more than Short.MAX_VALUE styles; confirm acceptable.
        for (short i = 0; i < resultWorkbook.getNumCellStyles(); i++) {
            HSSFCellStyle cellStyle = resultWorkbook.getCellStyleAt(i);
            if (StringUtils.isNotBlank(cellStyle.getUserStyleName())) {
                styleCache.addNamedStyle(cellStyle);
            }
        }
    }

    /** Writes every top-level band, then fixes up formulas and copies template pictures into place. */
    protected void processDocument() {
        for (BandData childBand : rootBand.getChildrenList()) {
            checkThreadInterrupted();
            writeBand(childBand);
        }

        updateFormulas();
        copyPictures();
    }

    /** Applies all hints collected while copying cells (styles, column widths). */
    protected void applyHints() {
        for (XlsHint option : hints) {
            option.apply();
        }
    }

    /**
     * Writes the rendered workbook to {@code outputStream} as xls, or converts it to pdf
     * via {@link #documentConverter}. Throws if pdf is requested but no converter is configured.
     */
    protected void outputDocument() {
        checkThreadInterrupted();
        if (ReportOutputType.xls.equals(outputType)) {
            try {
                resultWorkbook.write(outputStream);
            } catch (Exception e) {
                throw wrapWithReportingException("An error occurred while writing result to file.", e);
            } finally {
                IOUtils.closeQuietly(outputStream);
            }
        } else if (ReportOutputType.pdf.equals(outputType)) {
            if (documentConverter != null) {
                try {
                    // Buffer the xls in memory first; the converter consumes raw bytes.
                    ByteArrayOutputStream stream = new ByteArrayOutputStream();
                    resultWorkbook.write(stream);
                    documentConverter.convertToPdf(DocumentConverter.FileType.SPREADSHEET, stream.toByteArray(), outputStream);
                } catch (IOException e) {
                    throw wrapWithReportingException("An error occurred while converting xls to pdf.", e);
                } finally {
                    IOUtils.closeQuietly(outputStream);
                }
            } else {
                throw new UnsupportedFormatException("Could not convert xls files to pdf because Open Office connection params not set. Please check, that \"cuba.reporting.openoffice.path\" property is set in properties file.");
            }
        }
    }

    /**
     * Re-adds every template picture to the result workbook and remembers the returned
     * picture indexes in template order (consumed later by copyPicturesFromTemplateToResult).
     */
    protected void copyPicturesToResultWorkbook() {
        List<HSSFPictureData> allPictures = templateWorkbook.getAllPictures();
        for (HSSFPictureData allPicture : allPictures) {
            // NOTE(review): every picture is re-added as PICTURE_TYPE_JPEG regardless of its
            // original format — confirm non-JPEG template pictures survive this.
            int i = resultWorkbook.addPicture(allPicture.getData(), Workbook.PICTURE_TYPE_JPEG);
            orderedPicturesId.add(i);
        }
    }

    /** Drops all merge regions from the result sheet; bands re-create the ones they need. */
    protected void removeMergedRegions(HSSFSheet resultSheet) {
        for (int i = 0, size = resultSheet.getNumMergedRegions(); i < size; i++) {
            resultSheet.removeMergedRegion(0);//each time we remove region - they "move to left" so region 1 become region 0
        }
    }

    /** Removes every cell from the result sheet, leaving empty rows for bands to fill. */
    protected void cleanupCells(HSSFSheet resultSheet) {
        for (int i = resultSheet.getFirstRowNum(); i <= resultSheet.getLastRowNum(); i++) {
            HSSFRow row = resultSheet.getRow(i);
            if (row != null) {
                for (int j = 0; j < row.getLastCellNum(); j++) {
                    HSSFCell cell = row.getCell(j);
                    if (cell != null) {
                        row.removeCell(cell);
                    }
                }
            }
        }
    }

    /**
     * Prepares a drawing patriarch for the sheet — but only when the sheet has no charts,
     * because creating a patriarch destroys existing charts (POI does not support them).
     */
    protected void copyCharts(HSSFSheet resultSheet) {
        HSSFChart[] sheetCharts = HSSFChart.getSheetCharts(resultSheet);
        if (sheetCharts == null || sheetCharts.length == 0) {//workaround for charts. If there is charts on sheet - we can not use getDrawPatriarch as it removes all charts (because does not support them)
            HSSFPatriarch drawingPatriarch = resultSheet.createDrawingPatriarch();
            // NOTE(review): this null re-check calls the same factory again — looks redundant; confirm.
            if (drawingPatriarch == null) {
                drawingPatriarch = resultSheet.createDrawingPatriarch();
            }
            drawingPatriarchsMap.put(resultSheet, drawingPatriarch);
        }
    }

    /** Rewrites formulas in every result area so references point at the shifted result ranges. */
    protected void updateFormulas() {
        for (Map.Entry<Area, List<Area>> entry : areasDependency.entrySet()) {
            Area original = entry.getKey();

            for (Area dependent : entry.getValue()) {
                updateFormulas(original, dependent);
            }
        }
    }

    /** Copies pictures sheet-by-sheet from template to result, pairing sheets by index. */
    protected void copyPictures() {
        for (int sheetNumber = 0; sheetNumber < templateWorkbook.getNumberOfSheets(); sheetNumber++) {
            HSSFSheet templateSheet = templateWorkbook.getSheetAt(sheetNumber);
            HSSFSheet resultSheet = resultWorkbook.getSheetAt(sheetNumber);
            copyPicturesFromTemplateToResult(templateSheet, resultSheet);
        }
    }

    /**
     * Writes one band (and, for horizontal bands, its children) into the result sheet
     * matching the band's named range. Resets the row pointer when the band lives on a
     * different template sheet than the previous one.
     *
     * @param band band to write
     */
    protected void writeBand(BandData band) {
        String rangeName = band.getName();
        try {
            HSSFSheet templateSheet = getTemplateSheetForRangeName(templateWorkbook, rangeName);

            if (templateSheet != currentTemplateSheet) { //todo: reimplement. store rownum for each sheet.
                currentTemplateSheet = templateSheet;
                rownum = 0;
            }

            HSSFSheet resultSheet = templateToResultSheetsMapping.get(templateSheet);

            if (BandOrientation.HORIZONTAL == band.getOrientation()) {
                colnum = 0;
                writeHorizontalBand(band, templateSheet, resultSheet);
            } else {
                writeVerticalBand(band, templateSheet, resultSheet);
            }
        } catch (ReportingException e) {
            throw e;
        } catch (Exception e) {
            throw wrapWithReportingException(String.format("An error occurred while rendering band [%s]", rangeName), e);
        }
    }

    /**
     * Method writes horizontal band
     * Note: Only one band for row is supported. Now we think that many bands for row aren't usable.
     *
     * @param band          - band to write
     * @param templateSheet - template sheet
     * @param resultSheet   - result sheet
     */
    protected void writeHorizontalBand(BandData band, HSSFSheet templateSheet, HSSFSheet resultSheet) {
        String rangeName = band.getName();
        AreaReference templateRange = getAreaForRange(templateWorkbook, rangeName);
        if (templateRange == null) {
            throw wrapWithReportingException(String.format("No such named range in xls file: %s", rangeName));
        }
        CellReference[] crefs = templateRange.getAllReferencedCells();

        CellReference topLeft, bottomRight;
        AreaReference resultRange;

        // Back up counters: child bands written below advance them, but the scheduled
        // merge-region copy must use the values as of this band's own rows.
        int rowsAddedByHorizontalBandBackup = rowsAddedByHorizontalBand;
        int rownumBackup = rownum;

        if (crefs != null) {
            addRangeBounds(band, crefs);

            ArrayList<HSSFRow> resultRows = new ArrayList<>();

            int currentRowNum = -1;      // template row index of the row being copied
            int currentRowCount = -1;    // index into resultRows
            int currentColumnCount = 0;  // column offset within the current row
            int offset = 0;              // first template column of the current row

            topLeft = new CellReference(rownum + rowsAddedByHorizontalBand, 0);
            // no child bands - merge regions now
            if (band.getChildrenList().isEmpty()) {
                copyMergeRegions(resultSheet, rangeName, rownum + rowsAddedByHorizontalBand,
                        getCellFromReference(crefs[0], templateSheet).getColumnIndex());
            }

            for (CellReference cellRef : crefs) {
                HSSFCell templateCell = getCellFromReference(cellRef, templateSheet);
                HSSFRow resultRow;
                if (templateCell.getRowIndex() != currentRowNum) { //create new row
                    resultRow = resultSheet.createRow(rownum + rowsAddedByHorizontalBand);
                    copyPageBreaks(templateSheet, resultSheet, templateCell.getRowIndex(), resultRow.getRowNum());
                    rowsAddedByHorizontalBand += 1;

                    //todo move to options
                    // Rows styled DYNAMIC_HEIGHT_STYLE keep the default height instead of the template's.
                    if (templateCell.getCellStyle().getParentStyle() != null
                            && templateCell.getCellStyle().getParentStyle().getUserStyleName() != null
                            && templateCell.getCellStyle().getParentStyle().getUserStyleName().equals(DYNAMIC_HEIGHT_STYLE)
                    ) {
                        //resultRow.setHeight(templateCell.getRow().getHeight());
                    } else {
                        resultRow.setHeight(templateCell.getRow().getHeight());
                    }

                    resultRows.add(resultRow);

                    currentRowNum = templateCell.getRowIndex();
                    currentRowCount++;
                    currentColumnCount = 0;
                    offset = templateCell.getColumnIndex();
                } else {                                          // or write cell to current row
                    resultRow = resultRows.get(currentRowCount);
                    currentColumnCount++;
                }

                copyCellFromTemplate(templateCell, resultRow, offset + currentColumnCount, band);
            }

            bottomRight = new CellReference(rownum + rowsAddedByHorizontalBand - 1, offset + currentColumnCount);
            resultRange = new AreaReference(topLeft, bottomRight);

            areaDependencyManager.addDependency(new Area(band.getName(), Area.AreaAlign.HORIZONTAL, templateRange),
                    new Area(band.getName(), Area.AreaAlign.HORIZONTAL, resultRange));
            // Range is stored 1-based (POI references are 0-based).
            bandsToResultRanges.put(band, new Range(resultSheet.getSheetName(),
                    resultRange.getFirstCell().getCol() + 1, resultRange.getFirstCell().getRow() + 1,
                    resultRange.getLastCell().getCol() + 1, resultRange.getLastCell().getRow() + 1
            ));
        }

        for (BandData child : band.getChildrenList()) {
            writeBand(child);
        }

        // scheduled merge regions
        if (!band.getChildrenList().isEmpty() && crefs != null) {
            copyMergeRegions(resultSheet, rangeName, rownumBackup + rowsAddedByHorizontalBandBackup,
                    getCellFromReference(crefs[0], templateSheet).getColumnIndex());
        }

        // Fold per-band counters into the global row pointer and reset them for the next band.
        rownum += rowsAddedByHorizontalBand;
        rowsAddedByHorizontalBand = 0;
        rownum += rowsAddedByVerticalBand;
        rowsAddedByVerticalBand = 0;
    }

    /**
     * Method writes vertical band
     * Note: no child support for vertical band ;)
     *
     * @param band          - band to write
     * @param templateSheet - template sheet
     * @param resultSheet   - result sheet
     */
    protected void writeVerticalBand(BandData band, HSSFSheet templateSheet, HSSFSheet resultSheet) {
        String rangeName = band.getName();
        CellReference[] crefs = getRangeContent(templateWorkbook, rangeName);

        Set<Integer> addedRowNumbers = new HashSet<>();

        if (crefs != null) {
            addRangeBounds(band, crefs);

            // Nested vertical bands write into their parent's rows: derive the start row from the
            // parent's result range plus this band's offset inside the parent's template range.
            Bounds thisBounds = templateBounds.get(band.getName());
            Bounds parentBounds = templateBounds.get(band.getParentBand().getName());
            Range parentRange = bandsToResultRanges.get(band.getParentBand());

            int localRowNum = parentBounds != null && parentRange != null ?
                    parentRange.getFirstRow() - 1 + thisBounds.row0 - parentBounds.row0 :
                    rownum;

            colnum = colnum == 0 ? getCellFromReference(crefs[0], templateSheet).getColumnIndex() : colnum;
            copyMergeRegions(resultSheet, rangeName, localRowNum, colnum);

            int firstRow = crefs[0].getRow();
            int firstColumn = crefs[0].getCol();

            for (CellReference cref : crefs) {//create necessary rows
                int currentRow = cref.getRow();
                final int rowOffset = currentRow - firstRow;
                if (!rowExists(resultSheet, localRowNum + rowOffset)) {
                    HSSFRow resultRow = resultSheet.createRow(localRowNum + rowOffset);
                    copyPageBreaks(templateSheet, resultSheet, cref.getRow(), resultRow.getRowNum());
                }
                addedRowNumbers.add(cref.getRow());
            }

            CellReference topLeft = null;
            CellReference bottomRight = null;
            for (CellReference cref : crefs) {
                int currentRow = cref.getRow();
                int currentColumn = cref.getCol();
                final int rowOffset = currentRow - firstRow;
                final int columnOffset = currentColumn - firstColumn;

                HSSFCell templateCell = getCellFromReference(cref, templateSheet);
                resultSheet.setColumnWidth(colnum + columnOffset, templateSheet.getColumnWidth(templateCell.getColumnIndex()));
                HSSFCell resultCell = copyCellFromTemplate(templateCell, resultSheet.getRow(localRowNum + rowOffset), colnum + columnOffset, band);
                if (topLeft == null) {
                    topLeft = new CellReference(resultCell);
                }
                bottomRight = new CellReference(resultCell);
            }

            // Advance the column pointer past this band's width for the next sibling vertical band.
            colnum += crefs[crefs.length - 1].getCol() - firstColumn + 1;

            AreaReference templateRange = getAreaForRange(templateWorkbook, rangeName);
            AreaReference resultRange = new AreaReference(topLeft, bottomRight);
            areaDependencyManager.addDependency(new Area(band.getName(), Area.AreaAlign.VERTICAL, templateRange),
                    new Area(band.getName(), Area.AreaAlign.VERTICAL, resultRange));
            // Range is stored 1-based (POI references are 0-based).
            bandsToResultRanges.put(band, new Range(resultSheet.getSheetName(),
                    resultRange.getFirstCell().getCol() + 1, resultRange.getFirstCell().getRow() + 1,
                    resultRange.getLastCell().getCol() + 1, resultRange.getLastCell().getRow() + 1
            ));
        }

        //for first level vertical bands we should increase rownum by number of rows added by vertical band
        //nested vertical bands do not add rows, they use parent space
        if (BandData.ROOT_BAND_NAME.equals(band.getParentBand().getName())) {
            List<BandData> sameBands = band.getParentBand().getChildrenByName(band.getName());
            if (sameBands.size() > 0 && sameBands.get(sameBands.size() - 1) == band) {//check if this vertical band is last vertical band with same name
                rownum += addedRowNumbers.size();
                // rowsAddedByVerticalBand = 0;
            }
        }
    }

    /**
     * Method creates mapping [rangeName : List<CellRangeAddress>].
     * List contains all merge regions for this named range.
     * Attention: if merged regions writes wrong - look on methods isMergeRegionInsideNamedRange or isNamedRangeInsideMergeRegion
     * todo: how to recognize if merge region must be copied with named range
     *
     * @param currentSheet Sheet which contains merge regions
     */
    protected void initMergeRegions(HSSFSheet currentSheet) {
        int rangeNumber = templateWorkbook.getNumberOfNames();
        for (int i = 0; i < rangeNumber; i++) {
            HSSFName aNamedRange = templateWorkbook.getNameAt(i);

            String refersToFormula = aNamedRange.getRefersToFormula();
            // Non-contiguous named ranges cannot be represented by a single AreaReference — skip them.
            if (!AreaReference.isContiguous(refersToFormula)) {
                continue;
            }

            AreaReference aref = new AreaReference(refersToFormula, SpreadsheetVersion.EXCEL97);

            Integer rangeFirstRow = aref.getFirstCell().getRow();
            Integer rangeFirstColumn = (int) aref.getFirstCell().getCol();
            Integer rangeLastRow = aref.getLastCell().getRow();
            Integer rangeLastColumn = (int) aref.getLastCell().getCol();

            for (int j = 0; j < currentSheet.getNumMergedRegions(); j++) {
                CellRangeAddress mergedRegion = currentSheet.getMergedRegion(j);
                if (mergedRegion != null) {
                    Integer regionFirstRow = mergedRegion.getFirstRow();
                    Integer regionFirstColumn = mergedRegion.getFirstColumn();
                    Integer regionLastRow = mergedRegion.getLastRow();
                    Integer regionLastColumn = mergedRegion.getLastColumn();

                    boolean mergedInsideNamed = isMergeRegionInsideNamedRange(
                            rangeFirstRow, rangeFirstColumn, rangeLastRow, rangeLastColumn,
                            regionFirstRow, regionFirstColumn, regionLastRow, regionLastColumn);

                    boolean namedInsideMerged = isNamedRangeInsideMergeRegion(
                            rangeFirstRow, rangeFirstColumn, rangeLastRow, rangeLastColumn,
                            regionFirstRow, regionFirstColumn, regionLastRow, regionLastColumn);

                    if (mergedInsideNamed || namedInsideMerged) {
                        String name = aNamedRange.getNameName();
                        SheetRange sheetRange = new SheetRange(mergedRegion, currentSheet.getSheetName());
                        if (mergeRegionsForRangeNames.get(name) == null) {
                            ArrayList<SheetRange> list = new ArrayList<>();
                            list.add(sheetRange);
                            mergeRegionsForRangeNames.put(name, list);
                        } else {
                            mergeRegionsForRangeNames.get(name).add(sheetRange);
                        }
                    }
                }
            }
        }
    }

    /**
     * Create new merge regions in result sheet identically to range's merge regions from template.
     * Not support copy of frames and rules
     *
     * @param resultSheet            - result sheet
     * @param rangeName              - range name
     * @param firstTargetRangeRow    - first row of target range
     * @param firstTargetRangeColumn - first column of target range
     */
    protected void copyMergeRegions(HSSFSheet resultSheet, String rangeName,
                                    int firstTargetRangeRow, int firstTargetRangeColumn) {
        int rangeNameIdx = templateWorkbook.getNameIndex(rangeName);
        if (rangeNameIdx == -1) return;

        HSSFName aNamedRange = templateWorkbook.getNameAt(rangeNameIdx);
        AreaReference aref = new AreaReference(aNamedRange.getRefersToFormula(), SpreadsheetVersion.EXCEL97);
        int column = aref.getFirstCell().getCol();
        int row = aref.getFirstCell().getRow();

        List<SheetRange> regionsList = mergeRegionsForRangeNames.get(rangeName);
        if (regionsList != null)
            for (SheetRange sheetRange : regionsList) {
                if (resultSheet.getSheetName().equals(sheetRange.getSheetName())) {
                    CellRangeAddress cra = sheetRange.getCellRangeAddress();
                    if (cra != null) {
                        // Translate the template region to the target position, preserving size.
                        int regionHeight = cra.getLastRow() - cra.getFirstRow() + 1;
                        int regionWidth = cra.getLastColumn() - cra.getFirstColumn() + 1;

                        int regionVOffset = cra.getFirstRow() - row;
                        int regionHOffset = cra.getFirstColumn() - column;

                        CellRangeAddress newRegion = cra.copy();
                        newRegion.setFirstColumn(regionHOffset + firstTargetRangeColumn);
                        newRegion.setLastColumn(regionHOffset + regionWidth - 1 + firstTargetRangeColumn);

                        newRegion.setFirstRow(regionVOffset + firstTargetRangeRow);
                        newRegion.setLastRow(regionVOffset + regionHeight - 1 + firstTargetRangeRow);

                        // POI rejects overlapping merge regions — skip if the new one intersects any existing.
                        boolean skipRegion = false;
                        for (int mergedIndex = 0; mergedIndex < resultSheet.getNumMergedRegions(); mergedIndex++) {
                            CellRangeAddress mergedRegion = resultSheet.getMergedRegion(mergedIndex);
                            if (!intersects(newRegion, mergedRegion)) {
                                continue;
                            }

                            skipRegion = true;
                        }

                        if (!skipRegion) {
                            resultSheet.addMergedRegion(newRegion);
                        }
                    }
                }
            }
    }

    /** True if the two cell ranges overlap (checked symmetrically in both directions). */
    protected boolean intersects(CellRangeAddress x, CellRangeAddress y) {
        return (x.getFirstColumn() <= y.getLastColumn() &&
                x.getLastColumn() >= y.getFirstColumn() &&
                x.getLastRow() >= y.getFirstRow() &&
                x.getFirstRow() <= y.getLastRow())
                // or
                || (y.getFirstColumn() <= x.getLastColumn() &&
                y.getLastColumn() >= x.getFirstColumn() &&
                y.getLastRow() >= x.getFirstRow() &&
                y.getFirstRow() <= x.getLastRow());
    }

    /**
     * copies template cell to result row into result column. Fills this cell with data from band
     *
     * @param templateCell - template cell
     * @param resultRow    - result row
     * @param resultColumn - result column
     * @param band         - band
     * @return the created result cell, or null when templateCell is null
     */
    private HSSFCell copyCellFromTemplate(HSSFCell templateCell, HSSFRow resultRow, int resultColumn, BandData band) {
        checkThreadInterrupted();

        if (templateCell == null) return null;

        HSSFCell resultCell = resultRow.createCell(resultColumn);

        HSSFCellStyle templateStyle = templateCell.getCellStyle();
        HSSFCellStyle resultStyle = copyCellStyle(templateStyle);
        resultCell.setCellStyle(resultStyle);

        String templateCellValue = "";
        int cellType = templateCell.getCellType();

        // Only string-like cells carry alias/hint markup worth extracting.
        if (cellType != HSSFCell.CELL_TYPE_FORMULA && cellType != HSSFCell.CELL_TYPE_NUMERIC) {
            HSSFRichTextString richStringCellValue = templateCell.getRichStringCellValue();
            templateCellValue = richStringCellValue != null ? richStringCellValue.getString() : "";

            templateCellValue = extractStyles(templateCell, resultCell, templateCellValue, band);
        }

        if (cellType == HSSFCell.CELL_TYPE_STRING && containsJustOneAlias(templateCellValue)) {
            // A lone ${alias} cell: write the raw band value, preserving its type.
            updateValueCell(rootBand, band, templateCellValue, resultCell,
                    drawingPatriarchsMap.get(resultCell.getSheet()));
        } else {
            // Mixed text / formula / numeric: inline band values into the string form.
            String cellValue = inlineBandDataToCellString(templateCell, templateCellValue, band);
            setValueToCell(resultCell, cellValue, cellType);
        }

        return resultCell;
    }

    /**
     * Copies template cell to result cell and fills it with bandData data
     *
     * @param bandData          - bandData
     * @param templateCellValue - template cell value (a single parameter alias)
     * @param resultCell        - result cell
     * @param patriarch         - drawing patriarch of the result sheet (used by content inliners)
     */
    protected void updateValueCell(BandData rootBand, BandData bandData, String templateCellValue, HSSFCell resultCell, HSSFPatriarch patriarch) {
        String parameterName = templateCellValue;
        parameterName = unwrapParameterName(parameterName);
        String fullParameterName = bandData.getName() + "." + parameterName;

        if (StringUtils.isEmpty(parameterName)) return;

        // Parameter absent from the band data: clear the cell.
        if (!bandData.getData().containsKey(parameterName)) {
            resultCell.setCellValue((String) null);
            return;
        }

        Object value = bandData.getData().get(parameterName);

        if (value == null) {
            resultCell.setCellType(HSSFCell.CELL_TYPE_BLANK);
            return;
        }

        String formatString = getFormatString(parameterName, fullParameterName);
        InlinerAndMatcher inlinerAndMatcher = getContentInlinerForFormat(formatString);
        if (inlinerAndMatcher != null) {
            // Format string matched a content inliner (e.g. images) — delegate entirely.
            inlinerAndMatcher.contentInliner.inlineToXls(patriarch, resultCell, value, inlinerAndMatcher.matcher);
            return;
        }

        // Explicit format wins; otherwise write typed values natively, falling back to formatted text.
        if (formatString != null) {
            resultCell.setCellValue(new HSSFRichTextString(formatValue(value, parameterName, fullParameterName)));
        } else if (value instanceof Number) {
            resultCell.setCellValue(((Number) value).doubleValue());
        } else if (value instanceof Boolean) {
            resultCell.setCellValue((Boolean) value);
        } else if (value instanceof Date) {
            resultCell.setCellValue((Date) value);
        } else {
            resultCell.setCellValue(new HSSFRichTextString(formatValue(value, parameterName, fullParameterName)));
        }
    }

    /**
     * Writes a string value into the cell according to the template cell's type:
     * formulas are set as formulas, strings as rich text, anything else as plain value;
     * empty values blank the cell.
     */
    protected void setValueToCell(HSSFCell resultCell, String cellValue, int cellType) {
        if (StringUtils.isNotEmpty(cellValue)) {
            switch (cellType) {
                case HSSFCell.CELL_TYPE_FORMULA:
                    resultCell.setCellFormula(cellValue);
                    break;
                case HSSFCell.CELL_TYPE_STRING:
                    resultCell.setCellValue(new HSSFRichTextString(cellValue));
                    break;
                default:
                    resultCell.setCellValue(cellValue);
                    break;
            }
        } else {
            resultCell.setCellType(HSSFCell.CELL_TYPE_BLANK);
        }
    }

    /**
     * Produces the cell's string content with band data substituted in,
     * using the extracted value for string cells and cell.toString() otherwise.
     */
    protected String inlineBandDataToCellString(HSSFCell cell, String templateCellValue, BandData band) {
        String resultStr = "";
        if (cell.getCellType() == HSSFCell.CELL_TYPE_STRING) {
            if (templateCellValue != null) resultStr = templateCellValue;
        } else {
            if (cell.toString() != null) resultStr = cell.toString();
        }

        if (StringUtils.isNotEmpty(resultStr)) return insertBandDataToString(band, resultStr);

        return "";
    }

    /**
     * This method adds range bounds to cache. Key is bandName
     *
     * @param band  - band
     * @param crefs - range
     */
    protected void addRangeBounds(BandData band, CellReference[] crefs) {
        if (templateBounds.containsKey(band.getName()))
            return;
        Bounds bounds = new Bounds(crefs[0].getRow(), crefs[0].getCol(), crefs[crefs.length - 1].getRow(), crefs[crefs.length - 1].getCol());
        templateBounds.put(band.getName(), bounds);
    }

    /**
     * Rewrites formula references in the dependent result area: every area/cell
     * reference pointing into the template area is shifted to the result area.
     *
     * @param templateArea        area of the band in the template
     * @param dependentResultArea corresponding area in the result workbook
     */
    protected void updateFormulas(Area templateArea, Area dependentResultArea) {
        HSSFSheet templateSheet = getTemplateSheetForRangeName(templateWorkbook, templateArea.getName());
        HSSFSheet resultSheet = templateToResultSheetsMapping.get(templateSheet);

        AreaReference area = dependentResultArea.toAreaReference();
        for (CellReference cell : area.getAllReferencedCells()) {
            HSSFCell resultCell = getCellFromReference(cell, resultSheet);
            if (resultCell.getCellType() == HSSFCell.CELL_TYPE_FORMULA) {
                // Parse the formula into tokens, shift range/cell references, and re-serialize.
                Ptg[] ptgs = HSSFFormulaParser.parse(resultCell.getCellFormula(), resultWorkbook);

                for (Ptg ptg : ptgs) {
                    if (ptg instanceof AreaPtg) {
                        areaDependencyManager.updateAreaPtg(templateArea, dependentResultArea, (AreaPtg) ptg);
                    } else if (ptg instanceof RefPtg) {
                        areaDependencyManager.updateRefPtg(templateArea, dependentResultArea, (RefPtg) ptg);
                    }
                }

                String calculatedFormula = HSSFFormulaParser.toFormulaString(templateWorkbook, ptgs);
                resultCell.setCellFormula(calculatedFormula);
            }
        }
    }

    /**
     * Runs every registered hint against the cell text; a matching hint strips its marker
     * from the text and schedules itself for this template/result cell pair.
     *
     * @return the cell text with all matched hint markers removed and trailing whitespace stripped
     */
    protected String extractStyles(HSSFCell templateCell, HSSFCell resultCell, String templateCellValue, BandData bandData) {
        for (XlsHint hint : hints) {
            XlsHint.CheckResult check = hint.check(templateCellValue);
            if (check.result) {
                templateCellValue = check.cellValue;
                hint.add(templateCell, resultCell, bandData);
            }
        }
        templateCellValue = StringUtils.stripEnd(templateCellValue, null);
        return templateCellValue;
    }

    /**
     * Returns a result-workbook style equivalent to the template style,
     * creating and caching it (style and font) on first use.
     */
    protected HSSFCellStyle copyCellStyle(HSSFCellStyle templateStyle) {
        HSSFCellStyle style = styleCache.getCellStyleByTemplate(templateStyle);

        if (style == null) {
            HSSFCellStyle newStyle = resultWorkbook.createCellStyle();

            XslStyleHelper.cloneStyleRelations(templateStyle, newStyle);

            HSSFFont templateFont = templateStyle.getFont(templateWorkbook);
            HSSFFont font = fontCache.getFontByTemplate(templateFont);
            if (font != null)
                newStyle.setFont(font);
            else {
                XslStyleHelper.cloneFont(templateStyle, newStyle);
                fontCache.addCachedFont(templateFont, newStyle.getFont(resultWorkbook));
            }

            styleCache.addCachedStyle(templateStyle, newStyle);
            style = newStyle;
        }

        return style;
    }

    /**
     * Returns EscherAggregate from sheet
     *
     * @param sheet - HSSFSheet
     * @return - EscherAggregate from sheet (cached per sheet name)
     */
    protected EscherAggregate getEscherAggregate(HSSFSheet sheet) {
        EscherAggregate agg = sheetToEscherAggregate.get(sheet.getSheetName());
        if (agg == null) {
            agg = sheet.getDrawingEscherAggregate();
            sheetToEscherAggregate.put(sheet.getSheetName(), agg);
        }
        return agg;
    }

    /**
     * Copies all pictures from template sheet to result sheet, shift picture depending on area dependencies
     *
     * @param templateSheet - template sheet
     * @param resultSheet   - result sheet
     */
    protected void copyPicturesFromTemplateToResult(HSSFSheet templateSheet, HSSFSheet resultSheet) {
        List<HSSFClientAnchor> list = getAllAnchors(getEscherAggregate(templateSheet));
        int i = 0;
        if (CollectionUtils.isNotEmpty(orderedPicturesId)) {//just a shitty workaround for anchors without pictures
            for (HSSFClientAnchor anchor : list) {
                // Shift the anchor's top-left to the corresponding result area, keeping the anchor size.
                Cell topLeft = getCellFromTemplate(new Cell(anchor.getCol1(), anchor.getRow1()));
                anchor.setCol1(topLeft.getCol());
                anchor.setRow1(topLeft.getRow());

                anchor.setCol2(topLeft.getCol() + anchor.getCol2() - anchor.getCol1());
                anchor.setRow2(topLeft.getRow() + anchor.getRow2() - anchor.getRow1());

                HSSFPatriarch sheetPatriarch = drawingPatriarchsMap.get(resultSheet);
                if (sheetPatriarch != null) {
                    sheetPatriarch.createPicture(anchor, orderedPicturesId.get(i++));
                }
            }
        }
    }

    /** True if the sheet already has a (possibly empty) row at the given index. */
    protected boolean rowExists(HSSFSheet sheet, int rowNumber) {
        return sheet.getRow(rowNumber) != null;
    }

    /** Returns a copy of the template cell coordinate translated into result-sheet coordinates. */
    protected Cell getCellFromTemplate(Cell cell) {
        Cell newCell = new Cell(cell);
        updateCell(newCell);
        return newCell;
    }

    /**
     * Translates a template coordinate in place: if the coordinate falls inside a template
     * area that has result areas, it is shifted by the offset to the FIRST result area.
     */
    protected void updateCell(Cell cell) {
        Area templateArea = areaDependencyManager.getTemplateAreaByCoordinate(cell.getCol(), cell.getRow());
        List<Area> resultAreas = areasDependency.get(templateArea);

        if (CollectionUtils.isNotEmpty(resultAreas)) {
            Area destination = resultAreas.get(0);

            int col = cell.getCol() - templateArea.getTopLeft().getCol() + destination.getTopLeft().getCol();
            int row = cell.getRow() - templateArea.getTopLeft().getRow() + destination.getTopLeft().getRow();

            cell.setCol(col);
            cell.setRow(row);
        }
    }

    /** Replicates a template row page break onto the corresponding result row, if one exists. */
    protected void copyPageBreaks(HSSFSheet templateSheet, HSSFSheet resultSheet, int templateRowIndex, int resultRowIndex) {
        int[] rowBreaks = templateSheet.getRowBreaks();
        for (int rowBreak : rowBreaks) {
            if (rowBreak == templateRowIndex) {
                resultSheet.setRowBreak(resultRowIndex);
                break;
            }
        }
    }

    //---------------------Utility classes------------------------

    /**
     * Cell range at sheet
     */
    protected static class SheetRange {
        private CellRangeAddress cellRangeAddress;
        private String sheetName;

        private SheetRange(CellRangeAddress cellRangeAddress, String sheetName) {
            this.cellRangeAddress = cellRangeAddress;
            this.sheetName = sheetName;
        }

        public CellRangeAddress getCellRangeAddress() {
            return cellRangeAddress;
        }

        public String getSheetName() {
            return sheetName;
        }
    }

    /**
     * Bounds of region [(x,y) : (x1, y1)]
     */
    protected static class Bounds {
        public final int row0;
        public final int column0;
        public final int row1;
        public final int column1;

        private Bounds(int row0, int column0, int row1, int column1) {
            this.row0 = row0;
            this.column0 = column0;
            this.row1 = row1;
            this.column1 = column1;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.taskmanager;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.blob.BlobCacheService;
import org.apache.flink.runtime.blob.PermanentBlobCache;
import org.apache.flink.runtime.blob.TransientBlobCache;
import org.apache.flink.runtime.broadcast.BroadcastVariableManager;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointMetrics;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.clusterframework.types.AllocationID;
import org.apache.flink.runtime.deployment.InputGateDeploymentDescriptor;
import org.apache.flink.runtime.deployment.ResultPartitionDeploymentDescriptor;
import org.apache.flink.runtime.execution.Environment;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.execution.librarycache.LibraryCacheManager;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.JobInformation;
import org.apache.flink.runtime.executiongraph.TaskInformation;
import org.apache.flink.runtime.filecache.FileCache;
import org.apache.flink.runtime.io.disk.iomanager.IOManager;
import org.apache.flink.runtime.io.network.NettyShuffleEnvironmentBuilder;
import org.apache.flink.runtime.io.network.TaskEventDispatcher;
import org.apache.flink.runtime.io.network.partition.NoOpResultPartitionConsumableNotifier;
import org.apache.flink.runtime.io.network.partition.ResultPartitionConsumableNotifier;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable;
import org.apache.flink.runtime.jobgraph.tasks.InputSplitProvider;
import org.apache.flink.runtime.memory.MemoryManager;
import org.apache.flink.runtime.metrics.groups.TaskMetricGroup;
import org.apache.flink.runtime.metrics.groups.UnregisteredMetricGroups;
import org.apache.flink.runtime.query.KvStateRegistry;
import org.apache.flink.runtime.shuffle.ShuffleEnvironment;
import org.apache.flink.runtime.state.TestTaskStateManager;
import org.apache.flink.runtime.taskexecutor.KvStateService;
import org.apache.flink.runtime.taskexecutor.PartitionProducerStateChecker;
import org.apache.flink.runtime.taskexecutor.TestGlobalAggregateManager;
import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
import org.apache.flink.util.SerializedValue;
import org.apache.flink.util.TestLogger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Executor;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.isOneOf;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
* Testing asynchronous call of {@link Task}.
*/
public class TaskAsyncCallTest extends TestLogger {
	/** Number of expected checkpoints. */
	private static int numCalls;
	/** Triggered at the beginning of {@link CheckpointsInOrderInvokable#invoke()}. */
	private static OneShotLatch awaitLatch;
	/**
	 * Triggered when {@link CheckpointsInOrderInvokable#triggerCheckpoint(CheckpointMetaData, CheckpointOptions, boolean)}
	 * was called {@link #numCalls} times.
	 */
	private static OneShotLatch triggerLatch;
	/**
	 * Triggered when {@link CheckpointsInOrderInvokable#notifyCheckpointComplete(long)}
	 * was called {@link #numCalls} times.
	 */
	private static OneShotLatch notifyCheckpointCompleteLatch;
	/** Triggered on {@link ContextClassLoaderInterceptingInvokable#cancel()}. */
	private static OneShotLatch stopLatch;
	/** Context class loaders observed by {@link ContextClassLoaderInterceptingInvokable}; shared across threads. */
	private static final List<ClassLoader> classLoaders = Collections.synchronizedList(new ArrayList<>());
	private ShuffleEnvironment<?, ?> shuffleEnvironment;
	/** Resets the static latches/counters and builds a fresh shuffle environment before each test. */
	@Before
	public void createQueuesAndActors() {
		numCalls = 1000;
		awaitLatch = new OneShotLatch();
		triggerLatch = new OneShotLatch();
		notifyCheckpointCompleteLatch = new OneShotLatch();
		stopLatch = new OneShotLatch();
		shuffleEnvironment = new NettyShuffleEnvironmentBuilder().build();
		classLoaders.clear();
	}
	/** Releases the shuffle environment created in {@link #createQueuesAndActors()}. */
	@After
	public void teardown() throws Exception {
		if (shuffleEnvironment != null) {
			shuffleEnvironment.close();
		}
	}
	// ------------------------------------------------------------------------
	//  Tests
	// ------------------------------------------------------------------------
	/**
	 * Fires {@link #numCalls} checkpoint barriers at a running task and asserts that the
	 * invokable observed them in order (the invokable records an error otherwise, which
	 * would fail the task).
	 */
	@Test
	public void testCheckpointCallsInOrder() throws Exception {
		Task task = createTask(CheckpointsInOrderInvokable.class);
		try (TaskCleaner ignored = new TaskCleaner(task)) {
			task.startTaskThread();
			// wait until the invokable is actually running before triggering checkpoints
			awaitLatch.await();
			for (int i = 1; i <= numCalls; i++) {
				task.triggerCheckpointBarrier(i, 156865867234L, CheckpointOptions.forCheckpointWithDefaultLocation(), false);
			}
			// released once the invokable has seen all numCalls checkpoints in order
			triggerLatch.await();
			assertFalse(task.isCanceledOrFailed());
			ExecutionState currentState = task.getExecutionState();
			assertThat(currentState, isOneOf(ExecutionState.RUNNING, ExecutionState.FINISHED));
		}
	}
	/**
	 * Interleaves checkpoint triggers with completion notifications and asserts the task
	 * still processes both call types in order without failing.
	 */
	@Test
	public void testMixedAsyncCallsInOrder() throws Exception {
		Task task = createTask(CheckpointsInOrderInvokable.class);
		try (TaskCleaner ignored = new TaskCleaner(task)) {
			task.startTaskThread();
			awaitLatch.await();
			for (int i = 1; i <= numCalls; i++) {
				task.triggerCheckpointBarrier(i, 156865867234L, CheckpointOptions.forCheckpointWithDefaultLocation(), false);
				task.notifyCheckpointComplete(i);
			}
			triggerLatch.await();
			assertFalse(task.isCanceledOrFailed());
			ExecutionState currentState = task.getExecutionState();
			assertThat(currentState, isOneOf(ExecutionState.RUNNING, ExecutionState.FINISHED));
		}
	}
	/**
	 * Asserts that {@link AbstractInvokable#triggerCheckpoint(CheckpointMetaData, CheckpointOptions, boolean)},
	 * and {@link AbstractInvokable#notifyCheckpointComplete(long)} are invoked by a thread whose context
	 * class loader is set to the user code class loader.
	 */
	@Test
	public void testSetsUserCodeClassLoader() throws Exception {
		numCalls = 1;
		Task task = createTask(ContextClassLoaderInterceptingInvokable.class);
		try (TaskCleaner ignored = new TaskCleaner(task)) {
			task.startTaskThread();
			awaitLatch.await();
			task.triggerCheckpointBarrier(1, 1, CheckpointOptions.forCheckpointWithDefaultLocation(), false);
			triggerLatch.await();
			task.notifyCheckpointComplete(1);
			notifyCheckpointCompleteLatch.await();
			task.cancelExecution();
			stopLatch.await();
			// one class loader recorded per intercepted call (trigger + notify), so at least two
			assertThat(classLoaders, hasSize(greaterThanOrEqualTo(2)));
			assertThat(classLoaders, everyItem(instanceOf(TestUserCodeClassLoader.class)));
		}
	}
	/**
	 * Builds a {@link Task} around the given invokable class, wiring mocks and no-op/test
	 * services for every collaborator that is irrelevant to these tests. The library cache
	 * is stubbed to hand out a {@link TestUserCodeClassLoader} so
	 * {@link #testSetsUserCodeClassLoader()} can recognize the user code class loader.
	 */
	private Task createTask(Class<? extends AbstractInvokable> invokableClass) throws Exception {
		BlobCacheService blobService =
			new BlobCacheService(mock(PermanentBlobCache.class), mock(TransientBlobCache.class));
		LibraryCacheManager libCache = mock(LibraryCacheManager.class);
		when(libCache.getClassLoader(any(JobID.class))).thenReturn(new TestUserCodeClassLoader());
		ResultPartitionConsumableNotifier consumableNotifier = new NoOpResultPartitionConsumableNotifier();
		PartitionProducerStateChecker partitionProducerStateChecker = mock(PartitionProducerStateChecker.class);
		Executor executor = mock(Executor.class);
		TaskMetricGroup taskMetricGroup = UnregisteredMetricGroups.createUnregisteredTaskMetricGroup();
		JobInformation jobInformation = new JobInformation(
			new JobID(),
			"Job Name",
			new SerializedValue<>(new ExecutionConfig()),
			new Configuration(),
			Collections.emptyList(),
			Collections.emptyList());
		TaskInformation taskInformation = new TaskInformation(
			new JobVertexID(),
			"Test Task",
			1,
			1,
			invokableClass.getName(),
			new Configuration());
		return new Task(
			jobInformation,
			taskInformation,
			new ExecutionAttemptID(),
			new AllocationID(),
			0,
			0,
			Collections.<ResultPartitionDeploymentDescriptor>emptyList(),
			Collections.<InputGateDeploymentDescriptor>emptyList(),
			0,
			mock(MemoryManager.class),
			mock(IOManager.class),
			shuffleEnvironment,
			new KvStateService(new KvStateRegistry(), null, null),
			mock(BroadcastVariableManager.class),
			new TaskEventDispatcher(),
			new TestTaskStateManager(),
			mock(TaskManagerActions.class),
			mock(InputSplitProvider.class),
			mock(CheckpointResponder.class),
			new TestGlobalAggregateManager(),
			blobService,
			libCache,
			mock(FileCache.class),
			new TestingTaskManagerRuntimeInfo(),
			taskMetricGroup,
			consumableNotifier,
			partitionProducerStateChecker,
			executor);
	}
	/**
	 * Invokable for testing checkpoints. Records an out-of-order error (instead of the
	 * expected ascending checkpoint ids) and uses the static latches to signal progress
	 * back to the test thread.
	 */
	public static class CheckpointsInOrderInvokable extends AbstractInvokable {
		// highest checkpoint id seen so far; volatile because it is read/written across threads
		private volatile long lastCheckpointId = 0;
		// set when calls arrive out of order; wakes invoke() so the task fails
		private volatile Exception error;
		public CheckpointsInOrderInvokable(Environment environment) {
			super(environment);
		}
		@Override
		public void invoke() throws Exception {
			awaitLatch.trigger();
			// wait forever (until canceled)
			synchronized (this) {
				while (error == null) {
					wait();
				}
			}
			if (error != null) {
				// exit method prematurely due to error but make sure that the tests can finish
				triggerLatch.trigger();
				notifyCheckpointCompleteLatch.trigger();
				stopLatch.trigger();
				throw error;
			}
		}
		@Override
		public boolean triggerCheckpoint(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
			lastCheckpointId++;
			if (checkpointMetaData.getCheckpointId() == lastCheckpointId) {
				if (lastCheckpointId == numCalls) {
					// all expected checkpoints arrived in order
					triggerLatch.trigger();
				}
			}
			else if (this.error == null) {
				this.error = new Exception("calls out of order");
				synchronized (this) {
					notifyAll();
				}
			}
			return true;
		}
		@Override
		public void triggerCheckpointOnBarrier(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, CheckpointMetrics checkpointMetrics) throws Exception {
			throw new UnsupportedOperationException("Should not be called");
		}
		@Override
		public void abortCheckpointOnBarrier(long checkpointId, Throwable cause) {
			throw new UnsupportedOperationException("Should not be called");
		}
		@Override
		public void notifyCheckpointComplete(long checkpointId) {
			if (checkpointId != lastCheckpointId && this.error == null) {
				this.error = new Exception("calls out of order");
				synchronized (this) {
					notifyAll();
				}
			} else if (lastCheckpointId == numCalls) {
				notifyCheckpointCompleteLatch.trigger();
			}
		}
	}
	/**
	 * This is an {@link AbstractInvokable} that stores the context class loader of the invoking
	 * thread in a static field so that tests can assert on the class loader instances.
	 *
	 * @see #testSetsUserCodeClassLoader()
	 */
	public static class ContextClassLoaderInterceptingInvokable extends CheckpointsInOrderInvokable {
		public ContextClassLoaderInterceptingInvokable(Environment environment) {
			super(environment);
		}
		@Override
		public boolean triggerCheckpoint(CheckpointMetaData checkpointMetaData, CheckpointOptions checkpointOptions, boolean advanceToEndOfEventTime) {
			classLoaders.add(Thread.currentThread().getContextClassLoader());
			return super.triggerCheckpoint(checkpointMetaData, checkpointOptions, advanceToEndOfEventTime);
		}
		@Override
		public void notifyCheckpointComplete(long checkpointId) {
			classLoaders.add(Thread.currentThread().getContextClassLoader());
			super.notifyCheckpointComplete(checkpointId);
		}
		@Override
		public void cancel() {
			stopLatch.trigger();
		}
	}
	/**
	 * A {@link ClassLoader} that delegates everything to {@link ClassLoader#getSystemClassLoader()}.
	 *
	 * @see #testSetsUserCodeClassLoader()
	 */
	private static class TestUserCodeClassLoader extends ClassLoader {
		public TestUserCodeClassLoader() {
			super(ClassLoader.getSystemClassLoader());
		}
	}
	/** try-with-resources guard that cancels the task and joins its thread on close. */
	private static class TaskCleaner implements AutoCloseable {
		private final Task task;
		private TaskCleaner(Task task) {
			this.task = task;
		}
		@Override
		public void close() throws Exception {
			task.cancelExecution();
			// bounded join so a hanging task cannot block the test forever
			task.getExecutingThread().join(5000);
		}
	}
}
| |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.feed.library.feedknowncontent;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.Captor;
import org.mockito.Mock;
import org.robolectric.annotation.Config;
import org.chromium.base.Consumer;
import org.chromium.base.Function;
import org.chromium.chrome.browser.feed.library.api.client.knowncontent.ContentMetadata;
import org.chromium.chrome.browser.feed.library.api.client.knowncontent.ContentRemoval;
import org.chromium.chrome.browser.feed.library.api.client.knowncontent.KnownContent;
import org.chromium.chrome.browser.feed.library.api.internal.common.ThreadUtils;
import org.chromium.chrome.browser.feed.library.api.internal.sessionmanager.FeedSessionManager;
import org.chromium.chrome.browser.feed.library.common.Result;
import org.chromium.chrome.browser.feed.library.common.concurrent.testing.FakeMainThreadRunner;
import org.chromium.components.feed.core.proto.libraries.api.internal.StreamDataProto.StreamFeature;
import org.chromium.components.feed.core.proto.libraries.api.internal.StreamDataProto.StreamPayload;
import org.chromium.components.feed.core.proto.ui.stream.StreamStructureProto.Content;
import org.chromium.components.feed.core.proto.ui.stream.StreamStructureProto.OfflineMetadata;
import org.chromium.components.feed.core.proto.ui.stream.StreamStructureProto.RepresentationData;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import java.util.Collections;
import java.util.List;
/** Tests for {@link FeedKnownContentImpl}. */
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class FeedKnownContentImplTest {
    private static final long CONTENT_CREATION_DATE_TIME_MS = 123L;
    private static final List<ContentRemoval> CONTENT_REMOVED =
            Collections.singletonList(new ContentRemoval("url", /* requestedByUser= */ false));
    private static final String URL = "url";
    private static final String TITLE = "title";
    @Mock
    private FeedSessionManager mFeedSessionManager;
    @Mock
    private KnownContent.Listener mListener1;
    @Mock
    private KnownContent.Listener mListener2;
    @Mock
    private Consumer<List<ContentMetadata>> mKnownContentConsumer;
    @Mock
    private ThreadUtils mThreadUtils;
    // Runs posted tasks inline so listener notifications happen synchronously in most tests.
    private final FakeMainThreadRunner mMainThreadRunner =
            FakeMainThreadRunner.runTasksImmediately();
    // Captures the payload->metadata mapping function handed to the session manager.
    @Captor
    private ArgumentCaptor<Function<StreamPayload, ContentMetadata>> mKnownContentFunctionCaptor;
    // Captures the result consumer handed to the session manager.
    @Captor
    private ArgumentCaptor<Consumer<Result<List<ContentMetadata>>>> mContentMetadataResultCaptor;
    private FeedKnownContentImpl mKnownContentApi;
    @Before
    public void setUp() {
        initMocks(this);
        // Default to main-thread execution; testGetKnownContent_offMainThread overrides this.
        when(mThreadUtils.isMainThread()).thenReturn(true);
        mKnownContentApi =
                new FeedKnownContentImpl(mFeedSessionManager, mMainThreadRunner, mThreadUtils);
    }
    /** The constructor must register its host notifier as the session manager's listener. */
    @Test
    public void testSetsListenerOnSessionManager() {
        verify(mFeedSessionManager)
                .setKnownContentListener(mKnownContentApi.getKnownContentHostNotifier());
    }
    /** New-content events are fanned out to every registered listener. */
    @Test
    public void testNotifyListeners_contentReceived() {
        mKnownContentApi.addListener(mListener1);
        mKnownContentApi.addListener(mListener2);
        mKnownContentApi.getKnownContentHostNotifier().onNewContentReceived(
                /* isNewRefresh= */ false, CONTENT_CREATION_DATE_TIME_MS);
        verify(mListener1)
                .onNewContentReceived(
                        /* isNewRefresh= */ false, CONTENT_CREATION_DATE_TIME_MS);
        verify(mListener2)
                .onNewContentReceived(
                        /* isNewRefresh= */ false, CONTENT_CREATION_DATE_TIME_MS);
    }
    /** Content-removed events are fanned out to every registered listener. */
    @Test
    public void testNotifyListeners_contentRemoved() {
        mKnownContentApi.addListener(mListener1);
        mKnownContentApi.addListener(mListener2);
        mKnownContentApi.getKnownContentHostNotifier().onContentRemoved(CONTENT_REMOVED);
        verify(mListener1).onContentRemoved(CONTENT_REMOVED);
        verify(mListener2).onContentRemoved(CONTENT_REMOVED);
    }
    /** A removed listener receives no further notifications of either kind. */
    @Test
    public void testRemoveListener() {
        mKnownContentApi.addListener(mListener1);
        mKnownContentApi.removeListener(mListener1);
        mKnownContentApi.getKnownContentHostNotifier().onContentRemoved(CONTENT_REMOVED);
        mKnownContentApi.getKnownContentHostNotifier().onNewContentReceived(
                /* isNewRefresh= */ true, CONTENT_CREATION_DATE_TIME_MS);
        verifyNoMoreInteractions(mListener1);
    }
    /** Payloads without content must map to null metadata. */
    @Test
    public void testGetKnownContent_returnsNullForNonContent() {
        mKnownContentApi.getKnownContent(mKnownContentConsumer);
        verify(mFeedSessionManager)
                .getStreamFeaturesFromHead(mKnownContentFunctionCaptor.capture(),
                        mContentMetadataResultCaptor.capture());
        assertThat(mKnownContentFunctionCaptor.getValue().apply(StreamPayload.getDefaultInstance()))
                .isNull();
    }
    /** Payloads carrying content map to metadata with the content's URL and title. */
    @Test
    public void testGetKnownContent_returnsContentMetadataFromContent() {
        mKnownContentApi.getKnownContent(mKnownContentConsumer);
        verify(mFeedSessionManager)
                .getStreamFeaturesFromHead(mKnownContentFunctionCaptor.capture(),
                        mContentMetadataResultCaptor.capture());
        StreamPayload streamPayload =
                StreamPayload.newBuilder()
                        .setStreamFeature(StreamFeature.newBuilder().setContent(
                                Content.newBuilder()
                                        .setOfflineMetadata(
                                                OfflineMetadata.newBuilder().setTitle(TITLE))
                                        .setRepresentationData(
                                                RepresentationData.newBuilder().setUri(URL))))
                        .build();
        ContentMetadata contentMetadata =
                mKnownContentFunctionCaptor.getValue().apply(streamPayload);
        assertThat(contentMetadata.getUrl()).isEqualTo(URL);
        assertThat(contentMetadata.getTitle()).isEqualTo(TITLE);
    }
    /** A failed session-manager result must not reach the caller's consumer. */
    @Test
    public void testGetKnownContent_failure() {
        mKnownContentApi.getKnownContent(mKnownContentConsumer);
        verify(mFeedSessionManager)
                .getStreamFeaturesFromHead(mKnownContentFunctionCaptor.capture(),
                        mContentMetadataResultCaptor.capture());
        mContentMetadataResultCaptor.getValue().accept(Result.failure());
        verify(mKnownContentConsumer, never())
                .accept(ArgumentMatchers.<List<ContentMetadata>>any());
    }
    /** Off the main thread, notifications must be posted to the main-thread runner, not run inline. */
    @Test
    public void testGetKnownContent_offMainThread() {
        FakeMainThreadRunner fakeMainThreadRunner = FakeMainThreadRunner.queueAllTasks();
        when(mThreadUtils.isMainThread()).thenReturn(false);
        mKnownContentApi =
                new FeedKnownContentImpl(mFeedSessionManager, fakeMainThreadRunner, mThreadUtils);
        mKnownContentApi.addListener(mListener1);
        mKnownContentApi.getKnownContentHostNotifier().onNewContentReceived(
                /* isNewRefresh= */ false, CONTENT_CREATION_DATE_TIME_MS);
        // The notification is queued but not yet delivered.
        assertThat(fakeMainThreadRunner.hasTasks()).isTrue();
        verifyZeroInteractions(mListener1);
        fakeMainThreadRunner.runAllTasks();
        verify(mListener1)
                .onNewContentReceived(
                        /* isNewRefresh= */ false, CONTENT_CREATION_DATE_TIME_MS);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.type.Type;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;
import java.util.Locale;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import static com.facebook.presto.spi.block.SortOrder.ASC_NULLS_FIRST;
import static com.facebook.presto.spi.block.SortOrder.ASC_NULLS_LAST;
import static com.facebook.presto.spi.block.SortOrder.DESC_NULLS_FIRST;
import static com.facebook.presto.spi.block.SortOrder.DESC_NULLS_LAST;
import static com.facebook.presto.spi.type.TimeZoneKey.UTC_KEY;
import static com.google.common.base.Preconditions.checkNotNull;
import static io.airlift.testing.Assertions.assertInstanceOf;
import static java.util.Collections.unmodifiableSortedMap;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public abstract class AbstractTestType
{
public static final ConnectorSession SESSION = new ConnectorSession("user", "source", "catalog", "schema", UTC_KEY, Locale.ENGLISH, "address", "agent");
private final Class<?> objectValueType;
private final Block testBlock;
private final Type type;
private final SortedMap<Integer, Object> expectedStackValues;
private final SortedMap<Integer, Object> expectedObjectValues;
private final Block testBlockWithNulls;
protected AbstractTestType(Type type, Class<?> objectValueType, Block testBlock)
{
this(type, objectValueType, testBlock, testBlock);
}
protected AbstractTestType(Type type, Class<?> objectValueType, Block testBlock, Block expectedValues)
{
this.type = checkNotNull(type, "type is null");
this.objectValueType = checkNotNull(objectValueType, "objectValueType is null");
this.testBlock = checkNotNull(testBlock, "testBlock is null");
checkNotNull(expectedValues, "expectedValues is null");
this.expectedStackValues = indexStackValues(type, expectedValues);
this.expectedObjectValues = indexObjectValues(type, expectedValues);
this.testBlockWithNulls = createAlternatingNullsBlock(testBlock);
}
private Block createAlternatingNullsBlock(Block testBlock)
{
BlockBuilder nullsBlockBuilder = type.createBlockBuilder(new BlockBuilderStatus());
for (int position = 0; position < testBlock.getPositionCount(); position++) {
if (type.getJavaType() == void.class) {
nullsBlockBuilder.appendNull();
}
else if (type.getJavaType() == boolean.class) {
type.writeBoolean(nullsBlockBuilder, type.getBoolean(testBlock, position));
}
else if (type.getJavaType() == long.class) {
type.writeLong(nullsBlockBuilder, type.getLong(testBlock, position));
}
else if (type.getJavaType() == double.class) {
type.writeDouble(nullsBlockBuilder, type.getDouble(testBlock, position));
}
else if (type.getJavaType() == Slice.class) {
Slice slice = type.getSlice(testBlock, position);
type.writeSlice(nullsBlockBuilder, slice, 0, slice.length());
}
else {
throw new RuntimeException("Unsupported Java type " + type.getJavaType());
}
nullsBlockBuilder.appendNull();
}
return nullsBlockBuilder.build();
}
@Test
public void testBlock()
{
for (Entry<Integer, Object> entry : expectedStackValues.entrySet()) {
assertPositionEquals(testBlock, entry.getKey(), entry.getValue(), expectedObjectValues.get(entry.getKey()));
}
for (Entry<Integer, Object> entry : expectedStackValues.entrySet()) {
assertPositionEquals(testBlockWithNulls, entry.getKey() * 2, entry.getValue(), expectedObjectValues.get(entry.getKey()));
assertPositionEquals(testBlockWithNulls, (entry.getKey() * 2) + 1, null, null);
}
}
protected void assertPositionEquals(Block block, int position, Object expectedStackValue, Object expectedObjectValue)
{
int hash = 0;
if (type.isComparable()) {
hash = type.hash(block, position);
}
assertPositionValue(block, position, expectedStackValue, hash, expectedObjectValue);
assertPositionValue(block.getSingleValueBlock(position), 0, expectedStackValue, hash, expectedObjectValue);
assertPositionValue(block.getRegion(position, 1), 0, expectedStackValue, hash, expectedObjectValue);
assertPositionValue(block.getRegion(0, position + 1), position, expectedStackValue, hash, expectedObjectValue);
assertPositionValue(block.getRegion(position, block.getPositionCount() - position), 0, expectedStackValue, hash, expectedObjectValue);
BlockBuilder blockBuilder = type.createBlockBuilder(new BlockBuilderStatus());
type.appendTo(block, position, blockBuilder);
assertPositionValue(blockBuilder.build(), 0, expectedStackValue, hash, expectedObjectValue);
}
private void assertPositionValue(Block block, int position, Object expectedStackValue, int expectedHash, Object expectedObjectValue)
{
Object objectValue = type.getObjectValue(SESSION, block, position);
assertEquals(objectValue, expectedObjectValue);
if (objectValue != null) {
assertInstanceOf(objectValue, objectValueType);
}
if (type.isComparable()) {
assertEquals(type.hash(block, position), expectedHash);
}
else {
try {
type.hash(block, position);
fail("Expected UnsupportedOperationException");
}
catch (UnsupportedOperationException expected) {
}
}
Block expectedBlock = createBlock(type, expectedStackValue);
if (type.isComparable()) {
assertTrue(type.equalTo(block, position, block, position));
assertTrue(type.equalTo(block, position, expectedBlock, 0));
assertTrue(type.equalTo(expectedBlock, 0, block, position));
}
assertEquals(block.isNull(position), expectedStackValue == null);
if (type.isOrderable()) {
assertTrue(ASC_NULLS_FIRST.compareBlockValue(type, block, position, expectedBlock, 0) == 0);
assertTrue(ASC_NULLS_LAST.compareBlockValue(type, block, position, expectedBlock, 0) == 0);
assertTrue(DESC_NULLS_FIRST.compareBlockValue(type, block, position, expectedBlock, 0) == 0);
assertTrue(DESC_NULLS_LAST.compareBlockValue(type, block, position, expectedBlock, 0) == 0);
}
else {
try {
type.compareTo(block, position, expectedBlock, 0);
fail("Expected UnsupportedOperationException");
}
catch (UnsupportedOperationException expected) {
}
}
verifyInvalidPositionHandling(block);
if (block.isNull(position)) {
if (type.isOrderable()) {
Block nonNullValue = toBlock(getNonNullValue());
assertTrue(ASC_NULLS_FIRST.compareBlockValue(type, block, position, nonNullValue, 0) < 0);
assertTrue(ASC_NULLS_LAST.compareBlockValue(type, block, position, nonNullValue, 0) > 0);
assertTrue(DESC_NULLS_FIRST.compareBlockValue(type, block, position, nonNullValue, 0) < 0);
assertTrue(DESC_NULLS_LAST.compareBlockValue(type, block, position, nonNullValue, 0) > 0);
}
return;
}
if (type.isOrderable() && expectedStackValue != Boolean.TRUE) {
Block greaterValue = toBlock(getGreaterValue(expectedStackValue));
assertTrue(ASC_NULLS_FIRST.compareBlockValue(type, block, position, greaterValue, 0) < 0);
assertTrue(ASC_NULLS_LAST.compareBlockValue(type, block, position, greaterValue, 0) < 0);
assertTrue(DESC_NULLS_FIRST.compareBlockValue(type, block, position, greaterValue, 0) > 0);
assertTrue(DESC_NULLS_LAST.compareBlockValue(type, block, position, greaterValue, 0) > 0);
}
if (type.getJavaType() == boolean.class) {
assertEquals(type.getBoolean(block, position), expectedStackValue);
try {
type.getLong(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
try {
type.getDouble(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
}
else if (type.getJavaType() == long.class) {
assertEquals(type.getLong(block, position), expectedStackValue);
try {
type.getBoolean(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
try {
type.getDouble(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
}
else if (type.getJavaType() == double.class) {
assertEquals(type.getDouble(block, position), expectedStackValue);
try {
type.getBoolean(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
try {
type.getLong(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
}
else if (type.getJavaType() == Slice.class) {
assertEquals(type.getSlice(block, position), expectedStackValue);
try {
type.getBoolean(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
try {
type.getLong(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
try {
type.getDouble(block, position);
fail("Expected IllegalStateException or UnsupportedOperationException");
}
catch (IllegalStateException | UnsupportedOperationException expected) {
}
}
}
private void verifyInvalidPositionHandling(Block block)
{
try {
type.getObjectValue(SESSION, block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.getObjectValue(SESSION, block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.hash(block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.hash(block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
if (type.isComparable()) {
Block other = toBlock(getNonNullValue());
try {
type.equalTo(block, -1, other, 0);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.equalTo(block, block.getPositionCount(), other, 0);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
if (type.isOrderable()) {
Block other = toBlock(getNonNullValue());
try {
ASC_NULLS_FIRST.compareBlockValue(type, block, -1, other, 0);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
ASC_NULLS_FIRST.compareBlockValue(type, block, block.getPositionCount(), other, 0);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
if (type.getJavaType() == boolean.class) {
try {
type.getBoolean(block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.getBoolean(block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
else if (type.getJavaType() == long.class) {
try {
type.getLong(block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.getLong(block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
else if (type.getJavaType() == double.class) {
try {
type.getDouble(block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.getDouble(block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
else if (type.getJavaType() == Slice.class) {
try {
type.getSlice(block, -1);
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
try {
type.getSlice(block, block.getPositionCount());
fail("expected RuntimeException");
}
catch (RuntimeException expected) {
}
}
}
private static Block createBlock(Type type, Object value)
{
BlockBuilder blockBuilder = type.createBlockBuilder(new BlockBuilderStatus());
Class<?> javaType = type.getJavaType();
if (value == null) {
blockBuilder.appendNull();
}
else if (javaType == boolean.class) {
type.writeBoolean(blockBuilder, (Boolean) value);
}
else if (javaType == long.class) {
type.writeLong(blockBuilder, (Long) value);
}
else if (javaType == double.class) {
type.writeDouble(blockBuilder, (Double) value);
}
else if (javaType == Slice.class) {
Slice slice = (Slice) value;
type.writeSlice(blockBuilder, slice, 0, slice.length());
}
else {
throw new UnsupportedOperationException("not yet implemented: " + javaType);
}
return blockBuilder.build();
}
protected abstract Object getGreaterValue(Object value);
protected Object getNonNullValue()
{
if (type.getJavaType() == boolean.class) {
return true;
}
if (type.getJavaType() == long.class) {
return 1L;
}
if (type.getJavaType() == double.class) {
return 1.0;
}
if (type.getJavaType() == Slice.class) {
return Slices.utf8Slice("_");
}
throw new IllegalStateException("Unsupported Java type " + type.getJavaType());
}
private Block toBlock(Object value)
{
BlockBuilder blockBuilder = type.createBlockBuilder(new BlockBuilderStatus());
Class<?> javaType = type.getJavaType();
if (value == null) {
blockBuilder.appendNull();
}
else if (javaType == boolean.class) {
type.writeBoolean(blockBuilder, (Boolean) value);
}
else if (javaType == long.class) {
type.writeLong(blockBuilder, (Long) value);
}
else if (javaType == double.class) {
type.writeDouble(blockBuilder, (Double) value);
}
else if (javaType == Slice.class) {
Slice slice = (Slice) value;
type.writeSlice(blockBuilder, slice, 0, slice.length());
}
else {
throw new UnsupportedOperationException("not yet implemented: " + javaType);
}
return blockBuilder.build();
}
private static SortedMap<Integer, Object> indexStackValues(Type type, Block block)
{
SortedMap<Integer, Object> values = new TreeMap<>();
for (int position = 0; position < block.getPositionCount(); position++) {
if (block.isNull(position)) {
values.put(position, null);
}
else if (type.getJavaType() == boolean.class) {
values.put(position, type.getBoolean(block, position));
}
else if (type.getJavaType() == long.class) {
values.put(position, type.getLong(block, position));
}
else if (type.getJavaType() == double.class) {
values.put(position, type.getDouble(block, position));
}
else if (type.getJavaType() == Slice.class) {
values.put(position, type.getSlice(block, position));
}
else {
throw new RuntimeException("Unsupported value type " + type.getJavaType());
}
}
return unmodifiableSortedMap(values);
}
/**
 * Maps each position of the block to its object-level value as rendered for
 * {@code SESSION}; the returned map is unmodifiable.
 */
private static SortedMap<Integer, Object> indexObjectValues(Type type, Block block)
{
    SortedMap<Integer, Object> indexed = new TreeMap<>();
    int positionCount = block.getPositionCount();
    for (int i = 0; i < positionCount; i++) {
        indexed.put(i, type.getObjectValue(SESSION, block, i));
    }
    return unmodifiableSortedMap(indexed);
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.dataFlow;
import com.intellij.codeInspection.dataFlow.instructions.EndOfInitializerInstruction;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.types.*;
import com.intellij.codeInspection.dataFlow.value.DfaTypeValue;
import com.intellij.codeInspection.dataFlow.value.DfaValue;
import com.intellij.codeInspection.dataFlow.value.DfaVariableValue;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.*;
import com.intellij.psi.util.*;
import com.intellij.util.JavaPsiConstructorUtil;
import com.intellij.util.ThreeState;
import com.siyeh.ig.psiutils.ExpressionUtils;
import one.util.streamex.StreamEx;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RejectedExecutionException;
import static com.intellij.codeInspection.dataFlow.DfaUtil.hasImplicitImpureSuperCall;
/**
 * Facade over the Java dataflow analysis: runs it for the code fragment containing a given
 * expression, caches the result on the containing file, and exposes per-expression facts
 * (inferred {@link DfType}, set of possible constant values, contract-failure possibility).
 */
public final class CommonDataflow {
  /** Accumulated dataflow facts for a single expression, joined over all visited memory states. */
  private static class DataflowPoint {
    @NotNull DfType myDfType = DfTypes.BOTTOM;
    // empty = top; null = bottom
    @Nullable Set<Object> myPossibleValues = Collections.emptySet();
    // true if some state reached this expression as a contract failure value
    boolean myMayFailByContract = false;
    DataflowPoint() {}
    DataflowPoint(DataflowPoint other) {
      myDfType = other.myDfType;
      myPossibleValues = other.myPossibleValues;
      myMayFailByContract = other.myMayFailByContract;
    }
    /** Folds the value's constant (if any) in the given state into the possible-values set. */
    void addValue(DfaMemoryState memState, DfaValue value) {
      if (myPossibleValues == null) return;
      DfType dfType = memState.getDfType(value);
      if (!(dfType instanceof DfConstantType)) {
        // Non-constant in at least one state: possible-value set becomes unknown
        myPossibleValues = null;
        return;
      }
      Object newValue = ((DfConstantType<?>)dfType).getValue();
      if (myPossibleValues.contains(newValue)) return;
      if (myPossibleValues.isEmpty()) {
        myPossibleValues = Collections.singleton(newValue);
      }
      else {
        // Copy-on-write: the current set may be a shared immutable singleton
        myPossibleValues = new HashSet<>(myPossibleValues);
        myPossibleValues.add(newValue);
      }
    }
    /** Joins the value's DfType in the given state into the accumulated type. */
    void addFacts(DfaMemoryState memState, DfaValue value) {
      if (myDfType == DfTypes.TOP) return;
      DfType newType = memState.getDfType(value);
      if (value instanceof DfaVariableValue) {
        // Refine a reference type with what is known about its special field,
        // dropping the special field's own nullability wrapper if present
        SpecialField field = SpecialField.fromQualifier(value);
        if (field != null && newType instanceof DfReferenceType) {
          DfaValue specialField = field.createValue(value.getFactory(), value);
          DfType withSpecialField = field.asDfType(memState.getDfType(specialField));
          newType = newType
            .meet(withSpecialField instanceof DfReferenceType ? ((DfReferenceType)withSpecialField).dropNullability() : withSpecialField);
        }
      }
      myDfType = myDfType.join(newType);
    }
  }
  /**
   * Represents the result of dataflow applied to some code fragment (usually a method)
   */
  public static final class DataflowResult {
    private final @NotNull Map<PsiExpression, DataflowPoint> myData = new HashMap<>();
    // Aliases myData until a state with a known assertion status is seen, then forks
    private @NotNull Map<PsiExpression, DataflowPoint> myDataAssertionsDisabled = myData;
    private final RunnerResult myResult;
    public DataflowResult(RunnerResult result) {
      myResult = result;
    }
    @NotNull
    DataflowResult copy() {
      // NOTE(review): only myData is deep-copied; a forked myDataAssertionsDisabled
      // map is not carried over into the copy — confirm this is intentional
      DataflowResult copy = new DataflowResult(myResult);
      myData.forEach((expression, point) -> copy.myData.put(expression, new DataflowPoint(point)));
      return copy;
    }
    /** Records facts about {@code value} at {@code expression}, split by assertion status. */
    void add(PsiExpression expression, DfaMemoryState memState, DfaValue value) {
      DfaVariableValue assertionDisabled = value.getFactory().getAssertionDisabled();
      if (assertionDisabled == null) {
        assert myData == myDataAssertionsDisabled;
        updateDataPoint(myData, expression, memState, value);
      } else {
        DfType type = memState.getDfType(assertionDisabled);
        if (type == DfTypes.TRUE || type == DfTypes.FALSE) {
          if (myData == myDataAssertionsDisabled) {
            // First state with a known assertion status: fork the assertion-disabled view
            myDataAssertionsDisabled = new HashMap<>(myData);
          }
          updateDataPoint(type == DfTypes.TRUE ? myDataAssertionsDisabled : myData, expression, memState, value);
        } else {
          // Assertion status unknown in this state: contributes to both views
          updateDataPoint(myData, expression, memState, value);
          if (myData != myDataAssertionsDisabled) {
            updateDataPoint(myDataAssertionsDisabled, expression, memState, value);
          }
        }
      }
    }
    private void updateDataPoint(Map<PsiExpression, DataflowPoint> data,
                                 PsiExpression expression,
                                 DfaMemoryState memState,
                                 DfaValue value) {
      DataflowPoint point = data.computeIfAbsent(expression, e -> new DataflowPoint());
      if (DfaTypeValue.isContractFail(value)) {
        point.myMayFailByContract = true;
        return;
      }
      if (point.myDfType != DfTypes.TOP) {
        // Propagate branch results up to an enclosing conditional expression
        PsiElement parent = PsiUtil.skipParenthesizedExprUp(expression.getParent());
        if (parent instanceof PsiConditionalExpression &&
            !PsiTreeUtil.isAncestor(((PsiConditionalExpression)parent).getCondition(), expression, false)) {
          add((PsiExpression)parent, memState, value);
        }
      }
      point.addFacts(memState, value);
      point.addValue(memState, value);
    }
    /**
     * Returns true if given expression was visited by dataflow. Note that dataflow usually tracks deparenthesized expressions only,
     * so you should deparenthesize it in advance if necessary.
     *
     * @param expression expression to check, not parenthesized
     * @return true if given expression was visited by dataflow.
     * If false is returned, it's possible that the expression exists in unreachable branch or this expression is not tracked due to
     * the dataflow implementation details.
     */
    public boolean expressionWasAnalyzed(PsiExpression expression) {
      if (expression instanceof PsiParenthesizedExpression) {
        throw new IllegalArgumentException("Should not pass parenthesized expression");
      }
      return myData.containsKey(expression);
    }
    /**
     * Returns true if given call cannot fail according to its contracts
     * (e.g. {@code Optional.get()} executed under {@code Optional.isPresent()}).
     *
     * @param call call to check
     * @return true if it cannot fail by contract; false if unknown or can fail
     */
    public boolean cannotFailByContract(PsiCallExpression call) {
      DataflowPoint point = myData.get(call);
      return point != null && !point.myMayFailByContract;
    }
    /**
     * Returns a set of expression values if known. If non-empty set is returned, then given expression
     * is guaranteed to have one of returned values.
     *
     * @param expression an expression to get its value
     * @return a set of possible values or empty set if not known
     */
    @NotNull
    public Set<Object> getExpressionValues(@Nullable PsiExpression expression) {
      DataflowPoint point = myData.get(expression);
      if (point == null) return Collections.emptySet();
      Set<Object> values = point.myPossibleValues;
      return values == null ? Collections.emptySet() : Collections.unmodifiableSet(values);
    }
    /**
     * @param expression an expression to infer the DfType, must be deparenthesized.
     * @return DfType for that expression, assuming assertions are enabled.
     * May return {@link DfTypes#TOP} if no information from dataflow is known about this expression
     * @see #getDfTypeNoAssertions(PsiExpression)
     */
    @NotNull
    public DfType getDfType(PsiExpression expression) {
      DataflowPoint point = myData.get(expression);
      return point == null ? DfTypes.TOP : point.myDfType;
    }
    /**
     * @param expression an expression to infer the DfType, must be deparenthesized.
     * @return DfType for that expression, assuming assertions are disabled.
     * May return {@link DfTypes#TOP} if no information from dataflow is known about this expression
     * @see #getDfType(PsiExpression)
     */
    @NotNull
    public DfType getDfTypeNoAssertions(PsiExpression expression) {
      DataflowPoint point = myDataAssertionsDisabled.get(expression);
      return point == null ? DfTypes.TOP : point.myDfType;
    }
  }
  /**
   * Runs the dataflow analysis over the given block; for a class, additionally re-runs
   * each constructor seeded with the end-of-instance-initializer states.
   */
  @NotNull
  private static DataflowResult runDFA(@Nullable PsiElement block) {
    if (block == null) return new DataflowResult(RunnerResult.NOT_APPLICABLE);
    DataFlowRunner runner = new DataFlowRunner(block.getProject(), block, ThreeState.UNSURE);
    CommonDataflowVisitor visitor = new CommonDataflowVisitor();
    RunnerResult result = runner.analyzeMethodRecursively(block, visitor);
    if (result != RunnerResult.OK) return new DataflowResult(result);
    if (!(block instanceof PsiClass)) return visitor.myResult;
    DataflowResult dfr = visitor.myResult.copy();
    List<DfaMemoryState> states = visitor.myEndOfInitializerStates;
    for (PsiMethod method : ((PsiClass)block).getConstructors()) {
      List<DfaMemoryState> initialStates;
      PsiCodeBlock body = method.getBody();
      if (body == null) continue;
      PsiMethodCallExpression call = JavaPsiConstructorUtil.findThisOrSuperCallInConstructor(method);
      if (JavaPsiConstructorUtil.isChainedConstructorCall(call) || (call == null && hasImplicitImpureSuperCall((PsiClass)block, method))) {
        // Chained this()/impure super(): initializer states do not apply, start fresh
        initialStates = Collections.singletonList(runner.createMemoryState());
      } else {
        initialStates = StreamEx.of(states).map(DfaMemoryState::createCopy).toList();
      }
      if (runner.analyzeBlockRecursively(body, initialStates, visitor) == RunnerResult.OK) {
        dfr = visitor.myResult.copy();
      } else {
        // Constructor analysis failed: roll back to the last good snapshot
        visitor.myResult = dfr;
      }
    }
    return dfr;
  }
  /**
   * Returns the dataflow result for code fragment which contains given context
   * @param context a context to get the dataflow result
   * @return the dataflow result or null if dataflow cannot be launched for this context (e.g. we are inside too complex method)
   */
  @Nullable
  public static DataflowResult getDataflowResult(PsiExpression context) {
    PsiElement body = DfaUtil.getDataflowContext(context);
    if (body == null) return null;
    // Per-file cache of results, invalidated on any PSI modification
    ConcurrentHashMap<PsiElement, DataflowResult> fileMap =
      CachedValuesManager.getCachedValue(body.getContainingFile(), () ->
        CachedValueProvider.Result.create(new ConcurrentHashMap<>(), PsiModificationTracker.MODIFICATION_COUNT));
    // ManagedBlocker tells ForkJoinPool this thread may block on the cache computation
    class ManagedCompute implements ForkJoinPool.ManagedBlocker {
      DataflowResult myResult;
      @Override
      public boolean block() {
        myResult = fileMap.computeIfAbsent(body, CommonDataflow::runDFA);
        return true;
      }
      @Override
      public boolean isReleasable() {
        myResult = fileMap.get(body);
        return myResult != null;
      }
      DataflowResult getResult() {
        return myResult == null || myResult.myResult != RunnerResult.OK ? null : myResult;
      }
    }
    ManagedCompute managedCompute = new ManagedCompute();
    try {
      ForkJoinPool.managedBlock(managedCompute);
    }
    catch (RejectedExecutionException ex) {
      // Too many FJP threads: execute anyway in current thread
      managedCompute.block();
    }
    catch (InterruptedException ex) {
      // Should not happen
      throw new AssertionError(ex);
    }
    return managedCompute.getResult();
  }
  /**
   * @param expression an expression to infer the DfType
   * @return DfType for that expression. May return {@link DfTypes#TOP} if no information from dataflow is known about this expression
   */
  @NotNull
  public static DfType getDfType(PsiExpression expression) {
    DataflowResult result = getDataflowResult(expression);
    if (result == null) return DfTypes.TOP;
    return result.getDfType(PsiUtil.skipParenthesizedExprDown(expression));
  }
  /**
   * Returns long range set for expression or null if range is unknown.
   * This method first tries to compute expression using {@link com.intellij.psi.impl.ConstantExpressionEvaluator}
   * and only then calls {@link #getDfType(PsiExpression)}.
   *
   * @param expression expression to get its range
   * @return long range set
   */
  @Contract("null -> null")
  @Nullable
  public static LongRangeSet getExpressionRange(@Nullable PsiExpression expression) {
    if (expression == null) return null;
    Object value = ExpressionUtils.computeConstantExpression(expression);
    LongRangeSet rangeSet = LongRangeSet.fromConstant(value);
    if (rangeSet != null) return rangeSet;
    DfType dfType = getDfType(expression);
    return dfType instanceof DfIntegralType ? ((DfIntegralType)dfType).getRange() : null;
  }
  /**
   * Returns the value of given expression calculated via dataflow; or null if value is null or unknown.
   *
   * @param expression expression to analyze
   * @return expression value if known
   */
  @Contract("null -> null")
  public static Object computeValue(@Nullable PsiExpression expression) {
    PsiExpression expressionToAnalyze = PsiUtil.skipParenthesizedExprDown(expression);
    if (expressionToAnalyze == null) return null;
    // Cheap constant folding first; fall back to the dataflow-inferred constant
    Object computed = ExpressionUtils.computeConstantExpression(expressionToAnalyze);
    if (computed != null) return computed;
    return getDfType(expressionToAnalyze).getConstantOfType(Object.class);
  }
  /** Visitor that records every pushed expression value and end-of-initializer states. */
  private static class CommonDataflowVisitor extends StandardInstructionVisitor {
    private DataflowResult myResult = new DataflowResult(RunnerResult.OK);
    private final List<DfaMemoryState> myEndOfInitializerStates = new ArrayList<>();
    @Override
    public DfaInstructionState[] visitEndOfInitializer(EndOfInitializerInstruction instruction,
                                                       DataFlowRunner runner,
                                                       DfaMemoryState state) {
      if (!instruction.isStatic()) {
        // Remember instance-initializer exit states to seed constructor analysis
        myEndOfInitializerStates.add(state.createCopy());
      }
      return super.visitEndOfInitializer(instruction, runner, state);
    }
    @Override
    protected void beforeExpressionPush(@NotNull DfaValue value,
                                        @NotNull PsiExpression expression,
                                        @Nullable TextRange range,
                                        @NotNull DfaMemoryState state) {
      if (range == null) {
        // Do not track instructions which cover part of expression
        myResult.add(expression, state, value);
      }
    }
  }
}
| |
/*
* Copyright 2002-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.test.web.servlet.samples.standalone;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import jakarta.servlet.Filter;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletRequestWrapper;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.Part;
import org.junit.jupiter.api.Test;
import org.springframework.http.MediaType;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.mock.web.MockPart;
import org.springframework.stereotype.Controller;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.filter.OncePerRequestFilter;
import org.springframework.web.multipart.MultipartFile;
import static org.assertj.core.api.Assertions.assertThat;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.multipart;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.model;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.setup.MockMvcBuilders.standaloneSetup;
/**
 * Standalone MockMvc tests for multipart request handling: single, array, list and
 * {@link Optional} variants of {@link MultipartFile} parameters, servlet {@link Part}
 * arguments, {@code @RequestPart} JSON conversion, and a request-wrapping-filter scenario.
 *
 * @author Rossen Stoyanchev
 * @author Juergen Hoeller
 */
public class MultipartControllerTests {

	@Test
	public void multipartRequestWithSingleFile() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfile").file(filePart).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	// Missing optional @RequestParam/@RequestPart values should not fail the request
	@Test
	public void multipartRequestWithSingleFileNotPresent() throws Exception {
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfile"))
				.andExpect(status().isFound());
	}

	@Test
	public void multipartRequestWithFileArray() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart1 = new MockMultipartFile("file", "orig", null, fileContent);
		MockMultipartFile filePart2 = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfilearray").file(filePart1).file(filePart2).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithFileArrayNotPresent() throws Exception {
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfilearray"))
				.andExpect(status().isFound());
	}

	// A plain (non-multipart) POST must still bind to a null file array
	@Test
	public void multipartRequestWithFileArrayNoMultipart() throws Exception {
		standaloneSetup(new MultipartController()).build()
				.perform(post("/multipartfilearray"))
				.andExpect(status().isFound());
	}

	@Test
	public void multipartRequestWithFileList() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart1 = new MockMultipartFile("file", "orig", null, fileContent);
		MockMultipartFile filePart2 = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfilelist").file(filePart1).file(filePart2).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithFileListNotPresent() throws Exception {
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfilelist"))
				.andExpect(status().isFound());
	}

	@Test
	public void multipartRequestWithFileListNoMultipart() throws Exception {
		standaloneSetup(new MultipartController()).build()
				.perform(post("/multipartfilelist"))
				.andExpect(status().isFound());
	}

	@Test
	public void multipartRequestWithOptionalFile() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfile").file(filePart).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithOptionalFileNotPresent() throws Exception {
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfile").file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attributeDoesNotExist("fileContent"))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithOptionalFileArray() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart1 = new MockMultipartFile("file", "orig", null, fileContent);
		MockMultipartFile filePart2 = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfilearray").file(filePart1).file(filePart2).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithOptionalFileArrayNotPresent() throws Exception {
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfilearray").file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attributeDoesNotExist("fileContent"))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithOptionalFileList() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile filePart1 = new MockMultipartFile("file", "orig", null, fileContent);
		MockMultipartFile filePart2 = new MockMultipartFile("file", "orig", null, fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfilelist").file(filePart1).file(filePart2).file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test
	public void multipartRequestWithOptionalFileListNotPresent() throws Exception {
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/optionalfilelist").file(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attributeDoesNotExist("fileContent"))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	// Same endpoint exercised with raw servlet Parts instead of MultipartFiles
	@Test
	public void multipartRequestWithServletParts() throws Exception {
		byte[] fileContent = "bar".getBytes(StandardCharsets.UTF_8);
		MockPart filePart = new MockPart("file", "orig", fileContent);
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockPart jsonPart = new MockPart("json", json);
		jsonPart.getHeaders().setContentType(MediaType.APPLICATION_JSON);
		standaloneSetup(new MultipartController()).build()
				.perform(multipart("/multipartfile").part(filePart).part(jsonPart))
				.andExpect(status().isFound())
				.andExpect(model().attribute("fileContent", fileContent))
				.andExpect(model().attribute("jsonContent", Collections.singletonMap("name", "yeeeah")));
	}

	@Test // SPR-13317
	public void multipartRequestWrapped() throws Exception {
		byte[] json = "{\"name\":\"yeeeah\"}".getBytes(StandardCharsets.UTF_8);
		MockMultipartFile jsonPart = new MockMultipartFile("json", "json", "application/json", json);
		Filter filter = new RequestWrappingFilter();
		MockMvc mockMvc = standaloneSetup(new MultipartController()).addFilter(filter).build();
		Map<String, String> jsonMap = Collections.singletonMap("name", "yeeeah");
		mockMvc.perform(multipart("/json").file(jsonPart)).andExpect(model().attribute("json", jsonMap));
	}

	/**
	 * Test fixture: each handler copies what it received into the model so the tests
	 * above can assert on "fileContent"/"jsonContent", then redirects.
	 */
	@Controller
	private static class MultipartController {

		@RequestMapping(value = "/multipartfile", method = RequestMethod.POST)
		public String processMultipartFile(@RequestParam(required = false) MultipartFile file,
				@RequestPart(required = false) Map<String, String> json, Model model) throws IOException {
			if (file != null) {
				model.addAttribute("fileContent", file.getBytes());
			}
			if (json != null) {
				model.addAttribute("jsonContent", json);
			}
			return "redirect:/index";
		}

		@RequestMapping(value = "/multipartfilearray", method = RequestMethod.POST)
		public String processMultipartFileArray(@RequestParam(required = false) MultipartFile[] file,
				@RequestPart(required = false) Map<String, String> json, Model model) throws IOException {
			if (file != null && file.length > 0) {
				// Tests always send the same bytes twice; verify both entries match
				byte[] content = file[0].getBytes();
				assertThat(file[1].getBytes()).isEqualTo(content);
				model.addAttribute("fileContent", content);
			}
			if (json != null) {
				model.addAttribute("jsonContent", json);
			}
			return "redirect:/index";
		}

		@RequestMapping(value = "/multipartfilelist", method = RequestMethod.POST)
		public String processMultipartFileList(@RequestParam(required = false) List<MultipartFile> file,
				@RequestPart(required = false) Map<String, String> json, Model model) throws IOException {
			if (file != null && !file.isEmpty()) {
				byte[] content = file.get(0).getBytes();
				assertThat(file.get(1).getBytes()).isEqualTo(content);
				model.addAttribute("fileContent", content);
			}
			if (json != null) {
				model.addAttribute("jsonContent", json);
			}
			return "redirect:/index";
		}

		@RequestMapping(value = "/optionalfile", method = RequestMethod.POST)
		public String processOptionalFile(@RequestParam Optional<MultipartFile> file,
				@RequestPart Map<String, String> json, Model model) throws IOException {
			if (file.isPresent()) {
				model.addAttribute("fileContent", file.get().getBytes());
			}
			model.addAttribute("jsonContent", json);
			return "redirect:/index";
		}

		@RequestMapping(value = "/optionalfilearray", method = RequestMethod.POST)
		public String processOptionalFileArray(@RequestParam Optional<MultipartFile[]> file,
				@RequestPart Map<String, String> json, Model model) throws IOException {
			if (file.isPresent()) {
				byte[] content = file.get()[0].getBytes();
				assertThat(file.get()[1].getBytes()).isEqualTo(content);
				model.addAttribute("fileContent", content);
			}
			model.addAttribute("jsonContent", json);
			return "redirect:/index";
		}

		@RequestMapping(value = "/optionalfilelist", method = RequestMethod.POST)
		public String processOptionalFileList(@RequestParam Optional<List<MultipartFile>> file,
				@RequestPart Map<String, String> json, Model model) throws IOException {
			if (file.isPresent()) {
				byte[] content = file.get().get(0).getBytes();
				assertThat(file.get().get(1).getBytes()).isEqualTo(content);
				model.addAttribute("fileContent", content);
			}
			model.addAttribute("jsonContent", json);
			return "redirect:/index";
		}

		@RequestMapping(value = "/part", method = RequestMethod.POST)
		public String processPart(@RequestParam Part part,
				@RequestPart Map<String, String> json, Model model) throws IOException {
			model.addAttribute("fileContent", part.getInputStream());
			model.addAttribute("jsonContent", json);
			return "redirect:/index";
		}

		@RequestMapping(value = "/json", method = RequestMethod.POST)
		public String processMultipart(@RequestPart Map<String, String> json, Model model) {
			model.addAttribute("json", json);
			return "redirect:/index";
		}
	}

	// Wraps every request to verify multipart resolution still works behind a wrapper (SPR-13317)
	private static class RequestWrappingFilter extends OncePerRequestFilter {

		@Override
		protected void doFilterInternal(HttpServletRequest request, HttpServletResponse response,
				FilterChain filterChain) throws IOException, ServletException {
			request = new HttpServletRequestWrapper(request);
			filterChain.doFilter(request, response);
		}
	}
}
| |
package com.jamesloyd.foldergenutility.Forms;
import com.jamesloyd.foldergenutility.AbstractFolder;
import com.jamesloyd.foldergenutility.SimpleFolderFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
/**
* Copyright (c) 2014 James Loyd
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
public class SimpleFolderForm extends JFrame implements ActionListener , ItemListener
{
    // NOTE(review): the form extends JFrame but displays the separate 'jFrame'
    // field below; the inherited frame appears unused — confirm.
    private JTextField textFieldNumberOfFolders; // folder count; added hidden (see addComponentsToPane)
    private JCheckBox checkboxForNumber;         // "set the number of folders" toggle
    private JButton submitButton;                // triggers submission (handled in actionPerformed)
    private JLabel labelForFolderLocation;
    private JLabel labelFolderLocation;
    private JLabel labelForMultipleFoldersName;
    private JTextField textForFolderName;        // base folder name entered by the user
    private JLabel labelForFolderName;
    private JLabel labelForMultipleFoldersIncrement;
    private JToolBar toolBar;
    private JButton selectFolder;                // presumably opens 'fileChooser' — handler not visible here
    private JTextArea textArea;                  // shows the current root directory (read-only)
    private JScrollPane scrollpane;
    private JButton clearButton;
    // Package-private state shared with the event handlers
    String placeholder = null;
    String startpoint = null;
    String numberOfFiles = null;
    JFrame jFrame = new JFrame("FolderGenUtility"); // the window actually shown by createShowGUI()
    String path = "";                               // textArea content captured at startup
    JFileChooser fileChooser = new JFileChooser();
public SimpleFolderForm()
{
this.toolBar = new JToolBar();
this.labelFolderLocation = new JLabel();
this.textFieldNumberOfFolders = new JTextField();
this.checkboxForNumber = new JCheckBox();
this.submitButton = new JButton();
this.labelForFolderLocation = new JLabel();
this.labelForMultipleFoldersIncrement = new JLabel();
this.textForFolderName = new JTextField();
this.labelForFolderName = new JLabel();
this.labelForMultipleFoldersName = new JLabel();
this.selectFolder = new JButton();
this.textArea = new JTextArea();
this.scrollpane = new JScrollPane(this.textArea,JScrollPane.VERTICAL_SCROLLBAR_NEVER,JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS);
this.clearButton = new JButton();
}
    /**
     * Lays the controls out top-to-bottom in a vertical BoxLayout; the order of
     * these calls is the visual order of the components.
     */
    public void addComponentsToPane(Container pane)
    {
        pane.setLayout(new BoxLayout(pane, BoxLayout.Y_AXIS));
        addAToolbar("yo", pane, toolBar);
        addALabel("<html><p>Please insert a folder location</p></html>", pane, true, labelForFolderLocation);
        addAFileChooser(pane,selectFolder);
        addALabel("Current Root Directory Selected:" ,pane,true, labelFolderLocation);
        addAScrollPane(pane, scrollpane);
        addACheckBox(pane, checkboxForNumber);
        // The folder-count label/field start hidden (show=false); presumably
        // toggled by the checkbox — verify in itemStateChanged
        addALabel("<html><p>Please insert the number total</p></html>",pane,false,labelForMultipleFoldersName);
        addATextBox(pane, false,textFieldNumberOfFolders);
        addALabel("<html><p>Please insert the folder name</p></html>", pane, true, labelForFolderName);
        addATextBox(pane,true,textForFolderName);
        addAbutton("Submit Button", pane, submitButton);
        addAbutton("Clear",pane, clearButton);
    }
private void addAScrollPane(Container container, JScrollPane scrollPane)
{
scrollPane.setVisible(true);
scrollPane.setMaximumSize(new Dimension(290, 45));
container.add(scrollPane);
}
private void addAFileChooser(Container container, JButton selectFolder)
{
selectFolder.setText("Select a Folder");
selectFolder.setAlignmentX(Component.CENTER_ALIGNMENT);
selectFolder.setMaximumSize(new Dimension(290,20));
selectFolder.addActionListener(this);
container.add(selectFolder);
}
/**
 * Configures a non-floatable toolbar holding a single button and appends it
 * to the container.
 *
 * Fix: the {@code text} parameter was previously ignored entirely, so the
 * toolbar button rendered with no label. It is now applied to the button.
 *
 * @param text      label for the toolbar's button
 * @param container container receiving the toolbar
 * @param toolBar   toolbar instance to configure and add
 */
private void addAToolbar(String text, Container container, JToolBar toolBar)
{
    JButton button = new JButton();
    // previously unused parameter — give the button its label
    button.setText(text);
    toolBar.setAlignmentX(Component.CENTER_ALIGNMENT);
    toolBar.setMaximumSize(new Dimension(290, 30));
    toolBar.add(button);
    toolBar.setFloatable(false);
    container.add(toolBar);
}
/**
 * Labels a button, wires it to this form's action handler, and appends it
 * to the container at the standard 290x20 size.
 *
 * @param text   button label
 * @param parent container receiving the button
 * @param target button to configure and add
 */
private void addAbutton(String text, Container parent, JButton target)
{
    target.setText(text);
    target.setMaximumSize(new Dimension(290, 20));
    target.setAlignmentX(Component.CENTER_ALIGNMENT);
    target.addActionListener(this);
    parent.add(target);
}
/**
 * Sets a label's text and initial visibility, then appends it to the
 * container at the standard 290x30 size.
 *
 * @param text   label text (may contain HTML)
 * @param parent container receiving the label
 * @param show   whether the label is initially visible
 * @param target label to configure and add
 */
private void addALabel(String text, Container parent, boolean show, JLabel target)
{
    target.setText(text);
    target.setVisible(show);
    target.setMaximumSize(new Dimension(290, 30));
    target.setAlignmentX(Component.CENTER_ALIGNMENT);
    parent.add(target);
}
/**
 * Appends a text field to the container at the standard 290x20 size with
 * the requested initial visibility.
 *
 * @param parent container receiving the field
 * @param show   whether the field is initially visible
 * @param field  text field to configure and add
 */
private void addATextBox(Container parent, boolean show, JTextField field)
{
    field.setVisible(show);
    field.setMaximumSize(new Dimension(290, 20));
    field.setAlignmentX(Component.CENTER_ALIGNMENT);
    parent.add(field);
}
/**
 * Configures the "set number of folders" checkbox, routes its state changes
 * to this form's itemStateChanged handler, and appends it to the container.
 *
 * @param parent container receiving the checkbox
 * @param box    checkbox to configure and add
 */
public void addACheckBox(Container parent, JCheckBox box)
{
    box.setText("Click me to set the number of folders");
    box.setMaximumSize(new Dimension(290, 20));
    box.setAlignmentX(Component.CENTER_ALIGNMENT);
    box.addItemListener(this);
    parent.add(box);
}
/**
 * Assembles the frame, shows it, and seeds the form state: the text area
 * displays the chooser's current directory and the folder count field is
 * cleared. Call order matters — components must be added before pack().
 */
public void createShowGUI()
{
jFrame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
addComponentsToPane(jFrame.getContentPane());
jFrame.pack();
// fixed window size; resizing is disabled below
jFrame.setSize(300, 300);
jFrame.setVisible(true);
jFrame.setMaximizedBounds(new Rectangle(300, 290));
// start the chooser in the working directory and mirror it in the text area
fileChooser.setCurrentDirectory(new File("."));
textArea.setEditable(false);
textArea.setText(fileChooser.getCurrentDirectory().toString());
jFrame.setResizable(false);
// presumably `path` caches the selected root directory as a String — TODO confirm field type
path = textArea.getText();
textFieldNumberOfFolders.setText(null);
}
/**
 * Handles clicks on the submit, select-folder and clear buttons.
 *
 * Fixes over the previous version:
 * - the empty-folder-name error dialog now shows a message about the folder
 *   name (it previously said "Please choose a root location");
 * - the folder-chooser branch checks the showOpenDialog() return value and
 *   null-guards getSelectedFile(), so cancelling the dialog no longer
 *   throws a NullPointerException;
 * - the manual fileChooser.approveSelection() call was removed — it is a
 *   UI callback fired by the chooser itself, not client API;
 * - the stray System.out.println debug output was removed.
 *
 * @param e event identifying which button was pressed
 */
@Override
public void actionPerformed(ActionEvent e)
{
    if (e.getSource() == submitButton)
    {
        startpoint = null;
        if (textForFolderName.getText().equals(""))
        {
            // a folder name is mandatory; refuse to submit without one
            JOptionPane.showMessageDialog(null, "Please insert a folder name", "ERROR", JOptionPane.ERROR_MESSAGE);
        }
        else
        {
            // an empty count means "create a single folder"
            if (textFieldNumberOfFolders.getText().equals(""))
            {
                numberOfFiles = null;
            }
            else
            {
                numberOfFiles = textFieldNumberOfFolders.getText();
            }
            AbstractFolder simpleFolderBuilder = SimpleFolderFactory.createFolders(path.toString(), textForFolderName.getText().toString(), null, numberOfFiles, null);
            simpleFolderBuilder.generateFolders();
        }
    }
    if (e.getSource() == selectFolder)
    {
        fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
        fileChooser.setCurrentDirectory(new File("."));
        // only act when the user confirmed a selection
        int choice = fileChooser.showOpenDialog(this);
        if (choice == JFileChooser.APPROVE_OPTION && fileChooser.getSelectedFile() != null)
        {
            path = fileChooser.getSelectedFile().getAbsolutePath();
            textArea.setText(path);
            textArea.setMaximumSize(new Dimension(259, 20));
            jFrame.invalidate();
            jFrame.validate();
        }
    }
    if (e.getSource() == clearButton)
    {
        // reset the form back to its initial state
        textArea.setText(fileChooser.getCurrentDirectory().toString());
        jFrame.setSize(new Dimension(300, 290));
        textFieldNumberOfFolders.setText("");
        textForFolderName.setText("");
        jFrame.invalidate();
        jFrame.validate();
    }
}
/**
 * Intentionally empty. No caller in this file uses it; the folder-selection
 * dialog is opened directly from actionPerformed().
 */
public void openFileDialog()
{
}
/**
 * Toggles the "number of folders" field and its label when the checkbox is
 * (de)selected, growing or shrinking the frame to fit.
 *
 * @param e checkbox state-change event
 */
@Override
public void itemStateChanged(ItemEvent e)
{
    boolean showCount = e.getStateChange() == ItemEvent.SELECTED;
    // reveal/hide the count widgets and resize the frame accordingly
    textFieldNumberOfFolders.setVisible(showCount);
    labelForMultipleFoldersName.setVisible(showCount);
    jFrame.setSize(300, showCount ? 350 : 300);
    jFrame.invalidate();
    jFrame.validate();
}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.s3.internal;
import java.util.HashMap;
import java.util.Map;
import com.amazonaws.AmazonServiceException.ErrorType;
import com.amazonaws.services.s3.model.AmazonS3Exception;
/**
 * Builder class that builds the <code>AmazonS3Exception</code>
 */
public class AmazonS3ExceptionBuilder {
    /**
     * The unique AWS identifier for the service request the caller made. The
     * AWS request ID can uniquely identify the AWS request, and is used for
     * reporting an error to AWS support team.
     */
    private String requestId;
    /**
     * The AWS error code represented by this exception (ex:
     * InvalidParameterValue).
     */
    private String errorCode;
    /**
     * The error message as returned by the service.
     */
    private String errorMessage;
    /** The HTTP status code that was returned with this error */
    private int statusCode;
    /**
     * An S3 specific request ID that provides additional debugging information.
     */
    private String extendedRequestId;
    /**
     * An accelerate mode specific ID sent from CloudFront that provides additional debugging information.
     */
    private String cloudFrontId;
    /**
     * Additional information on the exception.
     */
    private Map<String, String> additionalDetails;
    /**
     * The error XML received in the HTTP Response, or null if the exception
     * is constructed from the headers.
     */
    private String errorResponseXml;

    /**
     * Returns the AWS request ID that uniquely identifies the service request
     * the caller made.
     *
     * @return The AWS request ID that uniquely identifies the service request
     *         the caller made.
     */
    public String getRequestId() {
        return requestId;
    }

    /**
     * Sets the AWS requestId for this exception.
     *
     * @param requestId
     *            The unique identifier for the service request the caller made.
     */
    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /**
     * Sets the AWS error code represented by this exception.
     *
     * @param errorCode
     *            The AWS error code represented by this exception.
     */
    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }

    /**
     * Returns the AWS error code represented by this exception.
     *
     * @return The AWS error code represented by this exception.
     */
    public String getErrorCode() {
        return errorCode;
    }

    /**
     * Returns the human-readable error message provided by the service
     */
    public String getErrorMessage() {
        return errorMessage;
    }

    /**
     * Sets the human-readable error message provided by the service
     */
    public void setErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
    }

    /**
     * Sets the HTTP status code that was returned with this service exception.
     *
     * @param statusCode
     *            The HTTP status code that was returned with this service
     *            exception.
     */
    public void setStatusCode(int statusCode) {
        this.statusCode = statusCode;
    }

    /**
     * Returns the HTTP status code that was returned with this service
     * exception.
     *
     * @return The HTTP status code that was returned with this service
     *         exception.
     */
    public int getStatusCode() {
        return statusCode;
    }

    /**
     * Gets Amazon S3's extended request ID. This ID is required debugging
     * information in the case the user needs to contact Amazon about an issue
     * where Amazon S3 is incorrectly handling a request.
     *
     * @return Amazon S3's extended request ID.
     *
     * @see AmazonS3Exception#setExtendedRequestId(String)
     * @see AmazonS3Exception#getExtendedRequestId()
     */
    public String getExtendedRequestId() {
        return extendedRequestId;
    }

    /**
     * Sets Amazon S3's extended request ID.
     *
     * @param extendedRequestId
     *            S3's extended request ID.
     *
     * @see AmazonS3Exception#setExtendedRequestId(String)
     * @see AmazonS3Exception#getExtendedRequestId()
     */
    public void setExtendedRequestId(String extendedRequestId) {
        this.extendedRequestId = extendedRequestId;
    }

    /**
     * Gets Amazon S3's CloudFront ID when the request is performed in the accelerate mode.
     *
     * @return Amazon S3's CloudFront ID when in accelerate mode.
     */
    public String getCloudFrontId() {
        return cloudFrontId;
    }

    /**
     * Sets Amazon S3's CloudFront ID when the request is performed in the accelerate mode.
     *
     * @param cloudFrontId
     *            Amazon S3's CloudFront ID.
     */
    public void setCloudFrontId(String cloudFrontId) {
        this.cloudFrontId = cloudFrontId;
    }

    /**
     * Returns any additional information retrieved in the error response.
     */
    public Map<String, String> getAdditionalDetails() {
        return additionalDetails;
    }

    /**
     * Sets additional information about the response.
     */
    public void setAdditionalDetails(Map<String, String> additionalDetails) {
        this.additionalDetails = additionalDetails;
    }

    /**
     * Adds an entry to the additional information map. Blank details are
     * ignored; if the key already has a non-blank value, the new detail is
     * appended to it with a "-" separator.
     *
     * @param key
     *            map key to add or extend
     * @param detail
     *            detail text; ignored when null or blank
     */
    public void addAdditionalDetail(String key, String detail) {
        if (detail == null || detail.trim().isEmpty()) {
            return;
        }
        if (this.additionalDetails == null) {
            this.additionalDetails = new HashMap<String, String>();
        }
        String additionalContent = this.additionalDetails.get(key);
        if (additionalContent != null && !additionalContent.trim().isEmpty()) {
            detail = additionalContent + "-" + detail;
        }
        // detail is guaranteed non-empty here (blank input was rejected above,
        // and appending only lengthens it), so no further emptiness check is
        // needed before storing
        additionalDetails.put(key, detail);
    }

    /**
     * Returns the original error response XML received from Amazon S3
     */
    public String getErrorResponseXml() {
        return errorResponseXml;
    }

    /**
     * Sets the error response XML received from Amazon S3
     */
    public void setErrorResponseXml(String errorResponseXml) {
        this.errorResponseXml = errorResponseXml;
    }

    /**
     * Creates a new AmazonS3Exception object with the values set.
     */
    public AmazonS3Exception build() {
        AmazonS3Exception s3Exception = errorResponseXml == null ? new AmazonS3Exception(
                errorMessage) : new AmazonS3Exception(errorMessage,
                errorResponseXml);
        s3Exception.setErrorCode(errorCode);
        s3Exception.setExtendedRequestId(extendedRequestId);
        s3Exception.setStatusCode(statusCode);
        s3Exception.setRequestId(requestId);
        s3Exception.setCloudFrontId(cloudFrontId);
        s3Exception.setAdditionalDetails(additionalDetails);
        s3Exception.setErrorType(errorTypeOf(statusCode));
        return s3Exception;
    }

    /**
     * Returns the AWS error type information by looking at the HTTP status code
     * in the error response. S3 error responses don't explicitly declare a
     * sender or client fault like other AWS services, so we have to use the
     * HTTP status code to infer this information.
     *
     * @param statusCode
     *            The HTTP status code from the error response, used to
     *            determine the right error type to set.
     */
    private ErrorType errorTypeOf(int statusCode) {
        // 5xx is a service-side fault; everything else is treated as a client fault
        return statusCode >= 500 ? ErrorType.Service : ErrorType.Client;
    }
}
| |
/*
* RED5 Open Source Media Server - https://github.com/Red5/
*
* Copyright 2006-2016 by respective authors (see below). All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.red5.server.stream;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.ref.WeakReference;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.StandardMBean;
import org.apache.commons.lang3.StringUtils;
import org.apache.mina.core.buffer.IoBuffer;
import org.red5.codec.IAudioStreamCodec;
import org.red5.codec.IStreamCodecInfo;
import org.red5.codec.IVideoStreamCodec;
import org.red5.codec.StreamCodecInfo;
import org.red5.server.api.IConnection;
import org.red5.server.api.Red5;
import org.red5.server.api.event.IEvent;
import org.red5.server.api.event.IEventDispatcher;
import org.red5.server.api.event.IEventListener;
import org.red5.server.api.scope.IScope;
import org.red5.server.api.statistics.IClientBroadcastStreamStatistics;
import org.red5.server.api.statistics.support.StatisticsCounter;
import org.red5.server.api.stream.IClientBroadcastStream;
import org.red5.server.api.stream.IStreamAwareScopeHandler;
import org.red5.server.api.stream.IStreamCapableConnection;
import org.red5.server.api.stream.IStreamListener;
import org.red5.server.api.stream.IStreamPacket;
import org.red5.server.jmx.mxbeans.ClientBroadcastStreamMXBean;
import org.red5.server.messaging.IConsumer;
import org.red5.server.messaging.IFilter;
import org.red5.server.messaging.IMessage;
import org.red5.server.messaging.IMessageComponent;
import org.red5.server.messaging.IMessageOutput;
import org.red5.server.messaging.IPipe;
import org.red5.server.messaging.IPipeConnectionListener;
import org.red5.server.messaging.IProvider;
import org.red5.server.messaging.IPushableConsumer;
import org.red5.server.messaging.OOBControlMessage;
import org.red5.server.messaging.PipeConnectionEvent;
import org.red5.server.net.rtmp.event.AudioData;
import org.red5.server.net.rtmp.event.IRTMPEvent;
import org.red5.server.net.rtmp.event.Invoke;
import org.red5.server.net.rtmp.event.Notify;
import org.red5.server.net.rtmp.event.VideoData;
import org.red5.server.net.rtmp.status.Status;
import org.red5.server.net.rtmp.status.StatusCodes;
import org.red5.server.stream.message.RTMPMessage;
import org.red5.server.stream.message.StatusMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jmx.export.annotation.ManagedResource;
/**
* Represents live stream broadcasted from client. As Flash Media Server, Red5 supports recording mode for live streams, that is, broadcasted stream has broadcast mode. It can be either "live" or "record" and latter causes server-side application to record broadcasted stream.
*
* Note that recorded streams are recorded as FLV files.
*
* This type of stream uses two different pipes for live streaming and recording.
*
* @author The Red5 Project
* @author Steven Gong
* @author Paul Gregoire (mondain@gmail.com)
* @author Vladimir Hmelyoff (vlhm@splitmedialabs.com)
*/
@ManagedResource(objectName = "org.red5.server:type=ClientBroadcastStream", description = "ClientBroadcastStream")
public class ClientBroadcastStream extends AbstractClientStream implements IClientBroadcastStream, IFilter, IPushableConsumer, IPipeConnectionListener, IEventDispatcher, IClientBroadcastStreamStatistics, ClientBroadcastStreamMXBean {
private static final Logger log = LoggerFactory.getLogger(ClientBroadcastStream.class);
/**
* Whether or not to automatically record the associated stream.
*/
protected boolean automaticRecording;
/**
* Total number of bytes received.
*/
protected long bytesReceived;
/**
* Is there need to check video codec?
*/
protected boolean checkVideoCodec = false;
/**
* Is there need to check audio codec?
*/
protected boolean checkAudioCodec = false;
/**
* Data is sent by chunks, each of them has size
*/
protected int chunkSize;
/**
* Is this stream still active?
*/
protected volatile boolean closed;
/**
* Output endpoint that providers use
*/
protected transient IMessageOutput connMsgOut;
/**
* Stores timestamp of first packet
*/
protected long firstPacketTime = -1;
/**
* Pipe for live streaming
*/
protected transient IPipe livePipe;
/**
* Stream published name
*/
protected String publishedName;
/**
* Streaming parameters
*/
protected Map<String, String> parameters;
/**
* Is there need to send start notification?
*/
protected boolean sendStartNotification = true;
/**
* Stores statistics about subscribers.
*/
private transient StatisticsCounter subscriberStats = new StatisticsCounter();
/**
* Listeners to get notified about received packets.
*/
protected transient Set<IStreamListener> listeners = new CopyOnWriteArraySet<IStreamListener>();
/**
* Recording listener
*/
private transient WeakReference<IRecordingListener> recordingListener;
protected long latestTimeStamp = -1;
/**
* Whether or not to register with JMX.
*/
private boolean registerJMX = true;
/**
 * Forwards the event's source to sendStartNotifications(), which emits the
 * publish/record start callbacks exactly once per stream.
 *
 * @param event
 *            Event whose source identifies the publishing connection
 */
private void checkSendNotifications(IEvent event) {
    sendStartNotifications(event.getSource());
}
/**
 * Closes stream, unsubscribes provides, sends stoppage notifications and broadcast close notification.
 *
 * Fix: the recording listener is held via a WeakReference, so
 * recordingListener.get() can return null once the listener has been
 * garbage collected; the previous code dereferenced it unconditionally and
 * could throw a NullPointerException during close.
 */
public void close() {
    log.debug("Stream close: {}", publishedName);
    if (closed) {
        // already closed; closing twice would re-send notifications
        log.debug("{} already closed", publishedName);
        return;
    }
    closed = true;
    if (livePipe != null) {
        livePipe.unsubscribe((IProvider) this);
    }
    // if we have a recording listener, inform that this stream is done
    if (recordingListener != null) {
        sendRecordStopNotify();
        notifyRecordingStop();
        // inform the listener to finish and close; guard against the weak
        // reference having been cleared by the GC
        IRecordingListener listener = recordingListener.get();
        if (listener != null) {
            listener.stop();
        }
    }
    sendPublishStopNotify();
    // TODO: can we send the client something to make sure he stops sending data?
    if (connMsgOut != null) {
        connMsgOut.unsubscribe(this);
    }
    notifyBroadcastClose();
    // clear the listener after all the notifications have been sent
    if (recordingListener != null) {
        recordingListener.clear();
    }
    // clear listeners
    if (!listeners.isEmpty()) {
        listeners.clear();
    }
    // deregister with jmx
    unregisterJMX();
}
/**
 * Dispatches an incoming event: updates byte counters and codec info,
 * captures stream metadata, pushes the packet into the live pipe, and
 * notifies registered stream listeners. Only STREAM_CONTROL / STREAM_DATA
 * RTMP events are processed; everything else is ignored. The statement
 * order here is load-bearing (codec detection must see the buffer before
 * listeners do), so treat reordering with care.
 *
 * @param event
 *            Event to dispatch
 */
public void dispatchEvent(IEvent event) {
if (event instanceof IRTMPEvent && !closed) {
switch (event.getType()) {
case STREAM_CONTROL:
case STREAM_DATA:
// create the event
IRTMPEvent rtmpEvent;
try {
rtmpEvent = (IRTMPEvent) event;
} catch (ClassCastException e) {
log.error("Class cast exception in event dispatch", e);
return;
}
// -1 marks "no timestamp seen"; updated per event type below
int eventTime = -1;
if (log.isTraceEnabled()) {
// If this is first packet save its timestamp; expect it is
// absolute? no matter: it's never used!
if (firstPacketTime == -1) {
firstPacketTime = rtmpEvent.getTimestamp();
log.trace(String.format("CBS=@%08x: rtmpEvent=%s creation=%s firstPacketTime=%d", System.identityHashCode(this), rtmpEvent.getClass().getSimpleName(), creationTime, firstPacketTime));
} else {
log.trace(String.format("CBS=@%08x: rtmpEvent=%s creation=%s firstPacketTime=%d timestamp=%d", System.identityHashCode(this), rtmpEvent.getClass().getSimpleName(), creationTime, firstPacketTime, rtmpEvent.getTimestamp()));
}
}
//get the buffer only once per call
IoBuffer buf = null;
if (rtmpEvent instanceof IStreamData && (buf = ((IStreamData<?>) rtmpEvent).getData()) != null) {
bytesReceived += buf.limit();
}
// get stream codec
IStreamCodecInfo codecInfo = getCodecInfo();
StreamCodecInfo info = null;
if (codecInfo instanceof StreamCodecInfo) {
info = (StreamCodecInfo) codecInfo;
}
//log.trace("Stream codec info: {}", info);
if (rtmpEvent instanceof AudioData) {
IAudioStreamCodec audioStreamCodec = null;
if (checkAudioCodec) {
// dont try to read codec info from 0 length audio packets
if (buf.limit() > 0) {
audioStreamCodec = AudioCodecFactory.getAudioCodec(buf);
if (info != null) {
info.setAudioCodec(audioStreamCodec);
}
// codec detected once; skip detection for subsequent packets
checkAudioCodec = false;
}
} else if (codecInfo != null) {
audioStreamCodec = codecInfo.getAudioCodec();
}
if (audioStreamCodec != null) {
audioStreamCodec.addData(buf);
}
if (info != null) {
info.setHasAudio(true);
}
eventTime = rtmpEvent.getTimestamp();
log.trace("Audio: {}", eventTime);
} else if (rtmpEvent instanceof VideoData) {
IVideoStreamCodec videoStreamCodec = null;
if (checkVideoCodec) {
// NOTE(review): unlike the audio path, buf is not checked for
// 0-length (or null) here before codec detection — confirm
videoStreamCodec = VideoCodecFactory.getVideoCodec(buf);
if (info != null) {
info.setVideoCodec(videoStreamCodec);
}
checkVideoCodec = false;
} else if (codecInfo != null) {
videoStreamCodec = codecInfo.getVideoCodec();
}
if (videoStreamCodec != null) {
videoStreamCodec.addData(buf);
}
if (info != null) {
info.setHasVideo(true);
}
eventTime = rtmpEvent.getTimestamp();
log.trace("Video: {}", eventTime);
} else if (rtmpEvent instanceof Invoke) {
Invoke invokeEvent = (Invoke) rtmpEvent;
log.debug("Invoke action: {}", invokeEvent.getAction());
eventTime = rtmpEvent.getTimestamp();
// event / stream listeners will not be notified of invokes
return;
} else if (rtmpEvent instanceof Notify) {
Notify notifyEvent = (Notify) rtmpEvent;
log.debug("Notify action: {}", notifyEvent.getAction());
if (notifyEvent.getAction() != null && notifyEvent.getAction().equals("onMetaData")) {
// store the metadata
try {
log.debug("Setting metadata");
metaData = notifyEvent.duplicate();
} catch (Exception e) {
log.warn("Metadata could not be duplicated for this stream", e);
}
}
eventTime = rtmpEvent.getTimestamp();
}
// update last event time
if (eventTime > latestTimeStamp) {
latestTimeStamp = eventTime;
}
// notify event listeners
checkSendNotifications(event);
// note this timestamp is set in event/body but not in the associated header
try {
// route to live
if (livePipe != null) {
// create new RTMP message, initialize it and push through pipe
RTMPMessage msg = RTMPMessage.build(rtmpEvent, eventTime);
livePipe.pushMessage(msg);
} else {
log.debug("Live pipe was null, message was not pushed");
}
} catch (IOException err) {
// pushing failed; tear the stream down
stop();
}
// notify listeners about received packet
if (rtmpEvent instanceof IStreamPacket) {
for (IStreamListener listener : getStreamListeners()) {
try {
listener.packetReceived(this, (IStreamPacket) rtmpEvent);
} catch (Exception e) {
log.error("Error while notifying listener {}", listener, e);
if (listener instanceof RecordingListener) {
sendRecordFailedNotify(e.getMessage());
}
}
}
}
break;
default:
// ignored event
log.debug("Ignoring event: {}", event.getType());
}
} else {
log.debug("Event was of wrong type or stream is closed ({})", closed);
}
}
/** {@inheritDoc} */
// current subscriber count as tracked by the statistics counter
public int getActiveSubscribers() {
return subscriberStats.getCurrent();
}
/** {@inheritDoc} */
// running total of payload bytes accumulated in dispatchEvent()
public long getBytesReceived() {
return bytesReceived;
}
/** {@inheritDoc} */
// highest event timestamp seen so far; -1 until the first packet arrives
public int getCurrentTimestamp() {
return (int) latestTimeStamp;
}
/** {@inheritDoc} */
// peak concurrent subscriber count
public int getMaxSubscribers() {
return subscriberStats.getMax();
}
/**
 * Getter for provider
 *
 * @return Provider (this stream acts as its own message provider)
 */
public IProvider getProvider() {
return this;
}
/**
 * Sets the stream's published name and registers the JMX bean for it.
 * A name of "false" is a sentinel used when stopping a stream and is
 * ignored, as are null/empty names.
 *
 * @param name
 *            Name used for publishing. Set at client side when begin to broadcast with NetStream#publish.
 */
public void setPublishedName(String name) {
    log.debug("setPublishedName: {}", name);
    // reject the stop-sentinel "false" as well as blank names
    boolean acceptable = StringUtils.isNotEmpty(name) && !"false".equals(name);
    if (acceptable) {
        this.publishedName = name;
        registerJMX();
    }
}
/**
 * Getter for published name
 *
 * @return Stream published name (null until setPublishedName accepts a name)
 */
public String getPublishedName() {
return publishedName;
}
/** {@inheritDoc} */
// stores the publish parameters as supplied; no defensive copy is made
public void setParameters(Map<String, String> params) {
this.parameters = params;
}
/** {@inheritDoc} */
// returns the same map instance given to setParameters (may be null)
public Map<String, String> getParameters() {
return parameters;
}
/**
 * {@inheritDoc}
 *
 * Fix: the recording listener is held via a WeakReference, so get() can
 * return null after the listener has been garbage collected; the previous
 * code dereferenced it unconditionally and could throw a
 * NullPointerException. Returns null when no recording is active.
 */
public String getSaveFilename() {
    if (recordingListener != null) {
        IRecordingListener listener = recordingListener.get();
        if (listener != null) {
            return listener.getFileName();
        }
    }
    return null;
}
/** {@inheritDoc} */
// the stream is its own statistics view
public IClientBroadcastStreamStatistics getStatistics() {
return this;
}
/** {@inheritDoc} */
// cumulative subscriber count over the stream's lifetime
public int getTotalSubscribers() {
return subscriberStats.getTotal();
}
/**
 * Whether this stream is recorded automatically when it starts.
 *
 * @return the automaticRecording
 */
public boolean isAutomaticRecording() {
return automaticRecording;
}
/**
 * Enables or disables automatic recording of this stream.
 *
 * @param automaticRecording
 *            the automaticRecording to set
 */
public void setAutomaticRecording(boolean automaticRecording) {
this.automaticRecording = automaticRecording;
}
/**
 * Enables or disables JMX registration for this stream.
 *
 * @param registerJMX
 *            the registerJMX to set
 */
public void setRegisterJMX(boolean registerJMX) {
this.registerJMX = registerJMX;
}
/**
 * Notifies the scope's stream-aware handler (if any) that this broadcast
 * has closed. Handler errors are logged but never propagated.
 */
private void notifyBroadcastClose() {
    final IStreamAwareScopeHandler scopeHandler = getStreamAwareHandler();
    if (scopeHandler == null) {
        return;
    }
    try {
        scopeHandler.streamBroadcastClose(this);
    } catch (Throwable t) {
        log.error("Error in notifyBroadcastClose", t);
    }
}
/**
 * Notifies the scope's stream-aware handler (if any) that recording of this
 * stream has stopped. Handler errors are logged but never propagated.
 *
 * Fix: the error log previously said "Error in notifyBroadcastClose" — a
 * copy/paste mistake that misattributed failures to the wrong method.
 */
private void notifyRecordingStop() {
    IStreamAwareScopeHandler handler = getStreamAwareHandler();
    if (handler != null) {
        try {
            handler.streamRecordStop(this);
        } catch (Throwable t) {
            log.error("Error in notifyRecordingStop", t);
        }
    }
}
/**
 * Notifies the scope's stream-aware handler (if any) that this broadcast
 * has started. Handler errors are logged but never propagated.
 */
private void notifyBroadcastStart() {
    IStreamAwareScopeHandler scopeHandler = getStreamAwareHandler();
    if (scopeHandler == null) {
        return;
    }
    try {
        scopeHandler.streamBroadcastStart(this);
    } catch (Throwable t) {
        log.error("Error in notifyBroadcastStart", t);
    }
}
/**
 * Sends an out-of-band control message carrying the current chunk size to
 * the connection consumer. Does nothing unless a positive chunk size is set
 * and a live pipe is attached.
 */
private void notifyChunkSize() {
    if (chunkSize <= 0 || livePipe == null) {
        return;
    }
    OOBControlMessage message = new OOBControlMessage();
    message.setTarget("ConnectionConsumer");
    message.setServiceName("chunkSize");
    if (message.getServiceParamMap() == null) {
        message.setServiceParamMap(new HashMap<String, Object>());
    }
    message.getServiceParamMap().put("chunkSize", chunkSize);
    livePipe.sendOOBControlMessage(getProvider(), message);
}
/**
 * Out-of-band control message handler. Only messages targeted at
 * "ClientBroadcastStream" are processed; of those, only the "chunkSize"
 * service is handled (it updates the chunk size and re-announces it).
 *
 * @param source
 *            OOB message source
 * @param pipe
 *            Pipe that used to send OOB message
 * @param oobCtrlMsg
 *            Out-of-band control message
 */
public void onOOBControlMessage(IMessageComponent source, IPipe pipe, OOBControlMessage oobCtrlMsg) {
    String target = oobCtrlMsg.getTarget();
    if (!"ClientBroadcastStream".equals(target)) {
        log.debug("Unhandled OOB control message to target: {}", target);
        return;
    }
    String serviceName = oobCtrlMsg.getServiceName();
    if ("chunkSize".equals(serviceName)) {
        chunkSize = (Integer) oobCtrlMsg.getServiceParamMap().get("chunkSize");
        // propagate the new chunk size to the live pipe consumers
        notifyChunkSize();
    } else {
        log.debug("Unhandled OOB control message for service: {}", serviceName);
    }
}
/**
 * Pipe connection event handler. Tracks the live pipe reference and keeps
 * the subscriber statistics in sync as providers/consumers attach and
 * detach. The @SuppressWarnings covers the deliberately unused loop
 * variable below.
 *
 * @param event
 *            Pipe connection event
 */
@SuppressWarnings("unused")
public void onPipeConnectionEvent(PipeConnectionEvent event) {
switch (event.getType()) {
case PROVIDER_CONNECT_PUSH:
log.debug("Provider connect");
// only adopt the pipe when we are the provider and it is not a record pipe
if (event.getProvider() == this && event.getSource() != connMsgOut && (event.getParamMap() == null || !event.getParamMap().containsKey("record"))) {
livePipe = (IPipe) event.getSource();
log.debug("Provider: {}", livePipe.getClass().getName());
// count one subscriber per consumer already attached to the pipe;
// the loop variable itself is intentionally unused
for (IConsumer consumer : livePipe.getConsumers()) {
subscriberStats.increment();
}
}
break;
case PROVIDER_DISCONNECT:
log.debug("Provider disconnect");
if (log.isDebugEnabled() && livePipe != null) {
log.debug("Provider: {}", livePipe.getClass().getName());
}
// drop our pipe reference only if the disconnecting pipe is ours
if (livePipe == event.getSource()) {
livePipe = null;
}
break;
case CONSUMER_CONNECT_PUSH:
log.debug("Consumer connect");
IPipe pipe = (IPipe) event.getSource();
if (log.isDebugEnabled() && pipe != null) {
log.debug("Consumer: {}", pipe.getClass().getName());
}
// a new consumer on our live pipe needs to learn the chunk size
if (livePipe == pipe) {
notifyChunkSize();
}
subscriberStats.increment();
break;
case CONSUMER_DISCONNECT:
log.debug("Consumer disconnect: {}", event.getSource().getClass().getName());
subscriberStats.decrement();
break;
default:
}
}
/**
 * Currently not implemented. Required by the IPushableConsumer contract;
 * this stream produces rather than consumes pushed messages.
 *
 * @param pipe
 *            Pipe
 * @param message
 *            Message
 */
public void pushMessage(IPipe pipe, IMessage message) {
// intentionally a no-op
}
/**
 * Save broadcasted stream. Creates and starts a RecordingListener for this
 * stream, forwarding any already-known audio/video decoder configurations
 * to its file consumer first so the recording starts with valid headers.
 * Only one recording listener is permitted at a time via this entry point.
 *
 * @param name
 *            Stream name
 * @param isAppend
 *            Append mode
 * @throws IOException
 *            File could not be created/written to, or the client connection
 *            is gone
 */
public void saveAs(String name, boolean isAppend) throws IOException {
log.debug("SaveAs - name: {} append: {}", name, isAppend);
// get connection to check if client is still streaming
IStreamCapableConnection conn = getConnection();
if (conn == null) {
throw new IOException("Stream is no longer connected");
}
// one recording listener at a time via this entry point
if (recordingListener == null) {
// XXX Paul: Revisit this section to allow for implementation of custom IRecordingListener
//IRecordingListener listener = (IRecordingListener) ScopeUtils.getScopeService(conn.getScope(), IRecordingListener.class, RecordingListener.class, false);
// create a recording listener
IRecordingListener listener = new RecordingListener();
log.debug("Created: {}", listener);
// initialize the listener
if (listener.init(conn, name, isAppend)) {
// get decoder info if it exists for the stream
IStreamCodecInfo codecInfo = getCodecInfo();
log.debug("Codec info: {}", codecInfo);
if (codecInfo instanceof StreamCodecInfo) {
StreamCodecInfo info = (StreamCodecInfo) codecInfo;
IVideoStreamCodec videoCodec = info.getVideoCodec();
log.debug("Video codec: {}", videoCodec);
if (videoCodec != null) {
//check for decoder configuration to send
IoBuffer config = videoCodec.getDecoderConfiguration();
if (config != null) {
log.debug("Decoder configuration is available for {}", videoCodec.getName());
VideoData videoConf = new VideoData(config.asReadOnlyBuffer());
try {
log.debug("Setting decoder configuration for recording");
listener.getFileConsumer().setVideoDecoderConfiguration(videoConf);
} finally {
// release the wrapper even if the consumer call throws
videoConf.release();
}
}
} else {
log.debug("Could not initialize stream output, videoCodec is null.");
}
IAudioStreamCodec audioCodec = info.getAudioCodec();
log.debug("Audio codec: {}", audioCodec);
if (audioCodec != null) {
//check for decoder configuration to send
IoBuffer config = audioCodec.getDecoderConfiguration();
if (config != null) {
log.debug("Decoder configuration is available for {}", audioCodec.getName());
AudioData audioConf = new AudioData(config.asReadOnlyBuffer());
try {
log.debug("Setting decoder configuration for recording");
listener.getFileConsumer().setAudioDecoderConfiguration(audioConf);
} finally {
// release the wrapper even if the consumer call throws
audioConf.release();
}
}
} else {
log.debug("No decoder configuration available, audioCodec is null.");
}
}
// set as primary listener
recordingListener = new WeakReference<IRecordingListener>(listener);
// add as a listener
addStreamListener(listener);
// start the listener thread
listener.start();
} else {
log.warn("Recording listener failed to initialize for stream: {}", name);
}
} else {
log.debug("Recording listener already exists for stream: {} auto record enabled: {}", name, automaticRecording);
}
}
/**
 * Builds and pushes an NS_PUBLISH_START status message identifying this
 * stream to the client.
 */
private void sendPublishStartNotify() {
    Status status = new Status(StatusCodes.NS_PUBLISH_START);
    status.setClientid(getStreamId());
    status.setDetails(getPublishedName());
    StatusMessage message = new StatusMessage();
    message.setBody(status);
    pushMessage(message);
}
/**
 * Builds and pushes an NS_UNPUBLISHED_SUCCESS status message identifying
 * this stream to the client.
 */
private void sendPublishStopNotify() {
    Status status = new Status(StatusCodes.NS_UNPUBLISHED_SUCCESS);
    status.setClientid(getStreamId());
    status.setDetails(getPublishedName());
    StatusMessage message = new StatusMessage();
    message.setBody(status);
    pushMessage(message);
}
/**
 * Builds and pushes an error-level NS_RECORD_FAILED status message carrying
 * the failure reason to the client.
 *
 * @param reason
 *            human-readable description of why recording failed
 */
private void sendRecordFailedNotify(String reason) {
    Status status = new Status(StatusCodes.NS_RECORD_FAILED);
    status.setLevel(Status.ERROR);
    status.setClientid(getStreamId());
    status.setDetails(getPublishedName());
    // note: setDesciption is the (misspelled) Status API method name
    status.setDesciption(reason);
    StatusMessage message = new StatusMessage();
    message.setBody(status);
    pushMessage(message);
}
/**
 * Builds and pushes an NS_RECORD_START status message identifying this
 * stream to the client.
 */
private void sendRecordStartNotify() {
    Status status = new Status(StatusCodes.NS_RECORD_START);
    status.setClientid(getStreamId());
    status.setDetails(getPublishedName());
    StatusMessage message = new StatusMessage();
    message.setBody(status);
    pushMessage(message);
}
/**
 * Builds and pushes an NS_RECORD_STOP status message identifying this
 * stream to the client.
 */
private void sendRecordStopNotify() {
    Status status = new Status(StatusCodes.NS_RECORD_STOP);
    status.setClientid(getStreamId());
    status.setDetails(getPublishedName());
    StatusMessage message = new StatusMessage();
    message.setBody(status);
    pushMessage(message);
}
/**
 * Pushes a status message out to the connection consumer. Push failures are
 * logged rather than propagated; a missing consumer output is only warned
 * about.
 *
 * @param msg
 *            StatusMessage
 */
protected void pushMessage(StatusMessage msg) {
    if (connMsgOut == null) {
        log.warn("Consumer message output is null");
        return;
    }
    try {
        connMsgOut.pushMessage(msg);
    } catch (IOException err) {
        log.error("Error while pushing message: {}", msg, err);
    }
}
/**
 * One-shot start notification sequence, gated by sendStartNotification so it
 * runs once per publish cycle: fires the scope handler's record/publish start
 * callbacks (when the source is a connection with a stream-aware handler),
 * then sends the publish-start status, a record-start status when recording,
 * and finally the broadcast-start event.
 *
 * @param source originator of the triggering event; handler callbacks only
 *            fire when this is an IConnection
 */
private void sendStartNotifications(IEventListener source) {
    if (sendStartNotification) {
        // notify handler that stream starts recording/publishing; flip the flag
        // first so re-entry cannot double-notify
        sendStartNotification = false;
        if (source instanceof IConnection) {
            IScope scope = ((IConnection) source).getScope();
            if (scope.hasHandler()) {
                final Object handler = scope.getHandler();
                if (handler instanceof IStreamAwareScopeHandler) {
                    // NOTE(review): recordingListener.get() is dereferenced without a
                    // null check here (and again below); the reference is clear()-ed in
                    // stopRecording, so a null would NPE — confirm lifecycle ordering
                    if (recordingListener != null && recordingListener.get().isRecording()) {
                        // callback for record start
                        ((IStreamAwareScopeHandler) handler).streamRecordStart(this);
                    } else {
                        // delete any previously recorded versions of this now "live" stream per
                        // http://livedocs.adobe.com/flashmediaserver/3.0/hpdocs/help.html?content=00000186.html
                        try {
                            File file = getRecordFile(scope, publishedName);
                            if (file != null && file.exists()) {
                                if (!file.delete()) {
                                    log.debug("File was not deleted: {}", file.getAbsoluteFile());
                                }
                            }
                        } catch (Exception e) {
                            // best-effort cleanup; a leftover file is not fatal
                            log.warn("Exception removing previously recorded file", e);
                        }
                        // callback for publish start
                        ((IStreamAwareScopeHandler) handler).streamPublishStart(this);
                    }
                }
            }
        }
        // send start notifications to subscribers
        sendPublishStartNotify();
        if (recordingListener != null && recordingListener.get().isRecording()) {
            sendRecordStartNotify();
        }
        notifyBroadcastStart();
    }
}
/**
 * Starts stream, creates pipes, connects.
 * Resets per-publish state (codec detection, timing and byte counters),
 * obtains the consumer output pipe from the scope context's consumer
 * service, and subscribes this stream to it; on success the stream is
 * marked open with a fresh creation time.
 */
public void start() {
    log.info("Stream start: {}", publishedName);
    // re-arm codec detection for the new publish cycle
    checkVideoCodec = true;
    checkAudioCodec = true;
    // reset timing and byte counters
    firstPacketTime = -1;
    latestTimeStamp = -1;
    bytesReceived = 0;
    // look up the consumer service from the scope context and attach our output
    IConsumerService consumerManager = (IConsumerService) getScope().getContext().getBean(IConsumerService.KEY);
    connMsgOut = consumerManager.getConsumerOutput(this);
    if (connMsgOut != null && connMsgOut.subscribe(this, null)) {
        // subscribed: start with empty codec info and mark the stream open
        setCodecInfo(new StreamCodecInfo());
        creationTime = System.currentTimeMillis();
        closed = false;
    } else {
        // no output pipe or subscription rejected; stream stays closed
        log.warn("Subscribe failed");
    }
}
/** {@inheritDoc} */
public void startPublishing() {
    // We send the start messages before the first packet is received.
    // This is required so FME actually starts publishing.
    sendStartNotifications(Red5.getConnectionLocal());
    // force recording if set
    if (automaticRecording) {
        log.debug("Starting automatic recording of {}", publishedName);
        try {
            // second argument false presumably means "do not append" — TODO confirm
            // against the saveAs contract
            saveAs(publishedName, false);
        } catch (Exception e) {
            // automatic recording is best-effort; publishing continues regardless
            log.warn("Start of automatic recording failed", e);
        }
    }
}
/** {@inheritDoc} */
public void stop() {
    log.info("Stream stop: {}", publishedName);
    // stop any active recording first so record-stop notifications go out
    // before the stream itself is closed
    stopRecording();
    close();
}
/**
 * Stops any currently active recording.
 * Sends stop notifications first, then detaches and stops the recording
 * listener, and finally releases the reference so {@link #isRecording()}
 * reports false. No-op when nothing is recording.
 */
public void stopRecording() {
    IRecordingListener listener = null;
    // NOTE(review): recordingListener.get() is assumed non-null whenever the
    // outer reference is non-null; a cleared reference would NPE — confirm
    if (recordingListener != null && (listener = recordingListener.get()).isRecording()) {
        // notify subscribers and handlers before tearing the listener down
        sendRecordStopNotify();
        notifyRecordingStop();
        // remove the listener
        removeStreamListener(listener);
        // stop the recording listener
        listener.stop();
        // clear and null-out the reference
        recordingListener.clear();
        recordingListener = null;
    }
}
/**
 * Returns whether this stream is currently being recorded.
 *
 * @return true when a recording listener is attached and actively recording
 */
public boolean isRecording() {
    // recordingListener.get() may return null (the reference is clear()-ed in
    // stopRecording and may be released at any time), so guard against an NPE
    // instead of dereferencing the result unconditionally
    IRecordingListener listener = (recordingListener != null) ? recordingListener.get() : null;
    return listener != null && listener.isRecording();
}
/** {@inheritDoc} */
public void addStreamListener(IStreamListener listener) {
    // NOTE(review): no null/duplicate check — dedup behavior depends on the
    // concrete collection type of 'listeners'
    listeners.add(listener);
}
/** {@inheritDoc} */
public Collection<IStreamListener> getStreamListeners() {
    // returns the live internal collection, not a defensive copy — callers
    // can mutate it directly
    return listeners;
}
/** {@inheritDoc} */
public void removeStreamListener(IStreamListener listener) {
    // silently ignores listeners that were never registered
    listeners.remove(listener);
}
/**
 * Get the file we'd be recording to based on scope and given name.
 * Delegates path resolution entirely to
 * {@code RecordingListener.getRecordFile} so the location logic lives in
 * one place.
 *
 * @param scope
 *            scope
 * @param name
 *            record name
 * @return file
 */
protected File getRecordFile(IScope scope, String name) {
    return RecordingListener.getRecordFile(scope, name);
}
/**
 * Registers this stream as an MBean with the platform MBean server when JMX
 * registration is enabled. Double registration is tolerated (logged at
 * debug); any other failure is logged as a warning.
 */
protected void registerJMX() {
    if (!registerJMX) {
        return;
    }
    // register with jmx
    MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
    try {
        ObjectName objectName = new ObjectName(String.format("org.red5.server:type=ClientBroadcastStream,scope=%s,publishedName=%s", getScope().getName(), publishedName));
        mbeanServer.registerMBean(new StandardMBean(this, ClientBroadcastStreamMXBean.class, true), objectName);
    } catch (InstanceAlreadyExistsException e) {
        log.debug("Instance already registered", e);
    } catch (Exception e) {
        log.warn("Error on jmx registration", e);
    }
}
/**
 * Removes this stream's MBean from the platform MBean server, provided JMX
 * registration is enabled and a usable published name exists to rebuild the
 * ObjectName from. Failures are logged and swallowed.
 */
protected void unregisterJMX() {
    if (!registerJMX) {
        return;
    }
    // skip when there is no meaningful published name to build the name from
    if (!StringUtils.isNotEmpty(publishedName) || "false".equals(publishedName)) {
        return;
    }
    MBeanServer mbeanServer = ManagementFactory.getPlatformMBeanServer();
    try {
        ObjectName objectName = new ObjectName(String.format("org.red5.server:type=ClientBroadcastStream,scope=%s,publishedName=%s", getScope().getName(), publishedName));
        mbeanServer.unregisterMBean(objectName);
    } catch (Exception e) {
        log.warn("Exception unregistering", e);
    }
}
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.util.Map;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.MobCompactPartitionPolicy;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.exceptions.HBaseException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
/**
* An HColumnDescriptor contains information about a column family such as the
* number of versions, compression settings, etc.
*
* It is used as input when creating a table or adding a column.
*/
@InterfaceAudience.Public
@Deprecated // remove it in 3.0
public class HColumnDescriptor implements ColumnFamilyDescriptor, Comparable<HColumnDescriptor> {
  // Attribute-key constants. Most simply re-export the canonical values from
  // ColumnFamilyDescriptorBuilder so that legacy code compiled against
  // HColumnDescriptor keeps resolving to the same keys; ENCODE_ON_DISK,
  // CACHE_DATA_IN_L1, LENGTH and FOREVER are declared locally as literals.
  public static final String IN_MEMORY_COMPACTION = ColumnFamilyDescriptorBuilder.IN_MEMORY_COMPACTION;
  public static final String COMPRESSION = ColumnFamilyDescriptorBuilder.COMPRESSION;
  public static final String COMPRESSION_COMPACT = ColumnFamilyDescriptorBuilder.COMPRESSION_COMPACT;
  public static final String ENCODE_ON_DISK = "ENCODE_ON_DISK";
  public static final String DATA_BLOCK_ENCODING = ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING;
  public static final String BLOCKCACHE = ColumnFamilyDescriptorBuilder.BLOCKCACHE;
  public static final String CACHE_DATA_ON_WRITE = ColumnFamilyDescriptorBuilder.CACHE_DATA_ON_WRITE;
  public static final String CACHE_INDEX_ON_WRITE = ColumnFamilyDescriptorBuilder.CACHE_INDEX_ON_WRITE;
  public static final String CACHE_BLOOMS_ON_WRITE = ColumnFamilyDescriptorBuilder.CACHE_BLOOMS_ON_WRITE;
  public static final String EVICT_BLOCKS_ON_CLOSE = ColumnFamilyDescriptorBuilder.EVICT_BLOCKS_ON_CLOSE;
  public static final String CACHE_DATA_IN_L1 = "CACHE_DATA_IN_L1";
  public static final String PREFETCH_BLOCKS_ON_OPEN = ColumnFamilyDescriptorBuilder.PREFETCH_BLOCKS_ON_OPEN;
  public static final String BLOCKSIZE = ColumnFamilyDescriptorBuilder.BLOCKSIZE;
  public static final String LENGTH = "LENGTH";
  public static final String TTL = ColumnFamilyDescriptorBuilder.TTL;
  public static final String BLOOMFILTER = ColumnFamilyDescriptorBuilder.BLOOMFILTER;
  public static final String FOREVER = "FOREVER";
  public static final String REPLICATION_SCOPE = ColumnFamilyDescriptorBuilder.REPLICATION_SCOPE;
  public static final byte[] REPLICATION_SCOPE_BYTES = Bytes.toBytes(REPLICATION_SCOPE);
  public static final String MIN_VERSIONS = ColumnFamilyDescriptorBuilder.MIN_VERSIONS;
  public static final String KEEP_DELETED_CELLS = ColumnFamilyDescriptorBuilder.KEEP_DELETED_CELLS;
  public static final String COMPRESS_TAGS = ColumnFamilyDescriptorBuilder.COMPRESS_TAGS;
  public static final String ENCRYPTION = ColumnFamilyDescriptorBuilder.ENCRYPTION;
  public static final String ENCRYPTION_KEY = ColumnFamilyDescriptorBuilder.ENCRYPTION_KEY;
  public static final String IS_MOB = ColumnFamilyDescriptorBuilder.IS_MOB;
  public static final byte[] IS_MOB_BYTES = Bytes.toBytes(IS_MOB);
  public static final String MOB_THRESHOLD = ColumnFamilyDescriptorBuilder.MOB_THRESHOLD;
  public static final byte[] MOB_THRESHOLD_BYTES = Bytes.toBytes(MOB_THRESHOLD);
  public static final long DEFAULT_MOB_THRESHOLD = ColumnFamilyDescriptorBuilder.DEFAULT_MOB_THRESHOLD;
  public static final String MOB_COMPACT_PARTITION_POLICY = ColumnFamilyDescriptorBuilder.MOB_COMPACT_PARTITION_POLICY;
  public static final byte[] MOB_COMPACT_PARTITION_POLICY_BYTES = Bytes.toBytes(MOB_COMPACT_PARTITION_POLICY);
  public static final MobCompactPartitionPolicy DEFAULT_MOB_COMPACT_PARTITION_POLICY
      = ColumnFamilyDescriptorBuilder.DEFAULT_MOB_COMPACT_PARTITION_POLICY;
  public static final String DFS_REPLICATION = ColumnFamilyDescriptorBuilder.DFS_REPLICATION;
  public static final short DEFAULT_DFS_REPLICATION = ColumnFamilyDescriptorBuilder.DEFAULT_DFS_REPLICATION;
  public static final String STORAGE_POLICY = ColumnFamilyDescriptorBuilder.STORAGE_POLICY;
  // Default values, likewise re-exported from the builder where available.
  public static final String DEFAULT_COMPRESSION = ColumnFamilyDescriptorBuilder.DEFAULT_COMPRESSION.name();
  public static final boolean DEFAULT_ENCODE_ON_DISK = true;
  public static final String DEFAULT_DATA_BLOCK_ENCODING = ColumnFamilyDescriptorBuilder.DEFAULT_DATA_BLOCK_ENCODING.name();
  public static final int DEFAULT_VERSIONS = ColumnFamilyDescriptorBuilder.DEFAULT_MAX_VERSIONS;
  public static final int DEFAULT_MIN_VERSIONS = ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS;
  public static final boolean DEFAULT_IN_MEMORY = ColumnFamilyDescriptorBuilder.DEFAULT_IN_MEMORY;
  public static final KeepDeletedCells DEFAULT_KEEP_DELETED = ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED;
  public static final boolean DEFAULT_BLOCKCACHE = ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKCACHE;
  public static final boolean DEFAULT_CACHE_DATA_ON_WRITE = ColumnFamilyDescriptorBuilder.DEFAULT_CACHE_DATA_ON_WRITE;
  public static final boolean DEFAULT_CACHE_DATA_IN_L1 = false;
  public static final boolean DEFAULT_CACHE_INDEX_ON_WRITE = ColumnFamilyDescriptorBuilder.DEFAULT_CACHE_INDEX_ON_WRITE;
  public static final int DEFAULT_BLOCKSIZE = ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKSIZE;
  public static final String DEFAULT_BLOOMFILTER = ColumnFamilyDescriptorBuilder.DEFAULT_BLOOMFILTER.name();
  public static final boolean DEFAULT_CACHE_BLOOMS_ON_WRITE = ColumnFamilyDescriptorBuilder.DEFAULT_CACHE_BLOOMS_ON_WRITE;
  public static final int DEFAULT_TTL = ColumnFamilyDescriptorBuilder.DEFAULT_TTL;
  public static final int DEFAULT_REPLICATION_SCOPE = ColumnFamilyDescriptorBuilder.DEFAULT_REPLICATION_SCOPE;
  public static final boolean DEFAULT_EVICT_BLOCKS_ON_CLOSE = ColumnFamilyDescriptorBuilder.DEFAULT_EVICT_BLOCKS_ON_CLOSE;
  public static final boolean DEFAULT_COMPRESS_TAGS = ColumnFamilyDescriptorBuilder.DEFAULT_COMPRESS_TAGS;
  public static final boolean DEFAULT_PREFETCH_BLOCKS_ON_OPEN = ColumnFamilyDescriptorBuilder.DEFAULT_PREFETCH_BLOCKS_ON_OPEN;
  public static final String NEW_VERSION_BEHAVIOR = ColumnFamilyDescriptorBuilder.NEW_VERSION_BEHAVIOR;
  public static final boolean DEFAULT_NEW_VERSION_BEHAVIOR = ColumnFamilyDescriptorBuilder.DEFAULT_NEW_VERSION_BEHAVIOR;
  // All state lives in the delegate; this class is a thin compatibility facade
  // over ModifyableColumnFamilyDescriptor.
  protected final ModifyableColumnFamilyDescriptor delegatee;
  /**
   * Construct a column descriptor specifying only the family name
   * The other attributes are defaulted.
   *
   * @param familyName Column family name. Must be 'printable' -- digit or
   * letter -- and may not contain a <code>:</code>
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
   * Use {@link ColumnFamilyDescriptorBuilder#of(String)}.
   */
  @Deprecated
  public HColumnDescriptor(final String familyName) {
    this(new ModifyableColumnFamilyDescriptor(Bytes.toBytes(familyName)));
  }
  /**
   * Construct a column descriptor specifying only the family name
   * The other attributes are defaulted.
   *
   * @param familyName Column family name. Must be 'printable' -- digit or
   * letter -- and may not contain a <code>:</code>
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
   * Use {@link ColumnFamilyDescriptorBuilder#of(byte[])}.
   */
  @Deprecated
  public HColumnDescriptor(final byte [] familyName) {
    this(new ModifyableColumnFamilyDescriptor(familyName));
  }
  /**
   * Constructor.
   * Makes a deep copy of the supplied descriptor.
   * Can make a modifiable descriptor from an UnmodifyableHColumnDescriptor.
   *
   * @param desc The descriptor.
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * (<a href="https://issues.apache.org/jira/browse/HBASE-18433">HBASE-18433</a>).
   * Use {@link ColumnFamilyDescriptorBuilder#copy(ColumnFamilyDescriptor)}.
   */
  @Deprecated
  public HColumnDescriptor(HColumnDescriptor desc) {
    this(desc, true);
  }
  /**
   * Copy constructor. When {@code deepClone} is false the new instance shares
   * the delegate with {@code desc} (mutations are visible to both).
   */
  protected HColumnDescriptor(HColumnDescriptor desc, boolean deepClone) {
    this(deepClone ? new ModifyableColumnFamilyDescriptor(desc)
            : desc.delegatee);
  }
  /** Wraps the given delegate without copying it. */
  protected HColumnDescriptor(ModifyableColumnFamilyDescriptor delegate) {
    this.delegatee = delegate;
  }
  /**
   * @return Name of this column family
   */
  @Override
  public byte [] getName() {
    return delegatee.getName();
  }
  /**
   * @return The name string of this column family
   */
  @Override
  public String getNameAsString() {
    return delegatee.getNameAsString();
  }
  /**
   * @param key The key.
   * @return The value.
   */
  @Override
  public byte[] getValue(byte[] key) {
    return delegatee.getValue(key);
  }
  /**
   * @param key The key.
   * @return The value as a string, or null when the key is absent.
   */
  public String getValue(String key) {
    byte[] value = getValue(Bytes.toBytes(key));
    return value == null ? null : Bytes.toString(value);
  }
  /** @return All persisted attribute key/value pairs of this family. */
  @Override
  public Map<Bytes, Bytes> getValues() {
    return delegatee.getValues();
  }
  /**
   * @param key The key.
   * @param value The value.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setValue(byte[] key, byte[] value) {
    getDelegateeForModification().setValue(key, value);
    return this;
  }
  /**
   * @param key Key whose key and value we're to remove from HCD parameters.
   */
  public void remove(final byte [] key) {
    getDelegateeForModification().removeValue(new Bytes(key));
  }
  /**
   * @param key The key.
   * @param value The value.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setValue(String key, String value) {
    getDelegateeForModification().setValue(key, value);
    return this;
  }
  /**
   * @return compression type being used for the column family
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
   * Use {@link #getCompressionType()}.
   */
  @Deprecated
  public Compression.Algorithm getCompression() {
    return getCompressionType();
  }
  /**
   * @return compression type being used for the column family for major compaction
   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
   * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
   * Use {@link #getCompactionCompressionType()}.
   */
  @Deprecated
  public Compression.Algorithm getCompactionCompression() {
    return getCompactionCompressionType();
  }
  /** @return maximum number of cell versions kept for this family */
  @Override
  public int getMaxVersions() {
    return delegatee.getMaxVersions();
  }
  /**
   * @param value maximum number of versions
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setMaxVersions(int value) {
    getDelegateeForModification().setMaxVersions(value);
    return this;
  }
  /**
   * Set minimum and maximum versions to keep
   *
   * @param minVersions minimal number of versions
   * @param maxVersions maximum number of versions
   * @return this (for chained invocation)
   * @throws IllegalArgumentException when minVersions is not positive or
   *           exceeds maxVersions
   */
  public HColumnDescriptor setVersions(int minVersions, int maxVersions) {
    if (minVersions <= 0) {
      // TODO: Allow minVersion and maxVersion of 0 to be the way you say "Keep all versions".
      // Until there is support, consider 0 or < 0 -- a configuration error.
      throw new IllegalArgumentException("Minimum versions must be positive");
    }
    if (maxVersions < minVersions) {
      throw new IllegalArgumentException("Unable to set MaxVersion to " + maxVersions
          + " and set MinVersion to " + minVersions
          + ", as maximum versions must be >= minimum versions.");
    }
    setMinVersions(minVersions);
    setMaxVersions(maxVersions);
    return this;
  }
  /** @return block size, in bytes, used when writing this family's hfiles */
  @Override
  public int getBlocksize() {
    return delegatee.getBlocksize();
  }
  /**
   * @param value Blocksize to use when writing out storefiles/hfiles on this
   * column family.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setBlocksize(int value) {
    getDelegateeForModification().setBlocksize(value);
    return this;
  }
  /** @return compression algorithm used for this family's store files */
  @Override
  public Compression.Algorithm getCompressionType() {
    return delegatee.getCompressionType();
  }
  /**
   * Compression types supported in hbase.
   * LZO is not bundled as part of the hbase distribution.
   * See <a href="http://hbase.apache.org/book.html#lzo.compression">LZO Compression</a>
   * for how to enable it.
   * @param value Compression type setting.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCompressionType(Compression.Algorithm value) {
    getDelegateeForModification().setCompressionType(value);
    return this;
  }
  /** @return the data block encoding algorithm configured for this family */
  @Override
  public DataBlockEncoding getDataBlockEncoding() {
    return delegatee.getDataBlockEncoding();
  }
  /**
   * Set data block encoding algorithm used in block cache.
   * @param value What kind of data block encoding will be used.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setDataBlockEncoding(DataBlockEncoding value) {
    getDelegateeForModification().setDataBlockEncoding(value);
    return this;
  }
  /**
   * Set whether the tags should be compressed along with DataBlockEncoding. When no
   * DataBlockEncoding is been used, this is having no effect.
   *
   * @param value true to compress tags along with the data block encoding
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCompressTags(boolean value) {
    getDelegateeForModification().setCompressTags(value);
    return this;
  }
  /** @return true when tags are compressed along with data block encoding */
  @Override
  public boolean isCompressTags() {
    return delegatee.isCompressTags();
  }
  /** @return compression algorithm used for major compaction output */
  @Override
  public Compression.Algorithm getCompactionCompressionType() {
    return delegatee.getCompactionCompressionType();
  }
  /**
   * Compression types supported in hbase.
   * LZO is not bundled as part of the hbase distribution.
   * See <a href="http://hbase.apache.org/book.html#lzo.compression">LZO Compression</a>
   * for how to enable it.
   * @param value Compression type setting.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCompactionCompressionType(Compression.Algorithm value) {
    getDelegateeForModification().setCompactionCompressionType(value);
    return this;
  }
  /** @return true when this family's data is favored for in-memory caching */
  @Override
  public boolean isInMemory() {
    return delegatee.isInMemory();
  }
  /**
   * @param value True if we are to favor keeping all values for this column family in the
   * HRegionServer cache
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setInMemory(boolean value) {
    getDelegateeForModification().setInMemory(value);
    return this;
  }
  /** @return the in-memory compaction policy configured for this family */
  @Override
  public MemoryCompactionPolicy getInMemoryCompaction() {
    return delegatee.getInMemoryCompaction();
  }
  /**
   * @param value the prefered in-memory compaction policy
   * for this column family
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setInMemoryCompaction(MemoryCompactionPolicy value) {
    getDelegateeForModification().setInMemoryCompaction(value);
    return this;
  }
  /** @return the keep-deleted-cells setting for this family */
  @Override
  public KeepDeletedCells getKeepDeletedCells() {
    return delegatee.getKeepDeletedCells();
  }
  /**
   * @param value True if deleted rows should not be collected
   * immediately.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setKeepDeletedCells(KeepDeletedCells value) {
    getDelegateeForModification().setKeepDeletedCells(value);
    return this;
  }
  /**
   * By default, HBase only consider timestamp in versions. So a previous Delete with higher ts
   * will mask a later Put with lower ts. Set this to true to enable new semantics of versions.
   * We will also consider mvcc in versions. See HBASE-15968 for details.
   */
  @Override
  public boolean isNewVersionBehavior() {
    return delegatee.isNewVersionBehavior();
  }
  /**
   * @param newVersionBehavior true to enable the new (mvcc-aware) version
   *          semantics; see {@link #isNewVersionBehavior()}
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setNewVersionBehavior(boolean newVersionBehavior) {
    getDelegateeForModification().setNewVersionBehavior(newVersionBehavior);
    return this;
  }
  /** @return time-to-live of cell contents, in seconds */
  @Override
  public int getTimeToLive() {
    return delegatee.getTimeToLive();
  }
  /**
   * @param value Time-to-live of cell contents, in seconds.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setTimeToLive(int value) {
    getDelegateeForModification().setTimeToLive(value);
    return this;
  }
  /**
   * @param value Time to live of cell contents, in human readable format
   * @see org.apache.hadoop.hbase.util.PrettyPrinter#format(String, Unit)
   * @return this (for chained invocation)
   * @throws HBaseException when the value cannot be parsed
   */
  public HColumnDescriptor setTimeToLive(String value) throws HBaseException {
    getDelegateeForModification().setTimeToLive(value);
    return this;
  }
  /** @return minimum number of versions kept (relevant when TTL is set) */
  @Override
  public int getMinVersions() {
    return delegatee.getMinVersions();
  }
  /**
   * @param value The minimum number of versions to keep.
   * (used when timeToLive is set)
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setMinVersions(int value) {
    getDelegateeForModification().setMinVersions(value);
    return this;
  }
  /** @return true when block caching is enabled for this family */
  @Override
  public boolean isBlockCacheEnabled() {
    return delegatee.isBlockCacheEnabled();
  }
  /**
   * @param value True if hfile DATA type blocks should be cached (We always cache
   * INDEX and BLOOM blocks; you cannot turn this off).
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setBlockCacheEnabled(boolean value) {
    getDelegateeForModification().setBlockCacheEnabled(value);
    return this;
  }
  /** @return the bloom filter type configured for this family */
  @Override
  public BloomType getBloomFilterType() {
    return delegatee.getBloomFilterType();
  }
  /**
   * @param value bloom filter type
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setBloomFilterType(final BloomType value) {
    getDelegateeForModification().setBloomFilterType(value);
    return this;
  }
  /** @return the replication scope tag for this family */
  @Override
  public int getScope() {
    return delegatee.getScope();
  }
  /**
   * @param value the scope tag
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setScope(int value) {
    getDelegateeForModification().setScope(value);
    return this;
  }
  /** @return true when data blocks are cached on write */
  @Override
  public boolean isCacheDataOnWrite() {
    return delegatee.isCacheDataOnWrite();
  }
  /**
   * @param value true if we should cache data blocks on write
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCacheDataOnWrite(boolean value) {
    getDelegateeForModification().setCacheDataOnWrite(value);
    return this;
  }
  /**
   * This is a noop call from HBase 2.0 onwards
   *
   * @return this (for chained invocation)
   * @deprecated Since 2.0 and will be removed in 3.0 with out any replacement. Caching data in on
   *             heap Cache, when there are both on heap LRU Cache and Bucket Cache will no longer
   *             be supported from 2.0.
   */
  @Deprecated
  public HColumnDescriptor setCacheDataInL1(boolean value) {
    return this;
  }
  /** @return true when index blocks are cached on write */
  @Override
  public boolean isCacheIndexesOnWrite() {
    return delegatee.isCacheIndexesOnWrite();
  }
  /**
   * @param value true if we should cache index blocks on write
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCacheIndexesOnWrite(boolean value) {
    getDelegateeForModification().setCacheIndexesOnWrite(value);
    return this;
  }
  /** @return true when bloom filter blocks are cached on write */
  @Override
  public boolean isCacheBloomsOnWrite() {
    return delegatee.isCacheBloomsOnWrite();
  }
  /**
   * @param value true if we should cache bloomfilter blocks on write
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setCacheBloomsOnWrite(boolean value) {
    getDelegateeForModification().setCacheBloomsOnWrite(value);
    return this;
  }
  /** @return true when cached blocks are evicted on store file close */
  @Override
  public boolean isEvictBlocksOnClose() {
    return delegatee.isEvictBlocksOnClose();
  }
  /**
   * @param value true if we should evict cached blocks from the blockcache on
   * close
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setEvictBlocksOnClose(boolean value) {
    getDelegateeForModification().setEvictBlocksOnClose(value);
    return this;
  }
  /** @return true when blocks are prefetched into the block cache on open */
  @Override
  public boolean isPrefetchBlocksOnOpen() {
    return delegatee.isPrefetchBlocksOnOpen();
  }
  /**
   * @param value true if we should prefetch blocks into the blockcache on open
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setPrefetchBlocksOnOpen(boolean value) {
    getDelegateeForModification().setPrefetchBlocksOnOpen(value);
    return this;
  }
  /**
   * @see java.lang.Object#toString()
   */
  @Override
  public String toString() {
    return delegatee.toString();
  }
  /**
   * @return Column family descriptor with only the customized attributes.
   */
  @Override
  public String toStringCustomizedValues() {
    return delegatee.toStringCustomizedValues();
  }
  /** @return the pretty-printer unit associated with the given attribute key */
  public static Unit getUnit(String key) {
    return ColumnFamilyDescriptorBuilder.getUnit(key);
  }
  /** @return map of attribute names to their default values, as strings */
  public static Map<String, String> getDefaultValues() {
    return ColumnFamilyDescriptorBuilder.getDefaultValues();
  }
  /**
   * @see java.lang.Object#equals(java.lang.Object)
   */
  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj instanceof HColumnDescriptor) {
      // equality is fully defined by the delegate's state
      return delegatee.equals(((HColumnDescriptor) obj).delegatee);
    }
    return false;
  }
  /**
   * @see java.lang.Object#hashCode()
   */
  @Override
  public int hashCode() {
    // consistent with equals(): both delegate to the underlying descriptor
    return delegatee.hashCode();
  }
  /** Orders descriptors using the ColumnFamilyDescriptor COMPARATOR. */
  @Override
  public int compareTo(HColumnDescriptor other) {
    return COMPARATOR.compare(this, other);
  }
  /**
   * @return This instance serialized with pb with pb magic prefix
   * @see #parseFrom(byte[])
   */
  public byte[] toByteArray() {
    return ColumnFamilyDescriptorBuilder.toByteArray(delegatee);
  }
  /**
   * @param bytes A pb serialized {@link HColumnDescriptor} instance with pb magic prefix
   * @return An instance of {@link HColumnDescriptor} made from <code>bytes</code>
   * @throws DeserializationException
   * @see #toByteArray()
   */
  public static HColumnDescriptor parseFrom(final byte [] bytes) throws DeserializationException {
    ColumnFamilyDescriptor desc = ColumnFamilyDescriptorBuilder.parseFrom(bytes);
    if (desc instanceof ModifyableColumnFamilyDescriptor) {
      // wrap the modifiable descriptor directly, no copy needed
      return new HColumnDescriptor((ModifyableColumnFamilyDescriptor) desc);
    } else {
      // otherwise convert via a modifiable copy
      return new HColumnDescriptor(new ModifyableColumnFamilyDescriptor(desc));
    }
  }
  /** @return the configuration value stored under the given key */
  @Override
  public String getConfigurationValue(String key) {
    return delegatee.getConfigurationValue(key);
  }
  /** @return all configuration key/value pairs of this family */
  @Override
  public Map<String, String> getConfiguration() {
    return delegatee.getConfiguration();
  }
  /**
   * Setter for storing a configuration setting.
   * @param key Config key. Same as XML config key e.g. hbase.something.or.other.
   * @param value String value. If null, removes the configuration.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setConfiguration(String key, String value) {
    getDelegateeForModification().setConfiguration(key, value);
    return this;
  }
  /**
   * Remove a configuration setting represented by the key.
   */
  public void removeConfiguration(final String key) {
    getDelegateeForModification().removeConfiguration(key);
  }
  /** @return the encryption algorithm name configured for this family */
  @Override
  public String getEncryptionType() {
    return delegatee.getEncryptionType();
  }
  /**
   * Set the encryption algorithm for use with this family
   * @param value encryption algorithm name
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setEncryptionType(String value) {
    getDelegateeForModification().setEncryptionType(value);
    return this;
  }
  /** @return the raw crypto key attribute for this family */
  @Override
  public byte[] getEncryptionKey() {
    return delegatee.getEncryptionKey();
  }
  /** Set the raw crypto key attribute for the family */
  public HColumnDescriptor setEncryptionKey(byte[] value) {
    getDelegateeForModification().setEncryptionKey(value);
    return this;
  }
  /** @return the mob threshold of this family */
  @Override
  public long getMobThreshold() {
    return delegatee.getMobThreshold();
  }
  /**
   * Sets the mob threshold of the family.
   * @param value The mob threshold.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setMobThreshold(long value) {
    getDelegateeForModification().setMobThreshold(value);
    return this;
  }
  /** @return true when mob is enabled for this family */
  @Override
  public boolean isMobEnabled() {
    return delegatee.isMobEnabled();
  }
  /**
   * Enables the mob for the family.
   * @param value Whether to enable the mob for the family.
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setMobEnabled(boolean value) {
    getDelegateeForModification().setMobEnabled(value);
    return this;
  }
  /** @return the mob compact partition policy of this family */
  @Override
  public MobCompactPartitionPolicy getMobCompactPartitionPolicy() {
    return delegatee.getMobCompactPartitionPolicy();
  }
  /**
   * Set the mob compact partition policy for the family.
   * @param value policy type
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setMobCompactPartitionPolicy(MobCompactPartitionPolicy value) {
    getDelegateeForModification().setMobCompactPartitionPolicy(value);
    return this;
  }
  /** @return the configured DFS replication for this family's hfiles */
  @Override
  public short getDFSReplication() {
    return delegatee.getDFSReplication();
  }
  /**
   * Set the replication factor to hfile(s) belonging to this family
   * @param value number of replicas the blocks(s) belonging to this CF should have, or
   *          {@link #DEFAULT_DFS_REPLICATION} for the default replication factor set in the
   *          filesystem
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setDFSReplication(short value) {
    getDelegateeForModification().setDFSReplication(value);
    return this;
  }
  /** @return the storage policy in use for this family */
  @Override
  public String getStoragePolicy() {
    return delegatee.getStoragePolicy();
  }
  /**
   * Set the storage policy for use with this family
   * @param value the policy to set, valid setting includes: <i>"LAZY_PERSIST"</i>,
   * <i>"ALL_SSD"</i>, <i>"ONE_SSD"</i>, <i>"HOT"</i>, <i>"WARM"</i>, <i>"COLD"</i>
   * @return this (for chained invocation)
   */
  public HColumnDescriptor setStoragePolicy(String value) {
    getDelegateeForModification().setStoragePolicy(value);
    return this;
  }
  /** @return the value stored under the given key */
  @Override
  public Bytes getValue(Bytes key) {
    return delegatee.getValue(key);
  }
  /**
   * Returns the delegate that all mutators write through. Protected so
   * subclasses may intercept modification (e.g. to reject it).
   */
  protected ModifyableColumnFamilyDescriptor getDelegateeForModification() {
    return delegatee;
  }
}
| |
/*
* Copyright (C) 2017 Alasdair Mercer, !ninja
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.notninja.verifier.type;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.notninja.verifier.AbstractCustomVerifierTestCase;
import org.notninja.verifier.CustomVerifierTestCaseBase;
import org.notninja.verifier.message.MessageKeyEnumTestCase;
/**
* <p>
* Tests for the {@link ClassVerifier} class.
* </p>
*
* @author Alasdair Mercer
*/
@RunWith(Enclosed.class)
public class ClassVerifierTest {
// The nine Java primitive type tokens, including void.
private static final Class<?>[] PRIMITIVES = {Boolean.TYPE, Byte.TYPE, Character.TYPE, Double.TYPE, Float.TYPE, Integer.TYPE, Long.TYPE, Short.TYPE, Void.TYPE};
// Wrapper classes for the primitives above. NOTE(review): the last element is
// Void.TYPE (primitive void), not Void.class; the primitive() tests below rely
// on removing Void.TYPE from copies of this array, so it looks deliberate —
// confirm it matches ClassVerifier's own wrapper classification.
private static final Class<?>[] PRIMITIVE_WRAPPERS = {Boolean.class, Byte.class, Character.class, Double.class, Float.class, Integer.class, Long.class, Short.class, Void.TYPE};
// Plugs ClassVerifier into the shared contract tests inherited from
// AbstractCustomVerifierTestCase (chaining, value comparison, etc.).
public static class ClassVerifierAbstractCustomVerifierTest extends AbstractCustomVerifierTestCase<Class, ClassVerifier> {
@Override
protected ClassVerifier createCustomVerifier() {
return new ClassVerifier(getMockVerification());
}
@Override
protected Class createValueOne() {
return AnnotationOne.class;
}
@Override
protected Class createValueTwo() {
return AnnotationTwo.class;
}
// Class objects are canonical per class loader, so an "equal" value is the same reference.
@Override
protected boolean isEqualValueSame() {
return true;
}
@Override
protected Class<?> getParentClass() {
return Object.class;
}
@Override
protected Class<?> getValueClass() {
return Class.class;
}
}
public static class ClassVerifierMiscTest extends CustomVerifierTestCaseBase<Class, ClassVerifier> {
@Override
protected ClassVerifier createCustomVerifier() {
return new ClassVerifier(getMockVerification());
}
// --- annotated() ---
@Test
public void testAnnotatedWhenValueIsAnnotated() {
testAnnotatedHelper(TypeWithAnnotationOne.class, true);
}
@Test
public void testAnnotatedWhenValueIsNotAnnotated() {
testAnnotatedHelper(TypeWithNoAnnotations.class, false);
}
@Test
public void testAnnotatedWhenValueIsNull() {
testAnnotatedHelper(null, false);
}
// Sets the value under test, calls annotated(), then asserts the fluent chain
// and that the expected verdict was reported to the mocked Verification.
private void testAnnotatedHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().annotated());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ANNOTATED);
}
// --- annotatedWith(type) ---
@Test
public void testAnnotatedWithWhenTypeIsNull() {
testAnnotatedWithHelper(TypeWithAnnotationOne.class, null, false);
}
@Test
public void testAnnotatedWithWhenValueIsAnnotatedWithDifferentAnnotation() {
testAnnotatedWithHelper(TypeWithAnnotationOne.class, AnnotationTwo.class, false);
}
@Test
public void testAnnotatedWithWhenValueIsAnnotatedWithMultipleAnnotations() {
testAnnotatedWithHelper(TypeWithAnnotationOneAndTwo.class, AnnotationTwo.class, true);
}
@Test
public void testAnnotatedWithWhenValueIsAnnotatedWithNoAnnotations() {
testAnnotatedWithHelper(TypeWithNoAnnotations.class, AnnotationOne.class, false);
}
@Test
public void testAnnotatedWithWhenValueIsAnnotatedWithSameAnnotation() {
testAnnotatedWithHelper(TypeWithAnnotationOne.class, AnnotationOne.class, true);
}
@Test
public void testAnnotatedWithWhenValueIsNull() {
testAnnotatedWithHelper(null, AnnotationOne.class, false);
}
// Also captures the report() varargs to check the annotation type is forwarded
// for message formatting.
private void testAnnotatedWithHelper(Class<?> value, Class<? extends Annotation> type, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().annotatedWith(type));
verify(getMockVerification()).report(eq(expected), eq(ClassVerifier.MessageKeys.ANNOTATED_WITH), getArgsCaptor().capture());
assertSame("Passes type for message formatting", type, getArgsCaptor().getValue());
}
// --- annotatedWithAll(types): empty/null type sets are vacuously true ---
@Test
public void testAnnotatedWithAllWhenNoTypes() {
testAnnotatedWithAllHelper(TypeWithAnnotationOneAndTwo.class, createEmptyArray(Class.class), true);
}
@Test
public void testAnnotatedWithAllWhenNoTypesAndValueHasNoAnnotations() {
testAnnotatedWithAllHelper(TypeWithNoAnnotations.class, createEmptyArray(Class.class), true);
}
@Test
public void testAnnotatedWithAllWhenTypeIsNull() {
testAnnotatedWithAllHelper(TypeWithAnnotationOne.class, createArray((Class<? extends Annotation>) null), false);
}
@Test
public void testAnnotatedWithAllWhenTypesIsNull() {
testAnnotatedWithAllHelper(TypeWithNoAnnotations.class, null, true);
}
@Test
public void testAnnotatedWithAllWhenValueContainsAllTypes() {
testAnnotatedWithAllHelper(TypeWithAnnotationOneAndTwo.class, createArray(AnnotationOne.class, AnnotationTwo.class), true);
}
@Test
public void testAnnotatedWithAllWhenValueContainsSomeTypes() {
testAnnotatedWithAllHelper(TypeWithAnnotationOne.class, createArray(AnnotationOne.class, AnnotationTwo.class), false);
}
@Test
public void testAnnotatedWithAllWhenValueDoesNotContainType() {
testAnnotatedWithAllHelper(TypeWithNoAnnotations.class, createArray(AnnotationOne.class, AnnotationTwo.class), false);
}
@Test
public void testAnnotatedWithAllWhenValueIsNull() {
testAnnotatedWithAllHelper(null, createArray(AnnotationOne.class), false);
}
@Test
public void testAnnotatedWithAllWhenValueHasNoAnnotations() {
testAnnotatedWithAllHelper(TypeWithNoAnnotations.class, createArray(AnnotationOne.class), false);
}
// The whole types array is passed as a single varargs element, hence the (Object) cast.
private void testAnnotatedWithAllHelper(Class<?> value, Class<? extends Annotation>[] types, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().annotatedWithAll(types));
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ANNOTATED_WITH_ALL, (Object) types);
}
// --- annotatedWithAny(types): empty/null type sets are false ---
@Test
public void testAnnotatedWithAnyWhenNoTypes() {
testAnnotatedWithAnyHelper(TypeWithAnnotationOneAndTwo.class, createEmptyArray(Class.class), false);
}
@Test
public void testAnnotatedWithAnyWhenNoTypesAndValueHasNoAnnotations() {
testAnnotatedWithAnyHelper(TypeWithNoAnnotations.class, createEmptyArray(Class.class), false);
}
@Test
public void testAnnotatedWithAnyWhenTypeIsNull() {
testAnnotatedWithAnyHelper(TypeWithAnnotationOne.class, createArray((Class<? extends Annotation>) null), false);
}
@Test
public void testAnnotatedWithAnyWhenTypesIsNull() {
testAnnotatedWithAnyHelper(TypeWithNoAnnotations.class, null, false);
}
@Test
public void testAnnotatedWithAnyWhenValueContainsAllTypes() {
testAnnotatedWithAnyHelper(TypeWithAnnotationOneAndTwo.class, createArray(AnnotationOne.class, AnnotationTwo.class), true);
}
@Test
public void testAnnotatedWithAnyWhenValueContainsSomeTypes() {
testAnnotatedWithAnyHelper(TypeWithAnnotationOne.class, createArray(AnnotationTwo.class, AnnotationOne.class), true);
}
@Test
public void testAnnotatedWithAnyWhenValueDoesNotContainType() {
testAnnotatedWithAnyHelper(TypeWithNoAnnotations.class, createArray(AnnotationOne.class, AnnotationTwo.class), false);
}
@Test
public void testAnnotatedWithAnyWhenValueIsNull() {
testAnnotatedWithAnyHelper(null, createArray(AnnotationOne.class), false);
}
@Test
public void testAnnotatedWithAnyWhenValueHasNoAnnotations() {
testAnnotatedWithAnyHelper(TypeWithNoAnnotations.class, createArray(AnnotationOne.class), false);
}
private void testAnnotatedWithAnyHelper(Class<?> value, Class<? extends Annotation>[] types, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().annotatedWithAny(types));
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ANNOTATED_WITH_ANY, (Object) types);
}
// --- annotation(): whether the value itself is an annotation type ---
@Test
public void testAnnotationWhenValueIsAnnotation() {
testAnnotationHelper(AnnotationOne.class, true);
}
@Test
public void testAnnotationWhenValueIsNotAnnotation() {
testAnnotationHelper(TypeWithAnnotationOne.class, false);
}
@Test
public void testAnnotationWhenValueIsNull() {
testAnnotationHelper(null, false);
}
private void testAnnotationHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().annotation());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ANNOTATION);
}
// --- anonymous() ---
@Test
public void testAnonymousWhenValueIsAnonymous() {
// An in-place anonymous implementation gives us an anonymous Class instance.
testAnonymousHelper(new AnInterface() {
}.getClass(), true);
}
@Test
public void testAnonymousWhenValueIsNotAnonymous() {
testAnonymousHelper(ClassVerifierTest.class, false);
}
@Test
public void testAnonymousWhenValueIsNull() {
testAnonymousHelper(null, false);
}
private void testAnonymousHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().anonymous());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ANONYMOUS);
}
// --- array() ---
@Test
public void testArrayWhenValueIsArray() {
testArrayHelper(Object[].class, true);
}
@Test
public void testArrayWhenValueIsNotArray() {
testArrayHelper(ClassVerifierTest.class, false);
}
@Test
public void testArrayWhenValueIsNull() {
testArrayHelper(null, false);
}
private void testArrayHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().array());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ARRAY);
}
// --- assignableFrom(type) ---
@Test
public void testAssignableFromWhenTypeIsNull() {
testAssignableFromHelper(Object.class, null, false);
}
@Test
public void testAssignableFromWhenValueIsAssignableFromType() {
testAssignableFromHelper(Object.class, ClassVerifierTest.class, true);
}
@Test
public void testAssignableFromWhenValueIsNotAssignableFromType() {
testAssignableFromHelper(ClassVerifierTest.class, Object.class, false);
}
@Test
public void testAssignableFromWhenValueIsNull() {
testAssignableFromHelper(null, Object.class, false);
}
@Test
public void testAssignableFromWhenValueIsType() {
testAssignableFromHelper(ClassVerifierTest.class, ClassVerifierTest.class, true);
}
// Captures the report() varargs to check the type is forwarded for message formatting.
private void testAssignableFromHelper(Class<?> value, Class<?> type, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().assignableFrom(type));
verify(getMockVerification()).report(eq(expected), eq(ClassVerifier.MessageKeys.ASSIGNABLE_FROM), getArgsCaptor().capture());
assertSame("Passes type for message formatting", type, getArgsCaptor().getValue());
}
// --- assignableFromAll(types): empty/null type sets are vacuously true ---
@Test
public void testAssignableFromAllWhenNoTypes() {
testAssignableFromAllHelper(Collection.class, createEmptyArray(Class.class), true);
}
@Test
public void testAssignableFromAllWhenTypeIsNull() {
testAssignableFromAllHelper(Collection.class, createArray((Class) null), false);
}
@Test
public void testAssignableFromAllWhenTypesIsNull() {
testAssignableFromAllHelper(Collection.class, null, true);
}
@Test
public void testAssignableFromAllWhenValueIsAssignableFromAllTypes() {
testAssignableFromAllHelper(Collection.class, createArray(List.class, Set.class, ArrayList.class), true);
}
@Test
public void testAssignableFromAllWhenValueIsAssignableFromSomeTypes() {
testAssignableFromAllHelper(Collection.class, createArray(List.class, Map.class, ArrayList.class), false);
}
@Test
public void testAssignableFromAllWhenValueIsNotAssignableFromAnyType() {
testAssignableFromAllHelper(Collection.class, createArray(Boolean.class, Map.class, URI.class), false);
}
@Test
public void testAssignableFromAllWhenValueIsNull() {
testAssignableFromAllHelper(null, createArray(Object.class), false);
}
private void testAssignableFromAllHelper(Class<?> value, Class<?>[] types, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().assignableFromAll(types));
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ASSIGNABLE_FROM_ALL, (Object) types);
}
// --- assignableFromAny(types): empty/null type sets are false ---
@Test
public void testAssignableFromAnyWhenNoTypes() {
testAssignableFromAnyHelper(Collection.class, createEmptyArray(Class.class), false);
}
@Test
public void testAssignableFromAnyWhenTypeIsNull() {
testAssignableFromAnyHelper(Collection.class, createArray((Class) null), false);
}
@Test
public void testAssignableFromAnyWhenTypesIsNull() {
testAssignableFromAnyHelper(Collection.class, null, false);
}
@Test
public void testAssignableFromAnyWhenValueIsAssignableFromAllTypes() {
testAssignableFromAnyHelper(Collection.class, createArray(List.class, Set.class, ArrayList.class), true);
}
@Test
public void testAssignableFromAnyWhenValueIsAssignableFromSomeTypes() {
testAssignableFromAnyHelper(Collection.class, createArray(List.class, Map.class, ArrayList.class), true);
}
@Test
public void testAssignableFromAnyWhenValueIsNotAssignableFromAnyType() {
testAssignableFromAnyHelper(Collection.class, createArray(Boolean.class, Map.class, URI.class), false);
}
@Test
public void testAssignableFromAnyWhenValueIsNull() {
testAssignableFromAnyHelper(null, createArray(Object.class), false);
}
private void testAssignableFromAnyHelper(Class<?> value, Class<?>[] types, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().assignableFromAny(types));
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ASSIGNABLE_FROM_ANY, (Object) types);
}
// --- enumeration() ---
@Test
public void testEnumerationWhenValueIsEnum() {
testEnumerationHelper(AnEnum.class, true);
}
@Test
public void testEnumerationWhenValueIsNotEnum() {
testEnumerationHelper(ClassVerifierTest.class, false);
}
@Test
public void testEnumerationWhenValueIsNull() {
testEnumerationHelper(null, false);
}
private void testEnumerationHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().enumeration());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.ENUMERATION);
}
// --- interfaced() ---
@Test
public void testInterfacedWhenValueIsInterface() {
testInterfacedHelper(AnInterface.class, true);
}
@Test
public void testInterfacedWhenValueIsNotInterface() {
testInterfacedHelper(ClassVerifierTest.class, false);
}
@Test
public void testInterfacedWhenValueIsNull() {
testInterfacedHelper(null, false);
}
private void testInterfacedHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().interfaced());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.INTERFACED);
}
// --- nested(): member (nested) classes vs. top-level classes ---
@Test
public void testNestedWhenValueIsNested() {
testNestedHelper(TypeWithNoAnnotations.class, true);
}
@Test
public void testNestedWhenValueIsNotNested() {
testNestedHelper(ClassVerifierTest.class, false);
}
@Test
public void testNestedWhenValueIsNull() {
testNestedHelper(null, false);
}
private void testNestedHelper(Class<?> value, boolean expected) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().nested());
verify(getMockVerification()).report(expected, ClassVerifier.MessageKeys.NESTED);
}
// --- primitive() ---
@Test
public void testPrimitiveWhenValueIsNotPrimitiveOrWrapper() {
testPrimitiveHelper(new Class<?>[]{Object.class}, false);
}
@Test
public void testPrimitiveWhenValueIsNull() {
testPrimitiveHelper(new Class<?>[]{null}, false);
}
@Test
public void testPrimitiveWhenValueIsPrimitive() {
testPrimitiveHelper(PRIMITIVES, true);
}
@Test
public void testPrimitiveWhenValueIsPrimitiveWrapper() {
// Void.TYPE lives in both constant arrays and IS primitive, so drop it here
// to assert that the true wrapper classes are not considered primitive.
List<Class<?>> primitiveWrappers = new ArrayList<>(Arrays.asList(PRIMITIVE_WRAPPERS));
primitiveWrappers.remove(Void.TYPE);
testPrimitiveHelper(primitiveWrappers.toArray(new Class<?>[0]), false);
}
// Runs the verifier over each value; report() must fire once per value with the
// same expected verdict, hence times(values.length).
private void testPrimitiveHelper(Class<?>[] values, boolean expected) {
for (Class<?> value : values) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().primitive());
}
verify(getMockVerification(), times(values.length)).report(expected, ClassVerifier.MessageKeys.PRIMITIVE);
}
// --- primitiveOrWrapper() ---
@Test
public void testPrimitiveOrWrapperWhenValueIsNotPrimitiveOrWrapper() {
testPrimitiveOrWrapperHelper(new Class<?>[]{Object.class}, false);
}
@Test
public void testPrimitiveOrWrapperWhenValueIsNull() {
testPrimitiveOrWrapperHelper(new Class<?>[]{null}, false);
}
@Test
public void testPrimitiveOrWrapperWhenValueIsPrimitive() {
testPrimitiveOrWrapperHelper(PRIMITIVES, true);
}
@Test
public void testPrimitiveOrWrapperWhenValueIsPrimitiveWrapper() {
testPrimitiveOrWrapperHelper(PRIMITIVE_WRAPPERS, true);
}
private void testPrimitiveOrWrapperHelper(Class<?>[] values, boolean expected) {
for (Class<?> value : values) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().primitiveOrWrapper());
}
verify(getMockVerification(), times(values.length)).report(expected, ClassVerifier.MessageKeys.PRIMITIVE_OR_WRAPPER);
}
// --- primitiveWrapper() ---
@Test
public void testPrimitiveWrapperWhenValueIsNotPrimitiveOrWrapper() {
testPrimitiveWrapperHelper(new Class<?>[]{Object.class}, false);
}
@Test
public void testPrimitiveWrapperWhenValueIsNull() {
testPrimitiveWrapperHelper(new Class<?>[]{null}, false);
}
@Test
public void testPrimitiveWrapperWhenValueIsPrimitive() {
// Void.TYPE is treated as a wrapper by PRIMITIVE_WRAPPERS, so exclude it from
// the "these are primitives, not wrappers" negative check.
List<Class<?>> primitives = new ArrayList<>(Arrays.asList(PRIMITIVES));
primitives.remove(Void.TYPE);
testPrimitiveWrapperHelper(primitives.toArray(new Class<?>[0]), false);
}
@Test
public void testPrimitiveWrapperWhenValueIsPrimitiveWrapper() {
testPrimitiveWrapperHelper(PRIMITIVE_WRAPPERS, true);
}
private void testPrimitiveWrapperHelper(Class<?>[] values, boolean expected) {
for (Class<?> value : values) {
setValue(value);
assertSame("Chains reference", getCustomVerifier(), getCustomVerifier().primitiveWrapper());
}
verify(getMockVerification(), times(values.length)).report(expected, ClassVerifier.MessageKeys.PRIMITIVE_WRAPPER);
}
}
// Verifies the MessageKeys enum: every constant must map to the expected
// message-resource key string (checked by the inherited MessageKeyEnumTestCase).
public static class ClassVerifierMessageKeysTest extends MessageKeyEnumTestCase<ClassVerifier.MessageKeys> {
@Override
protected Class<? extends Enum> getEnumClass() {
return ClassVerifier.MessageKeys.class;
}
@Override
protected Map<String, String> getMessageKeys() {
Map<String, String> messageKeys = new HashMap<>();
messageKeys.put("ANNOTATED", "org.notninja.verifier.type.ClassVerifier.annotated");
messageKeys.put("ANNOTATED_WITH", "org.notninja.verifier.type.ClassVerifier.annotatedWith");
messageKeys.put("ANNOTATED_WITH_ALL", "org.notninja.verifier.type.ClassVerifier.annotatedWithAll");
messageKeys.put("ANNOTATED_WITH_ANY", "org.notninja.verifier.type.ClassVerifier.annotatedWithAny");
messageKeys.put("ANNOTATION", "org.notninja.verifier.type.ClassVerifier.annotation");
messageKeys.put("ANONYMOUS", "org.notninja.verifier.type.ClassVerifier.anonymous");
messageKeys.put("ARRAY", "org.notninja.verifier.type.ClassVerifier.array");
messageKeys.put("ASSIGNABLE_FROM", "org.notninja.verifier.type.ClassVerifier.assignableFrom");
messageKeys.put("ASSIGNABLE_FROM_ALL", "org.notninja.verifier.type.ClassVerifier.assignableFromAll");
messageKeys.put("ASSIGNABLE_FROM_ANY", "org.notninja.verifier.type.ClassVerifier.assignableFromAny");
messageKeys.put("ENUMERATION", "org.notninja.verifier.type.ClassVerifier.enumeration");
messageKeys.put("INTERFACED", "org.notninja.verifier.type.ClassVerifier.interfaced");
messageKeys.put("NESTED", "org.notninja.verifier.type.ClassVerifier.nested");
messageKeys.put("PRIMITIVE", "org.notninja.verifier.type.ClassVerifier.primitive");
messageKeys.put("PRIMITIVE_OR_WRAPPER", "org.notninja.verifier.type.ClassVerifier.primitiveOrWrapper");
messageKeys.put("PRIMITIVE_WRAPPER", "org.notninja.verifier.type.ClassVerifier.primitiveWrapper");
return messageKeys;
}
}
// --- test fixtures: sample annotated/unannotated types and annotations ---
@AnnotationOne
private static class TypeWithAnnotationOne {
}
@AnnotationOne
@AnnotationTwo
private static class TypeWithAnnotationOneAndTwo {
}
private static class TypeWithNoAnnotations {
}
// RUNTIME retention is required so the annotations are visible to reflection.
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
private @interface AnnotationOne {
}
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
private @interface AnnotationTwo {
}
private enum AnEnum {}
private interface AnInterface {
}
}
| |
package org.apache.ddlutils.platform.mssql;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.sql.Types;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.ddlutils.Platform;
import org.apache.ddlutils.alteration.ColumnDefinitionChange;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.ForeignKey;
import org.apache.ddlutils.model.Index;
import org.apache.ddlutils.model.Table;
import org.apache.ddlutils.model.TypeMap;
import org.apache.ddlutils.platform.SqlBuilder;
/**
* The SQL Builder for the Microsoft SQL Server.
*
* @version $Revision: 893917 $
*/
public class MSSqlBuilder extends SqlBuilder
{
/** We use a generic date format (per-instance; SimpleDateFormat is not thread-safe). */
private DateFormat _genericDateFormat = new SimpleDateFormat("yyyy-MM-dd");
/** We use a generic time format. */
private DateFormat _genericTimeFormat = new SimpleDateFormat("HH:mm:ss");
/**
 * Creates a new builder instance.
 *
 * @param platform The platform this builder belongs to
 */
public MSSqlBuilder(Platform platform)
{
super(platform);
// T-SQL escapes a single quote inside a string literal by doubling it.
addEscapedCharSequence("'", "''");
}
/**
 * {@inheritDoc}
 *
 * Emits the quoted_identifier statement (when delimited identifier mode is on)
 * before delegating the actual CREATE TABLE to the base class.
 */
public void createTable(Database database, Table table, Map parameters) throws IOException
{
turnOnQuotation();
super.createTable(database, table, parameters);
}
/**
 * {@inheritDoc}
 *
 * Generates a guarded T-SQL batch that, if the table exists, first drops every
 * non-primary-key constraint on it via a cursor over sysobjects/sysconstraints
 * (SQL Server refuses to drop a table that is still referenced), then drops
 * the table itself.
 */
public void dropTable(Table table) throws IOException
{
String tableName = getTableName(table);
// Unique suffixes keep the local @variables from colliding when several
// drop batches end up in one script.
String tableNameVar = "tn" + createUniqueIdentifier();
String constraintNameVar = "cn" + createUniqueIdentifier();
turnOnQuotation();
print("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'U' AND name = ");
printAlwaysSingleQuotedIdentifier(tableName);
println(")");
println("BEGIN");
println("  DECLARE @" + tableNameVar + " nvarchar(256), @" + constraintNameVar + " nvarchar(256)");
println("  DECLARE refcursor CURSOR FOR");
println("  SELECT object_name(objs.parent_obj) tablename, objs.name constraintname");
println("    FROM sysobjects objs JOIN sysconstraints cons ON objs.id = cons.constid");
print("    WHERE objs.xtype != 'PK' AND object_name(objs.parent_obj) = ");
printAlwaysSingleQuotedIdentifier(tableName);
println("  OPEN refcursor");
println("  FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar);
println("  WHILE @@FETCH_STATUS = 0");
println("    BEGIN");
// EXEC with a dynamically assembled statement because the constraint names
// are only known at execution time.
println("      EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@" + constraintNameVar + ")");
println("      FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar);
println("    END");
println("  CLOSE refcursor");
println("  DEALLOCATE refcursor");
print("  DROP TABLE ");
printlnIdentifier(tableName);
print("END");
printEndOfStatement();
}
/**
 * {@inheritDoc}
 *
 * Turns quotation mode on before the base class writes the DROP statements.
 */
public void dropForeignKeys(Table table) throws IOException
{
turnOnQuotation();
super.dropForeignKeys(table);
}
/**
 * {@inheritDoc}
 *
 * @return The ISO-style <code>yyyy-MM-dd</code> formatter used for DATE literals
 */
protected DateFormat getValueDateFormat()
{
return _genericDateFormat;
}
/**
 * {@inheritDoc}
 *
 * @return The <code>HH:mm:ss</code> formatter used for TIME literals
 */
protected DateFormat getValueTimeFormat()
{
return _genericTimeFormat;
}
/**
 * {@inheritDoc}
 *
 * Renders literal values the way SQL Server expects them: numeric types are
 * emitted without quotes, and DATE/TIME/TIMESTAMP values are wrapped in a
 * <code>CAST('...' AS datetime)</code> expression. All other types fall back
 * to the generic rendering of the base class.
 *
 * @param column The column the value belongs to (determines the JDBC type code)
 * @param value  The value; <code>null</code> yields the literal <code>NULL</code>
 * @return The SQL literal for the value
 */
protected String getValueAsString(Column column, Object value)
{
    if (value == null)
    {
        return "NULL";
    }

    StringBuilder result = new StringBuilder();

    switch (column.getTypeCode())
    {
        case Types.REAL:
        case Types.NUMERIC:
        case Types.FLOAT:
        case Types.DOUBLE:
        case Types.DECIMAL:
            // SQL Server does not want quotes around the value
            if (!(value instanceof String) && (getValueNumberFormat() != null))
            {
                result.append(getValueNumberFormat().format(value));
            }
            else
            {
                result.append(value.toString());
            }
            break;
        case Types.DATE:
            result.append("CAST(");
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(value instanceof String ? (String)value : getValueDateFormat().format(value));
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(" AS datetime)");
            break;
        case Types.TIME:
            result.append("CAST(");
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(value instanceof String ? (String)value : getValueTimeFormat().format(value));
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(" AS datetime)");
            break;
        case Types.TIMESTAMP:
            result.append("CAST(");
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(value.toString());
            result.append(getPlatformInfo().getValueQuoteToken());
            result.append(" AS datetime)");
            break;
        default:
            // Types without SQL Server-specific formatting use the generic rendering.
            return super.getValueAsString(column, value);
    }
    // Bug fix: the method previously discarded the formatted value built above
    // and unconditionally returned super.getValueAsString(column, value), so
    // the SQL Server-specific numeric/datetime formatting never took effect.
    return result.toString();
}
/**
 * {@inheritDoc}
 *
 * SQL Server stores BIT defaults as 0/1, so boolean-typed defaults are first
 * converted to their SMALLINT representation; everything else is handled by
 * the base class.
 */
protected String getNativeDefaultValue(Column column)
{
    int typeCode = column.getTypeCode();

    // Sql Server wants BIT default values as 0 or 1
    if ((typeCode == Types.BIT) || (typeCode == Types.BOOLEAN))
    {
        return getDefaultValueHelper().convert(column.getDefaultValue(), typeCode, Types.SMALLINT);
    }
    return super.getNativeDefaultValue(column);
}
/**
 * {@inheritDoc}
 *
 * Auto-increment columns become IDENTITY columns seeded at 1 with increment 1.
 */
protected void writeColumnAutoIncrementStmt(Table table, Column column) throws IOException
{
print("IDENTITY (1,1) ");
}
/**
 * {@inheritDoc}
 *
 * SQL Server's legacy syntax qualifies the index with the table name:
 * <code>DROP INDEX table.index</code>.
 */
public void dropIndex(Table table, Index index) throws IOException
{
print("DROP INDEX ");
printIdentifier(getTableName(table));
print(".");
printIdentifier(getIndexName(index));
printEndOfStatement();
}
/**
 * {@inheritDoc}
 *
 * Guards the DROP CONSTRAINT with an existence check against sysobjects
 * (xtype 'F' = foreign key) so the script does not fail if the constraint
 * is already gone.
 */
public void dropForeignKey(Table table, ForeignKey foreignKey) throws IOException
{
String constraintName = getForeignKeyName(table, foreignKey);
print("IF EXISTS (SELECT 1 FROM sysobjects WHERE type = 'F' AND name = ");
printAlwaysSingleQuotedIdentifier(constraintName);
println(")");
printIndent();
print("ALTER TABLE ");
printIdentifier(getTableName(table));
print(" DROP CONSTRAINT ");
printIdentifier(constraintName);
printEndOfStatement();
}
/**
 * Returns the statement that turns on the ability to write delimited identifiers.
 *
 * @return The quotation-on statement, or an empty string when delimited
 *         identifier mode is off
 */
private String getQuotationOnStatement()
{
    boolean delimited = getPlatform().isDelimitedIdentifierModeOn();

    return delimited
        ? "SET quoted_identifier on" + getPlatformInfo().getSqlCommandDelimiter() + "\n"
        : "";
}
/**
 * If quotation mode is on, then this writes the statement that turns on the ability to write delimited identifiers.
 * When quotation mode is off this prints nothing (the statement is the empty string).
 */
protected void turnOnQuotation() throws IOException
{
print(getQuotationOnStatement());
}
/**
 * {@inheritDoc}
 *
 * NOTE(review): @@IDENTITY returns the last identity generated in the session
 * across all scopes (including triggers); SCOPE_IDENTITY() would be safer —
 * confirm whether trigger-generated identities matter for callers.
 */
public String getSelectLastIdentityValues(Table table)
{
return "SELECT @@IDENTITY";
}
/**
 * Returns the SQL to enable identity override mode.
 *
 * @param table The table to enable the mode for
 * @return The SQL
 */
protected String getEnableIdentityOverrideSql(Table table)
{
    // The quoted_identifier statement (possibly empty) must precede the SET.
    return getQuotationOnStatement()
           + "SET IDENTITY_INSERT "
           + getDelimitedIdentifier(getTableName(table))
           + " ON"
           + getPlatformInfo().getSqlCommandDelimiter();
}
/**
 * Returns the SQL to disable identity override mode.
 *
 * @param table The table to disable the mode for
 * @return The SQL
 */
protected String getDisableIdentityOverrideSql(Table table)
{
    // Mirrors getEnableIdentityOverrideSql, switching the mode back OFF.
    return getQuotationOnStatement()
           + "SET IDENTITY_INSERT "
           + getDelimitedIdentifier(getTableName(table))
           + " OFF"
           + getPlatformInfo().getSqlCommandDelimiter();
}
/**
 * {@inheritDoc}
 *
 * Prefixes the generic DELETE with the quoted_identifier statement when
 * delimited identifier mode is on.
 */
public String getDeleteSql(Table table, Map pkValues, boolean genPlaceholders)
{
    String quotationSql = getQuotationOnStatement();

    return quotationSql + super.getDeleteSql(table, pkValues, genPlaceholders);
}
/**
 * {@inheritDoc}
 *
 * Prefixes the generic INSERT with the quoted_identifier statement when
 * delimited identifier mode is on.
 */
public String getInsertSql(Table table, Map columnValues, boolean genPlaceholders)
{
    String quotationSql = getQuotationOnStatement();

    return quotationSql + super.getInsertSql(table, columnValues, genPlaceholders);
}
/**
 * {@inheritDoc}
 *
 * Prefixes the generic UPDATE with the quoted_identifier statement when
 * delimited identifier mode is on.
 */
public String getUpdateSql(Table table, Map columnValues, boolean genPlaceholders)
{
    String quotationSql = getQuotationOnStatement();

    return quotationSql + super.getUpdateSql(table, columnValues, genPlaceholders);
}
/**
 * Prints the given identifier with enforced single quotes around it regardless of whether
 * delimited identifiers are turned on or not.
 *
 * @param identifier The identifier
 */
private void printAlwaysSingleQuotedIdentifier(String identifier) throws IOException
{
    // Emit the opening quote, the identifier and the closing quote in one write.
    print("'" + identifier + "'");
}
/**
 * {@inheritDoc}
 *
 * Wraps the generic data copy in SET IDENTITY_INSERT ON/OFF when the target
 * table has an identity column that also exists in the source table, since
 * SQL Server otherwise rejects explicit inserts into identity columns.
 */
protected void copyData(Table sourceTable, Table targetTable) throws IOException
{
// Sql Server per default does not allow us to insert values explicitly into
// identity columns. However, we can change this behavior
// We need to this only if
// - there is a column in both tables that is auto increment only in the target table, or
// - there is a column in both tables that is auto increment in both tables
Column[] targetIdentityColumns = targetTable.getAutoIncrementColumns();
// Sql Server allows only one identity column, so let's take a shortcut here
boolean needToAllowIdentityInsert = (targetIdentityColumns.length > 0) &&
(sourceTable.findColumn(targetIdentityColumns[0].getName(), getPlatform().isDelimitedIdentifierModeOn()) != null);
if (needToAllowIdentityInsert)
{
print("SET IDENTITY_INSERT ");
printIdentifier(getTableName(targetTable));
print(" ON");
printEndOfStatement();
}
super.copyData(sourceTable, targetTable);
// We have to turn it off ASAP because it can be on only for one table per session
if (needToAllowIdentityInsert)
{
print("SET IDENTITY_INSERT ");
printIdentifier(getTableName(targetTable));
print(" OFF");
printEndOfStatement();
}
}
/**
 * {@inheritDoc}
 *
 * Emits <code>ALTER TABLE ... ADD column-definition</code>.
 */
public void addColumn(Database model, Table table, Column newColumn) throws IOException
{
print("ALTER TABLE ");
printlnIdentifier(getTableName(table));
printIndent();
print("ADD ");
writeColumn(table, newColumn);
printEndOfStatement();
}
/**
 * Generates the SQL to drop a column from a table.
 *
 * @param table The table where to drop the column from
 * @param column The column to drop
 */
public void dropColumn(Table table, Column column) throws IOException
{
// A column with a default has an implicit default ("D") constraint which
// must be dropped first, or the DROP COLUMN fails.
if (!StringUtils.isEmpty(column.getDefaultValue()))
{
writeDropConstraintStatement(table, column, "D");
}
print("ALTER TABLE ");
printlnIdentifier(getTableName(table));
printIndent();
print("DROP COLUMN ");
printIdentifier(getColumnName(column));
printEndOfStatement();
}
/**
 * Writes the SQL for dropping the primary key of the given table.
 *
 * @param table The table
 */
public void dropPrimaryKey(Table table) throws IOException
{
// this would be easier if named primary keys are supported
// because for named pks we could use ALTER TABLE DROP;
// instead the PK constraint name is looked up at runtime (xtype "PK").
writeDropConstraintStatement(table, null, "PK");
}
/**
 * Writes the SQL to recreate a column, e.g. using a different type or similar.
 *
 * @param table The table
 * @param curColumn The current column definition
 * @param newColumn The new column definition
 */
public void recreateColumn(Table table, Column curColumn, Column newColumn) throws IOException
{
boolean hasDefault = curColumn.getParsedDefaultValue() != null;
boolean shallHaveDefault = newColumn.getParsedDefaultValue() != null;
String newDefault = newColumn.getDefaultValue();
// Sql Server does not like it if there is a default spec in the ALTER TABLE ALTER COLUMN
// statement; thus we have to change the default manually
if (newDefault != null)
{
// Temporarily cleared so writeColumn() below emits no DEFAULT clause;
// restored further down before the ADD CONSTRAINT.
newColumn.setDefaultValue(null);
}
if (hasDefault)
{
// we're dropping the old default
writeDropConstraintStatement(table, curColumn, "D");
}
print("ALTER TABLE ");
printlnIdentifier(getTableName(table));
printIndent();
print("ALTER COLUMN ");
writeColumn(table, newColumn);
printEndOfStatement();
if (shallHaveDefault)
{
newColumn.setDefaultValue(newDefault);
// if the column shall have a default, then we have to add it as a constraint
print("ALTER TABLE ");
printlnIdentifier(getTableName(table));
printIndent();
print("ADD CONSTRAINT ");
printIdentifier(getConstraintName("DF", table, curColumn.getName(), null));
writeColumnDefaultValueStmt(table, newColumn);
print(" FOR ");
printIdentifier(getColumnName(curColumn));
printEndOfStatement();
}
}
/**
 * Writes the SQL to drop a constraint, e.g. a primary key or default value constraint.
 * <p>
 * Since the constraint name is not known in advance, this emits a T-SQL block
 * that looks it up at execution time: a cursor selects all matching constraints
 * from <code>sysobjects</code>/<code>sysconstraints</code> (filtered by table,
 * constraint type, and optionally column), then issues a dynamic
 * <code>ALTER TABLE ... DROP CONSTRAINT</code> for each match via EXEC.
 *
 * @param table          The table that the constraint is on
 * @param column         The column that the constraint is on; <code>null</code> for table-level
 *                       constraints
 * @param typeIdentifier The constraint type identifier as is specified for the
 *                       <code>sysobjects</code> system table
 */
protected void writeDropConstraintStatement(Table table, Column column, String typeIdentifier) throws IOException
{
    String tableName = getTableName(table);
    String columnName = column == null ? null : getColumnName(column);
    // variable names are suffixed with a unique id so that multiple generated
    // blocks in the same script do not clash.
    // NOTE(review): the cursor name "refcursor" is NOT uniquified — this assumes
    // the generated blocks are never nested/concurrent; confirm.
    String tableNameVar = "tn" + createUniqueIdentifier();
    String constraintNameVar = "cn" + createUniqueIdentifier();
    println("BEGIN");
    println("  DECLARE @" + tableNameVar + " nvarchar(256), @" + constraintNameVar + " nvarchar(256)");
    println("  DECLARE refcursor CURSOR FOR");
    println("  SELECT object_name(objs.parent_obj) tablename, objs.name constraintname");
    println("    FROM sysobjects objs JOIN sysconstraints cons ON objs.id = cons.constid");
    print("    WHERE objs.xtype = '");
    print(typeIdentifier);
    println("' AND");
    if (columnName != null)
    {
        // restrict to constraints on the given column
        print("          cons.colid = (SELECT colid FROM syscolumns WHERE id = object_id(");
        printAlwaysSingleQuotedIdentifier(tableName);
        print(") AND name = ");
        printAlwaysSingleQuotedIdentifier(columnName);
        println(") AND");
    }
    print("          object_name(objs.parent_obj) = ");
    printAlwaysSingleQuotedIdentifier(tableName);
    println("  OPEN refcursor");
    println("  FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar);
    println("  WHILE @@FETCH_STATUS = 0");
    println("    BEGIN");
    // dynamic SQL because the constraint name is only known at execution time
    println("      EXEC ('ALTER TABLE '+@" + tableNameVar + "+' DROP CONSTRAINT '+@" + constraintNameVar + ")");
    println("      FETCH NEXT FROM refcursor INTO @" + tableNameVar + ", @" + constraintNameVar);
    println("    END");
    println("  CLOSE refcursor");
    println("  DEALLOCATE refcursor");
    print("END");
    printEndOfStatement();
}
/**
 * {@inheritDoc}
 * <p>
 * Emits a CAST of the source column to the target column's type when the type
 * or size differs. For text-typed targets that are smaller than the source,
 * the cast is additionally wrapped in SUBSTRING to truncate to the target size
 * (SUBSTRING(CAST(col AS type),1,size)); otherwise a plain CAST is used. If
 * neither type nor size changed, the column is referenced directly.
 */
protected void writeCastExpression(Column sourceColumn, Column targetColumn) throws IOException
{
    boolean sizeChanged = ColumnDefinitionChange.isSizeChanged(getPlatformInfo(), sourceColumn, targetColumn);
    boolean typeChanged = ColumnDefinitionChange.isTypeChanged(getPlatformInfo(), sourceColumn, targetColumn);
    if (sizeChanged || typeChanged)
    {
        if (TypeMap.isTextType(targetColumn.getTypeCode()) && sizeChanged &&
            (targetColumn.getSize() != null) && (sourceColumn.getSizeAsInt() > targetColumn.getSizeAsInt()))
        {
            // target is a shrinking text column: truncate explicitly, since the
            // size is supplied via SUBSTRING rather than the type spec
            // (getNativeType is the bare type, presumably without a size — confirm)
            print("SUBSTRING(CAST(");
            printIdentifier(getColumnName(sourceColumn));
            print(" AS ");
            print(getNativeType(targetColumn));
            print("),1,");
            print(getSizeSpec(targetColumn));
            print(")");
        }
        else
        {
            // plain cast; getSqlType includes the full type spec
            print("CAST(");
            printIdentifier(getColumnName(sourceColumn));
            print(" AS ");
            print(getSqlType(targetColumn));
            print(")");
        }
    }
    else
    {
        // no conversion needed — reference the column as-is
        printIdentifier(getColumnName(sourceColumn));
    }
}
}
| |
package production;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Random;
/**
* Creates an artificial graph for testing purposes.
* <p>
* The value numVertices and String type in this class should be set before running the main method.
* - numVertices is the number of nodes to be included in this graph.
* - type refers to the type of graph to be constructed, in terms of its density.
* The class can generate three types of graphs:
* - sparse
* - regular
* - dense
* These are based on values recovered from a paper online looking at graph density of real world
* graphs.
* <p>
* The graph constructed is based on a domain of webpages and programmers. Webpages are linked to
* one another randomly. Programmers can code for zero or more webpages, and these webpages are
* owned by another set of people known as owners. These owners also employ programmers, although
* they may employ programmers to code for websites which the owners do not own (in this case the
* programmer might be a contractor for example). The final relationship in the graph is that
* programmers and owners can be friends with one another. Note: a person may be both a programmer
* and an owner.
* <p>
* The array allocations assigns a proportion of the nodes to each label.
* <p>
* The edges are firstly added to the graph randomly. After the number of edges is greater than the
* number of nodes, the edges are added proportionally to the degree of each node. Thus, if one node
* has a larger number of edges already attached to it, it is more likely to have another edge attach
 * to it. This creates slightly more realistic graphs, and helps demonstrate in some way the small
* world property that a lot of real world graphs exhibit.
* <p>
* Features of the graph:
* - no loops allowed
* - type of relationship of the edge is based on the labels of the nodes. In most cases this is
* deterministic (i.e. if the label of the node where the edge is coming from is 'programmer', and
* the label of the other node is 'website', then this is a "CODES-FOR" relation.)
* - if no type of relationship can be found for two nodes, then algorithm flips the edge and attempts
* again. This should add all edges to the graph.
* <p>
* The output of the module are CSV files for both the labels and the relationships. These can then be
* filled with additional metadata, and added to Neo4J through the use of the shell and some existing
* queries which can parse the .csv.
* <p>
* load csv with headers from "file:///csv/owner.csv" AS line
* CREATE (a:Owner {personID:toInt(line.ownerID), surname:line.surname,
* city:line.city, state: line.state, cars:toInt(line.cars)});
* <p>
* load csv with headers from "file:///csv/progOwner.csv" AS line
* CREATE (a:Owner:Programmer {personID:toInt(line.poID), surname:line.surname,
* city:line.city, state: line.state, cars:toInt(line.cars), pets:toInt(line.pets)});
* <p>
* load csv with headers from "file:///csv/programmer.csv" AS line
* CREATE (a:Programmer {personID:toInt(line.progID), surname:line.surname,
* city:line.city, state:line.state, pets:toInt(line.pets)});
* <p>
* load csv with headers from "file:///csv/website.csv" AS line
* CREATE (a:Website {webID:toInt(line.webID), host:line.host, domain:line.domain});
* <p>
* load csv with headers from "file:///csv/CODES_FOR.csv" AS line
* MATCH (a {personID:toInt(line.sourceID)}), (b:Website {webID:toInt(line.destID)})
* CREATE (a)-[r:CODES_FOR {commits:toInt(line.commits)}]->(b);
* <p>
* load csv with headers from "file:///csv/EMPLOYS.csv" AS line
* MATCH (a {personID:toInt(line.sourceID)}), (b {personID:toInt(line.destID)})
* CREATE (a)-[r:EMPLOYS {salary:toInt(line.salary), carShare:line.carShare}]->(b);
* <p>
* load csv with headers from "file:///csv/FRIENDS.csv" AS line
* MATCH (a {personID:toInt(line.sourceID)}), (b {personID:toInt(line.destID)}) CREATE (a)-[r:FRIENDS]->(b);
* <p>
* load csv with headers from "file:///csv/LINKED_TO.csv" AS line
* MATCH (a:Website {webID:toInt(line.sourceID)}), (b:Website {webID:toInt(line.destID)})
* CREATE (a)-[r:LINKED_TO {popularity:toInt(line.popularity)}]->(b);
* <p>
* load csv with headers from "file:///csv/OWNS.csv" AS line
* MATCH (a {personID:toInt(line.sourceID)}), (b:Website {webID:toInt(line.destID)})
* CREATE (a)-[r:OWNS {salary:toInt(line.salary)}]->(b);
*/
public class GraphCreator {

    // set these before running the main method.
    private static final int numVertices = 40000;
    // options are SPARSE, REGULAR, DENSE
    private static final String type = "DENSE";

    // density calculation based on d = m / n,
    // where m is the number of edges and n is the number of nodes.
    private static final double SPARSE_DEGREE = 7.61;
    private static final double REGULAR_DEGREE = 13.49;
    private static final double DENSE_DEGREE = 25.67;

    private static final Random r = new Random();

    // labels of the nodes in the graph.
    private static String[] labels = {"website", "programmer", "owner", "progOwner"};

    // allocations of the number of nodes to the labels above (parallel to 'labels').
    private static int[] allocations = {
            (int) ((0.7) * numVertices),
            (int) ((0.22) * numVertices),
            (int) ((0.05) * numVertices),
            (int) ((0.03) * numVertices)
    };

    // number of edges added so far.
    private static int numEdges = 0;
    // total number of edges to add, derived from 'type'.
    private static long edgesToAdd;
    // degree of each node, indexed by node id (0-based).
    private static short[] degreeOfVertices;
    // index into 'labels' for each node.
    private static byte[] labelsOfNodes;
    // symmetric adjacency matrix; true means an (undirected) edge exists.
    private static boolean[][] adjMat;

    public static void main(String[] args) {
        try {
            // initialise the parameters and arrays.
            setupAndInitialise();
        } catch (Exception e) {
            // FIX: the original swallowed the exception silently, making a bad
            // 'type' value impossible to diagnose. Report it before exiting.
            e.printStackTrace();
            System.exit(1);
        }
        // add the edges to the graph.
        addEdges();
        // print basic information about the created graph.
        printInformation();
        // write the output of the graph to local CSV files for importing into Neo4J.
        try {
            produceCSV();
            produceCSVLabels();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Writes {@code content} to the file at {@code path}.
     * FIX: centralises the PrintWriter handling in a try-with-resources block;
     * the original leaked the writer whenever write() threw.
     *
     * @param path    destination file path.
     * @param content full file content to write.
     * @throws FileNotFoundException if the file cannot be created.
     */
    private static void writeFile(String path, String content) throws FileNotFoundException {
        try (PrintWriter pw = new PrintWriter(new File(path))) {
            pw.write(content);
        }
    }

    /**
     * Produce CSV files for the labels - the additional metadata for each label is
     * added manually from other CSV files.
     *
     * @throws FileNotFoundException - File was not found in location specified.
     */
    private static void produceCSVLabels() throws FileNotFoundException {
        StringBuilder web = new StringBuilder();
        StringBuilder prog = new StringBuilder();
        StringBuilder owner = new StringBuilder();
        StringBuilder po = new StringBuilder();
        web.append("webID,host,domain\n");
        prog.append("progID,surname,city,state,pets\n");
        owner.append("ownerID,surname,city,state,cars\n");
        po.append("poID,surname,city,state,pets,cars\n");
        // add the ids of the nodes for each label type to the correct .csv file.
        for (int i = 0; i < labelsOfNodes.length; i++) {
            switch (labels[labelsOfNodes[i]]) {
                case "website":
                    web.append(i).append("\n");
                    break;
                case "programmer":
                    prog.append(i).append("\n");
                    break;
                case "owner":
                    owner.append(i).append("\n");
                    break;
                case "progOwner":
                    po.append(i).append("\n");
                    break;
            }
        }
        writeFile("C:/Users/ocraw/Desktop/website.csv", web.toString());
        writeFile("C:/Users/ocraw/Desktop/programmer.csv", prog.toString());
        writeFile("C:/Users/ocraw/Desktop/owner.csv", owner.toString());
        writeFile("C:/Users/ocraw/Desktop/progOwner.csv", po.toString());
    }

    /**
     * Print information of the graph created: density plus minimal/maximal node
     * degree. Note: sorts degreeOfVertices in place, so it must run after all
     * edges have been added.
     */
    private static void printInformation() {
        System.out.println("Graph density : " + (double) (numEdges) / (numVertices));
        Arrays.parallelSort(degreeOfVertices);
        System.out.println("Minimal degree of node : " + degreeOfVertices[0]);
        System.out.println("Maximal degree of node : " + degreeOfVertices[numVertices - 1]);
    }

    /**
     * Adds edges to the graph. The first numVertices edges are placed uniformly
     * at random; after that, the source endpoint is chosen with probability
     * proportional to its current degree (preferential attachment), which
     * yields slightly more realistic, small-world-like graphs.
     */
    private static void addEdges() {
        for (int b = 0; b < edgesToAdd; b++) {
            // progress indicator.
            if (b % 5000 == 0) {
                System.out.println(b + " edges added.");
            }
            if (numEdges < numVertices) {
                // add first numVertices edges at random anywhere in the graph.
                // retry until a non-loop, non-duplicate pair is found.
                int indexFrom;
                int indexTo;
                do {
                    indexFrom = r.nextInt(numVertices) + 1;
                    indexTo = r.nextInt(numVertices) + 1;
                } while (!addEdge(indexFrom, indexTo));
            } else {
                // pick the source node weighted by how many edges it already has
                // (small world principle): walk the cumulative degree counts.
                int randomEdgeNumber = r.nextInt(numEdges) + 1;
                int indexFrom = 0;
                while (randomEdgeNumber > 0) randomEdgeNumber = randomEdgeNumber - degreeOfVertices[indexFrom++];
                indexFrom--;
                boolean addedEdge = false;
                // if the chosen node is already connected to every other node,
                // no edge is added for this iteration.
                while (!addedEdge && degreeOfVertices[indexFrom] != numVertices - 1) {
                    int indexTo = r.nextInt(numVertices);
                    if (!adjMat[indexFrom][indexTo] && indexFrom != indexTo) {
                        addedEdge = true;
                        addEdge(indexFrom + 1, indexTo + 1);
                    }
                }
            }
        }
    }

    /**
     * Setup and initialise the graph. This includes:
     * - determining the number of edges needed to produce the correct density value.
     * - assigning labels to nodes
     * - allocating the adjacency matrix and degree/label arrays
     *
     * @throws IllegalArgumentException - parameter 'type' was not one of either:
     *                                    "SPARSE", "REGULAR", or "DENSE".
     */
    private static void setupAndInitialise() {
        // setup the number of edges to add to the artificial graph.
        switch (type) {
            case "SPARSE":
                edgesToAdd = (long) (numVertices * SPARSE_DEGREE);
                break;
            case "REGULAR":
                edgesToAdd = (long) (numVertices * REGULAR_DEGREE);
                break;
            case "DENSE":
                edgesToAdd = (long) (numVertices * DENSE_DEGREE);
                break;
            default:
                // FIX: standard unchecked exception instead of raw Exception.
                throw new IllegalArgumentException("Incorrect parameter passed to setup");
        }
        // setup the adjacency matrix and keep track of the degree of each node.
        // FIX: the original re-zeroed degreeOfVertices and adjMat element by
        // element (an O(n^2) loop — 1.6e9 iterations at n=40000); Java array
        // elements are already zero/false-initialised, so those loops are gone.
        degreeOfVertices = new short[numVertices];
        labelsOfNodes = new byte[numVertices];
        adjMat = new boolean[numVertices][numVertices];
        // NOTE: edges are undirected, direction is decided randomly later on.
        for (int a = 0; a < numVertices; a++) {
            boolean labelAdded = false;
            // go round loop until the node is assigned a label that still has
            // remaining allocation quota.
            while (!labelAdded) {
                byte labelAllocation = (byte) r.nextInt(labels.length);
                if (allocations[labelAllocation] > 0) {
                    allocations[labelAllocation]--;
                    labelsOfNodes[a] = labelAllocation;
                    labelAdded = true;
                }
            }
        }
    }

    /**
     * Create CSV files for the relationships. A random half of the edges are
     * written in their (from, to) orientation first; the remaining edges are
     * then flushed in both orientations by addRemainingEdges. Written edges are
     * cleared from the adjacency matrix in both directions.
     *
     * @throws FileNotFoundException - file not found in specified location.
     */
    private static void produceCSV() throws FileNotFoundException {
        StringBuilder sb_LINKED_TO = new StringBuilder();
        StringBuilder sb_CODES_FOR = new StringBuilder();
        StringBuilder sb_OWNS = new StringBuilder();
        StringBuilder sb_EMPLOYS = new StringBuilder();
        StringBuilder sb_FRIENDS = new StringBuilder();
        sb_LINKED_TO.append("sourceID,destID,popularity").append("\n");
        sb_CODES_FOR.append("sourceID,destID,commits").append("\n");
        sb_OWNS.append("sourceID,destID,salary").append("\n");
        sb_EMPLOYS.append("sourceID,destID,salary,carShare").append("\n");
        sb_FRIENDS.append("sourceID,destID").append("\n");
        for (int from = 0; from < adjMat.length; from++) {
            for (int to = 0; to < adjMat.length; to++) {
                if (adjMat[from][to]) {
                    // randomly choose the direction for roughly half the edges.
                    if (Math.random() < 0.5) {
                        String labelFrom = labels[labelsOfNodes[from]];
                        String labelTo = labels[labelsOfNodes[to]];
                        String typeRel = workoutrel(labelFrom, labelTo);
                        if (typeRel != null) {
                            switch (typeRel) {
                                case "LINKED_TO":
                                    sb_LINKED_TO = fillCSV(sb_LINKED_TO, from, to, typeRel);
                                    break;
                                case "CODES_FOR":
                                    sb_CODES_FOR = fillCSV(sb_CODES_FOR, from, to, typeRel);
                                    break;
                                case "OWNS":
                                    sb_OWNS = fillCSV(sb_OWNS, from, to, typeRel);
                                    break;
                                case "EMPLOYS":
                                    sb_EMPLOYS = fillCSV(sb_EMPLOYS, from, to, typeRel);
                                    break;
                                case "FRIENDS":
                                    sb_FRIENDS = fillCSV(sb_FRIENDS, from, to, typeRel);
                                    break;
                            }
                            // edge written — clear it in both directions.
                            adjMat[from][to] = false;
                            adjMat[to][from] = false;
                        }
                    }
                }
            }
        }
        // flush whatever edges remain, trying both orientations so that every
        // edge finds a valid relationship type.
        sb_LINKED_TO = addRemainingEdges(sb_LINKED_TO, "LINKED_TO", 0);
        sb_LINKED_TO = addRemainingEdges(sb_LINKED_TO, "LINKED_TO", 1);
        sb_CODES_FOR = addRemainingEdges(sb_CODES_FOR, "CODES_FOR", 0);
        sb_CODES_FOR = addRemainingEdges(sb_CODES_FOR, "CODES_FOR", 1);
        sb_OWNS = addRemainingEdges(sb_OWNS, "OWNS", 0);
        sb_OWNS = addRemainingEdges(sb_OWNS, "OWNS", 1);
        sb_EMPLOYS = addRemainingEdges(sb_EMPLOYS, "EMPLOYS", 0);
        sb_EMPLOYS = addRemainingEdges(sb_EMPLOYS, "EMPLOYS", 1);
        sb_FRIENDS = addRemainingEdges(sb_FRIENDS, "FRIENDS", 0);
        sb_FRIENDS = addRemainingEdges(sb_FRIENDS, "FRIENDS", 1);
        writeFile("C:/Users/ocraw/Desktop/LINKED_TO.csv", sb_LINKED_TO.toString());
        writeFile("C:/Users/ocraw/Desktop/CODES_FOR.csv", sb_CODES_FOR.toString());
        writeFile("C:/Users/ocraw/Desktop/OWNS.csv", sb_OWNS.toString());
        writeFile("C:/Users/ocraw/Desktop/EMPLOYS.csv", sb_EMPLOYS.toString());
        writeFile("C:/Users/ocraw/Desktop/FRIENDS.csv", sb_FRIENDS.toString());
        System.out.println("Done!");
    }

    /**
     * Fill the CSV with the correct data. Appends one row "from,to[,metadata]"
     * where the metadata columns depend on the relationship type.
     *
     * @param sb      - comma separated string to write for each row.
     * @param from    - index of node where relationship is directed from.
     * @param to      - index of node where relationship is directed to.
     * @param typeRel - the type of relationship.
     * @return - comma separated string with new information in.
     */
    private static StringBuilder fillCSV(StringBuilder sb, int from, int to, String typeRel) {
        sb.append(from).append(",").append(to);
        switch (typeRel) {
            case "LINKED_TO":
                // random popularity in [0, 100)
                sb.append(",").append(r.nextInt(100));
                break;
            case "CODES_FOR":
                // random commit count in [0, 100)
                sb.append(",").append(r.nextInt(100));
                break;
            case "OWNS":
                // NOTE(review): OWNS salary uses factor 2000 while EMPLOYS uses 10;
                // confirm this asymmetry is intended.
                sb.append(",").append((r.nextInt(1000) + 1500) * 2000);
                break;
            case "EMPLOYS":
                String carShare = "no";
                if (Math.random() > 0.5) carShare = "yes";
                sb.append(",").append((r.nextInt(1000) + 1500) * 10).append(",").append(carShare);
                break;
            case "FRIENDS":
                // FRIENDS has no metadata columns.
                break;
        }
        sb.append("\n");
        return sb;
    }

    /**
     * Add remaining edges to the graph output, writing every still-present edge
     * whose (possibly inverted) orientation maps to the requested relationship.
     *
     * @param sb  - comma separated string to write for each row.
     * @param rel - the type of relationship to add.
     * @param i   - direction of the relationship - if 1 then inverse from and to indexes.
     * @return comma separated string with new information to write.
     */
    private static StringBuilder addRemainingEdges(StringBuilder sb, String rel, int i) {
        for (int a = 0; a < adjMat.length; a++) {
            for (int b = 0; b < adjMat.length; b++) {
                // FIX: the original swapped the loop variables 'from' and 'to' in
                // place on every inner iteration when i == 1, which corrupted the
                // iteration (cells were skipped or revisited and the outer counter
                // was clobbered). Derive the orientation instead of mutating the
                // loop counters.
                int from = (i == 1) ? b : a;
                int to = (i == 1) ? a : b;
                if (adjMat[from][to]) {
                    String labelFrom = labels[labelsOfNodes[from]];
                    String labelTo = labels[labelsOfNodes[to]];
                    String typeRel = workoutrel(labelFrom, labelTo);
                    if (typeRel != null && typeRel.equals(rel)) {
                        sb = fillCSV(sb, from, to, typeRel);
                        adjMat[from][to] = false;
                        adjMat[to][from] = false;
                    }
                }
            }
        }
        return sb;
    }

    /**
     * Workout the relationship between the two nodes, based on their labels only.
     * A progOwner coding for a website is randomly classified as OWNS (50%) or
     * CODES_FOR. Returns null when no relationship applies in this orientation
     * (the caller then tries the flipped edge).
     *
     * @param labelFrom - label of node where relationship is coming from.
     * @param labelTo   - label of node where relationship is going to.
     * @return String value of the type of relationship, or null if none applies.
     */
    private static String workoutrel(String labelFrom, String labelTo) {
        if (labelFrom.equals("website") && labelTo.equals("website")) {
            return "LINKED_TO";
        } else if ((labelFrom.equals("programmer") || labelFrom.equals("progOwner")) && labelTo.equals("website")) {
            if (labelFrom.equals("progOwner")) {
                if (Math.random() > 0.5) return "OWNS";
            }
            return "CODES_FOR";
        } else if (labelFrom.equals("owner") && labelTo.equals("website")) {
            return "OWNS";
        } else if ((labelFrom.equals("owner") || labelFrom.equals("progOwner")) &&
                (labelTo.equals("programmer") || labelTo.equals("progOwner"))) {
            return "EMPLOYS";
        } else if ((labelFrom.equals("programmer") || labelFrom.equals("progOwner")) &&
                (labelTo.equals("programmer") || labelTo.equals("progOwner"))) {
            return "FRIENDS";
        } else if ((labelFrom.equals("owner") || labelFrom.equals("progOwner"))
                && (labelTo.equals("owner") || labelTo.equals("progOwner"))) {
            return "FRIENDS";
        } else {
            return null;
        }
    }

    /**
     * Helper method to output the values in the adjacency matrix.
     * Note : only really for debugging and small matrices. (Currently unused;
     * renamed from the original typo 'printMatirx'.)
     *
     * @param adjMat Adjacency matrix to print out.
     */
    private static void printMatrix(int[][] adjMat) {
        int k = 0;
        System.out.print("    ");
        for (int l = 1; l <= adjMat.length; l++) {
            if (l < 9)
                System.out.print(l + "   ");
            else System.out.print(l + "  ");
        }
        System.out.println();
        for (int[] anAdjMat : adjMat) {
            System.out.print(++k + " :\t ");
            for (int j = 0; j < adjMat.length; j++) {
                System.out.print("[" + (anAdjMat[j] == 1 ? "x" : ((j + 1) == k) ? "\\" : " ") + "] ");
            }
            System.out.println();
        }
    }

    /**
     * Add edge to the adjacency matrix. No loops are allowed in the graph.
     * Indices are 1-based; the matrix is updated symmetrically and both node
     * degrees and the global edge count are maintained.
     *
     * @param from - index of node where edge coming from (1-based).
     * @param to   - index of the node where edge is going to (1-based).
     * @return True: edge added successfully. False: if edge already exists, or values 'from' and 'to' are equal.
     */
    private static boolean addEdge(int from, int to) {
        from--;
        to--;
        // edge already there or indexes the same (i.e a loop)
        if ((adjMat[from][to]) || from == to) return false;
        adjMat[from][to] = true;
        adjMat[to][from] = true;
        degreeOfVertices[to]++;
        degreeOfVertices[from]++;
        numEdges++;
        return true;
    }
}
| |
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.oauth2.config.annotation.builders;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.springframework.security.config.annotation.SecurityBuilder;
import org.springframework.security.config.annotation.SecurityConfigurerAdapter;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.oauth2.provider.ClientDetails;
import org.springframework.security.oauth2.provider.ClientDetailsService;
import org.springframework.security.oauth2.provider.client.BaseClientDetails;
/**
 * Builder for an OAuth2 client details service. Constructs either an in-memory or a JDBC-backed
 * {@link ClientDetailsService} and populates it with client registrations via the fluent
 * {@link ClientBuilder} API.
 *
 * <p>
 * @deprecated See the <a href="https://github.com/spring-projects/spring-security/wiki/OAuth-2.0-Migration-Guide">OAuth 2.0 Migration Guide</a> for Spring Security 5.
 *
 * @author Dave Syer
 *
 */
@Deprecated
public class ClientDetailsServiceBuilder<B extends ClientDetailsServiceBuilder<B>> extends
		SecurityConfigurerAdapter<ClientDetailsService, B> implements SecurityBuilder<ClientDetailsService> {

	/** Client registrations accumulated via {@link #withClient(String)}. */
	private List<ClientBuilder> registrations = new ArrayList<ClientBuilder>();

	public InMemoryClientDetailsServiceBuilder inMemory() throws Exception {
		return new InMemoryClientDetailsServiceBuilder();
	}

	public JdbcClientDetailsServiceBuilder jdbc() throws Exception {
		return new JdbcClientDetailsServiceBuilder();
	}

	@SuppressWarnings("rawtypes")
	public ClientDetailsServiceBuilder<?> clients(final ClientDetailsService clientDetailsService) throws Exception {
		// trivial builder that just hands back the supplied service
		return new ClientDetailsServiceBuilder() {
			@Override
			public ClientDetailsService build() throws Exception {
				return clientDetailsService;
			}
		};
	}

	/**
	 * Starts the registration of a new client; finish it with {@link ClientBuilder#and()}.
	 */
	public ClientBuilder withClient(String clientId) {
		ClientBuilder registration = new ClientBuilder(clientId);
		this.registrations.add(registration);
		return registration;
	}

	@Override
	public ClientDetailsService build() throws Exception {
		// materialise every pending registration, then delegate to the subclass
		for (ClientBuilder registration : registrations) {
			addClient(registration.clientId, registration.build());
		}
		return performBuild();
	}

	/** Hook for subclasses to receive each built client; no-op by default. */
	protected void addClient(String clientId, ClientDetails build) {
	}

	/** Hook for subclasses to produce the final service; unsupported here. */
	protected ClientDetailsService performBuild() {
		throw new UnsupportedOperationException("Cannot build client services (maybe use inMemory() or jdbc()).");
	}

	/**
	 * Fluent builder for a single client registration.
	 */
	public final class ClientBuilder {

		private final String clientId;

		private Collection<String> authorizedGrantTypes = new LinkedHashSet<String>();

		private Collection<String> authorities = new LinkedHashSet<String>();

		private Integer accessTokenValiditySeconds;

		private Integer refreshTokenValiditySeconds;

		private Collection<String> scopes = new LinkedHashSet<String>();

		private Collection<String> autoApproveScopes = new HashSet<String>();

		private String secret;

		private Set<String> registeredRedirectUris = new HashSet<String>();

		private Set<String> resourceIds = new HashSet<String>();

		private boolean autoApprove;

		private Map<String, Object> additionalInformation = new LinkedHashMap<String, Object>();

		private ClientBuilder(String clientId) {
			this.clientId = clientId;
		}

		/** Assembles the accumulated settings into a {@link BaseClientDetails}. */
		private ClientDetails build() {
			BaseClientDetails details = new BaseClientDetails();
			details.setClientId(clientId);
			details.setClientSecret(secret);
			details.setAuthorizedGrantTypes(authorizedGrantTypes);
			details.setAccessTokenValiditySeconds(accessTokenValiditySeconds);
			details.setRefreshTokenValiditySeconds(refreshTokenValiditySeconds);
			details.setRegisteredRedirectUri(registeredRedirectUris);
			details.setScope(scopes);
			details.setAuthorities(AuthorityUtils.createAuthorityList(authorities.toArray(new String[authorities.size()])));
			details.setResourceIds(resourceIds);
			details.setAdditionalInformation(additionalInformation);
			// the boolean flag auto-approves every scope; otherwise only the listed ones
			details.setAutoApproveScopes(autoApprove ? scopes : autoApproveScopes);
			return details;
		}

		public ClientBuilder resourceIds(String... resourceIds) {
			for (String id : resourceIds) {
				this.resourceIds.add(id);
			}
			return this;
		}

		public ClientBuilder redirectUris(String... registeredRedirectUris) {
			for (String uri : registeredRedirectUris) {
				this.registeredRedirectUris.add(uri);
			}
			return this;
		}

		public ClientBuilder authorizedGrantTypes(String... authorizedGrantTypes) {
			for (String grantType : authorizedGrantTypes) {
				this.authorizedGrantTypes.add(grantType);
			}
			return this;
		}

		public ClientBuilder accessTokenValiditySeconds(int accessTokenValiditySeconds) {
			this.accessTokenValiditySeconds = accessTokenValiditySeconds;
			return this;
		}

		public ClientBuilder refreshTokenValiditySeconds(int refreshTokenValiditySeconds) {
			this.refreshTokenValiditySeconds = refreshTokenValiditySeconds;
			return this;
		}

		public ClientBuilder secret(String secret) {
			this.secret = secret;
			return this;
		}

		public ClientBuilder scopes(String... scopes) {
			for (String s : scopes) {
				this.scopes.add(s);
			}
			return this;
		}

		public ClientBuilder authorities(String... authorities) {
			for (String a : authorities) {
				this.authorities.add(a);
			}
			return this;
		}

		public ClientBuilder autoApprove(boolean autoApprove) {
			this.autoApprove = autoApprove;
			return this;
		}

		public ClientBuilder autoApprove(String... scopes) {
			for (String s : scopes) {
				this.autoApproveScopes.add(s);
			}
			return this;
		}

		public ClientBuilder additionalInformation(Map<String, ?> map) {
			this.additionalInformation.putAll(map);
			return this;
		}

		/**
		 * Adds key/value pairs given as "key:value" (or "key=value" when no colon
		 * is present). A pair without a recognised separator is stored with a null value.
		 */
		public ClientBuilder additionalInformation(String... pairs) {
			for (String pair : pairs) {
				String separator = (!pair.contains(":") && pair.contains("=")) ? "=" : ":";
				int index = pair.indexOf(separator);
				String key;
				String value;
				if (index > 0) {
					key = pair.substring(0, index);
					value = pair.substring(index + 1);
				}
				else {
					key = pair;
					value = null;
				}
				this.additionalInformation.put(key, (Object) value);
			}
			return this;
		}

		/** Returns to the enclosing service builder. */
		public ClientDetailsServiceBuilder<B> and() {
			return ClientDetailsServiceBuilder.this;
		}
	}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cloudwatchrum.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/rum-2018-05-10/GetAppMonitorData" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetAppMonitorDataRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* An array of structures that you can use to filter the results to those that match one or more sets of key-value
* pairs that you specify.
* </p>
*/
private java.util.List<QueryFilter> filters;
/**
* <p>
* The maximum number of results to return in one operation.
* </p>
*/
private Integer maxResults;
/**
* <p>
* The name of the app monitor that collected the data that you want to retrieve.
* </p>
*/
private String name;
/**
* <p>
* Use the token returned by the previous operation to request the next page of results.
* </p>
*/
private String nextToken;
/**
* <p>
* A structure that defines the time range that you want to retrieve results from.
* </p>
*/
private TimeRange timeRange;
/**
* <p>
* An array of structures that you can use to filter the results to those that match one or more sets of key-value
* pairs that you specify.
* </p>
*
* @return An array of structures that you can use to filter the results to those that match one or more sets of
* key-value pairs that you specify.
*/
public java.util.List<QueryFilter> getFilters() {
return filters;
}
/**
* <p>
* An array of structures that you can use to filter the results to those that match one or more sets of key-value
* pairs that you specify.
* </p>
*
* @param filters
* An array of structures that you can use to filter the results to those that match one or more sets of
* key-value pairs that you specify.
*/
public void setFilters(java.util.Collection<QueryFilter> filters) {
if (filters == null) {
this.filters = null;
return;
}
this.filters = new java.util.ArrayList<QueryFilter>(filters);
}
/**
* <p>
* An array of structures that you can use to filter the results to those that match one or more sets of key-value
* pairs that you specify.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setFilters(java.util.Collection)} or {@link #withFilters(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param filters
* An array of structures that you can use to filter the results to those that match one or more sets of
* key-value pairs that you specify.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withFilters(QueryFilter... filters) {
if (this.filters == null) {
setFilters(new java.util.ArrayList<QueryFilter>(filters.length));
}
for (QueryFilter ele : filters) {
this.filters.add(ele);
}
return this;
}
/**
* <p>
* An array of structures that you can use to filter the results to those that match one or more sets of key-value
* pairs that you specify.
* </p>
*
* @param filters
* An array of structures that you can use to filter the results to those that match one or more sets of
* key-value pairs that you specify.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withFilters(java.util.Collection<QueryFilter> filters) {
setFilters(filters);
return this;
}
/**
* <p>
* The maximum number of results to return in one operation.
* </p>
*
* @param maxResults
* The maximum number of results to return in one operation.
*/
public void setMaxResults(Integer maxResults) {
this.maxResults = maxResults;
}
/**
* <p>
* The maximum number of results to return in one operation.
* </p>
*
* @return The maximum number of results to return in one operation.
*/
public Integer getMaxResults() {
return this.maxResults;
}
/**
* <p>
* The maximum number of results to return in one operation.
* </p>
*
* @param maxResults
* The maximum number of results to return in one operation.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withMaxResults(Integer maxResults) {
setMaxResults(maxResults);
return this;
}
/**
* <p>
* The name of the app monitor that collected the data that you want to retrieve.
* </p>
*
* @param name
* The name of the app monitor that collected the data that you want to retrieve.
*/
public void setName(String name) {
this.name = name;
}
/**
* <p>
* The name of the app monitor that collected the data that you want to retrieve.
* </p>
*
* @return The name of the app monitor that collected the data that you want to retrieve.
*/
public String getName() {
return this.name;
}
/**
* <p>
* The name of the app monitor that collected the data that you want to retrieve.
* </p>
*
* @param name
* The name of the app monitor that collected the data that you want to retrieve.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withName(String name) {
setName(name);
return this;
}
/**
* <p>
* Use the token returned by the previous operation to request the next page of results.
* </p>
*
* @param nextToken
* Use the token returned by the previous operation to request the next page of results.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* Use the token returned by the previous operation to request the next page of results.
* </p>
*
* @return Use the token returned by the previous operation to request the next page of results.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* Use the token returned by the previous operation to request the next page of results.
* </p>
*
* @param nextToken
* Use the token returned by the previous operation to request the next page of results.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* <p>
* A structure that defines the time range that you want to retrieve results from.
* </p>
*
* @param timeRange
* A structure that defines the time range that you want to retrieve results from.
*/
public void setTimeRange(TimeRange timeRange) {
this.timeRange = timeRange;
}
/**
* <p>
* A structure that defines the time range that you want to retrieve results from.
* </p>
*
* @return A structure that defines the time range that you want to retrieve results from.
*/
public TimeRange getTimeRange() {
return this.timeRange;
}
/**
* <p>
* A structure that defines the time range that you want to retrieve results from.
* </p>
*
* @param timeRange
* A structure that defines the time range that you want to retrieve results from.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public GetAppMonitorDataRequest withTimeRange(TimeRange timeRange) {
setTimeRange(timeRange);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getFilters() != null)
sb.append("Filters: ").append(getFilters()).append(",");
if (getMaxResults() != null)
sb.append("MaxResults: ").append(getMaxResults()).append(",");
if (getName() != null)
sb.append("Name: ").append(getName()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken()).append(",");
if (getTimeRange() != null)
sb.append("TimeRange: ").append(getTimeRange());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetAppMonitorDataRequest == false)
return false;
GetAppMonitorDataRequest other = (GetAppMonitorDataRequest) obj;
if (other.getFilters() == null ^ this.getFilters() == null)
return false;
if (other.getFilters() != null && other.getFilters().equals(this.getFilters()) == false)
return false;
if (other.getMaxResults() == null ^ this.getMaxResults() == null)
return false;
if (other.getMaxResults() != null && other.getMaxResults().equals(this.getMaxResults()) == false)
return false;
if (other.getName() == null ^ this.getName() == null)
return false;
if (other.getName() != null && other.getName().equals(this.getName()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
if (other.getTimeRange() == null ^ this.getTimeRange() == null)
return false;
if (other.getTimeRange() != null && other.getTimeRange().equals(this.getTimeRange()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getFilters() == null) ? 0 : getFilters().hashCode());
hashCode = prime * hashCode + ((getMaxResults() == null) ? 0 : getMaxResults().hashCode());
hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
hashCode = prime * hashCode + ((getTimeRange() == null) ? 0 : getTimeRange().hashCode());
return hashCode;
}
    /** Returns a copy of this request by delegating to the superclass clone implementation. */
    @Override
    public GetAppMonitorDataRequest clone() {
        return (GetAppMonitorDataRequest) super.clone();
    }
}
| |
/*
* JScience - Java(TM) Tools and Libraries for the Advancement of Sciences.
* Copyright (C) 2007 - JScience (http://jscience.org/)
* All rights reserved.
*
* Permission to use, copy, modify, and distribute this software is
* freely granted, provided that this notice is preserved.
*/
package javax.measure;
import java.math.BigDecimal;
import java.math.MathContext;
import javax.measure.converter.AddConverter;
import javax.measure.converter.RationalConverter;
import javax.measure.converter.UnitConverter;
import javax.measure.quantity.Quantity;
import javax.measure.unit.Unit;
/**
* <p>
* This class represents a measure whose value is an arbitrary-precision decimal number.
* </p>
*
* <p>
* When converting, applications may supply the <code>java.math.Context</code>:
*
* <pre>
* DecimalMeasure<Velocity> c = DecimalMeasure.valueOf("299792458 m/s");
* DecimalMeasure<Velocity> milesPerHour = c.to(MILES_PER_HOUR, MathContext.DECIMAL128);
* System.out.println(milesPerHour);
*
* > 670616629.3843951324266284896206156 mph
* </pre>
*
* @author <a href="mailto:jean-marie@dautelle.com">Jean-Marie Dautelle</a>
* @version 4.3, October 3, 2007
* @param <Q>
* The quantity
*/
public class DecimalMeasure<Q extends Quantity> extends Measure<BigDecimal, Q> {
    /**
     * Holds the BigDecimal value.
     */
    private final BigDecimal value;
    /**
     * Holds the unit.
     */
    private final Unit<Q> unit;
    /**
     * Creates a decimal measure for the specified number stated in the specified unit.
     *
     * @param value
     *            The value
     * @param unit
     *            The unit
     */
    public DecimalMeasure(BigDecimal value, Unit<Q> unit) {
        this.value = value;
        this.unit = unit;
    }
    /**
     * Returns the decimal measure for the specified number stated in the specified unit.
     *
     * @param decimal
     *            the measurement value.
     * @param unit
     *            the measurement unit.
     * @param <Q>
     *            The quantity
     */
    public static <Q extends Quantity> DecimalMeasure<Q> valueOf(BigDecimal decimal, Unit<Q> unit) {
        return new DecimalMeasure<Q>(decimal, unit);
    }
    /**
     * @param csq
     *            the decimal measure representation (including unit if any).
     * @param <Q>
     *            The quantity
     * @return the decimal measure for the specified textual representation. This method first reads the
     *         <code>BigDecimal</code> value, then the unit if any (value and unit should be separated by white spaces).
     * @throws NumberFormatException
     *             if the specified character sequence is not a valid representation of decimal measure.
     */
    @SuppressWarnings("unchecked")
    // NOTE(review): the unchecked cast assumes the parsed unit actually measures Q;
    // the caller is responsible for choosing a matching quantity type.
    public static <Q extends Quantity> DecimalMeasure<Q> valueOf(CharSequence csq) {
        String str = csq.toString();
        // Split the input at the first whitespace run: the prefix is the number,
        // the first non-whitespace character after it starts the unit (if any).
        int numberLength = str.length();
        int unitStartIndex = -1;
        for (int i = 0; i < str.length(); i++) {
            if (Character.isWhitespace(str.charAt(i))) {
                for (int j = i + 1; j < str.length(); j++) {
                    if (!Character.isWhitespace(str.charAt(j))) {
                        unitStartIndex = j;
                        break;
                    }
                }
                numberLength = i;
                break;
            }
        }
        BigDecimal decimal = new BigDecimal(str.substring(0, numberLength));
        // No unit text present -> the dimensionless unit ONE.
        Unit<?> unit = Unit.ONE;
        if (unitStartIndex > 0) {
            unit = Unit.valueOf(str.substring(unitStartIndex));
        }
        return new DecimalMeasure<Q>(decimal, (Unit<Q>) unit);
    }
    @Override
    public Unit<Q> getUnit() {
        return this.unit;
    }
    @Override
    public BigDecimal getValue() {
        return this.value;
    }
    /**
     * Returns the decimal measure equivalent to this measure but stated in the specified unit. This method will raise
     * an ArithmeticException if the resulting measure does not have a terminating decimal expansion.
     *
     * @param unit
     *            the new measurement unit.
     * @return the measure stated in the specified unit.
     * @throws ArithmeticException
     *             if the converted measure value does not have a terminating decimal expansion
     * @see #to(Unit, MathContext)
     */
    @Override
    public DecimalMeasure<Q> to(Unit<Q> unit) {
        // Delegates with a null MathContext, i.e. exact BigDecimal arithmetic.
        return to(unit, null);
    }
    /**
     * Returns the decimal measure equivalent to this measure but stated in the specified unit, the conversion is
     * performed using the specified math context.
     *
     * @param unit
     *            the new measurement unit.
     * @param mathContext
     *            the mathContext used to convert <code>BigDecimal</code> values or <code>null</code> if none.
     * @return the measure stated in the specified unit.
     * @throws ArithmeticException
     *             if the result is inexact but the rounding mode is <code>MathContext.UNNECESSARY</code> or
     *             <code>mathContext.precision == 0</code> and the quotient has a non-terminating decimal expansion.
     */
    public DecimalMeasure<Q> to(Unit<Q> unit, MathContext mathContext) {
        // Identity shortcut: no conversion needed when target unit equals ours.
        if ((unit == this.unit) || (unit.equals(this.unit))) {
            return this;
        }
        UnitConverter cvtr = this.unit.getConverterTo(unit);
        if (cvtr instanceof RationalConverter) {
            // Exact rational scaling: value * dividend / divisor in BigDecimal.
            RationalConverter factor = (RationalConverter) cvtr;
            BigDecimal dividend = BigDecimal.valueOf(factor.getDividend());
            BigDecimal divisor = BigDecimal.valueOf(factor.getDivisor());
            BigDecimal result = mathContext == null ? this.value.multiply(dividend).divide(divisor)
                    : this.value.multiply(dividend, mathContext).divide(divisor,
                            mathContext);
            return new DecimalMeasure<Q>(result, unit);
        } else if (cvtr.isLinear()) {
            // Linear (pure scaling) converter: the factor is the image of 1.0,
            // so precision of the factor itself is limited to a double.
            BigDecimal factor = BigDecimal.valueOf(cvtr.convert(1.0));
            BigDecimal result = mathContext == null ? this.value.multiply(factor) : this.value.multiply(factor,
                    mathContext);
            return new DecimalMeasure<Q>(result, unit);
        } else if (cvtr instanceof AddConverter) {
            // Offset-only converter: add the (double-valued) offset.
            BigDecimal offset = BigDecimal.valueOf(((AddConverter) cvtr).getOffset());
            BigDecimal result = mathContext == null ? this.value.add(offset) : this.value.add(offset, mathContext);
            return new DecimalMeasure<Q>(result, unit);
        } else { // Non-linear and not an offset, convert the double value.
            BigDecimal result = BigDecimal.valueOf(cvtr.convert(this.value.doubleValue()));
            return new DecimalMeasure<Q>(result, unit);
        }
    }
    @Override
    public double doubleValue(Unit<Q> unit) {
        if ((unit == this.unit) || (unit.equals(this.unit))) {
            return this.value.doubleValue();
        }
        // Conversion here goes through double arithmetic, not BigDecimal.
        return this.unit.getConverterTo(unit).convert(this.value.doubleValue());
    }
    @Override
    public Measurable<Q> add(Measurable<Q> other) {
        // Sum is expressed in this measure's unit. A DecimalMeasure operand is
        // first converted exactly (may throw ArithmeticException via to(unit));
        // any other Measurable goes through its doubleValue in this unit.
        if (other instanceof DecimalMeasure) {
            return new DecimalMeasure<Q>(value.add(((DecimalMeasure<Q>) other).to(unit).value), unit);
        } else {
            return new DecimalMeasure<Q>(value.add(BigDecimal.valueOf(other.doubleValue(unit))), unit);
        }
    }
    private static final long serialVersionUID = 1L;
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import static com.google.cloud.compute.v1.RegionNetworkEndpointGroupsClient.ListPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.testing.MockHttpService;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ApiException;
import com.google.api.gax.rpc.ApiExceptionFactory;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.testing.FakeStatusCode;
import com.google.cloud.compute.v1.Operation.Status;
import com.google.cloud.compute.v1.stub.HttpJsonRegionNetworkEndpointGroupsStub;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
// Generated unit tests for RegionNetworkEndpointGroupsClient: each RPC is exercised
// against a shared MockHttpService with one success case and one error case.
public class RegionNetworkEndpointGroupsClientTest {
  private static MockHttpService mockService;
  private static RegionNetworkEndpointGroupsClient client;
  @BeforeClass
  public static void startStaticServer() throws IOException {
    // Build one mock HTTP transport and one client for the whole test class.
    mockService =
        new MockHttpService(
            HttpJsonRegionNetworkEndpointGroupsStub.getMethodDescriptors(),
            RegionNetworkEndpointGroupsSettings.getDefaultEndpoint());
    RegionNetworkEndpointGroupsSettings settings =
        RegionNetworkEndpointGroupsSettings.newBuilder()
            .setTransportChannelProvider(
                RegionNetworkEndpointGroupsSettings.defaultHttpJsonTransportProviderBuilder()
                    .setHttpTransport(mockService)
                    .build())
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = RegionNetworkEndpointGroupsClient.create(settings);
  }
  @AfterClass
  public static void stopServer() {
    client.close();
  }
  @Before
  public void setUp() {}
  @After
  public void tearDown() throws Exception {
    // Clear queued responses/exceptions so tests stay independent.
    mockService.reset();
  }
  @Test
  public void deleteTest() throws Exception {
    Operation expectedResponse =
        Operation.newBuilder()
            .setClientOperationId("clientOperationId-1230366697")
            .setCreationTimestamp("creationTimestamp-370203401")
            .setDescription("description-1724546052")
            .setEndTime("endTime-1607243192")
            .setError(Error.newBuilder().build())
            .setHttpErrorMessage("httpErrorMessage1577303431")
            .setHttpErrorStatusCode(0)
            .setId(3355)
            .setInsertTime("insertTime966165798")
            .setKind("kind3292052")
            .setName("name3373707")
            .setOperationGroupId("operationGroupId1716161683")
            .setOperationType("operationType91999553")
            .setProgress(-1001078227)
            .setRegion("region-934795532")
            .setSelfLink("selfLink1191800166")
            .setStartTime("startTime-2129294769")
            .setStatus(Status.DONE)
            .setStatusMessage("statusMessage-958704715")
            .setTargetId(-815576439)
            .setTargetLink("targetLink486368555")
            .setUser("user3599307")
            .addAllWarnings(new ArrayList<Warnings>())
            .setZone("zone3744684")
            .build();
    mockService.addResponse(expectedResponse);
    String project = "project-6911";
    String region = "region-9622";
    String networkEndpointGroup = "networkEndpointGroup-386";
    Operation actualResponse = client.deleteAsync(project, region, networkEndpointGroup).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    // Exactly one HTTP request should have been issued.
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    // The API-client header must match the expected gapic header pattern.
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  @Test
  public void deleteExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String project = "project-6911";
      String region = "region-9622";
      String networkEndpointGroup = "networkEndpointGroup-386";
      client.deleteAsync(project, region, networkEndpointGroup).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // Expected exception: the async call wraps the injected ApiException.
    }
  }
  @Test
  public void getTest() throws Exception {
    NetworkEndpointGroup expectedResponse =
        NetworkEndpointGroup.newBuilder()
            .putAllAnnotations(new HashMap<String, String>())
            .setAppEngine(NetworkEndpointGroupAppEngine.newBuilder().build())
            .setCloudFunction(NetworkEndpointGroupCloudFunction.newBuilder().build())
            .setCloudRun(NetworkEndpointGroupCloudRun.newBuilder().build())
            .setCreationTimestamp("creationTimestamp-370203401")
            .setDefaultPort(-650363969)
            .setDescription("description-1724546052")
            .setId(3355)
            .setKind("kind3292052")
            .setName("name3373707")
            .setNetwork("network1843485230")
            .setNetworkEndpointType("networkEndpointType1733109693")
            .setPscTargetService("pscTargetService-1789055740")
            .setRegion("region-934795532")
            .setSelfLink("selfLink1191800166")
            .setSize(3530753)
            .setSubnetwork("subnetwork-1302785042")
            .setZone("zone3744684")
            .build();
    mockService.addResponse(expectedResponse);
    String project = "project-6911";
    String region = "region-9622";
    String networkEndpointGroup = "networkEndpointGroup-386";
    NetworkEndpointGroup actualResponse = client.get(project, region, networkEndpointGroup);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  @Test
  public void getExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String project = "project-6911";
      String region = "region-9622";
      String networkEndpointGroup = "networkEndpointGroup-386";
      client.get(project, region, networkEndpointGroup);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  @Test
  public void insertTest() throws Exception {
    Operation expectedResponse =
        Operation.newBuilder()
            .setClientOperationId("clientOperationId-1230366697")
            .setCreationTimestamp("creationTimestamp-370203401")
            .setDescription("description-1724546052")
            .setEndTime("endTime-1607243192")
            .setError(Error.newBuilder().build())
            .setHttpErrorMessage("httpErrorMessage1577303431")
            .setHttpErrorStatusCode(0)
            .setId(3355)
            .setInsertTime("insertTime966165798")
            .setKind("kind3292052")
            .setName("name3373707")
            .setOperationGroupId("operationGroupId1716161683")
            .setOperationType("operationType91999553")
            .setProgress(-1001078227)
            .setRegion("region-934795532")
            .setSelfLink("selfLink1191800166")
            .setStartTime("startTime-2129294769")
            .setStatus(Status.DONE)
            .setStatusMessage("statusMessage-958704715")
            .setTargetId(-815576439)
            .setTargetLink("targetLink486368555")
            .setUser("user3599307")
            .addAllWarnings(new ArrayList<Warnings>())
            .setZone("zone3744684")
            .build();
    mockService.addResponse(expectedResponse);
    String project = "project-6911";
    String region = "region-9622";
    NetworkEndpointGroup networkEndpointGroupResource = NetworkEndpointGroup.newBuilder().build();
    Operation actualResponse =
        client.insertAsync(project, region, networkEndpointGroupResource).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  @Test
  public void insertExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String project = "project-6911";
      String region = "region-9622";
      NetworkEndpointGroup networkEndpointGroupResource = NetworkEndpointGroup.newBuilder().build();
      client.insertAsync(project, region, networkEndpointGroupResource).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      // Expected exception: the async call wraps the injected ApiException.
    }
  }
  @Test
  public void listTest() throws Exception {
    // A single-page response (empty next-page token) with one element.
    NetworkEndpointGroup responsesElement = NetworkEndpointGroup.newBuilder().build();
    NetworkEndpointGroupList expectedResponse =
        NetworkEndpointGroupList.newBuilder()
            .setNextPageToken("")
            .addAllItems(Arrays.asList(responsesElement))
            .build();
    mockService.addResponse(expectedResponse);
    String project = "project-6911";
    String region = "region-9622";
    ListPagedResponse pagedListResponse = client.list(project, region);
    List<NetworkEndpointGroup> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getItemsList().get(0), resources.get(0));
    List<String> actualRequests = mockService.getRequestPaths();
    Assert.assertEquals(1, actualRequests.size());
    String apiClientHeaderKey =
        mockService
            .getRequestHeaders()
            .get(ApiClientHeaderProvider.getDefaultApiClientHeaderKey())
            .iterator()
            .next();
    Assert.assertTrue(
        GaxHttpJsonProperties.getDefaultApiClientHeaderPattern()
            .matcher(apiClientHeaderKey)
            .matches());
  }
  @Test
  public void listExceptionTest() throws Exception {
    ApiException exception =
        ApiExceptionFactory.createException(
            new Exception(), FakeStatusCode.of(StatusCode.Code.INVALID_ARGUMENT), false);
    mockService.addException(exception);
    try {
      String project = "project-6911";
      String region = "region-9622";
      client.list(project, region);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.