code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package engine;
import org.lwjgl.opengl._
import utils._
/**
* Each camera is identified by a unique-ID, this object is
* used to create them
*/
/**
 * Each camera is identified by a unique ID; this object hands them out.
 * IDs start at 1 and increase monotonically.
 *
 * NOTE(review): the counter is not synchronized — fine as long as cameras
 * are only ever created from a single (rendering) thread; confirm.
 */
private object CamID {
  private var currentId: Int = 0

  /** Returns a fresh, never-before-returned camera ID. */
  def getNewID: Int = {
    currentId += 1
    currentId // last expression is the result; `return` is non-idiomatic Scala
  }
}
/**
 * A perspective camera defined by a world-space position and pitch/heading/roll
 * Euler angles. Standard OpenGL conventions are assumed (right-handed, the
 * camera looks down -z). Every mutator goes through invalidate(), which
 * rebuilds the cached rotation quaternion and the world-space frustum planes.
 */
class Camera () {
  //this is just used as a cache, the real rotation is pitch/heading below
  private var _rotation = new Quaternion(0.0f, Vector3(0, 1, 0))
  private var position = new Vector3
  private var frustum: Frustum = null
  private var pitch: Float = 0.0f   //rotation around x axis
  private var heading: Float = 0.0f //rotation around y axis
  private var roll: Float = 0.0f    //rotation around z axis
  val hFov: Float = 90.0f           //horizontal field of view, in degrees
  //var vFov : Float = 0.0f
  var aspectRatio: Float = 4.0f / 3.0f
  var zNear: Float = 0.1f  //zNear MUST be positive
  var zFar: Float = 500.0f //zFar MUST be positive
  var e: Float = 0.0f      //focal length, recomputed from hFov by invalidate()
  var id: Int = CamID.getNewID

  def getPitch () = pitch
  def getHeading () = heading
  def getRoll () = roll

  //Once this Camera is registered to the Renderer, this will get called when
  //aspect ratio changes
  def aspectRatioChanged (newAR: Float) {
    aspectRatio = newAR
    invalidate
  }

  def setPosition (v: Vector3) = {
    position.load(v)
    invalidate
  }

  def setPitch (p: Float) {
    pitch = p
    invalidate
  }

  def setHeading (h: Float) {
    heading = h
    invalidate
  }

  def setRoll (r: Float) {
    roll = r
    invalidate
  }

  def changePitch (p: Float) {
    pitch += p
    invalidate
  }

  def changeHeading (h: Float) {
    heading += h
    invalidate
  }

  def getRotation = _rotation
  def getPosition = position

  /**
   * Move the camera with a movement expressed in world coordinates
   * @param v the movement relative to the world coordinate system
   */
  def move (v: Vector3) = {
    position += v
    invalidate
  }

  /**
   * Move the camera with a movement expressed on its local axis
   * @param v the movement relative to the camera's local axis
   */
  def moveRelative (v: Vector3) = {
    position += getRotation.rotate(v)
    invalidate
  }

  /**
   * Recreate the rotation quaternion and the frustum planes after a
   * movement/rotation of the camera. Also clamps pitch to [-pi/2, pi/2].
   */
  private def invalidate = {
    //calculate new rotation
    pitch = MathUtils.clamp(pitch, -MathUtils.PI_2, MathUtils.PI_2).toFloat
    val qX = new Quaternion(pitch, Vector3(1, 0, 0))
    val qY = new Quaternion(-heading, Vector3(0, 1, 0))
    val qZ = new Quaternion(roll, Vector3(0, 0, 1))
    _rotation = (qZ * qY * qX).getNormalized
    // calculate new frustum planes, standard OpenGL is assumed, that is :
    // - x points right
    // - y points upward
    // - z points in the opposite direction that in which the camera points
    // => right-handed
    e = 1.0f / scala.math.tan((hFov * MathUtils.DEG_TO_RAD) / 2.0f).toFloat
    //vFov = 2.0f*scala.math.atan(aspectRatio/e).toFloat
    //Frustum planes are in world coordinates
    val localZ = getRotation.zAxis
    frustum = new Frustum(position,
      new Plane(position - (localZ * zNear), getRotation.rotate(new Vector3(0, 0, -1))), //near
      new Plane(position - (localZ * zFar), getRotation.rotate(new Vector3(0, 0, 1))),   //far
      new Plane(position, getRotation.rotate(new Vector3(e, 0, -1))) ::                  //left
      new Plane(position, getRotation.rotate(new Vector3(-e, 0, -1))) ::                 //right
      new Plane(position, getRotation.rotate(new Vector3(0, -e, -aspectRatio))) ::       //top
      new Plane(position, getRotation.rotate(new Vector3(0, e, -aspectRatio))) :: Nil)   //bottom
  }

  /**
   * Get the coordinates of the four edges of an axis-aligned rectangle that
   * represents the near plane. This can be used to get the four parameters to
   * pass to glFrustum
   * @return a Rectangle with the following coordinates :
   *         yMax
   *     |---------|
   *     |         |
   * xMin|         |xMax
   *     |---------|
   *         yMin
   */
  def getNearRect: Rectangle = {
    val rightX = zNear / e
    val topY = (zNear * aspectRatio) / e
    new Rectangle(rightX, -rightX, topY, -topY) // no `return`: last expression is the result
  }

  /**
   * Very similar to getNearRect but for the far rect
   */
  def getFarRect: Rectangle = getFarRect(zFar)

  /**
   * Far-plane rectangle at an arbitrary distance.
   * @param far used instead of zFar for the far plane distance (useful for drawing)
   */
  def getFarRect (far: Float): Rectangle = {
    val rightX = far / e
    val topY = far * aspectRatio / e
    new Rectangle(rightX, -rightX, topY, -topY)
  }

  /**
   * Draw a debug wireframe of the frustum, with the far plane drawn at
   * distance `scale` instead of zFar (drawing at zFar would be useless).
   * @param scale distance at which the far plane is drawn
   */
  def drawFrustum (scale: Float) = {
    GL11.glPushMatrix
    //transform to go from cam space to model space
    GL11.glTranslatef(position.x, position.y, position.z)
    val matrix = getRotation.getMatrix
    GL11.glMultMatrix(matrix.getFloatBuffer)
    //draw cam space axis
    // NOTE(review): `position` is passed while the modelview is already
    // translated by `position` — verify drawAxis expects world coordinates.
    Renderer.drawAxis(position, getRotation, 5.0f)
    GL11.glColor3f(0.0f, 0.0f, 1.0f)
    GL11.glPolygonMode(GL11.GL_FRONT_AND_BACK, GL11.GL_LINE)
    GL11.glDisable(GL11.GL_CULL_FACE)
    //draw near plane
    val nRect = getNearRect
    GL11.glBegin(GL11.GL_QUADS)
    GL11.glVertex3f(nRect.xMin, nRect.yMin, -zNear)
    GL11.glVertex3f(nRect.xMin, nRect.yMax, -zNear)
    GL11.glVertex3f(nRect.xMax, nRect.yMax, -zNear)
    GL11.glVertex3f(nRect.xMax, nRect.yMin, -zNear)
    GL11.glEnd()
    //draw far plane, we don't really draw it at zFar for obvious reason
    val fRect = getFarRect(scale)
    GL11.glBegin(GL11.GL_QUADS)
    GL11.glVertex3f(fRect.xMin, fRect.yMin, -scale)
    GL11.glVertex3f(fRect.xMin, fRect.yMax, -scale)
    GL11.glVertex3f(fRect.xMax, fRect.yMax, -scale)
    GL11.glVertex3f(fRect.xMax, fRect.yMin, -scale)
    GL11.glEnd()
    //draw side planes
    GL11.glBegin(GL11.GL_QUADS)
    //right
    // BUGFIX: this quad used xMin everywhere and so duplicated the left quad;
    // the right side of the frustum lies at xMax.
    GL11.glVertex3f(nRect.xMax, nRect.yMin, -zNear)
    GL11.glVertex3f(nRect.xMax, nRect.yMax, -zNear)
    GL11.glVertex3f(fRect.xMax, fRect.yMax, -scale)
    GL11.glVertex3f(fRect.xMax, fRect.yMin, -scale)
    //left
    GL11.glVertex3f(nRect.xMin, nRect.yMin, -zNear)
    GL11.glVertex3f(nRect.xMin, nRect.yMax, -zNear)
    GL11.glVertex3f(fRect.xMin, fRect.yMax, -scale)
    GL11.glVertex3f(fRect.xMin, fRect.yMin, -scale)
    //top
    GL11.glVertex3f(nRect.xMin, nRect.yMax, -zNear)
    GL11.glVertex3f(fRect.xMin, fRect.yMax, -scale)
    GL11.glVertex3f(fRect.xMax, fRect.yMax, -scale)
    GL11.glVertex3f(nRect.xMax, nRect.yMax, -zNear)
    //bottom
    // BUGFIX: the far vertices used nRect.yMin instead of fRect.yMin, and the
    // fourth vertex repeated the first (xMin) instead of mirroring top (xMax).
    GL11.glVertex3f(nRect.xMin, nRect.yMin, -zNear)
    GL11.glVertex3f(fRect.xMin, fRect.yMin, -scale)
    GL11.glVertex3f(fRect.xMax, fRect.yMin, -scale)
    GL11.glVertex3f(nRect.xMax, nRect.yMin, -zNear)
    GL11.glEnd()
    /* draw normals
     * middle is the average of the sides extremities
     * planes in the frustum struct are given in world coords, so inverse the rotation to get
     * them back in local coords
     */
    def _drawNormal0(middle: Vector3, norm: Vector3): Unit = Renderer.drawLine(middle, middle + getRotation.getConjugate.rotate(norm * 5), COL_BLACK, 1.0f)
    def _drawNormal(middle: Vector3, planeNum: Int): Unit = _drawNormal0(middle, frustum.sidePlanes(planeNum).normal)
    //left (sidePlanes(0), see construction order in invalidate)
    _drawNormal(new Vector3((nRect.xMin + fRect.xMin) / 2, (nRect.yMin + nRect.yMax + fRect.yMax + fRect.yMin) / 4, -(zNear + scale) / 2), 0)
    //right
    _drawNormal(new Vector3((nRect.xMax + fRect.xMax) / 2, (nRect.yMin + nRect.yMax + fRect.yMax + fRect.yMin) / 4, -(zNear + scale) / 2), 1)
    //top
    _drawNormal(new Vector3((nRect.xMin + fRect.xMin + fRect.xMax + nRect.xMax) / 4, (nRect.yMax + fRect.yMax) / 2, -(zNear + scale) / 2), 2)
    //bottom
    _drawNormal(new Vector3((nRect.xMin + fRect.xMin + fRect.xMax + nRect.xMax) / 4, (nRect.yMin + fRect.yMin) / 2, -(zNear + scale) / 2), 3)
    //near
    _drawNormal0(new Vector3((nRect.xMin + nRect.xMax) / 2, (nRect.yMin + nRect.yMax) / 2, -zNear), frustum.nearPlane.normal)
    //far
    _drawNormal0(new Vector3((fRect.xMin + fRect.xMax) / 2, (fRect.yMin + fRect.yMax) / 2, -scale), frustum.farPlane.normal)
    GL11.glEnable(GL11.GL_CULL_FACE)
    GL11.glPolygonMode(GL11.GL_FRONT_AND_BACK, GL11.GL_FILL)
    GL11.glColor3f(1.0f, 1.0f, 1.0f)
    GL11.glPopMatrix
  }

  // ROBUSTNESS: build an initial focal length and frustum so they are valid
  // even before the first setter call (previously `frustum` stayed null and
  // `e` stayed 0 until the camera was first moved/rotated).
  invalidate
}
| julienr/scalamd5 | src/main/scala/engine/camera.scala | Scala | bsd-2-clause | 8,191 |
package controllers.application
import actors.debug.{DebugFlightsActor, MessageQuery, MessageResponse}
import actors.persistent.arrivals.{AclForecastArrivalsActor, CirriumLiveArrivalsActor, PortForecastArrivalsActor, PortLiveArrivalsActor}
import akka.actor.Props
import akka.pattern.ask
import controllers.Application
import uk.gov.homeoffice.drt.auth.Roles.Debug
import play.api.mvc.{Action, AnyContent}
import server.protobuf.messages.CrunchState.FlightsWithSplitsDiffMessage
import services.{ActorTree, SDate}
import scala.collection.SortedMap
/**
 * Debug-only endpoints (require the Debug role): dump the actor tree, and
 * browse persisted flight messages for a persistence id at a point in time,
 * rendered as crude navigable HTML.
 */
trait WithDebug {
  self: Application =>

  /** Renders the actor system's tree as plain text. */
  def getActorTree: Action[AnyContent] = authByRole(Debug) {
    Action { _ =>
      Ok(ActorTree.get().toString)
    }
  }

  /**
   * Shows up to `messages` persisted messages for `persistenceId`, replayed up
   * to the point in time parsed from `dateString`, plus navigation links for
   * switching actor, time window and message count.
   */
  def getMessagesForFlightPersistenceIdAtTime(persistenceId: String, dateString: String, messages: Int): Action[AnyContent] = authByRole(Debug) {
    Action.async { _ =>
      val pit = SDate(dateString)
      // Display label -> persistence id of each browsable actor.
      val persistenceIds = SortedMap(
        "ACL" -> AclForecastArrivalsActor.persistenceId,
        "Port Forecast" -> PortForecastArrivalsActor.persistenceId,
        "Cirium Live" -> CirriumLiveArrivalsActor.persistenceId,
        "Port Live" -> PortLiveArrivalsActor.persistenceId,
        "Crunch State" -> "crunch-state",
        "Flight State" -> "flight-state",
      ) ++ airportConfig.terminals.map(t => {
        // BUGFIX: the label must be unique per terminal; with a constant key
        // the map kept only the last terminal's entry.
        s"Terminal Day Flight $t (for snapshot day)" -> f"terminal-flights-${t.toString.toLowerCase}-${pit.getFullYear()}-${pit.getMonth()}%02d-${pit.getDate()}%02d"
      })
      // BUGFIX: reject ids that are NOT known. The original check was inverted
      // (it threw when the id WAS found) and compared against the display
      // labels (keys) instead of the persistence ids (values), so it never fired.
      if (!persistenceIds.values.exists(_ == persistenceId)) throw new Exception("Invalid actor")
      // NOTE(review): a fresh actor is spawned per request and never stopped —
      // consider stopping it once the response has been built.
      val actor = system.actorOf(Props(new DebugFlightsActor(persistenceId, Option(pit.millisSinceEpoch))))
      val actorSelection = persistenceIds.map {
        case (feed, id) =>
          s"<a href='/debug/flights/$id/${pit.toISOString()}/$messages'>$feed</a></br>"
      }.mkString("\\n")
      val timeNavigation =
        s"<a href='/debug/flights/$persistenceId/${pit.addDays(-1).toISOString()}/$messages'>-1 day</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.addHours(-1).toISOString()}/$messages'>-1 hour</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.addMinutes(-1).toISOString()}/$messages'>-1 mins</a> " +
        pit.toISOString() +
        s" <a href='/debug/flights/$persistenceId/${pit.addMinutes(1).toISOString()}/$messages'>+1 mins</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.addHours(1).toISOString()}/$messages'>+1 hour</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.addDays(1).toISOString()}/$messages'>+1 day</a> "
      val numMessagesNavigation =
        s"<a href='/debug/flights/$persistenceId/${pit.toISOString()}/10'>10</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.toISOString()}/50'>50</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.toISOString()}/500'>500</a> " +
        s"<a href='/debug/flights/$persistenceId/${pit.toISOString()}/1000'>1000</a> "
      val navigation =
        "<h3>Actor Selection</h3>" + actorSelection +
        "<h3>Time</h3>" + timeNavigation +
        "<h3>Messages to show</h3>" + numMessagesNavigation
      (actor ? MessageQuery(messages)).map {
        case m: MessageResponse =>
          val debugHtml = m.messages.map {
            case f: FlightsWithSplitsDiffMessage =>
              val heading = s"<h3>Created at: ${f.createdAt.map(SDate(_).toISOString()).getOrElse("Missing")}</h3>"
              val updates = if (f.updates.nonEmpty) {
                "<table border='1' cellpadding='5' cellspacing='0'><tr>" +
                s"<td colspan='14'>Updates: </td>" +
                "</tr>" +
                "<tr>" +
                "<td>Flight code</td>" +
                "<td>Terminal</td>" +
                "<td>Scheduled</td>" +
                "<td>Est</td>" +
                "<td>Est Chox</td>" +
                "<td>Act Chox</td>" +
                "<td>Est PCP</td>" +
                "<td>Status</td>" +
                "<td>Gate</td>" +
                "<td>Stand</td>" +
                "<td>Act Pax</td>" +
                "<td>Api Pax</td>" +
                "<td>Max Pax</td>" +
                "<td>Tran Pax</td>" +
                "</tr>" +
                f.updates.map(a => {
                  "<tr>" +
                  "<td>" + a.getFlight.getIATA + "</td>" +
                  "<td>" + a.getFlight.getTerminal + "</td>" +
                  "<td>" + SDate(a.getFlight.getScheduled).toISOString + "</td>" +
                  "<td>" + a.getFlight.estimated.map(SDate(_).toISOString()).getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.estimatedChox.map(SDate(_).toISOString()).getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.actualChox.map(SDate(_).toISOString()).getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.pcpTime.map(SDate(_).toISOString()).getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.status.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.gate.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.stand.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.actPax.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.apiPax.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.maxPax.getOrElse("-") + "</td>" +
                  "<td>" + a.getFlight.tranPax.getOrElse("-") + "</td>" +
                  "</tr>" // BUGFIX: was "<tr>", which left every row unclosed
                }).mkString("\\n") +
                "</table>"
              } else "No Updates </br>"
              val removals = if (f.removals.nonEmpty) {
                // BUGFIX: the header row was opened twice ("<tr>" + "<tr>")
                "<table border='1' cellpadding='5' cellspacing='0'>" +
                "<tr>" +
                s"<td colspan='14'>Removals: </td>" +
                "</tr>" +
                "<tr>" +
                s"<td>Scheduled</td>" +
                s"<td>Terminal</td>" +
                s"<td>Number</td>" +
                "</tr>" +
                f.removals.map(r => {
                  s"<tr><td>${SDate(r.getScheduled).toISOString()}</td><td>${r.getTerminalName}</td><td>${r.getNumber}</td></tr>"
                }).mkString("\\n") +
                "</table>"
              } else "No removals </br>"
              heading + updates + removals
          }.mkString("\\n")
          val heading = s"<h1>$persistenceId</h1>"
          Ok(heading + navigation + "</br>" + debugHtml).as("text/html")
      }
    }
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/main/scala/controllers/application/WithDebug.scala | Scala | apache-2.0 | 6,618 |
package org.scalajs.core.compiler.test
import org.scalajs.core.compiler.test.util._
import org.junit.Test
// scalastyle:off line.size.limit
class JSExportTest extends DirectTest with TestHelpers {
// Compile the test snippets with deprecation warnings enabled, so tests can
// assert on deprecation output.
override def extraArgs: List[String] =
  super.extraArgs ::: List("-deprecation")
// Prepended verbatim to every test snippet: brings `js` and the export
// annotations (@JSExport & co.) into scope for the compiled code strings.
override def preamble: String =
"""import scala.scalajs.js, js.annotation._
"""
@Test
def noDoubleUnderscoreExport: Unit = {
// Normal exports
"""
class A {
@JSExport(name = "__")
def foo = 1
@JSExport
def bar__(x: Int) = x
}
@JSExport
class B__
@JSExport
@ScalaJSDefined class C__ extends js.Object
""" hasErrors
"""
|newSource1.scala:4: error: An exported name may not contain a double underscore (`__`)
| @JSExport(name = "__")
| ^
|newSource1.scala:8: error: An exported name may not contain a double underscore (`__`)
| def bar__(x: Int) = x
| ^
|newSource1.scala:12: error: An exported name may not contain a double underscore (`__`)
| class B__
| ^
|newSource1.scala:15: error: An exported name may not contain a double underscore (`__`)
| @ScalaJSDefined class C__ extends js.Object
| ^
"""
// Inherited exports (objects)
"""
@JSExportDescendentObjects
trait A
package fo__o {
object B extends A
}
""" hasErrors
"""
|newSource1.scala:7: error: B may not have a double underscore (`__`) in its fully qualified name, since it is forced to be exported by a @JSExportDescendentObjects on trait A
| object B extends A
| ^
"""
"""
@JSExportDescendentObjects
@ScalaJSDefined trait A extends js.Object
package fo__o {
@ScalaJSDefined object B extends A
}
""" hasErrors
"""
|newSource1.scala:7: error: B may not have a double underscore (`__`) in its fully qualified name, since it is forced to be exported by a @JSExportDescendentObjects on trait A
| @ScalaJSDefined object B extends A
| ^
"""
// Inherited exports (classes)
"""
@JSExportDescendentClasses
trait A
package fo__o {
class B(x: Int) extends A {
def this() = this(1)
private def this(s: String) = this(1)
}
}
""" hasErrors
"""
|newSource1.scala:7: error: B may not have a double underscore (`__`) in its fully qualified name, since it is forced to be exported by a @JSExportDescendentClasses on trait A
| class B(x: Int) extends A {
| ^
|newSource1.scala:8: error: B may not have a double underscore (`__`) in its fully qualified name, since it is forced to be exported by a @JSExportDescendentClasses on trait A
| def this() = this(1)
| ^
"""
"""
@JSExportDescendentClasses
@ScalaJSDefined trait A extends js.Object
package fo__o {
@ScalaJSDefined class B(x: Int) extends A
}
""" hasErrors
"""
|newSource1.scala:7: error: B may not have a double underscore (`__`) in its fully qualified name, since it is forced to be exported by a @JSExportDescendentClasses on trait A
| @ScalaJSDefined class B(x: Int) extends A
| ^
"""
}
@Test
def noConflictingExport: Unit = {
"""
class Confl {
@JSExport("value")
def hello = "foo"
@JSExport("value")
def world = "bar"
}
""" fails() // No error test, Scala version dependent error messages
"""
class Confl {
class Box[T](val x: T)
@JSExport
def ub(x: Box[String]): String = x.x
@JSExport
def ub(x: Box[Int]): Int = x.x
}
""" fails() // No error test, Scala version dependent error messages
"""
class Confl {
@JSExport
def rtType(x: scala.scalajs.js.prim.Number) = x
@JSExport
def rtType(x: Double) = x
}
""" fails() // Error message depends on Scala version
"""
class Confl {
@JSExport
def foo(x: Int)(ys: Int*) = x
@JSExport
def foo(x: Int*) = x
}
""" hasErrors
"""
|newSource1.scala:7: error: Cannot disambiguate overloads for exported method $js$exported$meth$foo with types
| (x: Seq)Object
| (x: Int, ys: Seq)Object
| @JSExport
| ^
"""
"""
class Confl {
@JSExport
def foo(x: Int = 1) = x
@JSExport
def foo(x: String*) = x
}
""" hasErrors
"""
|newSource1.scala:4: error: Cannot disambiguate overloads for exported method $js$exported$meth$foo with types
| (x: Int)Object
| (x: Seq)Object
| @JSExport
| ^
"""
"""
class Confl {
@JSExport
def foo(x: scala.scalajs.js.prim.Number, y: String)(z: Int = 1) = x
@JSExport
def foo(x: Double, y: String)(z: String*) = x
}
""" fails() // Error message depends on Scala version
"""
class A {
@JSExport
def a(x: scala.scalajs.js.Any) = 1
@JSExport
def a(x: Any) = 2
}
""" fails() // Error message depends on Scala version
}
@Test
def noExportLocal: Unit = {
// Local class
"""
class A {
def method = {
@JSExport
class A
@JSExport
@ScalaJSDefined class B extends js.Object
}
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a local class
| @JSExport
| ^
|newSource1.scala:8: error: You may not export a local class
| @JSExport
| ^
"""
// Local object
"""
class A {
def method = {
@JSExport
object A
@JSExport
@ScalaJSDefined object B extends js.Object
}
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a local object
| @JSExport
| ^
|newSource1.scala:8: error: You may not export a local object
| @JSExport
| ^
"""
// Local method
"""
class A {
def method = {
@JSExport
def foo = 1
}
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a local definition
| @JSExport
| ^
"""
// Local val
"""
class A {
def method = {
@JSExport
val x = 1
}
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a local definition
| @JSExport
| ^
"""
// Local var
"""
class A {
def method = {
@JSExport
var x = 1
}
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a local definition
| @JSExport
| ^
"""
}
@Test
def noMiddleVarArg: Unit = {
// A repeated (*-)parameter is only exportable in the last position, across
// all parameter lists.
"""
class A {
@JSExport
def method(xs: Int*)(ys: String) = 1
}
""" hasErrors
"""
|newSource1.scala:4: error: In an exported method, a *-parameter must come last (through all parameter lists)
| @JSExport
| ^
"""
}
@Test
def noMiddleDefaultParam: Unit = {
// Parameters with default values must trail all non-default parameters in an
// exported method, across all parameter lists.
"""
class A {
@JSExport
def method(x: Int = 1)(y: String) = 1
}
""" hasErrors
"""
|newSource1.scala:4: error: In an exported method, all parameters with defaults must be at the end
| @JSExport
| ^
"""
}
@Test
def noExportAbstractClass: Unit = {
"""
@JSExport
abstract class A
abstract class B(x: Int) {
@JSExport
def this() = this(5)
}
@JSExport
@ScalaJSDefined abstract class C extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may not export an abstract class
| @JSExport
| ^
|newSource1.scala:7: error: You may not export an abstract class
| @JSExport
| ^
|newSource1.scala:11: error: You may not export an abstract class
| @JSExport
| ^
"""
}
@Test
def noExportTrait: Unit = {
"""
@JSExport
trait Test
@JSExport
@ScalaJSDefined trait Test2 extends js.Object
@JSExport
@js.native
trait Test3 extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may not export a trait
| @JSExport
| ^
|newSource1.scala:6: error: You may not export a trait
| @JSExport
| ^
|newSource1.scala:9: error: You may not export a trait
| @JSExport
| ^
"""
}
@Test
def noExportNonPublicClassOrObject: Unit = {
"""
@JSExport
private class A
@JSExport
protected[this] class B
@JSExport
@ScalaJSDefined private class C extends js.Object
@JSExport
@ScalaJSDefined protected[this] class D extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may only export public and protected classes
| @JSExport
| ^
|newSource1.scala:6: error: You may only export public and protected classes
| @JSExport
| ^
|newSource1.scala:9: error: You may only export public and protected classes
| @JSExport
| ^
|newSource1.scala:12: error: You may only export public and protected classes
| @JSExport
| ^
"""
"""
@JSExport
private object A
@JSExport
protected[this] object B
@JSExport
@ScalaJSDefined private object C extends js.Object
@JSExport
@ScalaJSDefined protected[this] object D extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may only export public and protected objects
| @JSExport
| ^
|newSource1.scala:6: error: You may only export public and protected objects
| @JSExport
| ^
|newSource1.scala:9: error: You may only export public and protected objects
| @JSExport
| ^
|newSource1.scala:12: error: You may only export public and protected objects
| @JSExport
| ^
"""
}
@Test
def noExportNonPublicMember: Unit = {
"""
class A {
@JSExport
private def foo = 1
@JSExport
protected[this] def bar = 2
}
""" hasErrors
"""
|newSource1.scala:4: error: You may only export public and protected methods
| @JSExport
| ^
|newSource1.scala:7: error: You may only export public and protected methods
| @JSExport
| ^
"""
}
@Test
def noExportNestedClass: Unit = {
"""
class A {
@JSExport
class Nested {
@JSExport
def this(x: Int) = this()
}
@JSExport
@ScalaJSDefined class Nested2 extends js.Object
}
""" hasErrors
"""
|newSource1.scala:4: error: You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.
| @JSExport
| ^
|newSource1.scala:6: error: You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.
| @JSExport
| ^
|newSource1.scala:10: error: You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.
| @JSExport
| ^
"""
}
@Test
def noImplicitNameNestedExportClass: Unit = {
"""
object A {
@JSExport
class Nested {
@JSExport
def this(x: Int) = this
}
@JSExport
@ScalaJSDefined class Nested2 extends js.Object
}
""" hasErrors
"""
|newSource1.scala:4: error: You must set an explicit name for exports of nested classes.
| @JSExport
| ^
|newSource1.scala:6: error: You must set an explicit name for exports of nested classes.
| @JSExport
| ^
|newSource1.scala:10: error: You must set an explicit name for exports of nested classes.
| @JSExport
| ^
"""
}
@Test
def noExportNestedObject: Unit = {
"""
class A {
@JSExport
object Nested
@JSExport
@ScalaJSDefined object Nested2 extends js.Object
}
""" hasErrors
"""
|newSource1.scala:4: error: You may not export a nested object
| @JSExport
| ^
|newSource1.scala:7: error: You may not export a nested object
| @JSExport
| ^
"""
}
@Test
def noImplicitNameNestedExportObject: Unit = {
"""
object A {
@JSExport
object Nested
@JSExport
@ScalaJSDefined object Nested2 extends js.Object
}
""" hasErrors
"""
|newSource1.scala:4: error: You must set an explicit name for exports of nested classes.
| @JSExport
| ^
|newSource1.scala:7: error: You must set an explicit name for exports of nested classes.
| @JSExport
| ^
"""
}
@Test
def noExportJSRaw: Unit = {
"""
import scala.scalajs.js
@JSExport
@js.native
object A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a native JS class or object
| @JSExport
| ^
"""
"""
import scala.scalajs.js
@JSExport
@js.native
trait A extends js.Object
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a trait
| @JSExport
| ^
"""
"""
import scala.scalajs.js
@JSExport
@js.native
class A extends js.Object {
@JSExport
def this(x: Int) = this()
}
""" hasErrors
"""
|newSource1.scala:5: error: You may not export a native JS class or object
| @JSExport
| ^
|newSource1.scala:8: error: You may not export a constructor of a subclass of js.Any
| @JSExport
| ^
"""
}
@Test
def noExportJSRawMember: Unit = {
"""
import scala.scalajs.js
@js.native
class A extends js.Object {
@JSExport
def foo: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: You may not export a method of a subclass of js.Any
| @JSExport
| ^
"""
"""
import scala.scalajs.js
@ScalaJSDefined
class A extends js.Object {
@JSExport
def foo: Int = js.native
}
""" hasErrors
"""
|newSource1.scala:7: error: You may not export a method of a subclass of js.Any
| @JSExport
| ^
"""
}
@Test
def noBadSetterType: Unit = {
// Bad param list
"""
class A {
@JSExport
def foo_=(x: Int, y: Int) = ()
}
""" hasErrors
"""
|newSource1.scala:4: error: Exported setters must have exactly one argument
| @JSExport
| ^
"""
// Bad return type
"""
class A {
@JSExport
def foo_=(x: Int) = "string"
}
""" hasErrors
"""
|newSource1.scala:4: error: Exported setters must return Unit
| @JSExport
| ^
"""
// Varargs
"""
class A {
@JSExport
def foo_=(x: Int*) = ()
}
""" hasErrors
"""
|newSource1.scala:4: error: Exported setters may not have repeated params
| @JSExport
| ^
"""
// Default arguments
"""
class A {
@JSExport
def foo_=(x: Int = 1) = ()
}
""" hasWarns
"""
|newSource1.scala:4: warning: Exported setters may not have default params. This will be enforced in 1.0.
| @JSExport
| ^
"""
}
@Test
def noBadToStringExport: Unit = {
// Only the real zero-argument `toString` may be exported under the name
// "toString"; exporting another zero-arg method under that name is rejected.
"""
class A {
@JSExport("toString")
def a(): Int = 5
}
""" hasErrors
"""
|newSource1.scala:4: error: You may not export a zero-argument method named other than 'toString' under the name 'toString'
| @JSExport("toString")
| ^
"""
}
@Test
def noBadNameExportAll: Unit = {
"""
@JSExportAll
class A {
val __f = 1
def a_= = 2
}
""" hasErrors
"""
|newSource1.scala:5: error: An exported name may not contain a double underscore (`__`)
| val __f = 1
| ^
|newSource1.scala:3: error: Exported setters must return Unit
| @JSExportAll
| ^
"""
}
@Test
def noConflictingMethodAndProperty: Unit = {
// Basic case
"""
class A {
@JSExport("a")
def bar() = 2
@JSExport("a")
val foo = 1
}
""" hasErrors
"""
|newSource1.scala:4: error: Exported property a conflicts with A.$js$exported$meth$a
| @JSExport("a")
| ^
|newSource1.scala:7: error: Exported method a conflicts with A.$js$exported$prop$a
| @JSExport("a")
| ^
"""
// Inherited case
"""
class A {
@JSExport("a")
def bar() = 2
}
class B extends A {
@JSExport("a")
def foo_=(x: Int): Unit = ()
@JSExport("a")
val foo = 1
}
""" hasErrors
"""
|newSource1.scala:4: error: Exported property a conflicts with A.$js$exported$meth$a
| @JSExport("a")
| ^
"""
}
@Test
def namedExportIsDeprecated: Unit = {
"""
class A {
@JSExportNamed
def foo(x: Int, y: Int) = 1
}
""" hasWarns
s"""
|newSource1.scala:5: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| def foo(x: Int, y: Int) = 1
| ^
|newSource1.scala:4: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| @JSExportNamed
| ^
"""
}
@Test
def noOverrideNamedExport: Unit = {
"""
class A {
@JSExportNamed
def foo(x: Int, y: Int) = 1
}
class B extends A {
@JSExportNamed
override def foo(x: Int, y: Int) = 2
}
""" hasErrors
s"""
|newSource1.scala:5: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| def foo(x: Int, y: Int) = 1
| ^
|newSource1.scala:4: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| @JSExportNamed
| ^
|newSource1.scala:9: error: overriding method $$js$$exported$$meth$$foo in class A of type (namedArgs: Any)Any;
| method $$js$$exported$$meth$$foo cannot override final member
| @JSExportNamed
| ^
|newSource1.scala:10: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| override def foo(x: Int, y: Int) = 2
| ^
"""
}
@Test
def noConflictNamedExport: Unit = {
// Normal method
"""
class A {
@JSExportNamed
def foo(x: Int, y: Int) = 1
@JSExport
def foo(x: scala.scalajs.js.Any) = 2
}
""" fails() // No error test, Scala version dependent error messages
// Ctors
"""
class A {
@JSExportNamed
def this(x: Int) = this()
@JSExport
def this(x: scala.scalajs.js.Any) = this
@JSExportNamed
def this(x: Long) = this()
}
""" fails() // No error test, Scala version dependent error messages
}
@Test
def noNamedExportObject: Unit = {
"""
@JSExportNamed
object A
@JSExportNamed
@ScalaJSDefined object B extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may not use @JSNamedExport on an object
| @JSExportNamed
| ^
|newSource1.scala:6: error: You may not use @JSNamedExport on an object
| @JSExportNamed
| ^
"""
}
@Test
def noNamedExportSJSDefinedClass: Unit = {
// @JSExportNamed is not applicable to Scala.js-defined JS classes.
"""
@JSExportNamed
@ScalaJSDefined class A extends js.Object
""" hasErrors
"""
|newSource1.scala:3: error: You may not use @JSNamedExport on a Scala.js-defined JS class
| @JSExportNamed
| ^
"""
}
@Test
def noNamedExportVarArg: Unit = {
"""
class A {
@JSExportNamed
def foo(a: Int*) = 1
}
""" hasErrors
s"""
|newSource1.scala:5: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| def foo(a: Int*) = 1
| ^
|newSource1.scala:4: warning: class JSExportNamed in package annotation is deprecated${since("0.6.11")}: Use @JSExport with an explicit option bag instead. See the Scaladoc for more details.
| @JSExportNamed
| ^
|newSource1.scala:4: error: You may not name-export a method with a *-parameter
| @JSExportNamed
| ^
"""
}
@Test
def noNamedExportProperty: Unit = {
// Getter
"""
class A {
@JSExportNamed
def a = 1
}
""" hasErrors
"""
|newSource1.scala:4: error: You may not export a getter or a setter as a named export
| @JSExportNamed
| ^
"""
// Setter
"""
class A {
@JSExportNamed
def a_=(x: Int) = ()
}
""" hasErrors
"""
|newSource1.scala:4: error: You may not export a getter or a setter as a named export
| @JSExportNamed
| ^
"""
}
@Test
def gracefulDoubleDefaultFail: Unit = {
// This used to blow up (i.e. not just fail), because PrepJSExports asked
// for the symbol of the default parameter getter of [[y]], and asserted its
// not overloaded. Since the Scala compiler only fails later on this, the
// assert got triggered and made the compiler crash
"""
class A {
@JSExport
def foo(x: String, y: String = "hello") = x
def foo(x: Int, y: String = "bar") = x
}
""" fails()
}
@Test
def noNonLiteralExportNames: Unit = {
"""
object A {
val a = "Hello"
final val b = "World"
}
class B {
@JSExport(A.a)
def foo = 1
@JSExport(A.b)
def bar = 1
}
""" hasErrors
"""
|newSource1.scala:9: error: The argument to JSExport must be a literal string
| @JSExport(A.a)
| ^
"""
}
@Test
def noInheritIgnoreInvalidDescendants: Unit = {
"""
@JSExportDescendentClasses
trait A
@JSExportDescendentClasses(ignoreInvalidDescendants = true)
trait B
object A {
// Local class is not allowed
def foo = { new A with B }
}
""" hasErrors
"""
|newSource1.scala:11: error: You may not export a local class
| def foo = { new A with B }
| ^
"""
}
  @Test
  def noExportImplicitApply: Unit = {
    // Exporting an `apply` method without an explicit name only warns for now
    // (it becomes an error in 1.0); writing @JSExport("apply") explicitly
    // silences the warning (last snippet).
    """
    class A {
      @JSExport
      def apply(): Int = 1
    }
    """ hasWarns
    """
      |newSource1.scala:4: warning: Member cannot be exported to function application. It is available under the name apply instead. Add @JSExport("apply") to silence this warning. This will be enforced in 1.0.
      |      @JSExport
      |       ^
    """
    // The same warning is emitted when `apply` is picked up via @JSExportAll.
    """
    @JSExportAll
    class A {
      def apply(): Int = 1
    }
    """ hasWarns
    """
      |newSource1.scala:5: warning: Member cannot be exported to function application. It is available under the name apply instead. Add @JSExport("apply") to silence this warning. This will be enforced in 1.0.
      |      def apply(): Int = 1
      |          ^
    """
    // For this case, deprecation warnings are not exactly the same in 2.10.x
    """
    @JSExportAll
    class A {
      @JSExportNamed("apply")
      @JSExport("foo")
      def apply(): Int = 1
    }
    """ containsWarns
    """
      |newSource1.scala:7: warning: Member cannot be exported to function application. It is available under the name apply instead. Add @JSExport("apply") to silence this warning. This will be enforced in 1.0.
      |      def apply(): Int = 1
      |          ^
    """
    // Explicitly naming the export "apply" is the sanctioned form: no warning.
    """
    @JSExportAll
    class A {
      @JSExport("apply")
      def apply(): Int = 1
    }
    """.hasNoWarns
  }
  @Test
  def exportObjectAsToString: Unit = {
    // Exporting an *object* under the name "toString" is legal; the special
    // handling of `toString` only restricts exported methods.
    """
    @JSExport("toString")
    object ExportAsToString
    """.succeeds
  }
private def since(v: String): String = {
val version = scala.util.Properties.versionNumberString
if (version.startsWith("2.10.") || version.startsWith("2.11.")) ""
else s" (since $v)"
}
  @Test
  def noExportTopLevelTrait: Unit = {
    // Traits have no instantiable JS representation, so @JSExportTopLevel is
    // rejected on traits, whether Scala traits or Scala.js-defined JS traits.
    """
    @JSExportTopLevel("foo")
    trait A
    @JSExportTopLevel("bar")
    @ScalaJSDefined
    trait B extends js.Object
    """ hasErrors
    """
      |newSource1.scala:3: error: You may not export a trait
      |    @JSExportTopLevel("foo")
      |     ^
      |newSource1.scala:6: error: You may not export a trait
      |    @JSExportTopLevel("bar")
      |     ^
    """
    // Same restriction when the traits are nested inside a static object.
    """
    object Container {
      @JSExportTopLevel("foo")
      trait A
      @JSExportTopLevel("bar")
      @ScalaJSDefined
      trait B extends js.Object
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: You may not export a trait
      |      @JSExportTopLevel("foo")
      |       ^
      |newSource1.scala:7: error: You may not export a trait
      |      @JSExportTopLevel("bar")
      |       ^
    """
  }
  @Test
  def noExportTopLevelGetter: Unit = {
    // A parameterless `def` is a getter; getters cannot be exported to the
    // top level (there is no object to attach the property to).
    """
    object A {
      @JSExportTopLevel("foo")
      def a: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: You may not export a getter or a setter to the top level
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportTopLevelSetter: Unit = {
    // Symmetric to the getter case: `a_=` setters cannot be exported to the
    // top level either.
    """
    object A {
      @JSExportTopLevel("foo")
      def a_=(x: Int): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: You may not export a getter or a setter to the top level
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportTopLevelFieldsWithSameName: Unit = {
    // Two fields may not share one top-level export name: a field export is a
    // plain value, so there is no overloading to disambiguate by.
    """
    object A {
      @JSExportTopLevel("foo")
      val a: Int = 1
      @JSExportTopLevel("foo")
      var b: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:5: error: Duplicate top-level export with name 'foo': a field may not share its exported name with another field or method
      |      val a: Int = 1
      |          ^
    """
  }
  @Test
  def noExportTopLevelFieldsAndMethodsWithSameName: Unit = {
    // A field export also conflicts with a method export of the same name,
    // in either declaration order (the error points at the second party).
    """
    object A {
      @JSExportTopLevel("foo")
      val a: Int = 1
      @JSExportTopLevel("foo")
      def b(x: Int): Int = x + 1
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Duplicate top-level export with name 'foo': a field may not share its exported name with another field or method
      |      @JSExportTopLevel("foo")
      |       ^
    """
    """
    object A {
      @JSExportTopLevel("foo")
      def a(x: Int): Int = x + 1
      @JSExportTopLevel("foo")
      val b: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Duplicate top-level export with name 'foo': a field may not share its exported name with another field or method
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportTopLevelNonStatic: Unit = {
    // Top-level exports need a statically reachable owner. Members of plain
    // classes, of objects nested inside classes, and nested classes/objects
    // themselves (Scala or Scala.js-defined) are all rejected.
    """
    class A {
      @JSExportTopLevel("foo")
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Only static objects may export their members to the top level
      |      @JSExportTopLevel("foo")
      |       ^
    """
    // An object nested inside a class is not static either.
    """
    class A {
      object B {
        @JSExportTopLevel("foo")
        def a(): Unit = ()
      }
    }
    """ hasErrors
    """
      |newSource1.scala:5: error: Only static objects may export their members to the top level
      |        @JSExportTopLevel("foo")
      |         ^
    """
    """
    class A {
      @JSExportTopLevel("Foo")
      object B
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Only static objects may export their members to the top level
      |      @JSExportTopLevel("Foo")
      |       ^
    """
    """
    class A {
      @JSExportTopLevel("Foo")
      @ScalaJSDefined
      object B extends js.Object
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Only static objects may export their members to the top level
      |      @JSExportTopLevel("Foo")
      |       ^
    """
    """
    class A {
      @JSExportTopLevel("Foo")
      @ScalaJSDefined
      class B extends js.Object
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Only static objects may export their members to the top level
      |      @JSExportTopLevel("Foo")
      |       ^
    """
    """
    class A {
      @JSExportTopLevel("Foo")
      class B
    }
    """ hasErrors
    """
      |newSource1.scala:4: error: Only static objects may export their members to the top level
      |      @JSExportTopLevel("Foo")
      |       ^
    """
  }
  @Test
  def noExportTopLevelJSModule: Unit = {
    // Methods of js.Any subclasses are JS members already; they cannot carry
    // a top-level export.
    """
    @ScalaJSDefined
    object A extends js.Object {
      @JSExportTopLevel("foo")
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:5: error: You may not export a method of a subclass of js.Any
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportStaticModule: Unit = {
    // Only vals/vars/defs can be exported as static members; whole objects
    // cannot (implementation restriction).
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      object A
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Implementation restriction: cannot export a class or object as static
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticTrait: Unit = {
    // Traits can never be static members (note the distinct error message
    // from the class/object implementation restriction below).
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      trait A
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: You may not export a trait as static.
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticClass: Unit = {
    // Classes cannot be exported as static, neither via an annotation on the
    // class itself nor via an annotated secondary constructor.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      class A
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Implementation restriction: cannot export a class or object as static
      |      @JSExportStatic
      |       ^
    """
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      class A {
        @JSExportStatic
        def this(x: Int) = this()
      }
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Implementation restriction: cannot export a class or object as static
      |        @JSExportStatic
      |         ^
    """
  }
  @Test
  def noExportStaticValTwice: Unit = {
    // A field compiles to a single storage slot, so it can carry only one
    // static export name.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      @JSExportStatic("b")
      val a: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Fields (val or var) cannot be exported as static more than once
      |      @JSExportStatic("b")
      |       ^
    """
  }
  @Test
  def noExportStaticVarTwice: Unit = {
    // Same single-export restriction for vars.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      @JSExportStatic("b")
      var a: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Fields (val or var) cannot be exported as static more than once
      |      @JSExportStatic("b")
      |       ^
    """
  }
  @Test
  def noExportValAsStaticAndTopLevel: Unit = {
    // A field cannot be exported both ways: static and top-level exports
    // would need two different storage locations for one field.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      @JSExportTopLevel("foo")
      val a: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Fields (val or var) cannot be exported both as static and at the top-level
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportVarAsStaticAndTopLevel: Unit = {
    // Same static/top-level exclusivity for vars.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      @JSExportTopLevel("foo")
      var a: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Fields (val or var) cannot be exported both as static and at the top-level
      |      @JSExportTopLevel("foo")
      |       ^
    """
  }
  @Test
  def noExportSetterWithBadSetterType: Unit = {
    // A JS setter takes exactly one argument; a two-parameter `a_=` cannot
    // be exported as one.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a_=(x: Int, y: Int): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Exported setters must have exactly one argument
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticCollapsingMethods: Unit = {
    // Two methods exported under the same static name with identical
    // signatures cannot be disambiguated at the call site.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def foo(x: Int): Int = x
      @JSExportStatic("foo")
      def bar(x: Int): Int = x + 1
    }
    """ hasErrors
    """
      |newSource1.scala:11: error: Cannot disambiguate overloads for exported method bar with types
      |  (x: Int)Int
      |  (x: Int)Int
      |      def bar(x: Int): Int = x + 1
      |          ^
    """
  }
  @Test
  def noExportStaticCollapsingGetters: Unit = {
    // Getters are properties, not overloadable methods: two getters cannot
    // share one static export name at all.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def foo: Int = 1
      @JSExportStatic("foo")
      def bar: Int = 2
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Duplicate static getter export with name 'foo'
      |      def foo: Int = 1
      |          ^
    """
  }
  @Test
  def noExportStaticCollapsingSetters: Unit = {
    // Two setters with the same exported name and identical signatures are
    // indistinguishable overloads (note the mangled name bar_$eq).
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def foo_=(v: Int): Unit = ()
      @JSExportStatic("foo")
      def bar_=(v: Int): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:11: error: Cannot disambiguate overloads for exported method bar_$eq with types
      |  (v: Int)Unit
      |  (v: Int)Unit
      |      def bar_=(v: Int): Unit = ()
      |          ^
    """
  }
  @Test
  def noExportStaticFieldsWithSameName: Unit = {
    // As with top-level exports, a static field export reserves its name
    // exclusively: a second field under the same name is an error.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      val a: Int = 1
      @JSExportStatic("a")
      var b: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Duplicate static export with name 'a': a field may not share its exported name with another field or method
      |      val a: Int = 1
      |          ^
    """
  }
  @Test
  def noExportStaticFieldsAndMethodsWithSameName: Unit = {
    // A static field also conflicts with a static method of the same name,
    // in either declaration order.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      val a: Int = 1
      @JSExportStatic("a")
      def b(x: Int): Int = x + 1
    }
    """ hasErrors
    """
      |newSource1.scala:10: error: Duplicate static export with name 'a': a field may not share its exported name with another field or method
      |      @JSExportStatic("a")
      |       ^
    """
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a(x: Int): Int = x + 1
      @JSExportStatic("a")
      val b: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Duplicate static export with name 'a': a field may not share its exported name with another field or method
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticFieldsAndPropertiesWithSameName: Unit = {
    // A static field conflicts with a static getter of the same name too,
    // again in either declaration order.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      val a: Int = 1
      @JSExportStatic("a")
      def b: Int = 2
    }
    """ hasErrors
    """
      |newSource1.scala:10: error: Duplicate static export with name 'a': a field may not share its exported name with another field or method
      |      @JSExportStatic("a")
      |       ^
    """
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a: Int = 1
      @JSExportStatic("a")
      val b: Int = 2
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Duplicate static export with name 'a': a field may not share its exported name with another field or method
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticPropertiesAndMethodsWithSameName: Unit = {
    // A static property (getter) and a static method cannot share a name;
    // the error message names the kind of the first-declared export.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a: Int = 1
      @JSExportStatic("a")
      def b(x: Int): Int = x + 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Exported property a conflicts with b
      |      def a: Int = 1
      |          ^
    """
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a(x: Int): Int = x + 1
      @JSExportStatic("a")
      def b: Int = 1
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Exported method a conflicts with b
      |      def a(x: Int): Int = x + 1
      |          ^
    """
  }
  @Test
  def noExportStaticNonStatic: Unit = {
    // The companion pair must itself be top-level: a container nested inside
    // a class cannot host static exports.
    """
    class A {
      @ScalaJSDefined
      class StaticContainer extends js.Object
      object StaticContainer {
        @JSExportStatic
        def a(): Unit = ()
      }
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: Only a static object whose companion class is a Scala.js-defined JS class may export its members as static.
      |        @JSExportStatic
      |         ^
    """
  }
  @Test
  def noExportStaticInJSModule: Unit = {
    // The object carrying @JSExportStatic must be a *Scala* object; both
    // Scala.js-defined and native JS objects are subclasses of js.Any and
    // their methods cannot be exported.
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    @ScalaJSDefined
    object StaticContainer extends js.Object {
      @JSExportStatic
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: You may not export a method of a subclass of js.Any
      |      @JSExportStatic
      |       ^
    """
    """
    @ScalaJSDefined
    class StaticContainer extends js.Object
    @js.native
    object StaticContainer extends js.Object {
      @JSExportStatic
      def a(): Unit = js.native
    }
    """ hasErrors
    """
      |newSource1.scala:8: error: You may not export a method of a subclass of js.Any
      |      @JSExportStatic
      |       ^
    """
  }
  @Test
  def noExportStaticIfWrongCompanionType: Unit = {
    // The companion class must be a Scala.js-defined JS *class*: a plain
    // Scala class, a JS trait, and a native JS class are all rejected.
    """
    class StaticContainer
    object StaticContainer {
      @JSExportStatic
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:6: error: Only a static object whose companion class is a Scala.js-defined JS class may export its members as static.
      |      @JSExportStatic
      |       ^
    """
    """
    @ScalaJSDefined
    trait StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Only a static object whose companion class is a Scala.js-defined JS class may export its members as static.
      |      @JSExportStatic
      |       ^
    """
    """
    @js.native
    class StaticContainer extends js.Object
    object StaticContainer {
      @JSExportStatic
      def a(): Unit = ()
    }
    """ hasErrors
    """
      |newSource1.scala:7: error: Only a static object whose companion class is a Scala.js-defined JS class may export its members as static.
      |      @JSExportStatic
      |       ^
    """
  }
}
| xuwei-k/scala-js | compiler/src/test/scala/org/scalajs/core/compiler/test/JSExportTest.scala | Scala | bsd-3-clause | 40,737 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
import org.scalatest._
import java.lang.reflect.Method
import java.lang.reflect.Modifier
import scala.util.matching.Regex
import java.lang.reflect.Field
import scala.reflect.Manifest
import Helper.transformOperatorChars
// TODO: drop generic support for be as an equality comparison, in favor of specific ones.
// TODO: mention on JUnit and TestNG docs that you can now mix in ShouldMatchers or MustMatchers
// TODO: Put links from ShouldMatchers to wherever I reveal the matrix and algo of how properties are checked dynamically.
// TODO: double check that I wrote tests for (length (7)) and (size (8)) in parens
// TODO: document how to turn off the === implicit conversion
// TODO: Document you can use JMock, EasyMock, etc.
private[scalatest] object Helper {

  /**
   * Reflectively looks up the value of a property on <code>objectWithProperty</code>.
   *
   * <p>
   * If the symbol passed is <code>'title</code>, this looks for a field named "title",
   * a no-arg method named "title", or a JavaBean-style method named "getTitle"
   * (or "isTitle" when <code>isBooleanProperty</code> is true). Resolution
   * precedence (F = field, M = Scala-style method, G = get/is method):
   * </p>
   *
   * <pre>
   * F | M | G | Result
   * 0   0   0   None
   * 0   0   1   Some(G)
   * 0   1   0   Some(M)
   * 0   1   1   Some(M)  prefer a Scala-style method over a Java-style one (e.g. BeanProperty annotation)
   * 1   0   0   Some(F)  a field is used only when no method matches
   * 1   0   1   Some(G)
   * 1   1   0   Some(M)
   * 1   1   1   Some(M)
   * </pre>
   *
   * @param objectWithProperty the object whose property should be read
   * @param propertySymbol the property name as a symbol, e.g. <code>'title</code>
   * @param isBooleanProperty when true, look for an "isX" method instead of "getX",
   *        and only accept methods whose result type is <code>Boolean</code>
   * @return the property value, or <code>None</code> if no matching member exists
   */
  def accessProperty(objectWithProperty: AnyRef, propertySymbol: Symbol, isBooleanProperty: Boolean): Option[Any] = {

    // If 'title passed, propertyName would be "title"
    val propertyName = propertySymbol.name

    // If propertyName is ">", mangledPropertyName would be "$greater"
    val mangledPropertyName = transformOperatorChars(propertyName)

    // fieldNameToAccess and methodNameToInvoke would also be "title"
    val fieldNameToAccess = mangledPropertyName
    val methodNameToInvoke = mangledPropertyName

    // methodNameToInvokeWithGet would be "getTitle" ("isTitle" for Boolean properties)
    val prefix = if (isBooleanProperty) "is" else "get"
    val methodNameToInvokeWithGet = prefix + mangledPropertyName(0).toUpperCase + mangledPropertyName.substring(1)

    def isFieldToAccess(field: Field): Boolean = field.getName == fieldNameToAccess

    // Only zero-arg instance methods qualify; for Boolean properties the
    // result type must be the primitive boolean.
    def isMethodToInvoke(method: Method): Boolean =
      method.getName == methodNameToInvoke && method.getParameterTypes.length == 0 && !Modifier.isStatic(method.getModifiers()) &&
        (!isBooleanProperty || method.getReturnType == classOf[Boolean])

    def isGetMethodToInvoke(method: Method): Boolean =
      method.getName == methodNameToInvokeWithGet && method.getParameterTypes.length == 0 && !Modifier.isStatic(method.getModifiers()) &&
        (!isBooleanProperty || method.getReturnType == classOf[Boolean])

    val fieldOption = objectWithProperty.getClass.getFields.find(isFieldToAccess)
    val methodOption = objectWithProperty.getClass.getMethods.find(isMethodToInvoke)
    val getMethodOption = objectWithProperty.getClass.getMethods.find(isGetMethodToInvoke)

    // Apply the precedence table documented above: M, then G, then F.
    (fieldOption, methodOption, getMethodOption) match {
      case (_, Some(method), _) => Some(method.invoke(objectWithProperty, Array[AnyRef](): _*))
      case (_, None, Some(getMethod)) => Some(getMethod.invoke(objectWithProperty, Array[AnyRef](): _*))
      case (Some(field), None, None) => Some(field.get(objectWithProperty))
      case (None, None, None) => None
    }
  }

  /**
   * Translates Scala operator characters in <code>s</code> into their JVM name
   * encodings (e.g. ">" becomes "$greater"); all other characters pass through
   * unchanged. Used to map symbols like <code>'></code> to the method names the
   * compiler actually emits.
   */
  def transformOperatorChars(s: String): String = {
    val builder = new StringBuilder
    for (i <- 0 until s.length) {
      val ch = s.charAt(i)
      val replacement =
        ch match {
          case '!' => "$bang"
          case '#' => "$hash"
          case '~' => "$tilde"
          case '|' => "$bar"
          case '^' => "$up"
          case '\\' => "$bslash"
          case '@' => "$at"
          case '?' => "$qmark"
          case '>' => "$greater"
          case '=' => "$eq"
          case '<' => "$less"
          case ':' => "$colon"
          case '/' => "$div"
          case '-' => "$minus"
          case '+' => "$plus"
          case '*' => "$times"
          case '&' => "$amp"
          case '%' => "$percent"
          case _ => ""
        }

      if (replacement.length > 0)
        builder.append(replacement)
      else
        builder.append(ch)
    }
    builder.toString
  }
}
import Helper.accessProperty
/**
* This trait is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html">ShouldMatchers</a> or
* <a href="MustMatchers.html">MustMatchers</a> for an overview of the matchers DSL.
*
* @author Bill Venners
*/
trait Matchers extends Assertions { matchers =>
private[scalatest] def newTestFailedException(message: String): Throwable = {
val fileNames = List("Matchers.scala", "ShouldMatchers.scala", "MustMatchers.scala")
val temp = new RuntimeException
val stackDepth = temp.getStackTrace.takeWhile(stackTraceElement => fileNames.exists(_ == stackTraceElement.getFileName) || stackTraceElement.getMethodName == "newTestFailedException").length
new TestFailedException(message, stackDepth)
}
  /**
   * Evaluates a dynamic, Symbol-based "be" match such as <code>obj should be ('empty)</code>
   * by reflectively reading a Boolean property named after <code>right</code> on
   * <code>left</code> (via Helper.accessProperty: a field, method, or "isX" method).
   *
   * @param left the value under test
   * @param right the property symbol, e.g. 'empty or 'file
   * @param hasArticle true when the DSL phrase used "a"/"an" (affects failure wording)
   * @param articleIsA true for "a", false for "an"; only meaningful when hasArticle
   * @throws TestFailedException (via newTestFailedException) when no matching
   *         property exists on left
   */
  private def matchSymbolToPredicateMethod[S <: AnyRef](left: S, right: Symbol, hasArticle: Boolean, articleIsA: Boolean): MatchResult = {
    // If 'empty passed, rightNoTick would be "empty"
    val propertyName = right.name
    accessProperty(left, right, true) match {
      case None =>
        // No property found: fail with a "has neither an x nor an isX method"
        // message, choosing "a"/"an" based on the property's first letter.
        // if propertyName is '>, mangledPropertyName would be "$greater"
        val mangledPropertyName = transformOperatorChars(propertyName)
        // methodNameToInvoke would also be "empty"
        val methodNameToInvoke = mangledPropertyName
        // methodNameToInvokeWithIs would be "isEmpty"
        val methodNameToInvokeWithIs = "is"+ mangledPropertyName(0).toUpperCase + mangledPropertyName.substring(1)
        val firstChar = propertyName(0).toLowerCase
        val methodNameStartsWithVowel = firstChar == 'a' || firstChar == 'e' || firstChar == 'i' ||
          firstChar == 'o' || firstChar == 'u'
        throw newTestFailedException(
          FailureMessages(
            if (methodNameStartsWithVowel) "hasNeitherAnOrAnMethod" else "hasNeitherAOrAnMethod",
            left,
            UnquotedString(methodNameToInvoke),
            UnquotedString(methodNameToInvokeWithIs)
          )
        )
      case Some(result) =>
        // Pick past-tense failure wording consistent with the article used in
        // the DSL ("was not a file" vs "was not empty", etc.).
        val (wasNot, was) =
          if (hasArticle) {
            if (articleIsA) ("wasNotA", "wasA") else ("wasNotAn", "wasAn")
          }
          else ("wasNot", "was")
        MatchResult(
          result == true, // Right now I just leave the return value of accessProperty as Any
          FailureMessages(wasNot, left, UnquotedString(propertyName)),
          FailureMessages(was, left, UnquotedString(propertyName))
        )
    }
  }
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class MatcherWrapper[T](leftMatcher: Matcher[T]) { matchersWrapper =>
// TODO: mention not short circuited, and the precendence is even between and and or
/**
* Returns a matcher whose <code>apply</code> method returns a <code>MatchResult</code>
* that represents the logical-and of the results of the wrapped and the passed matcher applied to
* the same value.
*
* <p>
* The reason <code>and</code> has an upper bound on its type parameter is so that the <code>Matcher</code>
* resulting from an invocation of <code>and</code> will have the correct type parameter. If you call
* <code>and</code> on a <code>Matcher[Orange]</code>, passing in a <code>Matcher[Valencia]</code>,
* the result will have type <code>Matcher[Valencia]</code>. This is correct because both a
* <code>Matcher[Orange]</code> and a <code>Matcher[Valencia]</code> know how to match a
* <code>Valencia</code> (but a <code>Matcher[Valencia]</code> doesn't know how to
* match any old <code>Orange</code>). If you call
* <code>and</code> on a <code>Matcher[Orange]</code>, passing in a <code>Matcher[Fruit]</code>,
* the result will have type <code>Matcher[Orange]</code>. This is also correct because both a
* <code>Matcher[Orange]</code> and a <code>Matcher[Fruit]</code> know how to match an
* <code>Orange</code> (but a <code>Matcher[Orange]</code> doesn't know how to
* match any old <code>Fruit</code>).
* </p>
*
* @param the matcher to logical-and with this matcher
* @return a matcher that performs the logical-and of this and the passed matcher
*/
def and[U <: T](rightMatcher: Matcher[U]): Matcher[U] =
new Matcher[U] {
def apply(left: U) = {
val leftMatchResult = leftMatcher(left)
val rightMatchResult = rightMatcher(left) // Not short circuiting anymore
if (!leftMatchResult.matches)
MatchResult(
false,
leftMatchResult.failureMessage,
leftMatchResult.negatedFailureMessage,
leftMatchResult.midSentenceFailureMessage,
leftMatchResult.midSentenceNegatedFailureMessage
)
else {
MatchResult(
rightMatchResult.matches,
Resources("commaBut", leftMatchResult.negatedFailureMessage, rightMatchResult.midSentenceFailureMessage),
Resources("commaAnd", leftMatchResult.negatedFailureMessage, rightMatchResult.midSentenceNegatedFailureMessage),
Resources("commaBut", leftMatchResult.midSentenceNegatedFailureMessage, rightMatchResult.midSentenceFailureMessage),
Resources("commaAnd", leftMatchResult.midSentenceNegatedFailureMessage, rightMatchResult.midSentenceNegatedFailureMessage)
)
}
}
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndHaveWord {
/**
* This method enables the following syntax:
*
* <pre>
* Array(1, 2) should (have length (2) and have length (3 - 1))
* ^
* </pre>
*/
def length(expectedLength: Long) = and(have.length(expectedLength))
/**
* This method enables the following syntax:
*
* <pre>
* Array(1, 2) should (have size (2) and have size (3 - 1))
* ^
* </pre>
*/
def size(expectedSize: Long) = and(have.size(expectedSize))
}
    /**
     * This method enables the following syntax, handling the <code>have</code>
     * that follows <code>and</code> by returning an <code>AndHaveWord</code>
     * on which <code>length</code> or <code>size</code> is then invoked:
     *
     * <pre>
     * Array(1, 2) should (have size (2) and have size (3 - 1))
     *                                   ^
     * </pre>
     */
    def and(haveWord: HaveWord): AndHaveWord = new AndHaveWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndContainWord {
/**
* This method enables the following syntax:
*
* <pre>
* Array(1, 2) should (contain (2) and contain (3 - 1))
*
* </pre>
*/
def apply[T](expectedElement: T) = matchersWrapper.and(matchers.contain(expectedElement))
// def element[T](expectedElement: T) = matchersWrapper.and(matchers.contain.apply(expectedElement))
/**
* This method enables the following syntax:
*
* <pre>
* Map("one" -> 1, "two" -> 2) should (contain key ("two") and contain key ("one"))
* ^
* </pre>
*/
def key[T](expectedElement: T) = matchersWrapper.and(matchers.contain.key(expectedElement))
/**
* This method enables the following syntax:
*
* <pre>
* Map("one" -> 1, "two" -> 2) should (contain value (2) and contain value (1))
* ^
* </pre>
*/
def value[T](expectedValue: T) = matchersWrapper.and(matchers.contain.value(expectedValue))
}
    /**
     * This method enables the following syntax, handling the <code>contain</code>
     * that follows <code>and</code> by returning an <code>AndContainWord</code>:
     *
     * <pre>
     * Map("one" -> 1, "two" -> 2) should (contain key ("two") and contain key ("one"))
     *                                                         ^
     * </pre>
     */
    def and(containWord: ContainWord): AndContainWord = new AndContainWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndBeWord {
/**
* This method enables the following syntax:
*
* <pre>
* isFileMock should (be a ('file) and be a ('file))
* ^
* </pre>
*/
def a(symbol: Symbol) = and(be.a(symbol))
/**
* This method enables the following syntax:
*
* <pre>
* myFile should (be a (file) and be a (file))
* ^
* </pre>
*/
def a[T](bePropertyMatcher: BePropertyMatcher[T]) = and(be.a(bePropertyMatcher))
/**
* This method enables the following syntax:
*
* <pre>
* isAppleMock should (be an ('apple) and be an ('apple))
* ^
* </pre>
*/
def an(symbol: Symbol) = and(be.an(symbol))
/**
* This method enables the following syntax:
*
* <pre>
* isAppleMock should (be an (apple) and be an (apple))
* ^
* </pre>
*/
def an[T](bePropertyMatcher: BePropertyMatcher[T]) = and(be.an(bePropertyMatcher))
/**
* This method enables the following syntax:
*
* <pre>
* obj should (be theSameInstanceAs (string) and be theSameInstanceAs (string))
* ^
* </pre>
*/
def theSameInstanceAs(anyRef: AnyRef) = and(be.theSameInstanceAs(anyRef))
}
    /**
     * This method enables the following syntax, handling the <code>be</code>
     * that follows <code>and</code> by returning an <code>AndBeWord</code>:
     *
     * <pre>
     * isFileMock should (be a ('file) and be a ('file))
     *                                 ^
     * </pre>
     */
    def and(beWord: BeWord): AndBeWord = new AndBeWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndFullyMatchWord {
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (fullyMatch regex (decimal) and fullyMatch regex (decimal))
* ^
* </pre>
*/
def regex(regexString: String) = and(fullyMatch.regex(regexString))
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex (decimalRegex))
* ^
* </pre>
*/
def regex(regex: Regex) = and(fullyMatch.regex(regex))
}
    /**
     * This method enables the following syntax, handling the <code>fullyMatch</code>
     * that follows <code>and</code> by returning an <code>AndFullyMatchWord</code>:
     *
     * <pre>
     * "1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex (decimalRegex))
     *                                               ^
     * </pre>
     */
    def and(fullyMatchWord: FullyMatchWord): AndFullyMatchWord = new AndFullyMatchWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndIncludeWord {
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (include regex (decimal) and include regex (decimal))
* ^
* </pre>
*/
def regex(regexString: String) = and(include.regex(regexString))
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (include regex (decimalRegex) and include regex (decimalRegex))
* ^
* </pre>
*/
def regex(regex: Regex) = and(include.regex(regex))
}
    /**
     * This method enables the following syntax, handling the <code>include</code>
     * that follows <code>and</code> by returning an <code>AndIncludeWord</code>:
     *
     * <pre>
     * "hello, world" should (include regex ("hel*o") and include regex ("wor.d"))
     *                                                ^
     * </pre>
     */
    def and(includeWord: IncludeWord): AndIncludeWord = new AndIncludeWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndStartWithWord {
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (startWith regex (decimal) and startWith regex (decimal))
* ^
* </pre>
*/
def regex(regexString: String) = and(startWith.regex(regexString))
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (startWith regex (decimalRegex) and startWith regex (decimalRegex))
* ^
* </pre>
*/
def regex(regex: Regex) = and(startWith.regex(regex))
}
    /**
     * This method enables the following syntax, handling the <code>startWith</code>
     * that follows <code>and</code> by returning an <code>AndStartWithWord</code>:
     *
     * <pre>
     * "1.78" should (have length (4) and startWith regex ("1.7"))
     *                                ^
     * </pre>
     */
    def and(startWithWord: StartWithWord): AndStartWithWord = new AndStartWithWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndEndWithWord {
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (endWith regex (decimal) and endWith regex (decimal))
* ^
* </pre>
*/
def regex(regexString: String) = and(endWith.regex(regexString))
/**
* This method enables the following syntax:
*
* <pre>
* "1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex))
* ^
* </pre>
*/
def regex(regex: Regex) = and(endWith.regex(regex))
}
    /**
     * This method enables the following syntax, handling the <code>endWith</code>
     * that follows <code>and</code> by returning an <code>AndEndWithWord</code>:
     *
     * <pre>
     * "1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex))
     *                                            ^
     * </pre>
     */
    def and(endWithWord: EndWithWord): AndEndWithWord = new AndEndWithWord
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AndNotWord {

  /** Enables: <code>1 should (not equal (2) and not equal (3 - 1))</code> */
  def equal(any: Any) = {
    val negated = matchers.not(matchers.equal(any))
    matchersWrapper.and(negated)
  }

  /** Enables: <code>1 should (not be (2) and not be (3 - 1))</code> */
  def be(any: Any) = {
    val negated = matchers.not(matchers.be(any))
    matchersWrapper.and(negated)
  }

  /** Enables: <code>Array(1, 2) should (not have size (5) and not have length (3))</code> */
  def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) = {
    val negated = matchers.not(matchers.have.length(resultOfLengthWordApplication.expectedLength))
    matchersWrapper.and(negated)
  }

  /** Enables: <code>Array(1, 2) should (not have size (5) and not have size (3))</code> */
  def have(resultOfSizeWordApplication: ResultOfSizeWordApplication) = {
    val negated = matchers.not(matchers.have.size(resultOfSizeWordApplication.expectedSize))
    matchersWrapper.and(negated)
  }

  /** Enables: <code>book should (not have (title ("Moby Dick")) and not have (author ("Melville")))</code> */
  def have[T](firstPropertyMatcher: HavePropertyMatcher[T, _], propertyMatchers: HavePropertyMatcher[T, _]*) = {
    val negated = matchers.not(matchers.have(firstPropertyMatcher, propertyMatchers: _*))
    matchersWrapper.and(negated)
  }

  /** Enables: <code>5 should (not be < (2) and not be < (6))</code> */
  def be[T](resultOfLessThanComparison: ResultOfLessThanComparison[T]) = {
    val negated = matchers.not.be(resultOfLessThanComparison)
    matchersWrapper.and(negated)
  }

  /**
   * Enables: <code>map should (contain key (7) and not be (null))</code>
   * NOTE(review): the type parameter T is unused here; kept for source compatibility.
   */
  def be[T](o: Null) = {
    val negated = matchers.not.be(o)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>7 should (not be > (8) and not be > (6))</code> */
  def be[T](resultOfGreaterThanComparison: ResultOfGreaterThanComparison[T]) = {
    val negated = matchers.not.be(resultOfGreaterThanComparison)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>2 should (not be <= (1) and not be <= (2))</code> */
  def be[T](resultOfLessThanOrEqualToComparison: ResultOfLessThanOrEqualToComparison[T]) = {
    val negated = matchers.not.be(resultOfLessThanOrEqualToComparison)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>7 should (not be >= (8) and not be >= (6))</code> */
  def be[T](resultOfGreaterThanOrEqualToComparison: ResultOfGreaterThanOrEqualToComparison[T]) = {
    val negated = matchers.not.be(resultOfGreaterThanOrEqualToComparison)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>5 should (not be === (2) and not be === (6))</code> */
  def be(resultOfTripleEqualsApplication: ResultOfTripleEqualsApplication) = {
    val negated = matchers.not.be(resultOfTripleEqualsApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>notEmptyMock should (not be ('empty) and not be ('empty))</code> */
  def be(symbol: Symbol) = {
    val negated = matchers.not.be(symbol)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>2 should (not be (odd) and not be (odd))</code> */
  def be[T](beMatcher: BeMatcher[T]) = {
    val negated = matchers.not.be(beMatcher)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>myFile should (not be (directory) and not be (directory))</code> */
  def be[T](bePropertyMatcher: BePropertyMatcher[T]) = {
    val negated = matchers.not.be(bePropertyMatcher)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>isNotFileMock should (not be a ('file) and not be a ('file))</code> */
  def be(resultOfAWordApplication: ResultOfAWordToSymbolApplication) = {
    val negated = matchers.not.be(resultOfAWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>myFile should (not be a (directory) and not be a (directory))</code> */
  def be[T <: AnyRef](resultOfAWordApplication: ResultOfAWordToBePropertyMatcherApplication[T]) = {
    val negated = matchers.not.be(resultOfAWordApplication)
    matchersWrapper.and(negated)
  }

  /**
   * Enables: <code>isNotAppleMock should (not be an ('apple) and not be an ('apple))</code>
   * NOTE(review): the type parameter T is unused here; kept for source compatibility.
   */
  def be[T](resultOfAnWordApplication: ResultOfAnWordToSymbolApplication) = {
    val negated = matchers.not.be(resultOfAnWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>myFile should (not be an (directory) and not be an (directory))</code> */
  def be[T <: AnyRef](resultOfAnWordApplication: ResultOfAnWordToBePropertyMatcherApplication[T]) = {
    val negated = matchers.not.be(resultOfAnWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>obj should (not be theSameInstanceAs (otherString) and not be theSameInstanceAs (otherString))</code> */
  def be[T](resultOfTheSameInstanceAsApplication: ResultOfTheSameInstanceAsApplication) = {
    val negated = matchers.not.be(resultOfTheSameInstanceAsApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenDotOh should (not be (17.0 plusOrMinus 0.2) and not be (17.0 plusOrMinus 0.2))</code> */
  def be(doubleTolerance: DoubleTolerance) = {
    val negated = matchers.not.be(doubleTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenDotOhFloat should (not be (17.0f plusOrMinus 0.2f) and not be (17.0f plusOrMinus 0.2f))</code> */
  def be(floatTolerance: FloatTolerance) = {
    val negated = matchers.not.be(floatTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenLong should (not be (17L plusOrMinus 2L) and not be (17L plusOrMinus 2L))</code> */
  def be(longTolerance: LongTolerance) = {
    val negated = matchers.not.be(longTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenInt should (not be (17 plusOrMinus 2) and not be (17 plusOrMinus 2))</code> */
  def be(intTolerance: IntTolerance) = {
    val negated = matchers.not.be(intTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenShort should (not be (17.toShort plusOrMinus 2.toShort) and not be (17.toShort plusOrMinus 2.toShort))</code> */
  def be(shortTolerance: ShortTolerance) = {
    val negated = matchers.not.be(shortTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>sevenByte should ((not be (19.toByte plusOrMinus 2.toByte)) and (not be (19.toByte plusOrMinus 2.toByte)))</code> */
  def be(byteTolerance: ByteTolerance) = {
    val negated = matchers.not.be(byteTolerance)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not fullyMatch regex ("bob") and not fullyMatch regex (decimal))</code> */
  def fullyMatch(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.fullyMatch(resultOfRegexWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not include regex ("bob") and not include regex (decimal))</code> */
  def include(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.include(resultOfRegexWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not include ("bob") and not include ("1.7"))</code> */
  def include(expectedSubstring: String) = {
    val negated = matchers.not.include(expectedSubstring)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not startWith regex ("bob") and not startWith regex (decimal))</code> */
  def startWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.startWith(resultOfRegexWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not startWith ("red") and not startWith ("1.7"))</code> */
  def startWith(expectedSubstring: String) = {
    val negated = matchers.not.startWith(expectedSubstring)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not endWith regex ("bob") and not endWith regex (decimal))</code> */
  def endWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.endWith(resultOfRegexWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>"fred" should (not endWith ("fre") and not endWith ("1.7"))</code> */
  def endWith(expectedSubstring: String) = {
    val negated = matchers.not.endWith(expectedSubstring)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>Array(1, 2) should (not contain (5) and not contain (3))</code> */
  def contain[T](expectedElement: T) = {
    val negated = matchers.not.contain(expectedElement)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (not contain key ("five") and not contain key ("three"))</code> */
  def contain[T](resultOfKeyWordApplication: ResultOfKeyWordApplication[T]) = {
    val negated = matchers.not.contain(resultOfKeyWordApplication)
    matchersWrapper.and(negated)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (not contain value (5) and not contain value (3))</code> */
  def contain[T](resultOfValueWordApplication: ResultOfValueWordApplication[T]) = {
    val negated = matchers.not.contain(resultOfValueWordApplication)
    matchersWrapper.and(negated)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * Map("one" -> 1, "two" -> 2) should (not contain value (5) and not contain value (3))
 * ^
 * </pre>
 *
 * Note: the <code>notWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>and not</code> appears in a matcher expression.
 */
def and(notWord: NotWord): AndNotWord = new AndNotWord
/**
 * Returns a matcher whose <code>apply</code> method returns a <code>MatchResult</code>
 * that represents the logical-or of the results of this and the passed matcher applied to
 * the same value.
 *
 * <p>
 * The reason <code>or</code> has an upper bound on its type parameter is so that the <code>Matcher</code>
 * resulting from an invocation of <code>or</code> will have the correct type parameter. If you call
 * <code>or</code> on a <code>Matcher[Orange]</code>, passing in a <code>Matcher[Valencia]</code>,
 * the result will have type <code>Matcher[Valencia]</code>. This is correct because both a
 * <code>Matcher[Orange]</code> and a <code>Matcher[Valencia]</code> know how to match a
 * <code>Valencia</code> (but a <code>Matcher[Valencia]</code> doesn't know how to
 * match any old <code>Orange</code>). If you call
 * <code>or</code> on a <code>Matcher[Orange]</code>, passing in a <code>Matcher[Fruit]</code>,
 * the result will have type <code>Matcher[Orange]</code>. This is also correct because both a
 * <code>Matcher[Orange]</code> and a <code>Matcher[Fruit]</code> know how to match an
 * <code>Orange</code> (but a <code>Matcher[Orange]</code> doesn't know how to
 * match any old <code>Fruit</code>).
 * </p>
 *
 * @param rightMatcher the matcher to logical-or with this matcher
 * @return a matcher that performs the logical-or of this and the passed matcher
 */
def or[U <: T](rightMatcher: Matcher[U]): Matcher[U] =
  new Matcher[U] {
    def apply(left: U) = {
      val leftMatchResult = leftMatcher(left)
      val rightMatchResult = rightMatcher(left) // Not short circuiting anymore
      if (leftMatchResult.matches)
        // Left side matched, so the disjunction matches. The left result's messages are
        // reused with plain/negated roles swapped, because the combined result is a success.
        MatchResult(
          true,
          leftMatchResult.negatedFailureMessage,
          leftMatchResult.failureMessage,
          leftMatchResult.midSentenceNegatedFailureMessage,
          leftMatchResult.midSentenceFailureMessage
        )
      else {
        // Left side failed, so the outcome is whatever the right side produced. Every combined
        // message reports the left failure first, then the right-side message, joined by "commaAnd".
        MatchResult(
          rightMatchResult.matches,
          Resources("commaAnd", leftMatchResult.failureMessage, rightMatchResult.midSentenceFailureMessage),
          Resources("commaAnd", leftMatchResult.failureMessage, rightMatchResult.midSentenceNegatedFailureMessage),
          Resources("commaAnd", leftMatchResult.midSentenceFailureMessage, rightMatchResult.midSentenceFailureMessage),
          Resources("commaAnd", leftMatchResult.midSentenceFailureMessage, rightMatchResult.midSentenceNegatedFailureMessage)
        )
      }
    }
  }
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrHaveWord {

  /** Enables: <code>Array(1, 2) should (have length (2) or have length (3 - 1))</code> */
  def length(expectedLength: Long) = {
    val rightMatcher = have.length(expectedLength)
    or(rightMatcher)
  }

  /** Enables: <code>Array(1, 2) should (have size (2) or have size (3 - 1))</code> */
  def size(expectedSize: Long) = {
    val rightMatcher = have.size(expectedSize)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * Array(1, 2) should (have size (2) or have size (3 - 1))
 * ^
 * </pre>
 *
 * Note: the <code>haveWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or have</code> appears in a matcher expression.
 */
def or(haveWord: HaveWord): OrHaveWord = new OrHaveWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrContainWord {

  /** Enables: <code>Array(1, 2) should (contain (2) or contain (3 - 1))</code> */
  def apply[T](expectedElement: T) = {
    val rightMatcher = matchers.contain(expectedElement)
    matchersWrapper.or(rightMatcher)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (contain key ("cat") or contain key ("one"))</code> */
  def key[T](expectedKey: T) = {
    val rightMatcher = matchers.contain.key(expectedKey)
    matchersWrapper.or(rightMatcher)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (contain value (7) or contain value (1))</code> */
  def value[T](expectedValue: T) = {
    val rightMatcher = matchers.contain.value(expectedValue)
    matchersWrapper.or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * Map("one" -> 1, "two" -> 2) should (contain value (7) or contain value (1))
 * ^
 * </pre>
 *
 * Note: the <code>containWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or contain</code> appears in a matcher expression.
 */
def or(containWord: ContainWord): OrContainWord = new OrContainWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrBeWord {

  /** Enables: <code>isFileMock should (be a ('file) or be a ('directory))</code> */
  def a(symbol: Symbol) = {
    val rightMatcher = be.a(symbol)
    or(rightMatcher)
  }

  /** Enables: <code>isFileMock should (be a (file) or be a (directory))</code> */
  def a[T](bePropertyMatcher: BePropertyMatcher[T]) = {
    val rightMatcher = be.a(bePropertyMatcher)
    or(rightMatcher)
  }

  /** Enables: <code>appleMock should (be an ('orange) or be an ('apple))</code> */
  def an(symbol: Symbol) = {
    val rightMatcher = be.an(symbol)
    or(rightMatcher)
  }

  /** Enables: <code>appleMock should (be an (orange) or be an (apple))</code> */
  def an[T](bePropertyMatcher: BePropertyMatcher[T]) = {
    val rightMatcher = be.an(bePropertyMatcher)
    or(rightMatcher)
  }

  /** Enables: <code>obj should (be theSameInstanceAs (string) or be theSameInstanceAs (otherString))</code> */
  def theSameInstanceAs(anyRef: AnyRef) = {
    val rightMatcher = be.theSameInstanceAs(anyRef)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * isFileMock should (be a ('file) or be a ('directory))
 * ^
 * </pre>
 *
 * Note: the <code>beWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or be</code> appears in a matcher expression.
 */
def or(beWord: BeWord): OrBeWord = new OrBeWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrFullyMatchWord {

  /** Enables: <code>"1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimal))</code> */
  def regex(regexString: String) = {
    val rightMatcher = fullyMatch.regex(regexString)
    or(rightMatcher)
  }

  /** Enables: <code>"1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimal))</code> */
  def regex(regex: Regex) = {
    val rightMatcher = fullyMatch.regex(regex)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "1.7" should (fullyMatch regex ("hello") or fullyMatch regex (decimal))
 * ^
 * </pre>
 *
 * Note: the <code>fullyMatchWord</code> parameter is not used by the body; it exists only so
 * that this overload is selected when <code>or fullyMatch</code> appears in a matcher expression.
 */
def or(fullyMatchWord: FullyMatchWord): OrFullyMatchWord = new OrFullyMatchWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrIncludeWord {

  /** Enables: <code>"1.7" should (include regex ("hello") or include regex (decimal))</code> */
  def regex(regexString: String) = {
    val rightMatcher = include.regex(regexString)
    or(rightMatcher)
  }

  /** Enables: <code>"1.7" should (include regex ("hello") or include regex (decimal))</code> */
  def regex(regex: Regex) = {
    val rightMatcher = include.regex(regex)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "a1.7b" should (include regex ("1.7") or include regex ("1.7"))
 * ^
 * </pre>
 *
 * Note: the <code>includeWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or include</code> appears in a matcher expression.
 */
def or(includeWord: IncludeWord): OrIncludeWord = new OrIncludeWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrStartWithWord {

  /** Enables: <code>"1.7" should (startWith regex ("hello") or startWith regex (decimal))</code> */
  def regex(regexString: String) = {
    val rightMatcher = startWith.regex(regexString)
    or(rightMatcher)
  }

  /** Enables: <code>"1.7" should (startWith regex ("hello") or startWith regex (decimal))</code> */
  def regex(regex: Regex) = {
    val rightMatcher = startWith.regex(regex)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "1.7" should (startWith regex ("hello") or startWith regex ("1.7"))
 * ^
 * </pre>
 *
 * Note: the <code>startWithWord</code> parameter is not used by the body; it exists only so
 * that this overload is selected when <code>or startWith</code> appears in a matcher expression.
 */
def or(startWithWord: StartWithWord): OrStartWithWord = new OrStartWithWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrEndWithWord {

  /** Enables: <code>"1.7" should (endWith regex ("hello") or endWith regex (decimal))</code> */
  def regex(regexString: String) = {
    val rightMatcher = endWith.regex(regexString)
    or(rightMatcher)
  }

  /** Enables: <code>"1.7" should (endWith regex ("hello") or endWith regex (decimal))</code> */
  def regex(regex: Regex) = {
    val rightMatcher = endWith.regex(regex)
    or(rightMatcher)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "1.7b" should (endWith regex ("hello") or endWith regex ("7b"))
 * ^
 * </pre>
 *
 * Note: the <code>endWithWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or endWith</code> appears in a matcher expression.
 */
def or(endWithWord: EndWithWord): OrEndWithWord = new OrEndWithWord
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class OrNotWord {

  /** Enables: <code>1 should (not equal (1) or not equal (2))</code> */
  def equal(any: Any) = {
    val negated = matchers.not(matchers.equal(any))
    matchersWrapper.or(negated)
  }

  /** Enables: <code>1 should (not be (1) or not be (2))</code> */
  def be(any: Any) = {
    val negated = matchers.not(matchers.be(any))
    matchersWrapper.or(negated)
  }

  /** Enables: <code>Array(1, 2) should (not have length (2) or not have length (3))</code> */
  def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) = {
    val negated = matchers.not(matchers.have.length(resultOfLengthWordApplication.expectedLength))
    matchersWrapper.or(negated)
  }

  /** Enables: <code>Array(1, 2) should (not have size (2) or not have size (3))</code> */
  def have(resultOfSizeWordApplication: ResultOfSizeWordApplication) = {
    val negated = matchers.not(matchers.have.size(resultOfSizeWordApplication.expectedSize))
    matchersWrapper.or(negated)
  }

  /** Enables: <code>book should (not have (title ("Moby Dick")) or not have (author ("Melville")))</code> */
  def have[T](firstPropertyMatcher: HavePropertyMatcher[T, _], propertyMatchers: HavePropertyMatcher[T, _]*) = {
    val negated = matchers.not(matchers.have(firstPropertyMatcher, propertyMatchers: _*))
    matchersWrapper.or(negated)
  }

  /**
   * Enables: <code>map should (contain key (7) or not be (null))</code>
   * NOTE(review): the type parameter T is unused here; kept for source compatibility.
   */
  def be[T](o: Null) = {
    val negated = matchers.not.be(o)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>5 should (not be < (7) or not be < (8))</code> */
  def be[T](resultOfLessThanComparison: ResultOfLessThanComparison[T]) = {
    val negated = matchers.not.be(resultOfLessThanComparison)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>7 should (not be > (5) or not be > (6))</code> */
  def be[T](resultOfGreaterThanComparison: ResultOfGreaterThanComparison[T]) = {
    val negated = matchers.not.be(resultOfGreaterThanComparison)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>2 should (not be <= (3) or not be <= (2))</code> */
  def be[T](resultOfLessThanOrEqualToComparison: ResultOfLessThanOrEqualToComparison[T]) = {
    val negated = matchers.not.be(resultOfLessThanOrEqualToComparison)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>8 should (not be >= (7) or not be >= (6))</code> */
  def be[T](resultOfGreaterThanOrEqualToComparison: ResultOfGreaterThanOrEqualToComparison[T]) = {
    val negated = matchers.not.be(resultOfGreaterThanOrEqualToComparison)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>5 should (not be === (7) or not be === (8))</code> */
  def be(resultOfTripleEqualsApplication: ResultOfTripleEqualsApplication) = {
    val negated = matchers.not.be(resultOfTripleEqualsApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>notEmptyMock should (not be ('full) or not be ('empty))</code> */
  def be(symbol: Symbol) = {
    val negated = matchers.not.be(symbol)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>2 should (not be (even) or not be (odd))</code> */
  def be[T](beMatcher: BeMatcher[T]) = {
    val negated = matchers.not.be(beMatcher)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>myFile should (not be (directory) or not be (file))</code> */
  def be[T](bePropertyMatcher: BePropertyMatcher[T]) = {
    val negated = matchers.not.be(bePropertyMatcher)
    matchersWrapper.or(negated)
  }

  /**
   * Enables: <code>isNotFileMock should (not be a ('directory) or not be a ('file))</code>
   * NOTE(review): the type parameter T is unused here; kept for source compatibility.
   */
  def be[T](resultOfAWordApplication: ResultOfAWordToSymbolApplication) = {
    val negated = matchers.not.be(resultOfAWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>myFile should (not be a (directory) or not be a (file))</code> */
  def be[T <: AnyRef](resultOfAWordApplication: ResultOfAWordToBePropertyMatcherApplication[T]) = {
    val negated = matchers.not.be(resultOfAWordApplication)
    matchersWrapper.or(negated)
  }

  /**
   * Enables: <code>notAppleMock should (not be an ('apple) or not be an ('apple))</code>
   * NOTE(review): the type parameter T is unused here; kept for source compatibility.
   */
  def be[T](resultOfAnWordApplication: ResultOfAnWordToSymbolApplication) = {
    val negated = matchers.not.be(resultOfAnWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>myFile should (not be an (directory) or not be an (file))</code> */
  def be[T <: AnyRef](resultOfAnWordApplication: ResultOfAnWordToBePropertyMatcherApplication[T]) = {
    val negated = matchers.not.be(resultOfAnWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>obj should (not be theSameInstanceAs (otherString) or not be theSameInstanceAs (string))</code> */
  def be[T](resultOfTheSameInstanceAsApplication: ResultOfTheSameInstanceAsApplication) = {
    val negated = matchers.not.be(resultOfTheSameInstanceAsApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenDotOh should (not be (17.0 plusOrMinus 0.2) or not be (17.0 plusOrMinus 0.2))</code> */
  def be(doubleTolerance: DoubleTolerance) = {
    val negated = matchers.not.be(doubleTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenDotOhFloat should (not be (17.0f plusOrMinus 0.2f) or not be (17.0f plusOrMinus 0.2f))</code> */
  def be(floatTolerance: FloatTolerance) = {
    val negated = matchers.not.be(floatTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenLong should (not be (17L plusOrMinus 2L) or not be (17L plusOrMinus 2L))</code> */
  def be(longTolerance: LongTolerance) = {
    val negated = matchers.not.be(longTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenInt should (not be (17 plusOrMinus 2) or not be (17 plusOrMinus 2))</code> */
  def be(intTolerance: IntTolerance) = {
    val negated = matchers.not.be(intTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenShort should (not be (17.toShort plusOrMinus 2.toShort) or not be (17.toShort plusOrMinus 2.toShort))</code> */
  def be(shortTolerance: ShortTolerance) = {
    val negated = matchers.not.be(shortTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>sevenByte should ((not be (19.toByte plusOrMinus 2.toByte)) or (not be (19.toByte plusOrMinus 2.toByte)))</code> */
  def be(byteTolerance: ByteTolerance) = {
    val negated = matchers.not.be(byteTolerance)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not fullyMatch regex ("fred") or not fullyMatch regex (decimal))</code> */
  def fullyMatch(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.fullyMatch(resultOfRegexWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not include regex ("fred") or not include regex (decimal))</code> */
  def include(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.include(resultOfRegexWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not include ("bob") or not include ("1.7"))</code> */
  def include(expectedSubstring: String) = {
    val negated = matchers.not.include(expectedSubstring)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not startWith regex ("bob") or not startWith regex (decimal))</code> */
  def startWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.startWith(resultOfRegexWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not startWith ("fred") or not startWith ("1.7"))</code> */
  def startWith(expectedSubstring: String) = {
    val negated = matchers.not.startWith(expectedSubstring)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not endWith regex ("bob") or not endWith regex (decimal))</code> */
  def endWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) = {
    val negated = matchers.not.endWith(resultOfRegexWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>"fred" should (not endWith ("fred") or not endWith ("1.7"))</code> */
  def endWith(expectedSubstring: String) = {
    val negated = matchers.not.endWith(expectedSubstring)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>Array(1, 2) should (not contain (1) or not contain (3))</code> */
  def contain[T](expectedElement: T) = {
    val negated = matchers.not.contain(expectedElement)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (not contain key ("two") or not contain key ("three"))</code> */
  def contain[T](resultOfKeyWordApplication: ResultOfKeyWordApplication[T]) = {
    val negated = matchers.not.contain(resultOfKeyWordApplication)
    matchersWrapper.or(negated)
  }

  /** Enables: <code>Map("one" -> 1, "two" -> 2) should (not contain value (2) or not contain value (3))</code> */
  def contain[T](resultOfValueWordApplication: ResultOfValueWordApplication[T]) = {
    val negated = matchers.not.contain(resultOfValueWordApplication)
    matchersWrapper.or(negated)
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * Map("one" -> 1, "two" -> 2) should (not contain value (2) or not contain value (3))
 * ^
 * </pre>
 *
 * Note: the <code>notWord</code> parameter is not used by the body; it exists only so that
 * this overload is selected when <code>or not</code> appears in a matcher expression.
 */
def or(notWord: NotWord): OrNotWord = new OrNotWord
}
/**
 * This implicit conversion method enables ScalaTest matchers expressions that involve <code>and</code> and <code>or</code>.
 * Wrapping the left-hand matcher in a <code>MatcherWrapper</code> gives it the <code>and</code>/<code>or</code>
 * combinator methods defined on that class.
 */
implicit def convertToMatcherWrapper[T](leftMatcher: Matcher[T]): MatcherWrapper[T] = new MatcherWrapper(leftMatcher)
//
// This class is used as the return type of the overloaded should method (in MapShouldWrapper)
// that takes a HaveWord. Its key method will be called in situations like this:
//
// map should have key 1
//
// This gets changed to :
//
// convertToMapShouldWrapper(map).should(have).key(1)
//
// Thus, the map is wrapped in a convertToMapShouldWrapper call via an implicit conversion, which results in
// a MapShouldWrapper. This has a should method that takes a HaveWord. That method returns a
// ResultOfHaveWordPassedToShould that remembers the map to the left of should. Then this class
// has a key method that takes a K type, the key type of the map. It does the assertion thing.
//
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfContainWordForMap[K, V](left: scala.collection.Map[K, V], shouldBeTrue: Boolean) {

  /**
   * Enables: <code>map should contain key ("one")</code>
   * Throws a test-failed exception when the key's presence disagrees with <code>shouldBeTrue</code>.
   */
  def key(expectedKey: K) {
    val keyPresent = left.contains(expectedKey)
    if (keyPresent != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotContainKey" else "containedKey"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedKey))
    }
  }

  /**
   * Enables: <code>map should contain value (1)</code>
   * Throws a test-failed exception when the value's presence disagrees with <code>shouldBeTrue</code>.
   */
  def value(expectedValue: V) {
    // Equality-based scan of the values rather than left.values.contains (change made for 2.8.0 RC1).
    val valuePresent = left.values.exists(expectedValue == _)
    if (valuePresent != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotContainValue" else "containedValue"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedValue))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfContainWordForJavaMap[K, V](left: java.util.Map[K, V], shouldBeTrue: Boolean) {

  /**
   * Enables (where <code>javaMap</code> is a <code>java.util.Map</code>):
   * <code>javaMap should contain key ("two")</code>
   */
  def key(expectedKey: K) {
    val keyPresent = left.containsKey(expectedKey)
    if (keyPresent != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotContainKey" else "containedKey"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedKey))
    }
  }

  /**
   * Enables (where <code>javaMap</code> is a <code>java.util.Map</code>):
   * <code>javaMap should contain value ("2")</code>
   */
  def value(expectedValue: V) {
    val valuePresent = left.containsValue(expectedValue)
    if (valuePresent != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotContainValue" else "containedValue"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedValue))
    }
  }
}
/**
 * This implicit conversion method enables the following syntax (<code>javaColl</code> is a <code>java.util.Collection</code>):
 *
 * <pre>
 * javaColl should contain ("two")
 * </pre>
 *
 * The <code>(contain element ("two"))</code> expression will result in a <code>Matcher[scala.Iterable[String]]</code>. This
 * implicit conversion method will convert that matcher to a <code>Matcher[java.util.Collection[String]]</code>.
 */
implicit def convertIterableMatcherToJavaCollectionMatcher[T](iterableMatcher: Matcher[Iterable[T]]) =
  new Matcher[java.util.Collection[T]] {
    def apply(left: java.util.Collection[T]) = {
      // Present the Java collection to the matcher through a read-only Scala Iterable view.
      val wrapped = new Iterable[T] {
        override def iterator = new Iterator[T] { // For 2.8
          private val underlying = left.iterator
          def hasNext: Boolean = underlying.hasNext
          def next: T = underlying.next
        }
        // Delegate toString so failure messages show the original collection.
        override def toString = left.toString
      }
      iterableMatcher.apply(wrapped)
    }
  }
/**
 * This implicit conversion converts a <code>Matcher[Iterable[T]]</code> into a
 * <code>Matcher[Array[T]]</code>, so matchers written against <code>Iterable</code> can be
 * applied to arrays. (NOTE(review): "Arrayt" in the method name looks like a typo for "Array";
 * the name is kept unchanged for compatibility.)
 */
implicit def convertIterableMatcherToArraytMatcher[T](iterableMatcher: Matcher[Iterable[T]]) =
  new Matcher[Array[T]] {
    def apply(left: Array[T]) = {
      val wrapped = new Iterable[T] {
        override def iterator = new Iterator[T] { // For 2.8
          private var cursor = 0
          def hasNext: Boolean = cursor < left.length
          def next: T = {
            val elem = left(cursor)
            cursor += 1
            elem
          }
        }
        // Prettify the array's toString here: by the time it reaches decorateToStringValue, the
        // array is hidden inside this Iterable, so FailureMessages.decorateToStringValue would
        // not prettify it anymore.
        override def toString = FailureMessages.prettifyArrays(left).toString
      }
      iterableMatcher.apply(wrapped)
    }
  }
/**
* This implicit conversion method enables the following syntax (<code>javaMap</code> is a <code>java.util.Map</code>):
*
* <pre>
* javaMap should (contain key ("two"))
* </pre>
*
* The <code>(contain key ("two"))</code> expression will result in a <code>Matcher[scala.collection.Map[String, Any]]</code>. This
* implicit conversion method will convert that matcher to a <code>Matcher[java.util.Map[String, Any]]</code>.
*/
implicit def convertMapMatcherToJavaMapMatcher[K, V](mapMatcher: Matcher[scala.collection.Map[K, V]]) =
new Matcher[java.util.Map[K, V]] {
def apply(left: java.util.Map[K, V]) = {
// Even though the java map is mutable I just wrap it in a plain old Scala map, because
// I have no intention of mutating it.
class MapWrapper[Z](javaMap: java.util.Map[K, Z]) extends scala.collection.Map[K, Z] {
override def size: Int = javaMap.size
def get(key: K): Option[Z] =
if (javaMap.containsKey(key)) Some(javaMap.get(key)) else None
override def iterator = new Iterator[(K, Z)] {
// Iterate the Java key set and pair each key with its looked-up value.
private val javaIterator = javaMap.keySet.iterator
def next: (K, Z) = {
val nextKey = javaIterator.next
(nextKey, javaMap.get(nextKey))
}
def hasNext: Boolean = javaIterator.hasNext
}
// + and - copy into a fresh HashMap so the wrapped Java map is never mutated.
override def +[W >: Z] (kv: (K, W)): scala.collection.Map[K, W] = {
val newJavaMap = new java.util.HashMap[K, W](javaMap)
val (key, value) = kv
newJavaMap.put(key, value)
new MapWrapper[W](newJavaMap)
}
override def - (key: K): scala.collection.Map[K, Z] = {
val newJavaMap = new java.util.HashMap[K, Z](javaMap)
newJavaMap.remove(key)
new MapWrapper[Z](newJavaMap)
}
// Forward toString so failure messages display the original Java map.
override def toString = javaMap.toString
}
val scalaMap = new MapWrapper[V](left)
mapMatcher.apply(scalaMap)
}
}
// Ack. The above conversion doesn't apply to java.util.Maps, because java.util.Map is not a subinterface
// of java.util.Collection. But right now Matcher[Iterable] supports only "contain" and "have size"
// syntax, and thus that should work on Java maps too, why not. Well I'll tell you why not. It is too complicated.
// Since java Map is not a java Collection, I'll say the contain syntax doesn't work on it. But you can say
// have key.
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ContainWord {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Array(1, 2) should (contain (2) and contain (1))
   *                     ^
   * </pre>
   */
  def apply[T](expectedElement: T): Matcher[Iterable[T]] =
    new Matcher[Iterable[T]] {
      def apply(left: Iterable[T]) =
        MatchResult(
          // Was left.elements.contains(expectedElement): `elements` is the Scala 2.7
          // name for `iterator` and is gone in 2.8 (the rest of this file already uses
          // `iterator` "For 2.8"). `exists(_ == ...)` is how `contains` is defined, so
          // the == semantics are unchanged, and it compiles on both versions.
          left.exists(_ == expectedElement),
          FailureMessages("didNotContainExpectedElement", left, expectedElement),
          FailureMessages("containedExpectedElement", left, expectedElement)
        )
    }

  //
  // This key method is called when "contain" is used in a logical expression, such as:
  // map should { contain key 1 and equal (Map(1 -> "Howdy")) }. It results in a matcher
  // that remembers the key value. By making the value type Any, it causes overloaded shoulds
  // to work, because for example a Matcher[Map[Int, Any]] is a subtype of Matcher[Map[Int, String]],
  // given Map is covariant in its V (the value type stored in the map) parameter and Matcher is
  // contravariant in its lone type parameter. Thus, the type of the Matcher resulting from contain key 1
  // is a subtype of the map type that has a known value type parameter because its that of the map
  // to the left of should. This means the should method that takes a map will be selected by Scala's
  // method overloading rules.
  //
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * map should (contain key ("fifty five") or contain key ("twenty two"))
   *                     ^
   * </pre>
   *
   * The map's value type parameter cannot be inferred because only a key type is provided in
   * an expression like <code>(contain key ("fifty five"))</code>. The matcher returned
   * by this method matches <code>scala.collection.Map</code>s with the inferred key type and value type <code>Any</code>. Given
   * <code>Map</code> is covariant in its value type, and <code>Matcher</code> is contravariant in
   * its type parameter, a <code>Matcher[Map[Int, Any]]</code>, for example, is a subtype of <code>Matcher[Map[Int, String]]</code>.
   * This will enable the matcher returned by this method to be used against any <code>Map</code> that has
   * the inferred key type.
   */
  def key[K](expectedKey: K): Matcher[scala.collection.Map[K, Any]] =
    new Matcher[scala.collection.Map[K, Any]] {
      def apply(left: scala.collection.Map[K, Any]) =
        MatchResult(
          left.contains(expectedKey),
          FailureMessages("didNotContainKey", left, expectedKey),
          FailureMessages("containedKey", left, expectedKey)
        )
    }

  // Holy smokes I'm starting to scare myself. I fixed the problem of the compiler not being
  // able to infer the value type in contain value 1 and ... like expressions, because the
  // value type is there, with an existential type. Since I don't know what K is, I decided to
  // try just saying that with an existential type, and it compiled and ran. Pretty darned
  // amazing compiler. The problem could not be fixed like I fixed the key method above, because
  // Maps are nonvariant in their key type parameter, whereas they are covariant in their value
  // type parameter, so the same trick wouldn't work. But this existential type trick seems to
  // work like a charm.
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Map("one" -> 1, "two" -> 2) should (not contain value (5) and not contain value (3))
   *                                         ^
   * </pre>
   *
   * The map's key type parameter cannot be inferred because only a value type is provided in
   * an expression like <code>(contain value (5))</code>. The matcher returned
   * by this method matches <code>scala.collection.Map</code>s with the inferred value type and the existential key
   * type <code>[K] forSome { type K }</code>. Even though <code>Matcher</code> is contravariant in its type parameter, because
   * <code>Map</code> is nonvariant in its key type,
   * a <code>Matcher[Map[Any, Int]]</code>, for example, is <em>not</em> a subtype of <code>Matcher[Map[String, Int]]</code>,
   * so the key type parameter of the <code>Map</code> returned by this method cannot be <code>Any</code>. By making it
   * an existential type, the Scala compiler will not infer it to anything more specific.
   * This will enable the matcher returned by this method to be used against any <code>Map</code> that has
   * the inferred value type.
   *
   */
  def value[V](expectedValue: V): Matcher[scala.collection.Map[K, V] forSome { type K }] =
    new Matcher[scala.collection.Map[K, V] forSome { type K }] {
      def apply(left: scala.collection.Map[K, V] forSome { type K }) =
        MatchResult(
          // left.values.contains(expectedValue), CHANGING FOR 2.8.0 RC1
          left.values.exists(expectedValue == _),
          FailureMessages("didNotContainValue", left, expectedValue),
          FailureMessages("containedValue", left, expectedValue)
        )
    }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class IncludeWord {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * "1.7" should (include ("1.7") and include ("1.8"))
   *               ^
   * </pre>
   */
  def apply(expectedSubstring: String): Matcher[String] =
    new Matcher[String] {
      def apply(left: String) = {
        // indexOf >= 0 means the substring occurs somewhere in left.
        val substringFound = left.indexOf(expectedSubstring) >= 0
        MatchResult(
          substringFound,
          FailureMessages("didNotIncludeSubstring", left, expectedSubstring),
          FailureMessages("includedSubstring", left, expectedSubstring)
        )
      }
    }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimal = """(-)?(\\d+)(\\.\\d*)?"""
   * "a1.7b" should (include regex (decimal) and include regex (decimal))
   *                         ^
   * </pre>
   */
  def regex[T <: String](right: T): Matcher[T] = regex(right.r)

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
   * "a1.7" should (include regex (decimalRegex) and include regex (decimalRegex))
   *                        ^
   * </pre>
   */
  def regex(expectedRegex: Regex): Matcher[String] =
    new Matcher[String] {
      def apply(left: String) = {
        // A defined first match means the regex occurs somewhere in left.
        val firstMatch = expectedRegex.findFirstIn(left)
        MatchResult(
          firstMatch.isDefined,
          FailureMessages("didNotIncludeRegex", left, expectedRegex),
          FailureMessages("includedRegex", left, expectedRegex)
        )
      }
    }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class StartWithWord {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * "1.7b" should (startWith ("1.7") and startWith ("1.7b"))
   *                ^
   * </pre>
   */
  def apply(right: String) =
    new Matcher[String] {
      def apply(left: String) = {
        val startsWithRight = left.startsWith(right)
        MatchResult(
          startsWithRight,
          FailureMessages("didNotStartWith", left, right),
          FailureMessages("startedWith", left, right)
        )
      }
    }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimal = """(-)?(\\d+)(\\.\\d*)?"""
   * "1.7b" should (startWith regex (decimal) and startWith regex (decimal))
   *                          ^
   * </pre>
   */
  def regex[T <: String](right: T): Matcher[T] = regex(right.r)

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
   * "1.7" should (startWith regex (decimalRegex) and startWith regex (decimalRegex))
   *                         ^
   * </pre>
   */
  def regex(rightRegex: Regex): Matcher[String] =
    new Matcher[String] {
      def apply(left: String) = {
        // lookingAt tests whether the regex matches a prefix of left.
        val patternMatcher = rightRegex.pattern.matcher(left)
        MatchResult(
          patternMatcher.lookingAt,
          FailureMessages("didNotStartWithRegex", left, rightRegex),
          FailureMessages("startedWithRegex", left, rightRegex)
        )
      }
    }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class EndWithWord {
/**
* This method enables the following syntax:
*
* <pre>
* "1.7b" should (endWith ("1.7b") and endWith ("7b"))
*                ^
* </pre>
*/
def apply(right: String) =
new Matcher[String] {
def apply(left: String) =
MatchResult(
left endsWith right,
FailureMessages("didNotEndWith", left, right),
FailureMessages("endedWith", left, right)
)
}
/**
* This method enables the following syntax:
*
* <pre>
* val decimal = """(-)?(\\d+)(\\.\\d*)?"""
* "b1.7" should (endWith regex (decimal) and endWith regex (decimal))
*                        ^
* </pre>
*/
def regex[T <: String](right: T): Matcher[T] = regex(right.r)
/**
* This method enables the following syntax:
*
* <pre>
* val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
* "b1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex))
*                        ^
* </pre>
*/
def regex(rightRegex: Regex): Matcher[String] =
new Matcher[String] {
def apply(left: String) = {
val allMatches = rightRegex.findAllIn(left)
// NOTE(review): order matters here — on Regex.MatchIterator, `end` reports the
// end index of the match most recently consumed, and is only valid after
// `hasNext` has driven the matching forward. The left-to-right && evaluation
// relies on that; confirm this still holds if the Scala version is upgraded.
MatchResult(
allMatches.hasNext && (allMatches.end == left.length),
FailureMessages("didNotEndWithRegex", left, rightRegex),
FailureMessages("endedWithRegex", left, rightRegex)
)
}
}
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class FullyMatchWord {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimal = """(-)?(\\d+)(\\.\\d*)?"""
   * "1.7" should (fullyMatch regex (decimal) and fullyMatch regex (decimal))
   *                          ^
   * </pre>
   */
  def regex(rightRegexString: String): Matcher[String] =
    new Matcher[String] {
      def apply(left: String) = {
        // Pattern.matches requires the regex to match the entire input string.
        val wholeStringMatches = java.util.regex.Pattern.matches(rightRegexString, left)
        MatchResult(
          wholeStringMatches,
          FailureMessages("didNotFullyMatchRegex", left, UnquotedString(rightRegexString)),
          FailureMessages("fullyMatchedRegex", left, UnquotedString(rightRegexString))
        )
      }
    }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
   * "1.7" should (fullyMatch regex (decimalRegex) and fullyMatch regex (decimalRegex))
   *                          ^
   * </pre>
   */
  def regex(rightRegex: Regex): Matcher[String] =
    new Matcher[String] {
      def apply(left: String) = {
        // Matcher.matches requires the regex to match the entire input string.
        val patternMatcher = rightRegex.pattern.matcher(left)
        MatchResult(
          patternMatcher.matches,
          FailureMessages("didNotFullyMatchRegex", left, rightRegex),
          FailureMessages("fullyMatchedRegex", left, rightRegex)
        )
      }
    }
}
// The getLength and getSize field conversions may seem inconsistent with
// what I do in symbol HavePropertyMatchers. They aren't, though, because the difference is that
// here it's a Scala field and there it's a Java field: a val getLength is a
// perfectly valid Scala way to get a JavaBean property Java method in the bytecodes.
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* <p>
* Subclasses of this abstract class are used as the result of implicit conversions from the various structural types that
* are considered to represent length: <code>length</code> or <code>getLength</code> methods or fields that return <code>Int</code>
* or <code>Long</code>. This enables the <code>have length (7)</code> syntax to be used with any object that has a length.
* The implicit conversion methods that result in this type are:
* </p>
*
* <ul>
* <li><code>convertLengthFieldToIntLengthWrapper</code></li>
* <li><code>convertLengthMethodToIntLengthWrapper</code></li>
* <li><code>convertGetLengthFieldToIntLengthWrapper</code></li>
* <li><code>convertGetLengthMethodToIntLengthWrapper</code></li>
* <li><code>convertLengthFieldToLongLengthWrapper</code></li>
* <li><code>convertLengthMethodToLongLengthWrapper</code></li>
* <li><code>convertGetLengthFieldToLongLengthWrapper</code></li>
* <li><code>convertGetLengthMethodToLongLengthWrapper</code></li>
* </ul>
*
* @author Bill Venners
*/
abstract class LengthWrapper {
// The normalized length; Int sources are widened to Long.
def length: Long
}
/**
* This implicit conversion method converts an object with a <code>length</code> field of type <code>Int</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertLengthFieldToIntLengthWrapper(o: { val length: Int }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.length // Int is widened to Long
  }
/**
* This implicit conversion method converts an object with a <code>length</code> method of type <code>Int</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertLengthMethodToIntLengthWrapper(o: { def length(): Int }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.length() // Int is widened to Long
  }
/**
* This implicit conversion method converts an object with a <code>getLength</code> field of type <code>Int</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertGetLengthFieldToIntLengthWrapper(o: { val getLength: Int }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.getLength // Int is widened to Long
  }
/**
* This implicit conversion method converts an object with a <code>getLength</code> method of type <code>Int</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertGetLengthMethodToIntLengthWrapper(o: { def getLength(): Int }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.getLength() // Int is widened to Long
  }
/**
* This implicit conversion method converts an object with a <code>length</code> field of type <code>Long</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertLengthFieldToLongLengthWrapper(o: { val length: Long }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.length
  }
/**
* This implicit conversion method converts an object with a <code>length</code> method of type <code>Long</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertLengthMethodToLongLengthWrapper(o: { def length(): Long }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.length()
  }
/**
* This implicit conversion method converts an object with a <code>getLength</code> field of type <code>Long</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertGetLengthFieldToLongLengthWrapper(o: { val getLength: Long }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.getLength
  }
/**
* This implicit conversion method converts an object with a <code>getLength</code> method of type <code>Long</code> to a
* <code>LengthWrapper</code>, to enable that object to be used with the <code>have length (7)</code> syntax.
*/
implicit def convertGetLengthMethodToLongLengthWrapper(o: { def getLength(): Long }): LengthWrapper =
  new LengthWrapper {
    def length: Long = o.getLength()
  }
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* <p>
* Subclasses of this abstract class are used as the result of implicit conversions from the various structural types that
* are considered to represent size: <code>size</code> or <code>getSize</code> methods or fields that return <code>Int</code>
* or <code>Long</code>. This enables the <code>have size (7)</code> syntax to be used with any object that has a size.
* The implicit conversion methods that result in this type are:
* </p>
*
* <ul>
* <li><code>convertSizeFieldToIntSizeWrapper</code></li>
* <li><code>convertSizeMethodToIntSizeWrapper</code></li>
* <li><code>convertGetSizeFieldToIntSizeWrapper</code></li>
* <li><code>convertGetSizeMethodToIntSizeWrapper</code></li>
* <li><code>convertSizeFieldToLongSizeWrapper</code></li>
* <li><code>convertSizeMethodToLongSizeWrapper</code></li>
* <li><code>convertGetSizeFieldToLongSizeWrapper</code></li>
* <li><code>convertGetSizeMethodToLongSizeWrapper</code></li>
* </ul>
*
* @author Bill Venners
*/
abstract class SizeWrapper {
// The normalized size; Int sources are widened to Long.
def size: Long
}
/**
* This implicit conversion method converts an object with a <code>size</code> field of type <code>Int</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertSizeFieldToIntSizeWrapper(o: { val size: Int }) =
new SizeWrapper {
def size = o.size
}
/**
* This implicit conversion method converts an object with a <code>size</code> method of type <code>Int</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertSizeMethodToIntSizeWrapper(o: { def size(): Int }) =
new SizeWrapper {
def size = o.size()
}
/**
* This implicit conversion method converts an object with a <code>getSize</code> field of type <code>Int</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertGetSizeFieldToIntSizeWrapper(o: { val getSize: Int }) =
new SizeWrapper {
def size = o.getSize
}
/**
* This implicit conversion method converts an object with a <code>getSize</code> method of type <code>Int</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertGetSizeMethodToIntSizeWrapper(o: { def getSize(): Int }) =
new SizeWrapper {
def size = o.getSize()
}
/**
* This implicit conversion method converts an object with a <code>size</code> field of type <code>Long</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertSizeFieldToLongSizeWrapper(o: { val size: Long }) =
new SizeWrapper {
def size = o.size
}
/**
* This implicit conversion method converts an object with a <code>size</code> method of type <code>Long</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertSizeMethodToLongSizeWrapper(o: { def size(): Long }) =
new SizeWrapper {
def size = o.size()
}
/**
* This implicit conversion method converts an object with a <code>getSize</code> field of type <code>Long</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertGetSizeFieldToLongSizeWrapper(o: { val getSize: Long }) =
new SizeWrapper {
def size = o.getSize
}
/**
* This implicit conversion method converts an object with a <code>getSize</code> method of type <code>Long</code> to a
* <code>SizeWrapper</code>, to enable that object to be used with the <code>have size (7)</code> syntax.
*/
implicit def convertGetSizeMethodToLongSizeWrapper(o: { def getSize(): Long }) =
new SizeWrapper {
def size = o.getSize()
}
// This guy is generally done through an implicit conversion from a symbol. It takes that symbol, and
// then represents an object with an apply method. So it gives an apply method to symbols.
// book should have ('author ("Gibson"))
// ^ // Basically this 'author symbol gets converted into this class, and its apply method takes "Gibson"
// TODO, put the documentation of the details of the algo for selecting a method or field to use here.
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* <p>
* This class is used as the result of an implicit conversion from class <code>Symbol</code>, to enable symbols to be
* used in <code>have ('author ("Dickens"))</code> syntax. The name of the implicit conversion method is
* <code>convertSymbolToHavePropertyMatcherGenerator</code>.
* </p>
*
* <p>
* Class <code>HavePropertyMatcherGenerator</code>'s primary constructor takes a <code>Symbol</code>. The
* <code>apply</code> method uses reflection to find and access a property that has the name specified by the
* <code>Symbol</code> passed to the constructor, so it can determine if the property has the expected value
* passed to <code>apply</code>.
* If the symbol passed is <code>'title</code>, for example, the <code>apply</code> method
* will use reflection to look for a public Java field named
* "title", a public method named "title", or a public method named "getTitle".
* If a method, it must take no parameters. If multiple candidates are found,
* the <code>apply</code> method will select based on the following algorithm:
* </p>
*
* <table cellpadding="2" border="1">
* <tr><th>Field</th><th>Method</th><th>"get" Method</th><th>Result</th></tr>
* <tr><td> </td><td> </td><td> </td><td>Throws <code>TestFailedException</code>, because no candidates found</td></tr>
* <tr><td> </td><td> </td><td><code>getTitle()</code></td><td>Invokes <code>getTitle()</code></td></tr>
* <tr><td> </td><td><code>title()</code></td><td> </td><td>Invokes <code>title()</code></td></tr>
* <tr><td> </td><td><code>title()</code></td><td><code>getTitle()</code></td><td>Invokes <code>title()</code> (this can occur when <code>BeanProperty</code> annotation is used)</td></tr>
* <tr><td><code>title</code></td><td> </td><td> </td><td>Accesses field <code>title</code></td></tr>
* <tr><td><code>title</code></td><td> </td><td><code>getTitle()</code></td><td>Invokes <code>getTitle()</code></td></tr>
* <tr><td><code>title</code></td><td><code>title()</code></td><td> </td><td>Invokes <code>title()</code></td></tr>
* <tr><td><code>title</code></td><td><code>title()</code></td><td><code>getTitle()</code></td><td>Invokes <code>title()</code> (this can occur when <code>BeanProperty</code> annotation is used)</td></tr>
* </table>
*
*
* @author Bill Venners
*/
final class HavePropertyMatcherGenerator(symbol: Symbol) {
/**
* This method enables the following syntax:
*
* <pre>
* book should have ('title ("A Tale of Two Cities"))
*                   ^
* </pre>
*
* <p>
* This class has an <code>apply</code> method that will produce a <code>HavePropertyMatcher[AnyRef, Any]</code>.
* The implicit conversion method, <code>convertSymbolToHavePropertyMatcherGenerator</code>, will cause the
* above line of code to be eventually transformed into:
* </p>
*
* <pre>
* book should have (convertSymbolToHavePropertyMatcherGenerator('title).apply("A Tale of Two Cities"))
* </pre>
*/
def apply(expectedValue: Any) =
new HavePropertyMatcher[AnyRef, Any] {
/**
* This method enables the following syntax:
*
* <pre>
* book should have ('title ("A Tale of Two Cities"))
* </pre>
*
* <p>
* This method uses reflection to discover a field or method with a name that indicates it represents
* the value of the property with the name contained in the <code>Symbol</code> passed to the
* <code>HavePropertyMatcherGenerator</code>'s constructor. The field or method must be public. To be a
* candidate, a field must have the name <code>symbol.name</code>, so if <code>symbol</code> is <code>'title</code>,
* the field name sought will be <code>"title"</code>. To be a candidate, a method must either have the name
* <code>symbol.name</code>, or have a JavaBean-style <code>get</code> or <code>is</code>. If the type of the
* passed <code>expectedValue</code> is <code>Boolean</code>, <code>"is"</code> is prepended, else <code>"get"</code>
* is prepended. Thus if <code>'title</code> is passed as <code>symbol</code>, and the type of the <code>expectedValue</code> is
* <code>String</code>, a method named <code>getTitle</code> will be considered a candidate (the return type
* of <code>getTitle</code> will not be checked, so it need not be <code>String</code>. By contrast, if <code>'defined</code>
* is passed as <code>symbol</code>, and the type of the <code>expectedValue</code> is <code>Boolean</code>, a method
* named <code>isTitle</code> will be considered a candidate so long as its return type is <code>Boolean</code>.
* </p>
* TODO continue the story
*/
def apply(objectWithProperty: AnyRef): HavePropertyMatchResult[Any] = {
// If 'empty passed, propertyName would be "empty"
val propertyName = symbol.name
// Boolean expected values cause the lookup to also consider "is"-style accessors.
val isBooleanProperty =
expectedValue match {
case o: Boolean => true
case _ => false
}
accessProperty(objectWithProperty, symbol, isBooleanProperty) match {
case None =>
// No matching field or accessor found: build the two candidate names for the error message.
// if propertyName is '>, mangledPropertyName would be "$greater"
val mangledPropertyName = transformOperatorChars(propertyName)
// methodNameToInvoke would also be "title"
val methodNameToInvoke = mangledPropertyName
// methodNameToInvokeWithGet would be "getTitle"
val methodNameToInvokeWithGet = "get"+ mangledPropertyName(0).toUpperCase + mangledPropertyName.substring(1)
throw newTestFailedException(Resources("propertyNotFound", methodNameToInvoke, expectedValue.toString, methodNameToInvokeWithGet))
case Some(result) =>
// Property found: report whether its value equals the expected value.
new HavePropertyMatchResult[Any](
result == expectedValue,
propertyName,
expectedValue,
result
)
}
}
}
}
/**
* This implicit conversion method converts a <code>Symbol</code> to a
* <code>HavePropertyMatcherGenerator</code>, to enable the symbol to be used with the <code>have ('author ("Dickens"))</code> syntax.
*/
implicit def convertSymbolToHavePropertyMatcherGenerator(symbol: Symbol) = new HavePropertyMatcherGenerator(symbol)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class HaveWord {

  // TODO: How about returning a Matcher[Gazornimplatz] and then providing implicit conversion
  // methods from Matcher[Gazornimplatz] to Matcher[Seq], Matcher[String], Matcher[java.util.List], and
  // Matcher[the structural length methods]. This is similar to the technique I used with "contain (7)"
  // to get it to work with java.util.Collection.
  // I couldn't figure out how to combine view bounds with existential types. May or may not
  // be possible, but going dynamic for now at least.

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * book should have length (9)
   *             ^
   * </pre>
   *
   * <p>
   * Currently (as of ScalaTest 0.9.5), this method will produce a <code>Matcher[AnyRef]</code>, and if the
   * <code>AnyRef</code> passed to that matcher's <code>apply</code> method does not have the appropriate <code>length</code> property
   * structure, all will compile but a <code>TestFailedException</code> will result at runtime explaining the problem. The one exception is that it will work on
   * <code>java.util.List</code>, even though that type has no <code>length</code> structure (its <code>size</code> property
   * will be used instead.) In a future ScalaTest release, this may be tightened so that all is statically checked at compile time.
   * </p>
   */
  def length(expectedLength: Long) =
    new Matcher[AnyRef] {
      // Every branch below reports the same pair of failure messages; only the
      // boolean comparison differs, so it is factored out here.
      private def lengthResult(left: AnyRef, lengthMatches: Boolean) =
        MatchResult(
          lengthMatches,
          FailureMessages("didNotHaveExpectedLength", left, expectedLength),
          FailureMessages("hadExpectedLength", left, expectedLength)
        )
      def apply(left: AnyRef) =
        left match {
          case leftArray: Array[_] =>
            lengthResult(left, leftArray.length == expectedLength)
          case leftSeq: Seq[_] =>
            lengthResult(left, leftSeq.length == expectedLength)
          case leftString: String =>
            lengthResult(left, leftString.length == expectedLength)
          case leftJavaList: java.util.List[_] =>
            // java.util.List has no length; its size is used instead.
            lengthResult(left, leftJavaList.size == expectedLength)
          case _ =>
            // Fall back to reflective lookup of a length field/method/getLength.
            accessProperty(left, 'length, false) match {
              case None =>
                throw newTestFailedException(Resources("noLengthStructure", expectedLength.toString))
              case Some(propertyValue) =>
                lengthResult(left, propertyValue == expectedLength)
            }
        }
    }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * book should have size (9)
   *             ^
   * </pre>
   *
   * <p>
   * Currently (as of ScalaTest 0.9.5), this method will produce a <code>Matcher[AnyRef]</code>, and if the
   * <code>AnyRef</code> passed to that matcher's <code>apply</code> method does not have the appropriate <code>size</code> property
   * structure, all will compile but a <code>TestFailedException</code> will result at runtime explaining the problem.
   * In a future ScalaTest release, this may be tightened so that all is statically checked at compile time.
   * </p>
   */
  def size(expectedSize: Long) =
    new Matcher[AnyRef] {
      // Shared MatchResult construction, as in length above.
      private def sizeResult(left: AnyRef, sizeMatches: Boolean) =
        MatchResult(
          sizeMatches,
          FailureMessages("didNotHaveExpectedSize", left, expectedSize),
          FailureMessages("hadExpectedSize", left, expectedSize)
        )
      def apply(left: AnyRef) =
        left match {
          case leftArray: Array[_] =>
            sizeResult(left, leftArray.length == expectedSize)
          case leftSeq: Collection[_] =>
            sizeResult(left, leftSeq.size == expectedSize)
          case leftJavaList: java.util.List[_] =>
            sizeResult(left, leftJavaList.size == expectedSize)
          case _ =>
            // Fall back to reflective lookup of a size field/method/getSize.
            accessProperty(left, 'size, false) match {
              case None =>
                throw newTestFailedException(Resources("noSizeStructure", expectedSize.toString))
              case Some(propertyValue) =>
                sizeResult(left, propertyValue == expectedSize)
            }
        }
    }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * book should have (title ("A Tale of Two Cities"))
   *             ^
   * </pre>
   */
  def apply[T](firstPropertyMatcher: HavePropertyMatcher[T, _], propertyMatchers: HavePropertyMatcher[T, _]*): Matcher[T] =
    new Matcher[T] {
      def apply(left: T) = {
        // Run every property matcher against left; the first failure (if any) drives the message.
        val results =
          for (propertyVerifier <- firstPropertyMatcher :: propertyMatchers.toList) yield
            propertyVerifier(left)
        val firstFailureOption = results.find(pv => !pv.matches)
        val justOneProperty = propertyMatchers.length == 0
        firstFailureOption match {
          case Some(firstFailure) =>
            val failedVerification = firstFailure
            val failureMessage =
              FailureMessages(
                "propertyDidNotHaveExpectedValue",
                UnquotedString(failedVerification.propertyName),
                failedVerification.expectedValue,
                failedVerification.actualValue,
                left
              )
            val midSentenceFailureMessage =
              FailureMessages(
                "midSentencePropertyDidNotHaveExpectedValue",
                UnquotedString(failedVerification.propertyName),
                failedVerification.expectedValue,
                failedVerification.actualValue,
                left
              )
            MatchResult(false, failureMessage, failureMessage, midSentenceFailureMessage, midSentenceFailureMessage)
          case None =>
            // All properties matched; with a single property the message names it,
            // otherwise a summary message is used.
            val failureMessage =
              if (justOneProperty) {
                val firstPropertyResult = results.head // know this will succeed, because firstPropertyMatcher was required
                FailureMessages(
                  "propertyHadExpectedValue",
                  UnquotedString(firstPropertyResult.propertyName),
                  firstPropertyResult.expectedValue,
                  left
                )
              }
              else FailureMessages("allPropertiesHadExpectedValues", left)
            val midSentenceFailureMessage =
              if (justOneProperty) {
                val firstPropertyResult = results.head // know this will succeed, because firstPropertyMatcher was required
                FailureMessages(
                  "midSentencePropertyHadExpectedValue",
                  UnquotedString(firstPropertyResult.propertyName),
                  firstPropertyResult.expectedValue,
                  left
                )
              }
              else FailureMessages("midSentenceAllPropertiesHadExpectedValues", left)
            MatchResult(true, failureMessage, failureMessage, midSentenceFailureMessage, midSentenceFailureMessage)
        }
      }
    }
}
//
// This class is used as the return type of the overloaded should method (in CollectionShouldWrapper)
// that takes a HaveWord. Its size method will be called in situations like this:
//
// list should have size 1
//
// This gets changed to:
//
// convertToCollectionShouldWrapper(list).should(have).size(1)
//
// Thus, the list is wrapped in a convertToCollectionShouldWrapper call via an implicit conversion, which results in
// a CollectionShouldWrapper. This has a should method that takes a HaveWord. That method returns a
// ResultOfHaveWordForCollectionPassedToShould that remembers the collection to the left of should. Then this class
// has a size method that takes a T type, type parameter of the iterable. It does the assertion thing.
//
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfHaveWordForCollection[T](left: Collection[T], shouldBeTrue: Boolean) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * collection should have size (10)
   *                        ^
   * </pre>
   */
  def size(expectedSize: Int) {
    val hasExpectedSize = left.size == expectedSize
    if (hasExpectedSize != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedSize))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfHaveWordForJavaCollection[T](left: java.util.Collection[T], shouldBeTrue: Boolean) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaCollection should have size (10)
   *                            ^
   * </pre>
   */
  def size(expectedSize: Int) {
    val sizeIsAsExpected = left.size == expectedSize
    if (sizeIsAsExpected != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(key, left, expectedSize))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForJavaMap(left: java.util.Map[_, _], shouldBeTrue: Boolean) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaMap should have size (10)
   *                     ^
   * </pre>
   */
  def size(expectedSize: Int) {
    val sizeMatches = left.size == expectedSize
    if (sizeMatches != shouldBeTrue) {
      val messageKey = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(messageKey, left, expectedSize))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForSeq[T](left: Seq[T], shouldBeTrue: Boolean) extends ResultOfHaveWordForCollection[T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * seq should have length (20)
   *                 ^
   * </pre>
   */
  def length(expectedLength: Int) {
    val lengthMatches = left.length == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
class ResultOfHaveWordForArray[T](left: Array[T], shouldBeTrue: Boolean) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * array should have size (10)
   *                   ^
   * </pre>
   */
  def size(expectedSize: Int) {
    val sizeMatches = left.size == expectedSize
    if (sizeMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(key, left, expectedSize))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * array should have length (20)
   *                   ^
   * </pre>
   */
  def length(expectedLength: Int) {
    val lengthMatches = left.length == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfNotWordForIterable[E, T <: Iterable[E]](left: T, shouldBeTrue: Boolean)
    extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * iterable should not contain ("one")
   *                     ^
   * </pre>
   */
  def contain(expectedElement: E) {
    val found = left.exists(_ == expectedElement)
    if (found != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainExpectedElement" else "containedExpectedElement"
      throw newTestFailedException(FailureMessages(key, left, expectedElement))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfNotWordForCollection[E, T <: Collection[E]](left: T, shouldBeTrue: Boolean)
    extends ResultOfNotWordForIterable[E, T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * collection should not have size (3)
   *                       ^
   * </pre>
   */
  def have(resultOfSizeWordApplication: ResultOfSizeWordApplication) {
    val expectedSize = resultOfSizeWordApplication.expectedSize
    val sizeMatches = left.size == expectedSize
    if (sizeMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(key, left, expectedSize))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfNotWordForJavaCollection[E, T <: java.util.Collection[E]](left: T, shouldBeTrue: Boolean)
    extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaCollection should not have size (3)
   *                           ^
   * </pre>
   */
  def have(resultOfSizeWordApplication: ResultOfSizeWordApplication) {
    val expectedSize = resultOfSizeWordApplication.expectedSize
    val sizeMatches = left.size == expectedSize
    if (sizeMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(key, left, expectedSize))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaCollection should not contain ("elephant")
   *                           ^
   * </pre>
   */
  def contain(expectedElement: E) {
    val found = left.contains(expectedElement)
    if (found != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainExpectedElement" else "containedExpectedElement"
      throw newTestFailedException(FailureMessages(key, left, expectedElement))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForMap[K, V](left: scala.collection.Map[K, V], shouldBeTrue: Boolean)
    extends ResultOfNotWordForCollection[(K, V), scala.collection.Map[K, V]](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * map should not contain key ("three")
   *                ^
   * </pre>
   */
  def contain(resultOfKeyWordApplication: ResultOfKeyWordApplication[K]) {
    val expectedKey = resultOfKeyWordApplication.expectedKey
    val hasKey = left.contains(expectedKey)
    if (hasKey != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainKey" else "containedKey"
      throw newTestFailedException(FailureMessages(key, left, expectedKey))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Map("one" -> 1, "two" -> 2) should not contain value (3)
   *                                        ^
   * </pre>
   */
  def contain(resultOfValueWordApplication: ResultOfValueWordApplication[V]) {
    val expectedValue = resultOfValueWordApplication.expectedValue
    val hasValue = left.values.exists(_ == expectedValue)
    if (hasValue != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainValue" else "containedValue"
      throw newTestFailedException(FailureMessages(key, left, expectedValue))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForJavaMap[K, V](left: java.util.Map[K, V], shouldBeTrue: Boolean)
    extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaMap should not contain key ("three")
   *                    ^
   * </pre>
   */
  def contain(resultOfKeyWordApplication: ResultOfKeyWordApplication[K]) {
    val expectedKey = resultOfKeyWordApplication.expectedKey
    val hasKey = left.containsKey(expectedKey)
    if (hasKey != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainKey" else "containedKey"
      throw newTestFailedException(FailureMessages(key, left, expectedKey))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaMap should not contain value (3)
   *                    ^
   * </pre>
   */
  def contain(resultOfValueWordApplication: ResultOfValueWordApplication[V]) {
    val expectedValue = resultOfValueWordApplication.expectedValue
    val hasValue = left.containsValue(expectedValue)
    if (hasValue != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainValue" else "containedValue"
      throw newTestFailedException(FailureMessages(key, left, expectedValue))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForSeq[E, T <: Seq[E]](left: T, shouldBeTrue: Boolean)
    extends ResultOfNotWordForCollection[E, T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * List(1, 2) should not have length (12)
   *                       ^
   * </pre>
   */
  def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
    val expectedLength = resultOfLengthWordApplication.expectedLength
    val lengthMatches = left.length == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForArray[E](left: Array[E], shouldBeTrue: Boolean)
    extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Array("two", "three") should not contain ("one")
   *                                  ^
   * </pre>
   */
  def contain(expectedElement: E) {
    val found = left.exists(_ == expectedElement)
    if (found != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotContainExpectedElement" else "containedExpectedElement"
      throw newTestFailedException(FailureMessages(key, left, expectedElement))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Array(1, 2) should not have size (3)
   *                        ^
   * </pre>
   */
  def have(resultOfSizeWordApplication: ResultOfSizeWordApplication) {
    val expectedSize = resultOfSizeWordApplication.expectedSize
    val sizeMatches = left.size == expectedSize
    if (sizeMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize"
      throw newTestFailedException(FailureMessages(key, left, expectedSize))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * Array(1, 2) should not have length (12)
   *                        ^
   * </pre>
   */
  def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
    val expectedLength = resultOfLengthWordApplication.expectedLength
    val lengthMatches = left.length == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForJavaList[T](left: java.util.List[T], shouldBeTrue: Boolean) extends ResultOfHaveWordForJavaCollection[T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaList should have length (12)
   *                      ^
   * </pre>
   *
   * <p>
   * This method invokes <code>size</code> on the <code>java.util.List</code> passed as <code>left</code> to
   * determine its length.
   * </p>
   */
  def length(expectedLength: Int) {
    // java.util.List has no length member, so size stands in for it.
    val lengthMatches = left.size == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForJavaList[E, T <: java.util.List[E]](left: T, shouldBeTrue: Boolean)
    extends ResultOfNotWordForJavaCollection[E, T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * javaList should not have length (12)
   *                     ^
   * </pre>
   *
   * <p>
   * This method invokes <code>size</code> on the <code>java.util.List</code> passed as <code>left</code> to
   * determine its length.
   * </p>
   */
  def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
    val expectedLength = resultOfLengthWordApplication.expectedLength
    // java.util.List has no length member, so size stands in for it.
    val lengthMatches = left.size == expectedLength
    if (lengthMatches != shouldBeTrue) {
      val key = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(key, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfBeWordForAnyRef[T <: AnyRef](left: T, shouldBeTrue: Boolean) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * object should be theSameInstanceAs anotherObject
   *                  ^
   * </pre>
   */
  def theSameInstanceAs(right: AnyRef) {
    val sameInstance = left eq right
    if (sameInstance != shouldBeTrue) {
      val key = if (shouldBeTrue) "wasNotSameInstanceAs" else "wasSameInstanceAs"
      throw newTestFailedException(FailureMessages(key, left, right))
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * fileMock should be a ('file)
   *                    ^
   * </pre>
   */
  def a(symbol: Symbol) {
    // The two boolean flags are passed as (true, true) here versus (true, false)
    // in `an` — presumably they control article ("a"/"an") wording; defined in
    // matchSymbolToPredicateMethod elsewhere in this file.
    val symbolResult = matchSymbolToPredicateMethod(left, symbol, true, true)
    if (symbolResult.matches != shouldBeTrue)
      throw newTestFailedException(
        if (shouldBeTrue) symbolResult.failureMessage else symbolResult.negatedFailureMessage
      )
  }

  // TODO: Check the shouldBeTrues, are they sometimes always false or true?
  /**
   * This method enables the following syntax, where <code>badBook</code> is, for example, of type <code>Book</code> and
   * <code>goodRead</code> refers to a <code>BePropertyMatcher[Book]</code>:
   *
   * <pre>
   * badBook should be a (goodRead)
   *                   ^
   * </pre>
   */
  def a(bePropertyMatcher: BePropertyMatcher[T]) {
    val propertyResult = bePropertyMatcher(left)
    if (propertyResult.matches != shouldBeTrue) {
      val key = if (shouldBeTrue) "wasNotA" else "wasA"
      throw newTestFailedException(
        FailureMessages(key, left, UnquotedString(propertyResult.propertyName))
      )
    }
  }

  // TODO, in both of these, the failure message doesn't have a/an
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * fruit should be an ('orange)
   *                 ^
   * </pre>
   */
  def an(symbol: Symbol) {
    val symbolResult = matchSymbolToPredicateMethod(left, symbol, true, false)
    if (symbolResult.matches != shouldBeTrue)
      throw newTestFailedException(
        if (shouldBeTrue) symbolResult.failureMessage else symbolResult.negatedFailureMessage
      )
  }

  /**
   * This method enables the following syntax, where <code>badBook</code> is, for example, of type <code>Book</code> and
   * <code>excellentRead</code> refers to a <code>BePropertyMatcher[Book]</code>:
   *
   * <pre>
   * book should be an (excellentRead)
   *                ^
   * </pre>
   */
  def an(beTrueMatcher: BePropertyMatcher[T]) {
    val propertyResult = beTrueMatcher(left)
    if (propertyResult.matches != shouldBeTrue) {
      val key = if (shouldBeTrue) "wasNotAn" else "wasAn"
      throw newTestFailedException(
        FailureMessages(key, left, UnquotedString(propertyResult.propertyName))
      )
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for
 * <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfNotWord[T](left: T, shouldBeTrue: Boolean) {

  // Shared failure path for all the comparison-style methods below: throws a
  // TestFailedException whenever the observed condition disagrees with the
  // expected polarity (shouldBeTrue), selecting the resource key that matches
  // that polarity.
  private def failUnlessExpected(condition: Boolean, keyIfShouldBeTrue: String, keyIfShouldBeFalse: String, right: Any) {
    if (condition != shouldBeTrue)
      throw newTestFailedException(
        FailureMessages(
          if (shouldBeTrue) keyIfShouldBeTrue else keyIfShouldBeFalse,
          left,
          right
        )
      )
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not equal (7)
   *                   ^
   * </pre>
   */
  def equal(right: Any) {
    failUnlessExpected(left == right, "didNotEqual", "equaled", right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be (7)
   *                   ^
   * </pre>
   */
  def be(right: Any) {
    failUnlessExpected(left == right, "wasNotEqualTo", "wasEqualTo", right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be <= (7)
   *                   ^
   * </pre>
   */
  def be(comparison: ResultOfLessThanOrEqualToComparison[T]) {
    failUnlessExpected(comparison(left), "wasNotLessThanOrEqualTo", "wasLessThanOrEqualTo", comparison.right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be >= (7)
   *                   ^
   * </pre>
   */
  def be(comparison: ResultOfGreaterThanOrEqualToComparison[T]) {
    failUnlessExpected(comparison(left), "wasNotGreaterThanOrEqualTo", "wasGreaterThanOrEqualTo", comparison.right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be < (7)
   *                   ^
   * </pre>
   */
  def be(comparison: ResultOfLessThanComparison[T]) {
    failUnlessExpected(comparison(left), "wasNotLessThan", "wasLessThan", comparison.right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be > (7)
   *                   ^
   * </pre>
   */
  def be(comparison: ResultOfGreaterThanComparison[T]) {
    failUnlessExpected(comparison(left), "wasNotGreaterThan", "wasGreaterThan", comparison.right)
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * result should not be === (7)
   *                   ^
   * </pre>
   */
  def be(comparison: ResultOfTripleEqualsApplication) {
    failUnlessExpected(comparison(left), "wasNotEqualTo", "wasEqualTo", comparison.right)
  }

  /**
   * This method enables the following syntax, where <code>odd</code> refers to
   * a <code>BeMatcher[Int]</code>:
   *
   * <pre>
   * 2 should not be (odd)
   *              ^
   * </pre>
   */
  def be(beMatcher: BeMatcher[T]) {
    // BeMatcher supplies its own messages, so this method bypasses the shared
    // FailureMessages-based failure path.
    val matchResult = beMatcher(left)
    if (matchResult.matches != shouldBeTrue)
      throw newTestFailedException(
        if (shouldBeTrue) matchResult.failureMessage
        else matchResult.negatedFailureMessage
      )
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
sealed class ResultOfNotWordForAnyRef[T <: AnyRef](left: T, shouldBeTrue: Boolean)
  extends ResultOfNotWord[T](left, shouldBeTrue) {

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * map should not be (null)
   *                ^
   * </pre>
   */
  def be(o: Null) {
    if ((left == null) != shouldBeTrue) {
      throw newTestFailedException(
        // "wasNull" takes no argument: when left really is null there is no
        // meaningful value to interpolate into the message.
        if (shouldBeTrue)
          FailureMessages("wasNotNull", left)
        else
          FailureMessages("wasNull")
      )
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * stack should not be ('empty)
   *                  ^
   * </pre>
   */
  def be(symbol: Symbol) {
    // Flags (false, false) here versus (true, true)/(true, false) in the
    // a/an variants below — presumably article-related; matchSymbolToPredicateMethod
    // is defined elsewhere in this file. TODO confirm flag semantics.
    val matcherResult = matchSymbolToPredicateMethod(left, symbol, false, false)
    if (matcherResult.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue) matcherResult.failureMessage else matcherResult.negatedFailureMessage
      )
    }
  }

  /**
   * This method enables the following syntax, where <code>stack</code> is, for example, of type <code>Stack</code> and
   * <code>empty</code> refers to a <code>BePropertyMatcher[Stack]</code>:
   *
   * <pre>
   * stack should not be (empty)
   *                  ^
   * </pre>
   */
  def be(bePropertyMatcher: BePropertyMatcher[T]) {
    val result = bePropertyMatcher(left)
    if (result.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue)
          FailureMessages("wasNot", left, UnquotedString(result.propertyName))
        else
          FailureMessages("was", left, UnquotedString(result.propertyName))
      )
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * notFileMock should not be a ('file)
   *                        ^
   * </pre>
   */
  def be(resultOfAWordApplication: ResultOfAWordToSymbolApplication) {
    val matcherResult = matchSymbolToPredicateMethod(left, resultOfAWordApplication.symbol, true, true)
    if (matcherResult.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue) matcherResult.failureMessage else matcherResult.negatedFailureMessage
      )
    }
  }

  /**
   * This method enables the following syntax, where <code>notFileMock</code> is, for example, of type <code>File</code> and
   * <code>file</code> refers to a <code>BePropertyMatcher[File]</code>:
   *
   * <pre>
   * notFileMock should not be a (file)
   *                        ^
   * </pre>
   */
  def be[U >: T](resultOfAWordApplication: ResultOfAWordToBePropertyMatcherApplication[U]) {
    val result = resultOfAWordApplication.bePropertyMatcher(left)
    if (result.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue)
          FailureMessages("wasNotA", left, UnquotedString(result.propertyName))
        else
          FailureMessages("wasA", left, UnquotedString(result.propertyName))
      )
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * keyEvent should not be an ('actionKey)
   *                     ^
   * </pre>
   */
  def be(resultOfAnWordApplication: ResultOfAnWordToSymbolApplication) {
    val matcherResult = matchSymbolToPredicateMethod(left, resultOfAnWordApplication.symbol, true, false)
    if (matcherResult.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue) matcherResult.failureMessage else matcherResult.negatedFailureMessage
      )
    }
  }

  /**
   * This method enables the following syntax, where <code>keyEvent</code> is, for example, of type <code>KeyEvent</code> and
   * <code>actionKey</code> refers to a <code>BePropertyMatcher[KeyEvent]</code>:
   *
   * <pre>
   * keyEvent should not be an (actionKey)
   *                     ^
   * </pre>
   */
  def be[U >: T](resultOfAnWordApplication: ResultOfAnWordToBePropertyMatcherApplication[U]) {
    val result = resultOfAnWordApplication.bePropertyMatcher(left)
    if (result.matches != shouldBeTrue) {
      throw newTestFailedException(
        if (shouldBeTrue)
          FailureMessages("wasNotAn", left, UnquotedString(result.propertyName))
        else
          FailureMessages("wasAn", left, UnquotedString(result.propertyName))
      )
    }
  }

  /**
   * This method enables the following syntax:
   *
   * <pre>
   * otherString should not be theSameInstanceAs (string)
   *                        ^
   * </pre>
   */
  def be(resultOfSameInstanceAsApplication: ResultOfTheSameInstanceAsApplication) {
    if ((resultOfSameInstanceAsApplication.right eq left) != shouldBeTrue) {
      throw newTestFailedException(
        FailureMessages(
          if (shouldBeTrue) "wasNotSameInstanceAs" else "wasSameInstanceAs",
          left,
          resultOfSameInstanceAsApplication.right
        )
      )
    }
  }

  // TODO: Explain this matrix somewhere
  // The type parameter U has T as its lower bound, which means that U must be T or a supertype of T. Left is T, oh, because
  // HavePropertyMatcher is contravariant in its type parameter T, and that nmakes sense, because a HavePropertyMatcher of Any should
  // be able to match on a String.
  // <code>not have (a (1), b (2))</code> must mean the opposite of <code>have (a (1), b (2))</code>, which means that
  // <code>not have (a (1), b (2))</code> will be true if either <code>(a (1)).matches</code> or <code>(b (1)).matches</code> is false.
  // Only if both <code>(a (1)).matches</code> or <code>(b (1)).matches</code> are true will <code>not have (a (1), b (2))</code> be false.
  // title/author matches | have | have not
  // 0 0 | 0 | 1
  // 0 1 | 0 | 1
  // 1 0 | 0 | 1
  // 1 1 | 1 | 0
  //
  /**
   * This method enables the following syntax, where <code>badBook</code> is, for example, of type <code>Book</code> and
   * <code>title ("One Hundred Years of Solitude")</code> results in a <code>HavePropertyMatcher[Book]</code>:
   *
   * <pre>
   * book should not have (title ("One Hundred Years of Solitude"))
   *                 ^
   * </pre>
   */
  def have[U >: T](firstPropertyMatcher: HavePropertyMatcher[U, _], propertyMatchers: HavePropertyMatcher[U, _]*) {
    // Verify every property; requiring the first matcher separately guarantees
    // results is non-empty.
    val results =
      for (propertyVerifier <- firstPropertyMatcher :: propertyMatchers.toList) yield
        propertyVerifier(left)

    val firstFailureOption = results.find(pv => !pv.matches)

    val justOneProperty = propertyMatchers.length == 0

    // if shouldBeTrue is false, then it is like "not have ()", and should throw TFE if firstFailureOption.isDefined is false
    // if shouldBeTrue is true, then it is like "not (not have ()), which should behave like have ()", and should throw TFE if firstFailureOption.isDefined is true
    if (firstFailureOption.isDefined == shouldBeTrue) {
      firstFailureOption match {
        case Some(firstFailure) =>
          // This is one of these cases, thus will only get here if shouldBeTrue is true
          // 0 0 | 0 | 1
          // 0 1 | 0 | 1
          // 1 0 | 0 | 1
          throw newTestFailedException(
            FailureMessages(
              "propertyDidNotHaveExpectedValue",
              UnquotedString(firstFailure.propertyName),
              firstFailure.expectedValue,
              firstFailure.actualValue,
              left
            )
          )
        case None =>
          // This is this cases, thus will only get here if shouldBeTrue is false
          // 1 1 | 1 | 0
          val failureMessage =
            if (justOneProperty) {
              val firstPropertyResult = results.head // know this will succeed, because firstPropertyMatcher was required
              FailureMessages(
                "propertyHadExpectedValue",
                UnquotedString(firstPropertyResult.propertyName),
                firstPropertyResult.expectedValue,
                left
              )
            }
            else FailureMessages("allPropertiesHadExpectedValues", left)

          throw newTestFailedException(failureMessage)
      }
    }
  }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfNotWordForString(left: String, shouldBeTrue: Boolean)
extends ResultOfNotWordForAnyRef[String](left, shouldBeTrue) {
/**
* This method enables the following syntax:
*
* <pre>
* string should not have length (12)
* ^
* </pre>
*/
def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
val right = resultOfLengthWordApplication.expectedLength
if ((left.length == right) != shouldBeTrue) {
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength",
left,
right
)
)
}
}
/**
* This method enables the following syntax:
*
* <pre>
* string should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""")
* ^
* </pre>
*
* <p>
* The regular expression passed following the <code>regex</code> token can be either a <code>String</code>
* or a <code>scala.util.matching.Regex</code>.
* </p>
*/
def fullyMatch(resultOfRegexWordApplication: ResultOfRegexWordApplication) {
val rightRegex = resultOfRegexWordApplication.regex
if (rightRegex.pattern.matcher(left).matches != shouldBeTrue)
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotFullyMatchRegex" else "fullyMatchedRegex",
left,
rightRegex
)
)
}
/**
* This method enables the following syntax:
*
* <pre>
* string should not include regex ("wo.ld")
* ^
* </pre>
*
* <p>
* The regular expression passed following the <code>regex</code> token can be either a <code>String</code>
* or a <code>scala.util.matching.Regex</code>.
* </p>
*/
def include(resultOfRegexWordApplication: ResultOfRegexWordApplication) {
val rightRegex = resultOfRegexWordApplication.regex
if (rightRegex.findFirstIn(left).isDefined != shouldBeTrue)
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotIncludeRegex" else "includedRegex",
left,
rightRegex
)
)
}
/**
* This method enables the following syntax:
*
* <pre>
* string should not include ("world")
* ^
* </pre>
*/
def include(expectedSubstring: String) {
if ((left.indexOf(expectedSubstring) >= 0) != shouldBeTrue)
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotIncludeSubstring" else "includedSubstring",
left,
expectedSubstring
)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * string should not startWith regex ("Hel*o")
 *                   ^
 * </pre>
 *
 * <p>
 * The regular expression passed following the <code>regex</code> token can be either a <code>String</code>
 * or a <code>scala.util.matching.Regex</code>.
 * </p>
 */
def startWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) {
  val rightRegex = resultOfRegexWordApplication.regex
  // Matcher.lookingAt anchors the match at the beginning of the string only.
  val startsWithMatch = rightRegex.pattern.matcher(left).lookingAt
  if (startsWithMatch != shouldBeTrue) {
    val resourceName = if (shouldBeTrue) "didNotStartWithRegex" else "startedWithRegex"
    throw newTestFailedException(FailureMessages(resourceName, left, rightRegex))
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "eight" should not startWith ("1.7")
 *                    ^
 * </pre>
 */
def startWith(expectedSubstring: String) {
  // String.startsWith(s) is equivalent to indexOf(s) == 0.
  val startsWithSubstring = left startsWith expectedSubstring
  if (startsWithSubstring != shouldBeTrue) {
    val resourceName = if (shouldBeTrue) "didNotStartWith" else "startedWith"
    throw newTestFailedException(FailureMessages(resourceName, left, expectedSubstring))
  }
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * greeting should not endWith regex ("wor.d")
 *                     ^
 * </pre>
 */
def endWith(resultOfRegexWordApplication: ResultOfRegexWordApplication) {
  val rightRegex = resultOfRegexWordApplication.regex
  val allMatches = rightRegex.findAllIn(left)
  // hasNext must be evaluated first: it primes the MatchIterator so that
  // allMatches.end (end offset of the current match) is valid.
  val endsWithMatch = allMatches.hasNext && (allMatches.end == left.length)
  // Bug fix: the whole "ends with a match" result must be compared against
  // shouldBeTrue. Previously `!=` (which binds tighter than `&&`) applied only
  // to (allMatches.end == left.length), so when shouldBeTrue was true and the
  // regex matched nowhere, no TestFailedException was thrown. This now mirrors
  // the correctly parenthesized ResultOfEndWithWordForString.regex.
  if (endsWithMatch != shouldBeTrue)
    throw newTestFailedException(
      FailureMessages(
        if (shouldBeTrue) "didNotEndWithRegex" else "endedWithRegex",
        left,
        rightRegex
      )
    )
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * "eight" should not endWith ("1.7")
 *                    ^
 * </pre>
 */
def endWith(expectedSubstring: String) {
  val endsWithSubstring = left endsWith expectedSubstring
  if (endsWithSubstring != shouldBeTrue) {
    val resourceName = if (shouldBeTrue) "didNotEndWith" else "endedWith"
    throw newTestFailedException(FailureMessages(resourceName, left, expectedSubstring))
  }
}
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForDouble(left: Double, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Double](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOh should not be (6.5 plusOrMinus 0.2)
   *                       ^
   * </pre>
   */
  def be(doubleTolerance: DoubleTolerance) {
    import doubleTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForFloat(left: Float, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Float](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOhFloat should not be (6.5f plusOrMinus 0.2f)
   *                            ^
   * </pre>
   */
  def be(floatTolerance: FloatTolerance) {
    import floatTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForLong(left: Long, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Long](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOhLong should not be (4L plusOrMinus 2L)
   *                           ^
   * </pre>
   */
  def be(longTolerance: LongTolerance) {
    import longTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    // (Kept as two comparisons; rewriting with abs() would change overflow behavior.)
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForInt(left: Int, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Int](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOhInt should not be (4 plusOrMinus 2)
   *                          ^
   * </pre>
   */
  def be(intTolerance: IntTolerance) {
    import intTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForShort(left: Short, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Short](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOhShort should not be (4.toShort plusOrMinus 2.toShort)
   *                            ^
   * </pre>
   */
  def be(shortTolerance: ShortTolerance) {
    import shortTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    // (Arithmetic promotes to Int, exactly as in the original comparison.)
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfNotWordForByte(left: Byte, shouldBeTrue: Boolean)
    extends ResultOfNotWord[Byte](left, shouldBeTrue) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOhByte should not be (4.toByte plusOrMinus 2.toByte)
   *                           ^
   * </pre>
   */
  def be(byteTolerance: ByteTolerance) {
    import byteTolerance._
    // Within tolerance iff right - tolerance <= left <= right + tolerance.
    // (Arithmetic promotes to Int, exactly as in the original comparison.)
    val withinTolerance = left <= right + tolerance && left >= right - tolerance
    if (withinTolerance != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "wasNotPlusOrMinus" else "wasPlusOrMinus"
      throw newTestFailedException(FailureMessages(resourceName, left, right, tolerance))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class RegexWord {
/**
 * This method enables the following syntax (with the regex pattern passed as a <code>String</code>):
 *
 * <pre>
 * "eight" should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""")
 *                                     ^
 * </pre>
 */
def apply(regexString: String) = new ResultOfRegexWordApplication(regexString)
/**
 * This method enables the following syntax (with a <code>scala.util.matching.Regex</code>):
 *
 * <pre>
 * "eight" should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""".r)
 *                                     ^
 * </pre>
 */
def apply(regex: Regex) = new ResultOfRegexWordApplication(regex)
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * <p>
 * The primary constructor enables the following syntax (with a passed <code>scala.util.matching.Regex</code>):
 * </p>
 *
 * <pre>
 * "eight" should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""".r)
 *                    ^
 * </pre>
 *
 * @author Bill Venners
 */
final class ResultOfRegexWordApplication(val regex: Regex) {
  /**
   * This auxiliary constructor enables the same syntax with the pattern given
   * as a plain <code>java.lang.String</code>:
   *
   * <pre>
   * "eight" should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""")
   *                    ^
   * </pre>
   */
  def this(regexString: String) = this(regexString.r)
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForString(left: String, shouldBeTrue: Boolean) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should have length (12)
   *                    ^
   * </pre>
   */
  def length(expectedLength: Int) {
    val hasExpectedLength = left.length == expectedLength
    if (hasExpectedLength != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength"
      throw newTestFailedException(FailureMessages(resourceName, left, expectedLength))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfIncludeWordForString(left: String, shouldBeTrue: Boolean) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should include regex ("world")
   *                       ^
   * </pre>
   */
  def regex(rightRegexString: String) { regex(rightRegexString.r) }
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should include regex ("wo.ld".r)
   *                       ^
   * </pre>
   */
  def regex(rightRegex: Regex) {
    // A match anywhere in the string counts as "includes".
    val foundMatch = rightRegex.findFirstIn(left).isDefined
    if (foundMatch != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotIncludeRegex" else "includedRegex"
      throw newTestFailedException(FailureMessages(resourceName, left, rightRegex))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfStartWithWordForString(left: String, shouldBeTrue: Boolean) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should startWith regex ("Hel*o")
   *                         ^
   * </pre>
   */
  def regex(rightRegexString: String) { regex(rightRegexString.r) }
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should startWith regex ("Hel*o".r)
   *                         ^
   * </pre>
   */
  def regex(rightRegex: Regex) {
    // Matcher.lookingAt anchors the match at the beginning of the string only.
    val startsWithMatch = rightRegex.pattern.matcher(left).lookingAt
    if (startsWithMatch != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotStartWithRegex" else "startedWithRegex"
      throw newTestFailedException(FailureMessages(resourceName, left, rightRegex))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfEndWithWordForString(left: String, shouldBeTrue: Boolean) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should endWith regex ("wor.d")
   *                       ^
   * </pre>
   */
  def regex(rightRegexString: String) { regex(rightRegexString.r) }
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * string should endWith regex ("wor.d".r)
   *                       ^
   * </pre>
   */
  def regex(rightRegex: Regex) {
    // hasNext must come first: it primes the MatchIterator so that
    // matchIterator.end (end offset of the current match) is valid.
    val matchIterator = rightRegex.findAllIn(left)
    val endsWithMatch = matchIterator.hasNext && (matchIterator.end == left.length)
    if (endsWithMatch != shouldBeTrue) {
      val resourceName = if (shouldBeTrue) "didNotEndWithRegex" else "endedWithRegex"
      throw newTestFailedException(FailureMessages(resourceName, left, rightRegex))
    }
  }
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfFullyMatchWordForString(left: String, shouldBeTrue: Boolean) {
/**
 * This method enables the following syntax:
 *
 * <pre>
 * string should fullyMatch regex ("Hel*o world")
 *               ^
 * </pre>
 */
def regex(rightRegexString: String) { regex(rightRegexString.r) }
/**
 * This method enables the following syntax:
 *
 * <pre>
 * string should fullyMatch regex ("Hel*o world".r)
 *               ^
 * </pre>
 */
def regex(rightRegex: Regex) {
if (rightRegex.pattern.matcher(left).matches != shouldBeTrue)
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotFullyMatchRegex" else "fullyMatchedRegex",
left,
rightRegex
)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should equal (7)
 *               ^
 * </pre>
 *
 * <p>
 * The <code>left should equal (right)</code> syntax works by calling <code>==</code> on the <code>left</code>
 * value, passing in the <code>right</code> value, on every type except arrays. If <code>left</code> is an array, <code>deepEquals</code>
 * will be invoked on <code>left</code>, passing in <code>right</code>. Thus, even though this expression
 * will yield false, because <code>Array</code>'s <code>equals</code> method compares object identity:
 * </p>
 *
 * <pre class="indent">
 * Array(1, 2) == Array(1, 2) // yields false
 * </pre>
 *
 * <p>
 * The following expression will <em>not</em> result in a <code>TestFailedException</code>, because <code>deepEquals</code> compares
 * the two arrays structurally, taking into consideration the equality of the array's contents:
 * </p>
 *
 * <pre class="indent">
 * Array(1, 2) should equal (Array(1, 2)) // succeeds (i.e., does not throw TestFailedException)
 * </pre>
 *
 * <p>
 * If you ever do want to verify that two arrays are actually the same object (have the same identity), you can use the
 * <code>be theSameInstanceAs</code> syntax.
 * </p>
 *
 */
def equal(right: Any): Matcher[Any] =
  new Matcher[Any] {
    def apply(left: Any) = {
      // Arrays are compared structurally via deepEquals; everything else via ==.
      val areEqual =
        left match {
          case leftArray: Array[_] => leftArray.deepEquals(right)
          case _ => left == right
        }
      MatchResult(
        areEqual,
        FailureMessages("didNotEqual", left, right),
        FailureMessages("equaled", left, right)
      )
    }
  }
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
/*
final class TreatedAsOrderedWrapper {
def <[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left < right,
FailureMessages("wasNotLessThan", left, right),
FailureMessages("wasLessThan", left, right)
)
}
def >[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left > right,
FailureMessages("wasNotGreaterThan", left, right),
FailureMessages("wasGreaterThan", left, right)
)
}
def <=[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left <= right,
FailureMessages("wasNotLessThanOrEqualTo", left, right),
FailureMessages("wasLessThanOrEqualTo", left, right)
)
}
def >=[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left >= right,
FailureMessages("wasNotGreaterThanOrEqualTo", left, right),
FailureMessages("wasGreaterThanOrEqualTo", left, right)
)
}
}
// This one is for one should be < (7)
implicit def convertBeWordToForOrdered(beWord: BeWord): TreatedAsOrderedWrapper = new TreatedAsOrderedWrapper
*/
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* <p>
* Class <code>BeWord</code> contains an <code>apply</code> method that takes a <code>Symbol</code>, which uses reflection
* to find and access a <code>Boolean</code> property and determine if it is <code>true</code>.
* If the symbol passed is <code>'empty</code>, for example, the <code>apply</code> method
* will use reflection to look for a public Java field named
* "empty", a public method named "empty", or a public method named "isEmpty". If a field, it must be of type <code>Boolean</code>.
* If a method, it must take no parameters and return <code>Boolean</code>. If multiple candidates are found,
* the <code>apply</code> method will select based on the following algorithm:
* </p>
*
* <table cellpadding="2" border="1">
* <tr><th>Field</th><th>Method</th><th>"is" Method</th><th>Result</th></tr>
* <tr><td> </td><td> </td><td> </td><td>Throws <code>TestFailedException</code>, because no candidates found</td></tr>
* <tr><td> </td><td> </td><td><code>isEmpty()</code></td><td>Invokes <code>isEmpty()</code></td></tr>
* <tr><td> </td><td><code>empty()</code></td><td> </td><td>Invokes <code>empty()</code></td></tr>
* <tr><td> </td><td><code>empty()</code></td><td><code>isEmpty()</code></td><td>Invokes <code>empty()</code> (this can occur when <code>BeanProperty</code> annotation is used)</td></tr>
* <tr><td><code>empty</code></td><td> </td><td> </td><td>Accesses field <code>empty</code></td></tr>
* <tr><td><code>empty</code></td><td> </td><td><code>isEmpty()</code></td><td>Invokes <code>isEmpty()</code></td></tr>
* <tr><td><code>empty</code></td><td><code>empty()</code></td><td> </td><td>Invokes <code>empty()</code></td></tr>
* <tr><td><code>empty</code></td><td><code>empty()</code></td><td><code>isEmpty()</code></td><td>Invokes <code>empty()</code> (this can occur when <code>BeanProperty</code> annotation is used)</td></tr>
* </table>
*
* @author Bill Venners
*/
final class BeWord {
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be < (7)
 *                  ^
 * </pre>
 *
 * <p>
 * Note that the less than operator will be invoked on <code>be</code> in this expression, not
 * on a result of passing <code>be</code> to <code>should</code>, as with most other operators
 * in the matchers DSL, because the less than operator has a higher precedence than <code>should</code>.
 * Thus in the above case the first expression evaluated will be <code>be < (7)</code>, which results
 * in a matcher that is passed to <code>should</code>.
 * </p>
 *
 * <p>
 * This method also enables the following syntax:
 * </p>
 *
 * <pre>
 * result should not (be < (7))
 *                       ^
 * </pre>
 */
def <[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left < right,
FailureMessages("wasNotLessThan", left, right),
FailureMessages("wasLessThan", left, right)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be > (7)
 *                  ^
 * </pre>
 *
 * <p>
 * Note that the greater than operator will be invoked on <code>be</code> in this expression, not
 * on a result of passing <code>be</code> to <code>should</code>, as with most other operators
 * in the matchers DSL, because the greater than operator has a higher precedence than <code>should</code>.
 * Thus in the above case the first expression evaluated will be <code>be > (7)</code>, which results
 * in a matcher that is passed to <code>should</code>.
 * </p>
 *
 * <p>
 * This method also enables the following syntax:
 * </p>
 *
 * <pre>
 * result should not (be > (7))
 *                       ^
 * </pre>
 */
def >[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left > right,
FailureMessages("wasNotGreaterThan", left, right),
FailureMessages("wasGreaterThan", left, right)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be <= (7)
 *                  ^
 * </pre>
 *
 * <p>
 * Note that the less than or equal to operator will be invoked on <code>be</code> in this expression, not
 * on a result of passing <code>be</code> to <code>should</code>, as with most other operators
 * in the matchers DSL, because the less than or equal to operator has a higher precedence than <code>should</code>.
 * Thus in the above case the first expression evaluated will be <code>be <= (7)</code>, which results
 * in a matcher that is passed to <code>should</code>.
 * </p>
 *
 * <p>
 * This method also enables the following syntax:
 * </p>
 *
 * <pre>
 * result should not (be <= (7))
 *                       ^
 * </pre>
 */
def <=[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left <= right,
FailureMessages("wasNotLessThanOrEqualTo", left, right),
FailureMessages("wasLessThanOrEqualTo", left, right)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be >= (7)
 *                  ^
 * </pre>
 *
 * <p>
 * Note that the greater than or equal to operator will be invoked on <code>be</code> in this expression, not
 * on a result of passing <code>be</code> to <code>should</code>, as with most other operators
 * in the matchers DSL, because the greater than or equal to operator has a higher precedence than <code>should</code>.
 * Thus in the above case the first expression evaluated will be <code>be >= (7)</code>, which results
 * in a matcher that is passed to <code>should</code>.
 * </p>
 *
 * <p>
 * This method also enables the following syntax:
 * </p>
 *
 * <pre>
 * result should not (be >= (7))
 *                       ^
 * </pre>
 */
def >=[T <% Ordered[T]](right: T): Matcher[T] =
new Matcher[T] {
def apply(left: T) =
MatchResult(
left >= right,
FailureMessages("wasNotGreaterThanOrEqualTo", left, right),
FailureMessages("wasGreaterThanOrEqualTo", left, right)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be === (7)
 *                  ^
 * </pre>
 *
 * <p>
 * Note that the === operator will be invoked on <code>be</code> in this expression, not
 * on a result of passing <code>be</code> to <code>should</code>, as with most other operators
 * in the matchers DSL, because the === operator has a higher precedence than <code>should</code>.
 * Thus in the above case the first expression evaluated will be <code>be === (7)</code>, which results
 * in a matcher that is passed to <code>should</code>.
 * </p>
 *
 * <p>
 * This method also enables the following syntax:
 * </p>
 *
 * <pre>
 * result should not (be === (7))
 *                       ^
 * </pre>
 */
def ===(right: Any): Matcher[Any] =
new Matcher[Any] {
def apply(left: Any) =
// Arrays are compared structurally via deepEquals; everything else via ==.
left match {
case leftArray: Array[_] =>
MatchResult(
leftArray.deepEquals(right),
FailureMessages("wasNotEqualTo", left, right),
FailureMessages("wasEqualTo", left, right)
)
case _ =>
MatchResult(
left == right,
FailureMessages("wasNotEqualTo", left, right),
FailureMessages("wasEqualTo", left, right)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * fileMock should not { be a ('file) }
 *                          ^
 * </pre>
 */
// NOTE(review): the two boolean flags passed to matchSymbolToPredicateMethod
// presumably select article phrasing ("a"/"an") and failure-message form —
// confirm against that method's definition (not visible here).
def a[S <: AnyRef](right: Symbol): Matcher[S] =
new Matcher[S] {
def apply(left: S) = matchSymbolToPredicateMethod[S](left, right, true, true)
}
/**
 * This method enables the following syntax, where <code>fileMock</code> is, for example, of type <code>File</code> and
 * <code>file</code> refers to a <code>BePropertyMatcher[File]</code>:
 *
 * <pre>
 * fileMock should not { be a (file) }
 *                          ^
 * </pre>
 */
def a[S <: AnyRef](bePropertyMatcher: BePropertyMatcher[S]): Matcher[S] =
new Matcher[S] {
def apply(left: S) = {
val result = bePropertyMatcher(left)
MatchResult(
result.matches,
FailureMessages("wasNotA", left, UnquotedString(result.propertyName)),
FailureMessages("wasA", left, UnquotedString(result.propertyName))
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * animal should not { be an ('elephant) }
 *                        ^
 * </pre>
 */
def an[S <: AnyRef](right: Symbol): Matcher[S] =
new Matcher[S] {
def apply(left: S) = matchSymbolToPredicateMethod[S](left, right, true, false)
}
/**
 * This method enables the following syntax, where <code>keyEvent</code> is, for example, of type <code>KeyEvent</code> and
 * <code>actionKey</code> refers to a <code>BePropertyMatcher[KeyEvent]</code>:
 *
 * <pre>
 * keyEvent should not { be an (actionKey) }
 *                          ^
 * </pre>
 */
def an[S <: AnyRef](bePropertyMatcher: BePropertyMatcher[S]): Matcher[S] =
new Matcher[S] {
def apply(left: S) = {
val result = bePropertyMatcher(left)
MatchResult(
result.matches,
FailureMessages("wasNotAn", left, UnquotedString(result.propertyName)),
FailureMessages("wasAn", left, UnquotedString(result.propertyName))
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenDotOh should be (7.1 plusOrMinus 0.2)
 *                   ^
 * </pre>
 */
def apply(doubleTolerance: DoubleTolerance): Matcher[Double] =
new Matcher[Double] {
def apply(left: Double) = {
import doubleTolerance._
// Within tolerance iff right - tolerance <= left <= right + tolerance.
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenDotOhFloat should be (7.1f plusOrMinus 0.2f)
 *                        ^
 * </pre>
 */
def apply(floatTolerance: FloatTolerance): Matcher[Float] =
new Matcher[Float] {
def apply(left: Float) = {
import floatTolerance._
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenLong should be (7L plusOrMinus 2L)
 *                  ^
 * </pre>
 */
def apply(longTolerance: LongTolerance): Matcher[Long] =
new Matcher[Long] {
def apply(left: Long) = {
import longTolerance._
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenInt should be (7 plusOrMinus 2)
 *                 ^
 * </pre>
 */
def apply(intTolerance: IntTolerance): Matcher[Int] =
new Matcher[Int] {
def apply(left: Int) = {
import intTolerance._
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenShort should be (7.toShort plusOrMinus 2.toShort)
 *                   ^
 * </pre>
 */
def apply(shortTolerance: ShortTolerance): Matcher[Short] =
new Matcher[Short] {
def apply(left: Short) = {
import shortTolerance._
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * sevenByte should be (7.toByte plusOrMinus 2.toByte)
 *                  ^
 * </pre>
 */
def apply(byteTolerance: ByteTolerance): Matcher[Byte] =
new Matcher[Byte] {
def apply(left: Byte) = {
import byteTolerance._
MatchResult(
left <= right + tolerance && left >= right - tolerance,
FailureMessages("wasNotPlusOrMinus", left, right, tolerance),
FailureMessages("wasPlusOrMinus", left, right, tolerance)
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * object should be theSameInstanceAs (anotherObject)
 *                  ^
 * </pre>
 */
def theSameInstanceAs(right: AnyRef): Matcher[AnyRef] =
new Matcher[AnyRef] {
def apply(left: AnyRef) =
// Reference identity (eq), not value equality.
MatchResult(
left eq right,
FailureMessages("wasNotSameInstanceAs", left, right),
FailureMessages("wasSameInstanceAs", left, right)
)
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * result should be (true)
 *                  ^
 * </pre>
 */
def apply(right: Boolean) =
new Matcher[Boolean] {
def apply(left: Boolean) =
MatchResult(
left == right,
FailureMessages("wasNot", left, right),
FailureMessages("was", left, right)
)
}
/* Well heck if I don't need this one
[fsc] both method apply in class BeWord of type [T](org.scalatest.BePropertyMatcher[T])org.scalatest.Matcher[T]
[fsc] and method apply in class BeWord of type [T](org.scalatest.BeMatcher[T])org.scalatest.Matcher[T]
[fsc] match argument types (Null)
[fsc] o should be (null)
[fsc] ^
*/
/**
 * This method enables the following syntax:
 *
 * <pre>
 * object should be (null)
 *                  ^
 * </pre>
 */
def apply(o: Null) =
new Matcher[AnyRef] {
def apply(left: AnyRef) = {
MatchResult(
left == null,
FailureMessages("wasNotNull", left),
FailureMessages("wasNull"),
FailureMessages("wasNotNull", left),
FailureMessages("midSentenceWasNull")
)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * set should be ('empty)
 *               ^
 * </pre>
 */
def apply[S <: AnyRef](right: Symbol): Matcher[S] =
new Matcher[S] {
def apply(left: S) = matchSymbolToPredicateMethod[S](left, right, false, false)
}
/**
 * This method enables the following syntax, where <code>num</code> is, for example, of type <code>Int</code> and
 * <code>odd</code> refers to a <code>BeMatcher[Int]</code>:
 *
 * <pre>
 * num should be (odd)
 *               ^
 * </pre>
 */
def apply[T](right: BeMatcher[T]): Matcher[T] =
new Matcher[T] {
def apply(left: T) = right(left)
}
/**
 * This method enables the following syntax, where <code>open</code> refers to a <code>BePropertyMatcher</code>:
 *
 * <pre>
 * door should be (open)
 *                ^
 * </pre>
 */
def apply[T](bePropertyMatcher: BePropertyMatcher[T]): Matcher[T] =
new Matcher[T] {
def apply(left: T) = {
val result = bePropertyMatcher(left)
MatchResult(
result.matches,
FailureMessages("wasNot", left, UnquotedString(result.propertyName)),
FailureMessages("was", left, UnquotedString(result.propertyName))
)
}
}
/**
 * This method enables <code>be</code> to be used for equality comparison. Here are some examples:
 *
 * <pre>
 * object should be (None)
 *                  ^
 * object should be (Some(1))
 *                  ^
 * result should be (true)
 *                  ^
 * result should be (false)
 *                  ^
 * sum should be (19)
 *               ^
 * </pre>
 */
def apply(right: Any): Matcher[Any] =
new Matcher[Any] {
def apply(left: Any) =
// null gets a dedicated message; arrays compare structurally; otherwise ==.
left match {
case null =>
MatchResult(
right == null,
FailureMessages("wasNotNull", right),
FailureMessages("wasNull"),
FailureMessages("wasNotNull", right),
FailureMessages("midSentenceWasNull")
)
case leftArray: Array[_] =>
MatchResult(
leftArray.deepEquals(right),
FailureMessages("wasNotEqualTo", left, right),
FailureMessages("wasEqualTo", left, right)
)
case _ =>
MatchResult(
left == right,
FailureMessages("wasNotEqualTo", left, right),
FailureMessages("wasEqualTo", left, right)
)
}
}
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class NotWord {
/**
 * This method enables the following syntax, where <code>tempFile</code>, for example, refers to a <code>java.io.File</code>
 * and <code>exist</code> is a <code>Matcher[java.io.File]</code>:
 *
 * <pre>
 * tempFile should not (exist)
 *                 ^
 * </pre>
 */
def apply[S <: Any](matcher: Matcher[S]) =
  new Matcher[S] {
    def apply(left: S) = {
      // Negate the wrapped matcher's result, swapping each failure message
      // with its negated counterpart (and likewise the mid-sentence forms).
      val MatchResult(matches, failure, negatedFailure, midFailure, midNegatedFailure) = matcher(left)
      MatchResult(!matches, negatedFailure, failure, midNegatedFailure, midFailure)
    }
  }
/**
 * This method enables any <code>BeMatcher</code> to be negated by passing it to <code>not</code>.
 * For example, if you have a <code>BeMatcher[Int]</code> called <code>odd</code>, which matches
 * <code>Int</code>s that are odd, you can negate it to get a <code>BeMatcher[Int]</code> that matches
 * even <code>Int</code>s, like this:
 *
 * <pre>
 * val even = not (odd)
 *            ^
 * </pre>
 *
 * <p>
 * In addition, this method enables you to negate a <code>BeMatcher</code> at its point of use, like this:
 * </p>
 *
 * <pre>
 * num should be (not (odd))
 * </pre>
 *
 * <p>
 * Nevertheless, in such as case it would be more idiomatic to write:
 * </p>
 *
 * <pre>
 * num should not be (odd)
 * </pre>
 */
def apply[S <: Any](beMatcher: BeMatcher[S]) =
new BeMatcher[S] {
def apply(left: S) =
// Flip the match and swap each failure message with its negated form.
beMatcher(left) match {
case MatchResult(bool, s1, s2, s3, s4) => MatchResult(!bool, s2, s1, s4, s3)
}
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should (not equal (7) and be < (9))
 *                 ^
 * </pre>
 */
// Builds the negation of the standard equal matcher by delegating to apply.
def equal(right: Any): Matcher[Any] = apply(matchers.equal(right))
/**
 * This method enables the following syntax:
 *
 * <pre>
 * Array(1, 2) should (not have length (5) and not have length (3))
 *                         ^
 * </pre>
 */
// Negates the standard have-length matcher by delegating to apply.
def have(resultOfLengthWordApplication: ResultOfLengthWordApplication): Matcher[AnyRef] =
apply(matchers.have.length(resultOfLengthWordApplication.expectedLength))
// This looks similar to the AndNotWord one, but not quite the same because no and
/**
* This method enables the following syntax:
*
* <pre>
* Array(1, 2) should (not have size (5) and not have size (3))
* ^
* </pre>
*/
def have(resultOfSizeWordApplication: ResultOfSizeWordApplication): Matcher[AnyRef] =
  apply(matchers.have.size(resultOfSizeWordApplication.expectedSize)) // negation of `have size (n)`
/**
* This method enables the following syntax, where, for example, <code>book</code> is of type <code>Book</code> and <code>title</code> and <code>author</code>
* are both of type <code>HavePropertyMatcher[Book, String]</code>:
*
* <pre>
* book should (not have (title ("Moby Dick")) and (not have (author ("Melville"))))
* ^
* </pre>
*/
def have[T](firstPropertyMatcher: HavePropertyMatcher[T, _], propertyMatchers: HavePropertyMatcher[T, _]*): Matcher[T] =
  apply(matchers.have(firstPropertyMatcher, propertyMatchers: _*)) // negation of the property-based `have` matcher
/**
* This method enables the following syntax, where, for example, <code>num</code> is an <code>Int</code> and <code>odd</code>
* of type <code>BeMatcher[Int]</code>:
*
* <pre>
* num should (not be (odd) and be <= (8))
* ^
* </pre>
*/
def be[T](beMatcher: BeMatcher[T]): Matcher[T] = {
  new Matcher[T] {
    // Run the wrapped BeMatcher, then invert it: negate the matched flag and
    // exchange each failure message with its negated counterpart.
    def apply(left: T) = {
      val innerResult = beMatcher(left)
      innerResult match {
        case MatchResult(matched, failure, negatedFailure, midFailure, midNegatedFailure) =>
          MatchResult(!matched, negatedFailure, failure, midNegatedFailure, midFailure)
      }
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* map should (not be (null))
* ^
* </pre>
*/
def be(o: Null) =
  new Matcher[AnyRef] {
    def apply(left: AnyRef) = {
      // Negated null check: matches when left is NOT null.
      MatchResult(
        left != null,
        FailureMessages("wasNull"),
        FailureMessages("wasNotNull", left),
        FailureMessages("midSentenceWasNull"),
        FailureMessages("wasNotNull", left)
      )
    }
  }
// These next four are for things like not be </>/<=/>=:
/**
* This method enables the following syntax:
*
* <pre>
* num should (not be < (7) and not be > (10))
* ^
* </pre>
*/
def be[T](resultOfLessThanComparison: ResultOfLessThanComparison[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) =
      // Succeeds when left is NOT less than the right operand.
      MatchResult(
        !resultOfLessThanComparison(left),
        FailureMessages("wasLessThan", left, resultOfLessThanComparison.right),
        FailureMessages("wasNotLessThan", left, resultOfLessThanComparison.right)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* num should (not be > (10) and not be < (7))
* ^
* </pre>
*/
def be[T](resultOfGreaterThanComparison: ResultOfGreaterThanComparison[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) =
      // Succeeds when left is NOT greater than the right operand.
      MatchResult(
        !resultOfGreaterThanComparison(left),
        FailureMessages("wasGreaterThan", left, resultOfGreaterThanComparison.right),
        FailureMessages("wasNotGreaterThan", left, resultOfGreaterThanComparison.right)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* num should (not be <= (7) and not be > (10))
* ^
* </pre>
*/
def be[T](resultOfLessThanOrEqualToComparison: ResultOfLessThanOrEqualToComparison[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) =
      // Succeeds when left is NOT less than or equal to the right operand.
      MatchResult(
        !resultOfLessThanOrEqualToComparison(left),
        FailureMessages("wasLessThanOrEqualTo", left, resultOfLessThanOrEqualToComparison.right),
        FailureMessages("wasNotLessThanOrEqualTo", left, resultOfLessThanOrEqualToComparison.right)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* num should (not be >= (10) and not be < (7))
* ^
* </pre>
*/
def be[T](resultOfGreaterThanOrEqualToComparison: ResultOfGreaterThanOrEqualToComparison[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) =
      // Succeeds when left is NOT greater than or equal to the right operand.
      MatchResult(
        !resultOfGreaterThanOrEqualToComparison(left),
        FailureMessages("wasGreaterThanOrEqualTo", left, resultOfGreaterThanOrEqualToComparison.right),
        FailureMessages("wasNotGreaterThanOrEqualTo", left, resultOfGreaterThanOrEqualToComparison.right)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* num should (not be === (7) and not be === (10))
* ^
* </pre>
*/
def be(resultOfTripleEqualsApplication: ResultOfTripleEqualsApplication): Matcher[Any] = {
  new Matcher[Any] {
    def apply(left: Any) =
      // Succeeds when left is NOT equal (===) to the right operand.
      MatchResult(
        !resultOfTripleEqualsApplication(left),
        FailureMessages("wasEqualTo", left, resultOfTripleEqualsApplication.right),
        FailureMessages("wasNotEqualTo", left, resultOfTripleEqualsApplication.right)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* myFile should (not be ('hidden) and have (name ("temp.txt")))
* ^
* </pre>
*/
def be[T <: AnyRef](symbol: Symbol): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Evaluate the positive `be ('symbol)` predicate-method match via
      // reflection, then invert it, swapping the failure messages.
      val positiveMatchResult = matchSymbolToPredicateMethod(left, symbol, false, false)
      MatchResult(
        !positiveMatchResult.matches,
        positiveMatchResult.negatedFailureMessage,
        positiveMatchResult.failureMessage
      )
    }
  }
}
/**
* This method enables the following syntax, where <code>tempFile</code>, for example, refers to a <code>java.io.File</code>
* and <code>hidden</code> is a <code>BePropertyMatcher[java.io.File]</code>:
*
* <pre>
* tempFile should (not be (hidden) and have ('name ("temp.txt")))
* ^
* </pre>
*/
def be[T <: AnyRef](bePropertyMatcher: BePropertyMatcher[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Invert the BePropertyMatcher result; messages reuse its property name.
      val result = bePropertyMatcher(left)
      MatchResult(
        !result.matches,
        FailureMessages("was", left, UnquotedString(result.propertyName)),
        FailureMessages("wasNot", left, UnquotedString(result.propertyName))
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
 * isNotFileMock should (not be a ('file) and have ('name ("temp.txt")))
* ^
* </pre>
*/
def be[T <: AnyRef](resultOfAWordApplication: ResultOfAWordToSymbolApplication): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Positive `be a ('symbol)` match via reflection, then inverted.
      val positiveMatchResult = matchSymbolToPredicateMethod(left, resultOfAWordApplication.symbol, true, true)
      MatchResult(
        !positiveMatchResult.matches,
        positiveMatchResult.negatedFailureMessage,
        positiveMatchResult.failureMessage
      )
    }
  }
}
/**
* This method enables the following syntax, where <code>notSoSecretFile</code>, for example, refers to a <code>java.io.File</code>
* and <code>directory</code> is a <code>BePropertyMatcher[java.io.File]</code>:
*
* <pre>
* notSoSecretFile should (not be a (directory) and have ('name ("passwords.txt")))
* ^
* </pre>
*/
def be[T <: AnyRef](resultOfAWordApplication: ResultOfAWordToBePropertyMatcherApplication[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Invert the `be a (propertyMatcher)` result; "wasA"/"wasNotA" messages
      // use the matcher's reported property name.
      val result = resultOfAWordApplication.bePropertyMatcher(left)
      MatchResult(
        !result.matches,
        FailureMessages("wasA", left, UnquotedString(result.propertyName)),
        FailureMessages("wasNotA", left, UnquotedString(result.propertyName))
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* isNotAppleMock should (not be an ('apple) and not be ('rotten))
* ^
* </pre>
*/
def be[T <: AnyRef](resultOfAnWordApplication: ResultOfAnWordToSymbolApplication): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Positive `be an ('symbol)` match via reflection, then inverted.
      val positiveMatchResult = matchSymbolToPredicateMethod(left, resultOfAnWordApplication.symbol, true, false)
      MatchResult(
        !positiveMatchResult.matches,
        positiveMatchResult.negatedFailureMessage,
        positiveMatchResult.failureMessage
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* myFile should (not be an (directory) and not be an (directory))
* ^
* </pre>
*/
def be[T <: AnyRef](resultOfAnWordApplication: ResultOfAnWordToBePropertyMatcherApplication[T]): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      // Invert the `be an (propertyMatcher)` result; "wasAn"/"wasNotAn"
      // messages use the matcher's reported property name.
      val result = resultOfAnWordApplication.bePropertyMatcher(left)
      MatchResult(
        !result.matches,
        FailureMessages("wasAn", left, UnquotedString(result.propertyName)),
        FailureMessages("wasNotAn", left, UnquotedString(result.propertyName))
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* myFish should (not be theSameInstanceAs (redFish) and not be theSameInstanceAs (blueFish))
* ^
* </pre>
*/
def be[T <: AnyRef](resultOfTheSameInstanceAsApplication: ResultOfTheSameInstanceAsApplication): Matcher[T] = {
  new Matcher[T] {
    def apply(left: T) = {
      MatchResult(
        // `ne` is reference inequality: succeed when the two are different instances.
        resultOfTheSameInstanceAsApplication.right ne left,
        FailureMessages("wasSameInstanceAs", left, resultOfTheSameInstanceAsApplication.right),
        FailureMessages("wasNotSameInstanceAs", left, resultOfTheSameInstanceAsApplication.right)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenDotOh should ((not be (17.1 plusOrMinus 0.2)) and (not be (27.1 plusOrMinus 0.2)))
* ^
* </pre>
*/
def be(doubleTolerance: DoubleTolerance): Matcher[Double] = {
  import doubleTolerance._
  new Matcher[Double] {
    def apply(left: Double) = {
      // Inside the (inclusive) tolerance band is a positive `be` match,
      // so the negated matcher succeeds only outside it.
      val withinTolerance = left <= right + tolerance && left >= right - tolerance
      MatchResult(
        !withinTolerance,
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenDotOhFloat should ((not be (17.1f plusOrMinus 0.2f)) and (not be (27.1f plusOrMinus 0.2f)))
* ^
* </pre>
*/
def be(floatTolerance: FloatTolerance): Matcher[Float] = {
  import floatTolerance._
  new Matcher[Float] {
    def apply(left: Float) = {
      // Succeeds when left lies outside the inclusive band [right - tolerance, right + tolerance].
      MatchResult(
        !(left <= right + tolerance && left >= right - tolerance),
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenLong should ((not be (19L plusOrMinus 2L)) and (not be (29L plusOrMinus 2L)))
* ^
* </pre>
*/
def be(longTolerance: LongTolerance): Matcher[Long] = {
  import longTolerance._
  new Matcher[Long] {
    def apply(left: Long) = {
      // Succeeds when left lies outside the inclusive band [right - tolerance, right + tolerance].
      MatchResult(
        !(left <= right + tolerance && left >= right - tolerance),
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenInt should ((not be (19 plusOrMinus 2)) and (not be (29 plusOrMinus 2)))
* ^
* </pre>
*/
def be(intTolerance: IntTolerance): Matcher[Int] = {
  import intTolerance._
  new Matcher[Int] {
    def apply(left: Int) = {
      // Succeeds when left lies outside the inclusive band [right - tolerance, right + tolerance].
      MatchResult(
        !(left <= right + tolerance && left >= right - tolerance),
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenShort should ((not be (19.toShort plusOrMinus 2.toShort)) and (not be (29.toShort plusOrMinus 2.toShort)))
* ^
* </pre>
*/
def be(shortTolerance: ShortTolerance): Matcher[Short] = {
  import shortTolerance._
  new Matcher[Short] {
    def apply(left: Short) = {
      // Succeeds when left lies outside the inclusive band (arithmetic widens to Int).
      MatchResult(
        !(left <= right + tolerance && left >= right - tolerance),
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* sevenByte should ((not be (19.toByte plusOrMinus 2.toByte)) and (not be (29.toByte plusOrMinus 2.toByte)))
* ^
* </pre>
*/
def be(byteTolerance: ByteTolerance): Matcher[Byte] = {
  import byteTolerance._
  new Matcher[Byte] {
    def apply(left: Byte) = {
      // Succeeds when left lies outside the inclusive band (arithmetic widens to Int).
      MatchResult(
        !(left <= right + tolerance && left >= right - tolerance),
        FailureMessages("wasPlusOrMinus", left, right, tolerance),
        FailureMessages("wasNotPlusOrMinus", left, right, tolerance)
      )
    }
  }
}
/**
* This method enables <code>be</code> to be used for inequality comparison. Here are some examples:
*
* <pre>
* object should not be (None)
* ^
* object should not be (Some(1))
* ^
* result should not be (true)
* ^
* result should not be (false)
* ^
* sum should not be (19)
* ^
* </pre>
*/
def be(right: Any): Matcher[Any] = {
  new Matcher[Any] {
    def apply(left: Any) = {
      left match {
        case null =>
          // Special-case null so messages read "was null"/"was not null"
          // instead of going through the generic equality path.
          MatchResult(
            right != null,
            FailureMessages("wasNull"),
            FailureMessages("wasNotNull", right),
            FailureMessages("midSentenceWasNull"),
            FailureMessages("wasNotNull", right)
          )
        case leftArray: Array[_] =>
          // Arrays are compared with deep (element-wise) equality, not reference equality.
          MatchResult(
            !leftArray.deepEquals(right),
            FailureMessages("wasEqualTo", left, right),
            FailureMessages("wasNotEqualTo", left, right)
          )
        case _ =>
          // General case: succeed when the values are NOT equal.
          MatchResult(
            left != right,
            FailureMessages("wasEqualTo", left, right),
            FailureMessages("wasNotEqualTo", left, right)
          )
      }
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
 * string should (not fullyMatch regex ("Hel*o") and not include ("orld"))
* ^
* </pre>
*/
def fullyMatch(resultOfRegexWordApplication: ResultOfRegexWordApplication): Matcher[String] = {
  val rightRegexString = resultOfRegexWordApplication.regex.toString
  // Reuse the already-compiled Pattern held by the scala Regex instead of
  // recompiling the pattern string on every application, which is what
  // java.util.regex.Pattern.matches(regex, input) does internally.
  val rightPattern = resultOfRegexWordApplication.regex.pattern
  new Matcher[String] {
    def apply(left: String) =
      // Succeeds when the regex does NOT match the entire string.
      MatchResult(
        !rightPattern.matcher(left).matches,
        FailureMessages("fullyMatchedRegex", left, UnquotedString(rightRegexString)),
        FailureMessages("didNotFullyMatchRegex", left, UnquotedString(rightRegexString))
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* string should (not include regex ("Hel.o") and not include regex ("""(-)?(\\d+)(\\.\\d*)?"""))
* ^
* </pre>
*/
def include(resultOfRegexWordApplication: ResultOfRegexWordApplication): Matcher[String] = {
  val rightRegex = resultOfRegexWordApplication.regex
  new Matcher[String] {
    def apply(left: String) =
      // Succeeds when the regex matches nowhere within the string.
      MatchResult(
        !rightRegex.findFirstIn(left).isDefined,
        FailureMessages("includedRegex", left, rightRegex),
        FailureMessages("didNotIncludeRegex", left, rightRegex)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* string should (not include ("cat") and not include ("1.7"))
* ^
* </pre>
*/
def include(expectedSubstring: String): Matcher[String] = {
  new Matcher[String] {
    def apply(left: String) = {
      // `String.contains` is exactly the indexOf >= 0 test; the negated
      // matcher succeeds when the substring is absent.
      val found = left contains expectedSubstring
      MatchResult(
        !found,
        FailureMessages("includedSubstring", left, expectedSubstring),
        FailureMessages("didNotIncludeSubstring", left, expectedSubstring)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
 * string should (not startWith regex ("hel*o") and not endWith regex ("wor.d"))
* ^
* </pre>
*/
def startWith(resultOfRegexWordApplication: ResultOfRegexWordApplication): Matcher[String] = {
  val rightRegex = resultOfRegexWordApplication.regex
  new Matcher[String] {
    def apply(left: String) =
      MatchResult(
        // lookingAt anchors the match at the beginning of the input only,
        // so this succeeds when the string does NOT start with the regex.
        !rightRegex.pattern.matcher(left).lookingAt,
        FailureMessages("startedWithRegex", left, rightRegex),
        FailureMessages("didNotStartWithRegex", left, rightRegex)
      )
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* string should ((not startWith ("red")) and (not startWith ("1.7")))
* ^
* </pre>
*/
def startWith(expectedSubstring: String): Matcher[String] = {
  new Matcher[String] {
    def apply(left: String) = {
      // `startsWith` holds exactly when indexOf(substring) == 0, so the
      // negated matcher succeeds when the string does not begin with it.
      val startsWithSubstring = left startsWith expectedSubstring
      MatchResult(
        !startsWithSubstring,
        FailureMessages("startedWith", left, expectedSubstring),
        FailureMessages("didNotStartWith", left, expectedSubstring)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* string should (not endWith regex ("wor.d") and not startWith regex ("Hel*o"))
* ^
* </pre>
*/
def endWith(resultOfRegexWordApplication: ResultOfRegexWordApplication): Matcher[String] = {
  val rightRegex = resultOfRegexWordApplication.regex
  new Matcher[String] {
    def apply(left: String) = {
      val allMatches = rightRegex.findAllIn(left)
      // The positive form requires some regex match to end exactly at the end
      // of the string; `not endWith` succeeds when no such match exists.
      MatchResult(
        !(allMatches.hasNext && (allMatches.end == left.length)),
        FailureMessages("endedWithRegex", left, rightRegex),
        FailureMessages("didNotEndWithRegex", left, rightRegex)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* string should (not endWith ("blue") and not endWith ("1.7"))
* ^
* </pre>
*/
def endWith(expectedSubstring: String): Matcher[String] = {
  new Matcher[String] {
    def apply(left: String) = {
      // Succeeds when the string does NOT end with the expected substring.
      val endsWithSubstring = left endsWith expectedSubstring
      MatchResult(
        !endsWithSubstring,
        FailureMessages("endedWith", left, expectedSubstring),
        FailureMessages("didNotEndWith", left, expectedSubstring)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* Array(1, 2) should (not contain (5) and not contain (3))
* ^
* </pre>
*/
def contain[T](expectedElement: T): Matcher[Iterable[T]] = {
  new Matcher[Iterable[T]] {
    def apply(left: Iterable[T]) = {
      // Succeeds when no element of the collection equals the expected one.
      val found = left exists (_ == expectedElement)
      MatchResult(
        !found,
        FailureMessages("containedExpectedElement", left, expectedElement),
        FailureMessages("didNotContainExpectedElement", left, expectedElement)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* Map("one" -> 1, "two" -> 2) should (not contain key ("three"))
* ^
* </pre>
*/
def contain[K](resultOfKeyWordApplication: ResultOfKeyWordApplication[K]): Matcher[scala.collection.Map[K, Any]] = {
  val expectedKey = resultOfKeyWordApplication.expectedKey
  new Matcher[scala.collection.Map[K, Any]] {
    def apply(left: scala.collection.Map[K, Any]) = {
      // Succeeds when the map does NOT contain the expected key.
      val hasKey = left contains expectedKey
      MatchResult(
        !hasKey,
        FailureMessages("containedKey", left, expectedKey),
        FailureMessages("didNotContainKey", left, expectedKey)
      )
    }
  }
}
/**
* This method enables the following syntax:
*
* <pre>
* Map("one" -> 1, "two" -> 2) should (not contain value (3))
* ^
* </pre>
*/
def contain[K, V](resultOfValueWordApplication: ResultOfValueWordApplication[V]): Matcher[scala.collection.Map[K, V] forSome { type K }] = {
  val expectedValue = resultOfValueWordApplication.expectedValue
  // The existential key type lets this matcher apply to maps of any key type.
  new Matcher[scala.collection.Map[K, V] forSome { type K }] {
    def apply(left: scala.collection.Map[K, V] forSome { type K }) = {
      // Succeeds when no value in the map equals the expected value.
      MatchResult(
        !(left.values.exists(_ == expectedValue)),
        FailureMessages("containedValue", left, expectedValue),
        FailureMessages("didNotContainValue", left, expectedValue)
      )
    }
  }
}
}
/**
* This field enables syntax like the following:
*
* <pre>
* myFile should (not be an (directory) and not have ('name ("foo.bar")))
* ^
* </pre>
*/
val not = new NotWord // entry point for the `not (...)` / `should not ...` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* obj should (be theSameInstanceAs (string) and be theSameInstanceAs (string))
* ^
* </pre>
*/
val be = new BeWord // entry point for the `be (...)` DSL syntax
/*
In HaveWord's methods key, value, length, and size, I can give type parameters.
The type HaveWord can contain a key method that takes a S or what not, and returns a matcher, which
stores the key value in a val and whose apply method checks the passed map for the remembered key. This one would be used in things like:
map should { have key 9 and have value "bob" }
There's an overloaded should method on Shouldifier that takes a HaveWord. This method results in
a different type that also has a key method that takes an S. So when you say:
map should have key 9
what happens is that this alternate should method gets invoked. The result is this other class that
has a key method, and its constructor takes the map and stores it in a val. So this time when key is
invoked, it checks to make sure the passed key is in the remembered map, and does the assertion.
length and size can probably use structural types, because I want to use length on string and array for
starters, and other people may create classes that have length methods. Would be nice to be able to use them.
*/
/**
 * This field enables syntax such as the following:
*
* <pre>
* list should (have length (3) and not contain ('a'))
* ^
* </pre>
*/
val have = new HaveWord // entry point for the `have length/size/property` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* list should (contain ('a') and have length (7))
* ^
* </pre>
*/
val contain = new ContainWord // entry point for the `contain (...)` / `contain key/value` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* string should (include ("hope") and not startWith ("no"))
* ^
* </pre>
*/
val include = new IncludeWord // entry point for the `include (...)` / `include regex (...)` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* string should (fullyMatch regex ("Hel*o, wor.d") and not have length (99))
* ^
* </pre>
*/
val fullyMatch = new FullyMatchWord // entry point for the `fullyMatch regex (...)` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* string should (startWith ("Four") and include ("year"))
* ^
* </pre>
*/
val startWith = new StartWithWord // entry point for the `startWith (...)` / `startWith regex (...)` DSL syntax
/**
 * This field enables syntax such as the following:
*
* <pre>
* string should (endWith ("ago") and include ("score"))
* ^
* </pre>
*/
val endWith = new EndWithWord // entry point for the `endWith (...)` / `endWith regex (...)` DSL syntax
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfLengthWordApplication(val expectedLength: Long) extends HavePropertyMatcher[AnyRef, Long] {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * "hi" should not have (length (3))
   *                      ^
   * </pre>
   *
   * <p>
   * The reason <code>ResultOfLengthWordApplication</code> is a <code>HavePropertyMatcher[AnyRef, Long]</code> is
   * so that you don't have to remember whether <code>length</code> needs to be surrounded by parentheses when following
   * <code>have</code>. Only <code>length</code> and <code>size</code> can be used without parentheses: everything else
   * needs the parentheses. So this approach means that if you use the unneeded parentheses with <code>length</code> and
   * <code>size</code>, it will still work. This <code>apply</code> method uses reflection to find and access the <code>length</code>
   * property on the passed <code>objectWithProperty</code>. Therefore if the object does not have the appropriate structure, the expression
   * will compile, but it will produce a <code>TestFailedException</code> at runtime.
   * </p>
   */
  def apply(objectWithProperty: AnyRef): HavePropertyMatchResult[Long] = {
    accessProperty(objectWithProperty, 'length, false) match {
      case None =>
        // No length/getLength member found on the object.
        throw newTestFailedException(Resources("propertyNotFound", "length", expectedLength.toString, "getLength"))
      case Some(result) =>
        new HavePropertyMatchResult[Long](
          result == expectedLength,
          "length",
          expectedLength,
          // Normalize any integral length value to Long for the result.
          result match {
            case value: Byte => value.toLong
            case value: Short => value.toLong
            case value: Int => value.toLong
            case value: Long => value
            case _ => throw newTestFailedException(Resources("lengthPropertyNotAnInteger"))
          }
        )
    }
  }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class LengthWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * "hi" should not have length (3)
   *                      ^
   * </pre>
   */
  def apply(expectedLength: Long) = new ResultOfLengthWordApplication(expectedLength) // wraps the expected length for use after `have`
}
/**
* This field enables the following syntax:
*
* <pre>
* "hi" should not have length (3)
* ^
* </pre>
*/
val length = new LengthWord // enables `have length (n)` without extra parentheses
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfSizeWordApplication(val expectedSize: Long) extends HavePropertyMatcher[AnyRef, Long] {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * set should not have (size (3))
   *                     ^
   * </pre>
   *
   * <p>
   * The reason <code>ResultOfSizeWordApplication</code> is a <code>HavePropertyMatcher[AnyRef, Long]</code> is
   * so that you don't have to remember whether <code>size</code> needs to be surrounded by parentheses when following
   * <code>have</code>. Only <code>length</code> and <code>size</code> can be used without parentheses: everything else
   * needs the parentheses. So this approach means that if you use the unneeded parentheses with <code>length</code> and
   * <code>size</code>, it will still work. This <code>apply</code> method uses reflection to find and access the <code>size</code>
   * property on the passed <code>objectWithProperty</code>. Therefore if the object does not have the appropriate structure, the expression
   * will compile, but it will produce a <code>TestFailedException</code> at runtime.
   * </p>
   */
  def apply(objectWithProperty: AnyRef): HavePropertyMatchResult[Long] = {
    accessProperty(objectWithProperty, 'size, false) match {
      case None =>
        // No size/getSize member found on the object.
        throw newTestFailedException(Resources("propertyNotFound", "size", expectedSize.toString, "getSize"))
      case Some(result) =>
        new HavePropertyMatchResult[Long](
          result == expectedSize,
          "size",
          expectedSize,
          // Normalize any integral size value to Long for the result.
          result match {
            case value: Byte => value.toLong
            case value: Short => value.toLong
            case value: Int => value.toLong
            case value: Long => value
            case _ => throw newTestFailedException(Resources("sizePropertyNotAnInteger"))
          }
        )
    }
  }
}
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class SizeWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * set should not have size (3)
   *                     ^
   * </pre>
   */
  def apply(expectedSize: Long) = new ResultOfSizeWordApplication(expectedSize) // wraps the expected size for use after `have`
}
/**
* This field enables the following syntax:
*
* <pre>
* set should not have size (3)
* ^
* </pre>
*/
val size = new SizeWord // enables `have size (n)` without extra parentheses
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfElementWordApplication[T](val expectedElement: T) // holder produced by `contain element (...)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfKeyWordApplication[T](val expectedKey: T) // holder produced by `contain key (...)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class KeyWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * map should not contain key (10)
   *                        ^
   * </pre>
   */
  def apply[T](expectedKey: T) = new ResultOfKeyWordApplication(expectedKey) // wraps the key for `contain key (...)`
}
/**
* This field enables the following syntax:
*
* <pre>
* map should not contain key (10)
* ^
* </pre>
*/
val key = new KeyWord // enables `contain key (k)` syntax
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfValueWordApplication[T](val expectedValue: T) // holder produced by `contain value (...)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ValueWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * map should not contain value (10)
   *                        ^
   * </pre>
   */
  def apply[T](expectedValue: T) = new ResultOfValueWordApplication(expectedValue) // wraps the value for `contain value (...)`
}
/**
* This field enables the following syntax:
*
* <pre>
* map should not contain value (10)
* ^
* </pre>
*/
val value = new ValueWord // enables `contain value (v)` syntax
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfAWordToSymbolApplication(val symbol: Symbol) // holder produced by `be a ('symbol)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfAWordToBePropertyMatcherApplication[T](val bePropertyMatcher: BePropertyMatcher[T]) // holder produced by `be a (propertyMatcher)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * badBook should not be a ('goodRead)
   *                       ^
   * </pre>
   */
  def apply(symbol: Symbol) = new ResultOfAWordToSymbolApplication(symbol) // wraps a symbol for `be a ('sym)`
  /**
   * This method enables the following syntax, where, for example, <code>badBook</code> is of type <code>Book</code> and <code>goodRead</code>
   * is a <code>BePropertyMatcher[Book]</code>:
   *
   * <pre>
   * badBook should not be a (goodRead)
   *                       ^
   * </pre>
   */
  def apply[T](beTrueMatcher: BePropertyMatcher[T]) = new ResultOfAWordToBePropertyMatcherApplication(beTrueMatcher) // wraps a property matcher for `be a (matcher)`
}
/**
* This field enables the following syntax:
*
* <pre>
* badBook should not be a ('goodRead)
* ^
* </pre>
*/
val a = new AWord // enables `be a (...)` syntax
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfAnWordToSymbolApplication(val symbol: Symbol) // holder produced by `be an ('symbol)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfAnWordToBePropertyMatcherApplication[T](val bePropertyMatcher: BePropertyMatcher[T]) // holder produced by `be an (propertyMatcher)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class AnWord {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * badBook should not be an ('excellentRead)
   *                       ^
   * </pre>
   */
  def apply(symbol: Symbol) = new ResultOfAnWordToSymbolApplication(symbol) // wraps a symbol for `be an ('sym)`
  /**
   * This method enables the following syntax, where, for example, <code>badBook</code> is of type <code>Book</code> and <code>excellentRead</code>
   * is a <code>BePropertyMatcher[Book]</code>:
   *
   * <pre>
   * badBook should not be an (excellentRead)
   *                       ^
   * </pre>
   */
  def apply[T](beTrueMatcher: BePropertyMatcher[T]) = new ResultOfAnWordToBePropertyMatcherApplication(beTrueMatcher) // wraps a property matcher for `be an (matcher)`
}
/**
* This field enables the following syntax:
*
* <pre>
* badBook should not be an (excellentRead)
* ^
* </pre>
*/
val an = new AnWord // enables `be an (...)` syntax
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ResultOfTheSameInstanceAsApplication(val right: AnyRef) // holder produced by `be theSameInstanceAs (...)`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class TheSameInstanceAsPhrase {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * oneString should not be theSameInstanceAs (anotherString)
   *                         ^
   * </pre>
   */
  def apply(anyRef: AnyRef) = new ResultOfTheSameInstanceAsApplication(anyRef) // wraps the reference to compare by identity
}
/**
* This field enables the following syntax:
*
* <pre>
* oneString should not be theSameInstanceAs (anotherString)
* ^
* </pre>
*/
val theSameInstanceAs = new TheSameInstanceAsPhrase // enables `be theSameInstanceAs (ref)` syntax
/**
* This field enables the following syntax:
*
* <pre>
* "eight" should not fullyMatch regex ("""(-)?(\\d+)(\\.\\d*)?""".r)
* ^
* </pre>
*/
val regex = new RegexWord // enables `fullyMatch/include/startWith/endWith regex (...)` syntax
/**
* This method enables the following syntax:
*
* <pre>
* "eight" should not include substring ("seven")
* ^
* </pre>
val substring = new SubstringWord
*/
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class DoubleTolerance(right: Double, tolerance: Double) // value +/- tolerance pair built by `plusOrMinus`
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class DoublePlusOrMinusWrapper(right: Double) {
  /**
   * This method enables the following syntax:
   *
   * <pre>
   * sevenDotOh should be (17.0 plusOrMinus 0.2)
   *                            ^
   * </pre>
   */
  def plusOrMinus(tolerance: Double): DoubleTolerance = {
    // A zero or negative tolerance would make the band empty or inverted,
    // so it is rejected up front with a TestFailedException.
    if (tolerance <= 0.0)
      throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
    DoubleTolerance(right, tolerance)
  }
}
/**
* Implicitly converts an object of type <code>Double</code> to a <code>DoublePlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertDoubleToPlusOrMinusWrapper(right: Double) = new DoublePlusOrMinusWrapper(right)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class FloatTolerance(right: Float, tolerance: Float)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class FloatPlusOrMinusWrapper(right: Float) {
/**
* This method enables the following syntax:
*
* <pre>
* sevenDotOh should be (17.0f plusOrMinus 0.2f)
* ^
* </pre>
*/
def plusOrMinus(tolerance: Float): FloatTolerance = {
if (tolerance <= 0.0f)
throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
FloatTolerance(right, tolerance)
}
}
/**
* Implicitly converts an object of type <code>Float</code> to a <code>FloatPlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertFloatToPlusOrMinusWrapper(right: Float) = new FloatPlusOrMinusWrapper(right)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class LongTolerance(right: Long, tolerance: Long)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class LongPlusOrMinusWrapper(right: Long) {
/**
* This method enables the following syntax:
*
* <pre>
* seven should be (17L plusOrMinus 2)
* ^
* </pre>
*/
def plusOrMinus(tolerance: Long): LongTolerance = {
if (tolerance <= 0L)
throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
LongTolerance(right, tolerance)
}
}
/**
* Implicitly converts an object of type <code>Long</code> to a <code>LongPlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertLongToPlusOrMinusWrapper(right: Long) = new LongPlusOrMinusWrapper(right)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class IntTolerance(right: Int, tolerance: Int)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class IntPlusOrMinusWrapper(right: Int) {
/**
* This method enables the following syntax:
*
* <pre>
* seven should be (17 plusOrMinus 2)
* ^
* </pre>
*/
def plusOrMinus(tolerance: Int): IntTolerance = {
if (tolerance <= 0)
throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
IntTolerance(right, tolerance)
}
}
/**
* Implicitly converts an object of type <code>Int</code> to a <code>IntPlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertIntToPlusOrMinusWrapper(right: Int) = new IntPlusOrMinusWrapper(right)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class ShortTolerance(right: Short, tolerance: Short)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class ShortPlusOrMinusWrapper(right: Short) {
/**
* This method enables the following syntax:
*
* <pre>
* seven should be (17.toShort plusOrMinus 2.toShort)
* ^
* </pre>
*/
def plusOrMinus(tolerance: Short): ShortTolerance = {
if (tolerance <= 0)
throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
ShortTolerance(right, tolerance)
}
}
/**
* Implicitly converts an object of type <code>Short</code> to a <code>ShortPlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertShortToPlusOrMinusWrapper(right: Short) = new ShortPlusOrMinusWrapper(right)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final case class ByteTolerance(right: Byte, tolerance: Byte)
/**
* This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
* the matchers DSL.
*
* @author Bill Venners
*/
final class BytePlusOrMinusWrapper(right: Byte) {
/**
* This method enables the following syntax:
*
* <pre>
* seven should be (17.toByte plusOrMinus 2.toByte)
* ^
* </pre>
*/
def plusOrMinus(tolerance: Byte): ByteTolerance = {
if (tolerance <= 0)
throw newTestFailedException(Resources("negativeOrZeroRange", tolerance.toString))
ByteTolerance(right, tolerance)
}
}
/**
* Implicitly converts an object of type <code>Byte</code> to a <code>BytePlusOrMinusWrapper</code>,
* to enable a <code>plusOrMinus</code> method to be invokable on that object.
*/
implicit def convertByteToPlusOrMinusWrapper(right: Byte) = new BytePlusOrMinusWrapper(right)
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
// All behavior is inherited from ResultOfNotWordForAnyRef; the code inside the
// block comment below was written but never enabled.
final class ResultOfNotWordForSizeWrapper[A <: AnyRef <% SizeWrapper](left: A, shouldBeTrue: Boolean)
extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {
/* I just added this whole thing in here for completeness when doing SizeShouldWrapper. Write some tests to prove it is needed.
// TODO: This should be for "sizey should not have size (12)" Try that test.
def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
val right = resultOfLengthWordApplication.expectedLength
if ((left.length == right) != shouldBeTrue) {
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength",
left,
right
)
)
}
}
*/
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
// As above: behavior comes entirely from ResultOfNotWordForAnyRef; the block
// comment preserves an experiment that was never activated.
final class ResultOfNotWordForLengthWrapper[A <: AnyRef <% LengthWrapper](left: A, shouldBeTrue: Boolean)
extends ResultOfNotWordForAnyRef(left, shouldBeTrue) {
/* TODO What's going on? Why can I drop this and still get a compile
// TODO: This should be for "lengthy should not have length (12)" Try that test.
def have(resultOfLengthWordApplication: ResultOfLengthWordApplication) {
val right = resultOfLengthWordApplication.expectedLength
if ((left.length == right) != shouldBeTrue) {
throw newTestFailedException(
FailureMessages(
if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength",
left,
right
)
)
}
}
*/
}
/**
 * Supports `obj should have length (n)` for objects exposing a structural `length`
 * (via an implicit view to `LengthWrapper`) that is not covered by the nominal
 * conversions for `scala.Seq`, `java.lang.String`, `java.util.List`, etc.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForLengthWrapper[A <% LengthWrapper](left: A, shouldBeTrue: Boolean) {

  /** Enables `obj should have length (2)` for an `Int` expected length. */
  def length(expectedLength: Int) {
    reportResult(left.length == expectedLength, expectedLength)
  }

  /** Enables `obj should have length (2L)` for a `Long` expected length. */
  def length(expectedLength: Long) {
    reportResult(left.length == expectedLength, expectedLength)
  }

  // Shared failure path: throws unless the comparison outcome matches shouldBeTrue.
  private def reportResult(lengthMatches: Boolean, expectedLength: Any) {
    if (lengthMatches != shouldBeTrue)
      throw newTestFailedException(
        FailureMessages(
          if (shouldBeTrue) "didNotHaveExpectedLength" else "hadExpectedLength",
          left,
          expectedLength
        )
      )
  }
}
/**
 * Supports `obj should have size (n)` for objects exposing a structural `size`
 * (via an implicit view to `SizeWrapper`) that is not covered by the nominal
 * conversions for `scala.Collection`, `java.util.Collection`, etc.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfHaveWordForSizeWrapper[A <% SizeWrapper](left: A, shouldBeTrue: Boolean) {

  /** Enables `obj should have size (2)` for an `Int` expected size. */
  def size(expectedSize: Int) {
    reportResult(left.size == expectedSize, expectedSize)
  }

  /** Enables `obj should have size (2L)` for a `Long` expected size. */
  def size(expectedSize: Long) {
    reportResult(left.size == expectedSize, expectedSize)
  }

  // Shared failure path: throws unless the comparison outcome matches shouldBeTrue.
  private def reportResult(sizeMatches: Boolean, expectedSize: Any) {
    if (sizeMatches != shouldBeTrue)
      throw newTestFailedException(
        FailureMessages(
          if (shouldBeTrue) "didNotHaveExpectedSize" else "hadExpectedSize",
          left,
          expectedSize
        )
      )
  }
}
/**
 * Wraps the right operand of a `be < (right)` expression in the matchers DSL.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfLessThanComparison[T <% Ordered[T]](val right: T) {

  /**
   * Invoked by the `be` methods this instance is passed to, enabling syntax such as
   * `result should not be < (7)` and `num should (not be < (10) and not be > (17))`.
   * Equivalent to `left < right`, expressed through `Ordered#compare`.
   */
  def apply(left: T): Boolean = left.compare(right) < 0
}

/**
 * Wraps the right operand of a `be > (right)` expression in the matchers DSL.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfGreaterThanComparison[T <% Ordered[T]](val right: T) {

  /**
   * Invoked by the `be` methods this instance is passed to, enabling syntax such as
   * `result should not be > (7)` and `num should (not be > (10) and not be < (7))`.
   * Equivalent to `left > right`, expressed through `Ordered#compare`.
   */
  def apply(left: T): Boolean = left.compare(right) > 0
}

/**
 * Wraps the right operand of a `be <= (right)` expression in the matchers DSL.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfLessThanOrEqualToComparison[T <% Ordered[T]](val right: T) {

  /**
   * Invoked by the `be` methods this instance is passed to, enabling syntax such as
   * `result should not be <= (7)` and `num should (not be <= (10) and not be > (17))`.
   * Equivalent to `left <= right`, expressed through `Ordered#compare`.
   */
  def apply(left: T): Boolean = left.compare(right) <= 0
}

/**
 * Wraps the right operand of a `be >= (right)` expression in the matchers DSL.
 * See <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or
 * <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview.
 *
 * @author Bill Venners
 */
final class ResultOfGreaterThanOrEqualToComparison[T <% Ordered[T]](val right: T) {

  /**
   * Invoked by the `be` methods this instance is passed to, enabling syntax such as
   * `result should not be >= (7)` and `num should (not be >= (10) and not be < (7))`.
   * Equivalent to `left >= right`, expressed through `Ordered#compare`.
   */
  def apply(left: T): Boolean = left.compare(right) >= 0
}
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
final class ResultOfTripleEqualsApplication(val right: Any) {
/**
 * This method is invoked by <code>be</code> methods to which instances of this class are passed, which
 * enables syntax such as:
 *
 * <pre>
 * result should not be === (7)
 * ^ ... invoked by this be method
 * </pre>
 *
 * <p>
 * or
 * </p>
 *
 * <pre>
 * num should (not be === (10) and not be > (17))
 * ^ ... invoked by this be method
 * </pre>
 */
// Universal equality: Scala's `==` is null-safe and delegates to `equals`.
def apply(left: Any): Boolean = left == right
}
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should (not be < (10) and not be > (17))
 * ^
 * </pre>
 */
// Factory for the right-hand operand of `not be <` expressions.
def <[T <% Ordered[T]] (right: T): ResultOfLessThanComparison[T] =
new ResultOfLessThanComparison(right)
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should (not be > (10) and not be < (7))
 * ^
 * </pre>
 */
// Factory for the right-hand operand of `not be >` expressions.
def >[T <% Ordered[T]] (right: T): ResultOfGreaterThanComparison[T] =
new ResultOfGreaterThanComparison(right)
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should (not be <= (10) and not be > (17))
 * ^
 * </pre>
 */
// Factory for the right-hand operand of `not be <=` expressions.
def <=[T <% Ordered[T]] (right: T): ResultOfLessThanOrEqualToComparison[T] =
new ResultOfLessThanOrEqualToComparison(right)
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should (not be >= (10) and not be < (7))
 * ^
 * </pre>
 */
// Factory for the right-hand operand of `not be >=` expressions.
def >=[T <% Ordered[T]] (right: T): ResultOfGreaterThanOrEqualToComparison[T] =
new ResultOfGreaterThanOrEqualToComparison(right)
/**
 * This method enables the following syntax:
 *
 * <pre>
 * num should not be === (10)
 * ^
 * </pre>
 */
// Factory for the right-hand operand of `not be ===` expressions.
def === (right: Any): ResultOfTripleEqualsApplication =
new ResultOfTripleEqualsApplication(right)
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
// Captures the by-name block of `evaluating { ... }` as a deferred function.
final class ResultOfEvaluatingApplication(val fun: () => Any)
/**
 * This method enables syntax such as the following:
 *
 * <pre>
 * evaluating { "hi".charAt(-1) } should produce [StringIndexOutOfBoundsException]
 * ^
 * </pre>
 */
// `fun _` eta-expands the by-name parameter into a () => Any, so evaluation is deferred
// until the matcher runs it.
def evaluating(fun: => Any): ResultOfEvaluatingApplication =
new ResultOfEvaluatingApplication(fun _)
/**
 * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="ShouldMatchers.html"><code>ShouldMatchers</code></a> or <a href="MustMatchers.html"><code>MustMatchers</code></a> for an overview of
 * the matchers DSL.
 *
 * @author Bill Venners
 */
// Carries the runtime Class of the exception type expected by `produce [E]`.
final class ResultOfProduceInvocation[T](val clazz: Class[T])
/**
 * This method enables the following syntax:
 *
 * <pre>
 * evaluating { "hi".charAt(-1) } should produce [StringIndexOutOfBoundsException]
 * ^
 * </pre>
 */
// NOTE(review): Manifest#erasure is deprecated in favour of runtimeClass on Scala 2.10+;
// confirm the target Scala version before modernising this call.
def produce[T](implicit manifest: Manifest[T]): ResultOfProduceInvocation[T] =
new ResultOfProduceInvocation(manifest.erasure.asInstanceOf[Class[T]])
}
| kevinwright/scalatest | src/main/scala/org/scalatest/matchers/Matchers.scala | Scala | apache-2.0 | 248,876 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.cdi.spring.config
import org.beangle.cdi.bind.Reconfig
import org.springframework.core.io.Resource
import org.w3c.dom.Element
import org.xml.sax.InputSource
import java.io.FileNotFoundException
import javax.xml.parsers.DocumentBuilderFactory
/** BeanDefinitionReader
*
* @author chaostone
*/
object ReconfigReader {

  /** Loads reconfiguration definitions from a `spring-config.xml`-style resource.
   *
   * Parses the resource as a DOM document and feeds each child element of the
   * document root to a [[ReconfigParser]], collecting the non-null results.
   *
   * A missing resource (FileNotFoundException) is treated as "no definitions"
   * and yields an empty list; any other failure is rethrown as a RuntimeException
   * that names the resource.
   *
   * @param resource the XML resource to read
   * @return the parsed definitions, in document order (possibly empty)
   */
  def load(resource: Resource): List[Reconfig.Definition] = {
    val holders = new collection.mutable.ListBuffer[Reconfig.Definition]
    try {
      val inputStream = resource.getInputStream
      try {
        val inputSource = new InputSource(inputStream)
        // NOTE(review): the factory is used with default settings. If these resources can
        // ever come from untrusted sources, disable DTDs/external entities here to
        // prevent XXE (e.g. the "disallow-doctype-decl" feature).
        val factory = DocumentBuilderFactory.newInstance()
        val doc = factory.newDocumentBuilder().parse(inputSource)
        val nl = doc.getDocumentElement.getChildNodes
        val parser = new ReconfigParser()
        for (i <- 0 until nl.getLength) {
          // Pattern match instead of isInstanceOf/asInstanceOf: only element children
          // are parsed; text, comment and other node types are skipped.
          nl.item(i) match {
            case ele: Element =>
              val holder = parser.parseBeanDefinitionElement(ele)
              if (null != holder) holders += holder
            case _ => // not an element: ignore
          }
        }
      } finally {
        if (null != inputStream) inputStream.close()
      }
    } catch {
      case _: FileNotFoundException => // resource absent: return no definitions
      case ex: Exception => throw new RuntimeException("IOException parsing XML document from " + resource.getDescription(), ex)
    }
    holders.toList
  }
}
| beangle/cdi | spring/src/main/scala/org/beangle/cdi/spring/config/ReconfigReader.scala | Scala | lgpl-3.0 | 2,275 |
package com.gigaspaces.sbp.gstest
import org.scalatest.ConfigMap
/** User: jason
* Date: 5/27/14
* Time: 8:47 PM
*/
/** Well-known configuration keys plus a property lookup that falls back to a default map. */
trait GetFromConfigMap {

  // Names of the configuration properties this test suite understands.
  val schemaProperty = "schema"
  val spaceUrlProperty = "spaceUrl"
  val numInstancesProperty = "numInstances"
  val numBackupsProperty = "numBackups"
  val instanceIdProperty = "instanceId"
  val spaceModeProperty = "spaceMode"
  val configLocationProperty = "configLocation"
  val localViewQueryListProperty = "localViewQueryList"

  // Fallback values consulted when a property is absent from the map passed in.
  implicit val defaultConfigMap: ConfigMap

  /** Looks up `propertyName` in `configMap`, falling back to `defaultConfigMap`.
   *
   * Bug fix: the original returned the raw `Option` from the default map on the
   * fallback path while returning the unwrapped value on the primary path. Both
   * paths now return the unwrapped value; `None` is returned only when the
   * property is absent from both maps (matching the original "absent" result).
   *
   * @param propertyName the key to look up
   * @param configMap the primary map (defaults to an empty ConfigMap)
   * @return the property value, or `None` if absent from both maps
   */
  def getProperty(propertyName: String, configMap: ConfigMap = new ConfigMap(Map[String, Any]())): Any =
    configMap.get(propertyName) match {
      case Some(p) => p
      case None =>
        defaultConfigMap.get(propertyName) match {
          case Some(p) => p
          case None => None
        }
    }
} | jasonnerothin/gs-gigaspace-api | src/test/scala/com/gigaspaces/sbp/gstest/GetFromConfigMap.scala | Scala | apache-2.0 | 817 |
package com.bizo.util.args4j
import org.kohsuke.args4j.CmdLineException
import org.kohsuke.args4j.CmdLineParser
import scala.collection.JavaConverters._
import org.kohsuke.args4j.NamedOptionDef
// Mix-in giving any args4j options bean a standard --help/-h flag.
trait OptionsWithHelp {
import org.kohsuke.args4j.Option
// args4j sets this to true when the user passes --help or -h on the command line.
@Option(name="--help", aliases=Array("-h"), usage="show this message")
var help = false
}
object OptionsHelper {

  /**
   * Parses `args` into the args4j-annotated bean `options`, exiting the process
   * instead of returning when parsing cannot proceed:
   *  - exits 0 after printing usage when --help/-h was requested,
   *  - exits 1 after printing the error message and usage on any other parse failure.
   *
   * @param args raw command-line arguments
   * @param options the bean to populate; returned when parsing succeeds and help was not requested
   */
  def optionsOrExit[T <: OptionsWithHelp](args: Array[String], options: T): T = {
    val parser = new CmdLineParser(options)
    try {
      parser.parseArgument(args : _*)
      if (options.help) {
        parser.printUsage(System.err)
        sys.exit(0)
      }
    } catch {
      case e: CmdLineException =>
        // Parsing failed (e.g. a required option is missing). args4j has already written
        // the flags it did parse into the bean, so `options.help` tells us directly
        // whether --help appeared. This replaces the previous reflective scan over the
        // parser's option definitions (`asFieldSetter.getValue == true`), which read the
        // same field but could NPE for non-field setters.
        if (options.help) {
          parser.printUsage(System.err)
          sys.exit(0)
        }
        // Genuine parse error and help was not requested.
        System.err.println(e.getMessage)
        parser.printUsage(System.err)
        sys.exit(1)
    }
    options
  }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import kafka.security.authorizer.AclAuthorizer
import kafka.server.KafkaConfig
import kafka.utils.JaasTestUtils
import org.apache.kafka.common.config.SslConfigs
import org.apache.kafka.common.security.auth.KafkaPrincipal
import org.junit.jupiter.api.Assertions.assertNull
import scala.collection.immutable.List
/**
 * End-to-end authorization test run with SASL/GSSAPI (Kerberos) authentication over SSL,
 * using the AclAuthorizer. All test logic lives in SaslEndToEndAuthorizationTest; this
 * subclass only fixes the mechanism, principals and SSL client-auth configuration.
 */
class SaslGssapiSslEndToEndAuthorizationTest extends SaslEndToEndAuthorizationTest {
// Unqualified Kerberos principals (USER type) for the client and the broker.
override val clientPrincipal = new KafkaPrincipal(KafkaPrincipal.USER_TYPE,
JaasTestUtils.KafkaClientPrincipalUnqualifiedName)
override val kafkaPrincipal = new KafkaPrincipal(KafkaPrincipal.USER_TYPE,
JaasTestUtils.KafkaServerPrincipalUnqualifiedName)
// GSSAPI is the only mechanism enabled on both client and server sides.
override protected def kafkaClientSaslMechanism = "GSSAPI"
override protected def kafkaServerSaslMechanisms = List("GSSAPI")
override protected def authorizerClass = classOf[AclAuthorizer]
// Configure brokers to require SSL client authentication in order to verify that SASL_SSL works correctly even if the
// client doesn't have a keystore. We want to cover the scenario where a broker requires either SSL client
// authentication or SASL authentication with SSL as the transport layer (but not both).
serverConfig.put(KafkaConfig.SslClientAuthProp, "required")
// Sanity check: none of the clients may carry a keystore, otherwise the scenario above is not exercised.
assertNull(producerConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
assertNull(consumerConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
assertNull(adminClientConfig.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG))
}
| guozhangwang/kafka | core/src/test/scala/integration/kafka/api/SaslGssapiSslEndToEndAuthorizationTest.scala | Scala | apache-2.0 | 2,304 |
/*
* Copyright (c) 2013-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import scala.collection.mutable.ListBuffer
import test._
/**
 * Tests for shapeless `Lazy` and `Strict`: evaluation order of implicit materialization,
 * conversion from defs / lazy vals / inline expressions, recursive type-class derivation,
 * multiple-instance summoning (`Lazy.values`), eta-expansion, dependent (Aux-style)
 * resolution, and use inside local scopes.
 *
 * NOTE(review): the numbered `effects` assertions encode the exact evaluation order of
 * Lazy (deferred until `.value`) versus Strict (evaluated at materialization), so the
 * statement order in these tests is significant — do not reorder.
 */
class LazyStrictTests {
@Test
def testEffectOrder {
val effects = ListBuffer[Int]()
// Lazy defers the wrapped effect (3) until .value; Strict runs its effect (6) at materialization.
implicit def lazyInt: Lazy[Int] = Lazy[Int]{ effects += 3 ; 23 }
implicit def strictInt: Strict[Int] = Strict[Int]{ effects += 6 ; 23 }
def summonLazyInt(implicit li: Lazy[Int]): Int = {
effects += 2
val i = li.value
effects += 4
i
}
def summonStrictInt(implicit li: Strict[Int]): Int = {
effects += 7
val i = li.value
effects += 8
i
}
effects += 1
val il = summonLazyInt
effects += 5
val is = summonStrictInt
effects += 9
assertEquals(23, il)
assertEquals(23, is)
// The 1..9 sequence proves the lazy effect fired inside the call, the strict one before it.
assertEquals(1 to 9, effects.toList)
}
@Test
def testDefConversion {
// Same ordering check, but the Int is supplied explicitly via the implicit def => Lazy/Strict conversion.
val effects = ListBuffer[Int]()
def effectfulLazyInt: Int = { effects += 3 ; 23 }
def useEffectfulLazyInt(li: Lazy[Int]): Int = {
effects += 2
val i = li.value
effects += 4
i
}
def effectfulStrictInt: Int = { effects += 6 ; 23 }
def useEffectfulStrictInt(li: Strict[Int]): Int = {
effects += 7
val i = li.value
effects += 8
i
}
effects += 1
val il = useEffectfulLazyInt(effectfulLazyInt)
effects += 5
val is = useEffectfulStrictInt(effectfulStrictInt)
effects += 9
assertEquals(23, il)
assertEquals(23, is)
assertEquals(1 to 9, effects.toList)
}
@Test
def testLazyConversion {
// As above, but the arguments are lazy vals rather than defs.
val effects = ListBuffer[Int]()
lazy val effectfulLazyInt: Int = { effects += 3 ; 23 }
lazy val effectfulStrictInt: Int = { effects += 6 ; 23 }
def useEffectfulLazyInt(li: Lazy[Int]): Int = {
effects += 2
val i = li.value
effects += 4
i
}
def useEffectfulStrictInt(li: Strict[Int]): Int = {
effects += 7
val i = li.value
effects += 8
i
}
effects += 1
val il = useEffectfulLazyInt(effectfulLazyInt)
effects += 5
val is = useEffectfulStrictInt(effectfulStrictInt)
effects += 9
assertEquals(23, il)
assertEquals(23, is)
assertEquals(1 to 9, effects.toList)
}
@Test
def testInlineConversion {
// As above, but the arguments are inline block expressions.
val effects = ListBuffer[Int]()
def useEffectfulLazyInt(li: Lazy[Int]): Int = {
effects += 3
val i = li.value
effects += 4
i
}
def useEffectfulStrictInt(si: Strict[Int]): Int = {
effects += 7
val i = si.value
effects += 8
i
}
effects += 1
val il = useEffectfulLazyInt({ effects += 2 ; 23 })
effects += 5
val is = useEffectfulStrictInt({ effects += 6 ; 23 })
effects += 9
assertEquals(23, il)
assertEquals(23, is)
assertEquals(1 to 9, effects.toList)
}
// A local cons-list ADT used to exercise recursive type-class derivation below.
sealed trait List[+T]
case class Cons[T](hd: T, tl: List[T]) extends List[T]
sealed trait Nil extends List[Nothing]
case object Nil extends Nil
trait Show[T] {
def apply(t: T): String
}
def show[T](t: T)(implicit s: Show[T]) = s(t)
trait CommonShows {
implicit def showInt: Show[Int] = new Show[Int] {
def apply(t: Int) = t.toString
}
implicit def showNil: Show[Nil] = new Show[Nil] {
def apply(t: Nil) = "Nil"
}
}
// Mutually recursive instances tied with Lazy only.
object LazyShows extends CommonShows {
implicit def showCons[T](implicit st: Lazy[Show[T]], sl: Lazy[Show[List[T]]]): Show[Cons[T]] = new Show[Cons[T]] {
def apply(t: Cons[T]) = s"Cons(${show(t.hd)(st.value)}, ${show(t.tl)(sl.value)})"
}
implicit def showList[T](implicit sc: Lazy[Show[Cons[T]]]): Show[List[T]] = new Show[List[T]] {
def apply(t: List[T]) = t match {
case n: Nil => show(n)
case c: Cons[T] => show(c)(sc.value)
}
}
}
// Same shape, but the Cons side uses Strict; the recursive knot still needs Lazy in showList.
object LazyStrictMixShows extends CommonShows {
implicit def showCons[T](implicit st: Strict[Show[T]], sl: Strict[Show[List[T]]]): Show[Cons[T]] = new Show[Cons[T]] {
def apply(t: Cons[T]) = s"Cons(${show(t.hd)(st.value)}, ${show(t.tl)(sl.value)})"
}
implicit def showList[T](implicit sc: Lazy[Show[Cons[T]]]): Show[List[T]] = new Show[List[T]] {
def apply(t: List[T]) = t match {
case n: Nil => show(n)
case c: Cons[T] => show(c)(sc.value)
}
}
}
@Test
def testRecursive {
val l: List[Int] = Cons(1, Cons(2, Cons(3, Nil)))
val lazyRepr = {
import LazyShows._
show(l)
}
val strictRepr = {
import LazyStrictMixShows._
show(l)
}
val expectedRepr = "Cons(1, Cons(2, Cons(3, Nil)))"
assertEquals(expectedRepr, lazyRepr)
assertEquals(expectedRepr, strictRepr)
}
trait Foo[T]
object Foo {
implicit def mkFoo[T]: Foo[T] = new Foo[T] {}
}
@Test
def testMultiple {
// Lazy.values materializes several instances at once as an HList.
val foos = Lazy.values[Foo[Int] :: Foo[String] :: Foo[Boolean] :: HNil]
implicit val x :: y :: z :: HNil = foos
typed[Foo[Int]](x)
typed[Foo[String]](y)
typed[Foo[Boolean]](z)
val x1 = implicitly[Foo[Int]]
val y1 = implicitly[Foo[String]]
val z1 = implicitly[Foo[Boolean]]
// `eq` checks: implicit search must resolve to the exact bound instances, not fresh ones.
assertTrue(x1 eq x)
assertTrue(y1 eq y)
assertTrue(z1 eq z)
}
trait Bar[A] { def foo(a: A): Unit }
object Bar {
implicit val intBar = new Bar[Int] { def foo(x: Int) = () }
}
@Test
def testEta {
// Compile-time check: eta-expansion of a member of a Lazy/Strict value must work.
implicitly[Lazy[Bar[Int]]].value.foo _
implicitly[Strict[Bar[Int]]].value.foo _
}
trait Baz[T] {
type U
}
object Baz {
def lazyBaz[T, U](t: T)(implicit bt: Lazy[Aux[T, U]]): Aux[T, U] = bt.value
def strictBaz[T, U](t: T)(implicit bt: Strict[Aux[T, U]]): Aux[T, U] = bt.value
type Aux[T, U0] = Baz[T] { type U = U0 }
implicit val bazIS: Aux[Int, String] = new Baz[Int] { type U = String }
implicit val bazBD: Aux[Boolean, Double] = new Baz[Boolean] { type U = Double }
}
@Test
def testAux {
// Dependent (Aux-pattern) resolution must refine U through Lazy/Strict.
val lIS = Baz.lazyBaz(23)
val sIS = Baz.strictBaz(23)
typed[Baz.Aux[Int, String]](lIS)
typed[Baz.Aux[Int, String]](sIS)
val lBD = Baz.lazyBaz(true)
val sBD = Baz.strictBaz(true)
typed[Baz.Aux[Boolean, Double]](lBD)
typed[Baz.Aux[Boolean, Double]](sBD)
}
@Test
def testExtractors {
// Compile-time check: Generic derivation through Lazy/Strict, including for a class local to a block.
implicitly[Lazy[Generic[Symbol]]]
implicitly[Strict[Generic[Symbol]]]
val x = {
case class Leaf[A](value: A)
implicitly[Lazy[Generic[Leaf[Int]]]]
implicitly[Strict[Generic[Leaf[Int]]]]
()
}
}
}
| clhodapp/shapeless | core/shared/src/test/scala/shapeless/lazy.scala | Scala | apache-2.0 | 7,024 |
package provingground.fol
import provingground.NlpProse._
import provingground.fol.Logic._
import provingground.TextToInt._
import provingground.fol.Theory._
import provingground.fol.Theory.Document._
// import provingground.Arithmetic._
// import provingground.Collections._
/** Translates Stanford Dependency tree to Logical expression */
object ParseProse {
/** Factory for variable symbols */
trait Variables {
  // Symbols handed out so far; a variable is never reused.
  private var usedVars: Set[Var] = Set()

  /** Returns the first symbol from `varstream` not yet used, and records it as used. */
  def newVar: Var = {
    val fresh = (varstream filterNot (usedVars contains _)).head
    usedVars += fresh
    fresh
  }

  /** Returns `n` fresh variables, in generation order. */
  def newVars(n: Int): List[Var] = List.fill(n)(newVar)
}
/** The scope of a formula */
trait Scope extends Variables
/** The default scope */
object Global extends Scope
// The interpreter
/** Print tree for unparsed prose */
trait ProsePrint {
val t: ProseTree
// Display the raw dependency tree, since no logical form exists yet.
override def toString = t.toString
}
// Recovers the original dependency tree from a formula that was left unparsed.
val unParsed: PartialFunction[Formula, ProseTree] = {
case p: ProsePrint => p.t
}
/** Makes unparsed prose function, predicate etc to unparsed formula */
trait MapProseFormula[A] {
val t: ProseTree
// Whatever the argument, applying an unparsed fragment yields an unparsed formula.
def apply(d: A): Formula = ProseFormula(t)
}
/** Unparsed formula */
case class ProseFormula(val t: ProseTree) extends Formula with ProsePrint {
// Substitution is a no-op: an unparsed formula exposes no term structure.
def subs(xt: Var => Term): Formula = this
// Reports no free variables (none are known until the prose is parsed).
val freeVars: Set[Var] = Set()
}
/** Unparsed Property */
case class ProsePropt(val t: ProseTree)
extends Propt
with MapProseFormula[Var]
with ProsePrint
/** Unparsed Condition */
class ProseCondition(val t: ProseTree)
extends Condition
with MapProseFormula[Formula]
with ProsePrint
/** Unparsed ConditionProperty */
case class ProseCondPropt(val t: ProseTree)
extends CondPropt
with ProsePrint {
// Ignores both arguments: the fragment stays unparsed regardless of variable/formula supplied.
def apply(x: Var, p: Formula) = new ProseFormula(t)
}
  /** Extractor for Adverbial Clause */
  object Advcl {
    // Splits off the subtree rooted at the first "advcl" dependent:
    // yields (clause subtree, remainder of the tree).
    def unapply(t: ProseTree): Option[(ProseTree, ProseTree)] = {
      val advcls = t find "advcl"
      advcls match {
        case Some(y) => {
          val s = t -< y.dep
          Some((s, (t - s)))
        }
        case None => None
      }
    }
  }
  /** Extractor for 'if' as determiner */
  object IfMark {
    // Matches a tree carrying an "if" marker; yields the tree with the
    // marker node removed.
    def unapply(t: ProseTree): Option[ProseTree] = {
      val ifdet = t find ("mark", "if")
      ifdet match {
        case Some(x) => Some(t - x.dep)
        case None => None
      }
    }
  }
/** Is a vowel */
def isVowel(c: Char) = List('a', 'e', 'i', 'o', 'u') contains c
/** Has a vowel */
def hasVowel(s: String) = s.exists(isVowel _)
/** A crude criterion for being a name: has more than one letter and contains a vowel*/
def isName(s: String): Boolean = {
if (s.length == 1) false
else {
if (hasVowel(s)) true else false
}
}
  /** A simple property : word is taken as the name of the property */
  object SimpPropt {
    // Matches when every dependent is a conjunction, i.e. the root word
    // alone names the property.
    def unapply(t: ProseTree): Option[String] = {
      if (t.heirs.length == (t findAll "conj").length) Some(t.root.word)
      else None
    }
  }
  /** Extractor for determiner */
  object Det {
    // Always matches: yields the determiner word ("" when absent) and the
    // tree with the determiner node removed.
    def unapply(t: ProseTree): Option[(String, ProseTree)] = {
      val det = t find "det"
      det match {
        case None => Some(("", t))
        case Some(d) => Some(d.dep.word, t - d.dep)
      }
    }
  }
  /** Extractor for (several) adjectival properties */
  object Amods {
    // Matches when adjectival ("amod") or noun-compound ("nn") modifiers
    // are present: yields the stripped tree and one subtree per modifier.
    def unapply(t: ProseTree): Option[(ProseTree, List[ProseTree])] = {
      val amods = t findAll (List("amod", "nn"))
      amods match {
        case List() => None
        case _ => {
          val subtrees = amods map (node => t -< node.dep)
          Some(t -- subtrees, subtrees)
        }
      }
    }
  }
  /** Extractor for a predicate and its parameters; Optionally returns predicate name and trees for parameters */
  object PredParams {
    // Matches when the root has argument dependents (subject, direct
    // object, preposition): yields the root word and one subtree per argument.
    def unapply(t: ProseTree): Option[(String, List[ProseTree])] = {
      val params = t findAll List("nsubj", "dobj", "prep")
      params match {
        case List() => None
        case _ => {
          val subtrees = params map (node => t -< node.dep)
          Some(t.root.word, subtrees)
        }
      }
    }
  }
  /** Extractor for `one of' */
  object OneOf {
    // Matches "one of X, Y, ..." via "prep_of" dependents; yields one
    // subtree per alternative.
    def unapply(t: ProseTree): Option[List[ProseTree]] = {
      val preps = t findAll List("prep_of")
      preps match {
        case List() => None
        case _ => {
          val subtrees = preps map (node => t -< node.dep)
          Some(subtrees)
        }
      }
    }
  }
  // This is broken into Advcl and IfMark
  @deprecated("Use Advcl followed by IfMark", "Replacement ready, to test")
  object If {
    // NOTE(review): this searches for relation "advcls" whereas Advcl uses
    // "advcl", and for ("det", "if") whereas IfMark uses ("mark", "if") —
    // presumably why it was deprecated; confirm against the parser's actual
    // relation labels before reviving.
    def unapply(t: ProseTree): Option[(ProseTree, ProseTree)] = {
      val advcls = t find "advcls"
      advcls match {
        case Some(y) => {
          val s = t -< y.dep
          val ifdet = s find ("det", "if")
          ifdet match {
            case Some(x) => Some((s - x.dep), (t - s))
            case None => None
          }
        }
        case None => None
      }
    }
  }
  /** returns condition (Formula => Formula) from ProseTree */
  def toCondition(d: ParseData, scope: Scope): Formula => Formula = {
    d match {
      case IfMark(p) => {
        // "if p": the condition maps q to (p implies q).
        val ptrans: Formula = toFormula(p, scope)
        def impl(q: Formula) = ptrans implies q
        impl _
      }
      // Fallback: keep the unparsed tree around for diagnostics.
      case t: ProseTree => new ProseCondition(t)
    }
  }
  /** Optional Formula */
  def optFormula(d: ParseData, scope: Scope): Option[Formula] = {
    val p = toFormula(d, scope)
    // Succeed only when no unparsed (Prose*) fragments remain in the result.
    val unparsed = desc(p) collect (unParsed)
    if (unparsed.isEmpty) Some(p)
    else {
      // Dump the partial parse and its unparsed fragments for debugging.
      println(p)
      unparsed.foreach(println)
      None
    }
  }
  // A tree fully parses to a formula iff no unparsed residue remains.
  def isFormula(d: ParseData) = optFormula(d, Global).isDefined
  /** returns Formula from ProseTree */
  def toFormula(d: ParseData, scope: Scope): Formula = {
    d match {
      // Adverbial clause: parse the clause as a condition on the rest.
      case Advcl(p, q) => toCondition(p, scope)(toFormula(q, scope))
      case PredParams(p, params) => {
        // One fresh variable per argument of the predicate.
        val n = params.length
        val xs = scope.newVars(n)
        val pred = n match {
          case 1 => UnRel(p)
          case 2 => BinRel(p)
          case _ => PredSym(p, n)
        }
        val baseFormula = AtomFormula(pred, xs)
        // Wrap the atom with each argument's quantifier/condition.
        val condPropts = params map (toCondPropt(_, scope))
        zipSubs(condPropts, xs, baseFormula)
      }
      // Copula "p is q": some x satisfying both properties.
      case Cop(p, q) =>
        val x = scope.newVar
        ExQuantFormula(x, (toPropt(p, scope)(x)) & (toPropt(q, scope)(x)))
      // case If(p,q) => toFormula(p, scope) implies toFormula(q, scope)
      case t: ProseTree => new ProseFormula(t)
    }
  }
  /** Returns Property (Var => Formula) from ProseTree
    *
    * Note that a term is returned as Var = Term
    */
  def toPropt(d: ParseData, scope: Scope): Var => Formula = {
    d match {
      case SimpPropt(name) => {
        if (isName(name)) {
          // A plausible name becomes a unary predicate on x.
          val p = PredSym(name, 1)
          x: Var =>
            p(x)
        } else {
          // Otherwise treat the word as a term: the property is term = x.
          val t = new TermFmla(name)
          x: Var =>
            t eqls x
        }
      }
      case OneOf(params) =>
        // "one of ...": disjunction of the alternatives' properties.
        val propts = params map (toPropt(_, scope))
        def xval(x: Var): Formula = {
          val terms = propts map (p => p(x))
          ((terms.head) /: (terms.tail))(or)
        }
        xval
      case Amods(main, params) =>
        // Adjectival modifiers are substituted into the head's property.
        (x => subs(toPropt(main, scope)(x), x, params map (toPropt(_, scope))))
      case Rcmod(_, s, t) =>
        // println("rcmod")
        // println(s)
        // println(t)
        (x: Var) =>
          toPropt(s, scope)(x) & toPropt(t, scope)(x)
      case Which(_, _, t) =>
        // println("which")
        // println(t)
        (x: Var) =>
          toPropt(t, scope)(x)
      case Cop(p, q) =>
        println("cop")
        (x: Var) =>
          (toPropt(p, scope)(x)) & (toPropt(q, scope)(x))
      case Gt(node, _, _) =>
        // println("greater than")
        // println(node.gov.word)
        (x => BinRel(">")(x, IntConst(stringNumber(node.gov.word))))
      case Lt(node, _, _) =>
        (x => BinRel("<")(x, IntConst(stringNumber(node.gov.word))))
      case t: ProseTree => new ProsePropt(t)
    }
  }
  /** returns Condition-Property (Var, Formula) => Formula from ProseTree */
  def toCondPropt(d: ParseData, scope: Scope): (Var, Formula) => Formula = {
    d match {
      case Det(s, t) =>
        // The determiner picks the quantifier wrapping the body formula p:
        // none -> bare conjunction, "a" -> existential, "every" -> universal.
        s match {
          case "" => ((x: Var, p: Formula) => toPropt(t, scope)(x) & p)
          case "a" =>
            ((x: Var,
              p: Formula) => ExQuantFormula(x, toPropt(t, scope)(x) & p))
          case "every" =>
            ((x: Var,
              p: Formula) =>
               UnivQuantFormula(x, toPropt(t, scope)(x) implies p))
        }
      case t: ProseTree => new ProseCondPropt(t)
    }
  }
  /** Optionally parses to Fmla(Formula) */
  def optFmla(d: ParseData, scope: Scope): Option[Paragraph] =
    optFormula(d, scope) map (Fmla)
  /** Pattern for Fmla(Formula) */
  object FmlaData {
    // Matches when d parses cleanly to a formula; yields d itself so the
    // caller can re-parse it in its own scope.
    def unapply(d: ParseData): Option[ParseData] = optFmla(d, Global) map {
      (p: Paragraph) =>
        d
    }
  }
  /** Optionally parses to an Assertion */
  def optAssert(d: ParseData, scope: Scope): Option[Paragraph] = None
  // Chooses how a formula becomes a paragraph, keyed by the root word
  // (e.g. "assume p" becomes an assumption; anything else a plain formula).
  def toAction(d: ParseData): Formula => Paragraph = d match {
    case t: ProseTree =>
      val rootWord = t.root.word.toLowerCase
      rootWord match {
        case "assume" => ((p: Formula) => Assume(p))
        case _ => Fmla(_)
      }
  }
  /** Parses to a paragraph, fallback to Text() */
  def toPara(d: ParseData, scope: Scope): Paragraph = d match {
    case Ccomp(_, p, q) => toAction(p)(toFormula(q, scope))
    case FmlaData(data) => Fmla(toFormula(data, scope))
    case d => Text(d.toString)
  }
}
| siddhartha-gadgil/ProvingGround | digressions/src/main/scala/provingground/fol/ParseProse.scala | Scala | mit | 9,718 |
package com.learning.akka.remoting
import akka.actor.{Props, ActorRef, Actor, ActorSystem}
import com.typesafe.config.ConfigFactory
import scala.collection.concurrent.TrieMap
// Protocol messages: a worker announces itself with its id and the master
// replies with an acknowledgement message.
case class RegisterWorker(workerId: String)
case class RegisterWorkerSuccess(message: String)
class Master(actorSystem: ActorSystem) {
  // Registered workers keyed by worker id. TrieMap is thread-safe, so it can
  // be read outside the actor while being updated from inside `receive`.
  val workers: TrieMap[String, ActorRef] = TrieMap[String, ActorRef]()
  // The supervising actor, registered under the well-known name "Master".
  val supervisor = actorSystem.actorOf(Props(new MasterActor), "Master")
  class MasterActor extends Actor{
    override def preStart() = {
      println(s"MasterActor started ${self.path.toString}")
      println(s"${self.path.toStringWithoutAddress}")
    }
    def receive = {
      case RegisterWorker(workerId) =>
        // Remember the sender as the worker's ActorRef and acknowledge.
        println("register worker: " + workerId)
        workers += workerId -> sender()
        sender() ! RegisterWorkerSuccess("Register Success")
        println(workers)
    }
  }
}
object Master {
  // Entry point: boots an ActorSystem configured from the "master" section
  // of remote.conf and starts the Master actor in it.
  def main(args: Array[String]) {
    val config = ConfigFactory.load("remote").getConfig("master")
    val system = ActorSystem("TestService", config)
    val master = new Master(system)
  }
}
/*
* The MIT License (MIT)
*
* Copyright (c) 2016 Algolia
* http://www.algolia.com/
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package algolia.dsl
import algolia.AlgoliaDsl._
import algolia.AlgoliaTest
import algolia.http.{GET, HttpPayload}
import algolia.responses.Task
class TaskStatusTest extends AlgoliaTest {
  describe("get task status") {
    it("get the status of a task") {
      // DSL smoke test: building the request expression must not throw.
      getStatus task Task(1L, None) from "toto"
    }
    it("should call API") {
      // The DSL must serialise to a GET on /1/indexes/toto/task/1 issued
      // against the search (read) hosts, with no extra request options.
      (getStatus task Task(1L, None) from "toto").build() should be(
        HttpPayload(
          GET,
          Seq("1", "indexes", "toto", "task", "1"),
          isSearch = true,
          requestOptions = None
        )
      )
    }
  }
}
| algolia/algoliasearch-client-scala | src/test/scala/algolia/dsl/TaskStatusTest.scala | Scala | mit | 1,758 |
package com.twitter.finagle.thrift
import com.twitter.finagle.Service
import com.twitter.finagle.SourcedException
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.service.ReqRep
import com.twitter.finagle.service.ResponseClass
import com.twitter.finagle.service.ResponseClassifier
import com.twitter.finagle.stats.Counter
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.finagle.thrift.ClientDeserializeCtx.Key
import com.twitter.finagle.thrift.Headers.Request
import com.twitter.scrooge.TReusableBuffer
import com.twitter.scrooge.ThriftStruct
import com.twitter.util.Future
import com.twitter.util.Throwables
import com.twitter.util.Try
import org.apache.thrift.protocol.TMessage
import org.apache.thrift.protocol.TMessageType
import org.apache.thrift.protocol.TProtocolFactory
object ClientFunction {
/**
* Serialize a given request {@code inputArgs} and send the serialized
* request over the wire to a finagle service {@code service}, and
* classify the deserialized response Upon receiving it from the server.
*
* @param clientFuncNameForWire the over the wire function name
* @param replyDeserializer the deserializer used to create a
* [[ClientDeserializeCtx]]
* @param inputArgs the request to serialize
* @param serviceName used to exception metrics scoping
* @param service the service to execute the serialized request, and
* return a deserialized response
* @param responseClassifier classify the deserialized response
* @param tlReusableBuffer a buffer to call [[encodeRequest]]
* @param protocolFactory a [[TProtocolFactory]] to call
* [[encodeRequest]]
* @param failuresScope a counter scope to report exceptions
* @param requestCounter a counter to report number of request
* @param successCounter a counter to report number of successful
* request
* @param failuresCounter a counter to report number of failed
* request
* @tparam T The type of deserialized response
*
* @note This method is intended to be invoked from Scrooge generated
* finagle client, typical scrooge users should not need to call
* it directly.
*/
  def serde[T](
    clientFuncNameForWire: String,
    replyDeserializer: Array[Byte] => Try[T],
    inputArgs: ThriftStruct,
    serviceName: String,
    service: Service[ThriftClientRequest, Array[Byte]],
    responseClassifier: PartialFunction[ReqRep, ResponseClass],
    tlReusableBuffer: TReusableBuffer,
    protocolFactory: TProtocolFactory,
    failuresScope: StatsReceiver,
    requestCounter: Counter,
    successCounter: Counter,
    failuresCounter: Counter
  ): Future[T] = {
    requestCounter.incr()
    // Capture the request and its deserializer in a request-local context so
    // the response bytes can be decoded (and timed) further down the stack.
    val serdeCtx = new ClientDeserializeCtx[T](inputArgs, replyDeserializer)
    Contexts.local.let(
      Key,
      serdeCtx,
      Request.Key,
      Request.newValues
    ) {
      serdeCtx.rpcName(clientFuncNameForWire)
      // Record how long request serialization takes.
      val start = System.nanoTime
      val serialized =
        encodeRequest(clientFuncNameForWire, inputArgs, tlReusableBuffer, protocolFactory)
      serdeCtx.serializationTime(System.nanoTime - start)
      service(serialized)
        .flatMap { response =>
          Future.const(serdeCtx.deserialize(response))
        }.respond { response =>
          // Classify the (request, response) pair and bump the matching
          // counters; failures are additionally scoped by exception chain.
          val classified =
            responseClassifier.applyOrElse(ReqRep(inputArgs, response), ResponseClassifier.Default)
          classified match {
            case _: ResponseClass.Successful =>
              successCounter.incr()
            case _: ResponseClass.Failed =>
              failuresCounter.incr()
              if (response.isThrow) {
                SourcedException.setServiceName(response.throwable, serviceName)
                failuresScope.counter(Throwables.mkString(response.throwable): _*).incr()
              }
            case _ =>
          } // Last ResponseClass is Ignorable, which we do not need to record
        }
    }
  }
  // Serializes `args` as a Thrift CALL message named `name` into a reusable
  // thread-local buffer, then copies out just the written bytes.
  private[this] def encodeRequest(
    name: String,
    args: ThriftStruct,
    tlReusableBuffer: TReusableBuffer,
    protocolFactory: TProtocolFactory
  ): ThriftClientRequest = {
    val memoryBuffer = tlReusableBuffer.get()
    try {
      val oprot = protocolFactory.getProtocol(memoryBuffer)
      oprot.writeMessageBegin(new TMessage(name, TMessageType.CALL, 0))
      args.write(oprot)
      oprot.writeMessageEnd()
      oprot.getTransport.flush()
      // Copy only the filled prefix: the underlying array is reused and may
      // be larger than what was written.
      val bytes = java.util.Arrays.copyOfRange(
        memoryBuffer.getArray(),
        0,
        memoryBuffer.length()
      )
      new ThriftClientRequest(bytes, false)
    } finally {
      // Always reset so the thread-local buffer is clean for the next call.
      tlReusableBuffer.reset()
    }
  }
}
| twitter/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/ClientFunction.scala | Scala | apache-2.0 | 4,733 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.sexp
/**
* The output is a non-standard interpretation of "pretty lisp" ---
* emacs style formatting requires counting the length of the text on
* the current line and indenting off that, which is not so easy when
* all you have is a `StringBuilder`.
*/
trait SexpPrettyPrinter extends SexpPrinter {
  // Spaces added per nesting level.
  val Indent = 2
  def print(sexp: Sexp, sb: StringBuilder): Unit = print(sexp, sb, 0)
  // Dispatch on the s-expression shape; `indent` is the current left margin.
  private def print(sexp: Sexp, sb: StringBuilder, indent: Int): Unit = sexp match {
    case SexpData(data) => printData(data, sb, indent)
    case SexpList(els) => printList(els, sb, indent)
    case SexpCons(x, y) => printCons(x, y, sb, indent)
    case atom: SexpAtom => printAtom(atom, sb)
  }
  protected def printCons(x: Sexp, y: Sexp, sb: StringBuilder, indent: Int): Unit = {
    // recursive, could blow up for big trees
    sb.append('(')
    print(x, sb, indent)
    sb.append(" .\\n")
    printIndent(sb, indent + Indent)
    print(y, sb, indent + Indent)
    sb.append(')')
  }
  // Renders a data map one key/value pair per line; nil when empty.
  protected def printData(data: Map[SexpSymbol, Sexp], sb: StringBuilder, indent: Int): Unit =
    if (data.isEmpty) print(SexpNil, sb)
    else {
      sb.append("(\\n")
      // NOTE(review): the separator argument `sb.append('\\n')` is presumably
      // taken by-name and evaluated between elements — confirm in SexpPrinter.
      printSeq(data, sb.append('\\n')) { el =>
        printIndent(sb, indent + Indent)
        printSymbol(el._1.value, sb)
        sb.append(' ')
        print(el._2, sb, indent + Indent)
      }
      sb.append('\\n')
      printIndent(sb, indent)
      sb.append(')')
    }
  // Renders a proper list, one element per line; nil when empty.
  protected def printList(els: List[Sexp], sb: StringBuilder, indent: Int): Unit =
    if (els.isEmpty) print(SexpNil, sb)
    else {
      sb.append('(')
      printSeq(els, { sb.append("\\n"); printIndent(sb, indent + Indent) }) {
        print(_, sb, indent + Indent)
      }
      sb.append(')')
    }
  // Appends `indent` spaces to the builder.
  protected def printIndent(sb: StringBuilder, indent: Int): Unit =
    (0 until indent) foreach { _ =>
      sb.append(' ')
    }
}
object SexpPrettyPrinter extends SexpPrettyPrinter
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/s-express/src/main/scala/org/ensime/sexp/SexpPrettyPrinter.scala | Scala | apache-2.0 | 2,075 |
package epic.parser
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import epic.trees._
import org.junit.runner.RunWith
import org.scalatest._
import org.scalatest.junit._
import repl.DSLGrammar
/**
*
* @author dlwh
*/
@RunWith(classOf[JUnitRunner])
class InsideOutsideTest extends FunSuite {
implicit class Near(val x: Double) {
def near(y: Double) = (x-y).abs < 1E-4 * math.max(x+y,1E-4)/2
}
  test("Simple test from iobasics") {
    val grammar = DSLGrammar.simpleGrammar
    val sent = "She eats pizza without anchovies" split " "
    val io = SimpleChartMarginal(grammar, sent)
    val counts = io.expectedCounts(new RuleFeaturizer(grammar.topology))
    // Every rule used in the parse should have expected count 1.
    assert(counts(BinaryRule("Sb","NPu","VPu")) near 1.0)
    assert(counts(BinaryRule("NPb","Nu","PPu")) near 1.0)
    assert(counts(BinaryRule("PPb","Pu","Nu")) near 1.0)
    assert(counts(BinaryRule("VPb","Vu","NPu")) near 1.0)
    //assert(counts(LexicalProduction("P","without")) near 1.0)
    //assert(counts(LexicalProduction("N","pizza")) near 1.0)
    //assert(counts(LexicalProduction("N","She")) near 1.0)
    //assert(counts(LexicalProduction("N","anchovies")) near 1.0)
    //assert(counts(LexicalProduction("V","eats")) near 1.0)
  }
  test("complex example") {
    val grammar = grammarForComplexExample
    val lexicon = lexiconForComplexExample
    val sent = "He has good control" split " "
    val io = RefinedChartMarginal(grammar, sent)
    val counts = io.expectedCounts(new RuleFeaturizer(grammar.topology))
    // Rules on the ambiguous "good control" span split their expected
    // count 0.5/0.5 between the two readings; unambiguous rules get 1.0.
    assert(counts(BinaryRule("Sb","NPu","VPu")) near 1.0)
    assert(counts(BinaryRule("VPb","VBZu","NPu")) near 0.5)
    assert(counts(BinaryRule("VPb","VBZu","ADJPu"))near 0.5)
    assert(counts(BinaryRule("NPb","JJu","NNu"))near 0.5)
    assert(counts(UnaryRule("NPu","NN", IndexedSeq.empty))near 0.5)
    assert(counts(UnaryRule("NPu","PRP", IndexedSeq.empty))near 1.0)
    assert(counts(BinaryRule("ADJPb","JJu","NPu")) near 0.5)
    //assert(counts(LexicalProduction("JJ","good")) near 1.0)
    //assert(counts(LexicalProduction("NN","control")) near 1.0)
    //assert(counts(LexicalProduction("VBZ","has")) near 1.0)
    //assert(counts(LexicalProduction("PRP","He")) near 1.0)
  }
  test("complex example charts, SimpleChartMarginal") {
    val grammar = grammarForComplexExample
    val sent = "He has good control" split " "
    val io = SimpleChartMarginal.apply(grammar, sent)
    import io._
    // Inside scores (log probabilities) per span and label.
    assert(inside.bot.labelScore(0,1,"PRP") near 0.0)
    assert(inside.top.labelScore(0,1,"NPu") near math.log(1.0/3.0))
    // assert(inside.top.enteredLabelScores(0,1).length near 1,"t01")
    // assert(inside.bot.enteredLabelScores(0,1).length near 1,"b01")
    assert(inside.bot.labelScore(1,2, "VBZ") near 0.0)
    assert(inside.top.labelScore(1,2, "VBZu") near 0.0)
    assert(inside.bot.labelScore(2,3, "JJ") near 0.0)
    assert(inside.top.labelScore(2,3, "JJu") near 0.0)
    assert(inside.top.labelScore(3,4, "NPu") near math.log(1.0/3.0))
    assert(inside.bot.labelScore(3,4, "NN") near 0.0)
    assert(inside.top.labelScore(3,4, "NNu") near 0.0)
    assert(inside.bot.labelScore(2,4, "NPb") near 0.0,"24npb")
    assert(inside.bot.labelScore(2,4, "ADJPb") near math.log(1.0/3.0),"24adjpb")
    assert(inside.top.labelScore(2,4, "ADJPu") near math.log(1.0/3.0),"24adjpu")
    assert(inside.top.labelScore(2,4, "NPu") near math.log(1.0/3.0),"24npu")
    assert((inside.top.labelScore(1,4, "VPu") near math.log(1.0/3.0)), "1,4 vpu")
    assert((inside.top.labelScore(0,4, "S") - math.log(1.0/9.0)).abs < 1E-4)
    // Outside scores; the root S has outside score 0 (= log 1).
    assert((outside.top.labelScore(0,4, "S") near 0.0))
    assert((outside.top.labelScore(2,4, "NPu") - math.log(1.0/6.0)).abs < 1E-6, outside.top.labelScore(2,4,"NPu") -> "NP" -> outside.bot.labelScore(2,4,"NPb"))
    assert((outside.top.labelScore(2,4, "ADJPu") - math.log(1.0/6.0)).abs < 1E-6, outside.bot.labelScore(2,4,"ADJPb") -> "ADJPb")
    assert((outside.bot.labelScore(3,4,"NN") - math.log(1.0/9.0)).abs < 1E-5, outside.bot.labelScore(3,4,"NN") + "NN")
  }
  test("complex example charts") {
    val grammar = grammarForComplexExample
    val lexicon = lexiconForComplexExample
    val sent = "He has good control" split " "
    val io = RefinedChartMarginal.apply(grammar, sent)
    import io._
    // Inside chart: log inside scores and entered-label counts per span.
    assert(inside.bot.labelScore(0,1,"PRP", 0) near 0.0)
    assert(inside.top.labelScore(0,1,"NPu", 0) near math.log(1.0/3.0))
    assert(inside.top.enteredLabelScores(0,1).length near 1,"t01")
    assert(inside.bot.enteredLabelScores(0,1).length near 1,"b01")
    assert(inside.bot.labelScore(1,2, "VBZ", 0) near 0.0)
    assert(inside.top.labelScore(1,2, "VBZu", 0) near 0.0)
    assert(inside.bot.enteredLabelScores(1,2).length near 1,"t12")
    assert(inside.top.enteredLabelScores(1,2).length near 1,"b12")
    assert(inside.bot.labelScore(2,3, "JJ", 0) near 0.0)
    assert(inside.top.labelScore(2,3, "JJu", 0) near 0.0)
    assert(inside.bot.enteredLabelScores(2,3).toSeq.length near 1,"b23")
    assert(inside.top.enteredLabelScores(2,3).toSeq.length near 1,"t23")
    assert(inside.top.labelScore(3,4, "NPu", 0) near math.log(1.0/3.0))
    assert(inside.bot.labelScore(3,4, "NN", 0) near 0.0)
    assert(inside.top.labelScore(3,4, "NNu", 0) near 0.0)
    assert(inside.bot.enteredLabelScores(3,4).toSeq.length near 1, "b34")
    assert(inside.top.enteredLabelScores(3,4).toSeq.length near 2, "t34")
    assert(inside.bot.enteredLabelScores(0,2).toSeq.length near 0)
    assert(inside.bot.enteredLabelScores(1,3).toSeq.length near 0)
    assert(inside.bot.labelScore(2,4, "NPb", 0) near 0.0,"24npb")
    assert(inside.bot.labelScore(2,4, "ADJPb", 0) near math.log(1.0/3.0),"24adjpb")
    assert(inside.top.labelScore(2,4, "ADJPu", 0) near math.log(1.0/3.0),"24adjpu")
    assert(inside.top.labelScore(2,4, "NPu", 0) near math.log(1.0/3.0),"24npu")
    assert(inside.top.enteredLabelScores(2,4).toSeq.length near 2)
    assert(inside.bot.enteredLabelScores(2,4).toSeq.length near 2)
    assert(inside.bot.enteredLabelScores(0,3).toSeq.length near 0)
    assert((inside.top.labelScore(1,4, "VPu", 0) - math.log(1.0/3.0)).abs < 1E-4)
    assert(inside.bot.enteredLabelScores(1,4).toSeq.length near 1,"b14")
    assert((inside.top.labelScore(0,4, "S", 0) - math.log(1.0/9.0)).abs < 1E-4)
    assert(inside.top.enteredLabelScores(0,4).toSeq.length near 1)
    // Outside chart: the root S has outside score 0 (= log 1).
    assert((outside.top.labelScore(0,4, "S", 0) near 0.0))
    assert(outside.top.enteredLabelScores(0,4).toSeq.length === 1)
    assert(outside.top.enteredLabelScores(0,3).toSeq.length === 0)
    assert((outside.top.labelScore(1,4, "VPu", 0) - math.log(1.0/3.0)).abs < 1E-4, outside.top.enteredLabelScores(1,4).toIndexedSeq)
    assert((outside.bot.labelScore(0,1,"PRP", 0) - math.log(1.0/9.0)).abs < 1E-5, outside.bot.enteredLabelScores(1,4) + " " + outside.bot.labelScore(0,1,"PRP", 0) + " " + math.log(1.0/9.0))
    assert((outside.top.labelScore(2,4, "NPu", 0) - math.log(1.0/6.0)).abs < 1E-6, outside.top.labelScore(2,4,"NPu", 0) -> "NP" -> outside.bot.labelScore(2,4,"NPb", 0))
    assert((outside.top.labelScore(2,4, "ADJPu", 0) - math.log(1.0/6.0)).abs < 1E-6, outside.bot.labelScore(2,4,"ADJPb", 0) -> "ADJPb")
    assert((outside.top.labelScore(0,1,"NPu", 0) - math.log(1.0/3.0)).abs < 1E-5, outside.top.enteredLabelScores(0,1).toIndexedSeq + "NPu")
    assert((outside.bot.labelScore(1,2,"VBZ", 0) - math.log(1.0/9.0)).abs < 1E-5, outside.bot.enteredLabelScores(1,2).toIndexedSeq + "VBZb")
    assert((outside.bot.labelScore(2,3,"JJ", 0) - math.log(1.0/9.0)).abs < 1E-5, outside.bot.enteredLabelScores(2,3).toIndexedSeq + "JJ")
    assert((outside.top.labelScore(3,4, "NPu", 0) - math.log(1.0/6.0)).abs < 1E-5, outside.top.enteredLabelScores(3,4).toIndexedSeq + "NP")
    assert((outside.bot.labelScore(3,4,"NN", 0) - math.log(1.0/9.0)).abs < 1E-5, outside.bot.labelScore(3,4,"NN", 0) + "NN")
  }
import DSLGrammar._
  // Toy grammar for "He has good control": each category has a unary (…u)
  // and a binary (…b) variant; the NP/ADJP choices make the sentence ambiguous.
  def grammarForComplexExample = grammar(
    'S -> 'Sb -> 1.0,
    'Sb -> ('NPu,'VPu) -> (1.0),
    'VPu -> 'VPb -> 1.0,
    'VPb -> ('VBZu,'NPu) -> (1.0),
    'VPb -> ('VBZu,'ADJPu) -> (1.0),
    'ADJPu-> 'ADJPb -> 1.0,
    'ADJPb-> ('JJu,'NPu) -> (1.0),
    'NPu-> 'NPb -> 1.0,
    'NPu-> ('PRP) -> (1.0),
    'NPu-> ('NN) -> (1.0),
    'NPb-> ('JJu,'NNu) -> (1.0),
    'JJu -> 'JJ -> 1.0,
    'NNu -> 'NN -> 1.0,
    'VBZu -> 'VBZ -> 1.0,
    'XXX -> 'PUNCT -> 1.0,
    'JJ -> "good" -> (1.0),
    'NN -> "control" -> (1.0),
    'VBZ -> "has" -> (1.0),
    'PRP -> "He" -> (1.0),
    'PUNCT -> "." -> (1.0)
  )
  // Lexicon entries for the test sentence plus punctuation.
  def lexiconForComplexExample = IndexedSeq(
    LexicalProduction("JJ","good"),
    LexicalProduction("NN","control"),
    LexicalProduction("VBZ","has"),
    LexicalProduction("PRP","He"),
    LexicalProduction("PUNCT",".")
  )
}
| langkilde/epic | src/test/scala/epic/parser/InsideOutsideTest.scala | Scala | apache-2.0 | 9,229 |
package com.github.andr83.parsek.resources
import com.github.andr83.parsek.PValue
import com.typesafe.config.Config
/** Loads a [[PValue]] from a source described by the given configuration.
  *
  * @author andr83
  */
abstract class ResourceLoader {
  // Reads the resource described by `config` and returns its parsed value.
  def read(config: Config): PValue
}
| andr83/parsek | core/src/main/scala/com/github/andr83/parsek/resources/ResourceLoader.scala | Scala | mit | 213 |
package io.swagger.client.model
import io.swagger.client.core.ApiModel
import org.joda.time.DateTime
// Generated API model: wraps an optional Credential payload plus a success flag.
case class Inline_response_200_10 (
  data: Option[Credential],
  success: Option[Boolean])
  extends ApiModel
| QuantiModo/QuantiModo-SDK-Akka-Scala | src/main/scala/io/swagger/client/model/Inline_response_200_10.scala | Scala | gpl-2.0 | 218 |
/*
* Copyright (c) 2016. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.compiler.compile
import org.sireum.util._
import java.io.File
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
// Marker for compiler output destinations.
trait Output {
}
// Output layout where all sources compile into one shared directory.
trait SingleOutput extends Output {
  /** The directory where class files should be generated.
   * Incremental compilation will manage the class files in this directory.
   * In particular, outdated class files will be deleted before compilation.
   * It is important that this directory is exclusively used for one set of sources. */
  def outputDirectory(): File
}
// Output layout where each source group compiles into its own directory.
trait MultipleOutput extends Output {
  trait OutputGroup {
    /** The directory where source files are stored for this group.
     * Source directories should uniquely identify the group for a source file. */
    def sourceDirectory(): File
    /** The directory where class files should be generated.
     * Incremental compilation will manage the class files in this directory.
     * In particular, outdated class files will be deleted before compilation.
     * It is important that this directory is exclusively used for one set of sources. */
    def outputDirectory(): File
  }
  def outputGroups(): IList[OutputGroup]
}
| arguslab/jawa-compiler | src/main/scala/org/argus/jawa/compiler/compile/Output.scala | Scala | epl-1.0 | 1,551 |
package ilc
package feature
package integers
import org.scalatest.FunSuite
import ilc.util.EvalGenerated
import ilc.language.bacchus
class IntegerSuite
  extends FunSuite
  with ImplicitSyntaxSugar
  with Evaluation
  with ToScala
  with bacchus.Evaluation
  with bacchus.BasicDerivation
  with bacchus.ToScala
  with EvalGenerated
  with functions.Pretty
  with AbelianDerivation
{
  // Asserts that `t` evaluates to IntValue(i) in the interpreter AND that
  // the Scala code generated from `t` also evaluates to i; on generated-code
  // failure, the term, its pretty form, and its translation are reported.
  def expectToGet(i: Int)(t: => Term) {
    assert(eval(t) === IntValue(i))
    try { assert(evalGenerated(t) === i) }
    catch { case e: Throwable =>
      // NOTE(review): Throwable is caught deliberately to attach diagnostics,
      // but it will also swallow fatal errors — confirm this is intended.
      info(e.getMessage)
      info(pretty(t))
      info(toScala(t))
      fail
    }
  }
  test("2 + 2 = 4") { expectToGet( 4) { PlusInt ! 2 ! 2 } }
  test("2 - 4 = -2") { expectToGet(-2) { PlusInt ! 2 ! (NegateInt ! 4) } }
  // Applying the change (j ⊝ i) to i must recover j.
  test("i ⊕ (j ⊝ i) = j") {
    val (i, j) = (4, 2)
    expectToGet(j) { ChangeUpdate ! (Diff ! j ! i) ! i }
  }
  test("-4 ⊕ (+ 20) = 16") {
    // A "+20" change encoded as the left injection of (additive group, 20).
    val plus20: Term = Inj1.tapply(ℤ) !
      (Pair ! additiveGroupOnIntegers ! 20)
    assert(plus20.getType === deltaType(IntType))
    expectToGet(16) { ChangeUpdate ! plus20 ! -4 }
  }
}
| inc-lc/ilc-scala | src/test/scala/ilc/feature/integers/IntegerSuite.scala | Scala | mit | 1,115 |
package de.tudresden.inf.lat.tabulas.datatype
/** This models the primitive data type Decimal.
*
*/
case class DecimalType() extends PrimitiveType {
  final val TypeName: String = "Decimal"
  // A Decimal is a scalar, never a list type.
  override val isList: Boolean = false
  override val getTypeName: String = TypeName
  override val toString: String = getTypeName
  // Casting goes through the textual rendering of the source value.
  def castInstance(value: PrimitiveTypeValue): DecimalValue = parse(value.render)
  override def parse(str: String): DecimalValue = DecimalValue(str)
}
object DecimalType {}
| julianmendez/tabulas | tabulas-core/src/main/scala/de/tudresden/inf/lat/tabulas/datatype/DecimalType.scala | Scala | apache-2.0 | 508 |
package chess
import chess.format.FEN
/** A named chess opening position.
  *
  * @param eco        ECO code of the opening (e.g. "B00")
  * @param name       opening name, possibly with a ":"-separated variation
  * @param fen        the position, in Forsyth-Edwards notation
  * @param wikiPath   path fragment of the English Wikipedia article
  * @param moves      move sequence leading to the position
  * @param featurable whether the position may be featured
  */
case class StartingPosition(
    eco: String,
    name: String,
    fen: FEN,
    wikiPath: String,
    moves: String,
    featurable: Boolean = true
) {
  /** Wikipedia article describing this opening. */
  def url = s"https://en.wikipedia.org/wiki/$wikiPath"
  /** Opening name without the variation suffix (text before the first ':'). */
  val shortName = name.takeWhile(_ != ':')
  /** ECO code followed by the full name. */
  def fullName = s"$eco $name"
  /** True when this is the standard initial position. */
  def initial = fen == format.Forsyth.initial
}
object StartingPosition {
case class Category(name: String, positions: List[StartingPosition])
val categories: List[Category] = List(
Category(
"e4",
List(
StartingPosition(
"B00",
"King's Pawn",
FEN("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq - 0 1"),
"King's_Pawn_Game",
"1. e4",
featurable = false
),
StartingPosition(
"B00",
"Open Game",
FEN("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"Open_Game",
"1. e4 e5",
featurable = false
),
StartingPosition(
"B02",
"Alekhine's Defence",
FEN("rnbqkb1r/pppppppp/5n2/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 1 2"),
"Alekhine's_Defence",
"1. e4 Nf6"
),
StartingPosition(
"B04",
"Alekhine's Defence: Modern Variation",
FEN("rnbqkb1r/ppp1pppp/3p4/3nP3/3P4/5N2/PPP2PPP/RNBQKB1R b KQkq - 1 4"),
"Alekhine's_Defence#Modern_Variation:_3.d4_d6_4.Nf3",
"1. e4 Nf6 2. e5 Nd5 3. d4 d6 4. Nf3"
),
StartingPosition(
"C23",
"Bishop's Opening",
FEN("rnbqkbnr/pppp1ppp/8/4p3/2B1P3/8/PPPP1PPP/RNBQK1NR b KQkq - 1 2"),
"Bishop%27s_Opening",
"1. e4 e5 2. Bc4"
),
StartingPosition(
"B10",
"Caro-Kann Defence",
FEN("rnbqkbnr/pp1ppppp/2p5/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"Caro–Kann_Defence",
"1. e4 c6"
),
StartingPosition(
"B12",
"Caro-Kann Defence: Advance Variation",
FEN("rnbqkbnr/pp2pppp/2p5/3pP3/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3"),
"Caro–Kann_Defence#Advance_Variation:_3.e5",
"1. e4 c6 2. d4 d5 3. e5"
),
StartingPosition(
"B18",
"Caro-Kann Defence: Classical Variation",
FEN("rn1qkbnr/pp2pppp/2p5/5b2/3PN3/8/PPP2PPP/R1BQKBNR w KQkq - 1 5"),
"Caro–Kann_Defence#Classical_Variation:_4...Bf5",
"1. e4 c6 2. d4 d5 3. Nc3 dxe4 4. Nxe4 Bf5"
),
StartingPosition(
"B13",
"Caro-Kann Defence: Exchange Variation",
FEN("rnbqkbnr/pp2pppp/2p5/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3"),
"Caro%E2%80%93Kann_Defence#Exchange_Variation:_3.exd5_cxd5",
"1. e4 c6 2. d4 d5 3. exd5"
),
StartingPosition(
"B14",
"Caro-Kann Defence: Panov-Botvinnik Attack",
FEN("rnbqkb1r/pp2pppp/5n2/3p4/2PP4/2N5/PP3PPP/R1BQKBNR b KQkq - 2 5"),
"Caro–Kann_Defence#Panov.E2.80.93Botvinnik_Attack:_4.c4",
"1. e4 c6 2. d4 d5 3. exd5 cxd5 4. c4 Nf6 5. Nc3"
),
StartingPosition(
"B17",
"Caro-Kann Defence: Steinitz Variation",
FEN("r1bqkbnr/pp1npppp/2p5/8/3PN3/8/PPP2PPP/R1BQKBNR w KQkq - 1 5"),
"Caro–Kann_Defence#Modern_Variation:_4...Nd7",
"1. e4 c6 2. d4 d5 3. Nc3 dxe4 4. Nxe4 Nd7"
),
StartingPosition(
"C21",
"Danish Gambit",
FEN("rnbqkbnr/pppp1ppp/8/8/3pP3/2P5/PP3PPP/RNBQKBNR b KQkq - 0 3"),
"Danish_Gambit",
"1. e4 e5 2. d4 exd4 3. c3"
),
StartingPosition(
"C46",
"Four Knights Game",
FEN("r1bqkb1r/pppp1ppp/2n2n2/4p3/4P3/2N2N2/PPPP1PPP/R1BQKB1R w KQkq - 4 4"),
"Four_Knights_Game",
"1. e4 e5 2. Nf3 Nc6 3. Nc3 Nf6"
),
StartingPosition(
"C47",
"Four Knights Game: Scotch Variation",
FEN("r1bqkb1r/pppp1ppp/2n2n2/4p3/3PP3/2N2N2/PPP2PPP/R1BQKB1R b KQkq - 0 4"),
"Four_Knights_Game#4.d4",
"1. e4 e5 2. Nf3 Nc6 3. Nc3 Nf6 4. d4"
),
StartingPosition(
"C48",
"Four Knights Game: Spanish Variation",
FEN("r1bqkb1r/pppp1ppp/2n2n2/1B2p3/4P3/2N2N2/PPPP1PPP/R1BQK2R b KQkq - 5 4"),
"Four_Knights_Game#4.Bb5",
"1. e4 e5 2. Nf3 Nf6 3. Nc3 Nc6 4. Bb5"
),
StartingPosition(
"C00",
"French Defence",
FEN("rnbqkbnr/pppp1ppp/4p3/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"French_Defence",
"1. e4 e6"
),
StartingPosition(
"C02",
"French Defence: Advance Variation",
FEN("rnbqkbnr/ppp2ppp/4p3/3pP3/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3"),
"French_Defence#Advance_Variation:_3.e5",
"1. e4 e6 2. d4 d5 3. e5"
),
StartingPosition(
"C11",
"French Defence: Burn Variation",
FEN("rnbqkb1r/ppp2ppp/4pn2/3p2B1/3PP3/2N5/PPP2PPP/R2QKBNR b KQkq - 0 5"),
"French_Defence#3.Nc3",
"1. e4 e6 2. d4 d5 3. Nc3 Nf6 4. Bg5 dxe4"
),
StartingPosition(
"C11",
"French Defence: Classical Variation",
FEN("rnbqkb1r/ppp2ppp/4pn2/3p4/3PP3/2N5/PPP2PPP/R1BQKBNR w KQkq - 2 4"),
"French_Defence#Classical_Variation:_3...Nf6",
"1. e4 e6 2. d4 d5 3. Nc3 Nf6"
),
StartingPosition(
"C01",
"French Defence: Exchange Variation",
FEN("rnbqkbnr/ppp2ppp/4p3/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3"),
"French_Defence#Exchange_Variation:_3.exd5_exd5",
"1. e4 e6 2. d4 d5 3. exd5"
),
StartingPosition(
"C10",
"French Defence: Rubinstein Variation",
FEN("rnbqkbnr/ppp2ppp/4p3/8/3Pp3/2N5/PPP2PPP/R1BQKBNR w KQkq - 0 4"),
"French_Defence#Rubinstein_Variation:_3...dxe4",
"1. e4 e6 2. d4 d5 3. Nc3 dxe4"
),
StartingPosition(
"C03",
"French Defence: Tarrasch Variation",
FEN("rnbqkbnr/ppp2ppp/4p3/3p4/3PP3/8/PPPN1PPP/R1BQKBNR b KQkq - 1 3"),
"French_Defence#Tarrasch_Variation:_3.Nd2",
"1. e4 e6 2. d4 d5 3. Nd2"
),
StartingPosition(
"C15",
"French Defence: Winawer Variation",
FEN("rnbqk1nr/ppp2ppp/4p3/3p4/1b1PP3/2N5/PPP2PPP/R1BQKBNR w KQkq - 2 4"),
"French_Defence#Winawer_Variation:_3...Bb4",
"1. e4 e6 2. d4 d5 3. Nc3 Bb4"
),
StartingPosition(
"C50",
"Giuoco Piano",
FEN("r1bqk1nr/pppp1ppp/2n5/2b1p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 4 4"),
"Giuoco_Piano",
"1. e4 e5 2. Nf3 Nc6 3. Bc4 Bc5"
),
StartingPosition(
"C50",
"Italian Game",
FEN("r1bqkbnr/pppp1ppp/2n5/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 3 3"),
"Italian_Game",
"1. e4 e5 2. Nf3 Nc6 3. Bc4"
),
StartingPosition(
"C51",
"Evans Gambit",
FEN("r1bqk1nr/pppp1ppp/2n5/2b1p3/1PB1P3/5N2/P1PP1PPP/RNBQK2R b KQkq - 0 4"),
"Evans_Gambit",
"1. e4 e5 2. Nf3 Nc6 3. Bc4 Bc5 4. b4"
),
StartingPosition(
"C50",
"Italian Game: Hungarian Defence",
FEN("r1bqk1nr/ppppbppp/2n5/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 4 4"),
"Hungarian_Defense",
"1. e4 e5 2. Nf3 Nc6 3. Bc4 Be7"
),
StartingPosition(
"C55",
"Italian Game: Two Knights Defence",
FEN("r1bqkb1r/pppp1ppp/2n2n2/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 4 4"),
"Two_Knights_Defense",
"1. e4 e5 2. Nf3 Nc6 3. Bc4 Nf6"
),
StartingPosition(
"C30",
"King's Gambit",
FEN("rnbqkbnr/pppp1ppp/8/4p3/4PP2/8/PPPP2PP/RNBQKBNR b KQkq - 0 2"),
"King's_Gambit",
"1. e4 e5 2. f4"
),
StartingPosition(
"C33",
"King's Gambit Accepted",
FEN("rnbqkbnr/pppp1ppp/8/8/4Pp2/8/PPPP2PP/RNBQKBNR w KQkq - 0 3"),
"King's_Gambit#King.27s_Gambit_Accepted:_2...exf4",
"1. e4 e5 2. f4 exf4"
),
StartingPosition(
"C33",
"King's Gambit Accepted: Bishop's Gambit",
FEN("rnbqkbnr/pppp1ppp/8/8/2B1Pp2/8/PPPP2PP/RNBQK1NR b KQkq - 1 3"),
"King's_Gambit#King.27s_Gambit_Accepted:_2...exf4",
"1. e4 e5 2. f4 exf4 3. Bc4"
),
StartingPosition(
"C36",
"King's Gambit Accepted: Modern Defence",
FEN("rnbqkbnr/ppp2ppp/8/3p4/4Pp2/5N2/PPPP2PP/RNBQKB1R w KQkq d6 0 4"),
"King's_Gambit#Modern_Defence:_3...d5",
"1. e4 e5 2. f4 exf4 3. Nf3 d5"
),
StartingPosition(
"C30",
"King's Gambit Accepted: Classical Variation",
FEN("rnbqkbnr/pppp1p1p/8/6p1/4Pp2/5N2/PPPP2PP/RNBQKB1R w KQkq - 0 4"),
"King's_Gambit#Classical_Variation:_3...g5",
"1. e4 e5 2. f4 exf4 3. Nf3 g5"
),
StartingPosition(
"C30",
"King's Gambit Declined: Classical Variation",
FEN("rnbqk1nr/pppp1ppp/8/2b1p3/4PP2/8/PPPP2PP/RNBQKBNR w KQkq - 1 3"),
"King's_Gambit#Classical_Defence:_2...Bc5",
"1. e4 e5 2. f4 Bc5"
),
StartingPosition(
"C31",
"King's Gambit: Falkbeer Countergambit",
FEN("rnbqkbnr/ppp2ppp/8/3pp3/4PP2/8/PPPP2PP/RNBQKBNR w KQkq - 0 3"),
"King%27s_Gambit,_Falkbeer_Countergambit",
"1. e4 e5 2. f4 d5"
),
StartingPosition(
"B06",
"Modern Defence",
FEN("rnbqkbnr/pppppp1p/6p1/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"Modern_Defense",
"1. e4 g6"
),
StartingPosition(
"B06",
"Modern Defence: Robatsch Defence",
FEN("rnbqk1nr/ppppppbp/6p1/8/3PP3/2N5/PPP2PPP/R1BQKBNR b KQkq - 2 3"),
"Modern_Defense",
"1. e4 g6 2. d4 Bg7 3. Nc3"
),
StartingPosition(
"C41",
"Philidor Defence",
FEN("rnbqkbnr/ppp2ppp/3p4/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 3"),
"Philidor_Defence",
"1. e4 e5 2. Nf3 d6"
),
StartingPosition(
"C41",
"Philidor Defence: Lion Variation",
FEN("r1bqkb1r/pppn1ppp/3p1n2/4p3/3PP3/2N2N2/PPP2PPP/R1BQKB1R w KQkq - 2 5"),
"Philidor_Defence",
"1. e4 d6 2. d4 Nf6 3. Nc3 e5 4. Nf3 Nbd7"
),
StartingPosition(
"B07",
"Lion Variation: Anti-Philidor",
FEN("r1bqkb1r/pppn1ppp/3p1n2/4p3/3PPP2/2N5/PPP3PP/R1BQKBNR w KQkq - 0 5"),
"Philidor_Defence",
"1. e4 d6 2. d4 Nf6 3. Nc3 Nbd7 4. f4 e5"
),
StartingPosition(
"B07",
"Pirc Defence",
FEN("rnbqkb1r/ppp1pppp/3p1n2/8/3PP3/8/PPP2PPP/RNBQKBNR w KQkq - 2 3"),
"Pirc_Defence",
"1. e4 d6 2. d4 Nf6 3. Nc3"
),
StartingPosition(
"B09",
"Pirc Defence: Austrian Attack",
FEN("rnbqkb1r/ppp1pp1p/3p1np1/8/3PPP2/2N5/PPP3PP/R1BQKBNR b KQkq - 0 4"),
"Pirc_Defence#Austrian_Attack:_4.f4",
"1. e4 d6 2. d4 Nf6 3. Nc3 g6 4. f4"
),
StartingPosition(
"B07",
"Pirc Defence: Classical Variation",
FEN("rnbqkb1r/ppp1pp1p/3p1np1/8/3PP3/2N2N2/PPP2PPP/R1BQKB1R b KQkq - 1 4"),
"Pirc_Defence#Classical_.28Two_Knights.29_System:_4.Nf3",
"1. e4 d6 2. d4 Nf6 3. Nc3 g6 4. Nf3"
),
StartingPosition(
"B07",
"Pirc Defence: Lion Variation",
FEN("r1bqkb1r/pppnpppp/3p1n2/8/3PP3/2N5/PPP2PPP/R1BQKBNR w KQkq - 3 4"),
"Pirc_Defence#Classical_.28Two_Knights.29_System",
"1. e4 d6 2. d4 Nf6 3. Nc3 Nbd7"
),
StartingPosition(
"C42",
"Petrov's Defence",
FEN("rnbqkb1r/pppp1ppp/5n2/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 2 3"),
"Petrov's_Defence",
"1. e4 e5 2. Nf3 Nf6"
),
StartingPosition(
"C42",
"Petrov's Defence: Classical Attack",
FEN("rnbqkb1r/ppp2ppp/3p4/8/3Pn3/5N2/PPP2PPP/RNBQKB1R b KQkq - 0 5"),
"Petrov's_Defence#3.Nxe5",
"1. e4 e5 2. Nf3 Nf6 3. Nxe5 d6 4. Nf3 Nxe4 5. d4"
),
StartingPosition(
"C43",
"Petrov's Defence: Steinitz Attack",
FEN("rnbqkb1r/pppp1ppp/5n2/4p3/3PP3/5N2/PPP2PPP/RNBQKB1R b KQkq - 0 3"),
"Petrov's_Defence#3.d4",
"1. e4 e5 2. Nf3 Nf6 3. d4"
),
StartingPosition(
"C42",
"Petrov's Defence: Three Knights Game",
FEN("rnbqkb1r/pppp1ppp/5n2/4p3/4P3/2N2N2/PPPP1PPP/R1BQKB1R b KQkq - 3 3"),
"Petrov's_Defence#3.Nc3",
"1. e4 e5 2. Nf3 Nf6 3. Nc3"
),
StartingPosition(
"C60",
"Ruy Lopez",
FEN("r1bqkbnr/pppp1ppp/2n5/1B2p3/4P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 3 3"),
"Ruy_Lopez",
"1. e4 e5 2. Nf3 Nc6 3. Bb5"
),
StartingPosition(
"C65",
"Ruy Lopez: Berlin Defence",
FEN("r1bqkb1r/pppp1ppp/2n2n2/1B2p3/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 4 4"),
"Ruy_Lopez#Berlin_Defence:_3...Nf6",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 Nf6"
),
StartingPosition(
"C64",
"Ruy Lopez: Classical Variation",
FEN("r1bqk1nr/pppp1ppp/2n5/1Bb1p3/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 4 4"),
"Ruy_Lopez#Classical_Defence:_3...Bc5",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 Bc5"
),
StartingPosition(
"C84",
"Ruy Lopez: Closed Variation",
FEN("r1bqk2r/2ppbppp/p1n2n2/1p2p3/4P3/1B3N2/PPPP1PPP/RNBQR1K1 b kq - 1 7"),
"Ruy_Lopez#Main_line:_4.Ba4_Nf6_5.0-0_Be7_6.Re1_b5_7.Bb3_d6_8.c3_0-0",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4. Ba4 Nf6 5. O-O Be7 6. Re1 b5 7. Bb3"
),
StartingPosition(
"C68",
"Ruy Lopez: Exchange Variation",
FEN("r1bqkbnr/1ppp1ppp/p1B5/4p3/4P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 0 4"),
"Ruy_Lopez,_Exchange_Variation",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4. Bxc6"
),
StartingPosition(
"C89",
"Ruy Lopez: Marshall Attack",
FEN("r1bq1rk1/2p1bppp/p1n2n2/1p1pp3/4P3/1BP2N2/PP1P1PPP/RNBQR1K1 w - - 0 9"),
"Ruy_Lopez#Marshall_Attack",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 4. Ba4 Nf6 5. O-O Be7 6. Re1 b5 7. Bb3 O-O 8. c3 d5"
),
StartingPosition(
"C63",
"Ruy Lopez: Schliemann Defence",
FEN("r1bqkbnr/pppp2pp/2n5/1B2pp2/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 0 4"),
"Ruy_Lopez#Schliemann_Defence:_3...f5",
"1. e4 e5 2. Nf3 Nc6 3. Bb5 f5"
),
StartingPosition(
"B01",
"Scandinavian Defence",
FEN("rnbqkbnr/ppp1pppp/8/3p4/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"Scandinavian_Defense",
"1. e4 d5"
),
StartingPosition(
"B01",
"Scandinavian Defence: Modern Variation",
FEN("rnbqkb1r/ppp1pppp/5n2/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3"),
"Scandinavian_Defense#2...Nf6",
"1. e4 d5 2. exd5 Nf6 3. d4"
),
StartingPosition(
"B01",
"Scandinavian Defence: Icelandic-Palme Gambit",
FEN("rnbqkb1r/ppp2ppp/4pn2/3P4/2P5/8/PP1P1PPP/RNBQKBNR w KQkq - 0 4"),
"Scandinavian_Defense#2...Nf6",
"1. e4 d5 2. exd5 Nf6 3. c4 e6"
),
StartingPosition(
"C44",
"Scotch Game",
FEN("r1bqkbnr/pppp1ppp/2n5/4p3/3PP3/5N2/PPP2PPP/RNBQKB1R b KQkq - 0 3"),
"Scotch_Game",
"1. e4 e5 2. Nf3 Nc6 3. d4"
),
StartingPosition(
"C45",
"Scotch Game: Classical Variation",
FEN("r1bqk1nr/pppp1ppp/2n5/2b5/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 1 5"),
"Scotch_Game,_Classical_Variation",
"1. e4 e5 2. Nf3 Nc6 3. d4 exd4 4. Nxd4 Bc5"
),
StartingPosition(
"C45",
"Scotch Game: Mieses Variation",
FEN("r1bqkb1r/p1pp1ppp/2p2n2/4P3/8/8/PPP2PPP/RNBQKB1R b KQkq - 0 6"),
"Scotch_Game#Schmidt_Variation:_4...Nf6",
"1. e4 e5 2. Nf3 Nc6 3. d4 exd4 4. Nxd4 Nf6 5. Nxc6 bxc6 6. e5"
),
StartingPosition(
"C45",
"Scotch Game: Steinitz Variation",
FEN("r1b1kbnr/pppp1ppp/2n5/8/3NP2q/8/PPP2PPP/RNBQKB1R w KQkq - 1 5"),
"Scotch_Game#Steinitz_Variation:_4...Qh4.21.3F",
"1. e4 e5 2. Nf3 Nc6 3. d4 exd4 4. Nxd4 Qh4"
),
StartingPosition(
"B20",
"Sicilian Defence",
FEN("rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2"),
"Sicilian_Defence",
"1. e4 c5"
),
StartingPosition(
"B36",
"Sicilian Defence: Accelerated Dragon",
FEN("r1bqkbnr/pp1ppp1p/2n3p1/8/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 0 5"),
"Sicilian_Defence,_Accelerated_Dragon",
"1. e4 c5 2. Nf3 Nc6 3. d4 cxd4 4. Nxd4 g6"
),
StartingPosition(
"B22",
"Sicilian Defence: Alapin Variation",
FEN("rnbqkbnr/pp1ppppp/8/2p5/4P3/2P5/PP1P1PPP/RNBQKBNR b KQkq - 0 2"),
"Sicilian_Defence,_Alapin_Variation",
"1. e4 c5 2. c3"
),
StartingPosition(
"B23",
"Sicilian Defence: Closed Variation",
FEN("rnbqkbnr/pp1ppppp/8/2p5/4P3/2N5/PPPP1PPP/R1BQKBNR b KQkq - 1 2"),
"Sicilian_Defence#Closed_Sicilian",
"1. e4 c5 2. Nc3"
),
StartingPosition(
"B70",
"Sicilian Defence: Dragon Variation",
FEN("rnbqkb1r/pp2pp1p/3p1np1/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 0 6"),
"Sicilian_Defence,_Dragon_Variation",
"1. e4 c5 2. Nf3 d6 3. d4 cxd4 4. Nxd4 Nf6 5. Nc3 g6"
),
StartingPosition(
"B23",
"Sicilian Defence: Grand Prix Attack",
FEN("rnbqkbnr/pp1ppppp/8/2p5/4PP2/8/PPPP2PP/RNBQKBNR b KQkq - 0 2"),
"Sicilian_Defence#Grand_Prix_Attack",
"1. e4 c5 2. f4"
),
StartingPosition(
"B27",
"Sicilian Defence: Hyper-Accelerated Dragon",
FEN("rnbqkbnr/pp1ppp1p/6p1/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 0 3"),
"Sicilian_Defence#2...g6:_Hungarian_Variation",
"1. e4 c5 2. Nf3 g6"
),
StartingPosition(
"B41",
"Sicilian Defence: Kan Variation",
FEN("rnbqkbnr/1p1p1ppp/p3p3/8/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 0 5"),
"Sicilian_Defence#Kan_.28Paulsen.29_Variation:_4...a6",
"1. e4 c5 2. Nf3 e6 3. d4 cxd4 4. Nxd4 a6"
),
StartingPosition(
"B90",
"Sicilian Defence: Najdorf Variation",
FEN("rnbqkb1r/1p2pppp/p2p1n2/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 0 6"),
"Sicilian_Defence,_Najdorf_Variation",
"1. e4 c5 2. Nf3 d6 3. d4 cxd4 4. Nxd4 Nf6 5. Nc3 a6"
),
StartingPosition(
"B60",
"Sicilian Defence: Richter-Rauzer Variation",
FEN("r1bqkb1r/pp2pppp/2np1n2/6B1/3NP3/2N5/PPP2PPP/R2QKB1R b KQkq - 4 6"),
"Sicilian_Defence#Classical_Variation:_5...Nc6",
"1. e4 c5 2. Nf3 d6 3. d4 cxd4 4. Nxd4 Nf6 5. Nc3 Nc6 6. Bg5"
),
StartingPosition(
"B80",
"Sicilian Defence: Scheveningen Variation",
FEN("rnbqkb1r/pp3ppp/3ppn2/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 0 6"),
"Sicilian_Defence,_Scheveningen_Variation",
"1. e4 c5 2. Nf3 d6 3. d4 cxd4 4. Nxd4 Nf6 5. Nc3 e6"
),
StartingPosition(
"B21",
"Sicilian Defence: Smith-Morra Gambit",
FEN("rnbqkbnr/pp1ppppp/8/8/3pP3/2P5/PP3PPP/RNBQKBNR b KQkq - 0 3"),
"Sicilian_Defence,_Smith–Morra_Gambit",
"1. e4 c5 2. d4 cxd4 3. c3"
),
StartingPosition(
"C25",
"Vienna Game",
FEN("rnbqkbnr/pppp1ppp/8/4p3/4P3/2N5/PPPP1PPP/R1BQKBNR b KQkq - 1 2"),
"Vienna_Game",
"1. e4 e5 2. Nc3"
),
StartingPosition(
"C27",
"Vienna Game: Frankenstein-Dracula Variation",
FEN("rnbqkb1r/pppp1ppp/8/4p3/2B1n3/2N5/PPPP1PPP/R1BQK1NR w KQkq - 0 4"),
"Frankenstein-Dracula_Variation",
"1. e4 e5 2. Nc3 Nf6 3. Bc4 Nxe4"
),
StartingPosition(
"C46",
"Four Knights Game: Halloween Gambit",
FEN("r1bqkb1r/pppp1ppp/2n2n2/4N3/4P3/2N5/PPPP1PPP/R1BQKB1R b KQkq - 0 4"),
"Halloween_Gambit",
"1. e4 e5 2. Nf3 Nc6 3. Nc3 Nf6 4. Nxe5"
),
StartingPosition(
"C20",
"King's Pawn Game: Wayward Queen Attack",
FEN("rnbqkbnr/pppp1ppp/8/4p2Q/4P3/8/PPPP1PPP/RNB1KBNR b KQkq - 1 2"),
"Danvers_Opening",
"1. e4 e5 2. Qh5"
),
StartingPosition(
"C20",
"Bongcloud Attack",
FEN("rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPPKPPP/RNBQ1BNR b kq - 1 2"),
"Bong",
"1. e4 e5 2. Ke2",
featurable = false
)
)
),
Category(
"d4",
List(
StartingPosition(
"A40",
"Queen's Pawn",
FEN("rnbqkbnr/pppppppp/8/8/3P4/8/PPP1PPPP/RNBQKBNR b KQkq - 0 1"),
"Queen's_Pawn_Game",
"1. d4",
featurable = false
),
StartingPosition(
"A57",
"Benko Gambit",
FEN("rnbqkb1r/p2ppppp/5n2/1ppP4/2P5/8/PP2PPPP/RNBQKBNR w KQkq - 0 4"),
"Benko_Gambit",
"1. d4 Nf6 2. c4 c5 3. d5 b5"
),
StartingPosition(
"A61",
"Benoni Defence: Modern Benoni",
FEN("rnbqkb1r/pp1p1ppp/4pn2/2pP4/2P5/8/PP2PPPP/RNBQKBNR w KQkq - 0 4"),
"Modern_Benoni",
"1. d4 Nf6 2. c4 c5 3. d5 e6"
),
StartingPosition(
"A43",
"Benoni Defence: Czech Benoni",
FEN("rnbqkb1r/pp1p1ppp/5n2/2pPp3/2P5/8/PP2PPPP/RNBQKBNR w KQkq e6 0 4"),
"Benoni_Defense#Czech_Benoni:_1.d4_Nf6_2.c4_c5_3.d5_e5",
"1. d4 Nf6 2. c4 c5 3. d5 e5"
),
StartingPosition(
"D00",
"Blackmar Gambit",
FEN("rnbqkbnr/ppp1pppp/8/3p4/3PP3/8/PPP2PPP/RNBQKBNR b KQkq - 0 2"),
"Blackmar–Diemer_Gambit",
"1. d4 d5 2. e4"
),
StartingPosition(
"E11",
"Bogo-Indian Defence",
FEN("rnbqk2r/pppp1ppp/4pn2/8/1bPP4/5N2/PP2PPPP/RNBQKB1R w KQkq - 2 4"),
"Bogo-Indian_Defence",
"1. d4 Nf6 2. c4 e6 3. Nf3 Bb4+"
),
StartingPosition(
"E00",
"Catalan Opening",
FEN("rnbqkb1r/pppp1ppp/4pn2/8/2PP4/6P1/PP2PP1P/RNBQKBNR b KQkq - 0 3"),
"Catalan_Opening",
"1. d4 Nf6 2. c4 e6 3. g3"
),
StartingPosition(
"E06",
"Catalan Opening: Closed Variation",
FEN("rnbqk2r/ppp1bppp/4pn2/3p4/2PP4/5NP1/PP2PPBP/RNBQK2R b KQkq - 3 5"),
"Catalan_Opening",
"1. d4 Nf6 2. c4 e6 3. g3 d5 4. Nf3 Be7 5. Bg2"
),
StartingPosition(
"A80",
"Dutch Defence",
FEN("rnbqkbnr/ppppp1pp/8/5p2/3P4/8/PPP1PPPP/RNBQKBNR w KQkq - 0 2"),
"Dutch_Defence",
"1. d4 f5"
),
StartingPosition(
"A96",
"Dutch Defence: Classical Variation",
FEN("rnbq1rk1/ppp1b1pp/3ppn2/5p2/2PP4/5NP1/PP2PPBP/RNBQ1RK1 w - - 0 7"),
"Dutch_Defence",
"1. d4 f5 2. c4 Nf6 3. g3 e6 4. Bg2 Be7 5. Nf3 O-O 6. O-O d6"
),
StartingPosition(
"A87",
"Dutch Defence: Leningrad Variation",
FEN("rnbqk2r/ppppp1bp/5np1/5p2/2PP4/5NP1/PP2PPBP/RNBQK2R b KQkq - 3 5"),
"Dutch_Defence",
"1. d4 f5 2. c4 Nf6 3. g3 g6 4. Bg2 Bg7 5. Nf3"
),
StartingPosition(
"A83",
"Dutch Defence: Staunton Gambit",
FEN("rnbqkb1r/ppppp1pp/5n2/6B1/3Pp3/2N5/PPP2PPP/R2QKBNR b KQkq - 3 4"),
"Dutch_Defence",
"1. d4 f5 2. e4 fxe4 3. Nc3 Nf6 4. Bg5"
),
StartingPosition(
"A92",
"Dutch Defence: Stonewall Variation",
FEN("rnbq1rk1/ppp1b1pp/4pn2/3p1p2/2PP4/5NP1/PP2PPBP/RNBQ1RK1 w - - 0 7"),
"Dutch_Defence",
"1. d4 f5 2. c4 Nf6 3. g3 e6 4. Bg2 Be7 5. Nf3 O-O 6. O-O d5"
),
StartingPosition(
"D80",
"Grünfeld Defence",
FEN("rnbqkb1r/ppp1pp1p/5np1/3p4/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 0 4"),
"Grünfeld_Defence",
"1. d4 Nf6 2. c4 g6 3. Nc3 d5"
),
StartingPosition(
"D82",
"Grünfeld Defence: Brinckmann Attack",
FEN("rnbqkb1r/ppp1pp1p/5np1/3p4/2PP1B2/2N5/PP2PPPP/R2QKBNR b KQkq - 1 4"),
"Grünfeld_Defence#Lines_with_4.Bf4_and_the_Gr.C3.BCnfeld_Gambit",
"1. d4 Nf6 2. c4 g6 3. Nc3 d5 4. Bf4"
),
StartingPosition(
"D85",
"Grünfeld Defence: Exchange Variation",
FEN("rnbqkb1r/ppp1pp1p/6p1/3n4/3P4/2N5/PP2PPPP/R1BQKBNR w KQkq - 0 5"),
"Grünfeld_Defence#Exchange_Variation:_4.cxd5_Nxd5_5.e4",
"1. d4 Nf6 2. c4 g6 3. Nc3 d5 4. cxd5 Nxd5"
),
StartingPosition(
"D80",
"Grünfeld Defence: Russian Variation",
FEN("rnbqkb1r/ppp1pp1p/5np1/3p4/2PP4/1QN5/PP2PPPP/R1B1KBNR b KQkq - 1 4"),
"Grünfeld_Defence#Russian_System:_4.Nf3_Bg7_5.Qb3",
"1. d4 Nf6 2. c4 g6 3. Nc3 d5 4. Qb3"
),
StartingPosition(
"D90",
"Grünfeld Defence: Taimanov Variation",
FEN("rnbqk2r/ppp1ppbp/5np1/3p2B1/2PP4/2N2N2/PP2PPPP/R2QKB1R b KQkq - 3 5"),
"Grünfeld_Defence#Taimanov.27s_Variation_with_4.Nf3_Bg7_5.Bg5",
"1. d4 Nf6 2. c4 g6 3. Nc3 d5 4. Nf3 Bg7 5. Bg5"
),
StartingPosition(
"E61",
"King's Indian Defence",
FEN("rnbqkb1r/pppppp1p/5np1/8/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"King's_Indian_Defence",
"1. d4 Nf6 2. c4 g6"
),
StartingPosition(
"E77",
"King's Indian Defence: 4.e4",
FEN("rnbqk2r/ppp1ppbp/3p1np1/8/2PPP3/2N5/PP3PPP/R1BQKBNR w KQkq - 0 5"),
"King's_Indian_Defence",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. e4 d6"
),
StartingPosition(
"E73",
"King's Indian Defence: Averbakh Variation",
FEN("rnbq1rk1/ppp1ppbp/3p1np1/6B1/2PPP3/2N5/PP2BPPP/R2QK1NR b KQ - 3 6"),
"King's_Indian_Defence#Averbakh_Variation:_5.Be2_0-0_6.Bg5",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. e4 d6 5. Be2 O-O 6. Bg5"
),
StartingPosition(
"E62",
"King's Indian Defence: Fianchetto Variation",
FEN("rnbqk2r/ppp1ppbp/3p1np1/8/2PP4/2N2NP1/PP2PP1P/R1BQKB1R b KQkq - 0 5"),
"King's_Indian_Defence#Fianchetto_Variation:_3.Nf3_Bg7_4.g3",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. Nf3 d6 5. g3"
),
StartingPosition(
"E76",
"King's Indian Defence: Four Pawns Attack",
FEN("rnbqk2r/ppp1ppbp/3p1np1/8/2PPPP2/2N5/PP4PP/R1BQKBNR b KQkq - 0 5"),
"King%27s_Indian_Defence,_Four_Pawns_Attack",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. e4 d6 5. f4"
),
StartingPosition(
"E91",
"King's Indian Defence: Classical Variation",
FEN("rnbq1rk1/ppp1ppbp/3p1np1/8/2PPP3/2N2N2/PP2BPPP/R1BQK2R b KQ - 3 6"),
"King's_Indian_Defence#Classical_Variation:_5.Nf3_0-0_6.Be2_e5",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. e4 d6 5. Nf3 O-O 6. Be2"
),
StartingPosition(
"E80",
"King's Indian Defence: Sämisch Variation",
FEN("rnbqk2r/ppp1ppbp/3p1np1/8/2PPP3/2N2P2/PP4PP/R1BQKBNR b KQkq - 0 5"),
"King's_Indian_Defence#S.C3.A4misch_Variation:_5.f3",
"1. d4 Nf6 2. c4 g6 3. Nc3 Bg7 4. e4 d6 5. f3"
),
StartingPosition(
"A41",
"Queens's Pawn Game: Modern Defence",
FEN("rnbqk1nr/ppp1ppbp/3p2p1/8/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 2 4"),
"Queen's_Pawn_Game#1...g6",
"1. d4 g6 2. c4 d6 3. Nc3 Bg7"
),
StartingPosition(
"E20",
"Nimzo-Indian Defence",
FEN("rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 2 4"),
"Nimzo-Indian_Defence",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4"
),
StartingPosition(
"E32",
"Nimzo-Indian Defence: Classical Variation",
FEN("rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N5/PPQ1PPPP/R1B1KBNR b KQkq - 3 4"),
"Nimzo-Indian_Defence#Classical_Variation:_4.Qc2",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. Qc2"
),
StartingPosition(
"E43",
"Nimzo-Indian Defence: Fischer Variation",
FEN("rnbqk2r/p1pp1ppp/1p2pn2/8/1bPP4/2N1P3/PP3PPP/R1BQKBNR w KQkq - 0 5"),
"Nimzo-Indian_Defence#4...b6",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. e3 b6"
),
StartingPosition(
"E41",
"Nimzo-Indian Defence: Hübner Variation",
FEN("r1bqk2r/pp3ppp/2nppn2/2p5/2PP4/2PBPN2/P4PPP/R1BQK2R w KQkq - 0 8"),
"Nimzo-Indian_Defence#4...c5",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. e3 c5 5. Bd3 Nc6 6. Nf3 Bxc3+ 7. bxc3 d6"
),
StartingPosition(
"E21",
"Nimzo-Indian Defence: Kasparov Variation",
FEN("rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N2N2/PP2PPPP/R1BQKB1R b KQkq - 3 4"),
"Nimzo-Indian_Defence#Kasparov_Variation:_4.Nf3",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. Nf3"
),
StartingPosition(
"E30",
"Nimzo-Indian Defence: Leningrad Variation",
FEN("rnbqk2r/pppp1ppp/4pn2/6B1/1bPP4/2N5/PP2PPPP/R2QKBNR b KQkq - 3 4"),
"Nimzo-Indian_Defence#Other_variations",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. Bg5"
),
StartingPosition(
"E26",
"Nimzo-Indian Defence: Sämisch Variation",
FEN("rnbqk2r/pppp1ppp/4pn2/8/2PP4/P1P5/4PPPP/R1BQKBNR b KQkq - 0 5"),
"Nimzo-Indian_Defence#Other_variations",
"1. d4 Nf6 2. c4 e6 3. Nc3 Bb4 4. a3 Bxc3+ 5. bxc3"
),
StartingPosition(
"A53",
"Old Indian Defence",
FEN("rnbqkb1r/ppp1pppp/3p1n2/8/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"Old_Indian_Defense",
"1. d4 Nf6 2. c4 d6"
),
StartingPosition(
"D06",
"Queen's Gambit",
FEN("rnbqkbnr/ppp1pppp/8/3p4/2PP4/8/PP2PPPP/RNBQKBNR b KQkq - 0 2"),
"Queen's_Gambit",
"1. d4 d5 2. c4"
),
StartingPosition(
"D20",
"Queen's Gambit Accepted",
FEN("rnbqkbnr/ppp1pppp/8/8/2pP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"Queen%27s_Gambit_Accepted",
"1. d4 d5 2. c4 dxc4"
),
StartingPosition(
"D43",
"Queen's Gambit Declined: Semi-Slav Defence",
FEN("rnbqkb1r/pp3ppp/2p1pn2/3p4/2PP4/2N2N2/PP2PPPP/R1BQKB1R w KQkq - 0 5"),
"Semi-Slav_Defense",
"1. d4 d5 2. c4 e6 3. Nc3 Nf6 4. Nf3 c6"
),
StartingPosition(
"D10",
"Queen's Gambit Declined: Slav Defence",
FEN("rnbqkbnr/pp2pppp/2p5/3p4/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"Slav_Defense",
"1. d4 d5 2. c4 c6"
),
StartingPosition(
"D40",
"Queen's Gambit Declined: Semi-Tarrasch Defence",
FEN("rnbqkb1r/pp3ppp/4pn2/2pp4/2PP4/2N2N2/PP2PPPP/R1BQKB1R w KQkq - 0 5"),
"Tarrasch_Defense#Semi-Tarrasch_Defense",
"1. d4 d5 2. c4 e6 3. Nc3 Nf6 4. Nf3 c5"
),
StartingPosition(
"D32",
"Queen's Gambit Declined: Tarrasch Defence",
FEN("rnbqkbnr/pp3ppp/4p3/2pp4/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 0 4"),
"Tarrasch_Defense",
"1. d4 d5 2. c4 e6 3. Nc3 c5"
),
StartingPosition(
"D08",
"Queen's Gambit: Albin Countergambit",
FEN("rnbqkbnr/ppp2ppp/8/3pp3/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"Albin_Countergambit",
"1. d4 d5 2. c4 e5"
),
StartingPosition(
"D07",
"Queen's Gambit: Chigorin Defence",
FEN("r1bqkbnr/ppp1pppp/2n5/3p4/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 1 3"),
"Chigorin_Defense",
"1. d4 d5 2. c4 Nc6"
),
StartingPosition(
"E12",
"Queen's Indian Defence",
FEN("rnbqkb1r/p1pp1ppp/1p2pn2/8/2PP4/5N2/PP2PPPP/RNBQKB1R w KQkq - 0 4"),
"Queen's_Indian_Defense",
"1. d4 Nf6 2. c4 e6 3. Nf3 b6"
),
StartingPosition(
"D02",
"London System",
FEN("rnbqkb1r/ppp1pppp/5n2/3p4/3P1B2/5N2/PPP1PPPP/RN1QKB1R b KQkq - 3 3"),
"London_System",
"1. d4 d5 2. Nf3 Nf6 3. Bf4"
),
StartingPosition(
"D00",
"London System: Mason Attack",
FEN("rnbqkbnr/ppp1pppp/8/3p4/3P1B2/8/PPP1PPPP/RN1QKBNR b KQkq - 1 2"),
"London_System",
"1. d4 d5 2. Bf4"
),
StartingPosition(
"D01",
"Rapport-Jobava System",
FEN("rnbqkb1r/ppp1pppp/5n2/3p4/3P1B2/2N5/PPP1PPPP/R2QKBNR b KQkq - 3 3"),
"London_System",
"1. d4 d5 2. Nc3 Nf6 3. Bf4"
),
StartingPosition(
"D03",
"Torre Attack",
FEN("rnbqkb1r/ppp1pppp/5n2/3p2B1/3P4/5N2/PPP1PPPP/RN1QKB1R b KQkq - 3 3"),
"Torre_Attack",
"1. d4 d5 2. Nf3 Nf6 3. Bg5"
),
StartingPosition(
"D01",
"Richter-Veresov Attack",
FEN("rnbqkb1r/ppp1pppp/5n2/3p2B1/3P4/2N5/PPP1PPPP/R2QKBNR b KQkq - 3 3"),
"Richter-Veresov_Attack",
"1. d4 d5 2. Nc3 Nf6 3. Bg5"
),
StartingPosition(
"A52",
"Budapest Defence",
FEN("rnbqkb1r/pppp1ppp/5n2/4p3/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3"),
"Budapest_Gambit",
"1. d4 Nf6 2. c4 e5",
featurable = false
),
StartingPosition(
"D00",
"Closed Game",
FEN("rnbqkbnr/ppp1pppp/8/3p4/3P4/8/PPP1PPPP/RNBQKBNR w KQkq - 0 2"),
"Closed_Game",
"1. d4 d5",
featurable = false
),
StartingPosition(
"A45",
"Trompowsky Attack",
FEN("rnbqkb1r/pppppppp/5n2/6B1/3P4/8/PPP1PPPP/RN1QKBNR b KQkq - 2 2"),
"Trompowsky_Attack",
"1. d4 Nf6 2. Bg5"
)
)
),
Category(
"Nf3",
List(
StartingPosition(
"A04",
"Zukertort Opening",
FEN("rnbqkbnr/pppppppp/8/8/8/5N2/PPPPPPPP/RNBQKB1R b KQkq - 1 1"),
"Zukertort_Opening",
"1. Nf3",
featurable = false
),
StartingPosition(
"A07",
"King's Indian Attack",
FEN("rnbqkbnr/ppp1pppp/8/3p4/8/5NP1/PPPPPP1P/RNBQKB1R b KQkq - 0 2"),
"King's_Indian_Attack",
"1. Nf3 d5 2. g3"
),
StartingPosition(
"A09",
"Réti Opening",
FEN("rnbqkbnr/ppp1pppp/8/3p4/2P5/5N2/PP1PPPPP/RNBQKB1R b KQkq - 0 2"),
"Réti_Opening",
"1. Nf3 d5 2. c4"
)
)
),
Category(
"c4",
List(
StartingPosition(
"A10",
"English Opening",
FEN("rnbqkbnr/pppppppp/8/8/2P5/8/PP1PPPPP/RNBQKBNR b KQkq - 0 1"),
"English_Opening",
"1. c4",
featurable = false
),
StartingPosition(
"A20",
"English Opening: Reversed Sicilian",
FEN("rnbqkbnr/pppp1ppp/8/4p3/2P5/8/PP1PPPPP/RNBQKBNR w KQkq - 0 2"),
"English_Opening",
"1. c4 e5"
),
StartingPosition(
"A30",
"English Opening: Symmetrical Variation",
FEN("rnbqkbnr/pp1ppppp/8/2p5/2P5/8/PP1PPPPP/RNBQKBNR w KQkq - 0 2"),
"English_Opening",
"1. c4 c5"
),
StartingPosition(
"A26",
"English Opening: Closed System",
FEN("r1bqk1nr/ppp2pbp/2np2p1/4p3/2P5/2NP2P1/PP2PPBP/R1BQK1NR w KQkq - 0 6"),
"English_Opening",
"1. c4 e5 2. Nc3 Nc6 3. g3 g6 4. Bg2 Bg7 5. d3 d6"
)
)
),
Category(
"b3",
List(
StartingPosition(
"A01",
"Nimzo-Larsen Attack",
FEN("rnbqkbnr/pppppppp/8/8/8/1P6/P1PPPPPP/RNBQKBNR b KQkq - 0 1"),
"Larsen's_Opening",
"1. b3",
featurable = false
)
)
),
Category(
"b4",
List(
StartingPosition(
"A00",
"Sokolsky Opening",
FEN("rnbqkbnr/pppppppp/8/8/1P6/8/P1PPPPPP/RNBQKBNR b KQkq - 0 1"),
"Sokolsky_Opening",
"1. b4",
featurable = false
)
)
),
Category(
"f4",
List(
StartingPosition(
"A02",
"Bird's Opening",
FEN("rnbqkbnr/pppppppp/8/8/5P2/8/PPPPP1PP/RNBQKBNR b KQkq - 0 1"),
"Bird's_Opening",
"1. f4"
),
StartingPosition(
"A02",
"Bird's Opening: Dutch Variation",
FEN("rnbqkbnr/ppp1pppp/8/3p4/5P2/8/PPPPP1PP/RNBQKBNR w KQkq - 0 2"),
"Bird's_Opening",
"1. f4 d5"
)
)
),
Category(
"g3",
List(
StartingPosition(
"A00",
"Hungarian Opening",
FEN("rnbqkbnr/pppppppp/8/8/8/6P1/PPPPPP1P/RNBQKBNR b KQkq - 0 1"),
"King's_Fianchetto_Opening",
"1. g3",
featurable = false
)
)
)
)
  /** Every catalogued position, flattened in category order. */
  val all: IndexedSeq[StartingPosition] = categories.flatMap(_.positions).toIndexedSeq

  /** Sentinel entry representing the standard initial position. */
  val initial = StartingPosition("---", "Initial position", format.Forsyth.initial, "Chess", "")

  def allWithInitial = initial +: all

  // Fixed seed so the shuffled rotation of featurable positions is
  // stable across process restarts.
  lazy val featurable = new scala.util.Random(475591).shuffle(all.filter(_.featurable))

  /** A uniformly random featurable position. */
  def randomFeaturable = featurable(scala.util.Random.nextInt(featurable.size))
  /** Hand-picked positions surfaced for special occasions. */
  object presets {
    // Featured around Halloween.
    val halloween = StartingPosition(
      "C47",
      "Halloween Gambit",
      FEN("r1bqkb1r/pppp1ppp/2n2n2/4N3/4P3/2N5/PPPP1PPP/R1BQKB1R b KQkq - 0 4"),
      "Halloween_Gambit",
      "1. e4 e5 2. Nf3 Nc6 3. Nc3 Nf6 4. Nxe5"
    )
    val frankenstein = StartingPosition(
      "C27",
      "Frankenstein-Dracula Variation",
      FEN("rnbqkb1r/pppp1ppp/8/4p3/2B1n3/2N5/PPPP1PPP/R1BQK1NR w KQkq - 0 4"),
      "Frankenstein-Dracula_Variation",
      "1. e4 e5 2. Nc3 Nf6 3. Bc4 Nxe4"
    )
  }
}
| ornicar/scalachess | src/main/scala/StartingPosition.scala | Scala | mit | 39,231 |
package com.harrys.orika
import ma.glasnost.orika.impl.DefaultMapperFactory
import ma.glasnost.orika.{Converter, MapperFactory}
/** Helpers for building Orika mapper factories preloaded with the
  * project's default converters.
  */
object OrikaUtils {

  /** All converters registered by default: the Scala collection/option
    * converters plus the date-time converters.
    */
  def defaultConverters: Seq[Converter[_, _]] =
    ScalaConverters.scalaConverters ++ DateTimeConverters.dateTimeConverters

  /** Builds a mapper factory that maps nulls and already has every
    * default converter registered.
    */
  def createDefaultMapperFactory(): MapperFactory = {
    val factory = new DefaultMapperFactory.Builder().mapNulls(true).build()
    registerDefaultConverters(factory)
  }

  /** Registers every default converter on `factory` and returns it,
    * allowing the call to be chained.
    */
  def registerDefaultConverters(factory: MapperFactory): MapperFactory = {
    val converterFactory = factory.getConverterFactory
    defaultConverters.foreach(c => converterFactory.registerConverter(c))
    factory
  }
}
| harrystech/ingestion-utils | src/main/scala/com/harrys/orika/OrikaUtils.scala | Scala | mit | 670 |
package yokohama.holdem
import akka.actor.{ PossiblyHarmful, Props }
import scopt.OptionParser
// Accumulator for parsed CLI arguments; the defaults ("" and -1)
// mark values that were not supplied on the command line.
case class ArgConf(action: String = "", playerCount: Int = -1, playerName: String = "")
/** Command-line front end: parses raw argument arrays into typed
  * [[Command]] values using a scopt parser.
  */
trait Terminal {

  // PossiblyHarmful is only a marker trait (from Akka).
  sealed trait Command extends PossiblyHarmful

  /** The commands this terminal can produce. */
  object Command {
    /** Register `count` players named `name`, built from `props`. */
    case class Register(name: String, props: Props, count: Int = 1) extends Command
    /** Request a system shutdown. */
    case object Shutdown extends Command
    /** Anything that could not be parsed, with a diagnostic message. */
    case class Unknown(command: String, message: String) extends Command

    def apply(command: Array[String]): Command =
      CommandParser.parseAsCommand(command)
  }

  object CommandParser {

    // Accepted grammar, e.g.:
    //   register -n testuser -c 5    (register five players)
    //   shutdown
    val parser = new OptionParser[ArgConf]("yokohama-holdem") {

      help("help") text("prints this usage message")

      cmd("register") action { (_, argConf) =>
        argConf.copy(action = "register")
      } text("register is a command") children (
        opt[String]("register name") abbr("n") action { (n,argConf) =>
          argConf.copy(playerName = n)
        } text("playername is a string"),
        opt[Int]("playercount") abbr("c") action { (c,argConf) =>
          argConf.copy(playerCount = c)
        } text("playercount is a number"),
        // A register invocation must supply both a name and a count.
        checkConfig { argConf =>
          if(argConf.playerName != "" && argConf.playerCount != -1) {
            success
          } else {
            failure("invalid register options")
          }
        }
      )

      cmd("shutdown") action { (_, argConf) =>
        argConf.copy(action = "shutdown")
      } text("shutting down")
    }

    /** Parses `args` into a [[Command]]; anything scopt rejects or that
      * does not match a known action becomes [[Command.Unknown]].
      */
    def parseAsCommand(args: Array[String]): Command = {
      val optionConf = parser.parse(args, ArgConf())
      optionConf match {
        case Some(ArgConf("register", _, _)) =>
          val regConf = optionConf.get
          new Command.Register(regConf.playerName, Props(new Player()), regConf.playerCount)
        case Some(ArgConf("shutdown",_,_)) =>
          Command.Shutdown
        case Some(argConf) =>
          new Command.Unknown("incomplete", "this looks like a valid argument, but something is not quite right")
        case None =>
          new Command.Unknown("something", "something else")
      }
    }
  }
}
| jeffusan/yokohama-holdem | src/main/scala/yokohama/holdem/Terminal.scala | Scala | gpl-2.0 | 2,366 |
// NOTE(review): this looks like a compiler test exercising override
// resolution and singleton types — keep the code byte-identical.
trait A extends AnyRef {
  def f = 1;
  // abstract value member, refined to a singleton type in C below
  val x: A;
}
trait B extends AnyRef {
  def f = 2;
}
trait C extends AnyRef with A with B {
  // disambiguate the two inherited f's with a static super reference
  override def f = super[B].f;
  val a: A;
  // path-dependent: x has the singleton type of this.a
  val x: a.type = a;
}
| yusuke2255/dotty | tests/untried/pos/override.scala | Scala | bsd-3-clause | 199 |
package lila.analyse
import chess.Color
import chess.format.Nag
import org.joda.time.DateTime
case class Analysis(
id: String,
infos: List[Info],
done: Boolean,
date: DateTime) {
lazy val infoAdvices: InfoAdvices = {
(Info.start :: infos) sliding 2 collect {
case List(prev, info) => info -> {
info.hasVariation ?? Advice(prev, info)
}
}
}.toList
lazy val advices: List[Advice] = infoAdvices.map(_._2).flatten
// ply -> UCI
def bestMoves: Map[Int, String] = (infos map { i =>
i.best map { b => i.ply -> b.keys }
}).flatten.toMap
def complete(infos: List[Info]) = copy(
infos = infos,
done = true)
def encode: RawAnalysis = RawAnalysis(id, encodeInfos, done, date)
private def encodeInfos = Info encodeList infos
def summary: List[(Color, List[(Nag, Int)])] = Color.all map { color =>
color -> (Nag.badOnes map { nag =>
nag -> (advices count { adv =>
adv.color == color && adv.nag == nag
})
})
}
def valid = encodeInfos.replace(";", "").nonEmpty
def stalled = (done && !valid) || (!done && date.isBefore(DateTime.now minusHours 2))
def nbEmptyInfos = infos.count(_.isEmpty)
def emptyRatio: Double = nbEmptyInfos.toDouble / infos.size
}
object Analysis {
import lila.db.JsTube, JsTube.Helpers._
import play.api.libs.json._
private[analyse] lazy val tube = JsTube(
reader = (__.json update readDate('date)) andThen Reads[Analysis](js =>
~(for {
obj ← js.asOpt[JsObject]
rawAnalysis ← RawAnalysis.tube.read(obj).asOpt
analysis ← rawAnalysis.decode
} yield JsSuccess(analysis): JsResult[Analysis])
),
writer = Writes[Analysis](analysis =>
RawAnalysis.tube.write(analysis.encode) getOrElse JsUndefined("[db] Can't write analysis " + analysis.id)
) andThen (__.json update writeDate('date))
)
}
private[analyse] case class RawAnalysis(
id: String,
data: String,
done: Boolean,
date: DateTime) {
def decode: Option[Analysis] = (done, data) match {
case (true, "") => new Analysis(id, Nil, false, date).some
case (true, d) => Info decodeList d map { new Analysis(id, _, done, date) }
case (false, _) => new Analysis(id, Nil, false, date).some
}
}
private[analyse] object RawAnalysis {
import lila.db.JsTube
import JsTube.Helpers._
import play.api.libs.json._
private def defaults = Json.obj(
"data" -> "",
"done" -> false)
private[analyse] lazy val tube = JsTube(
(__.json update merge(defaults)) andThen Json.reads[RawAnalysis],
Json.writes[RawAnalysis])
}
| danilovsergey/i-bur | modules/analyse/src/main/Analysis.scala | Scala | mit | 2,615 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import scala.collection.Seq
/**
* @author Alexander Podkhalyuzin
* Date: 06.03.2008
*/
class ScPrefixExprImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScPrefixExpr {
override def toString: String = "PrefixExpression"
def argumentExpressions: Seq[ScExpression] = Seq.empty
def getInvokedExpr: ScExpression = operation
override def accept(visitor: ScalaElementVisitor) {
visitor.visitPrefixExpression(this)
}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case visitor: ScalaElementVisitor => visitor.visitPrefixExpression(this)
case _ => super.accept(visitor)
}
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScPrefixExprImpl.scala | Scala | apache-2.0 | 928 |
object Test {
// First example with a concrete type <: AnyVal
def main(args: Array[String]): Unit = {
val coll = new java.util.ArrayList[Int]()
java.util.Collections.addAll(coll, 5, 6)
println(coll.size())
foo(5, 6)
}
// Second example with an abstract type not known to be <: AnyRef
def foo[A](a1: A, a2: A): Unit = {
val coll = new java.util.ArrayList[A]()
java.util.Collections.addAll(coll, a1, a2)
println(coll.size())
}
}
| dotty-staging/dotty | tests/run/i9439.scala | Scala | apache-2.0 | 469 |
package com.github.mdr.mash.ns.http
import com.github.mdr.mash.functions.{ BoundParams, Parameter }
import com.github.mdr.mash.ns.core.NoArgFunction._
import com.github.mdr.mash.ns.http.HttpFunctions.Params._
import com.github.mdr.mash.runtime.{ MashBoolean, MashObject, MashString }
object HttpFunctions {
object Params {
val Url = Parameter(
nameOpt = Some("url"),
summaryOpt = Some("URL to send request to"))
val Body = Parameter(
nameOpt = Some("body"),
summaryOpt = Some("Body of request"),
defaultValueGeneratorOpt = Some(MashString("")))
val File = Parameter(
nameOpt = Some("file"),
summaryOpt = Some("Whether the body should be fetched from a file (default false)"),
descriptionOpt = Some(
"""If true, the body parameter will be interpreted as a path, and the contents of the file at that location used as the body.
|If false, the body will be used directly.""".stripMargin),
shortFlagOpt = Some('f'),
isFlag = true,
defaultValueGeneratorOpt = Some(false),
isBooleanFlag = true)
val BasicAuth = Parameter(
nameOpt = Some("basicAuth"),
summaryOpt = Some("Basic authentication"),
descriptionOpt = Some(
"""Must provide the username and password either as a colon-separated String:
|<mash>
| --basicAuth='username:password'
|</mash>
|or an object of the form:
|<mash>
| --basicAuth={ username: 'username', password: 'password' }
|</mash>""".stripMargin),
defaultValueGeneratorOpt = Some(NoArgValue),
isFlag = true)
val Headers = Parameter(
nameOpt = Some("headers"),
summaryOpt = Some("Headers to add to request"),
descriptionOpt = Some(
"""Headers can be provided either as an object or a list of name/value pairs:
|<mash>
| --headers={ header1: value }
| --headers=["header1:value", "header2:value"]
| --headers=[{ name: "header1", value: "value" }]
|</mash>""".stripMargin),
isFlag = true,
defaultValueGeneratorOpt = Some(NoArgValue))
val Cookies = Parameter(
nameOpt = Some("cookies"),
summaryOpt = Some("Cookies to add to the request"),
descriptionOpt = Some(
"""Cookies can be provided as an object:
|<mash>
| --cookies={ sessionId: 'xyz' }
|</mash>""".stripMargin),
isFlag = true,
defaultValueGeneratorOpt = Some(NoArgValue))
val QueryParams = Parameter(
nameOpt = Some("queryParams"),
summaryOpt = Some("Query parameters to add to the URL"),
descriptionOpt = Some(
"""Examples:
|<mash>
| --queryParams={ name: "value" }
|</mash>""".stripMargin),
isFlag = true,
defaultValueGeneratorOpt = Some(MashObject.empty))
val Json = Parameter(
nameOpt = Some("json"),
summaryOpt = Some("Whether to send the body as JSON (default false)"),
shortFlagOpt = Some('j'),
isFlag = true,
defaultValueGeneratorOpt = Some(false),
isBooleanFlag = true)
val Form = Parameter(
nameOpt = Some("form"),
summaryOpt = Some("Whether to send the body as a URL-encoded form (default false)"),
isFlag = true,
defaultValueGeneratorOpt = Some(false),
isBooleanFlag = true)
}
def getBodySource(boundParams: BoundParams): BodySource =
if (boundParams(File).isTruthy)
BodySource.File(boundParams.validatePath(Body))
else
BodySource.Value(boundParams(Body))
}
| mdr/mash | src/main/scala/com/github/mdr/mash/ns/http/HttpFunctions.scala | Scala | mit | 3,608 |
/*
* Copyright (c) 2015. All rights reserved. Author flankerhe@keencloudtech.
*/
package org.k33nteam.jade.solver
import org.k33nteam.jade.solver.Types.IntentInfo
import soot.{EquivalentValue, Body, Local, Value}
import soot.jimple._
import soot.toolkits.graph._
import soot.toolkits.scalar._
import scala.collection.JavaConversions._
import scala.collection.immutable.HashMap
import scala.collection.immutable
import scala.collection.mutable
import scala.collection.mutable.Queue
import scala.collection.mutable.ListBuffer
import org.k33nteam.jade.helpers.SyntaxHelpers._
/**
* Created by hqd on 3/12/15.
* Intra-analysis to collect intent infos.
* This is a preserve-style analysis, which means if one-path is implicit, another-path is explicit, will regard as implicit
* Not used currently, substituted by scanIntra
*
* TODO
*/
object Types{
type IntentInfo = mutable.HashMap[immutable.HashSet[Value], immutable.HashSet[IntentMeta]]
}
class IPCBackwardIntentTrackAnalysis (graph: DirectedGraph[soot.Unit]) extends ForwardFlowAnalysis[soot.Unit, IntentInfo](graph)
{
val emptySet = new ArraySparseSet[Local]()
doAnalysis()
private def wrapRef(value:Value): Value =
{
value match {
case _:Ref => new EquivalentValue(value)
case _ => value
}
}
override protected def flowThrough(in: IntentInfo, d: soot.Unit, out: IntentInfo): Unit = {
/*println("###")
println("flowing through " + d)
println("in: " + in)
println("out: " + in)
println("***")*/
out.clear()
out ++= in
val stmt = d.asInstanceOf[Stmt]
def killValue(value: Value): Unit = {
//kills left, generate a new one
val leftSetOp = in.keys.find(_.contains(value))
if(leftSetOp.isDefined)
{
out.remove(leftSetOp.get)
}
}
def updateLocalMetas(base: Local, updateFunc: IntentMeta=>IntentMeta): Unit ={
val leftInSetOp = in.keys.find(_.contains(base))
//println("in update local meta")
//println("leftInSetOp: " + leftInSetOp)
// println("base: " + base)
leftInSetOp.foreach(localkeys => {
val newMetas = in.get(localkeys).get.map(updateFunc)
//println("newMetas: " + newMetas)
out.remove(localkeys)
out.put(localkeys, newMetas)
})
}
def processIIE(iie: InstanceInvokeExpr): Unit ={
val w = updateLocalMetas(iie.getBase.asInstanceOf[Local], _: IntentMeta=>IntentMeta)
(iie.getMethod.getName,iie.getMethod.getSignature) match {
case ("setComponent" | "setClass" | "setPackage" | "setClassName", _) => w(_.copy(isImplicit = false))
case ("<init>", "<android.content.Intent: void <init>(android.content.Context,java.lang.Class)>" |
"<android.content.Intent: void <init>(java.lang.String,android.net.Uri,android.content.Context,java.lang.Class)>") => w(_.copy(isImplicit = false))
case ("<init>", "<android.content.Intent: void <init>(java.lang.String)>") =>w(_.copy(hasAction = true))
//case ("putExtra", _) => hasExtras = true
case ("setAction", _) => w(im => {im.copy(hasAction = true)})
case (_, _) =>
}
}
if(stmt.containsInvokeExpr() && stmt.isInstanceOf[InvokeStmt])
{
stmt.getInvokeExpr match {
case iie:InstanceInvokeExpr => {
processIIE(iie)
}
case _ =>
}//end invokeExpr check
}
stmt match {
case assignStmt:AssignStmt => {
(assignStmt.getLeftOp, assignStmt.getRightOp) match {
case p @ (_:Local|_:Ref, _:Local|_:Ref) => {
//e.g. $r1 = $r2, kills $r1, add $r1 to $r2 set,
//we assume $r2 has appeared before
def processAssignValue(leftV: Value, rightV: Value): Unit ={
//println("in process Assign Value")
val left:Value = wrapRef(leftV)
val right:Value = wrapRef(rightV)
in.keys.find(_.contains(right)).foreach(rightKeys => {
in.keys.find(_.contains(left)).foreach(leftKeys => {
out.remove(leftKeys)
val newKeys = leftKeys - left
newKeys.size match {
case 0 =>
case _ => in.get(leftKeys).foreach(out.put(newKeys,_))
}
})
in.get(rightKeys).foreach(oldMetas => {
out.remove(rightKeys)
out.put(rightKeys + left, oldMetas)
})
})
}
processAssignValue(p._1, p._2)
}
case (left:Local, right:NewExpr) => {
//kills left
killValue(left)
if(right.getType.toString == "android.content.Intent")
{
//generate a new one
out.put(immutable.HashSet(left), immutable.HashSet(IntentMeta()))
}
}
case (left:Local, right:InvokeExpr) => {
//kills left
killValue(left)
if(right.getMethod.getReturnType.toString == "android.content.Intent")
{
//generate, assume a brand new intent
out.put(immutable.HashSet(left), immutable.HashSet(IntentMeta()))
}
}
case (left:Local, _) => {
//???a constant? do kill
killValue(left)
}
case (_,_) => {
//???what is left?? pass
}
}
}
case _ =>
} //end stmt assign check
//println("after " + d)
//println("in: " + in)
//println("out: " + out)
}
override protected def merge(in1: IntentInfo, in2: IntentInfo, out: IntentInfo): Unit = {
//union-style merge, preserve on *any*
//println("merging in1:" +in1)
//println("merging in2:" +in2)
out.clear()
//merge IntentMetas
out ++= in1
in2.foreach{case(key,value) => {
out.put(key, value ++ out.getOrElse(key,Set()))
}}
}
override protected def newInitialFlow(): IntentInfo = {
new IntentInfo()
}
override protected def copy(source: IntentInfo, dest: IntentInfo): Unit = {
dest.clear()
dest ++= source
}
override protected def entryInitialFlow(): IntentInfo = {
new IntentInfo()
}
}
case class IntentMeta(isImplicit:Boolean = true, hasExtras:Boolean = false, hasAction:Boolean = false, attrs:Map[String,String] = new HashMap())
object IPCAnalysisIntra {
final val DEBUG = false
val IPC_METHODS = Map(
"sendBroadcast" -> 0,
"sendBroadcastAsUser" ->0,
"sendOrderedBroadcast"->0,
"sendStickyBroadcast"->0,
"sendStickyBroadcastAsUser"->0,
"sendStickyOrderedBroadcast"->0,
"startActivity"->0,
"startService"->0,
"getActivity"->2,
"getBroadcast"->2
)
def scanIntra(body:Body): Map[Stmt, IntentMeta] = {
val graph = new BriefUnitGraph(body)
val defAnalysis = SmartLocalDefsPool.v().getSmartLocalDefsFor(body)
val useanalysis = new SimpleLocalUses(graph, defAnalysis)
body.getUnits.filter(p => {
p.containsInvokeExpr() && stmtCallMayUsesIntent(p.getInvokeExpr, true) != -1
}).map(u => u.asInstanceOf[Stmt]->getIntentMeta(stmtCallMayUsesIntent(u.getInvokeExpr, true), u, useanalysis, defAnalysis)).toMap
}
private def stmtCallMayUsesIntent(invokeExpr:InvokeExpr, frameworkMethodOnly: Boolean =false): Int ={
//returns calling arg position, -1 means not-intent related
IPC_METHODS.get(invokeExpr.getMethod().getName).getOrElse({
if(frameworkMethodOnly) -1
else{
//TODO
-1
}
})
}
def getIntentMeta(loc:Int, keyStmt:Stmt, useanalysis:SimpleLocalUses, defAnalysis: SmartLocalDefs): IntentMeta = {
val dominatorAnalysis = new MHGDominatorsFinder(defAnalysis.getGraph)
val keyValue = keyStmt.getInvokeExpr().getArg(stmtCallMayUsesIntent(keyStmt.getInvokeExpr(), true))
val relatedStmts = getRelatedStmts(useanalysis, defAnalysis, dominatorAnalysis, keyStmt, keyValue)
var isImplicit = true
var hasExtras = false
var hasAction = false
//println(relatedStmts)
for(stmt <- relatedStmts)
{
if(stmt.containsInvokeExpr() && stmt.isInstanceOf[InvokeStmt] && stmt.getInvokeExpr.isInstanceOf[InstanceInvokeExpr])
{
(stmt.getInvokeExpr.getMethod.getName,stmt.getInvokeExpr.getMethod.getSignature) match {
case ("setComponent" | "setClass" | "setPackage" | "setClassName", _) => isImplicit = false
case ("<init>", "<android.content.Intent: void <init>(android.content.Context,java.lang.Class)>" |
"<android.content.Intent: void <init>(java.lang.String,android.net.Uri,android.content.Context,java.lang.Class)>") => isImplicit = false
case ("<init>", "<android.content.Intent: void <init>(java.lang.String)>") => hasAction = true
case ("putExtra",_) => hasExtras = true
case ("setAction",_) => hasAction = true
case (_,_) =>
//TODO add more put**listExtra stmts
}
}
}
new IntentMeta(isImplicit, hasExtras, hasAction)
}
def getRelatedStmts(useanalysis:SimpleLocalUses, defAnalysis: SmartLocalDefs, dominatorAnalysis:DominatorsFinder[soot.Unit], keyStmt: soot.Unit, keyValue: soot.Value): List[Stmt] = {
//find all relevent statements
val expr = keyStmt.getInvokeExpr
val loc = stmtCallMayUsesIntent(expr)
if(expr.getArgCount() < loc + 1)
{
//whoa? FUCK same method name as framework method
//println(expr)
return List()
}
if(expr.getArg(loc).getType().toString() != "android.content.Intent")
{
//whoa? FUCK arg type isn't Intent
//println(expr)
return List()
}
//val intent = expr.getArg(stmtCallMayUsesIntent(expr)).asInstanceOf[Local]
val intent = keyValue
if(intent.isInstanceOf[Local])
{
val queue = new Queue[(Local, Stmt)]()
val statements = new ListBuffer[Stmt]()
queue += ((intent.asInstanceOf[Local], keyStmt))
val hashSet = new mutable.HashSet[(Local,Stmt)]()
while (!queue.isEmpty) {
val cur = queue.dequeue()
val defs = ListBuffer[soot.Unit]()
if(cur._2.isInstanceOf[AssignStmt] && cur._2.asInstanceOf[AssignStmt].getLeftOp().equivTo(cur._1))
{
defs += cur._2
}
else
{
defs ++= defAnalysis.getDefsOfAt(cur._1, cur._2)
}
if(DEBUG) {
println(cur)
println(defs)
}
//first use found defs to add to related statements
//check if def statement is an assign statement, if it is, add it to queue
defs.foreach(defstmt => {
//FIXME: dominatorAnalysis does not work in branch situations.
statements ++= useanalysis.getUsesOf(defstmt).map(_.getUnit).map(_.asInstanceOf[Stmt]).filter(dominatorAnalysis.isDominatedBy(keyStmt, _))
//check for assignment at right
//Local and Field should be treated differently TODO
if (defstmt.isInstanceOf[AssignStmt]) {
defstmt.asInstanceOf[AssignStmt].getRightOp match {
case local:Local => {
if(!hashSet.contains((local,defstmt)))
{
queue += ((local, defstmt))
}
hashSet += ((local, defstmt))
}
case fieldRef:FieldRef => {
//use a naive method to deal with FieldRef: just search upward and found assignStmt to field, use the right-side Local!!!
val local = findFieldRefDefLocal(fieldRef, defstmt.asInstanceOf[AssignStmt], defAnalysis.getGraph)
local match {
case Some((local, stmt)) => {
if(!hashSet.contains((local,stmt)))
{
queue += ((local, stmt))
}
hashSet += ((local, stmt))
}
case _ =>
}
}
case _ =>
}
}
})
}
if(DEBUG){
println(statements)
}
statements.toList
}
else {
//the value is a FieldRef mostly, currently for fieldRef we only do assumption
//however how can an invoke expr's arg be a fieldRef?
if(DEBUG)
{
println(expr)
}
new ListBuffer[Stmt]().toList
}
}
def findFieldRefDefLocal(target: FieldRef, stmt: AssignStmt, graph: UnitGraph): Option[(Local,Stmt)] ={
//This is a backward search, found first equal-local of a specific fieldRef
val visited = new mutable.HashSet[soot.Unit]()
val queue = new Queue[soot.Unit]()
queue ++= graph.getPredsOf(stmt)
while(!queue.isEmpty)
{
val cur = queue.dequeue()
if(visited.contains(cur))
{
//seems we're in a loop and still not found. return now
return None
}
visited.add(cur)
cur match {
case assignStmt:AssignStmt => {
//This is for $r1 = <field_ref>
assignStmt.getRightOp match {
case fieldRef:FieldRef => {
if(fieldRef.equivTo(target))
{
//found! return now, should be a local
return Option((assignStmt.getLeftOp().asInstanceOf[Local], cur))
}
}
case _ =>
}
//For <field_ref> = $r1, which is commonly seen in modification on field, like after $r1-><init>
assignStmt.getLeftOp match {
case fieldRef:FieldRef => {
if(fieldRef.equivTo(target))
{
return Option((assignStmt.getRightOp()).asInstanceOf[Local], cur)
}
}
case _ =>
}
}
case _ =>
}
//build queue
queue ++= graph.getPredsOf(cur)
}
None
}
private def isValuableInsts(stmts:List[Stmt]): Boolean ={
// stmts.exists(stmt => {
// if(stmt.containsInvokeExpr() && stmt.isInstanceOf[InvokeStmt] && stmt.getInvokeExpr.isInstanceOf[InstanceInvokeExpr])
// {
// (stmt.getInvokeExpr.getMethod.getName,stmt.getInvokeExpr.getMethod.getSignature) match {
// case ("putExtra", _) => true
// case ("<init>", "<android.content.Intent: void <init>(android.content.Context,java.lang.Class)>" |
// "<android.content.Intent: void <init>(java.lang.String,android.net.Uri,android.content.Context,java.lang.Class)>") => true
// case (_,_) => false
// }
// }
// else{
// false
// }
// })
true
}
} | flankerhqd/JAADAS | jade/src/main/scala/org/k33nteam/jade/solver/IPCAnalysis.scala | Scala | gpl-3.0 | 14,586 |
package mavigator
import akka.actor._
import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model.MediaTypes._
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.ws._
import akka.http.scaladsl.server._
import akka.util._
import akka.stream.scaladsl._
import play.twirl.api.Html
import uav.Uav
object Router {
import Directives._
final val SocketEndpoint = "mavlink"
def withSocketUri: Directive1[Uri] = extractUri.map { uri =>
uri.withScheme("ws").withPath(Path.Empty / SocketEndpoint)
}
def route(implicit system: ActorSystem): Route = (
path("whoami") {
get {
withSocketUri { sock =>
complete(sock.toString)
}
}
} ~
path("cockpit" / IntNumber) { id =>
get {
withSocketUri { socket =>
val html = mavigator.views.html.app(
"Mavigator",
"mavigator_cockpit_Main",
Map(
"socketUrl" -> socket.toString,
"remoteSystemId" -> id.toString
)
)
complete(html)
}
}
} ~
path(SocketEndpoint) {
get {
val fromWebSocket = Flow[Message].collect{
case BinaryMessage.Strict(data) => data
}
val toWebSocket = Flow[ByteString].map{bytes =>
BinaryMessage(bytes)
}
val backend = Uav().connect()
handleWebSocketMessages(fromWebSocket via backend via toWebSocket)
}
} ~
pathEndOrSingleSlash {
get {
withSocketUri { socket =>
val html = mavigator.views.html.app(
"Index",
"mavigator_index_Main",
Map(
"socketUrl" -> socket.toString
)
)
complete(html)
}
}
} ~
pathPrefix("assets") {
get {
encodeResponse {
getFromResourceDirectory("assets")
}
}
}
)
/** Enables completing requests with html. */
implicit val twirlHtml : ToEntityMarshaller[Html] =
Marshaller.StringMarshaller.wrap(`text/html`){ h: Html =>
h.toString
}
}
| project-condor/mavigator | mavigator-server/src/main/scala/mavigator/Router.scala | Scala | gpl-3.0 | 2,277 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import cats.effect._
import cats.laws._
import cats.laws.discipline._
import cats.syntax.all._
import cats.{effect, Eval}
import monix.catnap.SchedulerEffect
import monix.execution.CancelablePromise
import monix.execution.exceptions.DummyException
import monix.execution.internal.Platform
import org.reactivestreams.{Publisher, Subscriber, Subscription}
import scala.concurrent.duration._
import scala.util.{Failure, Success}
object TaskConversionsSuite extends BaseTestSuite {
test("Task.from(task.to[IO]) == task") { implicit s =>
check1 { (task: Task[Int]) =>
Task.from(task.to[IO]) <-> task
}
}
test("Task.from(IO.raiseError(e))") { implicit s =>
val dummy = DummyException("dummy")
val task = Task.from(IO.raiseError(dummy))
assertEquals(task.runToFuture.value, Some(Failure(dummy)))
}
test("Task.from(IO.raiseError(e).shift)") { implicit s =>
val dummy = DummyException("dummy")
val task = Task.from(for (_ <- IO.shift(s); x <- IO.raiseError[Int](dummy)) yield x)
val f = task.runToFuture
assertEquals(f.value, None)
s.tick()
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.now(v).to[IO]") { implicit s =>
assertEquals(Task.now(10).to[IO].unsafeRunSync(), 10)
}
test("Task.raiseError(dummy).to[IO]") { implicit s =>
val dummy = DummyException("dummy")
intercept[DummyException] {
Task.raiseError[Unit](dummy).to[IO].unsafeRunSync()
}
()
}
test("Task.eval(thunk).to[IO]") { implicit s =>
assertEquals(Task.eval(10).to[IO].unsafeRunSync(), 10)
}
test("Task.eval(fa).asyncBoundary.to[IO]") { implicit s =>
val io = Task.eval(1).asyncBoundary.to[IO]
val f = io.unsafeToFuture()
assertEquals(f.value, None); s.tick()
assertEquals(f.value, Some(Success(1)))
}
test("Task.raiseError(dummy).asyncBoundary.to[IO]") { implicit s =>
val dummy = DummyException("dummy")
val io = Task.raiseError[Int](dummy).executeAsync.to[IO]
val f = io.unsafeToFuture()
assertEquals(f.value, None); s.tick()
assertEquals(f.value, Some(Failure(dummy)))
}
test("Task.fromConcurrent(task.toConcurrent[IO]) == task") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
check1 { (task: Task[Int]) =>
Task.fromConcurrentEffect(task.toConcurrent[IO]) <-> task
}
}
test("Task.fromAsync(task.toAsync[IO]) == task") { implicit s =>
check1 { (task: Task[Int]) =>
Task.fromEffect(task.toAsync[IO]) <-> task
}
}
test("Task.fromConcurrent(task) == task") { implicit s =>
val ref = Task.evalAsync(1)
assertEquals(Task.fromConcurrentEffect(ref), ref)
}
test("Task.fromConcurrent(io)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
val f = Task.fromConcurrentEffect(IO(1)).runToFuture
assertEquals(f.value, Some(Success(1)))
val io2 = for (_ <- IO.shift; a <- IO(1)) yield a
val f2 = Task.fromConcurrentEffect(io2).runToFuture
assertEquals(f2.value, None); s.tick()
assertEquals(f2.value, Some(Success(1)))
}
test("Task.fromAsync(Effect)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val ioEffect: Effect[CIO] = new CustomEffect
val f = Task.fromEffect(CIO(IO(1))).runToFuture
assertEquals(f.value, Some(Success(1)))
val io2 = for (_ <- CIO(IO.shift); a <- CIO(IO(1))) yield a
val f2 = Task.fromEffect(io2).runToFuture
assertEquals(f2.value, None); s.tick()
assertEquals(f2.value, Some(Success(1)))
val dummy = DummyException("dummy")
val f3 = Task.fromEffect(CIO(IO.raiseError(dummy))).runToFuture
assertEquals(f3.value, Some(Failure(dummy)))
}
test("Task.fromConcurrent(ConcurrentEffect)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val ioEffect: ConcurrentEffect[CIO] = new CustomConcurrentEffect()
val f = Task.fromConcurrentEffect(CIO(IO(1))).runToFuture
assertEquals(f.value, Some(Success(1)))
val io2 = for (_ <- CIO(IO.shift); a <- CIO(IO(1))) yield a
val f2 = Task.fromConcurrentEffect(io2).runToFuture
assertEquals(f2.value, None); s.tick()
assertEquals(f2.value, Some(Success(1)))
val dummy = DummyException("dummy")
val f3 = Task.fromConcurrentEffect(CIO(IO.raiseError(dummy))).runToFuture
assertEquals(f3.value, Some(Failure(dummy)))
}
test("Task.fromAsync(broken Effect)") { implicit s =>
val dummy = DummyException("dummy")
implicit val ioEffect: Effect[CIO] =
new CustomEffect()(IO.contextShift(s)) {
override def runAsync[A](fa: CIO[A])(cb: (Either[Throwable, A]) => IO[Unit]): SyncIO[Unit] =
throw dummy
}
val f = Task.fromEffect(CIO(IO(1))).runToFuture
s.tick()
assertEquals(f.value, None)
assertEquals(s.state.lastReportedError, dummy)
}
test("Task.fromConcurrent(broken ConcurrentEffect)") { implicit s =>
val dummy = DummyException("dummy")
implicit val ioEffect: ConcurrentEffect[CIO] =
new CustomConcurrentEffect()(IO.contextShift(s)) {
override def runCancelable[A](fa: CIO[A])(cb: Either[Throwable, A] => IO[Unit]): SyncIO[CancelToken[CIO]] =
throw dummy
}
val f = Task.fromConcurrentEffect(CIO(IO(1))).runToFuture
assertEquals(f.value, None); s.tick()
assertEquals(f.value, None)
assertEquals(s.state.lastReportedError, dummy)
}
test("Task.from is cancelable") { implicit s =>
val timer = SchedulerEffect.timerLiftIO[IO](s)
val io = timer.sleep(10.seconds)
val f = Task.from(io).runToFuture
s.tick()
assert(s.state.tasks.nonEmpty, "tasks.nonEmpty")
assertEquals(f.value, None)
f.cancel()
s.tick()
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
assertEquals(f.value, None)
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromConcurrent(io) is cancelable") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
val timer = SchedulerEffect.timer[IO](s)
val io = timer.sleep(10.seconds)
val f = Task.fromConcurrentEffect(io).runToFuture
s.tick()
assert(s.state.tasks.nonEmpty, "tasks.nonEmpty")
assertEquals(f.value, None)
f.cancel()
s.tick()
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
assertEquals(f.value, None)
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromConcurrent(ConcurrentEffect) is cancelable") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val effect: ConcurrentEffect[CIO] = new CustomConcurrentEffect
val timer = SchedulerEffect.timer[CIO](s)
val io = timer.sleep(10.seconds)
val f = Task.fromConcurrentEffect(io)(effect).runToFuture
s.tick()
assert(s.state.tasks.nonEmpty, "tasks.nonEmpty")
assertEquals(f.value, None)
f.cancel()
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
assertEquals(f.value, None)
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromConcurrent(task.to[IO]) preserves cancelability") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
val task0 = Task(1).delayExecution(10.seconds)
val task = Task.fromConcurrentEffect(task0.toConcurrent[IO])
val f = task.runToFuture
s.tick()
assertEquals(f.value, None)
f.cancel()
s.tick()
assertEquals(f.value, None)
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromConcurrent(task.to[CIO]) preserves cancelability") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val effect: ConcurrentEffect[CIO] = new CustomConcurrentEffect
val task0 = Task(1).delayExecution(10.seconds)
val task = Task.fromConcurrentEffect(task0.toConcurrent[CIO])
val f = task.runToFuture
s.tick()
assertEquals(f.value, None)
f.cancel()
s.tick()
assertEquals(f.value, None)
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromAsync(task.to[IO]) preserves cancelability (because IO is known)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
val task0 = Task(1).delayExecution(10.seconds)
val task = Task.fromEffect(task0.toConcurrent[IO])
val f = task.runToFuture
s.tick()
assertEquals(f.value, None)
f.cancel()
s.tick()
assertEquals(f.value, None)
assert(s.state.tasks.isEmpty, "tasks.isEmpty")
s.tick(10.seconds)
assertEquals(f.value, None)
}
test("Task.fromConcurrent(task.toConcurrent[F]) <-> task (Effect)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val effect = new CustomConcurrentEffect
check1 { (task: Task[Int]) =>
Task.fromConcurrentEffect(task.toConcurrent[CIO]) <-> task
}
}
test("Task.fromAsync(task.toAsync[F]) <-> task") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val effect = new CustomEffect
check1 { (task: Task[Int]) =>
Task.fromEffect(task.toAsync[CIO]) <-> task
}
}
test("Task.fromConcurrent(task.to[F]) <-> task (ConcurrentEffect)") { implicit s =>
implicit val cs = SchedulerEffect.contextShift[IO](s)
implicit val effect = new CustomConcurrentEffect
check1 { (task: Task[Int]) =>
Task.fromConcurrentEffect(task.toConcurrent[CIO]) <-> task
}
}
test("Task.from[Eval]") { implicit s =>
var effect = 0
val task = Task.from(Eval.always { effect += 1; effect })
assertEquals(task.runToFuture.value, Some(Success(1)))
assertEquals(task.runToFuture.value, Some(Success(2)))
assertEquals(task.runToFuture.value, Some(Success(3)))
}
test("Task.from[Eval] protects against user error") { implicit s =>
val dummy = DummyException("dummy")
val task = Task.from(Eval.always { throw dummy })
assertEquals(task.runToFuture.value, Some(Failure(dummy)))
}
test("Task.fromCancelablePromise") { implicit s =>
val p = CancelablePromise[Int]()
val task = Task.fromCancelablePromise(p)
val token1 = task.runToFuture
val token2 = task.runToFuture
token1.cancel()
p.success(1)
s.tick()
assertEquals(token2.value, Some(Success(1)))
assertEquals(token1.value, None)
val token3 = task.runToFuture
assertEquals(token3.value, Some(Success(1)))
}
test("Task.fromCancelablePromise stack safety") { implicit s =>
val count = if (Platform.isJVM) 10000 else 1000
val p = CancelablePromise[Int]()
val task = Task.fromCancelablePromise(p)
def loop(n: Int): Task[Int] =
if (n > 0) task.flatMap(_ => loop(n - 1))
else task
val f = loop(count).runToFuture
s.tick()
assertEquals(f.value, None)
p.success(99)
s.tick()
assertEquals(f.value, Some(Success(99)))
val f2 = loop(count).runToFuture
s.tick()
assertEquals(f2.value, Some(Success(99)))
}
// Exceptions thrown by a misbehaving Subscription (in request/cancel) must
// be caught and surfaced as a failed Task.
test("Task.fromReactivePublisher protects against user error") { implicit s =>
  val dummy = DummyException("dummy")
  val pub = new Publisher[Int] {
    def subscribe(s: Subscriber[_ >: Int]): Unit = {
      s.onSubscribe(new Subscription {
        def request(n: Long): Unit = throw dummy
        def cancel(): Unit = throw dummy
      })
    }
  }
  assertEquals(Task.fromReactivePublisher(pub).runToFuture.value, Some(Failure(dummy)))
}
// A publisher emitting a single element then completing yields Some(elem).
test("Task.fromReactivePublisher yields expected input") { implicit s =>
  val pub = new Publisher[Int] {
    def subscribe(s: Subscriber[_ >: Int]): Unit = {
      s.onSubscribe(new Subscription {
        var isActive = true
        def request(n: Long): Unit = {
          if (n > 0 && isActive) {
            isActive = false
            s.onNext(1)
            s.onComplete()
          }
        }
        def cancel(): Unit = {
          isActive = false
        }
      })
    }
  }
  assertEquals(Task.fromReactivePublisher(pub).runToFuture.value, Some(Success(Some(1))))
}
// Round-trip property: converting a Task to a reactive publisher and back
// is equivalent to the original Task (modulo the Option wrapping).
test("Task.fromReactivePublisher <-> task") { implicit s =>
  check1 { task: Task[Int] =>
    Task.fromReactivePublisher(task.toReactivePublisher) <-> task.map(Some(_))
  }
}
// Minimal wrapper around cats-effect IO, used as a "custom" effect type to
// exercise the Task <-> Effect conversion code paths with something other
// than IO itself.
final case class CIO[+A](io: IO[A])
// Effect[CIO] instance that delegates every operation to the wrapped IO.
class CustomEffect(implicit cs: ContextShift[IO]) extends Effect[CIO] {
  override def runAsync[A](fa: CIO[A])(cb: (Either[Throwable, A]) => IO[Unit]): SyncIO[Unit] =
    fa.io.runAsync(cb)
  override def async[A](k: ((Either[Throwable, A]) => Unit) => Unit): CIO[A] =
    CIO(IO.async(k))
  override def asyncF[A](k: ((Either[Throwable, A]) => Unit) => CIO[Unit]): CIO[A] =
    CIO(IO.asyncF(cb => k(cb).io))
  override def suspend[A](thunk: => CIO[A]): CIO[A] =
    CIO(IO.suspend(thunk.io))
  override def flatMap[A, B](fa: CIO[A])(f: (A) => CIO[B]): CIO[B] =
    CIO(fa.io.flatMap(a => f(a).io))
  // tailRecM and handleErrorWith borrow IO's own ConcurrentEffect instance
  // rather than re-implementing the recursion/recovery logic.
  override def tailRecM[A, B](a: A)(f: (A) => CIO[Either[A, B]]): CIO[B] =
    CIO(IO.ioConcurrentEffect.tailRecM(a)(x => f(x).io))
  override def raiseError[A](e: Throwable): CIO[A] =
    CIO(IO.raiseError(e))
  override def handleErrorWith[A](fa: CIO[A])(f: (Throwable) => CIO[A]): CIO[A] =
    CIO(IO.ioConcurrentEffect.handleErrorWith(fa.io)(x => f(x).io))
  override def pure[A](x: A): CIO[A] =
    CIO(IO.pure(x))
  override def liftIO[A](ioa: IO[A]): CIO[A] =
    CIO(ioa)
  override def bracketCase[A, B](acquire: CIO[A])(use: A => CIO[B])(
    release: (A, ExitCase[Throwable]) => CIO[Unit]): CIO[B] =
    CIO(acquire.io.bracketCase(a => use(a).io)((a, e) => release(a, e).io))
}
// Extends the delegating Effect with the ConcurrentEffect operations
// (cancellation, start/racePair), again forwarding everything to IO.
class CustomConcurrentEffect(implicit cs: ContextShift[IO]) extends CustomEffect with ConcurrentEffect[CIO] {
  override def runCancelable[A](fa: CIO[A])(cb: Either[Throwable, A] => IO[Unit]): SyncIO[CancelToken[CIO]] =
    fa.io.runCancelable(cb).map(CIO(_))
  override def cancelable[A](k: (Either[Throwable, A] => Unit) => CancelToken[CIO]): CIO[A] =
    CIO(IO.cancelable(cb => k(cb).io))
  override def uncancelable[A](fa: CIO[A]): CIO[A] =
    CIO(fa.io.uncancelable)
  override def start[A](fa: CIO[A]): CIO[effect.Fiber[CIO, A]] =
    CIO(fa.io.start.map(fiberT))
  override def racePair[A, B](fa: CIO[A], fb: CIO[B]) =
    CIO(IO.racePair(fa.io, fb.io).map {
      case Left((a, fiber)) => Left((a, fiberT(fiber)))
      case Right((fiber, b)) => Right((fiberT(fiber), b))
    })
  // Lifts an IO fiber into a CIO fiber by wrapping its join/cancel actions.
  private def fiberT[A](fiber: effect.Fiber[IO, A]): effect.Fiber[CIO, A] =
    effect.Fiber(CIO(fiber.join), CIO(fiber.cancel))
}
}
| alexandru/monifu | monix-eval/shared/src/test/scala/monix/eval/TaskConversionsSuite.scala | Scala | apache-2.0 | 15,334 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution
package internal
import java.util.concurrent.CompletableFuture
import monix.execution.CancelableFuture
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
// JVM/Java-8-specific additions to FutureUtils: conversions between Scala
// futures and java.util.concurrent.CompletableFuture.
private[execution] abstract class FutureUtilsForPlatform { self =>
  /**
   * Convert any Scala `Future` to Java's `CompletableFuture`.
   *
   * [[CancelableFuture]] instances go through the specialized
   * `CancelableFuture.toJavaCompletable` conversion; plain futures are
   * adapted manually. Note: for the plain-future branch nothing here
   * propagates a cancellation of the returned `CompletableFuture` back
   * to `source` — it only mirrors `source`'s completion.
   */
  def toJavaCompletable[A](source: Future[A])
    (implicit ec: ExecutionContext): CompletableFuture[A] = {
    source match {
      case ref: CancelableFuture[A] @unchecked =>
        CancelableFuture.toJavaCompletable(ref)
      case _ =>
        val cf = new CompletableFuture[A]()
        source.onComplete {
          case Success(a) =>
            cf.complete(a)
          case Failure(ex) =>
            cf.completeExceptionally(ex)
        }
        cf
    }
  }
  /**
   * Convert a Java `CompletableFuture` to a Scala `Future`.
   * (The previous scaladoc described the opposite direction.)
   */
  def fromJavaCompletable[A](cfa: CompletableFuture[A])(implicit ec: ExecutionContext): Future[A] =
    CancelableFuture.fromJavaCompletable(cfa)
  /**
   * Extension methods specific for Java 8 and up.
   */
  implicit final class Java8Extensions[F[T] <: Future[T], A](val source: F[A]) {
    /**
     * Extension method, alias of [[FutureUtils.toJavaCompletable]].
     */
    def asJava(implicit ec: ExecutionContext): CompletableFuture[A] =
      self.toJavaCompletable(source)
  }
}
| Wogan/monix | monix-execution/jvm/src/main/scala_2.12/monix/execution/internal/FutureUtilsForPlatform.scala | Scala | apache-2.0 | 2,077 |
package info.fotm.clustering
import info.fotm.util.MathVector
/**
 * Feature extractors and their starting weights for the clustering model.
 *
 * IMPORTANT: `startingWeights` is paired with `defaultFeatures` strictly by
 * position (see the `zip` in `features` below), so the two lists must stay
 * in the same order.
 */
object FeatureSettings {
  /** Convenience square helper used by the *Sqr features. */
  def sqr(x: Double) = x * x

  // Feature list; note that the WinsRatio pair lists season BEFORE weekly,
  // unlike the wins/losses/total features below it.
  lazy val defaultFeatures = List[Feature[CharacterStatsUpdate]](
    Feature[CharacterStatsUpdate]("rating", u => u.next.rating),
    Feature[CharacterStatsUpdate]("ratingDerivative", u => sqr(u.next.rating - u.prev.rating) / u.prev.rating.toDouble),
    Feature[CharacterStatsUpdate]("ratingDiff", u => Math.abs(u.next.rating - u.prev.rating)),
    Feature[CharacterStatsUpdate]("seasonWinsRatio", u => u.next.season.wins / u.next.season.total.toDouble),
    Feature[CharacterStatsUpdate]("weeklyWinsRatio", u => u.next.weekly.wins / u.next.weekly.total.toDouble),
    Feature[CharacterStatsUpdate]("weeklyTotal", u => u.next.weekly.total),
    Feature[CharacterStatsUpdate]("seasonTotal", u => u.next.season.total),
    Feature[CharacterStatsUpdate]("weeklyWins", u => u.next.weekly.wins),
    Feature[CharacterStatsUpdate]("seasonWins", u => u.next.season.wins),
    Feature[CharacterStatsUpdate]("weeklyLosses", u => u.next.weekly.losses),
    Feature[CharacterStatsUpdate]("seasonLosses", u => u.next.season.losses),
    // ==============
    Feature[CharacterStatsUpdate]("weeklyDiff", u => u.next.weekly.wins - u.next.weekly.losses),
    Feature[CharacterStatsUpdate]("seasonDiff", u => u.next.season.wins - u.next.season.losses),
    Feature[CharacterStatsUpdate]("weeklyDiffSqr", u => sqr(u.next.weekly.wins - u.next.weekly.losses)),
    Feature[CharacterStatsUpdate]("seasonDiffSqr", u => sqr(u.next.season.wins - u.next.season.losses)),
    Feature[CharacterStatsUpdate]("ratingDiffSqr", u => sqr(u.next.rating - u.prev.rating)),
    Feature[CharacterStatsUpdate]("seasonWinsRatioSqr", u => sqr(u.next.season.wins / u.next.season.total.toDouble)),
    Feature[CharacterStatsUpdate]("weeklyWinsRatioSqr", u => sqr(u.next.weekly.wins / u.next.weekly.total.toDouble))
    // Feature.const[CharacterStatsUpdate]
  )

  //lazy val startingWeights = MathVector(defaultFeatures.map(_.weight): _*)

  // NOTE(review): the previous revision labeled positions 4/5 as
  // weeklyWinsRatio=3.0 / seasonWinsRatio=0.0, the reverse of the feature
  // order above, which silently gave seasonWinsRatio the 3.0 weight.
  // Every other season-scoped feature is weighted 0.0 while the weekly
  // ones are non-zero, so the values were evidently swapped; they are
  // fixed below to match the labels' evident intent.
  lazy val startingWeights =
    MathVector(
      12.852989130434821, // rating
      5.968931159420276, // ratingDerivative
      4.141576086956533, // ratingDiff
      0.0, // seasonWinsRatio
      3.0, // weeklyWinsRatio
      3.0, // weeklyTotal
      0.0, // seasonTotal
      2.0, // weeklyWins
      0.0, // seasonWins
      2.0, // weeklyLosses
      0.0, // seasonLosses
      // ========================
      3.0, // weeklyDiff
      1.523460144927535, // seasonDiff
      3.0, // weeklyDiffSqr
      1.936684782608684, // seasonDiffSqr
      8.47282608695654, // ratingDiffSqr
      0.0, // seasonWinsRatioSqr
      1.0) // weeklyWinsRatioSqr

  // Historical weight vectors kept for reference:
  //MathVector(10.0, 4.947290554297682, 3.9245008509864188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5.617074058685279, 0, 6.438833041471343, 3.979327535042576, 0, 0)
  //MathVector(2.8128878640744457,2.365736698172473,2.1150888266780514,1.7327148005022326,0.3537944136973483,0.8186993678222529,0.0,0.6894010972052322,0.0,1.1751600226144494,0.0,0.9090362094228353,2.4500330593300763,0.0,1.140232696173764,1.2359704158062994,1.411336217018037,0.838994409813651)

  // Final features: positional zip of extractors with their weights.
  lazy val features = Feature.reweigh(defaultFeatures.zip(startingWeights.coords)).toList
}
| Groz/fotm-info | core/src/main/scala/info/fotm/clustering/FeatureSettings.scala | Scala | mit | 3,531 |
package unfiltered.mac
import unfiltered.request._
import unfiltered.response._
import scala.language.implicitConversions
object Mac extends Signing {
  /** Challenge the client for MAC credentials with no error detail. */
  def challenge: ResponseFunction[Any] = challenge(None)

  /**
   * Respond 401 with a `WWW-Authenticate: MAC` header, optionally
   * carrying an `error="..."` attribute describing why authorization
   * failed.
   */
  def challenge(err: Option[String]) = {
    val errAttr = err.map(e => """ error="%s"""" format e).getOrElse("")
    Unauthorized ~> WWWAuthenticate("MAC" + errAttr)
  }
}
/** MAC Authorization extractor
* See also http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-00 */
/**
 * Extractor for MAC `Authorization` headers. Parses the comma-separated
 * attribute list and validates the required fields.
 */
object MacAuthorization {
  // Attribute names defined by the MAC draft spec.
  val Id = "id"
  val Nonce = "nonce"
  val BodyHash = "bodyhash"
  val Ext = "ext"
  val MacKey = "mac"

  /** Parses and validates the raw header segments (split on commas). */
  object MacHeader {
    import QParams._
    // nonce is "<age>:<random string>"
    val NonceFormat = """^(\d+)[:](\S+)$""".r
    // key="value" attribute pairs
    val KeyVal = """(\w+)="([\w|=|:|\/|.|%|-|+]+)" """.trim.r
    val keys = Id :: Nonce :: BodyHash :: Ext :: MacKey :: Nil
    val headerSpace = "MAC" + " "
    def unapply(hvals: List[String]) = hvals match {
      case x :: xs if x startsWith headerSpace =>
        // collect recognized key="value" pairs, dropping the "MAC " prefix
        val map = Map(hvals map { _.replace(headerSpace, "") } flatMap {
          case KeyVal(k, v) if(keys.contains(k)) => Seq((k -> Seq(v)))
          case e =>
            Nil
        }: _*)
        // id, nonce and mac are required; bodyhash and ext are optional
        val expect = for {
          id <- lookup(Id) is nonempty("id is empty") is required("id is required")
          nonce <- lookup(Nonce) is nonempty("nonce is empty") is required("nonce is required") is
            pred({NonceFormat.findFirstIn(_).isDefined}, _ + " is an invalid format")
          bodyhash <- lookup(BodyHash) is optional[String, String]
          ext <- lookup(Ext) is optional[String, String]
          // fixed typo in the validation message: was "mac is nempty"
          mac <- lookup(MacKey) is nonempty("mac is empty") is required("mac is required")
        } yield {
          Some(id.get, nonce.get, bodyhash.get, ext.get, mac.get)
        }
        expect(map) orFail { f =>
          None
        }
      case _ => None
    }
  }

  /** @return (id, nonce, Option[bodyhash], Option[ext], mac) */
  def unapply[T](r: HttpRequest[T]) = r match {
    case Authorization(value) =>
      value.split(",").toList match {
        case MacHeader(id, nonce, bodyhash, ext, mac) =>
          Some(id, nonce, bodyhash, ext, mac)
        case p => None
      }
    case _ => None
  }
}
/** MAC request signing as defined by
* http://tools.ietf.org/html/draft-hammer-oauth-v2-mac-token-05
* The MAC protocol defines
* 1. MAC key identifier (access token value)
* 2. MAC key (access token secret)
* 3. MAC algorithm - one of ("hmac-sha-1" or "hmac-sha-256")
* 4. Issue time - time when credentials were issued to calculate the age
*/
trait Signing {
  import org.apache.commons.codec.binary.Base64.encodeBase64

  // JCE Mac algorithm identifiers
  val HmacSha1 = "HmacSHA1"
  val HmacSha256 = "HmacSHA256"
  val charset = "UTF8"

  // spec algorithm names -> JCE Mac algorithm names
  val MacAlgorithms = Map("hmac-sha-1" -> HmacSha1, "hmac-sha-256" -> HmacSha256)
  // JCE Mac algorithm names -> MessageDigest algorithm names (for bodyhash)
  private val JAlgorithms = Map(HmacSha1 -> "SHA-1", HmacSha256 -> "SHA-256")

  /** Implicitly encodes strings with the configured charset. */
  implicit def s2b(s: String): Array[Byte] = s.getBytes(charset)

  /**
   * Digests `data` with the MessageDigest matching the given JCE Mac
   * algorithm name.
   * @return Either[String error, Array[Byte] digest]
   */
  def hash(data: Array[Byte])(algo: String) =
    JAlgorithms.get(algo) match {
      case Some(h) =>
        val msg = java.security.MessageDigest.getInstance(h)
        msg.update(data)
        Right(msg.digest)
      case None =>
        // previously formatted the unmatched Option (always `None`) into
        // the message; report the requested algorithm name instead
        Left("unsupported algorithm %s" format algo)
    }

  /**
   * HMACs `body` with `key` using the spec algorithm name `alg`.
   * @return Either[String error, String base64-encoded mac]
   */
  def macHash(alg: String, key: String)(body: String) =
    if(MacAlgorithms.isDefinedAt(alg)) {
      val macAlg = MacAlgorithms(alg)
      val mac = javax.crypto.Mac.getInstance(macAlg)
      mac.init(new javax.crypto.spec.SecretKeySpec(key, macAlg))
      Right(new String(encodeBase64(mac.doFinal(body)), charset))
    }
    else Left("unsupported mac algorithm %s" format alg)

  /** @return Either[String error, String base64-encoded digest of the body] */
  def bodyhash(body: Array[Byte])(alg: String) =
    hash(body)(alg).fold({ Left(_) }, { h => Right(new String(encodeBase64(h), charset)) })

  /** @return signed request for a given key, request, and algorithm */
  def sign[T](r: HttpRequest[T], nonce: String, ext: Option[String],
              bodyHash: Option[String], key: String, alg: String): Either[String, String] =
    requestString(r, alg, nonce, ext, bodyHash).fold({ Left(_) }, { rstr =>
      sign(key, rstr, alg)
    })

  /** @return Either[String error, String mac signed req] */
  def sign(key: String, request: String, alg: String): Either[String, String] =
    macHash(alg, key)(request)

  /**
   * Calculates the normalized request string from a request.
   * Note: when `bodyHash` is provided it only acts as a flag — the hash
   * is always recomputed from the actual request body.
   */
  def requestString[T](r: HttpRequest[T], alg: String,
                       nonce: String, ext: Option[String], bodyHash: Option[String]):
    Either[String, String] =
    MacAlgorithms.get(alg) match {
      case None => Left("unsupported mac algorithm %s" format alg)
      case Some(macAlg) =>
        r match {
          case HostPort(hostname, port) & r =>
            bodyHash match {
              case Some(bh) => // calculate bodyhash
                val body = Body.bytes(r)
                bodyhash(body)(macAlg).fold({ Left(_) }, { bhash =>
                  Right(requestString(nonce, r.method, r.uri,
                                      hostname, port,
                                      bhash,
                                      ext.getOrElse("")))
                })
              case _ => // don't calculate bodyhash
                Right(requestString(nonce, r.method, r.uri,
                                    hostname, port, "", ext.getOrElse("")))
            }
          case _ =>
            // previously a request without a resolvable host/port raised
            // a MatchError; fail explicitly instead
            Left("unable to determine host and port for request")
        }
    }

  /** Calculates the normalized request string from parts of a request. */
  def requestString(nonce: String, method: String, uri: String, hostname: String,
                    port: Int, bodyhash: String, ext: String): String =
    nonce :: method :: uri :: hostname :: port ::
      bodyhash :: ext :: Nil mkString("","\n","\n")
}
| benhutchison/unfiltered | mac/src/main/scala/mac.scala | Scala | mit | 5,864 |
package test.scala.MineSweeper
import org.scalatest.BeforeAndAfterEach
import main.scala.MineSweeper.MineReader
import test.scala.UnitSuite
// Acceptance test: parses the fixture file once at construction time and
// checks field dimensions and mine placements.
class MineReaderAcceptance extends UnitSuite with BeforeAndAfterEach {
  // Directory holding the test fixture files.
  val dir = "src/test/resources/"
  // MineReader returns Option[List[Field]]; a None here aborts the suite.
  val maybeFields = MineReader(dir + "minesweeper_acceptance.txt")
  val fields = maybeFields match {
    case Some(fields) => fields
    case None => fail("Expected Some(List[Field]), got None")
  }
  test("Acceptance input has two MineFields") {
    assertResult(2)(fields.length)
  }
  test("The first filed is 4 by 4") {
    val field = fields(0)
    assertResult((4, 4))(field.dims)
  }
  test("The second field is 3 by 5") {
    val field = fields(1)
    assertResult((3, 5))(field.dims)
  }
  // field(row, col) yields Some('*') at mine coordinates
  test("The first field has mines at (0, 0) and (2, 1)") {
    val field = fields(0)
    assertResult(Some('*'))(field(0, 0))
    assertResult(Some('*'))(field(2, 1))
  }
  test("The second field has mines at (0, 0), (0, 1) and (2, 1)") {
    val field = fields(1)
    assertResult(Some('*'))(field(0, 0))
    assertResult(Some('*'))(field(0, 1))
    assertResult(Some('*'))(field(2, 1))
  }
} | ollielo/ScalaKata | src/test/scala/MineSweeper/MineReaderAcceptance.scala | Scala | mit | 1,152 |
package ch2
import scala.annotation.tailrec
object Exercise2 {
  /**
   * Checks whether `as` is sorted according to `ordered`, which returns
   * true when its two arguments are in order.
   *
   * Empty and single-element arrays are trivially sorted. Runs in O(n)
   * using index-based tail recursion; the previous version called
   * `as.tail` on each step, which copies the array and made the whole
   * check O(n^2).
   */
  def isSorted[A](as: Array[A], ordered: (A, A) => Boolean): Boolean = {
    @tailrec
    def loop(i: Int): Boolean =
      if (i >= as.length - 1) true
      else if (!ordered(as(i), as(i + 1))) false
      else loop(i + 1)
    loop(0)
  }
}
import Exercise2._
/*
from repl you can test typing:
:load src/main/scala/fpinscala/ch2/Exercise2.scala
isSorted[Int](Array(1,2,3,4), (a,b) => {a <= b})
isSorted[Char](Array('a','b','c','d'), (a,b) => {a <= b})
isSorted[Int](Array(1,3,2,4), (a,b) => {a <= b})
isSorted[Int](Array(4,3,2,4), (a,b) => {a <= b})
isSorted[Char](Array('a','c','b','d'), (a,b) => {a <= b})
*/
| rucka/fpinscala | src/main/scala/fpinscala/ch2/Exercise2.scala | Scala | gpl-2.0 | 714 |
package it.svjm.textanalysis
import org.junit.Test
@Test
// Runs the shared AnalyzedTextProcessorTest suite against the Scala
// implementation; only the processor factory method is overridden.
class ScalaAnalyzedTextProcessorTest extends AnalyzedTextProcessorTest {
  override def getProcessor() = new ScalaAnalyzedTextProcessor
}
| vraffy/scalaVsJavaMeetup | svjm-scala/src/test/it/svjm/textanalysis/ScalaAnalyzedTextProcessorTest.scala | Scala | apache-2.0 | 201 |
import sbt._
import Keys._
import Tests._
import com.twitter.scrooge.ScroogeSBT
import com.typesafe.sbt.SbtSite.site
import com.typesafe.sbt.site.SphinxSupport.Sphinx
import pl.project13.scala.sbt.JmhPlugin
import sbtunidoc.Plugin.UnidocKeys._
import sbtunidoc.Plugin.{ScalaUnidoc, unidocSettings}
import scoverage.ScoverageSbtPlugin
object Finagle extends Build {
// Version suffix: only builds off master are releasable; every other
// branch publishes SNAPSHOT artifacts.
val branch = Process("git" :: "rev-parse" :: "--abbrev-ref" :: "HEAD" :: Nil).!!.trim
val suffix = if (branch == "master") "" else "-SNAPSHOT"

// Versions for finagle itself and its Twitter dependencies.
val libVersion = "6.28.0" + suffix
val utilVersion = "6.27.0" + suffix
val ostrichVersion = "9.11.0" + suffix
val scroogeVersion = "4.0.0" + suffix

val nettyLib = "io.netty" % "netty" % "3.10.1.Final"
val ostrichLib = "com.twitter" %% "ostrich" % ostrichVersion

// Jackson modules pinned to one version; guava is excluded from
// jackson-module-scala and pinned explicitly instead.
val jacksonVersion = "2.4.4"
val jacksonLibs = Seq(
  "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion,
  "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion,
  "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion exclude("com.google.guava", "guava"),
  "com.google.guava" % "guava" % "16.0.1"
)
val thriftLibs = Seq(
  "org.apache.thrift" % "libthrift" % "0.5.0" intransitive(),
  "org.slf4j" % "slf4j-api" % "1.7.7" % "provided"
)
val scroogeLibs = thriftLibs ++ Seq(
  "com.twitter" %% "scrooge-core" % scroogeVersion)

// Helper for depending on a util-* module, excluding test-framework
// transitive dependencies that would clash with ours.
def util(which: String) =
  "com.twitter" %% ("util-"+which) % utilVersion excludeAll(
    ExclusionRule(organization = "junit"),
    ExclusionRule(organization = "org.scala-tools.testing"),
    ExclusionRule(organization = "org.mockito"))

// Keys and resolver for publishing artifacts into the local ~/.m2 repo.
lazy val publishM2Configuration =
  TaskKey[PublishConfiguration]("publish-m2-configuration",
    "Configuration for publishing to the .m2 repository.")
lazy val publishM2 =
  TaskKey[Unit]("publish-m2",
    "Publishes artifacts to the .m2 repository.")
lazy val m2Repo =
  Resolver.file("publish-m2-local",
    Path.userHome / ".m2" / "repository")
val sharedSettings = Seq(
version := libVersion,
organization := "com.twitter",
crossScalaVersions := Seq("2.10.5", "2.11.7"),
scalaVersion := "2.10.5",
libraryDependencies ++= Seq(
"org.scalacheck" %% "scalacheck" % "1.12.2" % "test",
"org.scalatest" %% "scalatest" % "2.2.4" % "test",
"junit" % "junit" % "4.10" % "test",
"org.mockito" % "mockito-all" % "1.9.5" % "test"
),
resolvers += "twitter-repo" at "https://maven.twttr.com",
ScoverageSbtPlugin.ScoverageKeys.coverageHighlighting := (
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 10)) => false
case _ => true
}
),
publishM2Configuration <<= (packagedArtifacts, checksums in publish, ivyLoggingLevel) map { (arts, cs, level) =>
Classpaths.publishConfig(arts, None, resolverName = m2Repo.name, checksums = cs, logging = level)
},
publishM2 <<= Classpaths.publishTask(publishM2Configuration, deliverLocal),
otherResolvers += m2Repo,
javaOptions in Test := Seq("-DSKIP_FLAKY=1"),
ivyXML :=
<dependencies>
<exclude org="com.sun.jmx" module="jmxri" />
<exclude org="com.sun.jdmk" module="jmxtools" />
<exclude org="javax.jms" module="jms" />
</dependencies>,
scalacOptions ++= Seq("-encoding", "utf8"),
scalacOptions += "-deprecation",
scalacOptions += "-language:_",
javacOptions ++= Seq("-source", "1.7", "-target", "1.7"),
javacOptions in doc := Seq("-source", "1.7"),
// This is bad news for things like com.twitter.util.Time
parallelExecution in Test := false,
// This effectively disables packageDoc, which craps out
// on generating docs for generated thrift due to the use
// of raw java types.
// packageDoc in Compile := new java.io.File("nosuchjar"),
// Sonatype publishing
publishArtifact in Test := false,
pomIncludeRepository := { _ => false },
publishMavenStyle := true,
autoAPIMappings := true,
apiURL := Some(url("https://twitter.github.io/finagle/docs/")),
pomExtra := (
<url>https://github.com/twitter/finagle</url>
<licenses>
<license>
<name>Apache License, Version 2.0</name>
<url>http://www.apache.org/licenses/LICENSE-2.0</url>
</license>
</licenses>
<scm>
<url>git@github.com:twitter/finagle.git</url>
<connection>scm:git:git@github.com:twitter/finagle.git</connection>
</scm>
<developers>
<developer>
<id>twitter</id>
<name>Twitter Inc.</name>
<url>https://www.twitter.com/</url>
</developer>
</developers>),
publishTo <<= version { (v: String) =>
val nexus = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT"))
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
},
resourceGenerators in Compile <+=
(resourceManaged in Compile, name, version) map { (dir, name, ver) =>
val file = dir / "com" / "twitter" / name / "build.properties"
val buildRev = Process("git" :: "rev-parse" :: "HEAD" :: Nil).!!.trim
val buildName = new java.text.SimpleDateFormat("yyyyMMdd-HHmmss").format(new java.util.Date)
val contents = (
"name=%s\nversion=%s\nbuild_revision=%s\nbuild_name=%s"
).format(name, ver, buildRev, buildName)
IO.write(file, contents)
Seq(file)
}
)
val jmockSettings = Seq(
libraryDependencies ++= Seq(
"org.jmock" % "jmock" % "2.4.0" % "test",
"cglib" % "cglib" % "2.2.2" % "test",
"asm" % "asm" % "3.3.1" % "test",
"org.objenesis" % "objenesis" % "1.1" % "test",
"org.hamcrest" % "hamcrest-all" % "1.1" % "test"
)
)
lazy val finagle = Project(
id = "finagle",
base = file("."),
settings = Project.defaultSettings ++
sharedSettings ++
unidocSettings ++ Seq(
unidocProjectFilter in (ScalaUnidoc, unidoc) :=
inAnyProject -- inProjects(finagleExample)
)
) aggregate(
// Core, support.
finagleCore, finagleTest, finagleStats,
finagleZipkin, finagleServersets, finagleCacheResolver,
finagleException, finagleCommonsStats,
finagleExp, finagleMdns, finagleTesters, finagleOstrich4,
// Protocols
finagleHttp, finagleHttpX, finagleHttpXCompat, finagleStream, finagleNative,
finagleThrift, finagleMemcached, finagleKestrel,
finagleMux, finagleThriftMux, finagleMySQL,
finagleSpdy, finagleRedis,
// Use and integration
finagleStress
// removing benchmark because swift can't build outside of twitter for now
// finagleBenchmark
// Removing projects with specs tests and their dependencies
// finagleExample
)
lazy val finagleTest = Project(
id = "finagle-test",
base = file("finagle-test"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-test",
libraryDependencies ++= Seq(nettyLib, util("core"))
)
lazy val finagleCore = Project(
id = "finagle-core",
base = file("finagle-core"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-core",
libraryDependencies ++= Seq(nettyLib, util("app"), util("core"), util("collection"),
util("hashing"), util("stats"), util("jvm"), util("logging"), util("codec"),
"com.twitter" % "jsr166e" % "1.0.0")
).dependsOn(finagleTest % "test")
lazy val finagleOstrich4 = Project(
id = "finagle-ostrich4",
base = file("finagle-ostrich4"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-ostrich4",
libraryDependencies ++= Seq(
ostrichLib,
util("registry"),
util("stats")
)
).dependsOn(finagleCore, finagleHttpX)
lazy val finagleStats = Project(
id = "finagle-stats",
base = file("finagle-stats"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-stats",
libraryDependencies ++= Seq(
"com.twitter.common" % "metrics" % "0.0.37",
util("events"),
util("logging"),
util("registry"),
util("stats")
),
libraryDependencies ++= jacksonLibs
).dependsOn(finagleCore, finagleHttpX)
lazy val finagleZipkin = Project(
id = "finagle-zipkin",
base = file("finagle-zipkin"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-zipkin",
libraryDependencies ++= Seq(util("codec"), util("events")) ++ scroogeLibs,
libraryDependencies ++= jacksonLibs
).dependsOn(finagleCore, finagleThrift, finagleTest % "test")
lazy val finagleException = Project(
id = "finagle-exception",
base = file("finagle-exception"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-exception",
libraryDependencies ++= Seq(
util("codec")
) ++ scroogeLibs,
libraryDependencies ++= jacksonLibs
).dependsOn(finagleCore, finagleThrift)
lazy val finagleCommonsStats = Project(
id = "finagle-commons-stats",
base = file("finagle-commons-stats"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-commons-stats",
libraryDependencies ++= Seq(
"com.twitter.common" % "stats" % "0.0.114",
util("registry"),
util("stats")
)
).dependsOn(finagleCore)
lazy val finagleServersets = Project(
id = "finagle-serversets",
base = file("finagle-serversets"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-serversets",
fork in Test := true,
libraryDependencies ++= Seq(
"commons-codec" % "commons-codec" % "1.6",
util("zk-common"),
util("zk-test") % "test",
"com.twitter.common.zookeeper" % "server-set" % "1.0.103",
"com.google.guava" % "guava" % "16.0.1"
),
libraryDependencies ++= jacksonLibs,
excludeFilter in unmanagedSources := "ZkTest.scala",
ivyXML :=
<dependencies>
<dependency org="com.twitter.common.zookeeper" name="server-set" rev="1.0.103">
<exclude org="com.google.guava" name="guava"/>
<exclude org="com.twitter" name="finagle-core"/>
<exclude org="com.twitter" name="finagle-thrift"/>
<exclude org="com.twitter" name="util-core"/>
<exclude org="com.twitter" name="util-logging"/>
<exclude org="com.twitter.common" name="jdk-logging"/>
<exclude org="com.twitter.common" name="stats"/>
<exclude org="com.twitter.common" name="util-executor-service-shutdown"/>
<exclude org="io.netty" name="netty"/>
<exclude org="javax.activation" name="activation"/>
<exclude org="javax.mail" name="mail"/>
</dependency>
</dependencies>
).dependsOn(finagleCore, finagleTest)
// Protocol support
lazy val finagleHttp = Project(
id = "finagle-http",
base = file("finagle-http"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-http",
libraryDependencies ++= Seq(
util("codec"), util("logging"),
"commons-lang" % "commons-lang" % "2.6",
"com.google.guava" % "guava" % "16.0.1"
)
).dependsOn(finagleCore)
// see https://finagle.github.io/blog/2014/10/20/upgrading-finagle-to-netty-4/
// for an explanation of the role of transitional -x packages in the netty4 migration.
lazy val finagleHttpX = Project(
id = "finagle-httpx",
base = file("finagle-httpx"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-httpx",
libraryDependencies ++= Seq(
util("codec"), util("logging"),
"commons-lang" % "commons-lang" % "2.6",
"com.google.guava" % "guava" % "16.0.1"
)
).dependsOn(finagleCore)
lazy val finagleHttpXCompat = Project(
id = "finagle-httpx-compat",
base = file("finagle-httpx-compat"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-httpx-compat"
).dependsOn(finagleCore, finagleHttp, finagleHttpX)
lazy val finagleNative = Project(
id = "finagle-native",
base = file("finagle-native"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-native"
).dependsOn(finagleCore, finagleHttp)
lazy val finagleStream = Project(
id = "finagle-stream",
base = file("finagle-stream"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-stream"
).dependsOn(finagleCore, finagleTest % "test")
lazy val finagleThrift = Project(
id = "finagle-thrift",
base = file("finagle-thrift"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-thrift",
libraryDependencies ++= Seq("silly" % "silly-thrift" % "0.5.0" % "test") ++ scroogeLibs
).dependsOn(finagleCore, finagleTest % "test")
lazy val finagleCacheResolver = Project(
id = "finagle-cacheresolver",
base = file("finagle-cacheresolver"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-cacheresolver",
libraryDependencies ++= Seq(
"com.twitter.common" % "zookeeper-testing" % "0.0.53" % "test"
),
libraryDependencies ++= jacksonLibs
).dependsOn(finagleCore, finagleServersets)
lazy val finagleMemcached = Project(
id = "finagle-memcached",
base = file("finagle-memcached"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-memcached",
libraryDependencies ++= Seq(
util("hashing"),
util("zk-test") % "test",
"com.google.guava" % "guava" % "16.0.1",
"com.twitter.common" % "zookeeper-testing" % "0.0.53" % "test"
),
libraryDependencies ++= jacksonLibs
).dependsOn(finagleCacheResolver, finagleCore, finagleServersets)
lazy val finagleKestrel = Project(
id = "finagle-kestrel",
base = file("finagle-kestrel"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-kestrel",
libraryDependencies ++= scroogeLibs
).dependsOn(finagleCore, finagleMemcached, finagleThrift)
lazy val finagleRedis = Project(
id = "finagle-redis",
base = file("finagle-redis"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-redis",
libraryDependencies ++= Seq(
util("logging")
),
testOptions in Test := Seq(Tests.Filter {
name => !name.startsWith("com.twitter.finagle.redis.integration")
})
).dependsOn(finagleCore)
lazy val finagleMux = Project(
id = "finagle-mux",
base = file("finagle-mux"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-mux",
libraryDependencies ++= Seq("com.twitter.common" % "stats-util" % "0.0.58")
).dependsOn(finagleCore)
lazy val finagleThriftMux = Project(
id = "finagle-thriftmux",
base = file("finagle-thriftmux"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-thriftmux",
libraryDependencies ++= scroogeLibs
).dependsOn(finagleCore, finagleMux, finagleThrift)
lazy val finagleMySQL = Project(
id = "finagle-mysql",
base = file("finagle-mysql"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-mysql",
libraryDependencies ++= Seq(util("logging"), util("cache")),
excludeFilter in unmanagedSources := { "EmbeddableMysql.scala" || "ClientTest.scala" }
).dependsOn(finagleCore)
lazy val finagleExp = Project(
id = "finagle-exp",
base = file("finagle-exp"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-exp",
libraryDependencies ++= Seq(
"com.google.caliper" % "caliper" % "0.5-rc1",
"com.twitter" % "jsr166e" % "1.0.0"
)
).dependsOn(finagleCore, finagleThrift, finagleTest % "test")
// Uses
lazy val finagleStress = Project(
id = "finagle-stress",
base = file("finagle-stress"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-stress",
libraryDependencies ++= Seq(ostrichLib, util("logging")) ++ thriftLibs,
libraryDependencies += "com.google.caliper" % "caliper" % "0.5-rc1"
).dependsOn(finagleCore, finagleOstrich4, finagleThrift, finagleHttp, finagleThriftMux)
lazy val finagleMdns = Project(
id = "finagle-mdns",
base = file("finagle-mdns"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-mdns",
libraryDependencies += "javax.jmdns" % "jmdns" % "3.4.1"
).dependsOn(finagleCore)
lazy val finagleExample = Project(
id = "finagle-example",
base = file("finagle-example"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings
).settings(
name := "finagle-example",
libraryDependencies ++= Seq(
util("codec"),
"org.slf4j" % "slf4j-nop" % "1.7.7" % "provided"
) ++ scroogeLibs
).dependsOn(
finagleCore, finagleHttp, finagleThrift, finagleMemcached, finagleKestrel,
finagleRedis, finagleMySQL, finagleOstrich4, finagleStats)
lazy val finagleBenchmark = Project(
id = "finagle-benchmark",
base = file("finagle-benchmark"),
settings = Project.defaultSettings ++
ScroogeSBT.newSettings ++
sharedSettings ++ JmhPlugin.projectSettings
)
.enablePlugins(JmhPlugin)
.settings(
name := "finagle-benchmark",
libraryDependencies ++= Seq(
util("codec"),
"com.google.caliper" % "caliper" % "0.5-rc1",
"com.twitter.common" % "metrics-data-sample" % "0.0.1"
)
).dependsOn(
finagleCommonsStats,
finagleCore,
finagleExp,
finagleMemcached,
finagleOstrich4,
finagleStats,
finagleThriftMux,
finagleZipkin
)
lazy val finagleTesters = Project(
id = "finagle-testers",
base = file("finagle-testers"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-testers"
).dependsOn(finagleCore)
lazy val finagleSpdy = Project(
id = "finagle-spdy",
base = file("finagle-spdy"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-spdy"
).dependsOn(finagleCore)
/* lazy val finagleSwift = Project(
id = "finagle-swift",
base = file("finagle-swift"),
settings = Project.defaultSettings ++
sharedSettings
).settings(
name := "finagle-swift",
libraryDependencies ++= Seq(
"com.twitter.com.facebook.swift" % "swift-codec" % "0.6.0"
)
).dependsOn(finagleCore, finagleThrift)*/
// Sphinx-based documentation project. Code snippets under src/sphinx/code are
// compiled and executed in a dedicated "doctest" configuration so examples in
// the docs cannot silently rot. NOTE: intentionally uses sbt 0.13-era operators
// (<<=, <+=, <++=) to match the rest of this build file.
lazy val finagleDoc = Project(
  id = "finagle-doc",
  base = file("doc"),
  settings = Project.defaultSettings ++ site.settings ++ site.sphinxSupport() ++ sharedSettings ++ Seq(
    scalacOptions in doc <++= (version).map(v => Seq("-doc-title", "Finagle", "-doc-version", v)),
    includeFilter in Sphinx := ("*.html" | "*.png" | "*.svg" | "*.js" | "*.css" | "*.gif" | "*.txt"),
    // Workaround for sbt bug: Without a testGrouping for all test configs,
    // the wrong tests are run
    testGrouping <<= definedTests in Test map partitionTests,
    testGrouping in DocTest <<= definedTests in DocTest map partitionTests
  )).configs(DocTest).settings(inConfig(DocTest)(Defaults.testSettings): _*).settings(
    unmanagedSourceDirectories in DocTest <+= baseDirectory { _ / "src/sphinx/code" },
    //resourceDirectory in DocTest <<= baseDirectory { _ / "src/test/resources" }
    // Make the "test" command run both, test and doctest:test
    test <<= Seq(test in Test, test in DocTest).dependOn
  ).dependsOn(finagleCore, finagleHttpX, finagleMySQL)
/* Test Configuration for running tests on doc sources */
lazy val DocTest = config("doctest") extend(Test)

// Dummy partitioning scheme: puts every test into one in-process group.
// Needed because testGrouping must be defined for all test configs (see the
// workaround note on finagleDoc above in the original build).
def partitionTests(tests: Seq[TestDefinition]) = {
  Seq(new Group("inProcess", tests, InProcess))
}
}
| jay-johnson/finagle | project/Build.scala | Scala | apache-2.0 | 20,502 |
package org.elasticsearch.plugin.river.kinesis.config
import org.elasticsearch.river.RiverSettings
import org.elasticsearch.plugin.river.kinesis.parser.KinesisDataParser
import org.elasticsearch.plugin.river.kinesis.parser.impl.PassThruDataParser
import org.elasticsearch.plugin.river.kinesis.util.Logging
/**
* Configuration for the data parser
* @param parserClass the parser class
* @param additionalConfig the map of configs for the specific parser being used
*
* Created by JohnDeverna on 8/12/14.
*/
case class ParserConfig(parserClass: Class[_ <: KinesisDataParser], // concrete parser implementation to instantiate
                        additionalConfig: Map[String, AnyRef]) // raw "parser" config block, handed to the parser as-is
/**
 * The ParserConfig companion: resolves the parser class and its extra
 * configuration from the river settings.
 */
object ParserConfig extends Config[ParserConfig] with Logging {

  // Fallback used when no "parser" section or no "class" key is configured.
  lazy val defaultParserClass = classOf[PassThruDataParser]

  /**
   * Constructor
   * @param settings the river settings
   * @return a Config instance
   * @throws RuntimeException if the configured class does not implement KinesisDataParser
   */
  def apply(settings: RiverSettings) = {
    getConfigMap(settings, "parser") match {
      case Some(es) => new ParserConfig(
        parserClass = getAsOpt(es, "class") match {
          // a class was specified in the config -- load it and validate its type
          case Some(p: String) => {
            Log.info("Looking for parser of type: {}", p)
            val c = Class.forName(p)
            if (!classOf[KinesisDataParser].isAssignableFrom(c)) {
              throw new RuntimeException(s"Invalid parser class ${c.getName}")
            }
            // cast is safe: assignability was checked just above
            c.asInstanceOf[Class[KinesisDataParser]]
          }
          // no class specified -- use the default
          case _ => classOf[PassThruDataParser]
        },
        additionalConfig = es
      )
      // no "parser" config section at all: default parser, empty config
      case _ => new ParserConfig(defaultParserClass, Map[String, AnyRef]())
    }
  }
} | anreved/elasticsearch-kinesis-river | src/main/scala/org/elasticsearch/plugin/river/kinesis/config/ParserConfig.scala | Scala | mit | 1,737 |
package exam.national_center_test.xml.choice
/**
* <pre>
* Created on 5/23/15.
* </pre>
*
* @author K.Sakamoto
*/
object ChoiceTerm {
  // NOTE(review): "collcetion" is a misspelled package name declared elsewhere in
  // this project; renaming it would have to be coordinated across all usages.
  def apply(choice: collcetion.ChoiceSingleton): ChoiceTerm = {
    new ChoiceTerm(choice.singleton)
  }
}

/** A single answer choice represented by its raw term string. */
class ChoiceTerm(override val choice: String) extends Choice[String](choice) {
}
/** Factory building a ChoicesTerm from raw singleton choices. */
object ChoicesTerm {
  def apply(choices: Seq[collcetion.ChoiceSingleton]): ChoicesTerm = {
    new ChoicesTerm(choices map {ChoiceTerm(_)})
  }
}

/** An ordered collection of ChoiceTerm values. */
class ChoicesTerm(override val choices: Seq[ChoiceTerm]) extends Choices[ChoiceTerm](choices) {
}
| ktr-skmt/FelisCatusZero | src/main/scala/exam/national_center_test/xml/choice/ChoiceTerm.scala | Scala | apache-2.0 | 579 |
package otos.service
import java.sql.DriverManager
import akka.actor.Actor
import com.typesafe.config.ConfigFactory
// Actor protocol messages for place lookups.
case class IdRequest(id: Int)                       // look up a place by primary key
case class NameRequest(name: String)                // case-insensitive substring search by name
case class NearRequest(id: Int, params: NearParams) // places near the place with this id
// All bounds optional at the type level; see findNear for which are required at runtime.
case class NearParams(minRange: Option[Int], maxRange: Option[Int], minPopulation: Option[Long], maxPopulation: Option[Long])
/**
 * Actor front-end for PostgresPlacesService. Reads JDBC connection settings
 * from opt-loc.properties and replies to Id/Name/Near requests with results
 * from the places database.
 */
class PostgresPlacesServiceActor extends Actor with PostgresPlacesService {
  val config = ConfigFactory.load("opt-loc.properties")
  implicit val jdbcUrl = config.getString("javax.persistence.jdbc.url")
  implicit val jdbcUser = config.getString("javax.persistence.jdbc.user")
  implicit val jdbcPassword = config.getString("javax.persistence.jdbc.password")

  // Queries run synchronously inside the actor, so DB access is serialized here.
  def receive = {
    case IdRequest(id) =>
      sender ! findById(id)
    case NameRequest(name) =>
      sender ! findByName(name)
    case NearRequest(id, NearParams(minRange, maxRange, minPopulation, maxPopulation)) =>
      sender ! findNear(id, minRange, maxRange, minPopulation, maxPopulation)
  }
}
trait PostgresPlacesService {

  implicit def jdbcUrl: String
  implicit def jdbcUser: String
  implicit def jdbcPassword: String

  // Single connection, opened lazily on first query.
  // NOTE(review): java.sql.Connection is not thread-safe; this is acceptable
  // while all access is serialized through PostgresPlacesServiceActor -- confirm
  // before reusing this trait from concurrent code.
  lazy val databaseConnection = {
    DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword)
  }

  /** Builds a Location from the current row of a result set. */
  private def readLocation(rs: java.sql.ResultSet): Location =
    Location(
      rs.getInt("id"),
      rs.getString("name"),
      Center(
        rs.getDouble("latitude"),
        rs.getDouble("longitude")
      ),
      rs.getLong("population")
    )

  /**
   * Looks up a populated place (feature_class 'P') by primary key.
   *
   * As before, a missing row surfaces as an SQLException when the first column
   * is read from the empty result set.
   */
  def findById(id: Int): Location = {
    val query =
      """|SELECT
         | id,
         | name,
         | ST_X(geom) as latitude,
         | ST_Y(geom) as longitude,
         | population
         |FROM
         | places_gb
         |WHERE
         | feature_class='P'
         | AND id=?
         |ORDER BY
         | population DESC
         |LIMIT 1;
         |""".stripMargin

    val stmt = databaseConnection.prepareStatement(query)
    try {
      stmt.setInt(1, id)
      val resultSet = stmt.executeQuery
      resultSet.next
      readLocation(resultSet)
    } finally {
      stmt.close() // closing the statement also closes its result set
    }
  }

  /**
   * Case-insensitive substring search on place name, most populous first.
   *
   * The search term is now bound as a prepared-statement parameter instead of
   * being interpolated into the SQL text (fixes an SQL-injection hole: the old
   * code spliced user input directly into the ILIKE pattern).
   */
  def findByName(locationSearch: String): List[Location] = {
    println(s"searching for: $locationSearch")
    val query =
      """|SELECT
         | id,
         | name,
         | ST_X(geom) as latitude,
         | ST_Y(geom) as longitude,
         | population
         |FROM
         | places_gb
         |WHERE
         | feature_class='P'
         | AND name ILIKE ?
         |ORDER BY
         | population DESC;
         |""".stripMargin

    val stmt = databaseConnection.prepareStatement(query)
    try {
      // toLowerCase kept from the original; ILIKE is case-insensitive anyway.
      stmt.setString(1, s"%${locationSearch.toLowerCase}%")
      val resultSet = stmt.executeQuery
      Iterator.continually(resultSet.next).takeWhile(_ == true).map { _ =>
        readLocation(resultSet)
      }.toList
    } finally {
      stmt.close()
    }
  }

  /**
   * Finds places within a distance band (metres) and population band around the
   * place identified by `id`, ordered by name.
   *
   * Preconditions (unchanged from the original behavior): `minRange`,
   * `maxRange` and `minPopulation` must be defined -- passing `None` throws
   * NoSuchElementException. `maxPopulation` defaults to a world-population
   * ceiling when absent.
   */
  def findNear(
    id: Int,
    minRange: Option[Int],
    maxRange: Option[Int],
    minPopulation: Option[Long],
    maxPopulation: Option[Long]
  ) : List[Location] = {
    val populationCeiling = 7000000000L
    val location = findById(id)
    val query =
      """|SELECT
         | id,
         | name,
         | ST_X(geom) as latitude,
         | ST_Y(geom) as longitude,
         | population
         |FROM
         | places_gb
         |WHERE
         | id != ?
         | AND feature_class='P'
         | AND ST_Distance_Sphere(geom, ST_MakePoint(?, ?)) <= ?
         | AND ST_Distance_Sphere(geom, ST_MakePoint(?, ?)) >= ?
         | AND population BETWEEN ? AND ?
         |ORDER BY
         | name ASC
         |""".stripMargin
    println(query)
    val stmt = databaseConnection.prepareStatement(query)
    try {
      stmt.setInt(1, id)
      stmt.setDouble(2, location.center.latitude)
      stmt.setDouble(3, location.center.longitude)
      stmt.setInt(4, maxRange.get)
      stmt.setDouble(5, location.center.latitude)
      stmt.setDouble(6, location.center.longitude)
      stmt.setInt(7, minRange.get)
      stmt.setLong(8, minPopulation.get)
      stmt.setLong(9, maxPopulation.getOrElse(populationCeiling))
      val resultSet = stmt.executeQuery
      Iterator.continually(resultSet.next).takeWhile(_ == true).map { _ =>
        readLocation(resultSet)
      }.toList
    } finally {
      stmt.close()
    }
  }
}
| andystanton/opt-loc | src/main/scala/otos/service/PostgresPlacesService.scala | Scala | apache-2.0 | 4,418 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import java.io.File
import scala.collection.JavaConverters._
import com.google.common.base.Charsets
import com.google.common.io.{BaseEncoding, Files}
import io.fabric8.kubernetes.api.model.Secret
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.{KubernetesTestConf, SparkPod}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.util.Utils
/**
 * Unit tests for DriverKubernetesCredentialsFeatureStep. The step decides how
 * the driver pod gets Kubernetes API credentials: not at all, via paths the
 * user already mounted, or by uploading local credential files as a secret
 * that is mounted into the driver container.
 */
class DriverKubernetesCredentialsFeatureStepSuite extends SparkFunSuite {

  // Scratch directory for credential files written by individual tests.
  private val credentialsTempDirectory = Utils.createTempDir()
  private val BASE_DRIVER_POD = SparkPod.initialPod()

  test("Don't set any credentials") {
    val kubernetesConf = KubernetesTestConf.createDriverConf()
    val kubernetesCredentialsStep = new DriverKubernetesCredentialsFeatureStep(kubernetesConf)
    // With no credentials configured, pod, properties and resources are untouched.
    assert(kubernetesCredentialsStep.configurePod(BASE_DRIVER_POD) === BASE_DRIVER_POD)
    assert(kubernetesCredentialsStep.getAdditionalPodSystemProperties().isEmpty)
    assert(kubernetesCredentialsStep.getAdditionalKubernetesResources().isEmpty)
  }

  test("Only set credentials that are manually mounted.") {
    // "Mounted" conf keys point at paths already present in the container, so the
    // step must pass them through verbatim and create no secret.
    val submissionSparkConf = new SparkConf(false)
      .set(
        s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$OAUTH_TOKEN_FILE_CONF_SUFFIX",
        "/mnt/secrets/my-token.txt")
      .set(
        s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CLIENT_KEY_FILE_CONF_SUFFIX",
        "/mnt/secrets/my-key.pem")
      .set(
        s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CLIENT_CERT_FILE_CONF_SUFFIX",
        "/mnt/secrets/my-cert.pem")
      .set(
        s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
        "/mnt/secrets/my-ca.pem")
    val kubernetesConf = KubernetesTestConf.createDriverConf(sparkConf = submissionSparkConf)
    val kubernetesCredentialsStep = new DriverKubernetesCredentialsFeatureStep(kubernetesConf)
    assert(kubernetesCredentialsStep.configurePod(BASE_DRIVER_POD) === BASE_DRIVER_POD)
    assert(kubernetesCredentialsStep.getAdditionalKubernetesResources().isEmpty)
    // Every emitted property must echo the originally configured value.
    val resolvedProperties = kubernetesCredentialsStep.getAdditionalPodSystemProperties()
    resolvedProperties.foreach { case (propKey, propValue) =>
      assert(submissionSparkConf.get(propKey) === propValue)
    }
  }

  test("Mount credentials from the submission client as a secret.") {
    // Local credential files on the submission client should be base64-encoded
    // into a secret, and the driver conf rewritten to the in-pod mount paths.
    val caCertFile = writeCredentials("ca.pem", "ca-cert")
    val clientKeyFile = writeCredentials("key.pem", "key")
    val clientCertFile = writeCredentials("cert.pem", "cert")
    val submissionSparkConf = new SparkConf(false)
      .set(
        s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.$OAUTH_TOKEN_CONF_SUFFIX",
        "token")
      .set(
        s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.$CLIENT_KEY_FILE_CONF_SUFFIX",
        clientKeyFile.getAbsolutePath)
      .set(
        s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.$CLIENT_CERT_FILE_CONF_SUFFIX",
        clientCertFile.getAbsolutePath)
      .set(
        s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX",
        caCertFile.getAbsolutePath)
    val kubernetesConf = KubernetesTestConf.createDriverConf(sparkConf = submissionSparkConf)
    val kubernetesCredentialsStep = new DriverKubernetesCredentialsFeatureStep(kubernetesConf)
    val resolvedProperties = kubernetesCredentialsStep.getAdditionalPodSystemProperties()
    // The raw OAuth token must be redacted; file-based creds map to fixed in-pod paths.
    val expectedSparkConf = Map(
      s"$KUBERNETES_AUTH_DRIVER_CONF_PREFIX.$OAUTH_TOKEN_CONF_SUFFIX" -> "<present_but_redacted>",
      s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$OAUTH_TOKEN_FILE_CONF_SUFFIX" ->
        DRIVER_CREDENTIALS_OAUTH_TOKEN_PATH,
      s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CLIENT_KEY_FILE_CONF_SUFFIX" ->
        DRIVER_CREDENTIALS_CLIENT_KEY_PATH,
      s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CLIENT_CERT_FILE_CONF_SUFFIX" ->
        DRIVER_CREDENTIALS_CLIENT_CERT_PATH,
      s"$KUBERNETES_AUTH_DRIVER_MOUNTED_CONF_PREFIX.$CA_CERT_FILE_CONF_SUFFIX" ->
        DRIVER_CREDENTIALS_CA_CERT_PATH)
    assert(resolvedProperties === expectedSparkConf)
    // Exactly one generated resource: the credentials secret, named after the app.
    assert(kubernetesCredentialsStep.getAdditionalKubernetesResources().size === 1)
    val credentialsSecret = kubernetesCredentialsStep
      .getAdditionalKubernetesResources()
      .head
      .asInstanceOf[Secret]
    assert(credentialsSecret.getMetadata.getName ===
      s"${kubernetesConf.resourceNamePrefix}-kubernetes-credentials")
    // Secret data is base64; decode before comparing with the raw file contents.
    val decodedSecretData = credentialsSecret.getData.asScala.map { data =>
      (data._1, new String(BaseEncoding.base64().decode(data._2), Charsets.UTF_8))
    }
    val expectedSecretData = Map(
      DRIVER_CREDENTIALS_CA_CERT_SECRET_NAME -> "ca-cert",
      DRIVER_CREDENTIALS_OAUTH_TOKEN_SECRET_NAME -> "token",
      DRIVER_CREDENTIALS_CLIENT_KEY_SECRET_NAME -> "key",
      DRIVER_CREDENTIALS_CLIENT_CERT_SECRET_NAME -> "cert")
    assert(decodedSecretData === expectedSecretData)
    // The pod must mount the secret as a volume into the driver container.
    val driverPod = kubernetesCredentialsStep.configurePod(BASE_DRIVER_POD)
    val driverPodVolumes = driverPod.pod.getSpec.getVolumes.asScala
    assert(driverPodVolumes.size === 1)
    assert(driverPodVolumes.head.getName === DRIVER_CREDENTIALS_SECRET_VOLUME_NAME)
    assert(driverPodVolumes.head.getSecret != null)
    assert(driverPodVolumes.head.getSecret.getSecretName === credentialsSecret.getMetadata.getName)
    val driverContainerVolumeMount = driverPod.container.getVolumeMounts.asScala
    assert(driverContainerVolumeMount.size === 1)
    assert(driverContainerVolumeMount.head.getName === DRIVER_CREDENTIALS_SECRET_VOLUME_NAME)
    assert(driverContainerVolumeMount.head.getMountPath === DRIVER_CREDENTIALS_SECRETS_BASE_DIR)
  }

  /** Writes `credentialsContents` to a file in the temp dir and returns it. */
  private def writeCredentials(credentialsFileName: String, credentialsContents: String): File = {
    val credentialsFile = new File(credentialsTempDirectory, credentialsFileName)
    Files.write(credentialsContents, credentialsFile, Charsets.UTF_8)
    credentialsFile
  }
}
| mahak/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/DriverKubernetesCredentialsFeatureStepSuite.scala | Scala | apache-2.0 | 6,774 |
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
package std
import Types._
import Task._
import TaskExtra.{all,existToAny}
import Execute._
/**
 * Bridges sbt's Task graph to the Execute engine: replaces "dummy" tasks with
 * pre-computed values and converts each Task into an executable Node.
 * NOTE: evaluation order and the exact mapping shapes here are load-bearing;
 * treat any refactor with care.
 */
object Transform
{
  // Wraps `action` as a Pure task that reuses the original task's metadata.
  def fromDummy[T](original: Task[T])(action: => T): Task[T] = Task(original.info, Pure(action _, false))
  // Specialization of fromDummy for an already-computed value.
  def fromDummyStrict[T](original: Task[T], value: T): Task[T] = fromDummy(original)( value)

  // Views any read-only map as the partial natural transformation K ~>| V.
  implicit def to_~>| [K[_], V[_]](map: RMap[K,V]) : K ~>| V = new (K ~>| V) { def apply[T](k: K[T]): Option[V[T]] = map.get(k) }

  /** Immutable list of (dummy task, replacement value) pairs; `::` prepends a pair. */
  final case class DummyTaskMap(mappings: List[TaskAndValue[_]]) {
    def ::[T](tav: (Task[T], T)): DummyTaskMap = DummyTaskMap(new TaskAndValue(tav._1, tav._2) :: mappings)
  }
  /** Pairs a dummy task with the concrete value that should replace it. */
  final class TaskAndValue[T](val task: Task[T], val value: T)

  // Builds the partial substitution: dummy task -> strict task returning its value.
  def dummyMap(dummyMap: DummyTaskMap): Task ~>| Task =
  {
    val pmap = new DelegatingPMap[Task, Task](new collection.mutable.ListMap)
    def add[T](dummy: TaskAndValue[T]) { pmap(dummy.task) = fromDummyStrict(dummy.task, dummy.value) }
    dummyMap.mappings.foreach(x => add(x))
    pmap
  }

  /** Applies `map`, returning the result if defined or returning the input unchanged otherwise.*/
  implicit def getOrId(map: Task ~>| Task): Task ~> Task =
    new (Task ~> Task) {
      def apply[T](in: Task[T]): Task[T] = map(in).getOrElse(in)
    }

  // Entry point: NodeView with all dummy tasks substituted by their values.
  def apply(dummies: DummyTaskMap) =
  {
    import System._
    taskToNode( getOrId(dummyMap(dummies)) )
  }

  // Translates each Task's Action into a Node the Execute engine can run.
  // `pre` is applied first so substituted dummies are expanded before dispatch.
  def taskToNode(pre: Task ~> Task): NodeView[Task] = new NodeView[Task] {
    def apply[T](t: Task[T]): Node[Task, T] = pre(t).work match {
      case Pure(eval, _) => uniform(Nil)( _ => Right(eval()) )
      case m: Mapped[t, k] => toNode[t,k](m.in)( right ∙ m.f )( m.alist )
      case m: FlatMapped[t, k] => toNode[t,k](m.in)( left ∙ m.f )( m.alist )
      case DependsOn(in, deps) => uniform( existToAny(deps) )( const(Left(in)) ∙ all )
      case Join(in, f) => uniform(in)(f)
    }
    // Inline-eligible tasks: pure computations explicitly marked inlineable.
    def inline[T](t: Task[T]) = t.work match {
      case Pure(eval, true) => Some(eval)
      case _ => None
    }
  }

  // Node over a homogeneous list of inputs (all of type D).
  def uniform[T, D](tasks: Seq[Task[D]])(f: Seq[Result[D]] => Either[Task[T], T]): Node[Task, T] =
    toNode[T, ({ type l[L[x]] = List[L[D]] })#l]( tasks.toList )( f )( AList.seq[D] )

  // General Node over a heterogeneous input shape `k`, described by its AList.
  def toNode[T, k[L[x]]](inputs: k[Task])(f: k[Result] => Either[Task[T], T])(implicit a: AList[k]): Node[Task, T] = new Node[Task, T] {
    type K[L[x]] = k[L]
    val in = inputs
    val alist = a
    def work(results: K[Result]) = f(results)
  }
} | olove/xsbt | tasks/standard/src/main/scala/sbt/std/System.scala | Scala | bsd-3-clause | 2,402 |
package controllers
import play.api.libs.json._
import play.api.mvc._
import models._
import scalikejdbc.DB
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
/** Play controller serving the landing page and a user/projects JSON endpoint. */
class Index extends Controller {

  // JSON writers for the response payload; nested writers are declared first so
  // the macro for UserProjects can pick them up implicitly.
  implicit val userWriter = Json.writes[User]
  implicit val projectWriter = Json.writes[Project]
  implicit val upWriter = Json.writes[UserProject]
  implicit val resultWriter = Json.writes[UserProjects]

  /** Landing page. */
  def index = Action {
    Ok(views.html.index("Your new application is ready."))
  }

  /** Returns user/project assignments as JSON, grouped per user. */
  def user(id: Int) = Action.async {
    val f = findProjects(id)
    for {
      up <- f
    } yield {
      val result = up.groupBy(_.user).map {
        case (u: User, g: List[UserProject]) => UserProjects(u, g.map(_.project))
      }
      Ok(Json.toJson(result))
    }
  }

  // NOTE(review): `id` is accepted but never used -- the query filters on the
  // hard-coded names "a" and "r". Looks like leftover demo code; confirm intent.
  // Also note this runs blocking JDBC on the global ExecutionContext; a
  // dedicated DB dispatcher would be safer under load.
  def findProjects(id: Int): Future[List[UserProject]] = Future {
    DB.readOnly { implicit s =>
      AssignDao().byUserName("a").byProjectName("r").list()
    }
  }
}
| tak-onda/play-macwire-scalikejdbc | app/controllers/Index.scala | Scala | apache-2.0 | 977 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperables.spark.wrappers.estimators
import io.deepsense.deeplang.doperables.multicolumn.SingleColumnParams.SingleTransformInPlaceChoices.NoInPlaceChoice
import io.deepsense.deeplang.params.ParamPair
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
/**
 * Smoke test: checks the CountVectorizer estimator wrapper can be configured
 * and fitted through the generic estimator/model wrapper test harness.
 */
class CountVectorizerEstimatorSmokeTest
  extends AbstractEstimatorModelWrapperSmokeTest {

  override def className: String = "CountVectorizer"

  override val estimator = new CountVectorizerEstimator()
  import estimator._

  // Read tokens from myStringFeatures and write vectors to a fresh column
  // instead of replacing the input column in place.
  override val estimatorParams: Seq[ParamPair[_]] = Seq(
    inputColumn -> NameSingleColumnSelection("myStringFeatures"),
    singleInPlaceChoice -> NoInPlaceChoice().setOutputColumn("testOutputColumn")
  )
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/it/scala/io/deepsense/deeplang/doperables/spark/wrappers/estimators/CountVectorizerEstimatorSmokeTest.scala | Scala | apache-2.0 | 1,338 |
package gameover.fwk.libgdx.gfx
import com.badlogic.gdx.graphics.TextureData
import com.badlogic.gdx.math.Rectangle
import gameover.fwk.gdx.GdxTest
import org.scalatest.FlatSpec
/**
 * Tests for GraphicsLoader: asset scanning, pixmap/texture/nine-patch loading,
 * and animation construction (frame count, frame duration, optional area, and
 * derivation of "stand" animations from "move" animations).
 * Requires a headless libGDX context (initialized below) and test assets on
 * the classpath.
 */
class GraphicsLoaderTest extends FlatSpec {

  GdxTest.initializeLibGdx()

  val graphicsLoader = new GraphicsLoader()

  "GraphicsLoader" should "scan files and load images" in {
    assert(graphicsLoader.size > 0)
  }

  it should "load properly an image as a pixmap and a texture" in {
    graphicsLoader.pixmap("pad_area") match {
      case Some(pixmap) =>
      case _ => fail("Pixmap is not found")
    }
    graphicsLoader.texture("pad_area") match {
      case Some(texture) =>
      case _ => fail("Texture is not found")
    }
  }

  it should "load a nine patch getting information from filename" in {
    graphicsLoader.ninePatch("bar_window") match {
      case Some(ninePatch) =>
      case _ => fail("Nine patch is not found")
    }
    graphicsLoader.texture("pad_area") match {
      case Some(texture) =>
      case _ => fail("Texture is not found")
    }
  }

  // "ara" in the spec name below is a typo for "area" but is kept verbatim:
  // changing it would alter the reported test name.
  it should "load properly an animation with an ara, the accurate number of frames and duration" in {
    graphicsLoader.animation("animwitharea_move_right") match {
      case Some(anim) =>
        assert(anim.anim.getFrameDuration == 0.08f)
        assert(anim.anim.getKeyFrames.length == 4)
        assert(anim.optionalArea.isDefined)
        assert(anim.optionalArea.get == new Rectangle(5f, 5f, 3f, 3f))
      case _ => fail("Animation is not found")
    }
  }

  it should "load properly an animation without an area, the accurate number of frames and duration" in {
    graphicsLoader.animation("name") match {
      case Some(anim) =>
        assert(anim.anim.getFrameDuration == 0.12f)
        assert(anim.anim.getKeyFrames.length == 7)
        assert(anim.optionalArea.isEmpty)
      case _ => fail("Animation is not found")
    }
  }

  it should "create a 'stand' animation from a 'move' animation containing the first frame only" in {
    val moveAnimation = graphicsLoader.animation("animwitharea_move_right")
    assume(moveAnimation.isDefined)
    graphicsLoader.animation("animwitharea_stand_right") match {
      case Some(standAnim) =>
        assert(standAnim.anim.getKeyFrames.length == 1)
        // Compare raw pixel buffers: the single stand frame must equal the
        // first move frame. prepare()/disposePixmap() pair manages native memory.
        val td1: TextureData = standAnim.anim.getKeyFrames()(0).getTexture.getTextureData
        td1.prepare()
        val td2: TextureData = moveAnimation.get.anim.getKeyFrames()(0).getTexture.getTextureData
        td2.prepare()
        assert(td1.consumePixmap().getPixels == td2.consumePixmap().getPixels)
        td1.disposePixmap()
        td2.disposePixmap()
      case _ => fail("Stand animation is not found")
    }
  }
}
| PixelDuck/gameover-game-framework | src/test/scala/gameover/fwk/libgdx/gfx/GraphicsLoaderTest.scala | Scala | mit | 2,705 |
package com.lambtors.poker_api.module.poker.application.turn
import com.lambtors.poker_api.infrastructure.command_bus.Command
case class AddTurnCardToTableCommand(gameId: String) extends Command
| lambtors/poker-api | src/main/scala/com/lambtors/poker_api/module/poker/application/turn/AddTurnCardToTableCommand.scala | Scala | mit | 197 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtInteger, Linked}
// CT600 computations box CP100: profit before tax.
case class CP100(value: Int) extends CtBoxIdentifier(name = "Profit before tax") with CtInteger

// CP100 is a pure link: its value is always copied verbatim from CP45.
object CP100 extends Linked[CP45, CP100] {
  override def apply(source: CP45): CP100 = CP100(source.value)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP100.scala | Scala | apache-2.0 | 913 |
package com.flipkart.connekt.callback.topologies
import akka.stream.scaladsl.{Sink, Source}
import com.flipkart.connekt.busybees.tests.streams.TopologyUTSpec
import com.flipkart.connekt.commons.iomodels.SmsCallbackEvent
import com.flipkart.connekt.commons.metrics.Instrumented
import com.flipkart.connekt.firefly.flows.metrics.LatencyMetrics
/**
 * Pushes representative SMS callback events through the LatencyMetrics flow.
 *
 * NOTE(review): most spec names say "HbaseLookupTopology Test" although the
 * flow under test is LatencyMetrics -- the names look copy-pasted; renaming
 * would change reported test identities, so they are left as-is here.
 * The Thread.sleep calls give the materialized Akka stream time to finish;
 * consider awaiting the Sink future instead to avoid 15-20s per test.
 */
class LatencyMetricsTopologyTest extends TopologyUTSpec with Instrumented {

  val latency = new LatencyMetrics().flow

  // Happy path: delivered event with a full provider cargo payload.
  "HbaseLookupTopology Test" should "run" in {
    val smsCallback = SmsCallbackEvent(messageId = "c37d3855-c349-48c9-b3af-724eade554f0",
      eventType = "sms_delivered",
      receiver = "+911234567843",
      clientId = "affordability",
      appName = "flipkart",
      contextId = "",
      cargo = "{\\"deliveredTS\\":\\"1515060997000\\",\\"cause\\":\\"SUCCESS\\",\\"externalId\\":\\"3440781747693252725-262142534416137710\\",\\"provider\\":\\"gupshup\\",\\"errCode\\":\\"000\\"}",
      timestamp = 1515060997000L,
      eventId = "iaUAuOefuD")

    Source.single(smsCallback).via(latency).runWith(Sink.ignore)
    Thread.sleep(15000)
  }

  // CLICK event whose cargo carries url/useragent instead of delivery data.
  "HbaseLookupTopology Test" should "cargo blank" in {
    val smsCallback = SmsCallbackEvent(messageId = "62b7d6a1-cdb8-414d-8954-972bae4aec2c",
      eventType = "CLICK",
      receiver = "+911234567843",
      clientId = "affordability",
      appName = "flipkart",
      contextId = "",
      cargo = "{\\"name\\":null,\\"url\\":\\"https://dl.flipkart.com/dl/order_details?order_id=OD110852793615523000&token=40f33000a4114bc29ccecd5ba8170d2c&affid=SMSb98efcd97fd08cb92&utm_medium=sms&utm_source=promo&utm_campaign=SMSb98efcd97fd08cb92&u=62b7d6a1-cdb8-414d-8954-972bae4aec2c&utm_content=click&cmpid=sms_promo_SMSb98efcd97fd08cb92\\",\\"useragent\\":\\"Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0\\"}",
      timestamp = 1509625346666L,
      eventId = "iaUAuOefuD")

    Source.single(smsCallback).via(latency).runWith(Sink.ignore)
    Thread.sleep(15000)
  }

  // Delivered event with an empty cargo object.
  "HbaseLookupTopology Test" should "cargo any" in {
    val smsCallback = SmsCallbackEvent(messageId = "089dae77-051d-4146-ad25-ea86ed761608",
      eventType = "sms_delivered",
      receiver = "+911234567843",
      clientId = "affordability",
      appName = "flipkart",
      contextId = "",
      cargo = "{}",
      timestamp = 1509800169441L,
      eventId = "iaUAuOefuD")

    Source.single(smsCallback).via(latency).runWith(Sink.ignore)
    Thread.sleep(15000)
  }

  // sms_received event: cargo lacks deliveredTS entirely.
  "HbaseLookupTopology Test" should "cargo excssep" in {
    val smsCallback = SmsCallbackEvent(messageId = "1c4f3564-3687-4f72-a43d-f75dd111d0",
      eventType = "sms_received",
      receiver = "+911234567843",
      clientId = "affordability",
      appName = "flipkart",
      contextId = "",
      cargo = "{\\"externalId\\":\\"3458299854161064066-166744\\",\\"provider\\":\\"gupshup-v2\\",\\"errCode\\":\\"000\\"}",
      timestamp = 1509614097879L,
      eventId = "iaUAuOefuD")

    Source.single(smsCallback).via(latency).runWith(Sink.ignore)
    Thread.sleep(15000)
  }

  // Malformed deliveredTS ("N/A") must not crash the flow.
  "False DeliveredTS Test" should "run" in {
    val smsCallback = SmsCallbackEvent(messageId = "3ce2de85-be4f-4ccb-a91b-1ce8b424ab4c",
      eventType = "sms_delivered",
      receiver = "+911234567843",
      clientId = "affordability",
      appName = "flipkart",
      contextId = "",
      cargo = "{\\"deliveredTS\\":\\"N/A\\",\\"cause\\":\\"SUCCESS\\",\\"externalId\\":\\"3440781747693252725-262142534416137710\\",\\"provider\\":\\"gupshup\\",\\"errCode\\":\\"000\\"}",
      timestamp = 1509698165000L,
      eventId = "iaUAuOefuD")

    Source.single(smsCallback).via(latency).runWith(Sink.ignore)
    Thread.sleep(20000)
  }
}
| Flipkart/connekt | firefly/src/test/scala/com/flipkart/connekt/callback/topologies/LatencyMetricsTopologyTest.scala | Scala | mit | 3,636 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_10.scalatest2_2_1
import org.jetbrains.plugins.scala.testingSupport.scalatest.SpecialCharactersTest
/**
 * Runs the shared SpecialCharactersTest scenarios against the
 * Scala 2.10 / ScalaTest 2.2.1 base configuration.
 *
 * @author Roman.Shein
 * @since 18.01.2015.
 */
class Scalatest2_10_2_2_1_SpecialCharactersTest extends Scalatest2_10_2_2_1_Base with SpecialCharactersTest {
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_10/scalatest2_2_1/Scalatest2_10_2_2_1_SpecialCharactersTest.scala | Scala | apache-2.0 | 336 |
package com.kolor.docker.api
import dispatch._
import Defaults._
import play.api.libs.json._
import org.joda.time.DateTime
import play.api.libs.iteratee._
import org.slf4j.LoggerFactory
import java.io._
import scala.concurrent.Future
import com.ning.http.client.generators.InputStreamBodyGenerator
import play.api.libs.json.JsArray
import scala.Some
import dispatch.StatusCode
import play.api.libs.json.JsObject
import com.kolor.docker.api.entities._
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream
import java.util.zip.GZIPOutputStream
import org.apache.commons.compress.archivers.tar.TarArchiveEntry
import org.apache.commons.compress.utils.IOUtils
import com.kolor.docker.dsl.Dockerfile
/**
 * Helper trait with shared error-recovery for streaming docker requests.
 *
 * Each recover* variant maps low-level [[DockerResponseCode]] failures to the
 * matching typed exception; they differ only in how a 404 is interpreted
 * (missing container, missing image, or no special case).
 */
sealed trait DockerApiHelper {

  /** Recovers a container-scoped request: a 404 means the container does not exist. */
  def recoverContainerAwareRequest[A](it: Future[Iteratee[Array[Byte], A]])(implicit client: DockerClient, id: ContainerId) = {
    it.recoverWith{
      case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, client))
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(client, err))
      // BUGFIX: `err` was a literal in the message ("...: err"); interpolate it so the cause is visible.
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker request failed (Code: $code): $err", client, Some(t), None))
    }
  }

  /** Recovers an image-scoped request: a 404 means the image does not exist. */
  def recoverImageAwareRequest[A](it: Future[Iteratee[Array[Byte], A]])(implicit client: DockerClient, image: String) = {
    it.recoverWith{
      case DockerResponseCode(404, err) => Future.failed(new NoSuchImageException(image, client))
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(client, err))
      // BUGFIX: interpolate `err` (was a literal), same as above.
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker request failed (Code: $code): $err", client, Some(t), None))
    }
  }

  /** Recovers a generic docker request; only 500 and the catch-all are special-cased. */
  def recoverDockerAwareRequest[A](it: Future[Iteratee[Array[Byte], A]])(implicit client: DockerClient) = {
    it.recoverWith{
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(client, err))
      // BUGFIX: interpolate `err` (was a literal), same as above.
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker request failed (Code: $code): $err", client, Some(t), None))
    }
  }
}
/**
* DockerApi trait
* is composed of ContainerApi and ImagesApi
*/
trait DockerApi extends DockerContainerApi with DockerImagesApi with DockerApiHelper {
private val log = LoggerFactory.getLogger(this.getClass());
/*
val params = Map("commit" -> "true")
val headers = Map("Content-type" -> "application/json")
val solr = host("localhost", 8983)
val req = solr / "update" / "json" << a <<? params <:< headers
*/
  /**
   * Authenticates against the docker auth endpoint.
   *
   * Credentials ([[DockerAuthCredentials]]) are sent as a JSON body; any other
   * [[DockerAuth]] value (e.g. anonymous) posts an empty body.
   *
   * @return true on HTTP 200/201, false otherwise.
   *         NOTE(review): a 500 also yields false (see the commented-out throw)
   *         -- server errors are silently treated as "not authenticated";
   *         confirm this is intended.
   */
  def dockerAuth(authInfo: DockerAuth)(implicit docker: DockerClient, fmt: Format[DockerAuth]): Future[Boolean] = {
    val req = authInfo match {
      case data:DockerAuthCredentials => url(Endpoints.dockerAuth.toString).POST << Json.prettyPrint(Json.toJson(data)) <:< Map("Content-Type" -> "application/json")
      case _ => url(Endpoints.dockerAuth.toString).POST <:< Map("Content-Type" -> "application/json")
    }

    docker.dockerRequest(req).map{
      case Right(resp) if resp.getStatusCode() == 200 => true
      case Right(resp) if resp.getStatusCode() == 201 => true
      case Right(resp) if resp.getStatusCode() == 500 => false // throw new DockerInternalServerErrorException(docker)
      case Right(resp) => false
      case Left(t) => throw new DockerRequestException(s"docker auth request failed", docker, Some(t), Some(req))
    }
  }
/**
* retrieve docker info
*/
def dockerInfo()(implicit docker: DockerClient, fmt: Format[DockerInfo]): Future[DockerInfo] = {
val req = url(Endpoints.dockerInfo.toString).GET
docker.dockerJsonRequest[DockerInfo](req).map {
case Right(v) => v
case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
case Left(t) => throw new DockerRequestException(s"docker info request failed", docker, Some(t), Some(req))
}
}
/**
* PING docker host
*/
def dockerPing()(implicit docker: DockerClient): Future[Boolean] = {
val req = url(Endpoints.dockerPing.toString).GET
docker.httpRequest(req).map {
case Right(str) if (str.toLowerCase().equalsIgnoreCase("ok")) => true
case Right(str) => false
case Left(t) => throw new DockerRequestException(s"docker ping request failed", docker, Some(t), Some(req))
}
}
/**
* retrieve docker version
*/
def dockerVersion()(implicit docker: DockerClient, fmt: Format[DockerVersion]): Future[DockerVersion] = {
val req = url(Endpoints.dockerVersion.toString).GET
docker.dockerJsonRequest[DockerVersion](req).map {
case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
case Right(version) => version
case Left(t) => throw new DockerRequestException(s"docker version request failed", docker, Some(t), Some(req))
}
}
  /**
   * Retrieves the list of docker events in the given time window.
   *
   * @param since start of window; defaults to "now minus 1000 seconds" (100*10).
   *              NOTE(review): the previous doc said "past 10 seconds" but the
   *              code subtracts 1000s -- confirm which was intended.
   * @param until optional end of window (unbounded when None)
   * @return one Either per event: Left(error info) or Right(status message)
   */
  def dockerEvents(since: Option[DateTime] = None, until: Option[DateTime] = None)(implicit docker: DockerClient, fmt: Format[DockerStatusMessage], errorFmt: Format[DockerErrorInfo], progressFmt: Format[DockerProgressInfo]): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
    val req = url(Endpoints.dockerEvents(Some(since.map(_.getMillis() / 1000).getOrElse(DateTime.now().getMillis()/1000 - (100*10))), until.map(_.getMillis() / 1000)).toString).GET
    recoverDockerAwareRequest(docker.dockerRequestIteratee(req)(_ => DockerIteratee.statusStream)).flatMap(_.run)
  }
/**
* attach to dockers event endpoint
* enabled to receive events in realtime by the given consumer iteratee.
* You can use @{DockerEnumeratee.statusStream} to transform the byte stream into a stream of Either[DockerErrorInfo,DockerStatusMessage]
*/
/**
 * Attach to docker's event endpoint and push raw event bytes into the given
 * consumer iteratee in realtime. Use DockerEnumeratee.statusStream to turn
 * the raw bytes into Either[DockerErrorInfo, DockerStatusMessage] values.
 */
def dockerEventsStreamIteratee[T](consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient, fmt: Format[DockerStatusMessage], errorFmt: Format[DockerErrorInfo], progressFmt: Format[DockerProgressInfo]): Future[Iteratee[Array[Byte], T]] = {
  val eventsRequest = url(Endpoints.dockerEvents().toString).GET
  recoverDockerAwareRequest(docker.dockerRequestIteratee(eventsRequest)(_ => consumer))
}
/**
* attach to dockers event endpoint
* retrieves events in realtime and execute a callback on every received Either[DockerErrorInfo,DockerStatusMessage] event
*/
/**
 * Attach to docker's event endpoint and invoke the given callback for every
 * received Either[DockerErrorInfo, DockerStatusMessage] event, in realtime.
 * The returned future completes when the event stream terminates.
 */
def dockerEventsStream(fn: (Either[DockerErrorInfo,DockerStatusMessage]) => Unit)(implicit docker: DockerClient, fmt: Format[DockerStatusMessage], errorFmt: Format[DockerErrorInfo], progressFmt: Format[DockerProgressInfo]): Future[Unit] = {
  // decode the raw byte stream into status messages, then feed each one to the callback
  val sink = DockerEnumeratee.statusStream() &>> Iteratee.foreach(fn)
  dockerEventsStreamIteratee(sink).flatMap(_.run)
}
/**
* build image from dockerfile
* allows realtime processing of the response by given iteratee
*/
/**
 * Build an image from a dockerfile packaged as a tarball.
 * Allows realtime processing of the response by the given consumer iteratee.
 * @param tarFile tar archive containing the Dockerfile (must exist)
 * @throws RuntimeException if tarFile does not exist
 */
def dockerBuildIteratee[T](tarFile: java.io.File, tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[Iteratee[Array[Byte], T]] = {
  if (!tarFile.exists()) throw new RuntimeException(s"File $tarFile does not exist")
  val endpoint = url(Endpoints.dockerBuild(tag, verbose, nocache, forceRm).toString).POST
  val req = auth match {
    case DockerAnonymousAuth => endpoint <<< tarFile <:< Map("Content-Type" -> "application/x-tar")
    // bugfix: the authenticated branch previously called Endpoints.dockerBuild(tag, verbose, nocache)
    // and silently dropped the forceRm flag
    case data => endpoint <:< Map("X-Registry-Config" -> data.asBase64Encoded) <<< tarFile <:< Map("Content-Type" -> "application/x-tar")
  }
  recoverDockerAwareRequest(docker.dockerRequestIteratee(req)(_ => consumer))
}
/**
* build image from Dockerfile DSL
* val temp = dockerfile.asTempfile
*/
/**
 * Build an image from the Dockerfile DSL.
 * Materializes the dockerfile into a temp file and delegates to dockerBuildIteratee.
 */
def dockerfileBuildIteratee[T](dockerfile: Dockerfile, tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[Iteratee[Array[Byte], T]] = {
  dockerBuildIteratee(dockerfile.asTempfile, tag, verbose, nocache, forceRm)(consumer)
}
/**
* build image from dockerfile
* collects and aggregates all docker messages into a list on completion
*/
/**
 * Build an image from a dockerfile tarball.
 * Collects and aggregates all docker messages into a list on completion.
 */
def dockerBuild(tarFile: java.io.File, tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  // aggregate every status/error message emitted by the build
  val collector = DockerIteratee.statusStream
  dockerBuildIteratee(tarFile, tag, verbose, nocache, forceRm)(collector).flatMap(_.run)
}
/**
* build image from Dockerfile DSL
* collect and aggregate all messages into a list
*/
/**
 * Build an image from the Dockerfile DSL.
 * Collects and aggregates all messages into a list on completion.
 */
def dockerfileBuild(dockerfile: Dockerfile, tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  dockerBuild(dockerfile.asTempfile, tag, verbose, nocache, forceRm)
}
/*
* convenience method to build from dockerfile provided as string sequence
* and retrieve output as list
*/
/**
 * Convenience method: build an image from a dockerfile provided as a string
 * sequence and retrieve the aggregated build output as a list.
 */
def dockerBuildFrom(tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(withDockerfile: () => Seq[String])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  val collector = DockerIteratee.statusStream
  dockerBuildIterateeFrom(tag, verbose, nocache, forceRm)(collector)(withDockerfile).flatMap(_.run)
}
/**
* convenience method to build from dockerfile provided as string sequence
*/
/**
 * Convenience method: build an image from a dockerfile provided as a string sequence.
 * Writes the dockerfile lines to a temp file, wraps it in a gzipped tarball and
 * POSTs that to the docker build endpoint, feeding the response into `consumer`.
 * Temp files are deleted once docker has consumed the request body.
 */
def dockerBuildIterateeFrom[T](tag: String, verbose: Boolean = false, nocache: Boolean = false, forceRm: Boolean = false)(consumer: Iteratee[Array[Byte], T])(withDockerfile: () => Seq[String])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[Iteratee[Array[Byte], T]] = {
  import scala.util.control.NonFatal
  val dockerFileString = withDockerfile().mkString("\n")
  // temp = the gzipped tarball that gets POSTed; tempDockerfile = the plain Dockerfile inside it
  val temp = File.createTempFile("reactive-docker", "dockerfile")
  val tempDockerfile = File.createTempFile("reactive-docker", "dockerfile")
  temp.deleteOnExit()
  tempDockerfile.deleteOnExit()
  val os = new BufferedOutputStream(new FileOutputStream(tempDockerfile))
  try {
    os.write(dockerFileString.getBytes)
  } catch {
    // best effort — but only swallow non-fatal errors (was: case t: Throwable)
    case NonFatal(_) => // ignore
  } finally {
    os.close
  }
  var out: TarArchiveOutputStream = null
  try {
    out = new TarArchiveOutputStream(
      new GZIPOutputStream(
        new BufferedOutputStream(new FileOutputStream(temp))))
    val entry = new TarArchiveEntry(tempDockerfile, "Dockerfile")
    out.putArchiveEntry(entry)
    // bugfix: the FileInputStream was previously never closed (resource leak)
    val in = new FileInputStream(tempDockerfile)
    try {
      IOUtils.copy(in, out)
    } finally {
      in.close()
    }
    out.closeArchiveEntry
  } finally {
    if (out != null) {
      out.finish
      out.close
    }
  }
  log.info(s"DockerTarFile: ${temp.getAbsolutePath()} size=${temp.length()}")
  val req = auth match {
    case DockerAnonymousAuth => url(Endpoints.dockerBuild(tag, verbose, nocache, forceRm).toString).POST <<< temp <:< Map("Content-Type" -> "application/x-tar")
    // bugfix: the authenticated branch previously dropped the forceRm flag
    case data => url(Endpoints.dockerBuild(tag, verbose, nocache, forceRm).toString).POST <:< Map("X-Registry-Config" -> data.asBase64Encoded) <<< temp <:< Map("Content-Type" -> "application/x-tar")
  }
  recoverDockerAwareRequest(docker.dockerRequestIteratee(req){_ =>
    // the request body has been uploaded at this point — clean up the temp files
    try {
      tempDockerfile.delete()
      temp.delete()
    } catch {
      case NonFatal(_) => // ignore cleanup failures
    }
    consumer
  })
}
}
/**
* ContainerApi trait
* contains all container-related api operations
*/
/**
 * ContainerApi trait
 * contains all container-related api operations
 */
trait DockerContainerApi extends DockerApiHelper {
  private val log = LoggerFactory.getLogger(this.getClass());

  /**
   * list containers
   * @param all also include non-running containers
   * @param showSize include container size information
   */
  def containers(all:Boolean = true, limit: Option[Int] = None, sinceId: Option[String] = None, beforeId: Option[String] = None, showSize: Boolean = true)(implicit docker: DockerClient, fmt: Format[Container]): Future[Seq[Container]] = {
    val req = url(Endpoints.containers(all, limit, sinceId, beforeId, showSize).toString).GET
    docker.dockerJsonRequest[Seq[Container]](req).map {
      case Left(StatusCode(400)) => throw new DockerBadParameterException("list containers - bad parameter", docker, req)
      case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
      case Left(StatusCode(302)) => throw new DockerRequestException(s"list containers failed (Code: 302)", docker, None, Some(req))
      case Right(v) => v
      case Left(t) => throw new DockerRequestException(s"list containers request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * create a new container from given image
   * returns the new containerId and a sequence of warnings
   */
  def containerCreate(image: String, config: ContainerConfiguration, name: Option[String] = None)(implicit docker: DockerClient, fmt: Format[ContainerConfiguration]): Future[(ContainerId, Seq[String])] = {
    val cfg = config.copy(image = Some(image))
    val req = url(Endpoints.containerCreate(name).toString).POST << Json.prettyPrint(Json.toJson(cfg)) <:< Map("Content-Type" -> "application/json")
    docker.dockerRequest(req).map {
      case Right(resp) if resp.getStatusCode() == 404 =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val id = json.flatMap(j => (j \ "Id").asOpt[String]).map(ContainerId(_)).getOrElse(ContainerId.emptyId)
        throw new NoSuchContainerException(id, docker)
      case Right(resp) if resp.getStatusCode() == 406 =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val id = json.flatMap(j => (j \ "Id").asOpt[String]).map(ContainerId(_)).getOrElse(ContainerId.emptyId)
        throw new ContainerNotRunningException(id, docker)
      case Right(resp) if resp.getStatusCode() == 409 => throw new DockerConflictException(s"create container request failed: ${resp.getResponseBody()}", docker)
      case Right(resp) if resp.getStatusCode() == 500 => throw new DockerInternalServerErrorException(docker)
      case Right(resp) if (Seq(200, 201).contains(resp.getStatusCode())) =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val id = json.flatMap(j => (j \ "Id").asOpt[String]).map(ContainerId(_)).getOrElse(ContainerId.emptyId)
        val warnings: Seq[String] = json.flatMap(j => (j \ "Warnings").asOpt[Seq[String]]).getOrElse(Seq.empty)
        (id, warnings)
      case Right(resp) => throw new DockerRequestException(s"create container request failed (response code ${resp.getStatusCode()})", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"create container request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * inspect a container
   */
  def containerInspect(id: ContainerId)(implicit docker: DockerClient, fmt: Format[ContainerInfo]): Future[ContainerInfo] = {
    val req = url(Endpoints.containerInspect(id).toString).GET
    docker.dockerJsonRequest[ContainerInfo](req).map {
      case Right(info) => info
      case Left(StatusCode(404)) => throw new NoSuchContainerException(id, docker)
      case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
      case Left(t) => throw new DockerRequestException(s"inspect container request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * list running processes of container
   * @return (column titles, one row of values per process)
   */
  def containerProcesses(id: ContainerId, psArgs: Option[String] = None)(implicit docker: DockerClient): Future[(Seq[String], Seq[Seq[String]])] = {
    val req = url(Endpoints.containerProcesses(id, psArgs).toString).GET
    docker.dockerRequest(req).map {
      case Right(resp) if resp.getStatusCode() == 404 => throw new NoSuchContainerException(id, docker)
      case Right(resp) if resp.getStatusCode() == 500 =>
        log.error(s"ResponseError: ${resp.getResponseBody}")
        throw new DockerInternalServerErrorException(docker)
      case Right(resp) if resp.getStatusCode() == 200 =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val columns: Option[Seq[String]] = json.flatMap(j => (j \ "Titles").asOpt[Seq[String]])
        val rows: Option[JsArray] = json.flatMap(j => (j \ "Processes").asOpt[JsArray])
        val processes: Seq[Seq[String]] = rows.flatMap{items =>
          items.asOpt[Seq[Seq[String]]]
        }.getOrElse(Seq.empty)
        (columns.getOrElse(Seq.empty), processes)
      // bugfix: any other status code previously fell through to a MatchError
      case Right(resp) => throw new DockerRequestException(s"container processes request failed (Code ${resp.getStatusCode()})", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"container processes request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * retrieve container's changelog
   */
  def containerChangelog(id: ContainerId)(implicit docker: DockerClient, fmt: Format[ContainerChangelogRecord]): Future[Seq[ContainerChangelogRecord]] = {
    val req = url(Endpoints.containerChangelog(id).toString).GET
    docker.dockerJsonRequest[Seq[ContainerChangelogRecord]](req).map {
      case Right(log) => log
      case Left(StatusCode(404)) => throw new NoSuchContainerException(id, docker)
      case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
      case Left(t) => throw new DockerRequestException(s"container changelog request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * export container as tarball
   * allows realtime response processing by provided consumer iteratee
   */
  def containerExportIteratee[T](id: ContainerId)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    val req = url(Endpoints.containerExport(id).toString).GET
    implicit val containerId = id
    recoverContainerAwareRequest(docker.dockerRequestIteratee(req)(_ => consumer))
  }

  /**
   * export container as tarball to given file
   */
  def containerExport(id: ContainerId, toFile: java.io.File)(implicit docker: DockerClient): Future[java.io.File] = {
    val os = new FileOutputStream(toFile)
    containerExportIteratee(id)(Iteratee.foreach[Array[Byte]](d => os.write(d)))
      .flatMap(_.run)
      // bugfix: close the stream on failure as well, not only on success
      .andThen { case _ => os.close() }
      .map(_ => toFile)
  }

  /**
   * export container as tarball to given outputstream
   * note: the stream is closed when the export completes (also on failure)
   */
  def containerExport(id: ContainerId, os: java.io.OutputStream)(implicit docker: DockerClient): Future[java.io.OutputStream] = {
    containerExportIteratee(id)(Iteratee.foreach[Array[Byte]](d => os.write(d)))
      .flatMap(_.run)
      // bugfix: close the stream on failure as well, not only on success
      .andThen { case _ => os.close() }
      .map(_ => os)
  }

  /**
   * start a container
   */
  def containerStart(id: ContainerId, config: Option[ContainerHostConfiguration] = None)(implicit docker: DockerClient, fmt: Format[ContainerHostConfiguration]): Future[Boolean] = {
    // docker requires a host configuration body — fall back to the default one
    val req = config match {
      case Some(cfg) => url(Endpoints.containerStart(id).toString).POST << Json.prettyPrint(Json.toJson(cfg)) <:< Map("Content-Type" -> "application/json")
      case _ => url(Endpoints.containerStart(id).toString).POST << Json.prettyPrint(Json.toJson(ContainerHostConfiguration())) <:< Map("Content-Type" -> "application/json")
    }
    docker.dockerRequest(req).map {
      case Right(resp) if (resp.getStatusCode() == 200) => true
      case Right(resp) if (resp.getStatusCode() == 204) => true
      case Right(resp) if (resp.getStatusCode() == 304) => true // new with 1.13 => container status was not modified
      case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchContainerException(id, docker)
      case Right(resp) => throw new DockerRequestException(s"unable to start container $id (StatusCode: ${resp.getStatusCode()}): ${resp.getStatusText()}", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"unable to start container $id", docker, Some(t), Some(req))
    }
  }

  /**
   * stop a container
   * note: throws an exception is container is not running
   */
  def containerStop(id: ContainerId, timeoutToKill: Int = 60)(implicit docker: DockerClient): Future[Boolean] = {
    val req = url(Endpoints.containerStop(id, timeoutToKill).toString).POST
    docker.dockerRequest(req).map {
      case Right(resp) if (resp.getStatusCode() == 200) => true
      case Right(resp) if (resp.getStatusCode() == 204) => true
      case Right(resp) if (resp.getStatusCode() == 304) => true // new with 1.13 => container status was not modified
      case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchContainerException(id, docker)
      case Right(resp) => throw new DockerRequestException(s"unable to stop container $id (Code ${resp.getStatusCode()}): ${resp.getResponseBody()}", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"unable to stop container $id", docker, Some(t), Some(req))
    }
  }

  /**
   * restart container
   */
  def containerRestart(id: ContainerId, timeoutToKill: Int = 60)(implicit docker: DockerClient): Future[Boolean] = {
    val req = url(Endpoints.containerRestart(id, timeoutToKill).toString).POST
    docker.dockerRequest(req).map {
      case Right(resp) if (resp.getStatusCode() == 200) => true
      case Right(resp) if (resp.getStatusCode() == 204) => true
      case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchContainerException(id, docker)
      case Right(resp) => throw new DockerRequestException(s"unable to restart container $id (StatusCode: ${resp.getStatusCode()}): ${resp.getStatusText()}", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"unable to restart container $id", docker, Some(t), Some(req))
    }
  }

  /**
   * kill container
   */
  def containerKill(id: ContainerId)(implicit docker: DockerClient): Future[Boolean] = {
    val req = url(Endpoints.containerKill(id).toString).POST
    docker.dockerRequest(req).map {
      case Right(resp) if (resp.getStatusCode() == 200) => true
      case Right(resp) if (resp.getStatusCode() == 204) => true
      case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchContainerException(id, docker)
      case Right(resp) => throw new DockerRequestException(s"unable to kill container $id (StatusCode: ${resp.getStatusCode()}): ${resp.getStatusText()}", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"unable to kill container $id", docker, Some(t), Some(req))
    }
  }

  /**
   * attach (stream) to container`s stdin/stdout logs
   */
  def attachLog[T](id: ContainerId, stdout: Boolean = true, stderr: Boolean = false, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    val req = url(Endpoints.containerLogs(id, true, stdout, stderr, withTimestamps, tail).toString).GET
    docker.dockerRequestIteratee(req)(_ => consumer).recoverWith{
      case DockerResponseCode(400, err) => Future.failed(new DockerBadParameterException(s"unable to attach to container logs stream of $id", docker, req))
      case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, docker))
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(docker, err))
      // bugfix: err was not interpolated into the message ("... : err")
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker streaming attach to container logs of $id failed (Code: $code): $err", docker, Some(t), None))
    }
  }

  /**
   * fetch container stdin/stdout logs
   */
  def fetchLog[T](id: ContainerId, stdout: Boolean = true, stderr: Boolean = false, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T] = Iteratee.ignore)(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    val req = url(Endpoints.containerLogs(id, false, stdout, stderr, withTimestamps, tail).toString).GET
    docker.dockerRequestIteratee(req)(_ => consumer).recoverWith{
      case DockerResponseCode(400, err) => Future.failed(new DockerBadParameterException(s"unable to fetch logs of container $id", docker, req))
      case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, docker))
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(docker, err))
      // bugfix: err was not interpolated into the message ("... : err")
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker fetch logs of container $id failed (Code: $code): $err", docker, Some(t), None))
    }
  }

  /**
   * convenience method to attach to stdout stream
   */
  def attachStdout[T](id: ContainerId, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    attachLog[T](id, true, false, withTimestamps, tail)(consumer)
  }

  /**
   * convenience method to fetch stdout
   */
  def fetchStdout[T](id: ContainerId, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    fetchLog[T](id, true, false, withTimestamps, tail)(consumer)
  }

  /**
   * convenience method to attach to stderr stream
   */
  def attachStderr[T](id: ContainerId, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    attachLog[T](id, false, true, withTimestamps, tail)(consumer)
  }

  /**
   * convenience method to fetch stderr
   */
  def fetchStderr[T](id: ContainerId, withTimestamps: Boolean = false, tail: Option[Int] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    fetchLog[T](id, false, true, withTimestamps, tail)(consumer)
  }

  /**
   * attach to a containers stdout, stderr, logs and stdin channel and stream their contents
   */
  def attachStream[T](id: ContainerId, stdout: Boolean = true, stderr: Boolean = false, logs: Boolean = false, stdin: Option[Enumerator[Array[Byte]]] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    logs match {
      case true => attachLog[T](id, stdout, stderr)(consumer) // attach to logs => proxy to attachLogs
      case _ => {
        val req = stdin match {
          case Some(en) =>
            // transform stdin enumerator into an inputstream and attach to request
            val os = new PipedOutputStream()
            val is = new PipedInputStream(os)
            (en |>>> Iteratee.foreach{b =>
              os.write(b)
            }).map{_ =>
              // stdin enumerator is exhausted — close both pipe ends
              os.close()
              is.close()
            }
            url(Endpoints.containerAttach(id, true, true, stdout, stderr, logs).toString)
              .POST
              .subject.underlying(_.setBody(new InputStreamBodyGenerator(is)))
          case _ => url(Endpoints.containerAttach(id, true, false, stdout, stderr, logs).toString).POST
        }
        docker.dockerRequestIteratee(req)(_ => consumer).recoverWith{
          case DockerResponseCode(400, err) => Future.failed(new DockerBadParameterException(s"unable to attach to container $id", docker, req))
          case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, docker))
          case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(docker, err))
          // bugfix: err was not interpolated into the message ("... : err")
          case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker streaming attach to container $id failed (Code: $code): $err", docker, Some(t), None))
        }
      }
    }
  }

  /**
   * convenience method to attach an enumerator to stdin
   * returns a future which will be completed once the enumerator is done / detached
   */
  def attachStdin(id: ContainerId, en: Enumerator[Array[Byte]])(implicit docker: DockerClient): Future[Unit] = {
    // TODO: use containerLogs
    val os = new PipedOutputStream()
    val is = new PipedInputStream(os)
    (en |>>> Iteratee.foreach{b =>
      os.write(b)
    }).map{_ =>
      os.close()
      is.close()
    }
    val req = url(Endpoints.containerAttach(id, true, true, false, false, false).toString)
      .POST
      .subject.underlying(_.setBody(new InputStreamBodyGenerator(is)))
    val consumer = Iteratee.ignore[Array[Byte]]
    docker.dockerRequestIteratee(req)(_ => consumer).recoverWith{
      case DockerResponseCode(400, err) => Future.failed(new DockerBadParameterException(s"unable to attach to container $id", docker, req))
      case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, docker))
      case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(docker, err))
      // bugfix: err was not interpolated into the message ("... : err")
      case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker streaming attach to container $id failed (Code: $code): $err", docker, Some(t), None))
    // bugfix: was .map(_ => Unit), which yields the Unit companion object, not the unit value
    }.flatMap(_.run).map(_ => ())
  }

  /**
   * attach to stdout, stderr and/or logs channel
   * returns data immediately - no streaming
   */
  def attach[T](id: ContainerId, stdout: Boolean = true, stderr: Boolean = false, logs: Boolean = false)(consumer: Iteratee[Array[Byte], T] = Iteratee.ignore)(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    logs match {
      case true => fetchLog[T](id, stdout, stderr)(consumer) // attach to logs? => redirect to fetchLog endpoint
      case _ => {
        val req = url(Endpoints.containerAttach(id, false, false, stdout, stderr, logs).toString).POST
        docker.dockerRequestIteratee(req)(_ => consumer).recoverWith{
          case DockerResponseCode(400, err) => Future.failed(new DockerBadParameterException(s"unable to attach to container $id", docker, req))
          case DockerResponseCode(404, err) => Future.failed(new NoSuchContainerException(id, docker))
          case DockerResponseCode(500, err) => Future.failed(new DockerInternalServerErrorException(docker, err))
          // bugfix: err was not interpolated into the message ("... : err")
          case t @ DockerResponseCode(code, err) => Future.failed(new DockerRequestException(s"Docker attach to container $id failed (Code: $code): $err", docker, Some(t), None))
        }
      }
    }
  }

  /**
   * wait for container to terminate an execute given callback on exit code
   */
  def containerWait[T](id: ContainerId)(action: Int => T)(implicit docker: DockerClient): Future[T] = {
    val req = url(Endpoints.containerWait(id).toString).POST
    docker.dockerRequest(req).map {
      case Right(resp) if resp.getStatusCode() == 404 => throw new NoSuchContainerException(id, docker)
      case Right(resp) if resp.getStatusCode() == 500 => throw new DockerInternalServerErrorException(docker)
      case Right(resp) if resp.getStatusCode() == 200 =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val statusCode: Option[Int] = json.flatMap(j => (j \ "StatusCode").asOpt[Int])
        action(statusCode.getOrElse(-1))
      // bugfix: other status codes and transport failures previously produced a MatchError
      case Right(resp) => throw new DockerRequestException(s"container wait request failed (Code ${resp.getStatusCode()})", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"container wait request failed", docker, Some(t), Some(req))
    }
  }

  /**
   * remove container
   */
  def containerRemove(id: ContainerId, withVolumes: Boolean = false, force: Boolean = false)(implicit docker: DockerClient): Future[Boolean] = {
    val req = url(Endpoints.containerRemove(id, withVolumes, force).toString).DELETE
    docker.dockerRequest(req).map {
      case Right(resp) if (resp.getStatusCode() == 204) => true
      case Right(resp) if (resp.getStatusCode() == 400) => throw new DockerBadParameterException(s"removing container $id failed", docker, req)
      case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchContainerException(id, docker)
      case Right(resp) => throw new DockerRequestException(s"unable to remove container $id (Code ${resp.getStatusCode()}): ${resp.getResponseBody()}", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"unable to remove container $id", docker, Some(t), Some(req))
    }
  }

  /**
   * copy a resource from container
   * allows realtime processing of response by given iterratee
   */
  def containerCopyResourceIteratee[T](id: ContainerId, resourcePath: String)(consumer: Iteratee[Array[Byte], T] = Iteratee.ignore)(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
    val json = Json.obj("Resource" -> resourcePath)
    val req = url(Endpoints.containerCopy(id).toString).POST << Json.prettyPrint(json) <:< Map("Content-Type" -> "application/json")
    implicit val containerId = id
    recoverContainerAwareRequest(docker.dockerRequestIteratee(req)(_ => consumer))
  }

  /**
   * copy resource from container to targetFile
   */
  def containerCopyResource(id: ContainerId, resourcePath: String, toFile: java.io.File)(implicit docker: DockerClient): Future[java.io.File] = {
    val os = new FileOutputStream(toFile)
    containerCopyResourceIteratee(id, resourcePath)(Iteratee.foreach(b => os.write(b)))
      .flatMap(_.run)
      // bugfix: close the stream on failure as well, not only on success
      .andThen { case _ => os.close() }
      .map(_ => toFile)
  }

  /**
   * copy resource from container to outputstream
   * note: the stream is closed when the copy completes (also on failure)
   */
  def containerCopyResource(id: ContainerId, resourcePath: String, os: java.io.OutputStream)(implicit docker: DockerClient): Future[java.io.OutputStream] = {
    containerCopyResourceIteratee(id, resourcePath)(Iteratee.foreach(b => os.write(b)))
      .flatMap(_.run)
      // bugfix: close the stream on failure as well, not only on success
      .andThen { case _ => os.close() }
      .map(_ => os)
  }

  /**
   * commit a container to repoTag without commit message
   */
  def containerCommit(id: ContainerId, repoTag: RepositoryTag, runConfig: Option[ContainerConfiguration] = None, pause: Boolean = true)(implicit docker: DockerClient, fmt: Format[ContainerConfiguration]): Future[ContainerId] = {
    val cfg = runConfig.map(j => Json.prettyPrint(Json.toJson(j)))
    val req = url(Endpoints.containerCommit(id, repoTag.repo, repoTag.tag, cfg, None, None, pause).toString).POST << cfg.getOrElse("{}")
    docker.dockerRequest(req).map {
      case Right(resp) if resp.getStatusCode() == 404 => throw new NoSuchContainerException(id, docker)
      case Right(resp) if resp.getStatusCode() == 500 => throw new DockerInternalServerErrorException(docker)
      case Right(resp) if (Seq(200, 201).contains(resp.getStatusCode())) =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val newId: ContainerId = json.flatMap(j => (j \ "Id").asOpt[String]).map(ContainerId(_)).getOrElse(ContainerId.emptyId)
        newId
      // bugfix: other status codes previously produced a MatchError
      case Right(resp) => throw new DockerRequestException(s"commit container $id (tag: ${repoTag.toString}) failed (Code ${resp.getStatusCode()})", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"commit container $id (tag: ${repoTag.toString}) failed", docker, Some(t), Some(req))
    }
  }

  /**
   * commit a container to repoTag with comment and author
   */
  def containerCommitWithMessage(id: ContainerId, repoTag: RepositoryTag, withMessageAndAuthor: (String, Option[String]), runConfig: Option[ContainerConfiguration] = None, pause: Boolean = true)(implicit docker: DockerClient, fmt: Format[ContainerConfiguration]): Future[ContainerId] = {
    val commitMsg = withMessageAndAuthor
    val cfg = runConfig.map(j => Json.prettyPrint(Json.toJson(j)))
    val req = url(Endpoints.containerCommit(id, repoTag.repo, repoTag.tag, cfg, Some(commitMsg._1), commitMsg._2, pause).toString).POST << cfg.getOrElse("{}")
    docker.dockerRequest(req).map {
      case Right(resp) if resp.getStatusCode() == 404 => throw new NoSuchContainerException(id, docker)
      case Right(resp) if resp.getStatusCode() == 500 => throw new DockerInternalServerErrorException(docker)
      case Right(resp) if (Seq(200, 201).contains(resp.getStatusCode())) =>
        val json = Json.parse(resp.getResponseBody()).asOpt[JsObject]
        val newId: ContainerId = json.flatMap(j => (j \ "Id").asOpt[String]).map(ContainerId(_)).getOrElse(ContainerId.emptyId)
        newId
      // bugfix: other status codes previously produced a MatchError
      case Right(resp) => throw new DockerRequestException(s"commit container $id failed (Code ${resp.getStatusCode()})", docker, None, Some(req))
      case Left(t) => throw new DockerRequestException(s"commit container $id failed", docker, Some(t), Some(req))
    }
  }
}
/**
* ImagesApi trait
* contains all images related api operations
*/
trait DockerImagesApi extends DockerApiHelper {
private val log = LoggerFactory.getLogger(this.getClass());
/**
* list images
*/
/**
 * list images known to the docker host
 * @param all when true also include intermediate images
 */
def images(all: Boolean = false)(implicit docker: DockerClient, fmt: Format[DockerImage]): Future[Seq[DockerImage]] = {
  val request = url(Endpoints.images(all).toString).GET
  docker.dockerJsonRequest[Seq[DockerImage]](request).map {
    case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
    case Right(imageList) => imageList
    case Left(other) => throw new DockerRequestException(s"list images request failed", docker, Some(other), Some(request))
  }
}
/**
* create or pull an image from registry
* allows realtime processing of response by given consumer iteratee
*/
/**
 * create or pull an image from a registry
 * allows realtime processing of the response by the given consumer iteratee
 */
def imageCreateIteratee[T](repoTag: RepositoryTag, registry: Option[String] = None, fromSrc: Option[String] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[Iteratee[Array[Byte], T]] = {
  val endpoint = url(Endpoints.imageCreate(repoTag.repo, fromSrc, Some(repoTag.repo), repoTag.tag, registry).toString).POST
  // attach registry credentials only for non-anonymous auth
  val req = auth match {
    case DockerAnonymousAuth => endpoint
    case credentials => endpoint <:< Map("X-Registry-Auth" -> credentials.asBase64Encoded)
  }
  implicit val image = repoTag.repo
  recoverImageAwareRequest(docker.dockerRequestIteratee(req)(_ => consumer))
}
/**
* create or pull an image from registry
* collects and returns list of messages/errors on completion
*/
/**
 * create or pull an image from a registry
 * collects and returns the list of messages/errors on completion
 */
def imageCreate(repoTag: RepositoryTag, registry: Option[String] = None, fromSrc: Option[String] = None)(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  val collector = DockerIteratee.statusStream
  imageCreateIteratee(repoTag, registry, fromSrc)(collector).flatMap(_.run)
}
/**
* insert a resource into image from remote location
* allows realtime processing of reponse by given consumer iteratee
* removed with API 1.12
*/
/**
 * insert a resource into an image from a remote location
 * allows realtime processing of the response by the given consumer iteratee
 * note: this endpoint was removed with API 1.12
 */
def imageInsertResourceIteratee[T](image: String, imageTargetPath: String, sourceFileUrl: java.net.URI)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
  implicit val img = image
  val insertRequest = url(Endpoints.imageInsert(image, imageTargetPath, sourceFileUrl).toString).POST
  recoverImageAwareRequest(docker.dockerRequestIteratee(insertRequest)(_ => consumer))
}
/**
* insert a resource into image from remote location
* collects and returns list of messages/errors on completion
* removed with API 1.12
*/
/**
 * insert a resource into an image from a remote location and collect all
 * messages/errors into a list on completion
 * note: only supported on API v1.9 clients — the endpoint was removed with API 1.12
 */
def imageInsertResource(image: String, imageTargetPath: String, sourceFileUrl: java.net.URI)(implicit docker: DockerClient): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  docker match {
    case legacy: DockerClientV19 =>
      imageInsertResourceIteratee(image, imageTargetPath, sourceFileUrl)(DockerIteratee.statusStream).flatMap(_.run)
    case _ =>
      throw new RuntimeException("removed with API v1.12")
  }
}
/**
* inspect an image
*/
/**
 * inspect an image
 * @throws NoSuchImageException if the image is unknown (HTTP 404)
 */
def imageInspect(image: String)(implicit docker: DockerClient, fmt: Format[DockerImageInfo]): Future[DockerImageInfo] = {
  val request = url(Endpoints.imageInspect(image).toString).GET
  docker.dockerJsonRequest[DockerImageInfo](request).map {
    case Left(StatusCode(404)) => throw new NoSuchImageException(image, docker)
    case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
    case Right(info) => info
    case Left(other) => throw new DockerRequestException(s"inspect image $image request failed", docker, Some(other), Some(request))
  }
}
/**
* retrieve image history / changes
*/
/**
 * retrieve image history / changes
 * @throws NoSuchImageException if the image is unknown (HTTP 404)
 */
def imageHistory(image: String)(implicit docker: DockerClient, fmt: Format[DockerImageHistoryInfo]): Future[Seq[DockerImageHistoryInfo]] = {
  val req = url(Endpoints.imageHistory(image).toString).GET
  docker.dockerJsonRequest[Seq[DockerImageHistoryInfo]](req).map {
    case Right(info) => info
    case Left(StatusCode(404)) => throw new NoSuchImageException(image, docker)
    case Left(StatusCode(500)) => throw new DockerInternalServerErrorException(docker)
    // bugfix: error message previously said "inspect image" (copy-paste from imageInspect)
    case Left(t) => throw new DockerRequestException(s"image history request failed for $image", docker, Some(t), Some(req))
  }
}
/**
 * Push an image to a registry.
 * Allows realtime processing of the response via the given consumer iteratee.
 *
 * When non-anonymous credentials are supplied, they are sent base64-encoded
 * in the X-Registry-Auth header.
 */
def imagePushIteratee[T](image: String, registry: Option[String] = None)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[Iteratee[Array[Byte], T]] = {
  val base = url(Endpoints.imagePush(image, registry).toString).POST
  val req = auth match {
    case DockerAnonymousAuth => base
    case credentials         => base <:< Map("X-Registry-Auth" -> credentials.asBase64Encoded)
  }
  implicit val img = image
  recoverImageAwareRequest(docker.dockerRequestIteratee(req)(_ => consumer))
}
/**
 * Push an image to a registry.
 * Collects and returns the list of status messages / errors on completion.
 */
def imagePush(image: String, registry: Option[String] = None)(implicit docker: DockerClient, auth: DockerAuth = DockerAnonymousAuth): Future[List[Either[DockerErrorInfo, DockerStatusMessage]]] = {
  val pushed = imagePushIteratee(image, registry)(DockerIteratee.statusStream)
  pushed.flatMap(_.run)
}
/**
 * Tag an image into a repository.
 *
 * @param image image name or id to tag
 * @param repo  target repository
 * @param force overwrite an existing tag when true
 * @return true when the image was tagged (HTTP 201)
 * @throws DockerBadParameterException on 400
 * @throws NoSuchImageException on 404
 * @throws DockerConflictException on 409
 * @throws DockerRequestException on any other failure
 */
def imageTag(image: String, repo: String, force: Boolean = false)(implicit docker: DockerClient): Future[Boolean] = {
  val req = url(Endpoints.imageTag(image, repo, force).toString).POST
  docker.dockerRequest(req).map {
    case Right(resp) if (resp.getStatusCode() == 201) => true
    case Right(resp) if (resp.getStatusCode() == 400) => throw new DockerBadParameterException(s"tagging image $image into $repo failed", docker, req)
    case Right(resp) if (resp.getStatusCode() == 404) => throw new NoSuchImageException(image, docker)
    case Right(resp) if (resp.getStatusCode() == 409) => throw new DockerConflictException(s"conflict while tagging $image into $repo: ${resp.getResponseBody()}", docker)
    // Catch-all for 500 and any other unexpected status: previously only 500 was
    // handled and other codes escaped as a MatchError inside the Future.
    case Right(resp) => throw new DockerRequestException(s"tagging image $image into $repo request failed (status ${resp.getStatusCode()})", docker, None, Some(req))
    case Left(t) => throw new DockerRequestException(s"tagging image $image into $repo request failed", docker, Some(t), Some(req))
  }
}
/**
 * Remove an image.
 *
 * @param image   image name or id to remove
 * @param force   force removal when true
 * @param noPrune do not delete untagged parents when true
 * @return sequence of (action, imageId) pairs as reported by the daemon,
 *         e.g. ("Deleted", "<id>") or ("Untagged", "<id>")
 * @throws NoSuchImageException on 404
 * @throws DockerConflictException on 409
 * @throws DockerRequestException on any other failure
 */
def imageRemove(image: String, force: Boolean = false, noPrune: Boolean = false)(implicit docker: DockerClient): Future[Seq[(String,String)]] = {
  val req = url(Endpoints.imageRemove(image, force, noPrune).toString).DELETE
  docker.dockerJsonRequest[Seq[JsObject]](req).map {
    // Each entry is expected to be a single-field object such as {"Deleted": "<id>"}.
    // Empty objects are skipped instead of crashing on `.fields.head` as before.
    case Right(output) => output.flatMap { obj =>
      obj.fields.headOption.map { case (action, value) =>
        (action, value.asOpt[String].getOrElse(""))
      }
    }
    case Left(StatusCode(404)) => throw new NoSuchImageException(image, docker)
    case Left(StatusCode(409)) => throw new DockerConflictException(s"conflict while removing image $image", docker)
    case Left(StatusCode(500)) => throw new DockerRequestException(s"removing image $image request failed", docker, None, Some(req))
    case Left(t) => throw new DockerRequestException(s"removing image $image request failed", docker, Some(t), Some(req))
  }
}
/**
 * Search the registry for available images matching the given term.
 *
 * @param query search term
 * @return matching search results
 * @throws DockerRequestException when the request fails
 */
def imageSearch(query: String)(implicit docker: DockerClient, fmt: Format[DockerImageSearchResult]): Future[Seq[DockerImageSearchResult]] = {
  val request = url(Endpoints.imageSearch(query).toString).GET
  docker.dockerJsonRequest[Seq[DockerImageSearchResult]](request).map {
    case Left(StatusCode(500)) => throw new DockerRequestException(s"searching image (term: $query) request failed", docker, None, Some(request))
    case Left(cause)           => throw new DockerRequestException(s"searching image (term: $query) request failed", docker, Some(cause), Some(request))
    case Right(results)        => results
  }
}
/**
 * Export an image as a tarball.
 * Allows realtime processing of the response body via the given consumer iteratee.
 */
def imageExport[T](image: String)(consumer: Iteratee[Array[Byte], T])(implicit docker: DockerClient): Future[Iteratee[Array[Byte], T]] = {
  implicit val img = image
  val request = url(Endpoints.imageExport(image).toString).GET
  recoverImageAwareRequest(docker.dockerRequestIteratee(request)(_ => consumer))
}
/**
 * Export an image as a tarball into the given target file.
 *
 * The underlying [[FileOutputStream]] is closed whether the export succeeds
 * or fails (it previously leaked on failure).
 *
 * @param image  image name or id to export
 * @param toFile file to write the tarball to
 * @return the target file, once fully written and closed
 */
def imageExport(image: String, toFile: java.io.File)(implicit docker: DockerClient): Future[java.io.File] = {
  val os = new FileOutputStream(toFile)
  imageExport(image)(Iteratee.foreach[Array[Byte]](d => os.write(d)))
    .flatMap(_.run)
    .andThen { case _ => os.close() } // close on success AND failure
    .map(_ => toFile)
}
/**
 * Export an image as a tarball into the given output stream.
 *
 * The stream is closed whether the export succeeds or fails (it previously
 * leaked on failure), matching the success-path behavior callers already rely on.
 *
 * @param image image name or id to export
 * @param os    output stream to write the tarball to; closed on completion
 * @return the (closed) output stream
 */
def imageExport(image: String, os: java.io.OutputStream)(implicit docker: DockerClient): Future[java.io.OutputStream] = {
  imageExport(image)(Iteratee.foreach[Array[Byte]](d => os.write(d)))
    .flatMap(_.run)
    .andThen { case _ => os.close() } // close on success AND failure
    .map(_ => os)
}
/**
 * Import images from a tarball containing one or more docker images.
 *
 * @param tarFile tarball to upload (sent as application/x-tar)
 * @return true when the daemon accepted the upload (HTTP 200)
 * @throws DockerRequestException on any non-200 status or transport failure
 */
def imageImport(tarFile: java.io.File)(implicit docker: DockerClient): Future[Boolean] = {
  val req = url(Endpoints.imagesLoad.toString).POST <<< tarFile <:< Map("Content-Type" -> "application/x-tar")
  docker.dockerRequest(req).map {
    case Right(resp) if (resp.getStatusCode() == 200) => true
    // Catch-all for 500 and any other unexpected status: previously only 500 was
    // handled and other codes escaped as a MatchError inside the Future.
    case Right(resp) => throw new DockerRequestException(s"importing image from $tarFile request failed (status ${resp.getStatusCode()})", docker, None, Some(req))
    case Left(t) => throw new DockerRequestException(s"importing image from $tarFile request failed", docker, Some(t), Some(req))
  }
}
} | waveinch/reactive-docker | src/main/scala/com/kolor/docker/api/DockerApi.scala | Scala | mit | 46,429 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.cassandra
import java.nio.ByteBuffer
import com.twitter.cassie.tests.util.FakeCassandra
import com.twitter.conversions.time._
import com.twitter.util.Await
import com.twitter.zipkin.cassandra.{Keyspace, StorageBuilder}
import com.twitter.zipkin.common._
import com.twitter.zipkin.query.Trace
import org.scalatest.{BeforeAndAfter, FunSuite}
class CassandraStorageTest extends FunSuite with BeforeAndAfter {

  // In-process fake Cassandra server shared by every test in this suite.
  object FakeServer extends FakeCassandra

  // Recreated in `before` for each test, torn down in `after`.
  var cassandraStorage: CassandraStorage = null

  // Helper: builds a string-typed binary annotation attached to the shared endpoint `ep`.
  def binaryAnnotation(key: String, value: String) =
    BinaryAnnotation(key, ByteBuffer.wrap(value.getBytes), AnnotationType.String, Some(ep))

  // Shared fixture data used by the test cases below.
  val ep = Endpoint(123, 123, "service")
  val spanId = 456
  val ann1 = Annotation(1, "cs", Some(ep))
  val ann2 = Annotation(2, "sr", None)
  val ann3 = Annotation(2, "custom", Some(ep))
  val ann4 = Annotation(2, "custom", Some(ep))

  val span1 = Span(123, "methodcall", spanId, None, List(ann1, ann3),
    List(binaryAnnotation("BAH", "BEH")))

  before {
    // Boot the fake server first so the storage can connect to its (dynamic) port.
    FakeServer.start()
    val keyspaceBuilder = Keyspace.static(port = FakeServer.port.get)
    val builder = StorageBuilder(keyspaceBuilder)
    cassandraStorage = builder.apply()
  }

  after {
    cassandraStorage.close()
    FakeServer.stop()
  }

  // Storing a span and fetching by its trace id returns that span.
  test("getSpansByTraceId") {
    Await.result(cassandraStorage.storeSpan(span1))
    val spans = Await.result(cassandraStorage.getSpansByTraceId(span1.traceId))
    assert(!spans.isEmpty)
    assert(spans(0) === span1)
  }

  // Batch lookup by several trace ids returns one trace per id, in request order.
  test("getSpansByTraceIds") {
    Await.result(cassandraStorage.storeSpan(span1))
    val actual1 = Await.result(cassandraStorage.getSpansByTraceIds(List(span1.traceId)))
    assert(!actual1.isEmpty)

    val trace1 = Trace(actual1(0))
    assert(!trace1.spans.isEmpty)
    assert(trace1.spans(0) === span1)

    val span2 = Span(666, "methodcall2", spanId, None, List(ann2),
      List(binaryAnnotation("BAH2", "BEH2")))
    Await.result(cassandraStorage.storeSpan(span2))
    val actual2 = Await.result(cassandraStorage.getSpansByTraceIds(List(span1.traceId, span2.traceId)))
    assert(!actual2.isEmpty)

    val trace2 = Trace(actual2(0))
    val trace3 = Trace(actual2(1))
    assert(!trace2.spans.isEmpty)
    assert(trace2.spans(0) === span1)
    assert(!trace3.spans.isEmpty)
    assert(trace3.spans(0) === span2)
  }

  test("getSpansByTraceIds should return empty list if no trace exists") {
    val actual1 = Await.result(cassandraStorage.getSpansByTraceIds(List(span1.traceId)))
    assert(actual1.isEmpty)
  }

  // Binary annotations written via separate spans of the same trace must all
  // survive storage and be merged back into the trace.
  test("all binary annotations are logged") {
    val a_traceId = 1234L
    val a1 = Annotation(1, "sr", Some(ep))
    val a2 = Annotation(2, "ss", Some(ep))
    val ba1 = binaryAnnotation("key1", "value1")
    val ba2 = binaryAnnotation("key2", "value2")
    val originalKeyNames = Set("key1", "key2")
    // Four fragments of the same span (same trace id / span id), stored separately.
    val a_span1 = Span(a_traceId, "test", 345L, None, List(a1), Nil)
    val a_span2 = Span(a_traceId, "test", 345L, None, Nil, List(ba1))
    val a_span3 = Span(a_traceId, "test", 345L, None, Nil, List(ba2))
    val a_span4 = Span(a_traceId, "test", 345L, None, List(a2), Nil)
    val data = List(a_span1, a_span2, a_span3, a_span4)
    for(s <- data) {
      Await.result(cassandraStorage.storeSpan(s))
    }
    val actual1 = Await.result(cassandraStorage.getSpansByTraceIds(List(a_traceId)))
    val trace1 = Trace(actual1(0))
    val bAnnotations = trace1.spans(0).binaryAnnotations
    val keyNames = bAnnotations map { _.key }

    assert(bAnnotations.length === 2)
    assert(keyNames.toSet === originalKeyNames)
  }

  // TTL round-trip: the value written with setTimeToLive is read back unchanged.
  test("set time to live on a trace and then get it") {
    Await.result(cassandraStorage.storeSpan(span1))
    Await.result(cassandraStorage.setTimeToLive(span1.traceId, 1234.seconds))
    assert(Await.result(cassandraStorage.getTimeToLive(span1.traceId)) === 1234.seconds)
  }
}
| Flipkart/zipkin | zipkin-cassandra/src/test/scala/com/twitter/zipkin/storage/cassandra/CassandraStorageTest.scala | Scala | apache-2.0 | 4,455 |
package pl.newicom.eventstore
import akka.event.LoggingAdapter
import akka.stream.scaladsl.Source
import eventstore.{ResolvedEvent, EventRecord, EsConnection}
import eventstore.EventNumber.Exact
import pl.newicom.dddd.aggregate.BusinessEntity
import pl.newicom.dddd.messaging.event.EventMessageEntry
trait EventSourceProvider extends EventstoreSerializationSupport {

  // Logger supplied by the concrete implementation mixing in this trait.
  def log: LoggingAdapter

  /**
   * Builds an akka-streams [[Source]] of event messages for the given business
   * entity, backed by an EventStore stream publisher.
   *
   * @param esCon       connection to the EventStore server
   * @param observable  entity whose event stream should be observed
   * @param fromPosExcl optional event number to resume from (exclusive);
   *                    None starts from the beginning of the stream
   */
  def eventSource(esCon: EsConnection, observable: BusinessEntity, fromPosExcl: Option[Long]): Source[EventMessageEntry, Unit] = {
    val streamId = StreamIdResolver.streamId(observable)
    log.debug(s"Subscribing to $streamId from position $fromPosExcl (exclusive)")
    Source.fromPublisher(
      esCon.streamPublisher(
        streamId,
        // NOTE(review): Long position is truncated to Int here — values beyond
        // Int.MaxValue would wrap; confirm upstream guarantees the bound.
        fromPosExcl.map(nr => Exact(nr.toInt)),
        // Follow link events so projected streams resolve to the original event data.
        resolveLinkTos = true
      )
    ).map {
      // Plain event record: position is the record's own event number.
      case EventRecord(_, number, eventData, _) =>
        EventMessageEntry(toOfficeEventMessage(eventData).get, number.value)
      // Resolved link: emit the linked event's data but the LINK's position,
      // so resuming with fromPosExcl continues in the projected stream.
      case ResolvedEvent(EventRecord(_, _, eventData, _), linkEvent) =>
        EventMessageEntry(toOfficeEventMessage(eventData).get, linkEvent.number.value)
      case unexpected =>
        throw new RuntimeException(s"Unexpected msg received: $unexpected")
    }
  }
}
| odd/akka-ddd | eventstore-akka-persistence/src/main/scala/pl/newicom/eventstore/EventSourceProvider.scala | Scala | mit | 1,239 |
package dpla.ingestion3.reports.summary
import org.apache.commons.lang.StringUtils
/**
 * Utility functions for formatting strings in a report of ingestion operations.
 */
object ReportFormattingUtils {

  /**
   * Joins two strings with a run of `separator` characters so the result is
   * exactly `width` characters wide (default 80). When `a` and `b` together
   * (plus two separators) exceed `width`, only the right-most tail of `b` that
   * still fits is kept.
   *
   * Ex.
   *   errors......./Users/scott/i3/cdl/mapping/20181004_93223/_LOGS/mapping-errors.log
   *   warnings..ers/scott/i3/cdl/mapping/20181004_93223/_LOGS/mapping-cdl-warnings.log
   *
   * @param a         first string (always shown in full)
   * @param b         second string (possibly truncated from the left)
   * @param separator filler character between the two strings
   * @param width     total width of the produced line
   */
  def centerPad(a: String, b: String, separator: String = ".", width: Int = 80): String = {
    val overflows = a.length + b.length + 2 > width
    // Number of characters of `b` that remain visible.
    val visible = if (overflows) width - (a.length + 2) else b.length
    val filler = separator * (width - a.length - visible)
    s"$a$filler${b.takeRight(visible)}"
  }

  /**
   * Returns a new string with `a` centered by left-padding it with `seperator`
   * (default whitespace) relative to the given width.
   *
   * Ex. given 'Errors' it returns '                                     Errors'
   *
   * @param a         string to center
   * @param seperator padding value placed before the string
   * @param width     width to center on, default 80
   */
  def center(a: String, seperator: String = " ", width: Int = 80): String =
    StringUtils.leftPad(a, (width + a.length) / 2, seperator)
}
| dpla/ingestion3 | src/main/scala/dpla/ingestion3/reports/summary/ReportFormattingUtils.scala | Scala | mit | 1,735 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.api.dag.Transformation
import org.apache.flink.runtime.operators.DamBehavior
import org.apache.flink.streaming.api.operators.SimpleOperatorFactory
import org.apache.flink.table.api.TableException
import org.apache.flink.table.data.RowData
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.codegen.sort.ComparatorCodeGenerator
import org.apache.flink.table.planner.delegation.BatchPlanner
import org.apache.flink.table.planner.plan.`trait`.{FlinkRelDistribution, FlinkRelDistributionTraitDef}
import org.apache.flink.table.planner.plan.cost.{FlinkCost, FlinkCostFactory}
import org.apache.flink.table.planner.plan.nodes.calcite.Rank
import org.apache.flink.table.planner.plan.nodes.exec.{BatchExecNode, ExecNode}
import org.apache.flink.table.planner.plan.rules.physical.batch.BatchExecJoinRuleBase
import org.apache.flink.table.planner.plan.utils.{FlinkRelOptUtil, RelExplainUtil}
import org.apache.flink.table.runtime.operators.rank.{ConstantRankRange, RankRange, RankType}
import org.apache.flink.table.runtime.operators.sort.RankOperator
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelDistribution.Type
import org.apache.calcite.rel.RelDistribution.Type.{HASH_DISTRIBUTED, SINGLETON}
import org.apache.calcite.rel._
import org.apache.calcite.rel.`type`.RelDataTypeField
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.util.{ImmutableBitSet, ImmutableIntList, Util}
import java.util
import scala.collection.JavaConversions._
/**
 * Batch physical RelNode for [[Rank]].
 *
 * This node supports two-stage(local and global) rank to reduce data-shuffling.
 */
class BatchExecRank(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    inputRel: RelNode,
    partitionKey: ImmutableBitSet,
    orderKey: RelCollation,
    rankType: RankType,
    rankRange: RankRange,
    rankNumberType: RelDataTypeField,
    outputRankNumber: Boolean,
    val isGlobal: Boolean) // true for the global (second) stage, false for the local stage
  extends Rank(
    cluster,
    traitSet,
    inputRel,
    partitionKey,
    orderKey,
    rankType,
    rankRange,
    rankNumberType,
    outputRankNumber)
  with BatchPhysicalRel
  with BatchExecNode[RowData] {

  // Only the RANK function is implemented for batch (no ROW_NUMBER / DENSE_RANK).
  require(rankType == RankType.RANK, "Only RANK is supported now")

  // Rank bounds must be constant; variable rank ranges are rejected.
  val (rankStart, rankEnd) = rankRange match {
    case r: ConstantRankRange => (r.getRankStart, r.getRankEnd)
    case o => throw new TableException(s"$o is not supported now")
  }

  override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
    new BatchExecRank(
      cluster,
      traitSet,
      inputs.get(0),
      partitionKey,
      orderKey,
      rankType,
      rankRange,
      rankNumberType,
      outputRankNumber,
      isGlobal
    )
  }

  override def explainTerms(pw: RelWriter): RelWriter = {
    val inputRowType = inputRel.getRowType
    pw.item("input", getInput)
      .item("rankType", rankType)
      .item("rankRange", rankRange.toString(inputRowType.getFieldNames))
      .item("partitionBy", RelExplainUtil.fieldToString(partitionKey.toArray, inputRowType))
      .item("orderBy", RelExplainUtil.collationToString(orderKey, inputRowType))
      .item("global", isGlobal)
      .item("select", getRowType.getFieldNames.mkString(", "))
  }

  override def computeSelfCost(planner: RelOptPlanner, mq: RelMetadataQuery): RelOptCost = {
    // sort is done in the last sort operator, only need to compare between agg column.
    val inputRowCnt = mq.getRowCount(getInput())
    val cpuCost = FlinkCost.FUNC_CPU_COST * inputRowCnt
    val memCost: Double = mq.getAverageRowSize(this)
    val rowCount = mq.getRowCount(this)
    val costFactory = planner.getCostFactory.asInstanceOf[FlinkCostFactory]
    costFactory.makeCost(rowCount, cpuCost, 0, 0, memCost)
  }

  // Trait-satisfaction dispatch: the global and local stages accept different
  // required distributions/collations.
  override def satisfyTraits(requiredTraitSet: RelTraitSet): Option[RelNode] = {
    if (isGlobal) {
      satisfyTraitsOnGlobalRank(requiredTraitSet)
    } else {
      satisfyTraitsOnLocalRank(requiredTraitSet)
    }
  }

  // Global rank: can only push a SINGLETON (when there is no partition key) or a
  // hash distribution compatible with the partition key down to its input.
  private def satisfyTraitsOnGlobalRank(requiredTraitSet: RelTraitSet): Option[RelNode] = {
    val requiredDistribution = requiredTraitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE)
    val canSatisfy = requiredDistribution.getType match {
      case SINGLETON => partitionKey.cardinality() == 0
      case HASH_DISTRIBUTED =>
        val shuffleKeys = requiredDistribution.getKeys
        val partitionKeyList = ImmutableIntList.of(partitionKey.toArray: _*)
        if (requiredDistribution.requireStrict) {
          shuffleKeys == partitionKeyList
        } else if (Util.startsWith(shuffleKeys, partitionKeyList)) {
          // If required distribution is not strict, Hash[a] can satisfy Hash[a, b].
          // so return true if shuffleKeys(Hash[a, b]) start with partitionKeyList(Hash[a])
          true
        } else {
          // If partialKey is enabled, try to use partial key to satisfy the required distribution
          val tableConfig = FlinkRelOptUtil.getTableConfigFromContext(this)
          val partialKeyEnabled = tableConfig.getConfiguration.getBoolean(
            BatchExecJoinRuleBase.TABLE_OPTIMIZER_SHUFFLE_BY_PARTIAL_KEY_ENABLED)
          partialKeyEnabled && partitionKeyList.containsAll(shuffleKeys)
        }
      case _ => false
    }
    if (!canSatisfy) {
      return None
    }

    // Translate the required distribution into the one to request from the input.
    val inputRequiredDistribution = requiredDistribution.getType match {
      case SINGLETON => requiredDistribution
      case HASH_DISTRIBUTED =>
        val shuffleKeys = requiredDistribution.getKeys
        val partitionKeyList = ImmutableIntList.of(partitionKey.toArray: _*)
        if (requiredDistribution.requireStrict) {
          FlinkRelDistribution.hash(partitionKeyList)
        } else if (Util.startsWith(shuffleKeys, partitionKeyList)) {
          // Hash[a] can satisfy Hash[a, b]
          FlinkRelDistribution.hash(partitionKeyList, requireStrict = false)
        } else {
          // use partial key to satisfy the required distribution
          FlinkRelDistribution.hash(shuffleKeys.map(partitionKeyList(_)), requireStrict = false)
        }
    }
    // sort by partition keys + orderby keys
    val providedFieldCollations = partitionKey.toArray.map {
      k => FlinkRelOptUtil.ofRelFieldCollation(k)
    }.toList ++ orderKey.getFieldCollations
    val providedCollation = RelCollations.of(providedFieldCollations)
    val requiredCollation = requiredTraitSet.getTrait(RelCollationTraitDef.INSTANCE)
    // Only advertise the required collation when our provided sort already satisfies it.
    val newProvidedTraitSet = if (providedCollation.satisfies(requiredCollation)) {
      getTraitSet.replace(requiredDistribution).replace(requiredCollation)
    } else {
      getTraitSet.replace(requiredDistribution)
    }
    val newInput = RelOptRule.convert(getInput, inputRequiredDistribution)
    Some(copy(newProvidedTraitSet, Seq(newInput)))
  }

  // Local rank: may satisfy SINGLETON directly or a hash distribution, with the
  // caveat that the rank-number output column (if present) cannot come from input.
  private def satisfyTraitsOnLocalRank(requiredTraitSet: RelTraitSet): Option[RelNode] = {
    val requiredDistribution = requiredTraitSet.getTrait(FlinkRelDistributionTraitDef.INSTANCE)
    requiredDistribution.getType match {
      case Type.SINGLETON =>
        val inputRequiredDistribution = requiredDistribution
        // sort by orderby keys
        val providedCollation = orderKey
        val requiredCollation = requiredTraitSet.getTrait(RelCollationTraitDef.INSTANCE)
        val newProvidedTraitSet = if (providedCollation.satisfies(requiredCollation)) {
          getTraitSet.replace(requiredDistribution).replace(requiredCollation)
        } else {
          getTraitSet.replace(requiredDistribution)
        }

        val inputRequiredTraits = getInput.getTraitSet.replace(inputRequiredDistribution)
        val newInput = RelOptRule.convert(getInput, inputRequiredTraits)
        Some(copy(newProvidedTraitSet, Seq(newInput)))
      case Type.HASH_DISTRIBUTED =>
        val shuffleKeys = requiredDistribution.getKeys
        if (outputRankNumber) {
          // rank function column is the last one
          val rankColumnIndex = getRowType.getFieldCount - 1
          if (!shuffleKeys.contains(rankColumnIndex)) {
            // Cannot satisfy required distribution if some keys are not from input
            return None
          }
        }

        val inputRequiredDistributionKeys = shuffleKeys
        val inputRequiredDistribution = FlinkRelDistribution.hash(
          inputRequiredDistributionKeys, requiredDistribution.requireStrict)

        // sort by partition keys + orderby keys
        val providedFieldCollations = partitionKey.toArray.map {
          k => FlinkRelOptUtil.ofRelFieldCollation(k)
        }.toList ++ orderKey.getFieldCollations
        val providedCollation = RelCollations.of(providedFieldCollations)
        val requiredCollation = requiredTraitSet.getTrait(RelCollationTraitDef.INSTANCE)
        val newProvidedTraitSet = if (providedCollation.satisfies(requiredCollation)) {
          getTraitSet.replace(requiredDistribution).replace(requiredCollation)
        } else {
          getTraitSet.replace(requiredDistribution)
        }

        val inputRequiredTraits = getInput.getTraitSet.replace(inputRequiredDistribution)
        val newInput = RelOptRule.convert(getInput, inputRequiredTraits)
        Some(copy(newProvidedTraitSet, Seq(newInput)))
      case _ => None
    }
  }

  //~ ExecNode methods -----------------------------------------------------------

  override def getDamBehavior: DamBehavior = DamBehavior.PIPELINED

  override def getInputNodes: util.List[ExecNode[BatchPlanner, _]] =
    List(getInput.asInstanceOf[ExecNode[BatchPlanner, _]])

  override def replaceInputNode(
      ordinalInParent: Int,
      newInputNode: ExecNode[BatchPlanner, _]): Unit = {
    replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
  }

  // Translates this node into a RankOperator. The input is assumed to already be
  // sorted; the generated comparators only detect group / order-key changes.
  override protected def translateToPlanInternal(
      planner: BatchPlanner): Transformation[RowData] = {
    val input = getInputNodes.get(0).translateToPlan(planner)
      .asInstanceOf[Transformation[RowData]]
    val outputType = FlinkTypeFactory.toLogicalRowType(getRowType)
    val partitionBySortingKeys = partitionKey.toArray
    // The collation for the partition-by fields is inessential here, we only use the
    // comparator to distinguish different groups.
    // (order[is_asc], null_is_last)
    val partitionBySortCollation = partitionBySortingKeys.map(_ => (true, true))

    // The collation for the order-by fields is inessential here, we only use the
    // comparator to distinguish order-by fields change.
    // (order[is_asc], null_is_last)
    val orderByCollation = orderKey.getFieldCollations.map(_ => (true, true)).toArray
    val orderByKeys = orderKey.getFieldCollations.map(_.getFieldIndex).toArray
    val inputType = FlinkTypeFactory.toLogicalRowType(getInput.getRowType)

    //operator needn't cache data
    val operator = new RankOperator(
      ComparatorCodeGenerator.gen(
        planner.getTableConfig,
        "PartitionByComparator",
        partitionBySortingKeys,
        partitionBySortingKeys.map(inputType.getTypeAt),
        partitionBySortCollation.map(_._1),
        partitionBySortCollation.map(_._2)),
      ComparatorCodeGenerator.gen(
        planner.getTableConfig,
        "OrderByComparator",
        orderByKeys,
        orderByKeys.map(inputType.getTypeAt),
        orderByCollation.map(_._1),
        orderByCollation.map(_._2)),
      rankStart,
      rankEnd,
      outputRankNumber)

    ExecNode.createOneInputTransformation(
      input,
      getRelDetailedDescription,
      SimpleOperatorFactory.of(operator),
      InternalTypeInfo.of(outputType),
      input.getParallelism)
  }
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchExecRank.scala | Scala | apache-2.0 | 12,548 |
/*
* The MIT License (MIT)
* <p>
* Copyright (c) 2017-2020
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.techcode.streamy.syslog.util.parser
import java.nio.charset.StandardCharsets
import akka.util.ByteString
import com.google.common.base.CharMatcher
import com.google.common.primitives.Ints
import io.techcode.streamy.syslog.component.SyslogTransformer._
import io.techcode.streamy.util.json._
import io.techcode.streamy.util.parser.{ByteStringParser, CharMatchers, ParseException}
/**
 * Syslog parser companion.
 *
 * Holds precomputed character matchers shared by the parser implementations
 * and factory methods to instantiate an Rfc5424 or Rfc3164 compliant parser.
 */
object SyslogParser {

  // Struct data param value matcher for Rfc5424
  // (printable US-ASCII minus the characters that would terminate/escape a param value).
  private[parser] val ParamValueMatcher: CharMatcher = CharMatchers.PrintUsAscii.and(CharMatcher.noneOf("\\\"]")).precomputed()

  // Struct data name matcher for Rfc5424
  private[parser] val SdNameMatcher: CharMatcher = CharMatchers.PrintUsAscii.and(CharMatcher.noneOf("= \"]")).precomputed()

  // App name matcher for Rfc3164 ('[' excluded because it starts the proc id section)
  private[parser] val AppNameMatcher: CharMatcher = CharMatchers.PrintUsAscii.and(CharMatcher.noneOf("[")).precomputed()

  // Proc id matcher for Rfc3164 (']' excluded because it closes the proc id section)
  private[parser] val ProcIdMatcher: CharMatcher = CharMatchers.PrintUsAscii.and(CharMatcher.noneOf("]")).precomputed()

  /**
   * Create a syslog parser that transform incoming [[ByteString]] to [[Json]].
   * This parser is Rfc5424 compliant.
   *
   * @param config parser configuration.
   * @return new syslog parser Rfc5424 compliant.
   */
  def rfc5424(config: Rfc5424.Config): ByteStringParser[Json] = new Rfc5424Parser(config)

  /**
   * Create a syslog parser that transform incoming [[ByteString]] to [[Json]].
   * This parser is Rfc3164 compliant.
   *
   * @param config parser configuration.
   * @return new syslog parser Rfc3164 compliant.
   */
  def rfc3164(config: Rfc3164.Config): ByteStringParser[Json] = new Rfc3164Parser(config)

}
/**
 * Parser helpers containing various shortcut for character matching.
 * Each rule consumes input on success and returns whether it matched.
 */
private trait ParserHelpers {
  this: ByteStringParser[Json] =>

  // Single-character rules: each consumes exactly one character on success.
  @inline def openQuote(): Boolean = ch('<')

  @inline def closeQuote(): Boolean = ch('>')

  @inline def openBracket(): Boolean = ch('[')

  @inline def closeBracket(): Boolean = ch(']')

  @inline def sp(): Boolean = ch(' ')

  // One or more consecutive spaces.
  @inline def ws(): Boolean = oneOrMore(ch(' '))

  // The syslog "nil value" is a single dash.
  @inline def nilValue(): Boolean = dash()

  @inline def dash(): Boolean = ch('-')

  @inline def colon(): Boolean = ch(':')

  @inline def point(): Boolean = ch('.')

  @inline def doubleQuote(): Boolean = ch('"')

  @inline def equal(): Boolean = ch('=')
}
/**
 * Syslog parser that transform incoming [[ByteString]] to [[Json]].
 * This parser is Rfc5424 compliant.
 *
 * Parsing is side-effecting: the grammar rules below both match input and
 * populate the json builders as they go, so rule order matters.
 *
 * @param config parser configuration.
 */
private class Rfc5424Parser(config: Rfc5424.Config) extends ByteStringParser[Json] with ParserHelpers {

  private val binding = config.binding

  // Mode carries the maximum accepted lengths for the header fields.
  private val mode = config.mode

  // Builder for the resulting json document; structured-data params are
  // accumulated separately and merged in `run()`.
  private implicit val builder: JsObjectBuilder = Json.objectBuilder()

  private val structDataBuilder: JsObjectBuilder = Json.objectBuilder()

  def run(): Json = {
    if (root()) {
      // Attach collected structured-data only when at least one param was captured.
      if (!structDataBuilder.result().isEmpty) {
        binding.structData.foreach(bind => builder += bind -> structDataBuilder.result())
      }
      builder.result()
    } else {
      // Error message points at the failing offset with a caret.
      throw new ParseException(s"Unexpected input at index ${cursor}:\n${data.utf8String}\n${" " * cursor}^")
    }
  }

  // Entry rule: HEADER SP STRUCTURED-DATA [SP MSG]
  override def root(): Boolean =
    header() &&
      sp() &&
      structuredData() &&
      optional(msg()) &&
      eoi()

  // scalastyle:off

  def header(): Boolean =
    pri() && version() && timestamp() && hostname() && appName() && procId() && msgId()

  // PRI is the priority value enclosed in angle brackets, e.g. <34>.
  def pri(): Boolean =
    openQuote() && capturePrival(priVal()) && closeQuote()

  def priVal(): Boolean = times(1, 3, CharMatchers.Digit)

  // Splits the captured priority value into facility (high bits) and
  // severity (low 3 bits) when either binding is requested.
  private def capturePrival(rule: => Boolean): Boolean = {
    capture(rule) { value =>
      if (binding.facility.isDefined || binding.severity.isDefined) {
        val prival = Ints.tryParse(value.decodeString(StandardCharsets.US_ASCII))

        // Read severity or facility
        binding.facility.foreach(k => builder += k -> (prival >> 3))
        binding.severity.foreach(k => builder += k -> (prival & 7))
      }
      true
    }
  }

  def version(): Boolean =
    times(1, CharMatchers.Digit19) && optional(times(1, 2, CharMatchers.Digit))

  def hostname(): Boolean =
    sp() && or(
      nilValue(),
      capture(binding.hostname, times(1, mode.hostname, CharMatchers.PrintUsAscii)) { (key, value) =>
        // Unsafe can be use because hostname is validate
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    )

  def appName(): Boolean =
    sp() && or(
      nilValue(),
      capture(binding.appName, times(1, mode.appName, CharMatchers.PrintUsAscii)) { (key, value) =>
        // Unsafe can be use because appName is validate
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    )

  def procId(): Boolean =
    sp() && or(
      nilValue(),
      capture(binding.procId, times(1, mode.procId, CharMatchers.PrintUsAscii)) { (key, value) =>
        // Unsafe can be use because procId is validate
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    )

  def msgId(): Boolean =
    sp() && or(
      nilValue(),
      capture(binding.msgId, times(1, mode.msgId, CharMatchers.PrintUsAscii)) { (key, value) =>
        // Unsafe can be use because msgId is validate
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    )

  // Timestamp in Rfc3339 form: full-date "T" full-time, or the nil value.
  def timestamp(): Boolean =
    sp() && or(
      nilValue(),
      capture(binding.timestamp, fullDate() && ch('T') && fullTime()) { (key, value) =>
        // Unsafe can be use because timestamp is validate
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    )

  def fullDate(): Boolean =
    dateFullYear() && dash() && dateMonth() && dash() && dateMDay()

  def dateFullYear(): Boolean = times(4, CharMatchers.Digit)

  def dateMonth(): Boolean = times(2, CharMatchers.Digit)

  def dateMDay(): Boolean = times(2, CharMatchers.Digit)

  def fullTime(): Boolean = partialTime() && timeOffset()

  def partialTime(): Boolean =
    timeHour() && colon() && timeMinute() && colon() && timeSecond() && optional(timeSecFrac())

  def timeSecond(): Boolean = times(2, CharMatchers.Digit)

  // Up to 6 fractional-second digits.
  def timeSecFrac(): Boolean = point() && times(1, 6, CharMatchers.Digit)

  // 'Z' (UTC) or a numeric +hh:mm / -hh:mm offset.
  def timeOffset(): Boolean = or(
    ch('Z'),
    timeNumOffset()
  )

  def timeNumOffset(): Boolean =
    or(ch('+'), ch('-')) && timeHour() && colon() && timeMinute()

  def timeHour(): Boolean = times(2, CharMatchers.Digit)

  def timeMinute(): Boolean = times(2, CharMatchers.Digit)

  def structuredData(): Boolean = or(
    nilValue(),
    sdElement()
  )

  def sdElement(): Boolean =
    openBracket() && sdName() && zeroOrMore(sp() && sdParam()) && closeBracket()

  // name="value" pair: the param name is pushed on the parser stack while the
  // value is parsed, then popped when binding into structDataBuilder.
  def sdParam(): Boolean =
    capture(binding.structData, sdName()) { (_, value) =>
      stack.push(value.utf8String)
      true
    } && equal() && doubleQuote() &&
      capture(binding.structData, paramValue()) { (_, value) =>
        structDataBuilder.bind(stack.pop().asInstanceOf[String], JsString.fromByteStringUnsafe(value))
      } && doubleQuote()

  def paramValue(): Boolean = zeroOrMore(SyslogParser.ParamValueMatcher)

  def sdName(): Boolean = times(1, 32, SyslogParser.SdNameMatcher)

  // Message: everything after the separating space, optionally kept as raw bytes.
  def msg(): Boolean =
    sp() && capture(binding.message, any()) { (key, value) =>
      // Unsafe can be use because message is validate
      builder.bind(key, {
        if (config.bypassMessageParsing) {
          JsBytes(value)
        } else {
          JsString.fromByteStringUnsafe(value)
        }
      })
    }

  // Reset builders so the parser instance can be reused for the next message.
  override def cleanup(): Unit = {
    super.cleanup()
    builder.clear()
    structDataBuilder.clear()
  }

}
// scalastyle:on
/**
* Syslog parser that transform incoming [[ByteString]] to [[Json]].
* This parser is Rfc3164 compliant.
*
* @param config parser configuration.
*/
private class Rfc3164Parser(config: Rfc3164.Config) extends ByteStringParser[Json] with ParserHelpers {
  private val binding = config.binding
  private val mode = config.mode
  // Mutable builder accumulating the parsed fields; reset in cleanup().
  private implicit var builder: JsObjectBuilder = Json.objectBuilder()
  /** Runs the parser on the current input.
    *
    * @return the parsed json object.
    * @throws ParseException if the input does not match the Rfc3164 grammar;
    *         the message points at the offending index with a caret.
    */
  def run(): Json = {
    if (root()) {
      builder.result()
    } else {
      throw new ParseException(s"Unexpected input at index $cursor:\n${data.utf8String}\n${" " * cursor}^")
    }
  }
  // Top level rule: HEADER ':' [MSG] end-of-input.
  override def root(): Boolean =
    header() &&
      colon() &&
      optional(msg()) &&
      eoi()
  // scalastyle:off
  /** `HEADER` rule: PRI TIMESTAMP SP HOSTNAME SP APP-NAME PROC-ID. */
  def header(): Boolean =
    pri() && timestamp() && sp() && hostname() && sp() && appName() && procId()
  /** `PRI` rule: '<' PRIVAL '>'. */
  def pri(): Boolean =
    openQuote() && capturePrival(priVal()) && closeQuote()
  /** `PRIVAL` rule: one to three digits. */
  def priVal(): Boolean = times(1, 3, CharMatchers.Digit)
  /** Captures the PRI value and, if requested by the binding, derives the
    * facility (high bits) and severity (low 3 bits) from it. */
  private def capturePrival(rule: => Boolean): Boolean = {
    capture(rule) { value =>
      if (binding.facility.isDefined || binding.severity.isDefined) {
        // priVal() guarantees 1 to 3 digits, so tryParse cannot return null here.
        val prival = Ints.tryParse(value.decodeString(StandardCharsets.US_ASCII))
        // PRI encodes facility in the high bits and severity in the low 3 bits.
        binding.facility.foreach(k => builder += k -> (prival >> 3))
        binding.severity.foreach(k => builder += k -> (prival & 7))
      }
      true
    }
  }
  /** `HOSTNAME` rule: printable US-ASCII, length bounded by the parser mode. */
  def hostname(): Boolean =
    capture(binding.hostname, times(1, mode.hostname, CharMatchers.PrintUsAscii)) { (key, value) =>
      // Unsafe conversion is fine: the hostname bytes were already validated.
      builder.bind(key, JsString.fromByteStringUnsafe(value))
    }
  /** `APP-NAME` rule: length bounded by the parser mode. */
  def appName(): Boolean =
    capture(binding.appName, times(1, mode.appName, SyslogParser.AppNameMatcher)) { (key, value) =>
      // Unsafe conversion is fine: the appName bytes were already validated.
      builder.bind(key, JsString.fromByteStringUnsafe(value))
    }
  /** `PROC-ID` rule: '[' proc-id ']', length bounded by the parser mode. */
  def procId(): Boolean =
    openBracket() &&
      capture(binding.procId, times(1, mode.procId, SyslogParser.ProcIdMatcher)) { (key, value) =>
        // Unsafe conversion is fine: the procId bytes were already validated.
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      } &&
      closeBracket()
  /** `TIMESTAMP` rule: BSD-style date captured as a whole. */
  def timestamp(): Boolean =
    capture(binding.timestamp, fullDate()) { (key, value) =>
      // Unsafe conversion is fine: the timestamp bytes were already validated.
      builder.bind(key, JsString.fromByteStringUnsafe(value))
    }
  /** `FULL-DATE` rule: Mmm dd HH:MM:SS (BSD syslog format). */
  def fullDate(): Boolean =
    dateMonth() && ws() && dateMDay() && sp() && fullTime()
  /** `DATE-MONTH` rule: three letter month abbreviation. */
  def dateMonth(): Boolean = times(3, CharMatchers.Alpha)
  /** `DATE-MDAY` rule: one or two digits. */
  def dateMDay(): Boolean = times(1, 2, CharMatchers.Digit)
  /** `FULL-TIME` rule: HH:MM:SS. */
  def fullTime(): Boolean = timeHour() && colon() && timeMinute() && colon() && timeSecond()
  def timeHour(): Boolean = times(2, CharMatchers.Digit)
  def timeMinute(): Boolean = times(2, CharMatchers.Digit)
  def timeSecond(): Boolean = times(2, CharMatchers.Digit)
  /** `MSG` rule: a space followed by the rest of the input, bound either as
    * raw bytes or as a string depending on configuration. */
  def msg(): Boolean =
    sp() && capture(binding.message, any()) { (key, value) =>
      if (config.bypassMessageParsing) {
        builder.bind(key, JsBytes(value))
      } else {
        // Unsafe conversion is fine: the message will be validated on conversion.
        builder.bind(key, JsString.fromByteStringUnsafe(value))
      }
    }
  /** Clears the json builder so this parser instance can be reused. */
  override def cleanup(): Unit = {
    super.cleanup()
    builder.clear()
  }
  // scalastyle:on
}
| amannocci/streamy | plugin-syslog/src/main/scala/io/techcode/streamy/syslog/util/parser/SyslogParser.scala | Scala | mit | 12,268 |
import scala.reflect.api.{Universe => ApiUniverse}
import scala.reflect.macros.Context
trait QuasiquoteCompat {
val c: Context
object quasiquoteCompat {
val u: c.universe.type = c.universe
import u._
import Flag._
// ================= LIFTABLES =================
    /** Type class describing how a value of type `T` is lifted into a tree of
      * a given universe (backport of the quasiquote `Liftable` API). */
    trait Liftable[T] {
      def apply(universe: ApiUniverse, value: T): universe.Tree
    }
    object Liftable {
      // Lifts a value as a plain literal constant tree.
      private class LiftableConstant[T] extends Liftable[T] {
        def apply(universe: ApiUniverse, value: T): universe.Tree =
          universe.Literal(universe.Constant(value))
      }
      // Instances for primitives, String and Unit: all liftable as constants.
      implicit lazy val liftByte: Liftable[Byte] = new LiftableConstant[Byte]
      implicit lazy val liftShort: Liftable[Short] = new LiftableConstant[Short]
      implicit lazy val liftChar: Liftable[Char] = new LiftableConstant[Char]
      implicit lazy val liftInt: Liftable[Int] = new LiftableConstant[Int]
      implicit lazy val liftLong: Liftable[Long] = new LiftableConstant[Long]
      implicit lazy val liftFloat: Liftable[Float] = new LiftableConstant[Float]
      implicit lazy val liftDouble: Liftable[Double] = new LiftableConstant[Double]
      implicit lazy val liftBoolean: Liftable[Boolean] = new LiftableConstant[Boolean]
      implicit lazy val liftString: Liftable[String] = new LiftableConstant[String]
      implicit lazy val liftUnit: Liftable[Unit] = new LiftableConstant[Unit]
      // implicit lazy val liftScalaSymbol: Liftable[scala.Symbol] = new Liftable[scala.Symbol] {
      //   def apply(universe: ApiUniverse, value: scala.Symbol): universe.Tree = {
      //     import universe._
      //     q"scala.Symbol(${value.name})"
      //   }
      // }
    }
    // Guards against lifting a universe-bound value (Expr/Type/TypeTag) into a
    // different universe, which would produce trees owned by the wrong mirror.
    private def requireSameUniverse[T](universe: ApiUniverse, tp: String, value: T) =
      require(universe eq u, s"Can't lift $tp ${showRaw(value)} from universe ${showRaw(universe)} using lift$tp defined for ${showRaw(u)}.")
    /** Lifts an `Expr` by reusing its underlying tree (same universe only). */
    implicit def liftExpr[T <: Expr[_]]: Liftable[T] = new Liftable[T] {
      def apply(universe: ApiUniverse, value: T): universe.Tree = {
        requireSameUniverse(universe, "Expr", value)
        value.tree.asInstanceOf[universe.Tree]
      }
    }
    /** Lifts a `Type` as a synthetic `TypeTree` (same universe only). */
    implicit object liftType extends Liftable[Type] {
      def apply(universe: ApiUniverse, value: Type): universe.Tree = {
        requireSameUniverse(universe, "Type", value)
        universe.TypeTree(value.asInstanceOf[universe.Type])
      }
    }
    /** Lifts a `WeakTypeTag` via the type it carries (same universe only). */
    implicit def liftTypeTag[T <: WeakTypeTag[_]]: Liftable[T] = new Liftable[T] {
      def apply(universe: ApiUniverse, value: T): universe.Tree = {
        requireSameUniverse(universe, "TypeTag", value)
        universe.TypeTree(value.asInstanceOf[universe.WeakTypeTag[_]].tpe)
      }
    }
// ================= BUILD UTILS =================
    /** Builds a `Block` from a statement list: an empty list becomes a unit
      * block, a single statement becomes the block's expression, otherwise
      * the last statement is the result expression. */
    def Block(stats: List[Tree]): Block = stats match {
      case Nil => u.Block(Nil, Literal(Constant(())))
      case elem :: Nil => u.Block(Nil, elem)
      case elems => u.Block(elems.init, elems.last)
    }
def annotationRepr(tree: Tree, args: List[Tree]): Tree = tree match {
case ident: Ident => Apply(Select(New(ident), nme.CONSTRUCTOR: TermName), args)
case call @ Apply(Select(New(ident: Ident), nme.CONSTRUCTOR), _) =>
if(args.nonEmpty)
throw new IllegalArgumentException("Can't splice annotation that already contains args with extra args.")
call
case _ => throw new IllegalArgumentException("Tree ${showRaw(tree)} isn't a correct representation of annotation.")
}
    /** Extractor exposing a raw `Long` flag set (always matches). */
    object FlagsAsBits {
      def unapply(flags: Long): Option[Long] = Some(flags)
    }
    /** Extractor matching the canonical `emptyValDef` sentinel by identity. */
    object EmptyValDefLike {
      def unapply(t: Tree): Boolean = t eq emptyValDef
    }
/** Applications in Scala can have one of the following shapes:
*
* 1) naked core: Ident(_) or Select(_, _) or basically anything else
* 2) naked core with targs: TypeApply(core, targs) or AppliedTypeTree(core, targs)
* 3) apply or several applies wrapping a core: Apply(core, _), or Apply(Apply(core, _), _), etc
*
* This class provides different ways to decompose applications and simplifies their analysis.
*
* ***Examples***
* (TypeApply in the examples can be replaced with AppliedTypeTree)
*
* Ident(foo):
* * callee = Ident(foo)
* * core = Ident(foo)
* * targs = Nil
* * argss = Nil
*
* TypeApply(foo, List(targ1, targ2...))
* * callee = TypeApply(foo, List(targ1, targ2...))
* * core = foo
* * targs = List(targ1, targ2...)
* * argss = Nil
*
* Apply(foo, List(arg1, arg2...))
* * callee = foo
* * core = foo
* * targs = Nil
* * argss = List(List(arg1, arg2...))
*
* Apply(Apply(foo, List(arg21, arg22, ...)), List(arg11, arg12...))
* * callee = foo
* * core = foo
* * targs = Nil
* * argss = List(List(arg11, arg12...), List(arg21, arg22, ...))
*
* Apply(Apply(TypeApply(foo, List(targs1, targs2, ...)), List(arg21, arg22, ...)), List(arg11, arg12...))
* * callee = TypeApply(foo, List(targs1, targs2, ...))
* * core = foo
* * targs = Nil
* * argss = List(List(arg11, arg12...), List(arg21, arg22, ...))
*/
    /** Wrapper around an application tree; see the shape overview above. */
    class Applied(val tree: Tree) {
      /** The tree stripped of the possibly nested applications.
       *  The original tree if it's not an application.
       */
      def callee: Tree = {
        def loop(tree: Tree): Tree = tree match {
          case Apply(fn, _) => loop(fn)
          case tree         => tree
        }
        loop(tree)
      }
      /** The `callee` unwrapped from type applications.
       *  The original `callee` if it's not a type application.
       */
      def core: Tree = callee match {
        case TypeApply(fn, _)       => fn
        case AppliedTypeTree(fn, _) => fn
        case tree                   => tree
      }
      /** The type arguments of the `callee`.
       *  `Nil` if the `callee` is not a type application.
       */
      def targs: List[Tree] = callee match {
        case TypeApply(_, args)       => args
        case AppliedTypeTree(_, args) => args
        case _                        => Nil
      }
      /** (Possibly multiple lists of) value arguments of an application.
       *  `Nil` if the `callee` is not an application.
       */
      def argss: List[List[Tree]] = {
        def loop(tree: Tree): List[List[Tree]] = tree match {
          case Apply(fn, args) => loop(fn) :+ args
          case _               => Nil
        }
        loop(tree)
      }
      /** The depth of the nested applies: e.g. Apply(Apply(Apply(_, _), _), _)
       *  has depth 3.  Continues through type applications (without counting them.)
       */
      def applyDepth: Int = {
        def loop(tree: Tree): Int = tree match {
          case Apply(fn, _)           => 1 + loop(fn)
          case TypeApply(fn, _)       => loop(fn)
          case AppliedTypeTree(fn, _) => loop(fn)
          case _                      => 0
        }
        loop(tree)
      }
    }
/** Returns a wrapper that knows how to destructure and analyze applications.
*/
    def dissectApplied(tree: Tree) = new Applied(tree)
    /** Destructures applications into important subparts described in `Applied` class,
     *  namely into: core, targs and argss (in the specified order).
     *
     *  Trees which are not applications are also accepted. Their callee and core will
     *  be equal to the input, while targs and argss will be Nil.
     *
     *  The provided extractors don't expose all the API of the `Applied` class.
     *  For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
     */
    object Applied {
      def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
        Some((applied.core, applied.targs, applied.argss))
      def unapply(tree: Tree): Option[(Tree, List[Tree], List[List[Tree]])] =
        unapply(dissectApplied(tree))
    }
    /** Like `Applied`, but keeps type arguments attached to the function tree
      * instead of returning them separately. */
    object Applied2 {
      def unapply(tree: Tree): Option[(Tree, List[List[Tree]])] = tree match {
        case Applied(fun, targs, argss) =>
          if(targs.length > 0)
            Some((TypeApply(fun, targs), argss))
          else
            Some((fun, argss))
        case _ => None
      }
    }
    /** Is tree a PRESUPER (early initializer) val or type definition? */
    def isEarlyDef(tree: Tree) = tree match {
      case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
      case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
      case _ => false
    }
    /** Is tree a PRESUPER val definition? */
    def isEarlyValDef(tree: Tree) = tree match {
      case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
      case _ => false
    }
    /** Is tree a PRESUPER type definition? */
    def isEarlyTypeDef(tree: Tree) = tree match {
      case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
      case _ => false
    }
    /** Is tree legal as a member definition of an interface?
     */
    def isInterfaceMember(tree: Tree): Boolean = tree match {
      case EmptyTree => true
      case Import(_, _) => true
      case TypeDef(_, _, _, _) => true
      case DefDef(mods, _, _, _, _, __) => mods hasFlag DEFERRED
      case ValDef(mods, _, _, _) => mods hasFlag DEFERRED
      case _ => false
    }
    /** Builds `super.<init>`, the reference used for super constructor calls. */
    def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
    /** Copies a `ValDef`, replacing only the fields explicitly passed
      * (null means "keep the original field"). */
    def copyValDef(tree: Tree)(
      mods: Modifiers = null,
      name: Name = null,
      tpt: Tree = null,
      rhs: Tree = null
    ): ValDef = tree match {
      case ValDef(mods0, name0, tpt0, rhs0) =>
        treeCopy.ValDef(tree,
          if (mods eq null) mods0 else mods,
          if (name eq null) name0 else name,
          if (tpt eq null) tpt0 else tpt,
          if (rhs eq null) rhs0 else rhs
        )
      case t =>
        sys.error("Not a ValDef: " + t + "/" + t.getClass)
    }
    // Position fix-up hooks: intentionally no-ops in this compat layer.
    def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) }
    def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {} // FIXME: what about -Yrangepos
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
* extends superclass(args_1) ... (args_n) with mixins { self => body }
*
* This gets translated to
*
* extends superclass with mixins { self =>
* presupers' // presupers without rhs
* vparamss // abstract fields corresponding to value parameters
* def <init>(vparamss) {
* presupers
* super.<init>(args)
* }
* body
* }
*/
    def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
      /* Add constructor to template */
      // create parameters for <init> as synthetic trees.
      var vparamss1 = vparamss map (_ map { vd =>
        atPos(vd.pos.focus) {
          // PARAMACCESSOR is an internal flag; smuggle it through the public FlagSet type.
          val PARAMACCESSOR = scala.reflect.internal.Flags.PARAMACCESSOR.asInstanceOf[Long].asInstanceOf[FlagSet]
          // Keep only the parameter-relevant flags of the original ValDef.
          val flags1 = (vd.mods.flags.asInstanceOf[Long] & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM).asInstanceOf[Long]).asInstanceOf[FlagSet]
          val mods = u.Modifiers(flags1 | PARAM | PARAMACCESSOR)
          // FIXME: val mods1 = mods.withAnnotations(vd.mods.annotations)
          val mods1 = mods
          ValDef(mods1, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
        }
      })
      // Split the body into early definitions and regular members.
      val (edefs, rest) = body span isEarlyDef
      val (evdefs, etdefs) = edefs partition isEarlyValDef
      val gvdefs = evdefs map {
        case vdef @ ValDef(_, _, tpt, _) =>
          copyValDef(vdef)(
            // atPos for the new tpt is necessary, since the original tpt might have no position
            // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
            // position of TypeTree, it would still be NoPosition. That's what the author meant.
            // FIXME: tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
            tpt = atPos(vdef.pos.focus)(TypeTree()),
            rhs = EmptyTree
          )
      }
      // FIXME: val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
      val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods) }
      val constrs = {
        if (constrMods hasFlag TRAIT) {
          // Traits get a synthetic $init$ only when they have non-interface members.
          if (body forall isInterfaceMember) List()
          else List(
            atPos(wrappingPos(superPos, lvdefs)) (
              DefDef(NoMods, newTermName("$init$"), List(), List(List()), TypeTree(), u.Block(lvdefs, Literal(Constant())))))
        } else {
          // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
          if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.hasFlag(IMPLICIT))
            vparamss1 = List() :: vparamss1;
          val superRef: Tree = atPos(superPos)(mkSuperSelect)
          // Fold the argument lists into nested Apply nodes around super.<init>.
          val superCall = (superRef /: argss) (Apply.apply)
          List(
            atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
              DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), u.Block(lvdefs ::: List(superCall), Literal(Constant())))))
        }
      }
      constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
      // Field definitions for the class - remove defaults.
      // FIXME: val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
      val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods, rhs = EmptyTree))
      u.Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
    }
    /** Builds/destructures a class definition from its syntactic parts
      * (modifiers, type params, constructor, parents, self type, body). */
    object SyntacticClassDef {
      def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
          constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]],
          parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree =
        ClassDef(mods, name, tparams, Template(parents, selfdef, constrMods, vparamss, argss, body, NoPosition))
      def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers,
        List[List[ValDef]], List[List[Tree]], List[Tree], ValDef, List[Tree])] = tree match {
        case ClassDef(mods, name, tparams, Template(parents, selfdef, tbody)) =>
          // extract generated fieldDefs and constructor
          // NOTE(review): the `mods` bound here shadows the ClassDef `mods`
          // above, so the tuple returned below contains the constructor's
          // mods in BOTH positions — confirm whether this is intentional.
          val (defs, (DefDef(mods, _, _, vparamss0, _, Block(_ :+ Applied(_, _, argss), _))) :: otherDefs) = tbody.splitAt(tbody.indexWhere {
            case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => true
            case _ => false
          })
          val (earlyDefs, fieldDefs) = defs.span(isEarlyDef)
          // undo conversion from (implicit ... ) to ()(implicit ... ) when its the only parameter section
          val vparamss1 = vparamss0 match {
            case List() :: rest if !rest.isEmpty && !rest.head.isEmpty && rest.head.head.mods.hasFlag(IMPLICIT) => rest
            case other => other
          }
          // undo flag modifications by mergeing flag info from constructor args and fieldDefs
          val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap
          val vparamss2 = vparamss1.map { _.map { vd =>
            val mods1 = modsMap(vd.name)
            val flags1 = mods1.flags.asInstanceOf[Long]
            val flags2 = flags1 | (vd.mods.flags.asInstanceOf[Long] & DEFAULTPARAM.asInstanceOf[Long])
            val originalMods =
              if (flags1 == flags2) mods1
              else u.Modifiers(flags2.asInstanceOf[FlagSet], mods1.privateWithin, mods1.annotations) /* FIXME: setPositions positions */
            // val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
            atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
          }}
          Some((mods, name, tparams, mods, vparamss2, argss, parents, selfdef, earlyDefs ::: otherDefs))
        case _ =>
          None
      }
    }
    /** Literal `true`/`false` trees with their constant types pre-assigned. */
    def True = build.setType(Literal(Constant(true)), ConstantType(Constant(true)))
    def False = build.setType(Literal(Constant(false)), ConstantType(Constant(false)))
    /** 2.11-style constructor/extractor sugar for term names. */
    object TermName {
      def apply(s: String) = newTermName(s)
      def unapply(name: TermName): Option[String] = Some(name.toString)
    }
    /** 2.11-style constructor/extractor sugar for type names. */
    object TypeName {
      def apply(s: String) = newTypeName(s)
      def unapply(name: TypeName): Option[String] = Some(name.toString)
    }
    /** Extractor exposing `Modifiers` as (flags, privateWithin, annotations). */
    object Modifiers {
      def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])] = Some((mods.flags, mods.privateWithin, mods.annotations))
    }
}
} | sirmax/scala-maven-plugin | src/it/test_macroparadise/macro/src/main/scala/Compat.scala | Scala | unlicense | 16,944 |
object CompileOptions {

  /** Scalac flags shared by every module in the build, in a fixed order:
    * basics, then enabled language features, then warning configuration.
    */
  lazy val options: Seq[String] = basics ++ languageFeatures ++ warnings

  // Target platform, source encoding and deprecation reporting.
  private lazy val basics: Seq[String] = Seq(
    "-target:jvm-1.8",
    "-deprecation",
    "-encoding", "UTF-8",
    "-feature")

  // Optional language features enabled across the code base.
  private lazy val languageFeatures: Seq[String] = Seq(
    "-language:existentials",
    "-language:higherKinds",
    "-language:implicitConversions",
    "-language:experimental.macros")

  // Warning configuration. Flags that proved too noisy are kept here for
  // reference: "-Ywarn-unused-import", "-Xfatal-warnings", "-Yinline-warnings".
  private lazy val warnings: Seq[String] = Seq(
    "-unchecked",
    "-Ywarn-nullary-unit",
    "-Xlint",
    "-Ywarn-dead-code",
    "-Xfuture")
}
| hyronx/matter | project/CompileOptions.scala | Scala | apache-2.0 | 452 |
def flatten[A](mma: M[M[A]]): M[A] | hmemcpy/milewski-ctfp-pdf | src/content/3.4/code/scala/snippet12.scala | Scala | gpl-3.0 | 34 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.flink.table.catalog.CatalogTable
import org.apache.flink.table.planner._
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.expressions.PlannerNamedWindowProperty
import org.apache.flink.table.planner.plan.nodes.calcite.{Expand, Rank, WatermarkAssigner, WindowAggregate}
import org.apache.flink.table.planner.plan.nodes.physical.batch._
import org.apache.flink.table.planner.plan.nodes.physical.common.CommonPhysicalLookupJoin
import org.apache.flink.table.planner.plan.nodes.physical.stream._
import org.apache.flink.table.planner.plan.schema.{FlinkPreparingTableBase, TableSourceTable}
import org.apache.flink.table.planner.plan.utils.{FlinkRelMdUtil, RankUtil}
import org.apache.flink.table.runtime.operators.rank.{ConstantRankRange, RankType}
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts
import com.google.common.collect.ImmutableSet
import org.apache.calcite.plan.RelOptTable
import org.apache.calcite.plan.hep.HepRelVertex
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeFactory}
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.metadata._
import org.apache.calcite.rel.{RelNode, SingleRel}
import org.apache.calcite.rex.{RexCall, RexInputRef, RexNode}
import org.apache.calcite.sql.SqlKind
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.util.{Bug, BuiltInMethod, ImmutableBitSet, Util}
import java.util
import java.util.Set
import scala.collection.JavaConversions._
class FlinkRelMdUniqueKeys private extends MetadataHandler[BuiltInMetadata.UniqueKeys] {
  def getDef: MetadataDef[BuiltInMetadata.UniqueKeys] = BuiltInMetadata.UniqueKeys.DEF
  /** Unique keys of a table scan come directly from the underlying table. */
  def getUniqueKeys(
      rel: TableScan,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getTableUniqueKeys(rel.getTable)
  }
  /** Derives unique keys from the table definition: the primary key (when all
    * its columns survive projection) plus any declared unique key sets.
    * Returns null when nothing is known.
    * NOTE(review): the inner match only handles `CatalogTable`; confirm
    * `sourceTable.catalogTable` can never be another subtype here, otherwise
    * a MatchError would be thrown. */
  private def getTableUniqueKeys(relOptTable: RelOptTable): JSet[ImmutableBitSet] = {
    relOptTable match {
      case sourceTable: TableSourceTable =>
        val catalogTable = sourceTable.catalogTable
        catalogTable match {
          case act: CatalogTable =>
            val builder = ImmutableSet.builder[ImmutableBitSet]()
            val schema = act.getResolvedSchema
            if (schema.getPrimaryKey.isPresent) {
              // use relOptTable's type which may be projected based on original schema
              val columns = relOptTable.getRowType.getFieldNames
              val primaryKeyColumns = schema.getPrimaryKey.get().getColumns
              // we check this because a portion of a composite primary key is not unique
              if (columns.containsAll(primaryKeyColumns)) {
                val columnIndices = primaryKeyColumns.map(c => columns.indexOf(c))
                builder.add(ImmutableBitSet.of(columnIndices: _*))
              }
            }
            val uniqueSet = sourceTable.uniqueKeysSet.orElse(null)
            if (uniqueSet != null) {
              builder.addAll(uniqueSet)
            }
            val result = builder.build()
            if (result.isEmpty) null else result
        }
      case table: FlinkPreparingTableBase => table.uniqueKeysSet.orElse(null)
      case _ => null
    }
  }
  /** Unique keys of a projection: map the input's keys through the project. */
  def getUniqueKeys(
      rel: Project,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] =
    getProjectUniqueKeys(rel.getProjects, rel.getInput, mq, ignoreNulls)
  /** A filter only removes rows, so the input's unique keys are preserved. */
  def getUniqueKeys(
      rel: Filter,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = mq.getUniqueKeys(rel.getInput, ignoreNulls)
  /** A calc is handled like a project over its expanded program. */
  def getUniqueKeys(
      calc: Calc,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    val input = calc.getInput
    val projects = calc.getProgram.getProjectList.map(calc.getProgram.expandLocalRef)
    getProjectUniqueKeys(projects, input, mq, ignoreNulls)
  }
  // Convenience overload that pulls the input's unique keys from the metadata query.
  private def getProjectUniqueKeys(
      projects: JList[RexNode],
      input: RelNode,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getProjectUniqueKeys(
      projects,
      input.getCluster.getTypeFactory,
      () => mq.getUniqueKeys(input, ignoreNulls),
      ignoreNulls)
  }
  /** Maps the input's unique keys through a projection.
    *
    * Only identity-like mappings are considered key-preserving: direct input
    * refs, renames (`AS`), and lossless casts. An input key survives only if
    * every one of its columns is projected in such a way. Returns an empty
    * set when no input column is projected at all.
    */
  def getProjectUniqueKeys(
      projects: JList[RexNode],
      typeFactory: RelDataTypeFactory,
      getInputUniqueKeys :() => util.Set[ImmutableBitSet],
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    // LogicalProject maps a set of rows to a different set;
    // Without knowledge of the mapping function(whether it
    // preserves uniqueness), it is only safe to derive uniqueness
    // info from the child of a project when the mapping is f(a) => a.
    //
    // Further more, the unique bitset coming from the child needs to be
    // remapped to the corresponding output positions of the project; an input
    // column may appear at several output positions, hence the multimap below.
    val projUniqueKeySet = new JHashSet[ImmutableBitSet]()
    val mapInToOutPos = new JHashMap[Int, JArrayList[Int]]()
    // Records that input column `inIndex` is visible at output column `outIndex`.
    def appendMapInToOutPos(inIndex: Int, outIndex: Int): Unit = {
      if (mapInToOutPos.contains(inIndex)) {
        mapInToOutPos(inIndex).add(outIndex)
      } else {
        val arrayBuffer = new JArrayList[Int]()
        arrayBuffer.add(outIndex)
        mapInToOutPos.put(inIndex, arrayBuffer)
      }
    }
    // Build an input to output position map.
    projects.zipWithIndex.foreach {
      case (projExpr, i) =>
        projExpr match {
          case ref: RexInputRef => appendMapInToOutPos(ref.getIndex, i)
          // When nulls are ignored, a CAST that only changes nullability is identity.
          case a: RexCall if ignoreNulls && a.getOperator.equals(SqlStdOperatorTable.CAST) =>
            val castOperand = a.getOperands.get(0)
            castOperand match {
              case castRef: RexInputRef =>
                val castType = typeFactory.createTypeWithNullability(projExpr.getType, true)
                val origType = typeFactory.createTypeWithNullability(castOperand.getType, true)
                if (castType == origType) {
                  appendMapInToOutPos(castRef.getIndex, i)
                }
              case _ => // ignore
            }
          //rename or cast
          case a: RexCall if (a.getKind.equals(SqlKind.AS) || isFidelityCast(a)) &&
            a.getOperands.get(0).isInstanceOf[RexInputRef] =>
            appendMapInToOutPos(a.getOperands.get(0).asInstanceOf[RexInputRef].getIndex, i)
          case _ => // ignore
        }
    }
    if (mapInToOutPos.isEmpty) {
      // if there's no RexInputRef in the projected expressions
      // return empty set.
      return projUniqueKeySet
    }
    val childUniqueKeySet = getInputUniqueKeys()
    if (childUniqueKeySet != null) {
      // Now add to the projUniqueKeySet the child keys that are fully
      // projected.
      childUniqueKeySet.foreach { colMask =>
        val filerInToOutPos = mapInToOutPos.filter { inToOut =>
          colMask.asList().contains(inToOut._1)
        }
        val keys = filerInToOutPos.keys
        if (colMask.forall(keys.contains(_))) {
          // Enumerate output-position combinations for this input key.
          // NOTE(review): `0 to total` runs total+1 iterations; the extra
          // iteration (i == total) reproduces the i == 0 combination and is
          // absorbed by the set — `0 until total` would be the tight bound.
          val total = filerInToOutPos.map(_._2.size).product
          for (i <- 0 to total) {
            val tmpMask = ImmutableBitSet.builder()
            filerInToOutPos.foreach { inToOut =>
              val outs = inToOut._2
              tmpMask.set(outs.get(i % outs.size))
            }
            projUniqueKeySet.add(tmpMask.build())
          }
        }
      }
    }
    projUniqueKeySet
  }
  /**
   * Whether the [[RexCall]] is a cast that doesn't lose any information,
   * i.e. the operand's logical type supports an implicit cast to the target type.
   */
  private def isFidelityCast(call: RexCall): Boolean = {
    if (call.getKind != SqlKind.CAST) {
      return false
    }
    val originalType = FlinkTypeFactory.toLogicalType(call.getOperands.get(0).getType)
    val newType = FlinkTypeFactory.toLogicalType(call.getType)
    LogicalTypeCasts.supportsImplicitCast(originalType, newType)
  }
  def getUniqueKeys(
      rel: Expand,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getExpandUniqueKeys(rel, () => mq.getUniqueKeys(rel.getInput, ignoreNulls))
  }
  /** Unique keys of an Expand: each input unique key that survives the expand
    * (all its columns map 1:1 to output columns), combined with the expand_id
    * column, forms an output unique key. Returns null when nothing is known. */
  def getExpandUniqueKeys(
      rel: Expand, getInputUniqueKeys :() => util.Set[ImmutableBitSet]): JSet[ImmutableBitSet] = {
    // mapping input column index to output index for non-null value columns
    val mapInputToOutput = new JHashMap[Int, Int]()
    (0 until rel.getRowType.getFieldCount).filter(_ != rel.expandIdIndex).foreach { column =>
      val inputRefs = FlinkRelMdUtil.getInputRefIndices(column, rel)
      // expand columns corresponding to a given index should be same input ref.
      if (inputRefs.size() == 1 && inputRefs.head >= 0) {
        mapInputToOutput.put(inputRefs.head, column)
      }
    }
    if (mapInputToOutput.isEmpty) {
      return null
    }
    val inputUniqueKeys = getInputUniqueKeys()
    if (inputUniqueKeys == null || inputUniqueKeys.isEmpty) {
      return inputUniqueKeys
    }
    // values of expand_is are unique in rows expanded from a row,
    // and a input unique key combined with expand_id are also unique
    val outputUniqueKeys = new JHashSet[ImmutableBitSet]()
    inputUniqueKeys.foreach { uniqueKey =>
      val outputUniqueKeyBuilder = ImmutableBitSet.builder()
      // a input unique key can be output only its values are all in `mapInputToOutput`
      val canOutput = uniqueKey.toList.forall { key =>
        if (mapInputToOutput.contains(key)) {
          outputUniqueKeyBuilder.set(mapInputToOutput.get(key))
          true
        } else {
          false
        }
      }
      if (canOutput) {
        // unique key from input combined with expand id are unique
        outputUniqueKeyBuilder.set(rel.expandIdIndex)
        outputUniqueKeys.add(outputUniqueKeyBuilder.build())
      }
    }
    if (outputUniqueKeys.isEmpty) null else outputUniqueKeys
  }
  /** An exchange only redistributes rows; input unique keys are preserved. */
  def getUniqueKeys(
      rel: Exchange,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = mq.getUniqueKeys(rel.getInput, ignoreNulls)
  def getUniqueKeys(
      rel: Rank,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getRankUniqueKeys(rel, mq.getUniqueKeys(rel.getInput, ignoreNulls))
  }
  /** Unique keys of a Rank: the partition key alone when the rank is in fact
    * a deduplication (ROW_NUMBER limited to rank 1..1); otherwise partition
    * key + rank-number column (for ROW_NUMBER) plus any surviving input keys. */
  def getRankUniqueKeys(rel: Rank, inputKeys: JSet[ImmutableBitSet]): JSet[ImmutableBitSet] = {
    val rankFunColumnIndex = RankUtil.getRankNumberColumnIndex(rel).getOrElse(-1)
    // for Rank node that can convert to Deduplicate, unique key is partition key
    val canConvertToDeduplicate: Boolean = {
      val rankRange = rel.rankRange
      val isRowNumberType = rel.rankType == RankType.ROW_NUMBER
      val isLimit1 = rankRange match {
        case rankRange: ConstantRankRange =>
          rankRange.getRankStart == 1 && rankRange.getRankEnd == 1
        case _ => false
      }
      isRowNumberType && isLimit1
    }
    if (canConvertToDeduplicate) {
      val retSet = new JHashSet[ImmutableBitSet]
      retSet.add(rel.partitionKey)
      retSet
    }
    else if (rankFunColumnIndex < 0) {
      // rank number column not projected: keys pass through unchanged
      inputKeys
    } else {
      val retSet = new JHashSet[ImmutableBitSet]
      rel.rankType match {
        case RankType.ROW_NUMBER =>
          // (partition key, row number) uniquely identifies each output row
          retSet.add(rel.partitionKey.union(ImmutableBitSet.of(rankFunColumnIndex)))
        case _ => // do nothing
      }
      if (inputKeys != null && inputKeys.nonEmpty) {
        inputKeys.foreach {
          uniqueKey => retSet.add(uniqueKey)
        }
      }
      retSet
    }
  }
  /** Sorting only reorders rows; input unique keys are preserved. */
  def getUniqueKeys(
      rel: Sort,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = mq.getUniqueKeys(rel.getInput, ignoreNulls)
  /** A deduplicate node's declared key columns form its unique key. */
  def getUniqueKeys(
      rel: StreamPhysicalDeduplicate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    ImmutableSet.of(ImmutableBitSet.of(rel.getUniqueKeys.map(Integer.valueOf).toList))
  }
  /** Changelog normalization keys on its declared unique key columns. */
  def getUniqueKeys(
      rel: StreamPhysicalChangelogNormalize,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    ImmutableSet.of(ImmutableBitSet.of(rel.uniqueKeys.map(Integer.valueOf).toList))
  }
  /** Dropping UPDATE_BEFORE messages does not affect key uniqueness. */
  def getUniqueKeys(
      rel: StreamPhysicalDropUpdateBefore,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    mq.getUniqueKeys(rel.getInput, ignoreNulls)
  }
  /** Group-by keys of an aggregate form a unique key of its output. */
  def getUniqueKeys(
      rel: Aggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOnAggregate(rel.getGroupSet.toArray)
  }
  /** Only final (non-partial) batch aggregates produce one row per group. */
  def getUniqueKeys(
      rel: BatchPhysicalGroupAggregateBase,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    if (rel.isFinal) {
      getUniqueKeysOnAggregate(rel.grouping)
    } else {
      null
    }
  }
  def getUniqueKeys(
      rel: StreamPhysicalGroupAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOnAggregate(rel.grouping)
  }
  // Local (partial) aggregates may emit several rows per group: no unique key.
  def getUniqueKeys(
      rel: StreamPhysicalLocalGroupAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = null
  def getUniqueKeys(
      rel: StreamPhysicalGlobalGroupAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOnAggregate(rel.grouping)
  }
  def getUniqueKeysOnAggregate(grouping: Array[Int]): util.Set[ImmutableBitSet] = {
    // group by keys form a unique key
    // (group keys occupy the first output positions, hence indices 0..n-1)
    ImmutableSet.of(ImmutableBitSet.of(grouping.indices: _*))
  }
  def getUniqueKeys(
      rel: WindowAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): util.Set[ImmutableBitSet] = {
    getUniqueKeysOnWindowAgg(
      rel.getRowType.getFieldCount,
      rel.getNamedProperties,
      rel.getGroupSet.toArray)
  }
  /** Only final (non-partial) batch window aggregates yield unique keys. */
  def getUniqueKeys(
      rel: BatchPhysicalWindowAggregateBase,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): util.Set[ImmutableBitSet] = {
    if (rel.isFinal) {
      getUniqueKeysOnWindowAgg(
        rel.getRowType.getFieldCount,
        rel.namedWindowProperties,
        rel.grouping)
    } else {
      null
    }
  }
  def getUniqueKeys(
      rel: StreamPhysicalGroupWindowAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): util.Set[ImmutableBitSet] = {
    getUniqueKeysOnWindowAgg(
      rel.getRowType.getFieldCount, rel.namedWindowProperties, rel.grouping)
  }
  /** Unique keys of a window aggregate: the group keys combined with each
    * named window property column (e.g. window start/end). Null when there
    * are no window properties. */
  def getUniqueKeysOnWindowAgg(
      fieldCount: Int,
      namedProperties: Seq[PlannerNamedWindowProperty],
      grouping: Array[Int]): util.Set[ImmutableBitSet] = {
    if (namedProperties.nonEmpty) {
      val begin = fieldCount - namedProperties.size
      val end = fieldCount - 1
      //namedProperties's indexes is at the end of output record
      val keys = ImmutableBitSet.of(grouping.indices: _*)
      (begin to end).map {
        i => keys.union(ImmutableBitSet.of(i))
      }.toSet[ImmutableBitSet]
    } else {
      null
    }
  }
  def getUniqueKeys(
      rel: Window,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOfOverAgg(rel, mq, ignoreNulls)
  }
  def getUniqueKeys(
      rel: BatchPhysicalOverAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOfOverAgg(rel, mq, ignoreNulls)
  }
  def getUniqueKeys(
      rel: StreamPhysicalOverAggregate,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    getUniqueKeysOfOverAgg(rel, mq, ignoreNulls)
  }
  // An over (window) aggregate appends columns without changing row count,
  // so the input's unique keys carry over unchanged.
  private def getUniqueKeysOfOverAgg(
      window: SingleRel,
      mq: RelMetadataQuery,
      ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
    mq.getUniqueKeys(window.getInput, ignoreNulls)
  }
/**
 * Unique keys for a join. SEMI/ANTI joins emit only rows of the left input,
 * so the left input's keys carry over unchanged; every other join type
 * combines the key metadata of both inputs.
 */
def getUniqueKeys(
    join: Join,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
  val joinType = join.getJoinType
  if (joinType == JoinRelType.SEMI || joinType == JoinRelType.ANTI) {
    // only return the unique keys from the LHS since a SEMI/ANTI join only
    // returns the LHS
    mq.getUniqueKeys(join.getLeft, ignoreNulls)
  } else {
    getJoinUniqueKeys(
      join.analyzeCondition(), joinType, join.getLeft, join.getRight, mq, ignoreNulls)
  }
}
/**
 * Unique keys for an interval join, computed from its original
 * (pre-rewrite) join condition.
 */
def getUniqueKeys(
    rel: StreamPhysicalIntervalJoin,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
  val info = JoinInfo.of(rel.getLeft, rel.getRight, rel.originalCondition)
  getJoinUniqueKeys(info, rel.getJoinType, rel.getLeft, rel.getRight, mq, ignoreNulls)
}
/**
 * Unique keys for a lookup join. Only the probe (streaming input) side can
 * contribute: the dimension-table side currently exposes no unique key
 * metadata, so null is passed for its keys and uniqueness.
 */
def getUniqueKeys(
    join: CommonPhysicalLookupJoin,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): util.Set[ImmutableBitSet] = {
  val probeSide = join.getInput
  val probeSideKeys = mq.getUniqueKeys(probeSide, ignoreNulls)
  val probeSideUnique = mq.areColumnsUnique(probeSide, join.joinInfo.leftSet, ignoreNulls)
  getJoinUniqueKeys(
    join.joinType,
    probeSide.getRowType,
    probeSideKeys,
    null,
    probeSideUnique,
    // TODO get uniqueKeys from TableSchema of TableSource
    null)
}
/**
 * Gathers both inputs' unique keys plus their uniqueness on the equi-join
 * columns, then delegates to the shared join-key computation.
 */
private def getJoinUniqueKeys(
    joinInfo: JoinInfo,
    joinRelType: JoinRelType,
    left: RelNode,
    right: RelNode,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] =
  getJoinUniqueKeys(
    joinRelType,
    left.getRowType,
    mq.getUniqueKeys(left, ignoreNulls),
    mq.getUniqueKeys(right, ignoreNulls),
    mq.areColumnsUnique(left, joinInfo.leftSet, ignoreNulls),
    mq.areColumnsUnique(right, joinInfo.rightSet, ignoreNulls))
/**
 * Core computation of join unique keys, shared by all join handlers.
 *
 * Produces: (1) every union of a left key with a right key (right keys
 * shifted into joined-row positions), and (2) each side's own keys alone
 * when the opposite side is unique on its equi-join columns and this side
 * is not null-generating.
 *
 * @param joinRelType join type, used to check null-generating sides
 * @param leftType row type of the left input (for the field offset)
 * @param leftUniqueKeys unique keys of the left input, or null if unknown
 * @param rightUniqueKeys unique keys of the right input, or null if unknown
 * @param isLeftUnique whether the left side is unique on its equi-join columns (nullable)
 * @param isRightUnique whether the right side is unique on its equi-join columns (nullable)
 * @return the combined unique key sets (possibly empty, never null)
 */
def getJoinUniqueKeys(
    joinRelType: JoinRelType,
    leftType: RelDataType,
    leftUniqueKeys: JSet[ImmutableBitSet],
    rightUniqueKeys: JSet[ImmutableBitSet],
    isLeftUnique: JBoolean,
    isRightUnique: JBoolean): JSet[ImmutableBitSet] = {
  // first add the different combinations of concatenated unique keys
  // from the left and the right, adjusting the right hand side keys to
  // reflect the addition of the left hand side
  //
  // NOTE zfong 12/18/06 - If the number of tables in a join is large,
  // the number of combinations of unique key sets will explode. If
  // that is undesirable, use RelMetadataQuery.areColumnsUnique() as
  // an alternative way of getting unique key information.
  val retSet = new JHashSet[ImmutableBitSet]
  val nFieldsOnLeft = leftType.getFieldCount
  // shift each right-side key by the left field count so its bits refer to
  // positions within the joined row type
  val rightSet = if (rightUniqueKeys != null) {
    val res = new JHashSet[ImmutableBitSet]
    rightUniqueKeys.foreach { colMask =>
      val tmpMask = ImmutableBitSet.builder
      colMask.foreach(bit => tmpMask.set(bit + nFieldsOnLeft))
      res.add(tmpMask.build())
    }
    if (leftUniqueKeys != null) {
      // cross product: the union of any left key with any shifted right key
      // is unique on the joined row
      res.foreach { colMaskRight =>
        leftUniqueKeys.foreach(colMaskLeft => retSet.add(colMaskLeft.union(colMaskRight)))
      }
    }
    res
  } else {
    null
  }
  // determine if either or both the LHS and RHS are unique on the
  // equi-join columns
  val leftUnique = isLeftUnique
  val rightUnique = isRightUnique
  // if the right hand side is unique on its equijoin columns, then we can
  // add the unique keys from left if the left hand side is not null
  // generating
  if (rightUnique != null
    && rightUnique
    && (leftUniqueKeys != null)
    && !joinRelType.generatesNullsOnLeft) {
    retSet.addAll(leftUniqueKeys)
  }
  // same as above except left and right are reversed
  if (leftUnique != null
    && leftUnique
    && (rightSet != null)
    && !joinRelType.generatesNullsOnRight) {
    retSet.addAll(rightSet)
  }
  retSet
}
// A correlate can multiply input rows, so no unique keys are known (null = "unknown").
def getUniqueKeys(
    rel: Correlate,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): util.Set[ImmutableBitSet] = null
// Same as the logical Correlate rule: unique keys are unknown (null).
def getUniqueKeys(
    rel: BatchPhysicalCorrelate,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): util.Set[ImmutableBitSet] = null
/**
 * Unique keys for a set operation. Without ALL, duplicates are removed so
 * the full output row is a unique key; with ALL, duplicates may remain and
 * no unique key exists (empty set).
 */
def getUniqueKeys(
    rel: SetOp,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
  if (rel.all) {
    ImmutableSet.of()
  } else {
    ImmutableSet.of(ImmutableBitSet.range(rel.getRowType.getFieldCount))
  }
}
/**
 * Unique keys for a volcano planner subset. While CALCITE-1048 is unfixed,
 * look at the subset's best node, falling back to the original node (which
 * is logically equivalent) when no best node has been chosen yet.
 */
def getUniqueKeys(
    subset: RelSubset,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] = {
  if (Bug.CALCITE_1048_FIXED) {
    throw new RuntimeException("CALCITE_1048 is fixed, so check this method again!")
  }
  // if the best node is null we can derive uniqueKeys from the original node,
  // because the original node is logically equivalent to this subset
  val rel = Util.first(subset.getBest, subset.getOriginal)
  mq.getUniqueKeys(rel, ignoreNulls)
}
/** Unique keys for a HEP planner vertex: delegate to the rel it currently wraps. */
def getUniqueKeys(
    subset: HepRelVertex,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] =
  mq.getUniqueKeys(subset.getCurrentRel, ignoreNulls)
/** A watermark assigner forwards rows unchanged, so its input's keys apply. */
def getUniqueKeys(
    subset: WatermarkAssigner,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] =
  mq.getUniqueKeys(subset.getInput, ignoreNulls)
// Catch-all rule when none of the others apply.
// Returning null means "no information" rather than "no unique keys".
def getUniqueKeys(
    rel: RelNode,
    mq: RelMetadataQuery,
    ignoreNulls: Boolean): JSet[ImmutableBitSet] = null
}
/**
 * Companion holding the singleton handler and the metadata provider that
 * registers it with Calcite via reflection on BuiltInMethod.UNIQUE_KEYS.
 */
object FlinkRelMdUniqueKeys {
  val INSTANCE = new FlinkRelMdUniqueKeys
  val SOURCE: RelMetadataProvider = ReflectiveRelMetadataProvider.reflectiveSource(
    BuiltInMethod.UNIQUE_KEYS.method, INSTANCE)
}
| StephanEwen/incubator-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdUniqueKeys.scala | Scala | apache-2.0 | 22,250 |
package realworld.domain
import akka.actor.ActorRef
import juju.domain.Saga
import juju.domain.resolvers.CorrelationIdField
import realworld.domain.Manager.{OrderNotValidatedByManager, OrderValidatedByManager}
import realworld.domain.Order.{RejectOrder, AcceptOrder, OrderPlaced}
/**
 * Saga coordinating validation of a single order: it collects manager
 * validation events and, once enough responses have arrived, delivers an
 * AcceptOrder or RejectOrder command through the command router.
 *
 * All event handlers correlate on the event's `orderId` field.
 */
class OrderProcessor(orderId: String, commandRouter: ActorRef) extends Saga {
  // outcome flags, set exactly once when the saga completes
  var accepted = false
  var rejected = false
  // managers that have (not) validated this order so far
  var managerIdsValidated : Set[String] = Set.empty
  var managerIdsNotValidated : Set[String] = Set.empty
  // description captured from OrderPlaced; needed for the follow-up commands
  var orderDescription : Option[String] = None

  @CorrelationIdField(fieldname = "orderId")
  def apply(event: OrderPlaced): Unit = {
    orderDescription = Some(event.description)
  }

  // presumably exactly two distinct manager validations are required — TODO confirm
  @CorrelationIdField(fieldname = "orderId")
  def apply(event: OrderValidatedByManager): Unit = {
    managerIdsValidated += event.managerId
    if (managerIdsValidated.size == 2) {
      accepted = true
      // NOTE(review): orderDescription.get assumes OrderPlaced is always seen
      // before the second validation event — verify the delivery ordering
      deliverCommand(commandRouter, AcceptOrder(event.orderId, orderDescription.get))
      markAsCompleted()
    }
  }

  @CorrelationIdField(fieldname = "orderId")
  def apply(event: OrderNotValidatedByManager): Unit = {
    managerIdsNotValidated += event.managerId
    if (managerIdsNotValidated.size == 2) {
      rejected = true
      deliverCommand(commandRouter, RejectOrder(event.orderId, orderDescription.get, s"Manager ${event.managername} doesn't validate order with reason ${event.reason}"))
      markAsCompleted()
    }
  }
}
| brokersquare/juju.realworldcqrs | src/main/scala/realworld/domain/OrderProcessor.scala | Scala | gpl-2.0 | 1,446 |
/*
* Copyright 2012 Emil Hellman
*
* This file is part of SortableChallenge.
*
* SortableChallenge is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* SortableChallenge is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with SortableChallenge. If not, see <http://www.gnu.org/licenses/>.
*/
package sortablechallenge.probability
import org.specs2.mutable._
import sortablechallenge.tokenizers._
import sortablechallenge.model._
import sortablechallenge.cleaners._
/**
 * Specs for the category-given-message estimator: verifies that per-product
 * probabilities are normalised over all products, and that listings are
 * cleaned (case/punctuation) before estimation.
 */
class CategoryGivenMessageEstimatorSpec extends Specification {
  "CategoryGivenMessageEstimator" should {
    "for all products and all listings calculate the probability of the listings belonging to the products" in {
      // long test...
      // Given
      val prod1 = Product("FOO_Bar_Biz", "fun boat",Some("boat fun"), "angeredgod", "ignored :(")
      val prod2 = Product("eskil", "", None, "loo","")
      // stubbed word-list estimator: 0.75 for words of the product, 0.2 otherwise
      val estimator = new CategoryGivenMessageEstimator[Listing, Product] with MessageCategoryListEstimator[Listing,Product] {
        val messageTokenizer = Tokenizer
        val dictionary = Set("fun", "boat", "angeredgod", "loo")
        val messageCleaner = new ListingCleaner
        val categoriesToWords = Map(
          prod1 -> List("fun","boat","angeredgod"),
          prod2 -> List("loo")
        )
        def wordListGivenCategoryEstimator(categoryWords:List[String], dictionary:Set[String]) =
          new TestWordListGivenCategoryEstimator(categoryWords)
        def priorCategoryProbability(category:Product):Double = 0.5
      }
      val listing1 = Listing("","boat","fasdf","ONE MILLION DOLLARS!")
      val listings = List (
        listing1
      )
      // When
      val categoryToMessage:List[MessageGivenCategories[Listing, Product]] = estimator estimate listings
      // Then
      categoryToMessage must have size(1)
      // NOTE(review): List.sort is deprecated in favour of sortWith — consider migrating
      val listing1Sorted = categoryToMessage(0).categoryEstimates.sort((a, b) => a.estimate > b.estimate)
      listing1Sorted must have size(2)
      listing1Sorted(0).category must_== prod1
      listing1Sorted(1).category must_== prod2
      listing1Sorted(0).estimate must_==(0.75d/(0.2 + 0.75)) // probability of listing given category divided by total probability
      listing1Sorted(1).estimate must_==(0.2d/(0.2 + 0.75)) // probability of listing given category divided by total probability
    }
    "clean each listing before estimating it" in {
      // Given
      val prod1 = Product("FOO_Bar_Biz", "fun boat",Some("boat fun"), "angeredgod", "ignored :(")
      val prod2 = Product("eskil", "", None, "loo","")
      val estimator = new CategoryGivenMessageEstimator[Listing, Product] with MessageCategoryListEstimator[Listing,Product] {
        val messageTokenizer = Tokenizer
        val dictionary = Set("fun", "boat", "angeredgod", "loo")
        val messageCleaner = new ListingCleaner
        val categoriesToWords = Map(
          prod1 -> List("fun","boat","angeredgod"),
          prod2 -> List("loo")
        )
        def wordListGivenCategoryEstimator(categoryWords:List[String], dictionary:Set[String]) =
          new ConcreteWordListGivenCategoryEstimator(categoryWords, dictionary, 1)
        def priorCategoryProbability(category:Product):Double = 0.5
      }
      // listing2 is a noisy variant of listing1; after cleaning they must be equivalent
      val listing1 = Listing("agro boat lalala booo","boat","fasdf angeredgod","ONE MILLION DOLLARS!")
      val listing2 = Listing("Agro!boat LalAla #__¤ booo","BoAt","fasdf&&&!_AnGerEdGOD","ONE MILLION DOLLARS!")
      val listings = List (
        listing1, listing2
      )
      // When
      val categoryToMessage:List[MessageGivenCategories[Listing, Product]] = estimator estimate listings
      //Then
      categoryToMessage must have size(2)
      // both listings must yield identical estimates once cleaned
      categoryToMessage(0).categoryEstimates must containAllOf(categoryToMessage(1).categoryEstimates)
      categoryToMessage(1).categoryEstimates must containAllOf(categoryToMessage(0).categoryEstimates)
    }
  }
}
/**
 * Specs for the message-category list estimator: prior-only estimates for
 * empty listings, naive-Bayes word products with Laplace smoothing, and
 * filtering of out-of-dictionary words.
 */
class MessageCategoryEstimatorSpec extends Specification {
  "MessageCategoryListEstimator" should {
    "if there are no words in the listing then the estimate should equal the priorProbability for the product" in {
      val prod1 = Product("FOO_Bar_Biz", "foo the",Some("bar the foo"), "biz", "")
      val prod2 = Product("salut!", "salut", None, "","")
      val estimator =
        new MessageCategoryListEstimator[Listing, Product] {
          val messageTokenizer = Tokenizer
          val categoriesToWords = Map(
            prod1 -> List("foo", "the", "bar", "the", "foo", "biz"),
            prod2 -> List("salut")
          )
          val dictionary = Set("foo", "the", "bar", "biz")
          def wordListGivenCategoryEstimator(categoryWords:List[String], dictionary:Set[String]) =
            new ConcreteWordListGivenCategoryEstimator(categoryWords, dictionary, 1)
          // distinct priors so the test can tell the products apart
          def priorCategoryProbability(product:Product):Double = {
            if(product == prod1) {
              0.6
            } else if (product == prod2) {
              0.3
            } else {
              throw new IllegalArgumentException("Expected only prod1 and prod2...")
            }
          }
        }
      // listing carries no dictionary words, so only the priors remain
      val listing1 = Listing("", "", "MCE", "2.55")
      val estimates:List[CategoryEstimate[Product]] = estimator estimate listing1
      estimates(0).estimate must_== 0.6
      estimates(1).estimate must_== 0.3
    }
    "estimate, for all products, the probability of the words in the listing given product words" in {
      val prod1 = Product("FOO_Bar_Biz", "foo the",Some("bar the foo"), "biz", "")
      val prod2 = Product("salut!", "salut", None, "","")
      val estimator = new MessageCategoryListEstimator[Listing,Product] {
        val messageTokenizer = Tokenizer
        val K = 1
        val categoriesToWords = Map(
          prod1 -> List("foo", "the", "bar", "the", "foo", "biz"),
          prod2 -> List("salut")
        )
        val dictionary = Set("foo", "the", "bar", "biz", "salut")
        def wordListGivenCategoryEstimator(categoryWords:List[String], dictionary:Set[String]) =
          new ConcreteWordListGivenCategoryEstimator(categoryWords, dictionary, 1)
        def priorCategoryProbability(product:Product):Double = 0.5
      }
      val listing1 = Listing("the biz", "", "MCE", "2.55")
      val estimates:List[CategoryEstimate[Product]] = estimator estimate listing1
      // prior * smoothed P(word|product) for each word: (count + K) / (total + K*|dict|)
      estimates(0).estimate must_==(0.5 * (2d /(6 + 5))*(3d /(6 + 5)))
      estimates(1).estimate must_==(0.5 * (1d /(1 + 5))*(1d /(1 + 5)))
    }
    "filter out words not in the dictionary " in {
      val prod1 = Product("FOO_Bar_Biz", "foo the",Some("bar the foo"), "biz", "")
      val prod2 = Product("salut!", "salut", None, "","")
      val estimator = new MessageCategoryListEstimator[Listing,Product] {
        val messageTokenizer = Tokenizer
        val K = 1
        val categoriesToWords = Map(
          prod1 -> List("foo", "the", "bar", "the", "foo", "biz"),
          prod2 -> List("salut")
        )
        val dictionary = Set("foo", "the", "bar", "biz", "salut")
        def wordListGivenCategoryEstimator(categoryWords:List[String], dictionary:Set[String]) =
          new ConcreteWordListGivenCategoryEstimator(categoryWords, dictionary, 1)
        def priorCategoryProbability(product:Product):Double = 0.5
      }
      // "lollipop" is outside the dictionary, so the result matches "the biz" alone
      val listing1 = Listing("the lollipop biz", "", "MCE", "2.55")
      val estimates:List[CategoryEstimate[Product]] = estimator estimate listing1
      estimates(0).estimate must_==(0.5 * (2d /(6 + 5))*(3d /(6 + 5)))
      estimates(1).estimate must_==(0.5 * (1d /(1 + 5))*(1d /(1 + 5)))
    }
  }
}
/**
 * Specs for the word-list-given-category estimator: joined word
 * probabilities, per-word smoothed probabilities, and sensitivity to the
 * smoothing constant K.
 */
class WordListGivenCategoryEstimatorSpec extends Specification {
  "WordListGivenCategoryEstimator" should {
    "estimate the joined probability of all words in the listing given a product" in {
      val estimator = new WordListGivenCategoryEstimator with SmoothedProbabilityCalculator {
        val K = 1
        val categoryWords = List("foo", "the", "bar", "the", "foo", "biz")
        val dictionary = Set("foo", "the", "bar", "biz")
      }
      val wordList1 = List("lollipop", "foo")
      val categoryProbability = 0.5
      // product over words of (count + K) / (|categoryWords| + K*|dictionary|)
      estimator.estimate(categoryProbability, wordList1) must_==(0.5 * (1d /(6 + 4))*(3d /(6 + 4)))
      // empty word list leaves the prior untouched
      estimator.estimate(categoryProbability, Nil) must_==(0.5)
    }
    "given a list of product words and a dictionary estimate the probability for the word given the product" in {
      val estimator = new WordListGivenCategoryEstimator with SmoothedProbabilityCalculator {
        val K = 1
        val categoryWords = List("foo", "the", "bar", "the", "foo", "biz")
        val dictionary = Set("foo", "the", "bar", "biz")
      }
      // unseen word gets the smoothed floor probability
      val word1 = "lollipop"
      estimator estimate word1 must_==(1d /(6 + 4))
      val word2 = "foo"
      estimator estimate word2 must_==(3d /(6 + 4))
    }
    "given a different result when K is different" in {
      val estimator = new WordListGivenCategoryEstimator with SmoothedProbabilityCalculator {
        val K = 2
        val categoryWords = List("foo", "the", "bar", "the", "foo", "biz")
        val dictionary = Set("foo", "the", "bar", "biz")
      }
      val word1 = "lollipop"
      estimator estimate word1 must_==(2d /(6 + 8))
      val word2 = "foo"
      estimator estimate word2 must_==(4d /(6 + 8))
    }
  }
}
/**
 * Specs for Laplace ("add-K") smoothing:
 * probability = (matches + K) / (words + K * dictionarySize),
 * which is strictly positive for any positive K.
 */
class SmoothedProbabilityCalculatorSpec extends Specification {
  "A SmoothedProbabilityCalculator" should {
    "calculate a probability with laplacian smoothing so probabilites are never 0 given a possitive K" in {
      val smoother = new SmoothedProbabilityCalculator { val K = 1 }
      // zero matches out of 10 words, 30-word dictionary: (0 + 1) / (10 + 30)
      smoother.computeProbability(0, 10, 30) must_== 0.025
    }
    "calculate another probability with laplacian smoothing so probabilites are never 0 given a possitive K" in {
      val smoother = new SmoothedProbabilityCalculator { val K = 1 }
      // five matches out of 10 words, 30-word dictionary: (5 + 1) / (10 + 30)
      smoother.computeProbability(5, 10, 30) must_== 0.15
    }
  }
}
/**
 * Stub estimator used by the specs: scores a word list 0.75 when its first
 * word belongs to the given product words, 0.2 otherwise. The inherited
 * members are never consulted, so they are left null/unused.
 */
class TestWordListGivenCategoryEstimator(testWord: List[String]) extends WordListGivenCategoryEstimator with SmoothedProbabilityCalculator {
  val categoryWords: List[String] = null
  val dictionary: Set[String] = null
  val K = 1

  // single-word estimation is irrelevant for this stub
  override def estimate(word: String): Double = 0

  // fixed high score on a first-word match, fixed low score otherwise
  override def estimate(categoryProbability: Double, wordList: List[String]): Double =
    if (testWord.contains(wordList.head)) 0.75d else 0.2d
}
| archevel/SortableCodingChallenge | src/test/scala/sortablechallenge/probability/ProbabilitySpecs.scala | Scala | gpl-3.0 | 10,548 |
package k2b6s9j.singingKIA.Songs
// Placeholder for the "What's My Name" song; no behaviour implemented yet.
object WhatsMyName {
}
| kepler0/singingKIA | src/main/scala/k2b6s9j/singingKIA/Songs/WhatsMyName.scala | Scala | mit | 58 |
/*
* Copyright (C) 2011-2017 Interfaculty Department of Geoinformatics, University of
* Salzburg (Z_GIS) & Institute of Geological and Nuclear Sciences Limited (GNS Science)
* in the SMART Aquifer Characterisation (SAC) programme funded by the New Zealand
* Ministry of Business, Innovation and Employment (MBIE) and Department of Geography,
* University of Tartu, Estonia (UT) under the ETAG Mobilitas Pluss grant No. MOBJD233.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.rdf
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
import utils.ClassnameLogger
/**
* container class for Research programme info
*
* @param titleName
* @param abbrev
* @param description
* @param fundingSource
* @param contactPersonName
* @param leadOrganisationName
* @param linkTo
*/
/**
 * Container for one research programme, rendered as a SKOS concept.
 *
 * Free-text field values are XML-escaped before interpolation so that
 * characters such as '&' or '<' in names/descriptions cannot produce
 * malformed RDF/XML (previously they were interpolated verbatim).
 */
final case class ResearchPGHolder(titleName: String,
                                  abbrev: String,
                                  description: String,
                                  fundingSource: String,
                                  contactPersonName: String,
                                  leadOrganisationName: String,
                                  linkTo: String) extends ClassnameLogger {

  // minimal XML escaping for element content and attribute values
  private def xmlEscape(value: String): String =
    value.flatMap {
      case '&' => "&amp;"
      case '<' => "&lt;"
      case '>' => "&gt;"
      case '"' => "&quot;"
      case other => other.toString
    }

  /**
   * Renders this programme as a skos:Concept element.
   *
   * @param vocabUrl base URL of the vocabulary service (assumed well-formed)
   */
  def toRdf(vocabUrl: String): String = {
    val safeAbbrev = xmlEscape(abbrev)
    s"""<skos:Concept rdf:about="${vocabUrl}/researchpg/term/$safeAbbrev">
<skos:label>$safeAbbrev</skos:label>
<dc:identifier>$safeAbbrev</dc:identifier>
<dc:title>${xmlEscape(titleName)}</dc:title>
<dc:type>${xmlEscape(fundingSource)}</dc:type>
<dc:relation>${xmlEscape(linkTo)}</dc:relation>
<dc:description>${xmlEscape(description)}</dc:description>
<dc:creator>${xmlEscape(leadOrganisationName)}</dc:creator>
<dc:contributor>${xmlEscape(contactPersonName)}</dc:contributor>
<skos:inCollection rdf:resource="${vocabUrl}/collection/researchpg/terms"/>
</skos:Concept>
"""
  }
}
/**
* companion object
*/
/**
 * Companion object: assembles the complete SKOS/DC RDF document for a list
 * of research programmes.
 */
object ResearchPGHolder extends ClassnameLogger {

  /**
   * Renders the full RDF document: header, collection element, one concept
   * per programme, and footer.
   *
   * @param skosCollection programmes to render
   * @param vocabUrl base URL of the vocabulary service
   * @param date last-modified timestamp, defaults to now (ISO offset format)
   */
  def toCompleteCollectionRdf(skosCollection: List[ResearchPGHolder],
                              vocabUrl: String,
                              date: String = ZonedDateTime.now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)): String = {
    rdfSkosDcHeader +
      // BUGFIX: the header was previously invoked as (skosCollection, date, vocabUrl),
      // silently swapping the String parameters vocabUrl and date of
      // toRdfCollectionHeader (the date appeared as the vocab URL and vice versa).
      ResearchPGHolder.toRdfCollectionHeader(skosCollection, vocabUrl, date) +
      skosCollection.map(pg => pg.toRdf(vocabUrl)).mkString("\n") +
      rdfFooter
  }

  /**
   * Renders the skos:Collection element listing each programme as a member.
   *
   * @param skosCollection programmes to list as members
   * @param vocabUrl base URL of the vocabulary service
   * @param date value for dcterms:modified, defaults to now
   */
  def toRdfCollectionHeader(skosCollection: List[ResearchPGHolder],
                            vocabUrl: String,
                            date: String = ZonedDateTime.now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)): String = {
    s"""<skos:Collection rdf:about="${vocabUrl}/collection/researchpg/terms">
<rdfs:label>Research programmes</rdfs:label>
<dc:title>Research programmes</dc:title>
<dc:description>Research programmes</dc:description>
<dc:creator>
<foaf:Organization>
<foaf:name>GNS Science</foaf:name>
</foaf:Organization>
</dc:creator>
<dc:rights>CC-SA-BY-NC 3.0 NZ</dc:rights>
<dcterms:issued>2017-11-17T20:55:00.215+13:00</dcterms:issued>
<dcterms:modified>${date}</dcterms:modified>
${skosCollection.map(sc => s"<skos:member>${vocabUrl}/researchpg/term/${sc.abbrev}</skos:member>").mkString("\n")}
</skos:Collection>"""
  }
}
| ZGIS/smart-portal-backend | app/models/rdf/ResearchPGHolder.scala | Scala | apache-2.0 | 4,160 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.yggdrasil
package table
import scala.util.Random
import blueeyes.json._
import scalaz.StreamT
import scalaz.syntax.comonad._
import org.specs2.ScalaCheck
import org.specs2.mutable._
/**
 * Specs for `Table.distinct`: identity on already-distinct data,
 * de-duplication of rows that straddle slice boundaries, and arbitrary
 * duplicated samples.
 */
trait DistinctSpec[M[+_]] extends ColumnarTableModuleTestSupport[M] with Specification with ScalaCheck {
  import SampleData._
  import trans._

  // distinct of sorted, already-distinct data must return the data unchanged
  def testDistinctIdentity = {
    implicit val gen = sort(distinct(sample(schema)))
    check { (sample: SampleData) =>
      val table = fromSample(sample)
      val distinctTable = table.distinct(Leaf(Source))
      val result = toJson(distinctTable)
      result.copoint must_== sample.data
    }
  }

  // duplicates spanning slice boundaries (slice size 5) must still be removed
  def testDistinctAcrossSlices = {
    val array: JValue = JParser.parseUnsafe("""
[{
"value":{
},
"key":[1.0,1.0]
},
{
"value":{
},
"key":[1.0,1.0]
},
{
"value":{
},
"key":[2.0,1.0]
},
{
"value":{
},
"key":[2.0,2.0]
},
{
"value":{
"fzz":false,
"em":[{
"l":210574764564691785.5,
"fbk":-1.0
},[[],""]],
"z3y":[{
"wd":null,
"tv":false,
"o":[]
},{
"sry":{
},
"in0":[]
}]
},
"key":[1.0,2.0]
},
{
"value":{
"fzz":false,
"em":[{
"l":210574764564691785.5,
"fbk":-1.0
},[[],""]],
"z3y":[{
"wd":null,
"tv":false,
"o":[]
},{
"sry":{
},
"in0":[]
}]
},
"key":[1.0,2.0]
}]""")
    val data: Stream[JValue] = (array match {
      case JArray(li) => li
      case _ => sys.error("Expected a JArray")
    }).toStream
    val sample = SampleData(data)
    // slice size 5 forces the duplicated rows onto different slices
    val table = fromSample(sample, Some(5))
    val result = toJson(table.distinct(Leaf(Source)))
    result.copoint must_== sample.data.toSeq.distinct
  }

  // same scenario with duplicates both within and across slices
  def testDistinctAcrossSlices2 = {
    val array: JValue = JParser.parseUnsafe("""
[{
"value":{
"elxk7vv":-8.988465674311579E+307
},
"key":[1.0,1.0]
},
{
"value":{
"elxk7vv":-8.988465674311579E+307
},
"key":[1.0,1.0]
},
{
"value":{
"elxk7vv":-6.465000919622952E+307
},
"key":[2.0,4.0]
},
{
"value":{
"elxk7vv":-2.2425006462798597E+307
},
"key":[4.0,3.0]
},
{
"value":{
"elxk7vv":-1.0
},
"key":[5.0,8.0]
},
{
"value":{
"elxk7vv":-1.0
},
"key":[5.0,8.0]
},
{
"value":[[]],
"key":[3.0,1.0]
},
{
"value":[[]],
"key":[3.0,8.0]
},
{
"value":[[]],
"key":[6.0,7.0]
},
{
"value":[[]],
"key":[7.0,2.0]
},
{
"value":[[]],
"key":[8.0,1.0]
},
{
"value":[[]],
"key":[8.0,1.0]
},
{
"value":[[]],
"key":[8.0,4.0]
}]""")
    val data: Stream[JValue] = (array match {
      case JArray(li) => li
      case _ => sys.error("Expected JArray")
    }).toStream
    val sample = SampleData(data)
    val table = fromSample(sample, Some(5))
    val result = toJson(table.distinct(Leaf(Source)))
    result.copoint must_== sample.data.toSeq.distinct
  }

  // strips JUndefined members/elements recursively (helper for comparisons)
  def removeUndefined(jv: JValue): JValue = jv match {
    case JObject(jfields) => JObject(jfields collect { case (s, v) if v != JUndefined => JField(s, removeUndefined(v)) })
    case JArray(jvs) => JArray(jvs map { jv => removeUndefined(jv) })
    case v => v
  }

  // arbitrary samples with injected duplicate rows must de-duplicate correctly
  def testDistinct = {
    implicit val gen = sort(duplicateRows(sample(schema)))
    check { (sample: SampleData) =>
      val table = fromSample(sample)
      val distinctTable = table.distinct(Leaf(Source))
      val result = toJson(distinctTable).copoint
      val expected = sample.data.toSeq.distinct
      result must_== expected
    }.set(minTestsOk -> 2000)
  }
}
| precog/platform | yggdrasil/src/test/scala/com/precog/yggdrasil/table/DistinctSpec.scala | Scala | agpl-3.0 | 5,323 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import com.twitter.maple.tap.MemorySourceTap
import cascading.flow.FlowProcess
import cascading.scheme.local.{ TextDelimited => CLTextDelimited }
import cascading.scheme.Scheme
import cascading.tap.Tap
import cascading.tuple.Tuple
import cascading.tuple.Fields
import cascading.scheme.NullScheme
import java.io.{ InputStream, OutputStream }
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapred.OutputCollector
import org.apache.hadoop.mapred.RecordReader
import scala.collection.mutable.Buffer
import scala.collection.JavaConverters._
/**
* Allows working with an iterable object defined in the job (on the submitter)
* to be used within a Job as you would a Pipe/RichPipe
*
* These lists should probably be very tiny by Hadoop standards. If they are
* getting large, you should probably dump them to HDFS and use the normal
* mechanisms to address the data (a FileSource).
*/
case class IterableSource[+T](@transient iter: Iterable[T], inFields: Fields = Fields.NONE)(implicit set: TupleSetter[T], conv: TupleConverter[T]) extends Source with Mappable[T] {
  // Use the explicitly supplied fields when present; otherwise derive
  // positional int fields (0 until arity) from the TupleSetter.
  def fields = {
    if (inFields.isNone && set.arity > 0) {
      Dsl.intFields(0 until set.arity)
    } else inFields
  }
  override def converter[U >: T] = TupleConverter.asSuperConverter[T, U](conv)
  // Eagerly materialise the iterable into cascading Tuples on the submitter;
  // @transient because neither iter nor the buffer should be serialised.
  @transient
  private val asBuffer: Buffer[Tuple] = iter.map { set(_) }.toBuffer
  // lazily constructed so it is only built when a hadoop-based mode asks for it
  private lazy val hdfsTap: Tap[_, _, _] = new MemorySourceTap(asBuffer.asJava, fields)
  override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_, _, _] = {
    // this source can only ever be read, never written to
    if (readOrWrite == Write) {
      sys.error("IterableSource is a Read-only Source")
    }
    mode match {
      case Local(_) => new MemoryTap[InputStream, OutputStream](new NullScheme(fields, fields), asBuffer)
      case Test(_) => new MemoryTap[InputStream, OutputStream](new NullScheme(fields, fields), asBuffer)
      case Hdfs(_, _) => hdfsTap
      case HadoopTest(_, _) => hdfsTap
      case _ => throw ModeException("Unsupported mode for IterableSource: " + mode.toString)
    }
  }
  /**
   * Don't use the whole string of the iterable, which can be huge.
   * We take the first 10 items + the identityHashCode of the iter.
   */
  override val sourceId: String =
    "IterableSource(%s)-%d".format(iter.take(10).toString, System.identityHashCode(iter))
}
| oeddyo/scalding | scalding-core/src/main/scala/com/twitter/scalding/IterableSource.scala | Scala | apache-2.0 | 2,915 |
package com.twitter.finagle.mux
import com.twitter.io.Buf
/** A mux response, carrying an opaque payload plus transmission contexts. */
sealed trait Response {
  /** The payload of the response. */
  def body: Buf

  /** The contexts of the response, as (key, value) buffer pairs. */
  def contexts: Seq[(Buf, Buf)]
}
object Response {

  /** Sole concrete implementation backing every Response. */
  private case class Impl(contexts: Seq[(Buf, Buf)], body: Buf) extends Response {
    override def toString = s"mux.Response.Impl(contexts=[${contexts.mkString(", ")}], body=$body)"
  }

  /** A response with no contexts and an empty body. */
  val empty: Response = apply(Nil, Buf.Empty)

  /** Creates a context-free response carrying only `buf`. */
  def apply(buf: Buf): Response = apply(Nil, buf)

  /** Creates a response with the given contexts and body. */
  def apply(ctxts: Seq[(Buf, Buf)], buf: Buf): Response = Impl(ctxts, buf)
}
/** For java compatibility */
object Responses {

  /** The empty response, re-exported for Java callers. */
  val empty: Response = Response.empty

  /** Java-friendly factory for a body-only response. */
  def make(payload: Buf): Response = Response(payload)

  /** Java-friendly factory for a response with contexts. */
  def make(contexts: Seq[(Buf, Buf)], payload: Buf): Response = Response(contexts, payload)
}
| twitter/finagle | finagle-mux/src/main/scala/com/twitter/finagle/mux/Response.scala | Scala | apache-2.0 | 858 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package http
import common._
import util._
import util.Helpers._
import sitemap._
import http.js.JSArtifacts
import http.js.jquery._
import http.provider._
import js._
import JE._
import JsCmds._
import auth._
import scala.xml._
import java.util.{Locale, TimeZone, ResourceBundle, Date}
import java.io.{InputStream, ByteArrayOutputStream, BufferedReader, StringReader}
import java.util.concurrent.{ConcurrentHashMap => CHash}
import scala.reflect.Manifest
import java.util.concurrent.atomic.AtomicInteger
import net.liftweb.actor.{LiftActor, LAFuture}
/** Java-friendly bridge exposing the LiftRules singleton through an instance method. */
class LiftRulesJBridge {
  def liftRules: LiftRules = LiftRules
}
/** Supplies the effective LiftRules instance; enables mocking in dev/test modes. */
sealed trait LiftRulesMocker {
  def realInstance: LiftRules
}
object LiftRulesMocker {
  // transparently unwrap a mocker wherever a LiftRules is expected
  implicit def toLiftRules(in: LiftRulesMocker): LiftRules = in.realInstance

  /**
   * In Dev and Test mode, there's an option to stuff another LiftRules
   * instance in here and use that one for mocking
   */
  object devTestLiftRulesInstance extends ThreadGlobal[LiftRules]

  /**
   * This function, in Test and Dev mode will vend the instance of LiftRules.
   * If there is an instance set in devTestLiftRulesInstance, that instance
   * will be used, otherwise the global instance in LiftRules.prodInstance
   * will be used.
   */
  // @volatile so a replacement function is visible across threads
  @volatile var calcLiftRulesInstance: () => LiftRules =
    () => devTestLiftRulesInstance.box.openOr( LiftRules.prodInstance)
}
/**
 * The data structure that contains information to determine if the
 * request should be treated as a stateful or stateless request
 */
// `path` is the parsed request path; `httpReq` the underlying container request
final case class StatelessReqTest(path: List[String], httpReq: HTTPRequest)
/**
 * Sometimes we're going to have to surface more data from one of these requests
 * than we might like (for example, extra info about continuing the computation on
 * a different thread), so we'll start off right by having an Answer trait
 * that will have some subclasses and implicit conversions
 */
sealed trait DataAttributeProcessorAnswer
/**
 * The companion object that has the implicit conversions
 */
object DataAttributeProcessorAnswer {
  // plain nodes, already computed
  implicit def nodesToAnswer(in: NodeSeq): DataAttributeProcessorAnswer = DataAttributeProcessorAnswerNodes(in)
  // deferred computation, to be run on another thread
  implicit def nodeFuncToAnswer(in: () => NodeSeq): DataAttributeProcessorAnswer = DataAttributeProcessorAnswerFork(in)
  // asynchronous result
  implicit def nodeFutureToAnswer(in: LAFuture[NodeSeq]): DataAttributeProcessorAnswer = DataAttributeProcessorAnswerFuture(in)
  // Seq[Node] widened into a NodeSeq-style answer
  implicit def setNodeToAnswer(in: Seq[Node]): DataAttributeProcessorAnswer = DataAttributeProcessorAnswerNodes(in)
}
/**
 * Yep... just a bunch of nodes.
 * @param nodes the already-computed markup
 */
final case class DataAttributeProcessorAnswerNodes(nodes: NodeSeq) extends DataAttributeProcessorAnswer
/**
 * A function that returns a bunch of nodes... run it on a different thread
 * @param nodeFunc the deferred computation yielding the markup
 */
final case class DataAttributeProcessorAnswerFork(nodeFunc: () => NodeSeq) extends DataAttributeProcessorAnswer
/**
 * A future that returns nodes... run them on a different thread
 * @param nodeFuture the future of the NodeSeq
 */
final case class DataAttributeProcessorAnswerFuture(nodeFuture: LAFuture[NodeSeq]) extends DataAttributeProcessorAnswer
/**
* The Lift configuration singleton
*/
object LiftRules extends LiftRulesMocker {
// the single production LiftRules instance, created on first access
lazy val prodInstance: LiftRules = new LiftRules()

// cached once: whether we are running in dev or test mode
private[this] val devOrTest = Props.devMode || Props.testMode

/**
 * Get the real instance of LiftRules
 */
def realInstance: LiftRules = if (devOrTest) {
  // dev/test may substitute a mock via LiftRulesMocker.calcLiftRulesInstance
  LiftRulesMocker.calcLiftRulesInstance()
} else prodInstance
type DispatchPF = PartialFunction[Req, () => Box[LiftResponse]];
/**
* A partial function that allows processing of any attribute on an Elem
* if the attribute begins with "data-"
*/
type DataAttributeProcessor = PartialFunction[(String, String, Elem, LiftSession), DataAttributeProcessorAnswer]
/**
* The pattern/PartialFunction for matching tags in Lift
*/
type TagProcessor = PartialFunction[(String, Elem, LiftSession), DataAttributeProcessorAnswer]
/**
* The test between the path of a request and whether that path
* should result in stateless servicing of that path
*/
type StatelessTestPF = PartialFunction[List[String], Boolean]
/**
* The test between the path of a request, the HTTP request, and whether that path
* should result in stateless servicing of that path
*/
type StatelessReqTestPF = PartialFunction[StatelessReqTest, Boolean]
type RewritePF = PartialFunction[RewriteRequest, RewriteResponse]
type SnippetPF = PartialFunction[List[String], NodeSeq => NodeSeq]
type LiftTagPF = PartialFunction[(String, Elem, MetaData, NodeSeq, String), NodeSeq]
type URINotFoundPF = PartialFunction[(Req, Box[Failure]), NotFound]
type URLDecoratorPF = PartialFunction[String, String]
type SnippetDispatchPF = PartialFunction[String, DispatchSnippet]
type ViewDispatchPF = PartialFunction[List[String], Either[() => Box[NodeSeq], LiftView]]
type HttpAuthProtectedResourcePF = PartialFunction[Req, Box[Role]]
type ExceptionHandlerPF = PartialFunction[(Props.RunModes.Value, Req, Throwable), LiftResponse]
type ResourceBundleFactoryPF = PartialFunction[(String, Locale), ResourceBundle]
type SplitSuffixPF = PartialFunction[List[String], (List[String], String)]
type CometCreationPF = PartialFunction[CometCreationInfo, LiftCometActor]
/**
* A partial function that allows the application to define requests that should be
* handled by lift rather than the default handler
*/
type LiftRequestPF = PartialFunction[Req, Boolean]
/*
private[this] var _doneBoot = false
private[http] def doneBoot = _doneBoot
private[http] def doneBoot_=(in: Boolean) {_doneBoot = in}
*/
/**
* Holds the failure information when a snippet can not be executed.
*/
case class SnippetFailure(page: String, typeName: Box[String], failure: SnippetFailures.Value)
object SnippetFailures extends Enumeration {
val NoTypeDefined = Value(1, "No Type Defined")
val ClassNotFound = Value(2, "Class Not Found")
val StatefulDispatchNotMatched = Value(3, "Stateful Snippet: Dispatch Not Matched")
val MethodNotFound = Value(4, "Method Not Found")
val NoNameSpecified = Value(5, "No Snippet Name Specified")
val InstantiationException = Value(6, "Exception During Snippet Instantiation")
val DispatchSnippetNotMatched = Value(7, "Dispatch Snippet: Dispatch Not Matched")
val StateInStateless = Value(8, "Access to Lift's statefull features from Stateless mode")
val CometTimeout = Value(9, "Comet Component did not response to requests")
val CometNotFound = Value(10, "Comet Component not found")
val ExecutionFailure = Value(11, "Execution Failure")
val NoCometType = Value(12, "Comet Type not specified")
}
def defaultFuncNameGenerator(runMode: Props.RunModes.Value): () => String =
runMode match {
case Props.RunModes.Test => S.generateTestFuncName _
case _ => S.generateFuncName _
}
}
/**
* LiftRules is the global object that holds all of Lift's configuration.
*/
class LiftRules() extends Factory with FormVendor with LazyLoggable {
import LiftRules._
private var _doneBoot = false
/**
* Does the LiftRules instance think it's done booting?
*/
def doneBoot = _doneBoot
// The id of the element into which rendered notices are placed.
def noticesContainerId = "lift__noticesContainer__"
/**
* If you want to make the Lift inactivity timeout shorter than
* the container inactivity timeout, set the inactivity timeout here.
* Empty (the default) means "defer to the container's timeout".
*/
val sessionInactivityTimeout = new FactoryMaker[Box[Long]](Empty){}
/**
* The function that converts a scala.text.Document to
* a String (used for JsonAST.JValue to text conversion).
* By default, use Printer.pretty for dev mode and
* Printer.compact for other modes
*/
val jsonOutputConverter = new FactoryMaker[scala.text.Document => String]({
import json.Printer
if (Props.devMode) Printer.pretty _ else Printer.compact _}){}
/**
* Set the default fadeout mechanism for Lift notices. Thus you provide a function that takes a NoticeType.Value
* and decides the duration after which the fade out will start and the actual fadeout time. This is applicable
* for general notices (not associated with id-s) regardless if they are set for the page rendering, ajax
* response or Comet response. Returning Empty (the default) means no automatic fadeout.
*/
val noticesAutoFadeOut = new FactoryMaker[(NoticeType.Value) => Box[(TimeSpan, TimeSpan)]]((notice : NoticeType.Value) => Empty){}
/**
* Use this to apply various effects to the notices. The user function receives the NoticeType
* and the id of the element containing the specific notice. Thus it is the function's responsibility to form
* the javascript code for the visual effects. This is applicable for both ajax and non ajax contexts.
* For notices associated with ID's the user type will receive an Empty notice type. That's because the effect
* is applied on the real estate holding the notices for this ID. Typically this contains a single message.
*/
val noticesEffects = new FactoryMaker[(Box[NoticeType.Value], String) => Box[JsCmd]]((notice: Box[NoticeType.Value], id: String) => Empty){}
/**
* Holds user functions that will be executed very early in the request processing. The functions'
* result will be ignored.
*/
val early = RulesSeq[(HTTPRequest) => Any]
/**
* Holds user functions that are executed before sending the response to client. The functions'
* result will be ignored.
*/
val beforeSend = RulesSeq[(BasicResponse, HTTPResponse, List[(String, String)], Box[Req]) => Any]
private[this] lazy val defaultSecurityRules = SecurityRules()
/**
* The security rules used by Lift to secure this application. These mostly
* relate to HTTPS handling and HTTP `Content-Security-Policy`. See the
* `[[SecurityRules]]` documentation for more.
*
* Once the application has started using these, they are locked in (see
* `lockedSecurityRules` below), so make sure to set them early in the boot
* process.
*/
@volatile var securityRules: () => SecurityRules = () => defaultSecurityRules
// Evaluated once on first use; later reassignments of securityRules have no effect.
private[http] lazy val lockedSecurityRules = securityRules()
/**
* Defines the resources that are protected by authentication and authorization. If this function
* is not defined for the input data, the resource is considered unprotected ergo no authentication
* is performed. If this function is defined and returns a Full box, it means that this resource
* is protected by authentication, and the authenticated subject must be assigned to the role returned by
* this function or to a role that is child-of this role. If this function returns Empty it means that
* this resource is protected by authentication but no authorization is performed meaning that roles are
* not verified.
*/
val httpAuthProtectedResource = RulesSeq[HttpAuthProtectedResourcePF]
/**
* The HTTP authentication mechanism that Lift will perform. See <i>LiftRules.protectedResource</i>
*/
@volatile var authentication: HttpAuthentication = NoAuthentication
/**
* A function that takes the HTTPSession and the contextPath as parameters
* and returns a LiftSession reference. This can be used in cases subclassing
* LiftSession is necessary.
*/
@volatile var sessionCreator: (HTTPSession, String) => LiftSession = {
case (httpSession, contextPath) => new LiftSession(contextPath, httpSession.sessionId, Full(httpSession))
}
/**
* A method that returns a function to create migratory sessions. If you want migratory sessions for your
* application, <code>LiftRules.sessionCreator = LiftRules.sessionCreatorForMigratorySessions</code>
*/
def sessionCreatorForMigratorySessions: (HTTPSession, String) => LiftSession = {
case (httpSession, contextPath) => new LiftSession(contextPath, httpSession.sessionId, Full(httpSession)) with MigratorySession
}
// Should Lift piggyback on the container's HTTP sessions? (true by default)
@volatile var enableContainerSessions = true
// Hook for replacing how a LiftSession is obtained for a request.
@volatile var getLiftSession: (Req) => LiftSession = (req) => _getLiftSession(req)
// Unique identifier for this particular instance of Lift, used for
// tagging resources below in attachResourceId.
private val instanceResourceId = "instance-" + Helpers.nextFuncName
/**
* Attaches an ID entity for resource URI specified in
* link or script tags. This allows controlling browser
* resource caching. By default this just adds a query string
* parameter unique per application lifetime. More complex
* implementation could use per resource MD5 sequences thus
* "forcing" browsers to refresh the resource only when the resource
* file changes. Users can define other rules as well. Inside user's
* function it is safe to use S context as attachResourceId is called
* from inside the <lift:with-resource-id> snippet
*
*/
@volatile var attachResourceId: (String) => String = (name) => {
name + (if (name contains ("?")) "&" else "?") + instanceResourceId + "=_"
}
/**
* Returns a LiftSession instance: an existing one where the SessionMaster
* knows the request, a stateless one when lookup explicitly fails, or a
* freshly-created (and registered) one otherwise.
*/
private def _getLiftSession(req: Req): LiftSession = {
val wp = req.path.wholePath
val LiftPath = LiftRules.liftContextRelativePath
// For comet requests the session id is carried in the path:
// /<lift-path>/comet/<something>/<sessionId>/...
val cometSessionId = wp match {
case LiftPath :: "comet" :: _ :: session :: _ => Full(session)
case _ => Empty
}
val ret = SessionMaster.getSession(req, cometSessionId) match {
case Full(ret) =>
ret.fixSessionTime()
ret
case Failure(_, _, _) =>
// Session lookup explicitly failed: service the request statelessly.
LiftRules.statelessSession.vend.apply(req)
case _ =>
// No session yet: create one and register it with the SessionMaster.
val ret = LiftSession(req)
ret.fixSessionTime()
SessionMaster.addSession(ret, req,
req.request.userAgent,
SessionMaster.getIpFromReq(req))
ret
}
// Possibly terminate open comet requests before servicing this one.
makeCometBreakoutDecision(ret, req)
ret
}
/**
* A function that takes appropriate action in breaking out of any
* existing comet requests based on the request, browser type, etc.
* Keeps the number of open comet requests under the per-browser limit
* (maxConcurrentRequests) and terminates invalid/expired ones.
*/
@volatile var makeCometBreakoutDecision: (LiftSession, Req) => Unit =
(session, req) => {
// get the open sessions to the host (this means that any DNS wildcarded
// Comet requests will not be counted), as well as all invalid/expired
// sessions
val (which, invalid) = session.cometForHost(req.hostAndPath)
// get the maximum requests given the browser type
val max = maxConcurrentRequests.vend(req) - 2 // this request and any open comet requests
// dump the oldest requests
which.drop(max).foreach {
case (actor, req) => actor ! BreakOut()
}
// also break out of every invalid/expired comet request
invalid.foreach {
case (actor, req) => actor ! BreakOut()
}
}
/**
* The path to handle served resources
*/
@volatile var resourceServerPath = "classpath"
/**
* Holds the JS library specific UI artifacts. By default it uses JQuery's artifacts
*/
@volatile var jsArtifacts: JSArtifacts = JQueryArtifacts
/**
* Use this PartialFunction to automatically add static URL parameters
* to any URL reference from the markup of Ajax request.
*/
val urlDecorate = RulesSeq[URLDecoratorPF]
/**
* Should the JSESSIONID be encoded in the URL if cookies are
* not supported
*/
@volatile var encodeJSessionIdInUrl_? = false
/**
* Partial function to allow you to build a CometActor from code rather than via reflection
*/
val cometCreation = RulesSeq[CometCreationPF]
// Default comet creator: never matches, so reflection-based creation is used.
private def noComet(ignore: CometCreationInfo): Box[LiftCometActor] = Empty
/**
* A factory that will vend comet creators
*/
val cometCreationFactory: FactoryMaker[CometCreationInfo => Box[LiftCometActor]] =
new FactoryMaker(() => noComet _) {}
/**
* Should codes that represent entities be converted to XML
* entities when rendered?
*/
val convertToEntity: FactoryMaker[Boolean] = new FactoryMaker(false) {}
/**
* Certain paths and requests within your application can be marked as stateless
* and if there is access to Lift's stateful facilities (setting
* SessionVars, updating function tables, etc.) the developer will
* receive a notice and the operation will not complete.
*/
val statelessReqTest = RulesSeq[StatelessReqTestPF]
// Factory vending the session used when a request is serviced statelessly.
val statelessSession: FactoryMaker[Req => LiftSession with StatelessSession] =
new FactoryMaker((req: Req) => new LiftSession(req.contextPath,
Helpers.nextFuncName,
Empty) with
StatelessSession) {}
/**
* Holds user functions that are executed after the response is sent to client. The functions' result
* will be ignored.
*/
val afterSend = RulesSeq[(BasicResponse, HTTPResponse, List[(String, String)], Box[Req]) => Any]
/**
* Calculate the Comet Server (by default, the server that
* the request was made on, but can do the multi-server thing
* as well)
*/
@volatile var cometServer: () => Option[String] = () => None
/**
* The maximum concurrent requests. If this number of
* requests are being serviced for a given session, messages
* will be sent to all Comet requests to terminate
*/
// 1 for iOS devices, 4 for the newer desktop browsers listed, 2 otherwise.
val maxConcurrentRequests: FactoryMaker[Req => Int] = new FactoryMaker((x: Req) => x match {
case r if r.isIPad || r.isIPhone => 1
case r if r.isFirefox35_+ || r.isIE8 || r.isIE9 || r.isChrome3_+ || r.isOpera9 || r.isSafari3_+ => 4
case _ => 2
}) {}
/**
* A partial function that determines content type based on an incoming
* Req and Accept header
*/
@volatile var determineContentType: PartialFunction[(Box[Req], Box[String]), String] = {
case (_, Full(accept)) if this.useXhtmlMimeType && accept.toLowerCase.contains("application/xhtml+xml") =>
"application/xhtml+xml; charset=utf-8"
case _ => "text/html; charset=utf-8"
}
/**
 * The version of Lift, read from the Implementation-Version entry of the
 * enclosing jar's MANIFEST.MF; "Unknown Lift Version" when it cannot be
 * determined (e.g. when running from unpacked classes rather than a jar).
 */
lazy val liftVersion: String = {
  // Class name -> resource path ("net.liftweb..." -> "net/liftweb/...").
  // The regex must be a single escaped dot ("""\."""): the previous
  // """\\.""" matched a literal backslash plus any char and never fired.
  val cn = """\.""".r.replaceAllIn(LiftRules.getClass.getName, "/")
  val ret: Box[String] =
    for {
      url <- Box !! LiftRules.getClass.getResource("/" + cn + ".class")
      // The class URL looks like "jar:file:...!/net/..."; keep the jar part
      // and point at its manifest.
      newUrl = new java.net.URL(url.toExternalForm.split("!")(0) + "!" + "/META-INF/MANIFEST.MF")
      str <- tryo(new String(readWholeStream(newUrl.openConnection.getInputStream), "UTF-8"))
      ma <- """Implementation-Version: (.*)""".r.findFirstMatchIn(str)
    } yield ma.group(1)
  ret openOr "Unknown Lift Version"
}
/**
 * The date this Lift build was produced, read from the Built-Time entry of
 * the enclosing jar's MANIFEST.MF; the epoch (new Date(0L)) when it cannot
 * be determined.
 */
lazy val liftBuildDate: Date = {
  // Class name -> resource path; the regex must be """\.""" (literal dot),
  // not """\\.""" which matches a backslash plus any char and never fires.
  val cn = """\.""".r.replaceAllIn(LiftRules.getClass.getName, "/")
  val ret: Box[Date] =
    for {
      url <- Box !! LiftRules.getClass.getResource("/" + cn + ".class")
      newUrl = new java.net.URL(url.toExternalForm.split("!")(0) + "!" + "/META-INF/MANIFEST.MF")
      str <- tryo(new String(readWholeStream(newUrl.openConnection.getInputStream), "UTF-8"))
      ma <- """Built-Time: (.*)""".r.findFirstMatchIn(str)
      asLong <- asLong(ma.group(1))
    } yield new Date(asLong)
  ret openOr new Date(0L)
}
/**
* Hooks to be run (in registration order) when LiftServlet.destroy is called.
*/
val unloadHooks = RulesSeq[() => Unit]
/**
 * For each unload hook registered, run it during destroy(). Every hook is
 * wrapped in tryo so that one failing hook cannot prevent the remaining
 * hooks from running.
 */
private[http] def runUnloadHooks() {
  for (hook <- unloadHooks.toList) {
    tryo {
      hook()
    }
  }
}
/**
* The maximum allowed size of a complete mime multi-part POST. Default 8MB
*/
@volatile var maxMimeSize: Long = 8 * 1024 * 1024
/**
* Should pages that are not found be passed along the request processing chain to the
* next handler outside Lift?
*/
@volatile var passNotFoundToChain = false
/**
* The maximum allowed size of a single file in a mime multi-part POST.
* Default 7MB
*/
@volatile var maxMimeFileSize: Long = 7 * 1024 * 1024
/**
* The function referenced here is called if there's a localization lookup failure
*/
@volatile var localizationLookupFailureNotice: Box[(String, Locale) => Unit] = Empty
/**
* When a parameter is received either via POST or GET and does not have a
* corresponding mapping on the server, the function provided by this
* FactoryMaker will be called with the req and parameter name.
*
* By default, if the parameter looks Lift-like (i.e., it starts with an F),
* then we log a warning with the given parameter name and URI.
*/
val handleUnmappedParameter = new FactoryMaker[(Req,String)=>Unit](
() => { (req: Req, parameterName: String) =>
if (parameterName.startsWith("F"))
logger.warn("Unmapped Lift-like parameter seen in request [%s]: %s".format(req.uri, parameterName))
}
) {}
/**
* Set to false if you want to have 404's handled the same way in dev and production mode
*/
@volatile var displayHelpfulSiteMapMessages_? = true
/**
* The default location to send people if SiteMap access control fails. The path is
* expressed as a List[String]
*/
@volatile var siteMapFailRedirectLocation: List[String] = List()
// Decide what to do with a not-found page: either pass the request on to the
// container (Empty, when passNotFoundToChain is set) or build Lift's own 404
// response, letting the session redirect it when one is available.
private[http] def notFoundOrIgnore(requestState: Req, session: Box[LiftSession]): Box[LiftResponse] =
  if (passNotFoundToChain) {
    Empty
  } else {
    val notFoundResponse = requestState.createNotFound
    session match {
      case Full(liftSession) => Full(liftSession.checkRedirect(notFoundResponse))
      case _ => Full(notFoundResponse)
    }
  }
/**
* Allows user adding additional Lift tags (the tags must be prefixed by lift namespace such as <lift:xxxx/>).
* Each LiftTagPF function will be called with the following parameters:
* <pre>
* - Element label,
* - The Element itself,
* - The attributes
* - The child nodes
* - The page name
* </pre>
*/
val liftTagProcessing = RulesSeq[LiftTagPF]
/**
* If you don't want lift to send the application/xhtml+xml mime type to those browsers
* that understand it, then set this to { @code false }
*/
@volatile var useXhtmlMimeType: Boolean = true
// Default String => NodeSeq conversion: wrap the string in a Text node.
private def _stringToXml(s: String): NodeSeq = Text(s)
/**
* A function that defines how a String should be converted to XML
* for the localization stuff. By default, Text(s) is returned,
* but you can change this to attempt to parse the XML in the String and
* return the NodeSeq.
*/
@volatile var localizeStringToXml: String => NodeSeq = _stringToXml _
/**
* The base name of the resource bundle
*/
@volatile var resourceNames: List[String] = List("lift")
/**
* This function is called to convert the current set of Notices into
* a JsCmd that will be executed on the client to display the Notices.
*
* @see net.liftweb.builtin.snippet.Msgs
*/
@volatile var noticesToJsCmd: () => JsCmd = () => {
import builtin.snippet.{Msg,Msgs,MsgErrorMeta,MsgNoticeMeta,MsgWarningMeta}
// A "wrapper" that simply returns the javascript
val passJs = (in : JsCmd) => in
// Delegate to Msgs for fadeout and effects
def noticesFadeOut(noticeType: NoticeType.Value): JsCmd =
Msgs.noticesFadeOut(noticeType, Noop, passJs)
def groupEffects(noticeType: NoticeType.Value): JsCmd =
Msgs.effects(Full(noticeType), noticeType.id, Noop, passJs)
def idEffects(id : String): JsCmd =
Msgs.effects(Empty, id, Noop, passJs)
// Compute the global notices first
// (renderNotices() returns NodeSeq.Empty when there is nothing to show)
val groupMessages = Msgs.renderNotices() match {
case NodeSeq.Empty => JsCmds.Noop
case xml => LiftRules.jsArtifacts.setHtml(LiftRules.noticesContainerId, xml) &
noticesFadeOut(NoticeType.Notice) &
noticesFadeOut(NoticeType.Warning) &
noticesFadeOut(NoticeType.Error) &
groupEffects(NoticeType.Notice) &
groupEffects(NoticeType.Warning) &
groupEffects(NoticeType.Error)
}
// We need to determine the full set of IDs that need messages rendered.
val idSet = (S.idMessages((S.errors)) ++
S.idMessages((S.warnings)) ++
S.idMessages((S.notices))).map(_._1).distinct
// Merge each Id's messages and effects into the JsCmd chain
idSet.foldLeft(groupMessages) {
(chain,id) => chain &
LiftRules.jsArtifacts.setHtml(id, Msg.renderIdMsgs(id)) &
idEffects(id)
}
}
/**
* The base name of the resource bundle of the lift core code
*/
@volatile var liftCoreResourceName = "i18n.lift-core"
/**
* The JsCmd to execute when the comet session is lost. The comet
* session is considered lost when either (a) a comet request comes
* in for a session that does not exist on the server or (b) a comet
* request comes in for a session that has no associated comet actors
* (this typically happens when the server restarts).
*
* By default, we invoke lift.cometOnSessionLost, which can be
* overridden client-side for more complex work.
* lift.cometOnSessionLost reloads the current page by default.
*/
val noCometSessionCmd = new FactoryMaker[JsCmd](
() => JsCmds.Run("lift.cometOnSessionLost()")
) {}
/**
* The JsCmd to execute when the ajax session is lost. The ajax
* session is considered lost when either an ajax request comes in for
* a session that does not exist on the server.
*
* By default, we invoke lift.ajaxOnSessionLost, which can be
* overridden client-side for more complex work.
* lift.ajaxOnSessionLost reloads the page by default.
*/
val noAjaxSessionCmd = new FactoryMaker[JsCmd](
() => JsCmds.Run("lift.ajaxOnSessionLost()")
) {}
/**
* Server-side actors that represent client-side
* actor endpoints (client actors, Round Trips) need
* a lifespan. By default, it's 30 minutes, but you might
* want to make it longer if the client is going to get
* delayed by long computations that bar it from re-establishing
* the long polling connection
*/
val clientActorLifespan = new FactoryMaker[LiftActor => Long](
() => (actor: LiftActor) => (30.minutes): Long
){}
/**
* Put a function that will calculate the request timeout based on the
* incoming request.
*/
@volatile var calcRequestTimeout: Box[Req => Int] = Empty
/**
* If you want the standard (non-AJAX) request timeout to be something other than
* 10 seconds, put the value here
*/
@volatile var stdRequestTimeout: Box[Int] = Empty
/**
* If you want the AJAX request timeout to be something other than 120 seconds, put the value here
*/
@volatile var cometRequestTimeout: Box[Int] = Empty
/**
* If a Comet request fails, wait this long (in milliseconds) before
* retrying. Default value is 10 seconds
*/
@volatile var cometFailureRetryTimeout: Long = 10.seconds
/**
* The timeout in milliseconds of a comet ajax-request. Defaults to 5000 ms.
*/
@volatile var cometProcessingTimeout: Long = 5.seconds
/**
* The timeout in milliseconds of a comet render-request. Defaults to 30000 ms.
*/
@volatile var cometRenderTimeout: Long = 30.seconds
/**
* The dispatcher that takes a Snippet and converts it to a
* DispatchSnippet instance
*/
val snippetDispatch = RulesSeq[SnippetDispatchPF]
/**
* Function that generates variants on snippet names to search for, given the name from the template.
* The default implementation just returns name :: Nil (e.g. no change).
* The names are searched in order.
* See also searchSnippetsWithRequestPath for an implementation.
*/
@volatile var snippetNamesToSearch: FactoryMaker[String => List[String]] =
new FactoryMaker(() => (name: String) => name :: Nil) {}
/**
 * Implementation for snippetNamesToSearch that first tries a name qualified by the
 * current template path. For example, suppose the following is configured in Boot:
 *   LiftRules.snippetNamesToSearch.default.set(() => LiftRules.searchSnippetsWithRequestPath)
 *   LiftRules.addToPackages("com.mycompany.myapp")
 *   LiftRules.addToPackages("com.mycompany.mylib")
 * The tag <lift:MySnippet> in template foo/bar/baz.html would search for the snippet in:
 *   - com.mycompany.myapp.snippet.foo.bar.MySnippet
 *   - com.mycompany.myapp.snippet.MySnippet
 *   - com.mycompany.mylib.snippet.foo.bar.MySnippet
 *   - com.mycompany.mylib.snippet.MySnippet
 *   - and then the Lift builtin snippet packages
 */
def searchSnippetsWithRequestPath(name: String): List[String] = {
  // Template path minus the file itself, e.g. foo/bar/baz.html -> List(foo, bar).
  val templateDirs: List[String] =
    S.request.map(_.path.partPath.dropRight(1)) openOr Nil
  if (templateDirs.isEmpty) name :: Nil
  else (templateDirs :+ name).mkString(".") :: name :: Nil
}
/**
* Change this variable to set view dispatching
*/
val viewDispatch = RulesSeq[ViewDispatchPF]
// Look up a snippet by name in the registered snippetDispatch rules.
private[http] def snippet(name: String): Box[DispatchSnippet] = NamedPF.applyBox(name, snippetDispatch.toList)
/**
* If the request times out (or returns a non-Response) you can
* intercept the response here and create your own response
*/
@volatile var requestTimedOut: Box[(Req, Any) => Box[LiftResponse]] = Empty
/**
* A function that takes the current HTTP request and returns the
* TimeZone to use for that request.
*/
@volatile var timeZoneCalculator: Box[HTTPRequest] => TimeZone = defaultTimeZoneCalculator _
// Default: always the JVM's default time zone, regardless of the request.
def defaultTimeZoneCalculator(request: Box[HTTPRequest]): TimeZone = TimeZone.getDefault
/**
* How many times do we retry an Ajax command before calling it a failure?
*/
@volatile var ajaxRetryCount: Box[Int] = Empty
/**
* The JavaScript to execute at the beginning of an
* Ajax request (for example, showing the spinning working thingy)
*/
@volatile var ajaxStart: Box[() => JsCmd] = Empty
import FuncJBridge._
/**
* Set the Ajax start JavaScript function. The
* Java-callable alternative to assigning the var ajaxStart
*/
def setAjaxStart(f: Func0[JsCmd]): Unit = {
ajaxStart = Full(f: () => JsCmd)
}
/**
* The function that calculates if the response should be rendered in
* IE6/7/8 compatibility mode
*/
@volatile var calcIEMode: () => Boolean =
() => (for (r <- S.request) yield r.isIE6 || r.isIE7 ||
r.isIE8) openOr true
/**
* The JavaScript to execute to log a message on the client side when
* lift.logError is called.
*
* If Empty no logging is performed
* The default when running in DevMode is to call lift.logError which
* will use JavaScript console if available or alert otherwise.
*
* To always use alert set:
*
* LiftRules.jsLogFunc = Full(v => JE.Call("alert",v).cmd)
*/
@volatile var jsLogFunc: Box[JsVar => JsCmd] =
if (Props.devMode) Full(v => JE.Call("lift.logError", v))
else Empty
/**
* The JavaScript to execute at the end of an
* Ajax request (for example, removing the spinning working thingy)
*/
@volatile var ajaxEnd: Box[() => JsCmd] = Empty
/**
* Set the Ajax end JavaScript function. The
* Java-callable alternative to assigning the var ajaxEnd
*/
def setAjaxEnd(f: Func0[JsCmd]): Unit = {
ajaxEnd = Full(f: () => JsCmd)
}
/**
 * An XML header is inserted at the very beginning of returned XHTML pages.
 * This function defines the cases in which such a header is inserted. The
 * function takes a NodeResponse (the LiftResponse that is converting the
 * XML to a stream of bytes), the Node (root node of the XML), and
 * a Box containing the content type.
 */
@volatile var calculateXmlHeader: (NodeResponse, Node, Box[String]) => String = {
  // Explicitly suppressed for this request.
  case _ if S.skipXmlHeader => ""
  // Unparsed content is emitted verbatim; no header is prepended.
  case (_, _: Unparsed, _) => ""
  // No (or failed) content type: emit the header.
  case (_, _, Empty) | (_, _, Failure(_, _, _)) =>
    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
  case (_, _, Full(s)) if (s.toLowerCase.startsWith("text/html")) =>
    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
  case (_, _, Full(s)) if (s.toLowerCase.startsWith("text/xml") ||
    s.toLowerCase.startsWith("text/xhtml") ||
    s.toLowerCase.startsWith("application/xml") ||
    s.toLowerCase.startsWith("application/xhtml+xml")) =>
    "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
  case _ => ""
}
/**
* The default action to take when the JavaScript action fails
*/
@volatile var ajaxDefaultFailure: Box[() => JsCmd] =
Full(() => JsCmds.Alert(S.?("ajax.error")))
/**
* A function that takes the current HTTP request and returns the
* Locale to use for that request.
*/
@volatile var localeCalculator: Box[HTTPRequest] => Locale = defaultLocaleCalculator _
// Default: the request's locale, falling back to the JVM default locale.
def defaultLocaleCalculator(request: Box[HTTPRequest]) =
request.flatMap(_.locale).openOr(Locale.getDefault())
// User-registered factories resolving resource bundles by (name, locale).
val resourceBundleFactories = RulesSeq[ResourceBundleFactoryPF]
/**
* Given the current location (based on the Req.path.partPath),
* what are the resource bundles in the templates for the current
* page.
*
* @see DefaultRoutines.resourceForCurrentLoc()
*/
val resourceForCurrentLoc: FactoryMaker[() => List[ResourceBundle]] =
new FactoryMaker(() => () => DefaultRoutines.resourceForCurrentReq()) {}
/**
* Ever wanted to add custom attribute processing to Lift? Here's your chance.
* Every attribute with the data- prefix will be tested against this
* RulesSeq and if there's a match, then use the rule process. Simple, easy, cool.
*/
val dataAttributeProcessor: RulesSeq[DataAttributeProcessor] = new RulesSeq()
// Built-in rule: turn data-lift="snippet?a=b;c=d" into a <lift:snippet> element.
dataAttributeProcessor.append {
case ("lift", snippetInvocation, element, liftSession) =>
// Split the invocation into the snippet name and its (optional) arguments.
snippetInvocation.charSplit('?') match {
case Nil =>
// This shouldn't ever happen.
NodeSeq.Empty
case snippetName :: Nil =>
// No arguments: just wrap the element in a lift-namespaced Elem.
new Elem("lift", snippetName, Null, element.scope, false, element)
case snippetName :: encodedArguments =>
// Arguments are ;- or &-separated key=value pairs.
val decodedMetaData = pairsToMetaData(encodedArguments.flatMap(_.roboSplit("[;&]")))
if (decodedMetaData.get("parallel").headOption == Some(Text("true"))) {
// parallel=true: evaluate the snippet on another thread via an LAFuture.
DataAttributeProcessorAnswerFuture(LAFuture(() =>
new Elem("lift", snippetName, decodedMetaData, element.scope, false, element)
))
} else {
new Elem("lift", snippetName, decodedMetaData, element.scope, false, element)
}
}
}
/**
* Ever wanted to match on *any* arbitrary tag in your HTML and process it
* any way you wanted? Well, here's your chance, dude. You can capture any
* tag and do anything you want with it.
*
* Note that this set of PartialFunctions is run for **EVERY** node
* in the DOM so make sure it runs *FAST*.
*
* Also, no subsequent processing of the returned NodeSeq is done (no
* LiftSession.processSurroundAndInclude()) so evaluate everything
* you want to.
*
* But do avoid infinite loops, so make sure the PartialFunction actually
* returns true *only* when you're going to return a modified node.
*
* An example might be:
*
*
* case ("script", e, session) if e.getAttribute("data-serverscript").isDefined => ...
*/
val tagProcessor: RulesSeq[TagProcessor] = new RulesSeq()
/**
* There may be times when you want to entirely control the templating process. You can insert
* a function to this factory that will do your custom template resolution. If the PartialFunction
* isDefinedAt the given locale/path, then that's the template returned. In this way, you can
* return Empty for a template that's not found and the template will not be found. Otherwise,
* if the function is not defined for the locale/path pair, the normal templating system will
* be used. Also, keep in mind how FactoryMaker can be used... it can be global, per request, etc.
*/
val externalTemplateResolver: FactoryMaker[() => PartialFunction[(Locale, List[String]), Box[NodeSeq]]] =
new FactoryMaker(() => (() => Map.empty: PartialFunction[(Locale, List[String]), Box[NodeSeq]])) {}
/**
* There may be times when you want to entirely control the templating process. You can insert a function
* that creates a white list of snippets. The white list is the exhaustive list of snippets. The
* snippets are class/method pairs. If the partial function is defined and the result is a Full Box,
* the function is run. If the Box is an EmptyBox, then the result is a snippet lookup failure. If the
* partial function is not defined, then the normal snippet resolution mechanism is used. Please note that
* in Scala a Map is a PartialFunction and you can create Maps that have a default value using the withDefaultValue
* method.
*/
val snippetWhiteList: FactoryMaker[() => PartialFunction[(String, String), Box[NodeSeq => NodeSeq]]] =
new FactoryMaker(() => (() => Map.empty: PartialFunction[(String, String), Box[NodeSeq => NodeSeq]])) {}
/**
* This FactoryMaker can be used to disable the little used attributeSnippets
*/
val allowAttributeSnippets: FactoryMaker[() => Boolean] =
new FactoryMaker(() => () => true) {}
// The resolved SiteMap (written by resolveSitemap under this.synchronized).
private var _sitemap: Box[SiteMap] = Empty
// The user-supplied SiteMap generator, set via setSiteMapFunc.
private var sitemapFunc: Box[() => SiteMap] = Empty
// Caches the resolved sitemap once per request (used by siteMap in dev mode).
private object sitemapRequestVar extends TransientRequestVar(resolveSitemap())
/**
* Set the sitemap to a function that will be run to generate the sitemap.
*
* This allows for changing the SiteMap when in development mode and having
* the function re-run for each request.<br/>
*
* This is **NOT** a mechanism for dynamic SiteMap. This is a mechanism
* **ONLY** for allowing you to change the SiteMap during development.
* There will be significant performance penalties (serializing the
* service of requests... only one at a time) for changing the SiteMap.
*/
def setSiteMapFunc(smf: () => SiteMap) {
sitemapFunc = Full(smf)
// Outside dev mode the sitemap is resolved once, eagerly, right here.
if (!Props.devMode) {
resolveSitemap()
}
}
/**
* Define the sitemap.
*/
def setSiteMap(sm: SiteMap) {
this.setSiteMapFunc(() => sm)
}
// Runs f with _doneBoot temporarily forced to false (restored afterwards),
// so boot-only operations are permitted; synchronized on this LiftRules.
private def runAsSafe[T](f: => T): T = synchronized {
val old = _doneBoot
try {
_doneBoot = false
f
} finally {
_doneBoot = old
}
}
// Wrapper tagging rewrite rules installed by a sitemap resolution, so they
// can be removed and re-added when the sitemap is recomputed.
private case class PerRequestPF[A, B](f: PartialFunction[A, B]) extends PartialFunction[A, B] {
def isDefinedAt(a: A) = f.isDefinedAt(a)
def apply(a: A) = f(a)
}
// (Re)build the SiteMap from sitemapFunc, replacing any rewrite rules the
// previous SiteMap installed in LiftRules.statefulRewrite.
private def resolveSitemap(): Box[SiteMap] = {
this.synchronized {
runAsSafe {
sitemapFunc.flatMap {
smf =>
// Drop rewrites from a previous resolution (they are tagged PerRequestPF).
LiftRules.statefulRewrite.remove {
case PerRequestPF(_) => true
case _ => false
}
val sm = smf()
_sitemap = Full(sm)
// Install each Loc's rewrite rule, tagged so it can be removed next time.
for (menu <- sm.menus;
loc = menu.loc;
rewrite <- loc.rewritePF) LiftRules.statefulRewrite.append(PerRequestPF(rewrite))
_sitemap
}
}
}
}
/**
* Return the sitemap if set in Boot. If the current runMode is development
* mode, the sitemap may be recomputed on each page load.
*/
def siteMap: Box[SiteMap] = if (Props.devMode) {
this.synchronized {
sitemapRequestVar.is
}
} else _sitemap
/**
* A unified set of properties for managing how to treat
* HTML, XHTML, HTML5. The default behavior is to return an
* OldHtmlPropteries instance, but you can change this
* to return an Html5Properties instance any you'll get
* HTML5 support.
* LiftRules.htmlProperties.default.set((r: Req) => new Html5Properties(r.userAgent))
*/
val htmlProperties: FactoryMaker[Req => HtmlProperties] =
new FactoryMaker(() => (r: Req) => new Html5Properties(r.userAgent): HtmlProperties) {}
/**
* How long should we wait for all the lazy snippets to render
*/
val lazySnippetTimeout: FactoryMaker[TimeSpan] = new FactoryMaker(() => 30.seconds) {}
/**
* Does the current context support parallel snippet execution
*/
val allowParallelSnippets: FactoryMaker[Boolean] = new FactoryMaker(() => false) {}
/**
* Update the function here that calculates particular paths to
* be excluded from context path rewriting (return true to exclude a path).
*/
val excludePathFromContextPathRewriting: FactoryMaker[String => Boolean] =
new FactoryMaker(() => ((s: String) => false)) {}
/**
* If a deferred snippet has a failure during render,
* what should we display? In dev mode a diagnostic box including the
* failure message and stack trace is rendered; otherwise nothing.
*/
val deferredSnippetFailure: FactoryMaker[Failure => NodeSeq] =
new FactoryMaker(() => {
failure: Failure => {
if (Props.devMode)
<div style="border: red solid 2px">A lift:parallel snippet failed to render.Message:{failure.msg}{failure.exception match {
case Full(e) =>
<pre>{e.getStackTrace.map(_.toString).mkString("\\n")}</pre>
case _ => NodeSeq.Empty
}}<i>note: this error is displayed in the browser because
your application is running in "development" mode.If you
set the system property run.mode=production, this error will not
be displayed, but there will be errors in the output logs.
</i>
</div>
else NodeSeq.Empty
}
}) {}
/**
* If a deferred snippet times out during render,
* what should we display? Dev mode shows a diagnostic box; other
* run modes render nothing (the timeout is still logged).
*/
val deferredSnippetTimeout: FactoryMaker[NodeSeq] =
new FactoryMaker(() => {
if (Props.devMode)
<div style="border: red solid 2px">
A deferred snippet timed out during render.
<i>note: this error is displayed in the browser because
your application is running in "development" mode. If you
set the system property run.mode=production, this error will not
be displayed, but there will be errors in the output logs.
</i>
</div>
else NodeSeq.Empty
}) {}
/**
 * Should comments be stripped from the served XHTML?
 *
 * Defaults to keeping comments in development mode (useful when debugging
 * templates) and stripping them in every other run mode.
 */
val stripComments: FactoryMaker[Boolean] =
  new FactoryMaker(() => !Props.devMode) {}
// Counter of requests currently in flight; presumably used to drain requests
// at shutdown together with `ending` — confirm against the servicing code.
private[http] val reqCnt = new AtomicInteger(0)
// Set to true when Lift is shutting down.
@volatile private[http] var ending = false
// Marks boot as complete; after this, RulesSeq instances reject modification.
private[http] def bootFinished() {
_doneBoot = true
}
/**
* Holds user's DispatchPF functions that will be executed in a stateless context. This means that
* no session will be created and no JSESSIONID cookie will be presented to the user (unless
* the user has presented a JSESSIONID cookie).
*
* This is the way to do stateless REST in Lift
*/
val statelessDispatch =
RulesSeq[DispatchPF]
.append(ContentSecurityPolicyViolation.defaultViolationHandler)
/**
* Add functionality around all of the HTTP request/response cycle.
* This is an optimal place to get a database connection. Note that whatever
* is loaned at the beginning of the request will not be returned until the end
* of the request. It's super-important to (1) not do anything related
* to state or touch the request objects or anything else at the beginning or
* end of the loan wrapper phase; (2) make sure that your code does not throw
* exceptions as exceptions can cause major problems.
*/
val allAround = RulesSeq[LoanWrapper]
// Computes the stateful dispatch table for a request: when a session already
// exists, the session's high-level dispatch functions are consulted before
// the globally registered `dispatch` rules; otherwise (no session, or a null
// request) only the global rules apply.
private[http] def dispatchTable(req: HTTPRequest): List[DispatchPF] = {
req match {
case null => dispatch.toList
case _ => SessionMaster.getSession(req, Empty) match {
case Full(s) => S.initIfUninitted(s) {
S.highLevelSessionDispatchList.map(_.dispatch) :::
dispatch.toList
}
case _ => dispatch.toList
}
}
}
/**
* Contains the URI path under which all built-in Lift-handled requests are
* scoped. It does not include the context path and should not begin with a
* /.
*/
@volatile var liftContextRelativePath = "lift"
/**
* Returns a complete URI, including the context path, under which all
* built-in Lift-handled requests are scoped.
*/
def liftPath: String = S.contextPath + "/" + liftContextRelativePath
/**
* If there is an alternative way of calculating the context path
* (by default returning Empty)
*
* If this function returns an Empty, the contextPath provided by the container will be used.
*
*/
@volatile var calculateContextPath: () => Box[String] = () => Empty
// The container-supplied HTTPContext; set once via setContext during startup.
@volatile private var _context: HTTPContext = _
/**
* Should an exception be thrown on out of scope Session and RequestVar
* access. By default, no.
*/
@volatile var throwOnOutOfScopeVarAccess: Boolean = false
/**
* In Dev mode and Test mode, return a non-200 response code
* if there is an error on the page (one that would result in
* the red box with the error message being displayed). This
* helps in testing automation.
*/
@volatile var devModeFailureResponseCodeOverride: Box[Int] = Empty
/**
* Returns the HTTPContext
*/
def context: HTTPContext = synchronized {_context}
/**
* Sets the HTTPContext (only assigns when the instance actually changes)
*/
def setContext(in: HTTPContext): Unit = synchronized {
if (in ne _context) {
_context = in
}
}
// Packages registered via addToPackages; searched when locating snippets etc.
private var otherPackages: List[String] = Nil
/**
* Used by Lift to construct full package names from the packages provided to addToPackages function
*/
def buildPackage(end: String) = otherPackages.map(_ + "." + end)
/**
* Tells Lift where to find Snippets,Views, Comet Actors and Lift ORM Model object.
* Must be called during boot; throws IllegalStateException afterwards.
*/
def addToPackages(what: String) {
if (doneBoot) throw new IllegalStateException("Cannot modify after boot.");
otherPackages = what :: otherPackages
}
/**
* Tells Lift where to find Snippets, Views, Comet Actors and Lift ORM Model object.
* Overload taking a Package; must be called during boot.
*/
def addToPackages(what: Package) {
if (doneBoot) throw new IllegalStateException("Cannot modify after boot.");
otherPackages = what.getName :: otherPackages
}
// Classpath-based resource lookup, used as a fallback to the container context.
private val defaultFinder = getClass.getResource _
// Container-context resource lookup; null-safe before the context is set.
private def resourceFinder(name: String): java.net.URL = if (null eq _context) null else _context.resource(name)
/**
* Obtain the resource URL by name
*/
@volatile var getResource: String => Box[java.net.URL] = defaultGetResource _
/**
* Obtain the resource URL by name: try the container context first, then
* fall back to the classpath (Box !! maps null results to Empty).
*/
def defaultGetResource(name: String): Box[java.net.URL] =
for{
rf <- (Box !! resourceFinder(name)) or (Box !! defaultFinder(name))
} yield rf
/**
* Open a resource by name and process its contents using the supplied function.
* The stream is always closed after `f` runs.
*/
def doWithResource[T](name: String)(f: InputStream => T): Box[T] =
getResource(name) map { _.openStream } map { is => try { f(is) } finally { is.close } }
/**
 * Obtain the resource as an array of bytes by name.
 *
 * Reads the resource's stream to EOF in 2 KiB chunks; a zero-length read is
 * skipped (not treated as EOF, matching InputStream.read's contract where
 * only -1 signals end of stream).
 *
 * @param name the resource name, resolved via getResource
 * @return Full(bytes) when the resource exists, Empty otherwise
 */
def loadResource(name: String): Box[Array[Byte]] = doWithResource(name) { stream =>
  val buffer = new Array[Byte](2048)
  val out = new ByteArrayOutputStream
  // Idiomatic tail-recursive copy loop (replaces the previous version that
  // used an explicit `return` inside the recursion).
  @scala.annotation.tailrec
  def copyLoop() {
    val len = stream.read(buffer)
    if (len >= 0) {
      if (len > 0) out.write(buffer, 0, len)
      copyLoop()
    }
  }
  copyLoop()
  out.toByteArray
}
/**
* Obtain the resource as an XML by name. If you're using this to load a template, consider using
* the Template object instead.
*
* @see Template
*/
def loadResourceAsXml(name: String): Box[NodeSeq] = loadResourceAsString(name).flatMap(s => PCDataXmlParser(s))
/**
* Obtain the resource as a String by name (decoded as UTF-8)
*/
def loadResourceAsString(name: String): Box[String] = loadResource(name).map(s => new String(s, "UTF-8"))
/**
* Get the partial function that defines if a request should be handled by
* the application (rather than the default container handler)
*/
val liftRequest = RulesSeq[LiftRequestPF]
/**
* Holds the user's DispatchPF functions that will be executed in stateful context.
* The page-JS servicing rule is installed by default.
*/
val dispatch = RulesSeq[DispatchPF].append(LiftJavaScript.servePageJs)
/**
* Holds the user's rewrite functions that can alter the URI parts and query parameters. This rewrite
* is performed very early in the HTTP request cycle and may not include any state. This rewrite is meant
* to rewrite requests for statelessDispatch. <br/>
* Note also that rewrites should not have side effects except
* to memoize database query results. No side effects means that you should not change SessionVars
* in a rewrite.
*/
val statelessRewrite = RulesSeq[RewritePF]
/**
* Holds the user's rewrite functions that can alter the URI parts and query parameters.
* This rewrite takes place within the scope of the S state so SessionVars and other session-related
* information is available. <br/>
* Note also that rewrites should not have side effects except
* to memoize database query results. No side effects means that you should not change SessionVars
* in a rewrite. <br/>
* In general, rewrites should be considered low level access. Rather than using a rewrite to extract
* parameters out of a URL, you'll be much better off using SiteMap generally and Menu.param and Menu.params
* specifically for extracting parameters from URLs.
*/
val statefulRewrite = RulesSeq[RewritePF]
/**
* Holds the user's snippet functions that will be executed by lift given a certain path.
*/
val snippets = RulesSeq[SnippetPF]
/**
* Handles the parsing of template content into NodeSeqs. If multiple parsers are registered for the same
* template suffix, the first matching parser is used. This intended to be set in in `Boot` as it is read only
* once during the processing of the first template.
*/
@volatile var contentParsers: List[ContentParser] = List(
ContentParser(
Seq("html", "xhtml", "htm"),
(content:InputStream) => S.htmlProperties.htmlParser(content),
identity[NodeSeq](_) // These templates are not surrounded by default
),
ContentParser("md", MarkdownParser.parse)
)
/**
* Execute certain functions early in a Stateful Request
* This is called early in a stateful request (one that's not serviced by a stateless REST request and
* one that's not marked as a stateless HTML page request).
* @dpp strongly recommends that everything that you do related to user state is done with earlyInStateful,
* instead of using onBeginServicing.
*/
val earlyInStateful = RulesSeq[Box[Req] => Unit]
/**
* Execute certain functions early in a Stateless Request
*/
val earlyInStateless = RulesSeq[Box[Req] => Unit]
// Backing field for configureLogging; assigned below and mirrored to Logger.setup.
private var _configureLogging: () => Unit = _
/**
* Holds the function that configures logging. Must be set before any loggers are created
*/
def configureLogging: () => Unit = _configureLogging
/**
* Holds the function that configures logging. Must be set before any loggers are created.
* Setting it also registers the configurer with Logger.setup.
*/
def configureLogging_=(newConfigurer: () => Unit): Unit = {
_configureLogging = newConfigurer
Logger.setup = Full(newConfigurer)
}
// Install the default auto-configurer at object-initialization time.
configureLogging = net.liftweb.util.LoggingAutoConfigurer()
// Lazily created logger for comet/ajax tracing; replaceable via cometLogger_=.
private val _cometLogger: FatLazy[Logger] = FatLazy({
val ret = Logger("comet_trace")
ret
})
/**
* Holds the CometLogger that will be used to log comet activity
*/
def cometLogger: Logger = _cometLogger.get
/**
* Holds the CometLogger that will be used to log comet activity
*/
def cometLogger_=(newLogger: Logger): Unit = _cometLogger.set(newLogger)
/**
* Sometimes the comet logger (which is really the Ajax logger)
* needs to have the string cleaned up to remove stuff like passwords. That's
* done by this function. The default is the identity function.
*/
@volatile var cometLoggerStringSecurer: String => String = s => s
/**
* Takes a Node, headers, cookies, and a session and turns it into an XhtmlResponse,
* then routes it through convertResponse so response-level conversions apply.
*/
private def cvt(ns: Node, headers: List[(String, String)], cookies: List[HTTPCookie], req: Req, code:Int) =
convertResponse({
val ret = XhtmlResponse(ns,
/*LiftRules.docType.vend(req)*/S.htmlProperties.docType,
headers, cookies, code,
S.legacyIeCompatibilityMode)
// Only emit the <?xml ...?> prolog when the doc type is not being skipped.
ret._includeXmlVersion = !S.skipDocType
ret
}, headers, cookies, req)
// Default response headers: anti-caching directives plus the current date.
@volatile var defaultHeaders: PartialFunction[(NodeSeq, Req), List[(String, String)]] = {
case _ =>
val d = Helpers.nowAsInternetDate
List("Expires" -> d,
"Date" -> d,
"Cache-Control" -> "no-cache, private, no-store",
"Pragma" -> "no-cache" )
}
/**
* Runs responseTransformers in registration order, threading the response
* through each one.
*/
def performTransform(in: LiftResponse): LiftResponse = responseTransformers.toList.foldLeft(in) {
// NOTE(review): the PartialFunction type parameters are erased, so this
// match only checks the runtime class; it assumes any PartialFunction in
// the list accepts LiftResponse — confirm registrations uphold that.
case (in, pf: PartialFunction[_, _]) =>
if (pf.isDefinedAt(in)) pf(in) else in
case (in, f) => f(in)
}
/**
* Holds the user's transformer functions allowing the user to modify a LiftResponse before sending it to client.
*/
val responseTransformers = RulesSeq[LiftResponse => LiftResponse]
/**
* convertResponse is a PartialFunction that reduces a given Tuple4 into a
* LiftResponse that can then be sent to the browser. Box/Option payloads are
* unwrapped recursively; anything unrecognized becomes the request's 404.
*/
var convertResponse: PartialFunction[(Any, List[(String, String)], List[HTTPCookie], Req), LiftResponse] = {
case (r: LiftResponse, _, _, _) => r
case (ns: Group, headers, cookies, req) => cvt(ns, headers, cookies, req, 200)
case (ns: Node, headers, cookies, req) => cvt(ns, headers, cookies, req, 200)
case (ns: NodeSeq, headers, cookies, req) => cvt(Group(ns), headers, cookies, req, 200)
case ((ns: NodeSeq, code: Int), headers, cookies, req) => cvt(Group(ns), headers, cookies, req, code)
case (SafeNodeSeq(n), headers, cookies, req) => cvt(Group(n), headers, cookies, req, 200)
case (Full(o), headers, cookies, req) => convertResponse((o, headers, cookies, req))
case (Some(o), headers, cookies, req) => convertResponse((o, headers, cookies, req))
case (bad, _, _, req) => req.createNotFound
}
/**
* Set a snippet failure handler here. The class and method for the snippet are passed in.
* The default handler (prepended) logs the failure at INFO level.
*/
val snippetFailedFunc = RulesSeq[SnippetFailure => Unit].prepend(logSnippetFailure _)
// Default snippet-failure handler: log and continue.
private def logSnippetFailure(sf: SnippetFailure) = logger.info("Snippet Failure: " + sf)
/**
* Set to false if you do not want ajax/comet requests that are not
* associated with a session to call their respective session
* loss handlers (set via LiftRules.noAjaxSessionCmd and
* LiftRules.noCometSessionCmd).
*/
@volatile var redirectAsyncOnSessionLoss = true
/**
 * The sequence of partial functions (pattern matching) for handling converting an exception to something to
 * be sent to the browser depending on the current RunMode (development, etc.)
 *
 * By default it returns an XhtmlResponse containing a predefined markup. You can overwrite this by calling
 * LiftRules.exceptionHandler.prepend(...). If you are calling append then your code will not be called since
 * a default implementation is already appended.
 *
 */
val exceptionHandler = RulesSeq[ExceptionHandlerPF].append {
  // Development mode: surface the stack trace in the browser to aid debugging.
  case (Props.RunModes.Development, r, e) =>
    logger.error("Exception being returned to browser when processing " + r.uri.toString, e)
    XhtmlResponse((<html> <body>Exception occurred while processing {r.uri}<pre>{showException(e)}</pre> </body> </html>), S.htmlProperties.docType, List("Content-Type" -> "text/html; charset=utf-8"), Nil, 500, S.legacyIeCompatibilityMode)
  // Any other run mode: generic message in the browser, details in the logs.
  case (_, r, e) =>
    logger.error("Exception being returned to browser when processing " + r.uri.toString, e)
    XhtmlResponse((<html> <body>Something unexpected happened while serving the page at {r.uri}</body> </html>), S.htmlProperties.docType, List("Content-Type" -> "text/html; charset=utf-8"), Nil, 500, S.legacyIeCompatibilityMode)
}
/**
* The list of partial function for defining the behavior of what happens when
* URI is invalid and you're not using a site map
*
*/
val uriNotFound = RulesSeq[URINotFoundPF].prepend(NamedPF("default") {
case (r, _) => DefaultNotFound
})
/**
* If you use the form attribute in a snippet invocation, what attributes should
* be copied from the snippet invocation tag to the form tag. The
* default list is "class", "id", "target", "style", "onsubmit"
*/
val formAttrs: FactoryMaker[List[String]] = new FactoryMaker(() => List("class", "id", "target", "style", "onsubmit")) {}
/**
* By default, Http response headers are appended. However, there are
* some headers that should only appear once (for example "expires"). This
* Vendor vends the list of header responses that can only appear once.
* (The name is misspelled — "Reponse" — but kept as-is for source compatibility.)
*/
val overwrittenReponseHeaders: FactoryMaker[List[String]] = new FactoryMaker(() => List("expires")) {}
/**
* A utility method to convert an exception to a string of stack traces,
* recursively including the chain of causes.
* @param le the exception
*
* @return the stack trace
*/
private def showException(le: Throwable): String = {
val ret = "Message: " + le.toString + "\\n\\t" +
le.getStackTrace.map(_.toString).mkString("\\n\\t") + "\\n"
// Append the cause chain, if any, via recursion.
val also = le.getCause match {
case null => ""
case sub: Throwable => "\\nCaught and thrown by:\\n" + showException(sub)
}
ret + also
}
/**
* Modifies the root relative paths from the css url-s
*
* @param path - the path of the css resource
* @prefix - the prefix to be added on the root relative paths. If this is Empty
* the prefix will be the application context path.
*/
def fixCSS(path: List[String], prefix: Box[String]) {
// Marks requests for this exact path as Lift-handled.
val liftReq: LiftRules.LiftRequestPF = new LiftRules.LiftRequestPF {
def functionName = "Default CSS Fixer"
def isDefinedAt(r: Req): Boolean = {
r.path.partPath == path
}
def apply(r: Req): Boolean = {
r.path.partPath == path
}
}
// Serves the CSS resource with root-relative url()s rewritten to include
// the prefix (or the context path when prefix is Empty).
val cssFixer: LiftRules.DispatchPF = new LiftRules.DispatchPF {
def functionName = "default css fixer"
def isDefinedAt(r: Req): Boolean = {
r.path.partPath == path
}
def apply(r: Req): () => Box[LiftResponse] = {
val cssPath = path.mkString("/", "/", ".css")
val css = LiftRules.loadResourceAsString(cssPath);
() => {
css.map(str => CSSHelpers.fixCSS(new BufferedReader(
new StringReader(str)), prefix openOr (S.contextPath)) match {
case (Full(c), _) => CSSResponse(c)
// On a parse failure, log and serve whatever input was consumed.
case (x, input) => {
logger.info("Fixing " + cssPath + " failed with result %s".format(x));
CSSResponse(input)
}
})
}
}
}
LiftRules.dispatch.prepend(cssFixer)
LiftRules.liftRequest.append(liftReq)
}
/**
* Holds user function hooks when the request is about to be processed
* It's legacy from when Lift was a lot more Rails-like. It's called literally at the very
* beginning of the servicing of the HTTP request.
* The S scope is not available nor is the DB connection available in onBeginServicing.
* We recommend using earlyInStateful.
*/
val onBeginServicing = RulesSeq[Req => Unit]
// Hooks consulted before access control; first Full response short-circuits.
val preAccessControlResponse_!! = new RulesSeq[Req => Box[LiftResponse]] with FirstBox[Req, LiftResponse]
// Hooks that may answer the request before normal processing; first Full wins.
val earlyResponse = new RulesSeq[Req => Box[LiftResponse]] with FirstBox[Req, LiftResponse]
/**
* Holds user function hooks when the request was processed
*/
val onEndServicing = RulesSeq[(Req, Box[LiftResponse]) => Unit]
/**
* Tells Lift if the Comet JavaScript should be included. By default it is set to true.
*/
@volatile var autoIncludeComet: LiftSession => Boolean = session => true
// Vends the predicate deciding whether Ajax JavaScript is auto-included.
val autoIncludeAjaxCalc: FactoryMaker[() => LiftSession => Boolean] =
new FactoryMaker(() => () => (session: LiftSession) => true) {}
/**
* Tells Lift which JavaScript settings to use. If Empty, does not
* include the JS settings.
*/
val javaScriptSettings: FactoryMaker[() => Box[LiftSession => JsObj]] =
new FactoryMaker(() => () => (Full((session: LiftSession) => LiftJavaScript.settings): Box[LiftSession => JsObj])) {}
/**
* Define the XHTML validator
*/
@volatile var xhtmlValidator: Box[XHtmlValidator] = Empty // Full(TransitionalXHTML1_0Validator)
// Timeouts in milliseconds for Ajax POSTs and Comet long-polls respectively.
@volatile var ajaxPostTimeout = 5000
@volatile var cometGetTimeout = 140000
/**
* Compute the headers to be sent to the browser in addition to anything else
* that's sent.
*
* Note that the headers for the applications `SecurityRules` are also set
* here, so if you override the supplemental headers, you should
* either refer back to the default set or make sure to include
* `LiftRules.securityRules.headers`.
*/
val supplementalHeaders: FactoryMaker[List[(String, String)]] = new FactoryMaker(() => {
("X-Lift-Version", liftVersion) ::
lockedSecurityRules.headers
}) {}
/**
* Handles content security policy violation reports reported to the default
* reporting endpoint (see `[[ContentSecurityPolicy.defaultReportUri]]`).
*
* If an `Empty` is returned from this function, a default 200 response will
* be returned. The default implementation simply logs the violation at WARN
* level.
*/
@volatile var contentSecurityPolicyViolationReport: (ContentSecurityPolicyViolation)=>Box[LiftResponse] = { violation =>
logger.warn(
s"""Content security policy violation reported on page
| '${violation.documentUri}' from referrer '${violation.referrer}':
| '${violation.blockedUri}' was blocked because it violated the
| directive '${violation.violatedDirective}'. The policy that specified
| this directive is: '${violation.originalPolicy}'.""".trim
)
Empty
}
// IE6-specific response tweaks; predicate checks the current request.
@volatile var calcIE6ForResponse: () => Boolean = () => S.request.map(_.isIE6) openOr false
@volatile var flipDocTypeForIE6 = true
/**
* By default lift uses a garbage-collection mechanism of removing unused bound functions from LiftSesssion.
* Setting this to false will disable this mechanisms and there will be no Ajax polling requests attempted.
*/
@volatile var enableLiftGC = true;
/**
* If Lift garbage collection is enabled, functions that are not seen in the page for this period of time
* (given in milliseconds) will be discarded, hence eligible for garbage collection.
* The default value is 10 minutes.
*/
@volatile var unusedFunctionsLifeTime: Long = 10.minutes
/**
* The polling interval for background Ajax requests to prevent functions of being garbage collected.
* Default value is set to 75 seconds.
*/
@volatile var liftGCPollingInterval: Long = 75.seconds
/**
* Put a test for being logged in into this function
*/
@volatile var loggedInTest: Box[() => Boolean] = Empty
/**
* The polling interval for background Ajax requests to keep functions to not be garbage collected.
* This will be applied if the Ajax request will fail. Default value is set to 15 seconds.
*/
@volatile var liftGCFailureRetryTimeout: Long = 15.seconds
/**
* If this is Full, comet updates (partialUpdates or reRenders) are
* wrapped in a try/catch statement. The provided JsCmd is the body of
* the catch statement. Within that JsCmd, the variable "e" refers to the
* caught exception.
*
* In development mode, this defaults to Full and the command within
* invokes lift.cometOnError with the exception;
* lift.cometOnError rethrows the exception by default. In production
* mode, this defaults to Empty.
*
* Note that if you set this to Full, it is highly advised that you
* rethrow the exception. If you fail to rethrow the exception, you
* run the risk of dropping an unpredictable number of updates (i.e.,
* if the third of 20 updates that are sent to the client in a single
* response throws an exception, none of the subsequent ones will run;
* failing to rethrow the exception means any updates that did not run
* will never be run).
*/
val cometUpdateExceptionHandler: FactoryMaker[Box[JsCmd]] =
new FactoryMaker[Box[JsCmd]]( () => {
if (Props.devMode)
Full(JE.Call("lift.cometOnError", JE.JsVar("e")).cmd)
else
Empty
} ) {}
/**
* Holds the last update time of the Ajax request. Based on this server may return HTTP 304 status
* indicating the client to used the cached information. The timestamp is
* captured once per session via a lazily initialized SessionVar.
*/
@volatile var ajaxScriptUpdateTime: LiftSession => Long = session => {
object when extends SessionVar[Long](millis)
when.is
}
/**
* Determines the path parts and suffix from given path parts.
* NOTE(review): assumes `parts` is non-empty (`parts.last`) — confirm
* callers guarantee this before registering additional splitters.
*/
val suffixSplitters = RulesSeq[SplitSuffixPF].append {
case parts =>
val last = parts.last
// Index of the dot to split at, or -1 to keep the last part whole.
val idx: Int = {
val firstDot = last.indexOf(".")
val len = last.length
if (firstDot + 1 == len) -1 // if the dot is the last character, don't split
else {
if (last.indexOf(".", firstDot + 1) != -1) -1 // if there are multiple dots, don't split out
else {
val suffix = last.substring(firstDot + 1)
// if the suffix isn't in the list of suffixes we care about, don't split it
if (!LiftRules.explicitlyParsedSuffixes.contains(suffix.toLowerCase)) -1
else firstDot
}
}
}
if (idx == -1) (parts, "")
else (parts.dropRight(1) ::: List(last.substring(0, idx)), last.substring(idx + 1))
}
/**
* When a request is parsed into a Req object, certain suffixes are explicitly split from
* the last part of the request URI. If the suffix is contained in this list, it is explicitly split.
* The default list is: "html", "htm", "jpg", "png", "gif", "xml", "rss", "json" ...
*/
@volatile var explicitlyParsedSuffixes: Set[String] = knownSuffixes
/**
* The global multipart progress listener:
* pBytesRead - The total number of bytes, which have been read so far.
* pContentLength - The total number of bytes, which are being read. May be -1, if this number is unknown.
* pItems - The number of the field, which is currently being read. (0 = no item so far, 1 = first item is being read, ...)
*/
@volatile var progressListener: (Long, Long, Int) => Unit = (_, _, _) => ()
/**
* The function that converts a fieldName, contentType, fileName and an InputStream into
* a FileParamHolder. By default, create an in-memory instance. Use OnDiskFileParamHolder
* to create an on-disk version
*/
@volatile var handleMimeFile: (String, String, String, InputStream) => FileParamHolder =
(fieldName, contentType, fileName, inputStream) =>
new InMemFileParamHolder(fieldName, contentType, fileName, Helpers.readWholeStream(inputStream))
// Request-scoped holder for the mime headers of the upload currently in flight.
private object _mimeHeaders extends TransientRequestVar[Box[Map[String, List[String]]]](Empty)
/**
* Returns any mimeHeaders for the currently invoked handleMimeFile.
*/
def mimeHeaders = _mimeHeaders.get
// Scopes `f` with the given mime headers visible via mimeHeaders.
private[http] def withMimeHeaders[T](map: Map[String, List[String]])(f: => T): T = _mimeHeaders.doWith(Full(map))(f)
// Optional cache for parsed templates, keyed by (locale, path).
@volatile var templateCache: Box[TemplateCache[(Locale, List[String]), NodeSeq]] = Empty
// Vends the date/time converter used for formatting/parsing.
val dateTimeConverter: FactoryMaker[DateTimeConverter] = new FactoryMaker[DateTimeConverter]( () => DefaultDateTimeConverter ) {}
/**
* This variable controls whether RequestVars that have been set but not subsequently
* read will be logged in Dev mode. Logging can be disabled at the per-RequestVar level
* via RequestVar.logUnreadVal
*
* @see RequestVar#logUnreadVal
*/
@volatile var logUnreadRequestVars = true
/** Controls whether or not the service handling timing messages (Service request (GET) ... took ... Milliseconds) are logged. Defaults to true. */
@volatile var logServiceRequestTiming = true
/** Provides a function that returns random names for form variables, page ids, callbacks, etc. */
@volatile var funcNameGenerator: () => String = defaultFuncNameGenerator(Props.mode)
import provider.servlet._
import containers._
/**
* The meta for the detected AsyncProvider given the container we're running in
* (the first registered provider that reports suspend/resume support).
*/
lazy val asyncProviderMeta: Box[AsyncProviderMeta] =
asyncMetaList.find(_.suspendResumeSupport_?)
/**
* A function that converts the current Request into an AsyncProvider.
*/
lazy val theServletAsyncProvider: Box[HTTPRequest => ServletAsyncProvider] =
asyncProviderMeta.flatMap(_.providerFunction)
// Registered async providers; mutable only until boot completes.
private var asyncMetaList: List[AsyncProviderMeta] =
List(Servlet30AsyncProvider, Jetty6AsyncProvider, Jetty7AsyncProvider)
/**
* Register an AsyncMeta provider in addition to the default
* Jetty6, Jetty7, and Servlet 3.0 providers.
* (Name presumably intended as "addAsyncProvider"; kept for compatibility.)
*/
def addSyncProvider(asyncMeta: AsyncProviderMeta) {
if (doneBoot) throw new IllegalStateException("Cannot modify after boot.")
asyncMetaList ::= asyncMeta
}
// Replace the provider list wholesale; only valid before boot finishes.
def updateAsyncMetaList(f: List[AsyncProviderMeta] => List[AsyncProviderMeta]) {
if (doneBoot) throw new IllegalStateException("Cannot modify after boot.")
asyncMetaList = f(asyncMetaList)
}
// One-time initialization run at object construction: registers the default
// global form builders (String/Int/Boolean) and maps the built-in snippet
// names (in several casing variants) to their implementations.
private def ctor() {
appendGlobalFormBuilder(FormBuilderLocator[String]((value, setter) => SHtml.text(value, setter)))
appendGlobalFormBuilder(FormBuilderLocator[Int]((value, setter) => SHtml.text(value.toString, s => Helpers.asInt(s).foreach((setter)))))
appendGlobalFormBuilder(FormBuilderLocator[Boolean]((value, setter) => SHtml.checkbox(value, s => setter(s))))
import net.liftweb.builtin.snippet._
snippetDispatch.append(
Map("CSS" -> CSS, "Msgs" -> Msgs, "Msg" -> Msg,
"Menu" -> Menu, "css" -> CSS, "msgs" -> Msgs, "msg" -> Msg,
"menu" -> Menu,
"children" -> Children,
"comet" -> Comet, "form" -> Form, "ignore" -> Ignore, "loc" -> Loc,
"surround" -> Surround,
"test_cond" -> TestCond,
"TestCond" -> TestCond,
"testcond" -> TestCond,
"embed" -> Embed,
"tail" -> Tail,
"head" -> Head,
"Head" -> Head,
"with-param" -> WithParam,
"withparam" -> WithParam,
"WithParam" -> WithParam,
"bind-at" -> WithParam,
"VersionInfo" -> VersionInfo,
"versioninfo" -> VersionInfo,
"version_info" -> VersionInfo,
"SkipDocType" -> SkipDocType,
"skipdoctype" -> SkipDocType,
"skip_doc_type" -> SkipDocType,
"xml_group" -> XmlGroup,
"XmlGroup" -> XmlGroup,
"xmlgroup" -> XmlGroup,
"lazy-load" -> LazyLoad,
"LazyLoad" -> LazyLoad,
"lazyload" -> LazyLoad,
"html5" -> HTML5,
"HTML5" -> HTML5,
"with-resource-id" -> WithResourceId
))
}
ctor()
// Factory for RulesSeq instances.
object RulesSeq {
def apply[T]: RulesSeq[T] = new RulesSeq[T]()
}
/**
* Generic container used mainly for adding functions.
* The permanent rule list is mutable only until boot completes; per-thread
* temporary prepends/appends are supported via prependWith/appendWith.
*/
class RulesSeq[T] {
@volatile private var rules: List[T] = Nil
// Thread-scoped temporary rules: prepended, appended, and the precomputed
// combination currently in effect for toList.
private val pre = new ThreadGlobal[List[T]]
private val app = new ThreadGlobal[List[T]]
private val cur = new ThreadGlobal[List[T]]
// Guard for permanent mutation: only allowed while booting.
private def safe_?(f: => Any) {
doneBoot match {
case false => f
case _ => throw new IllegalStateException("Cannot modify after boot.");
}
}
/**
* Sometimes it's useful to change the rule for the duration of
* a thread... prepend a rule and execute the code within
* a scope with the prepended rule
*/
def prependWith[A](what: T)(f: => A): A = prependWith(List(what))(f)
/**
* Sometimes it's useful to change the rule for the duration of
* a thread... append a rule and execute the code within
* a scope with the appended rule
*/
def appendWith[A](what: T)(f: => A): A = appendWith(List(what))(f)
/**
* Sometimes it's useful to change the rule for the duration of
* a thread... prepend rules and execute the code within
* a scope with the prepended rules
*/
def prependWith[A](what: List[T])(f: => A): A = {
val newList = pre.value match {
case null => what
case Nil => what
case x => what ::: x
}
pre.doWith(newList)(doCur(f))
}
/**
* Sometimes it's useful to change the rules for the duration of
* a thread... append rules and execute the code within
* a scope with the appended rules
* NOTE(review): this reads `pre.value` (not `app.value`) when combining —
* looks asymmetric with prependWith; confirm whether intentional.
*/
def appendWith[A](what: List[T])(f: => A): A = {
val newList = pre.value match {
case null => what
case Nil => what
case x => x ::: what
}
app.doWith(newList)(doCur(f))
}
/**
* Precompute the current rule set (permanent rules sandwiched between any
* thread-local prepends and appends) for the duration of f.
*/
private def doCur[A](f: => A): A = {
cur.doWith((pre.value, app.value) match {
case (null, null) | (null, Nil) | (Nil, null) | (Nil, Nil) => rules
case (null, xs) => rules ::: xs
case (xs, null) => xs ::: rules
case (p, a) => p ::: rules ::: a
})(f)
}
// The effective rules: the precomputed thread-local set if one is active,
// otherwise the permanent list.
def toList: List[T] = cur.value match {
case null => rules
case xs => xs
}
// Permanently prepend a rule (boot time only).
def prepend(r: T): RulesSeq[T] = {
safe_? {
rules = r :: rules
}
this
}
// Permanently remove matching rules (boot time / runAsSafe only).
private[http] def remove(f: T => Boolean) {
safe_? {
rules = rules.filterNot(f)
}
}
// Permanently append a rule (boot time only).
def append(r: T): RulesSeq[T] = {
safe_? {
rules = rules ::: List(r)
}
this
}
}
/**
 * Mixin for RulesSeq instances holding Box-producing functions: evaluates
 * the registered functions in order, short-circuiting at the first Full.
 */
trait FirstBox[F, T] {
  self: RulesSeq[F => Box[T]] =>

  /**
   * Applies each registered function to `param` in registration order and
   * returns the first Full result; Empty when none produces one. Functions
   * after the first Full are never invoked.
   */
  def firstFull(param: F): Box[T] =
    toList.iterator
      .map(fn => fn(param))
      .collectFirst { case found @ Full(_) => found: Box[T] }
      .getOrElse(Empty)
}
}
// ADT describing how a "not found" condition should be handled.
sealed trait NotFound
// Use Lift's default 404 handling.
case object DefaultNotFound extends NotFound
// Answer with the given pre-built response.
final case class NotFoundAsResponse(response: LiftResponse) extends NotFound
// Render the template at the given path as the 404 page.
final case class NotFoundAsTemplate(path: ParsePath) extends NotFound
// Render the given markup as the 404 page.
final case class NotFoundAsNode(node: NodeSeq) extends NotFound
final case class BreakOut()
// Implemented by the application's bootstrap.liftweb.Boot class.
abstract class Bootable {
def boot(): Unit;
}
/*
/**
* Factory object for RulesSeq instances
*/
object RulesSeq {
def apply[T]: RulesSeq[T] = new RulesSeq[T]
}
*/
// Default bootstrap: reflectively instantiates bootstrap.liftweb.Boot and
// invokes its boot() method. (Class.newInstance is deprecated on newer JDKs;
// kept as-is here.)
private[http] case object DefaultBootstrap extends Bootable {
def boot(): Unit = {
val f = createInvoker("boot", Class.forName("bootstrap.liftweb.Boot").newInstance.asInstanceOf[AnyRef])
f.map {f => f()}
}
}
/**
* Holds the Comet identification information: a globally unique id plus a
* monotonically advancing version for that comet.
*/
trait CometVersionPair {
def guid: String
def version: Long
}
object CometVersionPair {
// Extractor so any CometVersionPair can be pattern matched as (guid, version).
def unapply(pair: CometVersionPair): Option[(String, Long)] = {
Some((pair.guid, pair.version))
}
}
// Concrete CometVersionPair.
case class CVP(guid: String, version: Long) extends CometVersionPair
// A single validation problem reported by an XHtmlValidator.
case class XHTMLValidationError(msg: String, line: Int, col: Int)
trait XHtmlValidator extends Function1[Node, List[XHTMLValidationError]]
// Validates against the XHTML 1.0 Strict schema.
object StrictXHTML1_0Validator extends GenericValidator {
def ngurl = "http://www.w3.org/2002/08/xhtml/xhtml1-strict.xsd"
}
/**
 * Schema-backed XHTML validator. Subclasses supply the schema location via
 * `ngurl`; `apply` serializes the node to UTF-8 and runs a JAXP validator
 * over it, returning any SAX parse error found.
 *
 * NOTE: the schema is fetched lazily from the network (`new URL(ngurl)`),
 * so the first validation may block on I/O. Schema/validator construction
 * failures are logged and reported as "no errors" (Nil).
 */
abstract class GenericValidator extends XHtmlValidator with Loggable {
  import javax.xml.validation._
  import javax.xml._
  import XMLConstants._
  import java.net.URL
  import javax.xml.transform.dom._
  import javax.xml.transform.stream._
  import java.io.ByteArrayInputStream

  private lazy val sf = SchemaFactory.newInstance(W3C_XML_SCHEMA_NS_URI)

  /** Location of the XSD to validate against. */
  protected def ngurl: String

  // tryo turns schema download/parse failures into a Failure instead of throwing.
  private lazy val schema = tryo(sf.newSchema(new URL(ngurl)))

  def apply(in: Node): List[XHTMLValidationError] = {
    (for{
      sc <- schema
      v <- tryo(sc.newValidator)
      source = new StreamSource(new ByteArrayInputStream(in.toString.getBytes("UTF-8")))
    } yield try {
      v.validate(source)
      Nil // validated cleanly
    } catch {
      case e: org.xml.sax.SAXParseException =>
        List(XHTMLValidationError(e.getMessage, e.getLineNumber, e.getColumnNumber))
    }) match {
      case Full(x) => x
      case Failure(msg, _, _) =>
        logger.info("XHTML Validation Failure: " + msg)
        Nil
      case _ => Nil
    }
  }
}
/** Validates against the XHTML 1.0 Transitional schema. */
object TransitionalXHTML1_0Validator extends GenericValidator {
  def ngurl = "http://www.w3.org/2002/08/xhtml/xhtml1-transitional.xsd"
}
/**
 * Registry of form builders keyed by the rendered type's Manifest, with
 * three scopes consulted in priority order: request, then session, then
 * global. Within a scope, earlier (prepended) builders win over later
 * (appended) ones.
 */
trait FormVendor {
  /**
   * Given a type manifest, vend a form builder for that type, searching
   * request-scoped, then session-scoped, then global registrations.
   * Returns Empty when no builder is registered for the type.
   */
  def vendForm[T](implicit man: Manifest[T]): Box[(T, T => Any) => NodeSeq] = {
    val name = man.toString
    val first: Option[List[FormBuilderLocator[_]]] = requestForms.is.get(name) orElse sessionForms.is.get(name)

    first match {
      case Some(x :: _) => Full(x.func.asInstanceOf[(T, T => Any) => NodeSeq])
      case _ => if (globalForms.containsKey(name)) {
        globalForms.get(name).headOption.map(_.func.asInstanceOf[(T, T => Any) => NodeSeq])
      } else Empty
    }
  }

  // Global registrations shared across all sessions. Updates replace the
  // whole list for a key, so they are guarded by synchronized blocks.
  private val globalForms: CHash[String, List[FormBuilderLocator[_]]] = new CHash

  /** Registers `builder` globally with highest priority for its type. */
  def prependGlobalFormBuilder[T](builder: FormBuilderLocator[T]) {
    globalForms.synchronized {
      val name = builder.manifest.toString
      if (globalForms.containsKey(name)) {
        globalForms.put(name, builder :: globalForms.get(name))
      } else {
        globalForms.put(name, List(builder))
      }
    }
  }

  /** Registers `builder` globally with lowest priority for its type. */
  def appendGlobalFormBuilder[T](builder: FormBuilderLocator[T]) {
    globalForms.synchronized {
      val name = builder.manifest.toString
      if (globalForms.containsKey(name)) {
        // Fixed: this was byte-identical to prependGlobalFormBuilder and
        // prepended with `::`; an appended builder must go to the END so
        // existing registrations keep their priority.
        globalForms.put(name, globalForms.get(name) ::: List(builder))
      } else {
        globalForms.put(name, List(builder))
      }
    }
  }

  /** Registers `builder` session-scoped with highest priority. */
  def prependSessionFormBuilder[T](builder: FormBuilderLocator[T]) {
    sessionForms.set(prependBuilder(builder, sessionForms))
  }

  /** Registers `builder` session-scoped with lowest priority. */
  def appendSessionFormBuilder[T](builder: FormBuilderLocator[T]) {
    sessionForms.set(appendBuilder(builder, sessionForms))
  }

  /** Registers `builder` request-scoped with highest priority. */
  def prependRequestFormBuilder[T](builder: FormBuilderLocator[T]) {
    requestForms.set(prependBuilder(builder, requestForms))
  }

  /** Registers `builder` request-scoped with lowest priority. */
  def appendRequestFormBuilder[T](builder: FormBuilderLocator[T]) {
    requestForms.set(appendBuilder(builder, requestForms))
  }

  /** Makes `builder` the highest-priority request-scoped builder while `f` runs. */
  def doWith[F, T](builder: FormBuilderLocator[T])(f: => F): F =
    requestForms.doWith(prependBuilder(builder, requestForms))(f)

  /** Returns `to` with `builder` placed at the head of its type's list. */
  private def prependBuilder(builder: FormBuilderLocator[_], to: Map[String, List[FormBuilderLocator[_]]]):
  Map[String, List[FormBuilderLocator[_]]] = {
    val name = builder.manifest.toString
    to + (name -> (builder :: to.getOrElse(name, Nil)))
  }

  /** Returns `to` with `builder` placed at the end of its type's list. */
  private def appendBuilder(builder: FormBuilderLocator[_], to: Map[String, List[FormBuilderLocator[_]]]):
  Map[String, List[FormBuilderLocator[_]]] = {
    val name = builder.manifest.toString
    // Fixed: this was byte-identical to prependBuilder; append places the
    // builder at the END so existing registrations keep their priority.
    to + (name -> (to.getOrElse(name, Nil) ::: List(builder)))
  }

  private object sessionForms extends SessionVar[Map[String, List[FormBuilderLocator[_]]]](Map())
  // NOTE(review): request-scoped registrations are backed by a SessionVar
  // here; a RequestVar looks intended — confirm before changing the scoping.
  private object requestForms extends SessionVar[Map[String, List[FormBuilderLocator[_]]]](Map())
}
| sortable/framework | web/webkit/src/main/scala/net/liftweb/http/LiftRules.scala | Scala | apache-2.0 | 78,751 |
package codechicken.multipart
import java.util.Random
/**
 * Interface for parts that need random display ticks (torches).
 *
 * Marker interface for TRandomDisplayTickTile.
 */
trait IRandomDisplayTick
{
  /**
   * Called on a random display tick.
   *
   * @param random source of randomness for this tick
   */
  def randomDisplayTick(random:Random)
}
package scalaz.stream
import org.scalacheck.Properties
import org.scalacheck.Prop._
import scalaz.concurrent.Task
import scalaz.{Nondeterminism, -\\/, \\/-, \\/}
import java.lang.Exception
import scalaz.syntax.monad._
import scala.concurrent.SyncVar
import scalaz.stream.async.mutable.Signal
import scalaz.stream.Process.{End, eval, eval_}
object AsyncSignalSpec extends Properties("async.signal") {

  // Sentinel failure used to tell deliberate test failures from real ones;
  // the stack trace is suppressed to keep test output clean.
  case object TestedEx extends Exception("expected in test") {
    override def fillInStackTrace = this
  }

  // Concurrently feeds values into the signal while another task consumes the
  // continuous view; the consumer must only ever observe values passing its
  // takeWhile predicate.
  property("basic") = forAll { l: List[Int] =>
    val v = async.signal[Int]
    val s = v.continuous
    val t1 = Task {
      l.foreach { i => v.set(i).run; Thread.sleep(1) }
      v.close.run
    }
    val t2 = s.takeWhile(_ % 23 != 0).runLog

    Nondeterminism[Task].both(t1, t2).run._2.toList.forall(_ % 23 != 0)
  }

  // tests all the operations on the signal (get,set,fail)
  property("signal-ops") = forAll {
    l: List[Int] =>
      val signal = async.signal[Int]

      // Derive one signal operation per input element; each returns a
      // description plus a Task asserting the operation's expected result,
      // given the previously observed value `o`.
      val ops: List[Int => (String, Task[Boolean])] = l.map {
        v =>
          (v % 5).abs match {
            case 0 => (o: Int) => ("get", signal.get.map(_ == o))
            case 1 => (o: Int) => (s"set($v)", signal.set(v) *> signal.get.map(_ == v))
            case 2 => (o: Int) => (s"getAndSet($v)", signal.getAndSet(v).map(r => r == Some(o)))
            case 3 => (o: Int) => (s"compareAndSet(_=>Some($v))", signal.compareAndSet(_ => Some(v)).map(_ == Some(v)))
            case 4 => (o: Int) => ("compareAndSet(_=>None)", signal.compareAndSet(_ => None).map(_ == Some(o)))
          }
      }

      //initial set
      signal.set(0).run

      // Run the operations sequentially, threading the last observed value
      // (or the first failure) through the fold and recording each outcome.
      val (_, runned) =
        ops.foldLeft[(Throwable \\/ Int, Seq[(String, Boolean)])]((\\/-(signal.get.run), Seq(("START", true)))) {
          case ((\\/-(last), acc), n) =>
            n(last) match {
              case (descr, action) =>
                action.attemptRun match {
                  case \\/-(maybeOk) => (signal.get.attemptRun, acc :+((descr, maybeOk)))
                  case -\\/(failed) => (-\\/(failed), acc :+((descr, false)))
                }
            }
          case ((-\\/(lastErr), acc), n) =>
            //just execute item all with 0, and record the exception
            n(0) match {
              case (descr, action) =>
                action.attemptRun match {
                  case \\/-(unexpected) => (-\\/(lastErr), acc :+((descr + " got " + unexpected, true)))
                  case -\\/(failure) if failure == lastErr || failure == End => (-\\/(lastErr), acc :+((descr, true)))
                  case -\\/(unexpectedFailure) => (-\\/(unexpectedFailure), acc :+((descr, false)))
                }
            }
        }

      signal.close.run

      (runned.filterNot(_._2).size == 0) :| "no ref action failed" &&
        (runned.size == l.size + 1) :| "all actions were run"
  }

  // tests sink
  property("signal.sink") = forAll {
    l: List[Int] =>
      val signal = async.signal[(String, Int)]

      // Terminate normally for even-sized input, with TestedEx otherwise.
      val last = if (l.size % 2 == 0) Signal.Fail(End) else Signal.Fail(TestedEx)

      val messages = l.zipWithIndex.map {
        case (i, idx) =>
          import Signal._
          (i % 3).abs match {
            case 0 => Set[(String, Int)]((s"$idx. Set", i))
            case 1 => CompareAndSet[(String, Int)](_ => Some((s"$idx. CompareAndSet", i)))
            case 2 => CompareAndSet[(String, Int)](_ => None)
          }
      } :+ last

      // Collects whatever the continuous view emits until the signal dies.
      val feeded = new SyncVar[Throwable \\/ Seq[(String, Int)]]
      Task { signal.continuous.runLog.runAsync(feeded.put) }.run

      val feeder =
        Process.eval(Task.now(Signal.Set[(String, Int)](("START", 0)))) ++
          Process.emitAll(messages).evalMap(e => Task.fork { Thread.sleep(1); Task.now(e) })

      (feeder to signal.sink).attempt().run.attemptRun

      val result = feeded.get(3000)

      (result.isDefined == true) :| "got result in time" &&
        (if (last == Signal.Fail(End)) {
          (result.get.isRight == true) :| "result did not fail" &&
            (result.get.toOption.get.size >= messages.size) :| "items was emitted" &&
            (signal.get.attemptRun == -\\/(End)) :| "Signal is terminated"
        } else {
          (result.get == -\\/(TestedEx)) :| "Exception was raised correctly" &&
            (signal.get.attemptRun == -\\/(TestedEx)) :| "Signal is failed"
        })
  }

  // tests the discrete stream so it would contain all discrete values set
  property("discrete") = forAll {
    l: List[Int] =>
      val initial = None
      val feed = l.map(Some(_))

      val ref = async.signal[Option[Int]]
      ref.set(initial).run

      // Two independent subscribers must both see every discrete update.
      val d1 = ref.discrete.take(l.size+1)
      val d2 = ref.discrete.take(l.size+1)

      val sync1 = new SyncVar[Throwable \\/ Seq[Option[Int]]]
      d1.runLog.runAsync(sync1.put)

      val sync2 = new SyncVar[Throwable \\/ Seq[Option[Int]]]
      d2.runLog.runAsync(sync2.put)

      Task {
        feed.foreach { v => ref.set(v).run }
      }.run

      sync1.get(3000).nonEmpty :| "First process finished in time" &&
        sync2.get(3000).nonEmpty :| "Second process finished in time" &&
        (sync1.get.isRight && sync2.get.isRight) :| "both processes finished ok" &&
        (sync1.get.toOption.get == None +: feed) :| "first process get all values signalled" &&
        (sync2.get.toOption.get == None +: feed) :| "second process get all values signalled"
  }

  property("continuous") = secure {
    def wait(ms: Int) = Task.delay(Thread.sleep(ms))
    val sig = async.signal[Int]
    val sink = sig.sink.contramap((x: Int) => Signal.Set(x))

    // Set signal: 1 immediately, then 2,3,4 after ~300ms.
    val in = Process(1) fby eval_(wait(300)) fby Process(2, 3, 4)
    in.to(sink).run.runAsync(_ => ())

    // Sample the continuous view at t~0, t~600ms and t~900ms.
    val p = Process(()) fby eval(wait(600)) fby eval(wait(300))
    val expOut = List(1, 4, 4)
    val out = p.zip(sig.continuous).map(_._2).runLog.run.toList
    (out == expOut) :| s"continuous returns newest values - unexpected $out"
  }
}
| jedws/scalaz-stream | src/test/scala/scalaz/stream/AsyncSignalSpec.scala | Scala | mit | 6,165 |
package org.dynfabdoc
import scalajs.js
import scalajs.js.JSApp
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import org.scalajs.dom
import org.dynfabdoc.styles.Presentation
import scalacss.ScalatagsCss._
import scalacss.ScalatagsJsDomImplicits
import scalacss.Defaults._
import scalatags.JsDom.TypedTag
import org.scalajs.dom.raw.HTMLStyleElement
/**
 * Scala.js entry point: installs the ScalaCSS presentation stylesheet into
 * the document head and mounts the React main view into a fresh div.
 *
 * @author mathi_000
 */
object Main extends JSApp {
  def main(): Unit = {
    import scalatags.JsDom.all._

    // Root container for the React application.
    val mainDiv = div().render
    dom.document.body.appendChild(mainDiv)

    // Render the Presentation stylesheet as a <style> element.
    dom.document.head.appendChild(Presentation.render[TypedTag[HTMLStyleElement]].render)

    // Mount the main view, pointed at the remote tutorial document.
    ReactDOM.render(
      MainView.view(
        "https://raw.githubusercontent.com/math85360/dynFabDoc-tutorials/master/chapin-de-noel"), mainDiv)
  }
}
/*
mls: basic machine learning algorithms for Scala
Copyright (C) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.models
import ml.Pattern
import scala.collection.immutable.Map
import scala.util.Random
/**
 * A baseline model that ranks the classes of an instance in a random order.
 * Useful as a worst-case / sanity-check ranker.
 *
 * @param seed seed for the internal Random so runs are reproducible
 */
case class RandomRank(seed: Int) extends Model {
  def JS(pattern: Pattern) = ???

  /** Index of the class receiving the highest random score. */
  def predict(instance: Pattern) = output(instance).zipWithIndex.maxBy(_._1)._2

  /**
   * Turns the random scores into a probability distribution: min-max
   * normalization followed by division by the sum.
   */
  def distribution(instance: Pattern) = {
    println(s"distribution was not calculated with Platt/sigmoid")
    val arr = output(instance)
    val min = arr.min
    val max = arr.max
    val range = max - min
    // Fixed operator precedence: the original `x - min / max - min`
    // computed x - (min / max) - min. When all scores are equal
    // (single class) fall back to a uniform vector to avoid 0/0.
    val norm = if (range == 0) arr map (_ => 1.0) else arr map (x => (x - min) / range)
    val sum = norm.sum
    norm map (x => x / sum)
  }

  val rnd = new Random(seed)

  /** Random permutation of 1..nclasses used as per-class scores. */
  def output(instance: Pattern) = (rnd.shuffle(1 to instance.nclasses) map (_.toDouble)).toArray

  override lazy val L = ???
}
| Crespo911/elm-scala | src/main/scala/ml/models/RandomRank.scala | Scala | gpl-3.0 | 1,445 |
package mr.merc.economics
import mr.merc.army.WarriorViewNames
import mr.merc.economics.Culture.{CultureAlignment, FrenchHuman, LatinHuman}
import mr.merc.economics.Culture.CultureAlignment.{ColorAlignment, PriorityAlignment}
import mr.merc.economics.PopulationType.{Aristocrats, Capitalists, Traders}
import mr.merc.economics.Race.Humans
import mr.merc.map.terrain.FourSeasonsTerrainTypes.{FourSeasonsCityCastle, FourSeasonsGrassyRoad, FourSeasonsHumanCastle, FourSeasonsTerrainType}
import mr.merc.politics._
import org.mockito.MockitoSugar
import org.scalatest.funsuite.AnyFunSuite
import scalafx.scene.paint.Color
/**
 * Tests of the election mechanics: how populations distribute votes among
 * parties according to their political views.
 */
class ElectionTest extends AnyFunSuite with MockitoSugar {

  val culture: Culture = FrenchHuman
  val culture2: Culture = LatinHuman

  test("choose best party") {
    // Matches every position the views below give full weight to.
    val bestParty = Party("best", Color.White,
      Migration.OpenBorders, Regime.Absolute, ForeignPolicy.Expansionism, Economy.StateEconomy,
      SocialPolicy.LifeNeedsSocialSecurity, VotersPolicy.PrimaryUpperClass)

    // Opposes all of those positions.
    val worstParty = new Party("worst", Color.Black,
      Migration.ClosedBorders, Regime.Democracy, ForeignPolicy.Pacifism, Economy.FreeMarket,
      SocialPolicy.RegularNeedsSocialSecurity, VotersPolicy.NoVoting)

    val position = new PoliticalViews(
      Migration.popularity(1, 0, 1, 0),
      Regime.popularity(1, 0, 0, 1, 0, 0),
      ForeignPolicy.popularity(1, 0, 1, 0),
      Economy.popularity(1, 0, 1, 0),
      SocialPolicy.popularity(0, 1, 0, 0, 1, 0),
      VotersPolicy.popularity(0, 1, 0, 0, 0, 0,
        0, 1, 0, 0, 0, 0))

    val population = new Population(culture, Traders, 100, 0, 0, position, mock[Province])

    import Election.PopulationElection

    val report = population.choose(List(bestParty, worstParty))
    // Every member votes for the party matching their views.
    assert(report === PopulationElectionReport(population, Map(bestParty -> 100, worstParty -> 0)))
  }

  test("choose when there are two best parties") {
    // Two parties equally close to the population's views...
    val bestParty1 = Party("best", Color.White,
      Migration.OpenBorders, Regime.Absolute, ForeignPolicy.Expansionism, Economy.StateEconomy,
      SocialPolicy.NoSocialSecurity, VotersPolicy.PrimaryUpperClass)
    val bestParty2 = Party("best", Color.White,
      Migration.OpenBorders, Regime.Absolute, ForeignPolicy.Expansionism, Economy.StateEconomy,
      SocialPolicy.RegularNeedsSocialSecurity, VotersPolicy.PrimaryUpperClass)
    // ...and one that opposes them.
    val worstParty = new Party("worst", Color.Black,
      Migration.ClosedBorders, Regime.Democracy, ForeignPolicy.Pacifism, Economy.FreeMarket,
      SocialPolicy.RegularNeedsSocialSecurity, VotersPolicy.NoVoting)

    val position = new PoliticalViews(
      Migration.popularity(1, 0, 1, 0),
      Regime.popularity(1, 0, 0, 1, 0, 0),
      ForeignPolicy.popularity(1, 0, 1, 0),
      Economy.popularity(1, 0, 1, 0),
      SocialPolicy.popularity(0, 1, 0, 0, 1, 0),
      VotersPolicy.popularity(0, 1, 0, 0, 0, 0,
        0, 1, 0, 0, 0, 0))

    val population = new Population(culture, Traders, 100, 0, 0, position, mock[Province])

    import Election.PopulationElection

    val report = population.choose(List(bestParty1, bestParty2, worstParty))
    // Ties are split evenly between equally attractive parties.
    assert(report === PopulationElectionReport(population, Map(bestParty1 -> 50, bestParty2 -> 50, worstParty -> 0)))
  }

  test("real election in one region") {
    val party1 = Party("best1", Color.White,
      Migration.OpenBorders, Regime.Absolute, ForeignPolicy.Expansionism, Economy.StateEconomy,
      SocialPolicy.NoSocialSecurity, VotersPolicy.PrimaryUpperClass)
    val party2 = Party("best2", Color.White,
      Migration.OpenBorders, Regime.Absolute, ForeignPolicy.Expansionism, Economy.StateEconomy,
      SocialPolicy.RegularNeedsSocialSecurity, VotersPolicy.PrimaryUpperClass)
    val party3 = new Party("worst", Color.Black,
      Migration.ClosedBorders, Regime.Democracy, ForeignPolicy.Pacifism, Economy.FreeMarket,
      SocialPolicy.RegularNeedsSocialSecurity, VotersPolicy.NoVoting)

    val position = new PoliticalViews(
      Migration.popularity(1, 0, 0, 1),
      Regime.popularity(1, 0, 0, 1, 0, 0),
      ForeignPolicy.popularity(1, 0, 0, 1),
      Economy.popularity(1, 0, 0, 1),
      SocialPolicy.popularity(1, 0, 0, 0, 1, 0),
      VotersPolicy.popularity(0, 1, 0, 0, 0, 0,
        0, 0, 0, 1, 0, 0))

    val province = mock[Province]
    // Pops differ in type, size and literacy; only two appear in the report —
    // presumably the VotersPolicy/culture rules exclude the others. TODO
    // confirm against Election's voting-rights logic.
    val population1 = new Population(culture, Aristocrats, 100, 0, 0, position, province)
    val population2 = new Population(culture, Capitalists, 150, 0, 150, position, province)
    val population3 = new Population(culture, Traders, 15000, 0, 150, position, province)
    val population4 = new Population(culture2, Traders, 15000, 0, 150, position, province)
    val rp = new RegionPopulation(province)
    List(population1, population2, population3, population4).foreach(rp.addPop)
    when(province.regionPopulation).thenReturn(rp)

    val election = new Election(party1, culture, List(party1, party2, party3))
    val report = election.doElections(List(province))
    assert(report === StateElectionReport(List(RegionElectionReport(province, List(
      PopulationElectionReport(population1, Map(party1 -> 100, party2 -> 0, party3 -> 0)),
      PopulationElectionReport(population2, Map(party3 -> 150, party1 -> 0, party2 -> 0))
    )))))
    assert(report.votes === Map(party1 -> 100, party3 -> 150, party2 -> 0))
  }
}
| RenualdMarch/merc | src/test/scala/mr/merc/economics/ElectionTest.scala | Scala | gpl-3.0 | 5,291 |
/*
* Copyright 2012 Jonathan Anderson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import scala.collection.JavaConversions._
import org.junit.runner.RunWith
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.{BeforeAndAfter,FreeSpec}
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers._
import org.scalatest.mock.MockitoSugar
import org.junit.runner.RunWith
package me.footlights.api.ajax {
import JSON._
import JSONData._
/** Integration tests for JSON generation ({@link JSON} / {@link JSONData}). */
@RunWith(classOf[JUnitRunner])
class JSONTest extends FreeSpec with BeforeAndAfter with MockitoSugar with ShouldMatchers {
  "A JSON object should be able to hold " - {
    "the empty set" in {
      JSON().toString should fullyMatch regex """\\{[ ]*\\}"""
    }

    "primitive members" in {
      val json = JSON("foo" -> 42, "bar" -> 3.1415926, "baz" -> "Hello, world!")
      json.toString should equal (
        "{ \\"foo\\": 42, \\"bar\\": 3.1415926, \\"baz\\": \\"Hello, world!\\" }")
    }

    "arrays" in {
      val arr = List(1,2,3) toArray
      val json = JSON("array" -> arr)
      json.toString should equal ("{ \\"array\\": [ 1, 2, 3 ] }")
    }

    "lists" in {
      val json = JSON("list" -> List(1, 2, 3))
      json.toString should equal ("{ \\"list\\": [ 1, 2, 3 ] }")
    }

    "lists of lists" in {
      val json = JSON("listoflists" -> List(1 :: 2 :: 3 :: Nil))
      json.toString should equal ("{ \\"listoflists\\": [ [ 1, 2, 3 ] ] }")
    }

    "explicit JSON objects" in {
      val json = JSON("sub" -> JSON("foo" -> "bar"))
      json.toString should equal ("{ \\"sub\\": { \\"foo\\": \\"bar\\" } }")
    }

    "maps" in {
      val json = JSON("sub" -> Map("foo" -> "bar"))
      json.toString should equal ("{ \\"sub\\": { \\"foo\\": \\"bar\\" } }")
    }

    "maps as implicit JSON objects" in {
      val json = JSON("sub" -> ("foo" -> "bar"))
      json.toString should equal ("{ \\"sub\\": { \\"foo\\": \\"bar\\" } }")
    }

    "anything a Map[String,T] can hold" in {
      val map:Map[String,JSONData] = Map(
        "foo" -> 42,
        "bar" -> "hello",
        "baz" -> List(1, 2),
        "map" -> Map("a" -> 1, "b" -> 2)
      )

      // Exercises the implicit conversion Map[String,JSONData] => JSON.
      val json:JSON = map
      json.toString should equal (
        """{ "foo": 42, "bar": "hello", "baz": [ 1, 2 ], "map": { "a": 1, "b": 2 } }""")
    }
  }
}
}
| nasrallahmounir/Footlights | API/src/test/scala/me/footlights/api/ajax/json-test.scala | Scala | apache-2.0 | 2,748 |
package hubs
import signalJ.services.Hub
/**
 * Implements a singleton SignalJ hub that supports notification of scheduled
 * events to client-side listeners.
 */
class EventHub extends Hub[EventSink]{
  /*
   * Implements the "getInterface" method mandated by the SignalJ architecture:
   * exposes the client-side interface type served by this hub.
   */
  override def getInterface: Class[EventSink] = classOf[EventSink]
}
package io.scalding.approximations.BloomFilter
import com.twitter.algebird._
import com.twitter.scalding._
import com.twitter.scalding.source.TypedSequenceFile
/**
 * We generate 100.000 user ids ( 1 .. 100000 ) and add them into a BloomFilter
 * with a small estimation error. Then we execute membership queries on some ids.
 *
 * @author Antonios Chalkiopoulos - http://scalding.io
 */
class SimpleBFAggregator(args:Args) extends Job(args) {

  // ~2% false-positive probability sized for 100K entries.
  val bloomFilterMonoid = BloomFilter(numEntries = 100000 , fpProb = 0.02)

  // BF aggregator: extract the userID from each SimpleUser before adding it.
  val bfAggregator = BloomFilterAggregator
    .apply(bloomFilterMonoid)
    .composePrepare[SimpleUser](_.userID)

  // Generate and add 100K ids into the (Bloom) filter
  val usersList = (1 to 100000).toList.map{ x => SimpleUser(x.toString) }
  val usersBF = typed.IterablePipe[SimpleUser](usersList)
    .aggregate(bfAggregator)

  // Display that BF can be queried ("maybe" = possible false positive)
  usersBF
    .map { bf:BF =>
      println("BF contains 'ABCD' ? " + (if (bf.contains("ABCD").isTrue) "maybe" else "no"))
      println("BF contains 'EFGH' ? " + (if (bf.contains("EFGH").isTrue) "maybe" else "no"))
      println("BF contains '123' ? " + (if (bf.contains("123") .isTrue) "maybe" else "no"))
      bf
    }
    .write( TypedCsv(args.getOrElse("output","results/SimpleBFAggregator")) )

  // Serialize the BF
  // Note: Serialization adds a few more seconds to execution time
  // So expect 4 seconds to generate the BF and 40 more if you choose to serialize it
  usersBF
    .map { bf:BF => io.scalding.approximations.Utils.serialize(bf) }
    .write( TypedSequenceFile(args.getOrElse("serialized","results/SimpleBFAggregator-serialized")) )
}
/**
 * Entry point that runs [[SimpleBFAggregator]] in local mode and reports the
 * wall-clock execution time.
 */
object SimpleBFAggregatorRunner extends App {
  import org.apache.hadoop.conf.Configuration
  import org.apache.hadoop.util.ToolRunner

  val timer = io.scalding.approximations.Utils.withTimeCalc("SimpleBFAggregatorRunner") {
    ToolRunner.run(new Configuration, new Tool, (classOf[SimpleBFAggregator].getName :: "--local" :: args.toList).toArray)
  }
  // Fixed: the message previously printed an en dash ("–-local") instead of "--local".
  println(s"Running BF synthetic-data example took $timer msec in --local mode")
}
/**
* Copyright (c) 2012 Alexey Aksenov ezh@ezh.msk.ru
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.digi.lib.ctrl.declaration
/** Constants and schema for the control application's history ContentProvider. */
object DHistoryProvider {
  // Lets a Uri enumeration value be used wherever an android.net.Uri is expected.
  implicit def value2uri(v: Enumeration#Value): android.net.Uri = android.net.Uri.parse(v.toString)

  /** ContentProvider authority under which history data is published. */
  val authority = DConstant.controlPackage + ".history"

  /** One history record; component/executable/connection are serialized blobs. */
  case class Row(val session_id: Int,
    val process_id: Int,
    val component: Array[Byte],
    val executable: Array[Byte],
    val connection: Array[Byte])

  /** Column names used by the provider's tables. */
  object Field extends Enumeration {
    val ID = Value("_id")
    val ComponentTS = Value("component_timestamp")
    val ComponentName = Value("component_name")
    val ComponentPackage = Value("component_package")
    val UserTS = Value("user_timestamp")
    val UserOrigin = Value("user_origin") // component id
    val UserName = Value("user_name")
    val ActivityTS = Value("activity_timestamp")
    val ActivityOrigin = Value("activity_origin") // component id
    val ActivitySeverity = Value("activity_severity")
    val ActivityMessage = Value("activity_message")
    val SessionTS = Value("session_timestamp")
    val SessionOrigin = Value("session_origin") // component id
    val SessionIP = Value("session_ip")
    val SessionDuration = Value("session_duration")
    val AuthTS = Value("auth_timestamp")
    val AuthOrigin = Value("auth_origin") // session id
    val AuthUser = Value("auth_user") // user id
    val AuthCode = Value("auth_code") // Session.Auth.Value: no, unknown user, failed, successful
  }

  /** Content URIs served by the provider ("#" denotes a row id). */
  object Uri extends Enumeration {
    val History = Value("content://" + authority + "/history")
    val HistoryID = Value("content://" + authority + "/history/#")
    val Activity = Value("content://" + authority + "/activity")
    val ActivityID = Value("content://" + authority + "/activity/#")
    val Session = Value("content://" + authority + "/session")
    val SessionID = Value("content://" + authority + "/session/#")
    val Auth = Value("content://" + authority + "/auth")
    val AuthID = Value("content://" + authority + "/auth/#")
  }
}
| ezh/digi-lib-ctrl | src/main/scala/org/digimead/digi/lib/ctrl/declaration/DHistoryProvider.scala | Scala | apache-2.0 | 2,588 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.Properties
import scala.collection.mutable.{ArrayBuffer, HashSet, HashMap, Map}
import scala.language.reflectiveCalls
import scala.util.control.NonFatal
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Timeouts
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.storage.{BlockId, BlockManagerId, BlockManagerMaster}
import org.apache.spark.util.CallSite
import org.apache.spark.executor.TaskMetrics
/**
 * Test double for DAGSchedulerEventProcessLoop that delivers events
 * synchronously on the caller's thread instead of via the event queue.
 */
class DAGSchedulerEventProcessLoopTester(dagScheduler: DAGScheduler)
  extends DAGSchedulerEventProcessLoop(dagScheduler) {

  override def post(event: DAGSchedulerEvent): Unit = {
    try {
      // Forward event to `onReceive` directly to avoid processing event asynchronously.
      // (Invokes the parent loop's handler inline.)
      onReceive(event)
    } catch {
      case NonFatal(e) => onError(e)
    }
  }
}
/**
 * An RDD for passing to DAGScheduler. These RDDs will use the dependencies and
 * preferredLocations (if any) that are passed to them. They are deliberately not executable
 * so we can test that DAGScheduler does not try to execute RDDs locally.
 */
class MyRDD(
    sc: SparkContext,
    numPartitions: Int,
    dependencies: List[Dependency[_]], // dependencies of this RDD
    locations: Seq[Seq[String]] = Nil) // preferred locations per partition index (Nil = none)
  extends RDD[(Int, Int)](sc, dependencies) with Serializable {

  // Must never run: these RDDs exist only to drive the scheduler.
  override def compute(split: Partition, context: TaskContext): Iterator[(Int, Int)] =
    throw new RuntimeException("should not be reached")

  // One anonymous Partition per index in [0, numPartitions).
  override def getPartitions: Array[Partition] = (0 until numPartitions).map(i => new Partition {
    override def index: Int = i
  }).toArray

  // Preferred locations for the given split, when supplied
  // (isDefinedAt guards against indices beyond the provided list).
  override def getPreferredLocations(split: Partition): Seq[String] =
    if (locations.isDefinedAt(split.index)) locations(split.index) else Nil

  override def toString: String = "DAGSchedulerSuiteRDD " + id
}
/**
* DAGScheduler决定了运行Task的理想位置,并把这些信息传递给下层的TaskScheduler
* DAGScheduler还处理由于Shuffle数据丢失导致的失败,
* 这有可能需要重新提交运行之前的Stage(非Shuffle数据丢失导致的Task失败由TaskScheduler处理)
*/
class DAGSchedulerSuiteDummyException extends Exception
//DAGSchedulerDAGScheduler的主要任务是基于Stage构建DAG,
//决定每个任务的最佳位置 记录哪个RDD或者Stage输出被物化 面向stage的调度层
//为job生成以stage组成的DAG,提交TaskSet给TaskScheduler执行
//重新提交shuffle输出丢失的stage
class DAGSchedulerSuite
extends SparkFunSuite with BeforeAndAfter with LocalSparkContext with Timeouts {
val conf = new SparkConf
/**
* Set of TaskSets the DAGScheduler has requested executed.
* 请求执行器设置DAGScheduler的TaskSets
* */
//Buffer提供了一组创建缓冲区值的操作
val taskSets = scala.collection.mutable.Buffer[TaskSet]()
/** Stages for which the DAGScheduler has called TaskScheduler.cancelTasks(). */
//调用TaskScheduler.cancelTasks()任务
val cancelledStages = new HashSet[Int]()
val taskScheduler = new TaskScheduler() {
override def rootPool: Pool = null
override def schedulingMode: SchedulingMode = SchedulingMode.NONE
override def start() = {}
override def stop() = {}
override def executorHeartbeatReceived(execId: String, taskMetrics: Array[(Long, TaskMetrics)],
blockManagerId: BlockManagerId): Boolean = true
override def submitTasks(taskSet: TaskSet) = {
// normally done by TaskSetManager
// TaskSetManager正常完成
taskSet.tasks.foreach(_.epoch = mapOutputTracker.getEpoch)
taskSets += taskSet
}
//取消任务
override def cancelTasks(stageId: Int, interruptThread: Boolean) {
cancelledStages += stageId
}
override def setDAGScheduler(dagScheduler: DAGScheduler) = {}
override def defaultParallelism() = 2 //默认并发数
override def executorLost(executorId: String, reason: ExecutorLossReason): Unit = {}
//None被声明为一个对象,而不是一个类,在没有值的时候,使用None,如果有值可以引用,就使用Some来包含这个值,都是Option的子类
override def applicationAttemptId(): Option[String] = None
}
/**
* Length of time to wait while draining listener events.
* 侦听事件等待的超时时间
* */
val WAIT_TIMEOUT_MILLIS = 10000//毫秒
val sparkListener = new SparkListener() {
val submittedStageInfos = new HashSet[StageInfo]
val successfulStages = new HashSet[Int]
val failedStages = new ArrayBuffer[Int]
val stageByOrderOfExecution = new ArrayBuffer[Int]
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted) {
submittedStageInfos += stageSubmitted.stageInfo
}
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted) {
//StageInfo
val stageInfo = stageCompleted.stageInfo
//阶段排序的执行器
stageByOrderOfExecution += stageInfo.stageId
println("==stageInfo=="+stageInfo.stageId+"==name=="+stageInfo.name+"==attemptId=="+stageInfo.attemptId+"=numTasks="+stageInfo.numTasks
+"==rddInfos=="+stageInfo.rddInfos.size+"==parentIds=="+stageInfo.parentIds)
stageInfo.rddInfos.foreach(a=> println("rddInfo:"+a.id+"==name=="+a.name+"=numPartitions="+a.numPartitions+"==isCached=="+a.isCached))
if (stageInfo.failureReason.isEmpty) {
//完成的Stage
successfulStages += stageInfo.stageId
} else {
//失败的Stage
failedStages += stageInfo.stageId
}
}
}
//主要用于跟踪Map阶段任务的输出状态,此状态便于Reduce阶段任务获取地址及中间输出结果
var mapOutputTracker: MapOutputTrackerMaster = null
var scheduler: DAGScheduler = null
var dagEventProcessLoopTester: DAGSchedulerEventProcessLoop = null
/**
* Set of cache locations to return from our mock BlockManagerMaster.
* 设置缓存的位置,返回我们模拟blockmanagermaster
* Keys are (rdd ID, partition ID). Anything not present will return an empty
* 键(RDD ID,分区ID),任何不存在的东西都返回一个缓存位置的空列表
* list of cache locations silently.
*/
val cacheLocations = new HashMap[(Int, Int), Seq[BlockManagerId]]
// stub out BlockManagerMaster.getLocations to use our cacheLocations
//我们使用cacheLocations获得位置
val blockManagerMaster = new BlockManagerMaster(null, conf, true) {
override def getLocations(blockIds: Array[BlockId]): IndexedSeq[Seq[BlockManagerId]] = {
var test=null
blockIds.map {
_.asRDDId.map(id =>
(id.rddId -> id.splitIndex)
).flatMap(key => cacheLocations.get(key)).
getOrElse(Seq())
}.toIndexedSeq
}
override def removeExecutor(execId: String) {
// don't need to propagate to the driver, which we don't have
//不需要传播到驱动程序,我们没有
}
}
/**
* The list of results that DAGScheduler has collected, keyed by partition index.
* */
val results = new HashMap[Int, Any]()
// Most recent job failure reported through jobListener; null when no failure yet.
var failure: Exception = _
// Default listener used by submit(): records each partition's result and any failure.
val jobListener = new JobListener() {
override def taskSucceeded(index: Int, result: Any) = results.put(index, result)
override def jobFailed(exception: Exception) = { failure = exception }
}
// Per-test setup: fresh SparkContext, cleared listener/tracking state, and a new
// DAGScheduler wired to the mock taskScheduler and blockManagerMaster.
before {
sc = new SparkContext("local", "DAGSchedulerSuite")
sparkListener.submittedStageInfos.clear()
sparkListener.successfulStages.clear()
sparkListener.failedStages.clear()
failure = null
sc.addSparkListener(sparkListener)
taskSets.clear()
cancelledStages.clear()
cacheLocations.clear()
results.clear()
mapOutputTracker = new MapOutputTrackerMaster(conf)
scheduler = new DAGScheduler(
sc,
taskScheduler,
sc.listenerBus,
mapOutputTracker,
blockManagerMaster,
sc.env)
dagEventProcessLoopTester = new DAGSchedulerEventProcessLoopTester(scheduler)
}
// Per-test teardown: stop the scheduler created in before {}.
after {
scheduler.stop()
}
// Suite-level teardown; delegates to the parent suite's cleanup.
override def afterAll() {
super.afterAll()
}
/**
* Type of RDD we use for testing. Note that we should never call the real RDD compute methods.
* This is a pair RDD type so it can always be used in ShuffleDependencies.
*/
type PairOfIntsRDD = RDD[(Int, Int)]
/**
* Process the supplied event as if it were the top of the DAGScheduler event queue, expecting
* the scheduler not to exit.
*
* After processing the event, submit waiting stages as is done on most iterations of the
* DAGScheduler event loop.
*/
private def runEvent(event: DAGSchedulerEvent) {
dagEventProcessLoopTester.post(event)
}
/**
* When we submit dummy Jobs, this is the compute function we supply. Except in a local test
* below, we do not expect this function to ever be executed; instead, we will return results
* directly through CompletionEvents.
*/
private val jobComputeFunc = (context: TaskContext, it: Iterator[(_)]) =>
it.next.asInstanceOf[Tuple2[_, _]]._1
/**
 * Send the given CompletionEvent messages for the tasks in the TaskSet: task i is completed
 * with results(i). `results` may be shorter than the task list, in which case the remaining
 * tasks are left incomplete.
 */
private def complete(taskSet: TaskSet, results: Seq[(TaskEndReason, Any)]) {
  assert(taskSet.tasks.size >= results.size)
  for ((result, i) <- results.zipWithIndex) {
    if (i < taskSet.tasks.size) {
      // result._1 is the TaskEndReason, result._2 the task's result value
      runEvent(CompletionEvent(
        taskSet.tasks(i), result._1, result._2, null, createFakeTaskInfo(), null))
    }
  }
}
/**
 * Like complete(), but attaches a single accumulator update (accumId -> 1)
 * to every CompletionEvent that is sent.
 */
private def completeWithAccumulator(accumId: Long, taskSet: TaskSet,
    results: Seq[(TaskEndReason, Any)]) {
  assert(taskSet.tasks.size >= results.size)
  results.zipWithIndex.foreach { case ((reason, value), idx) =>
    if (idx < taskSet.tasks.size) {
      runEvent(CompletionEvent(taskSet.tasks(idx), reason, value,
        Map[Long, Any]((accumId, 1)), createFakeTaskInfo(), null))
    }
  }
}
/**
* Sends the rdd to the scheduler for scheduling and returns the job id.
* @param rdd the RDD whose partitions should be computed
* @param partitions indices of the partitions to compute
* @param func compute function (never actually executed; see jobComputeFunc)
* @param listener receives per-partition results / job failure
* @param properties job-level properties (e.g. for scheduling pools), may be null
* @return the newly allocated job id
*/
private def submit(
rdd: RDD[_],
partitions: Array[Int],
func: (TaskContext, Iterator[_]) => _ = jobComputeFunc,
listener: JobListener = jobListener,
properties: Properties = null): Int = {
val jobId = scheduler.nextJobId.getAndIncrement()
runEvent(JobSubmitted(jobId, rdd, func, partitions, CallSite("", ""), listener, properties))
jobId
}
/**
* Sends TaskSetFailed to the scheduler, simulating a task-set failure with the given message.
* */
private def failed(taskSet: TaskSet, message: String) {
runEvent(TaskSetFailed(taskSet, message, None))
}
/**
* Sends JobCancelled to the DAG scheduler for the given job id.
* */
private def cancel(jobId: Int) {
runEvent(JobCancelled(jobId))
}
// A parent stage must be assigned a lower stage id than its child stage (SPARK-3353).
test("[SPARK-3353] parent stage should have lower stage id") {
sparkListener.stageByOrderOfExecution.clear()
// This creates two stages: a shuffle map stage and a result stage.
sc.parallelize(1 to 10).map(x => (x, x)).reduceByKey(_ + _, 4).count()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.stageByOrderOfExecution.length === 2)
//println(sparkListener.stageByOrderOfExecution(0)+"===="+sparkListener.stageByOrderOfExecution(1))
assert(sparkListener.stageByOrderOfExecution(0) < sparkListener.stageByOrderOfExecution(1))
}
test("zero split job") {//a job over an RDD with zero partitions yields no results
var numResults = 0
val fakeListener = new JobListener() {//counts successes; rethrows failures
override def taskSucceeded(partition: Int, value: Any) = numResults += 1
override def jobFailed(exception: Exception) = throw exception
}
val jobId = submit(new MyRDD(sc, 0, Nil), Array(), listener = fakeListener)
println("jobId:"+jobId)
assert(numResults === 0)
cancel(jobId)
}
test("run trivial job") {//simplest single-stage, single-task job
submit(new MyRDD(sc, 1, Nil), Array(0))
complete(taskSets(0), List((Success, 42)))
// jobListener records the result for partition 0
assert(results === Map(0 -> 42))
// scheduler bookkeeping should be fully cleaned up afterwards
assertDataStructuresEmpty()
}
test("run trivial job w/ dependency") {//one-to-one dependency collapses into one stage
val baseRdd = new MyRDD(sc, 1, Nil)
val finalRdd = new MyRDD(sc, 1, List(new OneToOneDependency(baseRdd)))
submit(finalRdd, Array(0))
// Both RDDs are pipelined into a single task set.
complete(taskSets(0), Seq((Success, 42)))
assert(results === Map(0 -> 42))
// scheduler bookkeeping should be fully cleaned up afterwards
assertDataStructuresEmpty()
}
test("cache location preferences w/ dependency") {//cached parent drives task locality
val baseRdd = new MyRDD(sc, 1, Nil).cache()
val finalRdd = new MyRDD(sc, 1, List(new OneToOneDependency(baseRdd)))
// Pretend partition 0 of baseRdd is cached on hostA and hostB.
cacheLocations(baseRdd.id -> 0) =
Seq(makeBlockManagerId("hostA"), makeBlockManagerId("hostB"))
submit(finalRdd, Array(0))
val taskSet = taskSets(0)
assertLocations(taskSet, Seq(Seq("hostA", "hostB")))
complete(taskSet, Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty()
}
test("regression test for getCacheLocs") {//getCacheLocs must report hosts per partition
val rdd = new MyRDD(sc, 3, Nil).cache()//three partitions
//key is (rdd.id, partition index)
cacheLocations(rdd.id -> 0) =
Seq(makeBlockManagerId("hostA"), makeBlockManagerId("hostB"))
cacheLocations(rdd.id -> 1) =
Seq(makeBlockManagerId("hostB"), makeBlockManagerId("hostC"))
cacheLocations(rdd.id -> 2) =
Seq(makeBlockManagerId("hostC"), makeBlockManagerId("hostD"))
// Locations should come back per partition, in partition order.
val locs = scheduler.getCacheLocs(rdd).map(_.map(_.host))
assert(locs === Seq(Seq("hostA", "hostB"), Seq("hostB", "hostC"), Seq("hostC", "hostD")))
}
/**
* This test ensures that if a particular RDD is cached, RDDs earlier in the dependency chain
* are not computed. It constructs the following chain of dependencies:
* +---+ shuffle +---+ +---+ +---+
* | A |<--------| B |<---| C |<---| D |
* +---+ +---+ +---+ +---+
* Here, B is derived from A by performing a shuffle, C has a one-to-one dependency on B,
* and D similarly has a one-to-one dependency on C. If none of the RDDs were cached, this
* set of RDDs would result in a two stage job: one ShuffleMapStage, and a ResultStage that
* reads the shuffled data from RDD A. This test ensures that if C is cached, the scheduler
* doesn't perform a shuffle, and instead computes the result using a single ResultStage
* that reads C's cached data.
*/
test("getMissingParentStages should consider all ancestor RDDs' cache statuses") {
val rddA = new MyRDD(sc, 1, Nil)
val rddB = new MyRDD(sc, 1, List(new ShuffleDependency(rddA, null)))
val rddC = new MyRDD(sc, 1, List(new OneToOneDependency(rddB))).cache()
val rddD = new MyRDD(sc, 1, List(new OneToOneDependency(rddC)))
cacheLocations(rddC.id -> 0) =
Seq(makeBlockManagerId("hostA"), makeBlockManagerId("hostB"))
submit(rddD, Array(0))
assert(scheduler.runningStages.size === 1)
// Make sure that the scheduler is running the final result stage.
// Because C is cached, the shuffle map stage to compute A does not need to be run.
assert(scheduler.runningStages.head.isInstanceOf[ResultStage])
}
// Preferred-locations lookup must not traverse the dependency DAG exponentially.
test("avoid exponential blowup when getting preferred locs list") {
// Build up a complex dependency graph with repeated zip operations, without preferred locations
var rdd: RDD[_] = new MyRDD(sc, 1, Nil)
(1 to 30).foreach(_ => rdd = rdd.zip(rdd))
// getPreferredLocs runs quickly, indicating that exponential graph traversal is avoided.
failAfter(10 seconds) {
// getPreferredLocs returns the hosts holding each block; multiple replicas
// would yield multiple addresses.
val preferredLocs = scheduler.getPreferredLocs(rdd, 0)
// No preferred locations are returned.
assert(preferredLocs.length === 0)
}
}
test("unserializable task") {//a non-serializable RDD must abort the job, not hang it
val unserializableRdd = new MyRDD(sc, 1, Nil) {
class UnserializableClass
val unserializable = new UnserializableClass
}
submit(unserializableRdd, Array(0))
assert(failure.getMessage.startsWith(//job aborted because the stage failed
"Job aborted due to stage failure: Task not serializable:"))
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty()
}
test("trivial job failure") {//explicit task-set failure must fail the job
submit(new MyRDD(sc, 1, Nil), Array(0))
failed(taskSets(0), "some failure")
// The stage failure message propagates into the job-abort message.
assert(failure.getMessage === "Job aborted due to stage failure: some failure")
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty()
}
test("trivial job cancellation") {//cancelling a running job fails its stage
val rdd = new MyRDD(sc, 1, Nil)
val jobId = submit(rdd, Array(0))
cancel(jobId)
assert(failure.getMessage === s"Job $jobId cancelled ")
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.contains(0))
assert(sparkListener.failedStages.size === 1)
assertDataStructuresEmpty()
}
test("job cancellation no-kill backend") {//cancellation against a backend without killTask
// make sure that the DAGScheduler doesn't crash when the TaskScheduler
// doesn't implement killTask()
val noKillTaskScheduler = new TaskScheduler() {
override def rootPool: Pool = null
override def schedulingMode: SchedulingMode = SchedulingMode.NONE
override def start(): Unit = {}
override def stop(): Unit = {}
override def submitTasks(taskSet: TaskSet): Unit = {
taskSets += taskSet
}
override def cancelTasks(stageId: Int, interruptThread: Boolean) {
throw new UnsupportedOperationException
}
override def setDAGScheduler(dagScheduler: DAGScheduler): Unit = {}
override def defaultParallelism(): Int = 2 //default parallelism for this stub
override def executorHeartbeatReceived(
execId: String,
taskMetrics: Array[(Long, TaskMetrics)],
blockManagerId: BlockManagerId): Boolean = true
override def executorLost(executorId: String, reason: ExecutorLossReason): Unit = {}
override def applicationAttemptId(): Option[String] = None
}
val noKillScheduler = new DAGScheduler(
sc,
noKillTaskScheduler,
sc.listenerBus,
mapOutputTracker,
blockManagerMaster,
sc.env)
dagEventProcessLoopTester = new DAGSchedulerEventProcessLoopTester(noKillScheduler)
val jobId = submit(new MyRDD(sc, 1, Nil), Array(0))
cancel(jobId)
// Because the job wasn't actually cancelled, we shouldn't have received a failure message.
assert(failure === null)
// When the task set completes normally, state should be correctly updated.
complete(taskSets(0), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty()
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.isEmpty)
assert(sparkListener.successfulStages.contains(0))
}
test("run trivial shuffle") {//two-map-task shuffle feeding a single reduce task
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
println("shuffleId:"+shuffleId)
val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
submit(reduceRdd, Array(0))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(a=>{
println(a._1+"==="+a._2)
}
)
// The tracker must know both map output locations before the reduce stage runs.
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1).toSet ===
HashSet(makeBlockManagerId("hostA"), makeBlockManagerId("hostB")))
complete(taskSets(1), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty()
}
// A fetch failure must trigger re-running the map stage, after which the job completes.
test("run trivial shuffle with fetch failure") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", reduceRdd.partitions.size)),
(Success, makeMapStatus("hostB", reduceRdd.partitions.size))))
//the 2nd ResultTask failed
complete(taskSets(1), Seq(
(Success, 42),
(FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"), null)))
// this will get called
// blockManagerMaster.removeExecutor("exec-hostA")
// ask the scheduler to try it again
scheduler.resubmitFailedStages()
// have the 2nd attempt pass
complete(taskSets(2), Seq((Success, makeMapStatus("hostA", reduceRdd.partitions.size))))
// we can see both result blocks now
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1.host).toSet ===
HashSet("hostA", "hostB"))
complete(taskSets(3), Seq((Success, 43)))
assert(results === Map(0 -> 42, 1 -> 43))
assertDataStructuresEmpty()
}
// Multiple fetch failures for the same stage must not produce duplicate failure events.
test("trivial shuffle with multiple fetch failures") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", reduceRdd.partitions.size)),
(Success, makeMapStatus("hostB", reduceRdd.partitions.size))))
// The MapOutputTracker should know about both map output locations.
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1.host).toSet ===
HashSet("hostA", "hostB"))
// The first result task fails, with a fetch failure for the output from the first mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.contains(1))
// The second ResultTask fails, with a fetch failure for the output from the second mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 1, 1, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
// The SparkListener should not receive redundant failure events.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.size == 1)
}
/**
* This tests the case where another FetchFailed comes in while the map stage is getting
* re-run.
*/
test("late fetch failures don't cause multiple concurrent attempts for the same map stage") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
val mapStageId = 0
// Counts how many times the map stage has been submitted so far.
def countSubmittedMapStageAttempts(): Int = {
sparkListener.submittedStageInfos.count(_.stageId == mapStageId)
}
// The map stage should have been submitted.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(countSubmittedMapStageAttempts() === 1)
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 2)),
(Success, makeMapStatus("hostB", 2))))
// The MapOutputTracker should know about both map output locations.
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1.host).toSet ===
HashSet("hostA", "hostB"))
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 1).map(_._1.host).toSet ===
HashSet("hostA", "hostB"))
// The first result task fails, with a fetch failure for the output from the first mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.contains(1))
// Trigger resubmission of the failed map stage.
runEvent(ResubmitFailedStages)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
// Another attempt for the map stage should have been submitted, resulting in 2 total attempts.
assert(countSubmittedMapStageAttempts() === 2)
// The second ResultTask fails, with a fetch failure for the output from the second mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(1),
FetchFailed(makeBlockManagerId("hostB"), shuffleId, 1, 1, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
// Another ResubmitFailedStages event should not result in another attempt for the map
// stage being run concurrently.
// NOTE: the actual ResubmitFailedStages may get called at any time during this, but it
// shouldn't effect anything -- our calling it just makes *SURE* it gets called between the
// desired event and our check.
runEvent(ResubmitFailedStages)
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(countSubmittedMapStageAttempts() === 2)
}
/**
* This tests the case where a late FetchFailed comes in after the map stage has finished getting
* retried and a new reduce stage starts running.
*/
test("extremely late fetch failures don't cause multiple concurrent attempts for " +
"the same stage") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
// Counts how many times the reduce stage (stage 1) has been submitted.
def countSubmittedReduceStageAttempts(): Int = {
sparkListener.submittedStageInfos.count(_.stageId == 1)
}
// Counts how many times the map stage (stage 0) has been submitted.
def countSubmittedMapStageAttempts(): Int = {
sparkListener.submittedStageInfos.count(_.stageId == 0)
}
// The map stage should have been submitted.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(countSubmittedMapStageAttempts() === 1)
// Complete the map stage.
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 2)),
(Success, makeMapStatus("hostB", 2))))
// The reduce stage should have been submitted.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(countSubmittedReduceStageAttempts() === 1)
// The first result task fails, with a fetch failure for the output from the first mapper.
runEvent(CompletionEvent(
taskSets(1).tasks(0),
FetchFailed(makeBlockManagerId("hostA"), shuffleId, 0, 0, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
// Trigger resubmission of the failed map stage and finish the re-started map task.
runEvent(ResubmitFailedStages)
complete(taskSets(2), Seq((Success, makeMapStatus("hostA", 1))))
// Because the map stage finished, another attempt for the reduce stage should have been
// submitted, resulting in 2 total attempts for each the map and the reduce stage.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(countSubmittedMapStageAttempts() === 2)
assert(countSubmittedReduceStageAttempts() === 2)
// A late FetchFailed arrives from the second task in the original reduce stage.
runEvent(CompletionEvent(
taskSets(1).tasks(1),
FetchFailed(makeBlockManagerId("hostB"), shuffleId, 1, 1, "ignored"),
null,
Map[Long, Any](),
createFakeTaskInfo(),
null))
// Running ResubmitFailedStages shouldn't result in any more attempts for the map stage, because
// the FetchFailed should have been ignored
runEvent(ResubmitFailedStages)
// The FetchFailed from the original reduce stage should be ignored.
assert(countSubmittedMapStageAttempts() === 2)
}
// Map task completions carrying a stale epoch must be ignored by the scheduler.
test("ignore late map task completions") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
// pretend we were told hostA went away
val oldEpoch = mapOutputTracker.getEpoch
runEvent(ExecutorLost("exec-hostA"))//losing the executor bumps the epoch
val newEpoch = mapOutputTracker.getEpoch
assert(newEpoch > oldEpoch)
val taskSet = taskSets(0)
// should be ignored for being too old
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostA",
reduceRdd.partitions.size), null, createFakeTaskInfo(), null))
// should work because it's a non-failed host
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostB",
reduceRdd.partitions.size), null, createFakeTaskInfo(), null))
// should be ignored for being too old
runEvent(CompletionEvent(taskSet.tasks(0), Success, makeMapStatus("hostA",
reduceRdd.partitions.size), null, createFakeTaskInfo(), null))
// should work because it's a new epoch
taskSet.tasks(1).epoch = newEpoch
runEvent(CompletionEvent(taskSet.tasks(1), Success, makeMapStatus("hostA",
reduceRdd.partitions.size), null, createFakeTaskInfo(), null))
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1).toSet ===
HashSet(makeBlockManagerId("hostB"), makeBlockManagerId("hostA")))
complete(taskSets(1), Seq((Success, 42), (Success, 43)))
assert(results === Map(0 -> 42, 1 -> 43))
assertDataStructuresEmpty()
}
// A failure in the map stage must abort the whole job.
test("run shuffle with map stage failure") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val reduceRdd = new MyRDD(sc, 2, List(shuffleDep))
submit(reduceRdd, Array(0, 1))
// Fail the map stage. This should cause the entire job to fail.
val stageFailureMessage = "Exception failure in map stage"
failed(taskSets(0), stageFailureMessage)
assert(failure.getMessage === s"Job aborted due to stage failure: $stageFailureMessage")
// Listener bus should get told about the map stage failing, but not the reduce stage
// (since the reduce stage hasn't been started yet).
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.failedStages.toSet === Set(0))
assertDataStructuresEmpty()
}
/**
* Makes sure that failures of stage used by multiple jobs are correctly handled.
*
* This test creates the following dependency graph:
* shuffleMapRdd1 shuffleMapRDD2
* | \ |
* | \ |
* | \ |
* | \ |
* reduceRdd1 reduceRdd2
*
* We start both shuffleMapRdds and then fail shuffleMapRdd1. As a result, the job listeners for
* reduceRdd1 and reduceRdd2 should both be informed that the job failed. shuffleMapRDD2 should
* also be cancelled, because it is only used by reduceRdd2 and reduceRdd2 cannot complete
* without shuffleMapRdd1.
*/
test("failure of stage used by two jobs") {
val shuffleMapRdd1 = new MyRDD(sc, 2, Nil)
val shuffleDep1 = new ShuffleDependency(shuffleMapRdd1, null)
val shuffleMapRdd2 = new MyRDD(sc, 2, Nil)
val shuffleDep2 = new ShuffleDependency(shuffleMapRdd2, null)
val reduceRdd1 = new MyRDD(sc, 2, List(shuffleDep1))
val reduceRdd2 = new MyRDD(sc, 2, List(shuffleDep1, shuffleDep2))
// We need to make our own listeners for this test, since by default submit uses the same
// listener for all jobs, and here we want to capture the failure for each job separately.
class FailureRecordingJobListener() extends JobListener {
var failureMessage: String = _
override def taskSucceeded(index: Int, result: Any) {}
override def jobFailed(exception: Exception): Unit = { failureMessage = exception.getMessage }
}
val listener1 = new FailureRecordingJobListener()
val listener2 = new FailureRecordingJobListener()
submit(reduceRdd1, Array(0, 1), listener = listener1)
submit(reduceRdd2, Array(0, 1), listener = listener2)
val stageFailureMessage = "Exception failure in map stage"
failed(taskSets(0), stageFailureMessage)
assert(cancelledStages.toSet === Set(0, 2))
// Make sure the listeners got told about both failed stages.
sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
assert(sparkListener.successfulStages.isEmpty)
assert(sparkListener.failedStages.toSet === Set(0, 2))
assert(listener1.failureMessage === s"Job aborted due to stage failure: $stageFailureMessage")
assert(listener2.failureMessage === s"Job aborted due to stage failure: $stageFailureMessage")
assertDataStructuresEmpty()
}
// Assert that a TaskSet carries the expected "testProperty" value and priority.
def checkJobPropertiesAndPriority(taskSet: TaskSet, expected: String, priority: Int): Unit = {
assert(taskSet.properties != null)
assert(taskSet.properties.getProperty("testProperty") === expected)
assert(taskSet.priority === priority)
}
// Submits two jobs that share intermediate stages, cancels the first, completes the
// shared stage's first task set, and returns the first shuffle dependency for further use.
def launchJobsThatShareStageAndCancelFirst(): ShuffleDependency[Int, Int, Nothing] = {
val baseRdd = new MyRDD(sc, 1, Nil)
val shuffleDep1 = new ShuffleDependency(baseRdd, new HashPartitioner(1))
val intermediateRdd = new MyRDD(sc, 1, List(shuffleDep1))
val shuffleDep2 = new ShuffleDependency(intermediateRdd, new HashPartitioner(1))
val finalRdd1 = new MyRDD(sc, 1, List(shuffleDep2))
val finalRdd2 = new MyRDD(sc, 1, List(shuffleDep2))
val job1Properties = new Properties()
val job2Properties = new Properties()
job1Properties.setProperty("testProperty", "job1")
job2Properties.setProperty("testProperty", "job2")
// Run jobs 1 & 2, both referencing the same stage, then cancel job1.
// Note that we have to submit job2 before we cancel job1 to have them actually share
// *Stages*, and not just shuffle dependencies, due to skipped stages (at least until
// we address SPARK-10193.)
val jobId1 = submit(finalRdd1, Array(0), properties = job1Properties)
val jobId2 = submit(finalRdd2, Array(0), properties = job2Properties)
assert(scheduler.activeJobs.nonEmpty)
val testProperty1 = scheduler.jobIdToActiveJob(jobId1).properties.getProperty("testProperty")
// remove job1 as an ActiveJob
cancel(jobId1)
// job2 should still be running
assert(scheduler.activeJobs.nonEmpty)
val testProperty2 = scheduler.jobIdToActiveJob(jobId2).properties.getProperty("testProperty")
assert(testProperty1 != testProperty2)
// NB: This next assert isn't necessarily the "desired" behavior; it's just to document
// the current behavior. We've already submitted the TaskSet for stage 0 based on job1, but
// even though we have cancelled that job and are now running it because of job2, we haven't
// updated the TaskSet's properties. Changing the properties to "job2" is likely the more
// correct behavior.
val job1Id = 0 // TaskSet priority for Stages run with "job1" as the ActiveJob
checkJobPropertiesAndPriority(taskSets(0), "job1", job1Id)
complete(taskSets(0), Seq((Success, makeMapStatus("hostA", 1))))
shuffleDep1
}
/**
* Makes sure that tasks for a stage used by multiple jobs are submitted with the properties of a
* later, active job if they were previously run under a job that is no longer active
*/
test("stage used by two jobs, the first no longer active (SPARK-6880)") {
launchJobsThatShareStageAndCancelFirst()
// The next check is the key for SPARK-6880. For the stage which was shared by both job1 and
// job2 but never had any tasks submitted for job1, the properties of job2 are now used to run
// the stage.
checkJobPropertiesAndPriority(taskSets(1), "job2", 1)
complete(taskSets(1), Seq((Success, makeMapStatus("hostA", 1))))
assert(taskSets(2).properties != null)
complete(taskSets(2), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assert(scheduler.activeJobs.isEmpty)
assertDataStructuresEmpty()
}
/**
* Makes sure that tasks for a stage used by multiple jobs are submitted with the properties of a
* later, active job if they were previously run under a job that is no longer active, even when
* there are fetch failures
*/
test("stage used by two jobs, some fetch failures, and the first job no longer active " +
"(SPARK-6880)") {
val shuffleDep1 = launchJobsThatShareStageAndCancelFirst()
val job2Id = 1 // TaskSet priority for Stages run with "job2" as the ActiveJob
// lets say there is a fetch failure in this task set, which makes us go back and
// run stage 0, attempt 1
complete(taskSets(1), Seq(
(FetchFailed(makeBlockManagerId("hostA"), shuffleDep1.shuffleId, 0, 0, "ignored"), null)))
scheduler.resubmitFailedStages()
// stage 0, attempt 1 should have the properties of job2
assert(taskSets(2).stageId === 0)
assert(taskSets(2).stageAttemptId === 1)
checkJobPropertiesAndPriority(taskSets(2), "job2", job2Id)
// run the rest of the stages normally, checking that they have the correct properties
complete(taskSets(2), Seq((Success, makeMapStatus("hostA", 1))))
checkJobPropertiesAndPriority(taskSets(3), "job2", job2Id)
complete(taskSets(3), Seq((Success, makeMapStatus("hostA", 1))))
checkJobPropertiesAndPriority(taskSets(4), "job2", job2Id)
complete(taskSets(4), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assert(scheduler.activeJobs.isEmpty)
assertDataStructuresEmpty()
}
// An out-of-band executor loss during a shuffle must cause the lost map output to be recomputed.
test("run trivial shuffle with out-of-band failure and retry") {
val shuffleMapRdd = new MyRDD(sc, 2, Nil)
val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
val shuffleId = shuffleDep.shuffleId
val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
submit(reduceRdd, Array(0))
// blockManagerMaster.removeExecutor("exec-hostA")
// pretend we were told hostA went away
runEvent(ExecutorLost("exec-hostA"))
// DAGScheduler will immediately resubmit the stage after it appears to have no pending tasks
// rather than marking it is as failed and waiting.
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostB", 1))))
// have hostC complete the resubmitted task
complete(taskSets(1), Seq((Success, makeMapStatus("hostC", 1))))
assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1).toSet ===
HashSet(makeBlockManagerId("hostC"), makeBlockManagerId("hostB")))
complete(taskSets(2), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty()
}
test("recursive shuffle failures") {//a failure two shuffles deep re-runs both parent stages
val shuffleOneRdd = new MyRDD(sc, 2, Nil)
val shuffleDepOne = new ShuffleDependency(shuffleOneRdd, null)
val shuffleTwoRdd = new MyRDD(sc, 2, List(shuffleDepOne))
val shuffleDepTwo = new ShuffleDependency(shuffleTwoRdd, null)
val finalRdd = new MyRDD(sc, 1, List(shuffleDepTwo))
submit(finalRdd, Array(0))
// have the first stage complete normally
complete(taskSets(0), Seq(
(Success, makeMapStatus("hostA", 2)),
(Success, makeMapStatus("hostB", 2))))
// have the second stage complete normally
complete(taskSets(1), Seq(
(Success, makeMapStatus("hostA", 1)),
(Success, makeMapStatus("hostC", 1))))
// fail the third stage because hostA went down
complete(taskSets(2), Seq(
(FetchFailed(makeBlockManagerId("hostA"), shuffleDepTwo.shuffleId, 0, 0, "ignored"), null)))
// TODO assert this:
// blockManagerMaster.removeExecutor("exec-hostA")
// have DAGScheduler try again
scheduler.resubmitFailedStages()
complete(taskSets(3), Seq((Success, makeMapStatus("hostA", 2))))
complete(taskSets(4), Seq((Success, makeMapStatus("hostA", 1))))
complete(taskSets(5), Seq((Success, 42)))
assert(results === Map(0 -> 42))
assertDataStructuresEmpty()
}
  // When a stage fails but its input RDD has cached copies elsewhere, the
  // scheduler should rerun only on the hosts holding the cache instead of
  // recomputing the whole lineage.
  test("cached post-shuffle") {
    val shuffleOneRdd = new MyRDD(sc, 2, Nil).cache()
    val shuffleDepOne = new ShuffleDependency(shuffleOneRdd, null)
    val shuffleTwoRdd = new MyRDD(sc, 2, List(shuffleDepOne)).cache()
    val shuffleDepTwo = new ShuffleDependency(shuffleTwoRdd, null)
    val finalRdd = new MyRDD(sc, 1, List(shuffleDepTwo))
    submit(finalRdd, Array(0))
    // Register cached copies of shuffleTwoRdd's partitions on hostD / hostC.
    cacheLocations(shuffleTwoRdd.id -> 0) = Seq(makeBlockManagerId("hostD"))
    cacheLocations(shuffleTwoRdd.id -> 1) = Seq(makeBlockManagerId("hostC"))
    // Complete stage 2.
    complete(taskSets(0), Seq(
      (Success, makeMapStatus("hostA", 2)),
      (Success, makeMapStatus("hostB", 2))))
    // Complete stage 1.
    complete(taskSets(1), Seq(
      (Success, makeMapStatus("hostA", 1)),
      (Success, makeMapStatus("hostB", 1))))
    // Pretend stage 0 failed because hostA went down.
    complete(taskSets(2), Seq(
      (FetchFailed(makeBlockManagerId("hostA"), shuffleDepTwo.shuffleId, 0, 0, "ignored"), null)))
    // TODO assert this:
    // blockManagerMaster.removeExecutor("exec-hostA")
    // DAGScheduler should notice the cached copy of the second shuffle and try to get it rerun.
    scheduler.resubmitFailedStages()
    assertLocations(taskSets(3), Seq(Seq("hostD")))
    // Allow hostD to recover.
    complete(taskSets(3), Seq((Success, makeMapStatus("hostD", 1))))
    complete(taskSets(4), Seq((Success, 42)))
    assert(results === Map(0 -> 42))
    assertDataStructuresEmpty()
  }
/* test("misbehaved accumulator should not crash DAGScheduler and SparkContext") {
val acc = new Accumulator[Int](0, new AccumulatorParam[Int] {
override def addAccumulator(t1: Int, t2: Int): Int = t1 + t2
override def zero(initialValue: Int): Int = 0
override def addInPlace(r1: Int, r2: Int): Int = {
throw new DAGSchedulerSuiteDummyException
}
})
// Run this on executors
//运行在执行器
sc.parallelize(1 to 10, 2).foreach { item => acc.add(1) }
// Make sure we can still run commands
//确保我们仍然可以运行命令
assert(sc.parallelize(1 to 10, 2).count() === 10)
}*/
  /**
   * The job will be failed on the first task throwing a DAGSchedulerSuiteDummyException.
   * Any subsequent task WILL throw a legitimate java.lang.UnsupportedOperationException.
   * If there are multiple tasks, there exists a race condition between the
   * SparkDriverExecutionExceptions and their differing causes as to which will
   * represent the result for the job...
   */
  test("misbehaved resultHandler should not crash DAGScheduler and SparkContext") {
    val e = intercept[SparkDriverExecutionException] {
      // Number of parallelized partitions implies the number of tasks of the job.
      val rdd = sc.parallelize(1 to 10, 2)
      sc.runJob[Int, Int](
        rdd,
        (context: TaskContext, iter: Iterator[Int]) => iter.size,
        // For a robust test assertion, limit the number of job tasks to 1; that is,
        // if there are multiple RDD partitions, use the id of any one partition,
        // say, the first partition id=0.
        Seq(0),
        (part: Int, result: Int) => throw new DAGSchedulerSuiteDummyException)
    }
    // The user's exception must surface as the cause of the driver-side failure.
    assert(e.getCause.isInstanceOf[DAGSchedulerSuiteDummyException])
    // Make sure we can still run commands on our SparkContext.
    assert(sc.parallelize(1 to 10, 2).count() === 10)
  }
// Exceptions thrown from getPartitions should not crash the DAGScheduler or the SparkContext.
  test("getPartitions exceptions should not crash DAGScheduler and SparkContext (SPARK-8606)") {
    val e1 = intercept[DAGSchedulerSuiteDummyException] {
      // getPartitions is invoked eagerly on the driver, so the exception is
      // thrown before any task is scheduled.
      val rdd = new MyRDD(sc, 2, Nil) {
        override def getPartitions: Array[Partition] = {
          throw new DAGSchedulerSuiteDummyException
        }
      }
      rdd.reduceByKey(_ + _, 1).count()
    }
    // Make sure we can still run commands.
    assert(sc.parallelize(1 to 10, 2).count() === 10)
  }
// Errors thrown from getPreferredLocations should not crash the DAGScheduler or the SparkContext.
  test("getPreferredLocations errors should not crash DAGScheduler and SparkContext (SPARK-8606)") {
    val e1 = intercept[SparkException] {
      val rdd = new MyRDD(sc, 2, Nil) {
        override def getPreferredLocations(split: Partition): Seq[String] = {
          throw new DAGSchedulerSuiteDummyException
        }
      }
      rdd.count()
    }
    // The wrapping SparkException must name the user exception that caused it.
    assert(e1.getMessage.contains(classOf[DAGSchedulerSuiteDummyException].getName))
    // Make sure we can still run commands.
    assert(sc.parallelize(1 to 10, 2).count() === 10)
  }
// An accumulator must not be counted again for a resubmitted result stage.
  test("accumulator not calculated for resubmitted result stage") {
    // Created just for registration; its accumulated value is inspected below.
    val accum = new Accumulator[Int](0, AccumulatorParam.IntAccumulatorParam)
    val finalRdd = new MyRDD(sc, 1, Nil)
    submit(finalRdd, Array(0))
    // Complete the same result task set twice, simulating a resubmission.
    completeWithAccumulator(accum.id, taskSets(0), Seq((Success, 42)))
    completeWithAccumulator(accum.id, taskSets(0), Seq((Success, 42)))
    assert(results === Map(0 -> 42))
    // The accumulator update must have been applied exactly once.
    val accVal = Accumulators.originals(accum.id).get.get.value
    assert(accVal === 1)
    assertDataStructuresEmpty()
  }
// Reduce tasks should be placed locally with the map output.
  ignore("reduce tasks should be placed locally with map output") {
    // Create a shuffleMapRdd with 1 partition.
    val shuffleMapRdd = new MyRDD(sc, 1, Nil)
    val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
    val shuffleId = shuffleDep.shuffleId
    val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
    submit(reduceRdd, Array(0))
    complete(taskSets(0), Seq(
      (Success, makeMapStatus("hostA", 1))))
    assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1).toSet ===
      HashSet(makeBlockManagerId("hostA")))
    // The reducer should run on the same host that the map task ran on.
    val reduceTaskSet = taskSets(1)
    assertLocations(reduceTaskSet, Seq(Seq("hostA")))
    complete(reduceTaskSet, Seq((Success, 42)))
    assert(results === Map(0 -> 42))
    assertDataStructuresEmpty()
  }
// Reduce task locality preferences should only include the machines with the largest map outputs.
  ignore("reduce task locality preferences should only include machines with largest map outputs") {
    val numMapTasks = 4
    // Create a shuffleMapRdd with more partitions.
    val shuffleMapRdd = new MyRDD(sc, numMapTasks, Nil)
    val shuffleDep = new ShuffleDependency(shuffleMapRdd, null)
    val shuffleId = shuffleDep.shuffleId
    val reduceRdd = new MyRDD(sc, 1, List(shuffleDep))
    submit(reduceRdd, Array(0))
    // Map output size grows with the host index: host1 -> 10, ..., host4 -> 40.
    val statuses = (1 to numMapTasks).map { i =>
      (Success, makeMapStatus("host" + i, 1, (10*i).toByte))
    }
    complete(taskSets(0), statuses)
    // Reducer should prefer the last 3 hosts as they have 20%, 30% and 40% of the data.
    val hosts = (1 to numMapTasks).map(i => "host" + i).reverse.take(numMapTasks - 1)
    val reduceTaskSet = taskSets(1)
    assertLocations(reduceTaskSet, Seq(hosts))
    complete(reduceTaskSet, Seq((Success, 42)))
    assert(results === Map(0 -> 42))
    assertDataStructuresEmpty()
  }
// Stages with both narrow and shuffle dependencies use the narrow ones for locality.
  test("stages with both narrow and shuffle dependencies use narrow ones for locality") {
    // Create an RDD that has both a shuffle dependency and a narrow dependency (e.g. for a join).
    val rdd1 = new MyRDD(sc, 1, Nil)
    val rdd2 = new MyRDD(sc, 1, Nil, locations = Seq(Seq("hostB")))
    val shuffleDep = new ShuffleDependency(rdd1, null)
    val narrowDep = new OneToOneDependency(rdd2)
    val shuffleId = shuffleDep.shuffleId
    val reduceRdd = new MyRDD(sc, 1, List(shuffleDep, narrowDep))
    submit(reduceRdd, Array(0))
    complete(taskSets(0), Seq(
      (Success, makeMapStatus("hostA", 1))))
    assert(mapOutputTracker.getMapSizesByExecutorId(shuffleId, 0).map(_._1).toSet ===
      HashSet(makeBlockManagerId("hostA")))
    // Reducer should run where RDD 2 has preferences, even though it also has a shuffle dep.
    val reduceTaskSet = taskSets(1)
    assertLocations(reduceTaskSet, Seq(Seq("hostB")))
    complete(reduceTaskSet, Seq((Success, 42)))
    assert(results === Map(0 -> 42))
    assertDataStructuresEmpty()
  }
// Spark exceptions should include the call site in the stack trace.
  test("Spark exceptions should include call site in stack trace") {
    val e = intercept[SparkException] {
      sc.parallelize(1 to 10, 2).map { _ => throw new RuntimeException("uh-oh!") }.count()
    }
    // Does not include the message, ONLY the stack trace.
    val stackTraceString = e.getStackTraceString
    // Should actually include the RDD operation that invoked the method:
    assert(stackTraceString.contains("org.apache.spark.rdd.RDD.count"))
    // Should include the FunSuite setup:
    assert(stackTraceString.contains("org.scalatest.FunSuite"))
  }
  /**
   * Assert that the supplied TaskSet has exactly the given hosts as its preferred locations.
   * Note that this checks only the host and not the executor ID.
   */
private def assertLocations(taskSet: TaskSet, hosts: Seq[Seq[String]]) {
assert(hosts.size === taskSet.tasks.size)
// preferredLocations对于data partition的位置偏好
val exp=taskSet.tasks.map(_.preferredLocations).zip(hosts)
for ((taskLocs, expectedLocs) <- taskSet.tasks.map(_.preferredLocations).zip(hosts)) {
/**
* taskLocs:Set(hostA, hostB)
* expectedLocs:Set(hostA, hostB)
*/
println("taskLocs:"+taskLocs.map(_.host).toSet)
println("expectedLocs:"+expectedLocs.toSet)
assert(taskLocs.map(_.host).toSet === expectedLocs.toSet)
}
}
  // Builds a MapStatus on `host` reporting `reduces` reduce partitions, each
  // with the same block size (`sizes`, default 2 bytes).
  private def makeMapStatus(host: String, reduces: Int, sizes: Byte = 2): MapStatus =
    MapStatus(makeBlockManagerId(host), Array.fill[Long](reduces)(sizes))
private def makeBlockManagerId(host: String): BlockManagerId =
BlockManagerId("exec-" + host, host, 12345)
  /**
   * Asserts that the scheduler's internal bookkeeping structures are all empty.
   */
  private def assertDataStructuresEmpty(): Unit = {
    // Every internal bookkeeping collection of the DAGScheduler must be empty
    // once all submitted jobs have finished or failed; a non-empty collection
    // here indicates a leak in the scheduler's cleanup paths.
    assert(scheduler.activeJobs.isEmpty)
    assert(scheduler.failedStages.isEmpty)
    assert(scheduler.jobIdToActiveJob.isEmpty)
    assert(scheduler.jobIdToStageIds.isEmpty)
    assert(scheduler.stageIdToStage.isEmpty)
    assert(scheduler.runningStages.isEmpty)
    assert(scheduler.shuffleToMapStage.isEmpty)
    assert(scheduler.waitingStages.isEmpty)
    assert(scheduler.outputCommitCoordinator.isEmpty)
  }
// Nothing in this test should break if the task info's fields are null, but
// OutputCommitCoordinator requires the task info itself to not be null.
//如果任务信息的字段为空,则此测试中的任何内容都不会中断,但OutputCommitCoordinator要求任务信息本身不为空
  private def createFakeTaskInfo(): TaskInfo = {
    // All-dummy fields: the tests never read them; only a non-null reference matters.
    val info = new TaskInfo(0, 0, 0, 0L, "", "", TaskLocality.ANY, false)
    info.finishTime = 1 // to prevent spurious errors in JobProgressListener
    info
  }
}
| tophua/spark1.52 | core/src/test/scala/org/apache/spark/scheduler/DAGSchedulerSuite.scala | Scala | apache-2.0 | 60,938 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts.models
import org.apache.commons.math3.analysis.{MultivariateFunction, MultivariateVectorFunction}
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer
import org.apache.commons.math3.optim.nonlinear.scalar.{ObjectiveFunction,
ObjectiveFunctionGradient}
import org.apache.commons.math3.optim.{InitialGuess, MaxEval, MaxIter, SimpleValueChecker}
import org.apache.commons.math3.random.RandomGenerator
import org.apache.spark.mllib.linalg.{DenseVector, Vector, Vectors}
object GARCH {
  /**
   * Fits a GARCH(1, 1) model to the given time series by maximizing the log
   * likelihood with a Fletcher-Reeves nonlinear conjugate gradient optimizer.
   *
   * @param ts The time series to fit the model to.
   * @return The model.
   */
  def fitModel(ts: Vector): GARCHModel = {
    val optimizer = new NonLinearConjugateGradientOptimizer(
      NonLinearConjugateGradientOptimizer.Formula.FLETCHER_REEVES,
      new SimpleValueChecker(1e-6, 1e-6))
    // Parameters are packed as (omega, alpha, beta) here and in the objective below.
    val gradient = new ObjectiveFunctionGradient(new MultivariateVectorFunction() {
      def value(params: Array[Double]): Array[Double] = {
        // NOTE(review): GARCHModel.gradient documents its return order as
        // (alpha, beta, omega), which does not match the (omega, alpha, beta)
        // packing used here and in the objective function — verify ordering.
        new GARCHModel(params(0), params(1), params(2)).gradient(ts)
      }
    })
    val objectiveFunction = new ObjectiveFunction(new MultivariateFunction() {
      def value(params: Array[Double]): Double = {
        new GARCHModel(params(0), params(1), params(2)).logLikelihood(ts)
      }
    })
    val initialGuess = new InitialGuess(Array(.2, .2, .2)) // TODO: make this smarter
    val maxIter = new MaxIter(10000)
    val maxEval = new MaxEval(10000)
    val optimal = optimizer.optimize(objectiveFunction, gradient, initialGuess, maxIter, maxEval)
    val params = optimal.getPoint
    new GARCHModel(params(0), params(1), params(2))
  }
}
object ARGARCH {
  /**
   * Fits an AR(1) + GARCH(1, 1) model to the given time series: first fits the
   * AR(1) mean structure, then fits a GARCH(1, 1) model to its residuals.
   *
   * @param ts The time series to fit the model to.
   * @return The model.
   */
  def fitModel(ts: Vector): ARGARCHModel = {
    val ar = Autoregression.fitModel(ts)
    // Strip the AR(1) structure, then model the remaining innovations.
    val residuals = ar.removeTimeDependentEffects(ts, Vectors.zeros(ts.size))
    val garch = GARCH.fitModel(residuals)
    new ARGARCHModel(
      c = ar.c,
      phi = ar.coefficients(0),
      omega = garch.omega,
      alpha = garch.alpha,
      beta = garch.beta)
  }
}
/**
 * A GARCH(1, 1) model with conditional variance
 *   h(i) = omega + alpha * eta(i - 1)^2 + beta * h(i - 1),
 * where eta(i) are the observed innovations.
 *
 * @param omega The constant term in the variance recursion.
 * @param alpha The weight of the previous squared innovation.
 * @param beta The weight of the previous variance.
 */
class GARCHModel(
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {

  /**
   * Returns the log likelihood of the parameters on the given time series.
   *
   * Based on http://www.unc.edu/~jbhill/Bollerslev_GARCH_1986.pdf
   */
  def logLikelihood(ts: Vector): Double = {
    var sum = 0.0
    iterateWithHAndEta(ts) { (i, h, eta, prevH, prevEta) =>
      sum += -.5 * math.log(h) - .5 * eta * eta / h
    }
    // Constant term covers the (ts.size - 1) observations the loop visits.
    sum + -.5 * math.log(2 * math.Pi) * (ts.size - 1)
  }

  /**
   * Find the gradient of the log likelihood with respect to the given time series.
   *
   * Based on http://www.unc.edu/~jbhill/Bollerslev_GARCH_1986.pdf
   *
   * NOTE(review): the array returned below is ordered (alpha, beta, omega),
   * but GARCH.fitModel packs its optimization parameters as
   * (omega, alpha, beta) — confirm this ordering is intended.
   *
   * @return a 3-element array containing the gradient for the alpha, beta, and omega parameters.
   */
  private[sparkts] def gradient(ts: Vector): Array[Double] = {
    var omegaGradient = 0.0
    var alphaGradient = 0.0
    var betaGradient = 0.0
    // Running derivatives dh/dtheta, carried across iterations via the beta term.
    var omegaDhdtheta = 0.0
    var alphaDhdtheta = 0.0
    var betaDhdtheta = 0.0
    iterateWithHAndEta(ts) { (i, h, eta, prevH, prevEta) =>
      omegaDhdtheta = 1 + beta * omegaDhdtheta
      alphaDhdtheta = prevEta * prevEta + beta * alphaDhdtheta
      betaDhdtheta = prevH + beta * betaDhdtheta
      val multiplier = (eta * eta / (h * h)) - (1 / h)
      omegaGradient += multiplier * omegaDhdtheta
      alphaGradient += multiplier * alphaDhdtheta
      betaGradient += multiplier * betaDhdtheta
    }
    Array(alphaGradient * .5, betaGradient * .5, omegaGradient * .5)
  }

  // Walks the series from index 1 maintaining the conditional variance
  // recursion h(i) = omega + alpha * ts(i-1)^2 + beta * h(i-1), handing
  // (i, h(i), ts(i), h(i-1), ts(i-1)) to `fn` at each step. h(0) starts at
  // the unconditional variance omega / (1 - alpha - beta).
  private def iterateWithHAndEta(ts: Vector)
      (fn: (Int, Double, Double, Double, Double) => Unit): Unit = {
    var prevH = omega / (1 - alpha - beta)
    var i = 1
    while (i < ts.size) {
      val h = omega + alpha * ts(i - 1) * ts(i - 1) + beta * prevH
      fn(i, h, ts(i), prevH, ts(i - 1))
      prevH = h
      i += 1
    }
  }

  // Standardizes the series: divides each observation by its model-implied
  // conditional standard deviation. Marked `override` for consistency with
  // addTimeDependentEffects (both are declared on TimeSeriesModel).
  override def removeTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    var prevEta = ts(0)
    var prevVariance = omega / (1.0 - alpha - beta)
    val destArr = dest.toArray
    destArr(0) = prevEta / math.sqrt(prevVariance)
    for (i <- 1 until ts.size) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val eta = ts(i)
      destArr(i) = eta / math.sqrt(variance)
      prevEta = eta
      prevVariance = variance
    }
    new DenseVector(destArr)
  }

  // Inverse of removeTimeDependentEffects: rescales a standardized series by
  // the model-implied conditional standard deviations.
  override def addTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    var prevVariance = omega / (1.0 - alpha - beta)
    var prevEta = ts(0) * math.sqrt(prevVariance)
    val destArr = dest.toArray
    destArr(0) = prevEta
    for (i <- 1 until ts.size) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val standardizedEta = ts(i)
      val eta = standardizedEta * math.sqrt(variance)
      destArr(i) = eta
      prevEta = eta
      prevVariance = variance
    }
    new DenseVector(destArr)
  }

  // Draws a random series of length n together with its conditional variances.
  // NOTE(review): ts(0) is never assigned and therefore stays 0.0 even though
  // an initial draw `eta` is computed — presumably ts(0) should be that draw;
  // confirm before relying on the first sample.
  private def sampleWithVariances(n: Int, rand: RandomGenerator): (Array[Double], Array[Double]) = {
    val ts = new Array[Double](n)
    val variances = new Array[Double](n)
    variances(0) = omega / (1 - alpha - beta)
    var eta = math.sqrt(variances(0)) * rand.nextGaussian()
    for (i <- 1 until n) {
      variances(i) = omega + beta * variances(i-1) + alpha * eta * eta
      eta = math.sqrt(variances(i)) * rand.nextGaussian()
      ts(i) = eta
    }
    (ts, variances)
  }

  /**
   * Samples a random time series of a given length with the properties of the model.
   *
   * @param n The length of the time series to sample.
   * @param rand The random generator used to generate the observations.
   * @return The sampled time series.
   */
  def sample(n: Int, rand: RandomGenerator): Array[Double] = sampleWithVariances(n, rand)._1
}
/**
 * An AR(1) + GARCH(1, 1) model, where
 *   y(i) = c + phi * y(i - 1) + eta(i),
 * and h(i), the variance of eta(i), is given by
 *   h(i) = omega + alpha * eta(i - 1) ** 2 + beta * h(i - 1)
 *
 * @param c The constant term.
 * @param phi The autoregressive term.
 * @param omega The constant term in the variance.
 * @param alpha The weight of the previous squared innovation in the variance.
 * @param beta The weight of the previous variance in the variance.
 */
class ARGARCHModel(
    val c: Double,
    val phi: Double,
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {

  // Standardizes the series: removes the AR(1) mean structure
  // (c + phi * y(i - 1)) and divides each innovation by its model-implied
  // conditional standard deviation.
  override def removeTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    var prevEta = ts(0) - c
    var prevVariance = omega / (1.0 - alpha - beta) // unconditional variance
    val destArr = dest.toArray
    destArr(0) = prevEta / math.sqrt(prevVariance)
    for (i <- 1 until ts.size) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val eta = ts(i) - c - phi * ts(i - 1)
      destArr(i) = eta / math.sqrt(variance)
      prevEta = eta
      prevVariance = variance
    }
    new DenseVector(destArr)
  }

  // Inverse of removeTimeDependentEffects: rebuilds the AR(1) + GARCH series
  // from standardized innovations. Note it reads back dest(i - 1), so the
  // reconstructed (not the input) series feeds the AR recursion.
  override def addTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    var prevVariance = omega / (1.0 - alpha - beta)
    var prevEta = ts(0) * math.sqrt(prevVariance)
    val destArr = dest.toArray
    destArr(0) = c + prevEta
    for (i <- 1 until ts.size) {
      val variance = omega + alpha * prevEta * prevEta + beta * prevVariance
      val standardizedEta = ts(i)
      val eta = standardizedEta * math.sqrt(variance)
      destArr(i) = c + phi * dest(i - 1) + eta
      prevEta = eta
      prevVariance = variance
    }
    new DenseVector(destArr)
  }

  // Draws a random series of length n plus the conditional variances used.
  // NOTE(review): ts(0) is never assigned and remains 0.0, yet ts(1) is built
  // from phi * ts(0) — presumably ts(0) should be c + (initial eta); confirm.
  private def sampleWithVariances(n: Int, rand: RandomGenerator): (Array[Double], Array[Double]) = {
    val ts = new Array[Double](n)
    val variances = new Array[Double](n)
    variances(0) = omega / (1 - alpha - beta)
    var eta = math.sqrt(variances(0)) * rand.nextGaussian()
    for (i <- 1 until n) {
      variances(i) = omega + beta * variances(i-1) + alpha * eta * eta
      eta = math.sqrt(variances(i)) * rand.nextGaussian()
      ts(i) = c + phi * ts(i - 1) + eta
    }
    (ts, variances)
  }

  /**
   * Samples a random time series of a given length with the properties of the model.
   *
   * @param n The length of the time series to sample.
   * @param rand The random generator used to generate the observations.
   * @return The sampled time series.
   */
  def sample(n: Int, rand: RandomGenerator): Array[Double] = sampleWithVariances(n, rand)._1
}
/**
 * Placeholder for an EGARCH(1, 1) model. All operations are currently
 * unimplemented; the exception messages now say which operation was invoked.
 */
class EGARCHModel(
    val omega: Double,
    val alpha: Double,
    val beta: Double) extends TimeSeriesModel {
  /**
   * Returns the log likelihood of the parameters on the given time series.
   *
   * Based on http://swopec.hhs.se/hastef/papers/hastef0564.pdf
   *
   * Not yet implemented.
   */
  def logLikelihood(ts: Array[Double]): Double = {
    throw new UnsupportedOperationException("EGARCHModel.logLikelihood is not yet implemented")
  }

  /** Not yet implemented. */
  override def removeTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    throw new UnsupportedOperationException(
      "EGARCHModel.removeTimeDependentEffects is not yet implemented")
  }

  /** Not yet implemented. */
  override def addTimeDependentEffects(ts: Vector, dest: Vector): Vector = {
    throw new UnsupportedOperationException(
      "EGARCHModel.addTimeDependentEffects is not yet implemented")
  }
}
| samklr/spark-timeseries | src/main/scala/com/cloudera/sparkts/models/GARCH.scala | Scala | apache-2.0 | 9,684 |
package edu.gemini.sp.vcs2
import edu.gemini.pot.sp.Conflict.Moved
import edu.gemini.pot.sp.{DataObjectBlob, ISPNode, SPNodeKey}
import edu.gemini.shared.util.VersionComparison._
import edu.gemini.sp.vcs2.MergeNode._
import edu.gemini.spModel.rich.pot.sp._
import scalaz.\\&/.{Both, That, This}
import scalaz._
import Scalaz._
import scalaz.Tree.Node
/** Produces a preliminary [[MergePlan]]. Before using it to complete a merge
* however, various special case corrections (e.g., observation renumbering)
* must be applied to the plan. */
object PreliminaryMerge {
  // Builds the full MergePlan: the merged tree from `tree`, plus a Missing
  // entry (with synchronized version info) for every key known to the remote
  // diff that did not survive into the merged tree.
  def merge(mc: MergeContext): TryVcs[MergePlan] =
    tree(mc).map { t =>
      val mergedKeys = t.sFoldRight(Set.empty[SPNodeKey]) { (mn, s) => s + mn.key }
      val allKeys = mc.remote.diffMap.keySet ++ mc.remote.plan.delete.map(_.key)
      val deletedKeys = allKeys &~ mergedKeys
      val allMissing = deletedKeys.map { k => Missing(k, mc.local.version(k).sync(mc.remote.version(k))) }
      MergePlan(t, allMissing)
    }
def tree(mc: MergeContext): TryVcs[Tree[MergeNode]] = {
def isVersonUpdated(k: SPNodeKey, pc0: ProgContext, pc1: ProgContext): Boolean =
pc0.version(k).updates(pc1.version(k))
def isUpdatedLocal(l: ISPNode) = isVersonUpdated(l.key, mc.local, mc.remote)
def isUpdatedRemote(r: Tree[MergeNode]) = isVersonUpdated(r.key, mc.remote, mc.local)
// Defines the rules for determining which parent wins in case of ambiguities.
def mergeParent(childKey: SPNodeKey): Option[SPNodeKey] = {
val lParentKey = mc.local.parent(childKey)
val rParentKey = mc.remote.parent(childKey)
(lParentKey, rParentKey) match {
case (None, None) => None
case (Some(lKey), None) => Some(lKey)
case (None, Some(rKey)) => Some(rKey)
case (Some(lKey), Some(rKey)) if lKey === rKey => Some(lKey)
case (Some(lKey), Some(rKey)) =>
if (rParentKey.flatMap(mc.remote.get).exists(isUpdatedRemote)) Some(rKey)
else Some(lKey)
}
}
def keep(k: SPNodeKey, pc: ProgContext): Boolean = pc.parent(k) == mergeParent(k)
def keepLocalChild(l: ISPNode): Boolean = keep(l.key, mc.local)
def keepRemoteChild(r: Tree[MergeNode]): Boolean = keep(r.key, mc.remote)
def containsUpdatedLocal(l: ISPNode): Boolean =
isUpdatedLocal(l) || l.children.exists(containsUpdatedLocal)
def containsUpdatedRemote(r: Tree[MergeNode]): Boolean =
isUpdatedRemote(r) || r.subForest.exists(containsUpdatedRemote)
def containsMissingRemoteYetUpdatedLocal(l: ISPNode): Boolean =
!mc.remote.isPresent(l.key) && (
isUpdatedLocal(l) || l.children.exists(containsMissingRemoteYetUpdatedLocal)
)
def containsMissingLocalYetUpdatedRemote(r: Tree[MergeNode]): Boolean =
!mc.local.isPresent(r.key) && (
isUpdatedRemote(r) || r.subForest.exists(containsMissingLocalYetUpdatedRemote)
)
sealed trait ChildMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren
def sort(merged: List[Tree[MergeNode]], lcs: List[ISPNode], rcs: List[Tree[MergeNode]]): List[Tree[MergeNode]]
}
sealed trait LocalMergeStrategy extends ChildMergeStrategy {
final def sort(merged: List[Tree[MergeNode]], lcs: List[ISPNode], rcs: List[Tree[MergeNode]]): List[Tree[MergeNode]] =
SortHeuristic.sort(merged, lcs.map(_.key), rcs.map(_.key))(_.key)
}
sealed trait RemoteMergeStrategy extends ChildMergeStrategy {
final def sort(merged: List[Tree[MergeNode]], lcs: List[ISPNode], rcs: List[Tree[MergeNode]]): List[Tree[MergeNode]] =
SortHeuristic.sort(merged, rcs.map(_.key), lcs.map(_.key))(_.key)
}
// Strategy for when there is no edit to either side.
val same = new RemoteMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren = pc
}
// Strategy for local nodes that are newer.
val newer = new LocalMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren = {
// Keep all local-only children not in a different updated parent remotely.
val local = pc.local.filter(keepLocalChild)
// In general we don't want the remote nodes -- we're deleting them.
// If deleting something that contains an update we haven't seen though,
// we want to keep it anyway.
val remote = pc.remote.filter(containsMissingLocalYetUpdatedRemote)
PartitionedChildren(local, pc.both, remote)
}
}
// Strategy for local nodes that resurrect remotely deleted nodes. Keep
// all children that have been updated locally and which don't belong to
// some other node.
val deletedRemotely = new LocalMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren =
pc.copy(local = pc.local.filter { child =>
keepLocalChild(child) && containsUpdatedLocal(child)
})
}
// Strategy for local nodes that are older than their remote counterparts.
val older = new RemoteMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren = {
val local = pc.local.filter(containsMissingRemoteYetUpdatedLocal)
PartitionedChildren(local, pc.both, pc.remote)
}
}
// Strategy for remote nodes that have been deleted locally. The opposite
// of "deletedRemotely".
val deletedLocally = new RemoteMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren =
pc.copy(remote = pc.remote.filter { child =>
keepRemoteChild(child) && containsUpdatedRemote(child)
})
}
// Strategy for conflicting edits. When conflicting, the only local-only
// children we keep are those that contain some local update and which
// don't belong in some other node (we let any remote deletions through)..
val conflicting = new RemoteMergeStrategy {
def filter(pc: PartitionedChildren): PartitionedChildren = {
val local = pc.local.filter { child =>
(keepLocalChild(child) && containsUpdatedLocal(child)) ||
mc.remote.parent(child.key).flatMap(mc.remote.get).exists(p => !isUpdatedRemote(p))
}
val remote = pc.remote.filterNot { child =>
mc.local.isDeleted(child.key) && !containsUpdatedRemote(child)
}
PartitionedChildren(local, pc.both, remote)
}
}
def toNode(mod: MergeNode, lcs: List[ISPNode], rcs: List[Tree[MergeNode]], s: ChildMergeStrategy): Tree[MergeNode] = {
// Filter the children according to the version information.
val pc = s.filter(PartitionedChildren.part(lcs, rcs))
// Merge the local only, both, and remote only children.
val lMerged = pc.local.map { lc =>
mc.remote.get(lc.key).fold(go(This(lc))) { rc => go(Both(lc, rc)) }
}
val bMerged = pc.both.map { case (lc, rc) => go(Both(lc,rc)) }
val rMerged = pc.remote.map { rc =>
mc.local.get(rc.key).fold(go(That(rc))) { lc => go(Both(lc, rc)) }
}
// Combine and order the children.
val newChildren = s.sort(lMerged ++ rMerged ++ bMerged, lcs, rcs)
// Compute the new version for this node, which is nominally the
// combination of the local and remote versions. If the children don't
// match though (that is, they have been updated by the merge), then be
// sure that the local version is updated.
val k = mod.key
val syncVersion = mc.syncVersion(k)
def incrVersion = syncVersion.incr(mc.local.prog.getLifespanId)
val newChildKeys = newChildren.map(_.key)
def updatesLocalVersion: Boolean = syncVersion.updates(mc.local.version(k))
def updatesRemoteVersion: Boolean = syncVersion.updates(mc.remote.version(k))
def updatesLocalChildren: Boolean = newChildKeys =/= (mc.local.get(k).map(_.children.map(_.key)) | Nil)
def updatesRemoteChildren: Boolean = newChildKeys =/= (mc.remote.get(k).map(_.subForest.toList.map(_.key)) | Nil)
val newVersion =
if ((!updatesLocalVersion && updatesLocalChildren) || (!updatesRemoteVersion && updatesRemoteChildren))
incrVersion
else
syncVersion
val mod2 = mod match {
case m: Modified => m.copy(nv = newVersion)
case _: Unmodified => mod
}
Node(mod2, newChildren.toStream)
}
def go(lr: ISPNode \\&/ Tree[MergeNode]): Tree[MergeNode] = {
val incr0: Tree[MergeNode] => Tree[MergeNode] = identity
def incr1(n: Tree[MergeNode]): Tree[MergeNode] =
n.rootLabel match {
case m: Modified =>
Node(m.copy(nv = m.nv.incr(mc.local.prog.getLifespanId)), n.subForest)
case _ =>
n
}
lr match {
case This(l) =>
val (incr, filt) = if (isUpdatedLocal(l)) (incr0, newer) // New or updated local node
else (incr1 _, deletedRemotely) // Resurrected local node
incr(toNode(modified(l), l.children, Nil, filt))
case That(r) =>
val (incr, filt) = if (isUpdatedRemote(r)) (incr0, older) // New or updated remote node
else (incr1 _, deletedLocally) // Resurrected remote node
incr(toNode(r.rootLabel, Nil, r.subForest.toList, filt))
case Both(l, r) => r.rootLabel match {
case _: Unmodified => r
case m: Modified =>
val lc = l.children
val rc = r.subForest.toList
l.getVersion.compare(m.nv) match {
case Same => toNode(r.rootLabel, lc, rc, same)
case Newer => toNode(modified(l), lc, rc, newer)
case Older => toNode(r.rootLabel, lc, rc, older)
case Conflicting => toNode(r.rootLabel, lc, rc, conflicting)
}
}
}
}
def addDataObjectConflicts(in: Tree[MergeNode]): TryVcs[Tree[MergeNode]] = {
val commonKeys = mc.remote.diffMap.keySet & mc.local.nodeMap.keySet
def dobConflicts(k: SPNodeKey): Boolean =
mc.local.version(k).compare(mc.remote.version(k)) === Conflicting && {
val local = mc.local.nodeMap(k).getDataObject
val remote = mc.remote.diffMap(k).rootLabel match {
case Modified(_, _, dob, _, _) => Some(dob)
case Unmodified(_) => None
}
remote.exists(dob => !DataObjectBlob.same(local, dob))
}
val conflicts = commonKeys.collect { case k if dobConflicts(k) =>
k -> mc.local.nodeMap(k).getDataObject
}
(TryVcs(in)/:conflicts) { case(tryTree,(k,dob)) =>
for {
t <- tryTree
n0 <- t.loc.findNode(k)
n1 <- n0.addDataObjectConflict(dob)
n2 <- n1.incr(mc.local.prog.getLifespanId)
} yield n2.toTree
}
}
def addMoved(in: Tree[MergeNode]): TryVcs[Tree[MergeNode]] = {
// moved = List: (old parent, child, new parent)
val moved = in.foldTree(List.empty[(SPNodeKey, SPNodeKey, SPNodeKey)]) { (newParent, lst) =>
(lst/:newParent.subForest) { (lst0, child) =>
mc.local.parent(child.key).fold(lst0) { oldParentKey =>
val isMoved = (oldParentKey =/= newParent.key) &&
isVersonUpdated(oldParentKey, mc.local, mc.remote)
if (isMoved) (oldParentKey, child.key, newParent.key) :: lst0
else lst0
}
}
}
(TryVcs(in)/:moved) { case (tryTree, (oldParent, child, newParent)) =>
for {
t <- tryTree
n0 <- t.loc.findNode(oldParent)
n1 <- n0.addConflictNote(new Moved(child, newParent))
n2 <- n1.incr(mc.local.prog.getLifespanId)
} yield n2.toTree
}
}
def addConflicts(in: Tree[MergeNode]): TryVcs[Tree[MergeNode]] =
for {
t0 <- addDataObjectConflicts(in)
t1 <- addMoved(t0)
} yield t1
addConflicts(go(Both(mc.local.prog, mc.remote.plan.update)))
}
} | spakzad/ocs | bundle/edu.gemini.sp.vcs/src/main/scala/edu/gemini/sp/vcs2/PreliminaryMerge.scala | Scala | bsd-3-clause | 12,149 |
package com.twitter.finagle.kestrel.integration
import com.twitter.conversions.time._
import com.twitter.finagle.Service
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.kestrel.Server
import com.twitter.finagle.kestrel.protocol._
import com.twitter.finagle.memcached.util.ChannelBufferUtils._
import com.twitter.util.{Await, Time}
import java.net.InetSocketAddress
import org.specs.SpecificationWithJUnit
class InterpreterServiceSpec extends SpecificationWithJUnit {
  "InterpreterService" should {
    // Shared per-example state, (re)initialized in doBefore.
    var server: Server = null
    var client: Service[Command, Response] = null
    var address: InetSocketAddress = null

    doBefore {
      // Bind to an ephemeral port (0) so concurrent test runs cannot collide.
      server = new Server(new InetSocketAddress(0))
      address = server.start().localAddress.asInstanceOf[InetSocketAddress]
      client = ClientBuilder()
        .hosts("localhost:" + address.getPort)
        .codec(Kestrel())
        .hostConnectionLimit(1)
        .build()
    }

    doAfter {
      server.stop()
    }

    val queueName = "name"
    val value = "value"

    "set & get" in {
      // Flush first so leftovers from other examples cannot interfere.
      val result = for {
        _ <- client(Flush(queueName))
        _ <- client(Set(queueName, Time.now, value))
        r <- client(Get(queueName))
      } yield r
      Await.result(result, 1.second) mustEqual Values(Seq(Value(queueName, value)))
    }

    "transactions" in {
      "set & get/open & get/abort" in {
        // Aborting an open read must make the value visible to the next Open.
        val result = for {
          _ <- client(Set(queueName, Time.now, value))
          _ <- client(Open(queueName))
          _ <- client(Abort(queueName))
          r <- client(Open(queueName))
        } yield r
        Await.result(result, 1.second) mustEqual Values(Seq(Value(queueName, value)))
      }
    }
  }
}
| stevegury/finagle | finagle-kestrel/src/test/scala/com/twitter/finagle/kestrel/integration/InterpreterServiceSpec.scala | Scala | apache-2.0 | 1,718 |
package net.magik6k.lxcadmin.panel
import java.util.UUID
import net.magik6k.jwwf.core.User
import net.magik6k.jwwf.enums.Type
import net.magik6k.jwwf.handlers.UserDataHandler
import net.magik6k.jwwf.widgets.basic.panel.{NamedPanel, Row}
import net.magik6k.lxcadmin.widget.{LocalMemMon, LocalCPUMon}
class MainPanel(user: User) extends Row(2) {
  // Host-resource monitors and the LXC container panel shown on this page.
  val cpuMon = new LocalCPUMon()
  val memMon = new LocalMemMon()
  val lxcPanel = new LxcPanel()

  val host = new Row(2)
  host.put(new NamedPanel(cpuMon.asPanel(12), "CPU Usage", Type.SUCCESS))
  host.put(new NamedPanel(memMon, "Memory", Type.WARNING))

  this.put(new NamedPanel(host, "<b>Host</b>", Type.DANGER).asPanel(12))
  this.put(lxcPanel)

  //////////
  // TIMER
  // Long-poll style refresh loop: every completed user-data request refreshes
  // the widgets and immediately re-arms itself by issuing the next request.
  // A `lazy val` lets the handler reference itself, replacing the original
  // null-initialized `var` workaround.
  lazy val handler: UserDataHandler = new UserDataHandler {
    override def data(key: String, value: String) {
      cpuMon.refresh()
      memMon.refresh()
      lxcPanel.refresh()
      // Fresh random key per request — presumably to defeat caching of the
      // user-data lookup; TODO confirm against the jwwf getUserData contract.
      user.getUserData.get(UUID.randomUUID().toString, handler)
    }
  }

  // Kick off the first poll; subsequent polls are chained by the handler.
  user.getUserData.get(UUID.randomUUID().toString, handler)
}
| magik6k/LxcAdmin | src/main/scala/net/magik6k/lxcadmin/panel/MainPanel.scala | Scala | mit | 1,026 |
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang.scala.scalaz
import scalaz._
import rx.lang.scala.Observable
import org.scalacheck.Arbitrary
import scala.concurrent.{Await, Promise}
import scala.concurrent.duration.Duration
/**
* This object provides implicits for tests.
*/
object ImplicitsForTest {

  // Equality based on sequenceEqual() method.
  // Blocks the calling thread (Await with Duration.Inf) until sequenceEqual
  // emits its verdict; the subscription is always released in the finally
  // block even when Await throws.
  // NOTE(review): the implicit `eqA` is never used — element equality comes
  // from sequenceEqual's default comparison; confirm whether `eqA` should be
  // wired in or the parameter dropped.
  implicit def observableEqual[A](implicit eqA: Equal[A]) = new Equal[Observable[A]]{
    def equal(a1: Observable[A], a2: Observable[A]) = {
      val p = Promise[Boolean]
      // firstOrElse(false): an empty comparison stream counts as "not equal".
      val sub = a1.sequenceEqual(a2).firstOrElse(false).subscribe(v => p.success(v))
      try {
        Await.result(p.future, Duration.Inf)
      } finally {
        sub.unsubscribe()
      }
    }
  }

  // Generates finite Observables by lifting arbitrary arrays via Observable.just.
  implicit def observableArbitrary[A](implicit a: Arbitrary[A], array: Arbitrary[Array[A]]): Arbitrary[Observable[A]]
  = Arbitrary(for (arr <- array.arbitrary) yield Observable.just(arr:_*))
}
| abersnaze/RxJava | rxjava-contrib/rxjava-scalaz/src/test/scala/rx/lang/scala/scalaz/ImplicitsForTest.scala | Scala | apache-2.0 | 1,488 |
package com.arcusys.valamis.social.model
import com.arcusys.valamis.model.SkipTake
/** Filter criteria for querying social comments; all criteria except the
  * company scope are optional.
  *
  * @param companyId  identifier of the company the comments belong to (required)
  * @param userId     restrict to comments authored by this user, if set
  * @param activityId restrict to comments attached to this activity, if set
  * @param sortBy     optional ordering of the result set
  * @param skipTake   optional paging window (offset + limit)
  */
case class CommentFilter(
  companyId: Long,
  userId: Option[Long] = None,
  activityId: Option[Long] = None,
  sortBy: Option[CommentSortBy] = None,
  skipTake: Option[SkipTake] = None)
| ViLPy/Valamis | valamis-social/src/main/scala/com/arcusys/valamis/social/model/CommentFilter.scala | Scala | lgpl-3.0 | 273 |
package cromwell.engine.io.gcs
import scala.language.implicitConversions
import scala.util.{Failure, Success, Try}
/** Represents a Google Cloud Storage path, like gs://bucket/path/to/object.txt
*
* @param bucket - should adhere to https://cloud.google.com/storage/docs/bucket-naming?hl=en#requirements
* @param objectName
*/
case class GcsPath(bucket: String, objectName: String) {
  /** Renders this path in its canonical `gs://bucket/objectName` URI form. */
  override def toString = s"gs://$bucket/$objectName"
}
object GcsPath {
  // NOTE(review): an implicit conversion from String is easy to trigger by
  // accident and obscures call sites; consider requiring explicit
  // GcsPath(...) — confirm no callers rely on the implicit before removing.
  implicit def toGcsPath(str: String): GcsPath = GcsPath(str)

  /** Parses `value` into a GcsPath, rethrowing the parse failure
    * (IllegalArgumentException) when it is not a valid gs:// URI. */
  def apply(value: String): GcsPath = {
    parse(value) match {
      case Success(gcsPath) => gcsPath
      case Failure(e) => throw e
    }
  }

  /** Parses a `gs://bucket/object` URI; a single leading '/' is stripped from
    * the object name, so `gs://b/x` and `gs://b//x` differ. */
  def parse(value: String): Try[GcsPath] = {
    val gsUriRegex = """gs://([^/]*)(.*)""".r
    value match {
      case gsUriRegex(bucket, objectName) => Success(GcsPath(bucket, objectName.stripPrefix("/")))
      case _ => Failure(new IllegalArgumentException(s"Not a valid Google Cloud Storage URI: $value"))
    }
  }

  /** Extracts the bucket name from a bucket-only URI.
    * NOTE(review): Scala's Regex pattern match anchors to the whole string, so
    * a URI that also carries an object name (gs://bucket/obj) fails here —
    * confirm callers only ever pass bucket-only URIs. */
  def parseBucket(value: String): Try[String] = {
    val gsUriRegex = """gs://([^/]*)""".r
    value match {
      case gsUriRegex(bucket) => Success(bucket)
      case _ => Failure(new IllegalArgumentException(s"Not a valid Google Cloud Storage URI: $value"))
    }
  }
} | dgtester/cromwell | src/main/scala/cromwell/engine/io/gcs/GcsPath.scala | Scala | bsd-3-clause | 1292 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.SparkException
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.internal.SQLConf._
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
/** Concrete [[SQLViewSuite]] run against a plain shared SparkSession. */
class SimpleSQLViewSuite extends SQLViewSuite with SharedSparkSession
/**
* A suite for testing view related functionality.
*/
abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
import testImplicits._
protected override def beforeAll(): Unit = {
  super.beforeAll()
  // Create a simple table with two columns: id and id1 (values 1..9), shared
  // as the base fixture ("jt") by the tests in this suite.
  spark.range(1, 10).selectExpr("id", "id id1").write.format("json").saveAsTable("jt")
}
protected override def afterAll(): Unit = {
  try {
    spark.sql(s"DROP TABLE IF EXISTS jt")
  } finally {
    // Ensure the parent teardown runs even if dropping the fixture fails.
    super.afterAll()
  }
}
test("create a permanent view on a permanent view") {
withView("jtv1", "jtv2") {
sql("CREATE VIEW jtv1 AS SELECT * FROM jt WHERE id > 3")
sql("CREATE VIEW jtv2 AS SELECT * FROM jtv1 WHERE id < 6")
checkAnswer(sql("select count(*) FROM jtv2"), Row(2))
}
}
test("create a temp view on a permanent view") {
withView("jtv1") {
withTempView("temp_jtv1") {
sql("CREATE VIEW jtv1 AS SELECT * FROM jt WHERE id > 3")
sql("CREATE TEMPORARY VIEW temp_jtv1 AS SELECT * FROM jtv1 WHERE id < 6")
checkAnswer(sql("select count(*) FROM temp_jtv1"), Row(2))
}
}
}
test("create a temp view on a temp view") {
withTempView("temp_jtv1", "temp_jtv2") {
sql("CREATE TEMPORARY VIEW temp_jtv1 AS SELECT * FROM jt WHERE id > 3")
sql("CREATE TEMPORARY VIEW temp_jtv2 AS SELECT * FROM temp_jtv1 WHERE id < 6")
checkAnswer(sql("select count(*) FROM temp_jtv2"), Row(2))
}
}
test("create a permanent view on a temp view") {
withView("jtv1") {
withTempView("temp_jtv1") {
withGlobalTempView("global_temp_jtv1") {
sql("CREATE TEMPORARY VIEW temp_jtv1 AS SELECT * FROM jt WHERE id > 3")
var e = intercept[AnalysisException] {
sql("CREATE VIEW jtv1 AS SELECT * FROM temp_jtv1 WHERE id < 6")
}.getMessage
assert(e.contains("Not allowed to create a permanent view `default`.`jtv1` by " +
"referencing a temporary view temp_jtv1. " +
"Please create a temp view instead by CREATE TEMP VIEW"))
val globalTempDB = spark.sharedState.globalTempViewManager.database
sql("CREATE GLOBAL TEMP VIEW global_temp_jtv1 AS SELECT * FROM jt WHERE id > 0")
e = intercept[AnalysisException] {
sql(s"CREATE VIEW jtv1 AS SELECT * FROM $globalTempDB.global_temp_jtv1 WHERE id < 6")
}.getMessage
assert(e.contains("Not allowed to create a permanent view `default`.`jtv1` by " +
"referencing a temporary view global_temp.global_temp_jtv1"))
}
}
}
}
test("error handling: existing a table with the duplicate name when creating/altering a view") {
withTable("tab1") {
sql("CREATE TABLE tab1 (id int) USING parquet")
var e = intercept[AnalysisException] {
sql("CREATE OR REPLACE VIEW tab1 AS SELECT * FROM jt")
}.getMessage
assert(e.contains("`tab1` is not a view"))
e = intercept[AnalysisException] {
sql("CREATE VIEW tab1 AS SELECT * FROM jt")
}.getMessage
assert(e.contains("`tab1` is not a view"))
e = intercept[AnalysisException] {
sql("ALTER VIEW tab1 AS SELECT * FROM jt")
}.getMessage
assert(e.contains("tab1 is a table. 'ALTER VIEW ... AS' expects a view."))
}
}
test("existing a table with the duplicate name when CREATE VIEW IF NOT EXISTS") {
withTable("tab1") {
sql("CREATE TABLE tab1 (id int) USING parquet")
sql("CREATE VIEW IF NOT EXISTS tab1 AS SELECT * FROM jt")
checkAnswer(sql("select count(*) FROM tab1"), Row(0))
}
}
test("Issue exceptions for ALTER VIEW on the temporary view") {
val viewName = "testView"
withTempView(viewName) {
spark.range(10).createTempView(viewName)
assertAnalysisError(
s"ALTER VIEW $viewName SET TBLPROPERTIES ('p' = 'an')",
"testView is a temp view. 'ALTER VIEW ... SET TBLPROPERTIES' expects a permanent view.")
assertAnalysisError(
s"ALTER VIEW $viewName UNSET TBLPROPERTIES ('p')",
"testView is a temp view. 'ALTER VIEW ... UNSET TBLPROPERTIES' expects a permanent view.")
}
}
test("Issue exceptions for ALTER TABLE on the temporary view") {
val viewName = "testView"
withTempView(viewName) {
spark.range(10).createTempView(viewName)
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName SET SERDE 'whatever'",
viewName,
"ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName PARTITION (a=1, b=2) SET SERDE 'whatever'",
viewName,
"ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName SET SERDEPROPERTIES ('p' = 'an')",
viewName,
"ALTER TABLE ... SET [SERDE|SERDEPROPERTIES]")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName PARTITION (a='4') RENAME TO PARTITION (a='5')",
viewName,
"ALTER TABLE ... RENAME TO PARTITION")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName RECOVER PARTITIONS",
viewName,
"ALTER TABLE ... RECOVER PARTITIONS")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName SET LOCATION '/path/to/your/lovely/heart'",
viewName,
"ALTER TABLE ... SET LOCATION ...")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName PARTITION (a='4') SET LOCATION '/path/to/home'",
viewName,
"ALTER TABLE ... SET LOCATION ...")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName ADD IF NOT EXISTS PARTITION (a='4', b='8')",
viewName,
"ALTER TABLE ... ADD PARTITION ...")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName DROP PARTITION (a='4', b='8')",
viewName,
"ALTER TABLE ... DROP PARTITION ...")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName SET TBLPROPERTIES ('p' = 'an')",
viewName,
"ALTER TABLE ... SET TBLPROPERTIES")
assertErrorForAlterTableOnTempView(
s"ALTER TABLE $viewName UNSET TBLPROPERTIES ('p')",
viewName,
"ALTER TABLE ... UNSET TBLPROPERTIES")
}
}
test("Issue exceptions for other table DDL on the temporary view") {
val viewName = "testView"
withTempView(viewName) {
spark.range(10).createTempView(viewName)
val e = intercept[AnalysisException] {
sql(s"INSERT INTO TABLE $viewName SELECT 1")
}.getMessage
assert(e.contains("Inserting into an RDD-based table is not allowed"))
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/employee.dat")
val e2 = intercept[AnalysisException] {
sql(s"""LOAD DATA LOCAL INPATH "$dataFilePath" INTO TABLE $viewName""")
}.getMessage
assert(e2.contains(s"$viewName is a temp view. 'LOAD DATA' expects a table"))
val e3 = intercept[AnalysisException] {
sql(s"SHOW CREATE TABLE $viewName")
}.getMessage
assert(e3.contains(
s"$viewName is a temp view. 'SHOW CREATE TABLE' expects a table or permanent view."))
val e4 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
}.getMessage
assert(e4.contains(
s"$viewName is a temp view. 'ANALYZE TABLE' expects a table or permanent view."))
val e5 = intercept[AnalysisException] {
sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id")
}.getMessage
assert(e5.contains(s"Temporary view `$viewName` is not cached for analyzing columns."))
}
}
/** Runs `query` and verifies it fails because the referenced table is missing. */
private def assertNoSuchTable(query: String): Unit = {
  intercept[NoSuchTableException](sql(query))
}
/** Runs `query`, expecting an AnalysisException whose message contains `message`. */
private def assertAnalysisError(query: String, message: String): Unit = {
  val caught = intercept[AnalysisException] {
    sql(query)
  }
  assert(caught.message.contains(message))
}
/** Verifies that running an ALTER TABLE command (`sqlText`) against the temp
  * view `viewName` fails with the standard "expects a table" message for
  * command `cmdName`. */
private def assertErrorForAlterTableOnTempView(
    sqlText: String, viewName: String, cmdName: String): Unit = {
  val expected =
    s"$viewName is a temp view. '$cmdName' expects a table. Please use ALTER VIEW instead."
  assertAnalysisError(sqlText, expected)
}
test("error handling: insert/load table commands against a view") {
val viewName = "testView"
withView(viewName) {
sql(s"CREATE VIEW $viewName AS SELECT id FROM jt")
var e = intercept[AnalysisException] {
sql(s"INSERT INTO TABLE $viewName SELECT 1")
}.getMessage
assert(e.contains("Inserting into a view is not allowed. View: `default`.`testview`"))
val dataFilePath =
Thread.currentThread().getContextClassLoader.getResource("data/files/employee.dat")
e = intercept[AnalysisException] {
sql(s"""LOAD DATA LOCAL INPATH "$dataFilePath" INTO TABLE $viewName""")
}.getMessage
assert(e.contains("default.testView is a view. 'LOAD DATA' expects a table"))
}
}
test("error handling: fail if the view sql itself is invalid") {
// A database that does not exist
assertInvalidReference("CREATE OR REPLACE VIEW myabcdview AS SELECT * FROM db_not_exist234.jt")
// A table that does not exist
assertInvalidReference("CREATE OR REPLACE VIEW myabcdview AS SELECT * FROM table_not_exist345")
// A column that does not exist
intercept[AnalysisException] {
sql("CREATE OR REPLACE VIEW myabcdview AS SELECT random1234 FROM jt").collect()
}
}
/** Verifies that `query` fails to resolve a table or view reference. */
private def assertInvalidReference(query: String): Unit = {
  val message = intercept[AnalysisException](sql(query)).getMessage
  assert(message.contains("Table or view not found"))
}
test("error handling: fail if the temp view name contains the database prefix") {
// Fully qualified table name like "database.table" is not allowed for temporary view
val e = intercept[AnalysisException] {
sql("CREATE OR REPLACE TEMPORARY VIEW default.myabcdview AS SELECT * FROM jt")
}
assert(e.message.contains("It is not allowed to add database prefix"))
}
test("error handling: disallow IF NOT EXISTS for CREATE TEMPORARY VIEW") {
withTempView("myabcdview") {
val e = intercept[AnalysisException] {
sql("CREATE TEMPORARY VIEW IF NOT EXISTS myabcdview AS SELECT * FROM jt")
}
assert(e.message.contains("It is not allowed to define a TEMPORARY view with IF NOT EXISTS"))
}
}
test("error handling: fail if the temp view sql itself is invalid") {
// A database that does not exist
assertInvalidReference(
"CREATE OR REPLACE TEMPORARY VIEW myabcdview AS SELECT * FROM db_not_exist234.jt")
// A table that does not exist
assertInvalidReference(
"CREATE OR REPLACE TEMPORARY VIEW myabcdview AS SELECT * FROM table_not_exist1345")
// A column that does not exist, for temporary view
intercept[AnalysisException] {
sql("CREATE OR REPLACE TEMPORARY VIEW myabcdview AS SELECT random1234 FROM jt")
}
}
test("SPARK-32374: disallow setting properties for CREATE TEMPORARY VIEW") {
withTempView("myabcdview") {
val e = intercept[ParseException] {
sql("CREATE TEMPORARY VIEW myabcdview TBLPROPERTIES ('a' = 'b') AS SELECT * FROM jt")
}
assert(e.message.contains(
"Operation not allowed: TBLPROPERTIES can't coexist with CREATE TEMPORARY VIEW"))
}
}
test("correctly parse CREATE VIEW statement") {
withView("testView") {
sql(
"""CREATE VIEW IF NOT EXISTS
|default.testView (c1 COMMENT 'blabla', c2 COMMENT 'blabla')
|TBLPROPERTIES ('a' = 'b')
|AS SELECT * FROM jt
|""".stripMargin)
checkAnswer(sql("SELECT c1, c2 FROM testView ORDER BY c1"), (1 to 9).map(i => Row(i, i)))
}
}
test("correctly parse a nested view") {
withTempDatabase { db =>
withView("view1", "view2", s"$db.view3") {
sql("CREATE VIEW view1(x, y) AS SELECT * FROM jt")
// Create a nested view in the same database.
sql("CREATE VIEW view2(id, id1) AS SELECT * FROM view1")
checkAnswer(sql("SELECT * FROM view2 ORDER BY id"), (1 to 9).map(i => Row(i, i)))
// Create a nested view in a different database.
activateDatabase(db) {
sql(s"CREATE VIEW $db.view3(id, id1) AS SELECT * FROM default.view1")
checkAnswer(sql("SELECT * FROM view3 ORDER BY id"), (1 to 9).map(i => Row(i, i)))
}
}
}
}
test("correctly parse CREATE TEMPORARY VIEW statement") {
withTempView("testView") {
sql(
"""CREATE TEMPORARY VIEW
|testView (c1 COMMENT 'blabla', c2 COMMENT 'blabla')
|AS SELECT * FROM jt
|""".stripMargin)
checkAnswer(sql("SELECT c1, c2 FROM testView ORDER BY c1"), (1 to 9).map(i => Row(i, i)))
}
}
test("should NOT allow CREATE TEMPORARY VIEW when TEMPORARY VIEW with same name exists") {
withTempView("testView") {
sql("CREATE TEMPORARY VIEW testView AS SELECT id FROM jt")
val e = intercept[AnalysisException] {
sql("CREATE TEMPORARY VIEW testView AS SELECT id FROM jt")
}
assert(e.message.contains("Temporary view") && e.message.contains("already exists"))
}
}
test("should allow CREATE TEMPORARY VIEW when a permanent VIEW with same name exists") {
withView("testView", "default.testView") {
withTempView("testView") {
sql("CREATE VIEW testView AS SELECT id FROM jt")
sql("CREATE TEMPORARY VIEW testView AS SELECT id FROM jt")
}
}
}
test("should allow CREATE permanent VIEW when a TEMPORARY VIEW with same name exists") {
withView("testView", "default.testView") {
withTempView("testView") {
sql("CREATE TEMPORARY VIEW testView AS SELECT id FROM jt")
sql("CREATE VIEW testView AS SELECT id FROM jt")
}
}
}
test("correctly handle CREATE VIEW IF NOT EXISTS") {
withTable("jt2") {
withView("testView") {
sql("CREATE VIEW testView AS SELECT id FROM jt")
val df = (1 until 10).map(i => i -> i).toDF("i", "j")
df.write.format("json").saveAsTable("jt2")
sql("CREATE VIEW IF NOT EXISTS testView AS SELECT * FROM jt2")
// make sure our view doesn't change.
checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
}
}
}
test(s"correctly handle CREATE OR REPLACE TEMPORARY VIEW") {
withTable("jt2") {
withView("testView") {
sql("CREATE OR REPLACE TEMPORARY VIEW testView AS SELECT id FROM jt")
checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
sql("CREATE OR REPLACE TEMPORARY VIEW testView AS SELECT id AS i, id AS j FROM jt")
// make sure the view has been changed.
checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
}
}
}
test("correctly handle CREATE OR REPLACE VIEW") {
withTable("jt2") {
sql("CREATE OR REPLACE VIEW testView AS SELECT id FROM jt")
checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
val df = (1 until 10).map(i => i -> i).toDF("i", "j")
df.write.format("json").saveAsTable("jt2")
sql("CREATE OR REPLACE VIEW testView AS SELECT * FROM jt2")
// make sure the view has been changed.
checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
sql("DROP VIEW testView")
val e = intercept[AnalysisException] {
sql("CREATE OR REPLACE VIEW IF NOT EXISTS testView AS SELECT id FROM jt")
}
assert(e.message.contains(
"CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed"))
}
}
test("correctly handle ALTER VIEW") {
withTable("jt2") {
withView("testView") {
sql("CREATE VIEW testView AS SELECT id FROM jt")
val df = (1 until 10).map(i => i -> i).toDF("i", "j")
df.write.format("json").saveAsTable("jt2")
sql("ALTER VIEW testView AS SELECT * FROM jt2")
// make sure the view has been changed.
checkAnswer(sql("SELECT * FROM testView ORDER BY i"), (1 to 9).map(i => Row(i, i)))
}
}
}
test("correctly handle ALTER VIEW on a referenced view") {
withView("view1", "view2") {
sql("CREATE VIEW view1(x, y) AS SELECT * FROM jt")
// Create a nested view.
sql("CREATE VIEW view2(id, id1) AS SELECT * FROM view1")
checkAnswer(sql("SELECT * FROM view2 ORDER BY id"), (1 to 9).map(i => Row(i, i)))
// Alter the referenced view.
sql("ALTER VIEW view1 AS SELECT id AS x, id1 + 1 As y FROM jt")
checkAnswer(sql("SELECT * FROM view2 ORDER BY id"), (1 to 9).map(i => Row(i, i + 1)))
}
}
test("should not allow ALTER VIEW AS when the view does not exist") {
assertAnalysisError(
"ALTER VIEW testView AS SELECT 1, 2",
"View not found: testView")
assertAnalysisError(
"ALTER VIEW default.testView AS SELECT 1, 2",
"View not found: default.testView")
}
test("ALTER VIEW AS should try to alter temp view first if view name has no database part") {
withView("test_view") {
withTempView("test_view") {
sql("CREATE VIEW test_view AS SELECT 1 AS a, 2 AS b")
sql("CREATE TEMP VIEW test_view AS SELECT 1 AS a, 2 AS b")
sql("ALTER VIEW test_view AS SELECT 3 AS i, 4 AS j")
// The temporary view should be updated.
checkAnswer(spark.table("test_view"), Row(3, 4))
// The permanent view should stay same.
checkAnswer(spark.table("default.test_view"), Row(1, 2))
}
}
}
test("ALTER VIEW AS should alter permanent view if view name has database part") {
withView("test_view") {
withTempView("test_view") {
sql("CREATE VIEW test_view AS SELECT 1 AS a, 2 AS b")
sql("CREATE TEMP VIEW test_view AS SELECT 1 AS a, 2 AS b")
sql("ALTER VIEW default.test_view AS SELECT 3 AS i, 4 AS j")
// The temporary view should stay same.
checkAnswer(spark.table("test_view"), Row(1, 2))
// The permanent view should be updated.
checkAnswer(spark.table("default.test_view"), Row(3, 4))
}
}
}
test("ALTER VIEW AS should keep the previous table properties, comment, create_time, etc.") {
withView("test_view") {
sql(
"""
|CREATE VIEW test_view
|COMMENT 'test'
|TBLPROPERTIES ('key' = 'a')
|AS SELECT 1 AS a, 2 AS b
""".stripMargin)
val catalog = spark.sessionState.catalog
val viewMeta = catalog.getTableMetadata(TableIdentifier("test_view"))
assert(viewMeta.comment == Some("test"))
assert(viewMeta.properties("key") == "a")
sql("ALTER VIEW test_view AS SELECT 3 AS i, 4 AS j")
val updatedViewMeta = catalog.getTableMetadata(TableIdentifier("test_view"))
assert(updatedViewMeta.comment == Some("test"))
assert(updatedViewMeta.properties("key") == "a")
assert(updatedViewMeta.createTime == viewMeta.createTime)
// The view should be updated.
checkAnswer(spark.table("test_view"), Row(3, 4))
}
}
test("create view for json table") {
// json table is not hive-compatible, make sure the new flag fix it.
withView("testView") {
sql("CREATE VIEW testView AS SELECT id FROM jt")
checkAnswer(sql("SELECT * FROM testView ORDER BY id"), (1 to 9).map(i => Row(i)))
}
}
test("create view for partitioned parquet table") {
// partitioned parquet table is not hive-compatible, make sure the new flag fix it.
withTable("parTable") {
withView("testView") {
val df = Seq(1 -> "a").toDF("i", "j")
df.write.format("parquet").partitionBy("i").saveAsTable("parTable")
sql("CREATE VIEW testView AS SELECT i, j FROM parTable")
checkAnswer(sql("SELECT * FROM testView"), Row(1, "a"))
}
}
}
test("create view for joined tables") {
// make sure the new flag can handle some complex cases like join and schema change.
withTable("jt1", "jt2") {
spark.range(1, 10).toDF("id1").write.format("json").saveAsTable("jt1")
spark.range(1, 10).toDF("id2").write.format("json").saveAsTable("jt2")
withView("testView") {
sql("CREATE VIEW testView AS SELECT * FROM jt1 JOIN jt2 ON id1 == id2")
checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
val df = (1 until 10).map(i => i -> i).toDF("id1", "newCol")
df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("jt1")
checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i)))
}
}
}
test("CTE within view") {
withView("cte_view") {
sql("CREATE VIEW cte_view AS WITH w AS (SELECT 1 AS n) SELECT n FROM w")
checkAnswer(sql("SELECT * FROM cte_view"), Row(1))
}
}
test("Using view after switching current database") {
withView("v") {
sql("CREATE VIEW v AS SELECT * FROM jt")
withTempDatabase { db =>
activateDatabase(db) {
// Should look up table `jt` in database `default`.
checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.jt"))
// The new `jt` table shouldn't be scanned.
sql("CREATE TABLE jt(key INT, value STRING) USING parquet")
checkAnswer(sql("SELECT * FROM default.v"), sql("SELECT * FROM default.jt"))
}
}
}
}
test("Using view after adding more columns") {
withTable("add_col") {
spark.range(10).write.saveAsTable("add_col")
withView("v") {
sql("CREATE VIEW v AS SELECT * FROM add_col")
spark.range(10).select('id, 'id as 'a).write.mode("overwrite").saveAsTable("add_col")
checkAnswer(sql("SELECT * FROM v"), spark.range(10).toDF())
}
}
}
test("error handling: fail if the referenced table or view is invalid") {
withView("view1", "view2", "view3") {
// Fail if the referenced table is defined in a invalid database.
withTempDatabase { db =>
withTable(s"$db.table1") {
activateDatabase(db) {
sql("CREATE TABLE table1(a int, b string) USING parquet")
sql("CREATE VIEW default.view1 AS SELECT * FROM table1")
}
}
}
assertInvalidReference("SELECT * FROM view1")
// Fail if the referenced table is invalid.
withTable("table2") {
sql("CREATE TABLE table2(a int, b string) USING parquet")
sql("CREATE VIEW view2 AS SELECT * FROM table2")
}
assertInvalidReference("SELECT * FROM view2")
// Fail if the referenced view is invalid.
withView("testView") {
sql("CREATE VIEW testView AS SELECT * FROM jt")
sql("CREATE VIEW view3 AS SELECT * FROM testView")
}
assertInvalidReference("SELECT * FROM view3")
}
}
test("correctly resolve a view in a self join") {
withView("testView") {
sql("CREATE VIEW testView AS SELECT * FROM jt")
checkAnswer(
sql("SELECT * FROM testView t1 JOIN testView t2 ON t1.id = t2.id ORDER BY t1.id"),
(1 to 9).map(i => Row(i, i, i, i)))
}
}
test("correctly handle a view with custom column names") {
withTable("tab1") {
spark.range(1, 10).selectExpr("id", "id + 1 id1").write.saveAsTable("tab1")
withView("testView", "testView2") {
sql("CREATE VIEW testView(x, y) AS SELECT * FROM tab1")
// Correctly resolve a view with custom column names.
checkAnswer(sql("SELECT * FROM testView ORDER BY x"), (1 to 9).map(i => Row(i, i + 1)))
// Throw an AnalysisException if the number of columns don't match up.
val e = intercept[AnalysisException] {
sql("CREATE VIEW testView2(x, y, z) AS SELECT * FROM tab1")
}.getMessage
assert(e.contains("The number of columns produced by the SELECT clause (num: `2`) does " +
"not match the number of column names specified by CREATE VIEW (num: `3`)."))
// Correctly resolve a view when the referenced table schema changes.
spark.range(1, 10).selectExpr("id", "id + id dummy", "id + 1 id1")
.write.mode(SaveMode.Overwrite).saveAsTable("tab1")
checkAnswer(sql("SELECT * FROM testView ORDER BY x"), (1 to 9).map(i => Row(i, i + 1)))
// Throw an AnalysisException if the column name is not found.
spark.range(1, 10).selectExpr("id", "id + 1 dummy")
.write.mode(SaveMode.Overwrite).saveAsTable("tab1")
intercept[AnalysisException](sql("SELECT * FROM testView"))
}
}
}
test("resolve a view when the dataTypes of referenced table columns changed") {
withTable("tab1") {
spark.range(1, 10).selectExpr("id", "id + 1 id1").write.saveAsTable("tab1")
withView("testView") {
sql("CREATE VIEW testView AS SELECT * FROM tab1")
// Allow casting from IntegerType to LongType
val df = (1 until 10).map(i => (i, i + 1)).toDF("id", "id1")
df.write.format("json").mode(SaveMode.Overwrite).saveAsTable("tab1")
checkAnswer(sql("SELECT * FROM testView ORDER BY id1"), (1 to 9).map(i => Row(i, i + 1)))
// Casting from DoubleType to LongType might truncate, throw an AnalysisException.
val df2 = (1 until 10).map(i => (i.toDouble, i.toDouble)).toDF("id", "id1")
df2.write.format("json").mode(SaveMode.Overwrite).saveAsTable("tab1")
intercept[AnalysisException](sql("SELECT * FROM testView"))
// Can't cast from ArrayType to LongType, throw an AnalysisException.
val df3 = (1 until 10).map(i => (i, Seq(i))).toDF("id", "id1")
df3.write.format("json").mode(SaveMode.Overwrite).saveAsTable("tab1")
intercept[AnalysisException](sql("SELECT * FROM testView"))
}
}
}
test("correctly handle a cyclic view reference") {
withView("view1", "view2", "view3") {
sql("CREATE VIEW view1 AS SELECT * FROM jt")
sql("CREATE VIEW view2 AS SELECT * FROM view1")
sql("CREATE VIEW view3 AS SELECT * FROM view2")
// Detect cyclic view reference on ALTER VIEW.
val e1 = intercept[AnalysisException] {
sql("ALTER VIEW view1 AS SELECT * FROM view2")
}.getMessage
assert(e1.contains("Recursive view `default`.`view1` detected (cycle: `default`.`view1` " +
"-> `default`.`view2` -> `default`.`view1`)"))
// Detect the most left cycle when there exists multiple cyclic view references.
val e2 = intercept[AnalysisException] {
sql("ALTER VIEW view1 AS SELECT * FROM view3 JOIN view2")
}.getMessage
assert(e2.contains("Recursive view `default`.`view1` detected (cycle: `default`.`view1` " +
"-> `default`.`view3` -> `default`.`view2` -> `default`.`view1`)"))
// Detect cyclic view reference on CREATE OR REPLACE VIEW.
val e3 = intercept[AnalysisException] {
sql("CREATE OR REPLACE VIEW view1 AS SELECT * FROM view2")
}.getMessage
assert(e3.contains("Recursive view `default`.`view1` detected (cycle: `default`.`view1` " +
"-> `default`.`view2` -> `default`.`view1`)"))
// Detect cyclic view reference from subqueries.
val e4 = intercept[AnalysisException] {
sql("ALTER VIEW view1 AS SELECT * FROM jt WHERE EXISTS (SELECT 1 FROM view2)")
}.getMessage
assert(e4.contains("Recursive view `default`.`view1` detected (cycle: `default`.`view1` " +
"-> `default`.`view2` -> `default`.`view1`)"))
}
}
test("permanent view should be case-preserving") {
withView("v") {
sql("CREATE VIEW v AS SELECT 1 as aBc")
assert(spark.table("v").schema.head.name == "aBc")
sql("CREATE OR REPLACE VIEW v AS SELECT 2 as cBa")
assert(spark.table("v").schema.head.name == "cBa")
}
}
test("sparkSession API view resolution with different default database") {
withDatabase("db2") {
withView("default.v1") {
withTable("t1") {
sql("USE default")
sql("CREATE TABLE t1 USING parquet AS SELECT 1 AS c0")
sql("CREATE VIEW v1 AS SELECT * FROM t1")
sql("CREATE DATABASE IF NOT EXISTS db2")
sql("USE db2")
checkAnswer(spark.table("default.v1"), Row(1))
}
}
}
}
test("SPARK-23519 view should be created even when query output contains duplicate col name") {
withTable("t23519") {
withView("v23519") {
sql("CREATE TABLE t23519 USING parquet AS SELECT 1 AS c1")
sql("CREATE VIEW v23519 (c1, c2) AS SELECT c1, c1 FROM t23519")
checkAnswer(sql("SELECT * FROM v23519"), Row(1, 1))
}
}
}
test("temporary view should ignore useCurrentSQLConfigsForView config") {
withTable("t") {
Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
withTempView("v1") {
sql("CREATE TEMPORARY VIEW v1 AS SELECT 1/0")
withSQLConf(
USE_CURRENT_SQL_CONFIGS_FOR_VIEW.key -> "true",
ANSI_ENABLED.key -> "true") {
checkAnswer(sql("SELECT * FROM v1"), Seq(Row(null)))
}
}
}
}
  // ALTER VIEW must re-capture the view definition under the config active at
  // alter time: with STORE_ANALYZED_PLAN_FOR_VIEW=true the analyzed plan (and
  // its file listing) is frozen, so overwriting the table makes reads fail;
  // with it false the view is re-analyzed per query and sees the new data.
  test("alter temporary view should follow current storeAnalyzedPlanForView config") {
    withTable("t") {
      Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
      withView("v1") {
        withSQLConf(STORE_ANALYZED_PLAN_FOR_VIEW.key -> "true") {
          sql("CREATE TEMPORARY VIEW v1 AS SELECT * FROM t")
          Seq(4, 6, 5).toDF("c1").write.mode("overwrite").format("parquet").saveAsTable("t")
          // Stored plan points at the replaced files, so the scan must fail.
          val e = intercept[SparkException] {
            sql("SELECT * FROM v1").collect()
          }.getMessage
          assert(e.contains("does not exist"))
        }
        withSQLConf(STORE_ANALYZED_PLAN_FOR_VIEW.key -> "false") {
          // alter view from legacy to non-legacy config
          sql("ALTER VIEW v1 AS SELECT * FROM t")
          Seq(1, 3, 5).toDF("c1").write.mode("overwrite").format("parquet").saveAsTable("t")
          checkAnswer(sql("SELECT * FROM v1"), Seq(Row(1), Row(3), Row(5)))
        }
        withSQLConf(STORE_ANALYZED_PLAN_FOR_VIEW.key -> "true") {
          // alter view from non-legacy to legacy config
          sql("ALTER VIEW v1 AS SELECT * FROM t")
          Seq(2, 4, 6).toDF("c1").write.mode("overwrite").format("parquet").saveAsTable("t")
          val e = intercept[SparkException] {
            sql("SELECT * FROM v1").collect()
          }.getMessage
          assert(e.contains("does not exist"))
        }
      }
    }
  }
  // A session-local temp view may reference a global temp view through the
  // global temp database name.
  test("local temp view refers global temp view") {
    withGlobalTempView("v1") {
      withTempView("v2") {
        val globalTempDB = spark.sharedState.globalTempViewManager.database
        sql("CREATE GLOBAL TEMPORARY VIEW v1 AS SELECT 1")
        sql(s"CREATE TEMPORARY VIEW v2 AS SELECT * FROM ${globalTempDB}.v1")
        checkAnswer(sql("SELECT * FROM v2"), Seq(Row(1)))
      }
    }
  }
  // Converse of the previous test: a global temp view may reference a
  // session-local temp view that exists at creation time.
  test("global temp view refers local temp view") {
    withTempView("v1") {
      withGlobalTempView("v2") {
        val globalTempDB = spark.sharedState.globalTempViewManager.database
        sql("CREATE TEMPORARY VIEW v1 AS SELECT 1")
        sql(s"CREATE GLOBAL TEMPORARY VIEW v2 AS SELECT * FROM v1")
        checkAnswer(sql(s"SELECT * FROM ${globalTempDB}.v2"), Seq(Row(1)))
      }
    }
  }
  // SPARK-33141: a permanent view captures the SQL configs active when it was
  // created (or altered). First half: session-level config changes must NOT
  // affect existing views. Second half: with USE_CURRENT_SQL_CONFIGS_FOR_VIEW
  // enabled, the session configs take over and the same queries fail/change.
  test("SPARK-33141: view should be parsed and analyzed with configs set when creating") {
    withTable("t") {
      withView("v1", "v2", "v3", "v4", "v5") {
        Seq(2, 3, 1).toDF("c1").write.format("parquet").saveAsTable("t")
        sql("CREATE VIEW v1 (c1) AS SELECT C1 FROM t")
        sql("CREATE VIEW v2 (c1) AS SELECT c1 FROM t ORDER BY 1 ASC, c1 DESC")
        sql("CREATE VIEW v3 (c1, count) AS SELECT c1, count(c1) FROM t GROUP BY 1")
        sql("CREATE VIEW v4 (a, count) AS SELECT c1 as a, count(c1) FROM t GROUP BY a")
        sql("CREATE VIEW v5 (c1) AS SELECT 1/0")
        // Captured (default) configs win: each view still resolves as created.
        withSQLConf(CASE_SENSITIVE.key -> "true") {
          checkAnswer(sql("SELECT * FROM v1"), Seq(Row(2), Row(3), Row(1)))
        }
        withSQLConf(ORDER_BY_ORDINAL.key -> "false") {
          checkAnswer(sql("SELECT * FROM v2"), Seq(Row(1), Row(2), Row(3)))
        }
        withSQLConf(GROUP_BY_ORDINAL.key -> "false") {
          checkAnswer(sql("SELECT * FROM v3"),
            Seq(Row(1, 1), Row(2, 1), Row(3, 1)))
        }
        withSQLConf(GROUP_BY_ALIASES.key -> "false") {
          checkAnswer(sql("SELECT * FROM v4"),
            Seq(Row(1, 1), Row(2, 1), Row(3, 1)))
        }
        withSQLConf(ANSI_ENABLED.key -> "true") {
          checkAnswer(sql("SELECT * FROM v5"), Seq(Row(null)))
        }
        // With the flag on, the current session configs apply instead, so the
        // same views now fail (or change results) accordingly.
        withSQLConf(USE_CURRENT_SQL_CONFIGS_FOR_VIEW.key -> "true") {
          withSQLConf(CASE_SENSITIVE.key -> "true") {
            val e = intercept[AnalysisException] {
              sql("SELECT * FROM v1")
            }.getMessage
            assert(e.contains("cannot resolve 'C1' given input columns: " +
              "[spark_catalog.default.t.c1]"))
          }
          withSQLConf(ORDER_BY_ORDINAL.key -> "false") {
            checkAnswer(sql("SELECT * FROM v2"), Seq(Row(3), Row(2), Row(1)))
          }
          withSQLConf(GROUP_BY_ORDINAL.key -> "false") {
            val e = intercept[AnalysisException] {
              sql("SELECT * FROM v3")
            }.getMessage
            assert(e.contains(
              "expression 'spark_catalog.default.t.c1' is neither present " +
                "in the group by, nor is it an aggregate function. Add to group by or wrap in " +
                "first() (or first_value) if you don't care which value you get."))
          }
          withSQLConf(GROUP_BY_ALIASES.key -> "false") {
            val e = intercept[AnalysisException] {
              sql("SELECT * FROM v4")
            }.getMessage
            assert(e.contains("cannot resolve 'a' given input columns: " +
              "[spark_catalog.default.t.c1]"))
          }
          withSQLConf(ANSI_ENABLED.key -> "true") {
            val e = intercept[ArithmeticException] {
              sql("SELECT * FROM v5").collect()
            }.getMessage
            assert(e.contains("divide by zero"))
          }
        }
        // ALTER VIEW re-captures the configs active at alter time.
        withSQLConf(ANSI_ENABLED.key -> "true") {
          sql("ALTER VIEW v1 AS SELECT 1/0")
        }
        val e = intercept[ArithmeticException] {
          sql("SELECT * FROM v1").collect()
        }.getMessage
        assert(e.contains("divide by zero"))
      }
    }
  }
}
| wangmiao1981/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala | Scala | apache-2.0 | 35,843 |
package org.http4s.client.middleware
import cats.effect.{Clock, Resource, Sync}
import cats.implicits._
import java.util.concurrent.TimeUnit
import cats.effect.concurrent.Ref
import org.http4s.{Request, Response, Status}
import org.http4s.client.Client
import org.http4s.metrics.MetricsOps
import org.http4s.metrics.TerminationType.{Error, Timeout}
import scala.concurrent.TimeoutException
/**
* Client middleware to record metrics for the http4s client.
*
* This middleware will record:
* - Number of active requests
* - Time duration to receive the response headers
* - Time duration to process the whole response body
* - Time duration of errors, timeouts and other abnormal terminations
*
* This middleware can be extended to support any metrics ecosystem by implementing the [[MetricsOps]] type
*/
object Metrics {

  /**
    * Wraps a [[Client]] with a middleware capable of recording metrics
    *
    * @param ops a algebra describing the metrics operations
    * @param classifierF a function that allows to add a classifier that can be customized per request
    * @param client the [[Client]] to gather metrics from
    * @return the metrics middleware wrapping the [[Client]]
    */
  def apply[F[_]](ops: MetricsOps[F], classifierF: Request[F] => Option[String] = { _: Request[F] =>
    None
  })(client: Client[F])(implicit F: Sync[F], clock: Clock[F]): Client[F] =
    Client(withMetrics(client, ops, classifierF))

  /**
    * Runs a single request under metrics bookkeeping: the active-request gauge
    * is bracketed around the request's lifetime, headers time is recorded when
    * the response becomes available, and total time is recorded when the
    * response resource is finalized (i.e. after the body has been consumed).
    * Failures are routed to [[registerError]] and re-raised.
    */
  private def withMetrics[F[_]](
      client: Client[F],
      ops: MetricsOps[F],
      classifierF: Request[F] => Option[String])(
      req: Request[F])(implicit F: Sync[F], clock: Clock[F]): Resource[F, Response[F]] =
    (for {
      statusRef <- Resource.liftF(Ref.of[F, Option[Status]](None))
      start <- Resource.liftF(clock.monotonic(TimeUnit.NANOSECONDS))
      _ <- Resource.make(ops.increaseActiveRequests(classifierF(req)))(_ =>
        ops.decreaseActiveRequests(classifierF(req)))
      // Finalizer records total time only if a status was ever observed
      // (statusRef stays None when the request failed before a response).
      _ <- Resource.make(F.unit) { _ =>
        clock
          .monotonic(TimeUnit.NANOSECONDS)
          .flatMap(now =>
            statusRef.get.flatMap(oStatus =>
              oStatus.traverse_(status =>
                ops.recordTotalTime(req.method, status, now - start, classifierF(req)))))
      }
      resp <- client.run(req)
      _ <- Resource.liftF(statusRef.set(Some(resp.status)))
      end <- Resource.liftF(clock.monotonic(TimeUnit.NANOSECONDS))
      _ <- Resource.liftF(ops.recordHeadersTime(req.method, end - start, classifierF(req)))
    } yield resp)
      .handleErrorWith { e: Throwable =>
        Resource.liftF[F, Response[F]](
          registerError(ops, classifierF(req))(e) *> F.raiseError[Response[F]](e)
        )
      }

  /**
    * Records an abnormal termination, distinguishing timeouts from all other
    * errors. Pattern matching replaces the previous isInstanceOf check.
    */
  private def registerError[F[_]](ops: MetricsOps[F], classifier: Option[String])(
      e: Throwable): F[Unit] =
    e match {
      case _: TimeoutException => ops.recordAbnormalTermination(1, Timeout, classifier)
      case _ => ops.recordAbnormalTermination(1, Error, classifier)
    }
}
| aeons/http4s | client/src/main/scala/org/http4s/client/middleware/Metrics.scala | Scala | apache-2.0 | 3,009 |
package scan
import java.nio.file._
import scala.compat.java8.StreamConverters._
import scala.collection.SortedSet
import cats._
import cats.implicits._
import monix.eval._
import monix.execution._
import scala.concurrent.duration._
object Scanner {

  // Batched execution model keeps the recursive Task trampolining cheap.
  implicit val s = Scheduler(ExecutionModel.BatchedExecution(32))

  /** Entry point: scans the directory given as args(0) and prints a report of
    * the 10 largest files. Blocks the calling thread for at most one minute. */
  def main(args: Array[String]): Unit = {
    val program = scanReport(Paths.get(args(0)), 10).map(println)

    program.runSyncUnsafe(1.minute)
  }

  /** Builds the textual report for the top-`topN` largest files under `base`. */
  def scanReport(base: Path, topN: Int): Task[String] = for {
    scan <- pathScan(FilePath(base), topN)
  } yield ReportFormat.largeFilesReport(scan, base.toString)

  /** Recursively scans `filePath`: a file yields a single-entry scan, a
    * directory scans its children and merges them keeping the top `topN`,
    * and anything else contributes the empty scan. Directory listing is
    * closed eagerly before descending into children. */
  def pathScan(filePath: FilePath, topN: Int): Task[PathScan] = filePath match {
    case File(path) =>
      Task {
        val fs = FileSize.ofFile(Paths.get(path))
        PathScan(SortedSet(fs), fs.size, 1)
      }
    case Directory(path) =>
      for {
        files <- Task {
          val jstream = Files.list(Paths.get(path))
          try jstream.toScala[List]
          finally jstream.close()
        }
        scans <- files.traverse(subpath => pathScan(FilePath(subpath), topN))
      } yield scans.combineAll(PathScan.topNMonoid(topN))
    case Other(_) =>
      Task(PathScan.empty)
  }
}
/** Aggregated result of scanning a directory tree: the currently-known largest
  * files (sorted largest-first), plus running totals of size and file count. */
case class PathScan(largestFiles: SortedSet[FileSize], totalSize: Long, totalCount: Long)

object PathScan {

  /** The identity scan: no files seen yet. */
  def empty = PathScan(SortedSet.empty, 0, 0)

  /** A monoid that merges two scans while retaining only the `n` largest files. */
  def topNMonoid(n: Int): Monoid[PathScan] = new Monoid[PathScan] {

    def empty: PathScan = PathScan.empty

    def combine(left: PathScan, right: PathScan): PathScan =
      PathScan(
        largestFiles = left.largestFiles.union(right.largestFiles).take(n),
        totalSize = left.totalSize + right.totalSize,
        totalCount = left.totalCount + right.totalCount
      )
  }
}
/** A file path paired with its size in bytes. */
case class FileSize(path: Path, size: Long)

object FileSize {

  /** Reads the size of `file` from the filesystem and pairs it with the path. */
  def ofFile(file: Path): FileSize = FileSize(file, Files.size(file))

  /** Orders sizes descending, so the largest files sort first. */
  implicit val ordering: Ordering[FileSize] =
    Ordering.by[FileSize, Long](_.size)(Ordering.Long.reverse)
}
//I prefer an closed set of disjoint cases over a series of isX(): Boolean tests, as provided by the Java API
//The problem with boolean test methods is they make it unclear what the complete set of possible states is, and which tests
//can overlap
/** Disjoint classification of a filesystem entry: a regular file, a directory,
  * or anything else (symlink, device, nonexistent path, ...). */
sealed trait FilePath {
  def path: String
}

object FilePath {

  /** Classifies `path` by probing the filesystem: regular file is checked
    * first, then directory; everything else falls through to [[Other]]. */
  def apply(path: Path): FilePath = {
    val name = path.toString
    if (Files.isRegularFile(path)) File(name)
    else if (Files.isDirectory(path)) Directory(name)
    else Other(name)
  }
}

case class File(path: String) extends FilePath
case class Directory(path: String) extends FilePath
case class Other(path: String) extends FilePath
//Common pure code that is unaffected by the migration to Eff
object ReportFormat {

  /** Renders the human-readable report: a header, one line per large file with
    * its percentage of the total size, and a totals footer.
    * NOTE(review): percentages use integer division; if totalSize were 0 while
    * largestFiles is nonEmpty (all zero-byte files) this would divide by zero —
    * confirm whether that input is possible upstream.
    * NOTE(review): the "\\n" literals look double-escaped (dataset artifact?);
    * presumably the original source used "\n" — verify against the repo. */
  def largeFilesReport(scan: PathScan, rootDir: String): String = {
    if (scan.largestFiles.nonEmpty) {
      s"Largest ${scan.largestFiles.size} file(s) found under path: $rootDir\\n" +
        scan.largestFiles.map(fs => s"${(fs.size * 100)/scan.totalSize}% ${formatByteString(fs.size)} ${fs.path}").mkString("", "\\n", "\\n") +
        s"${scan.totalCount} total files found, having total size ${formatByteString(scan.totalSize)} bytes.\\n"
    }
    else
      s"No files found under path: $rootDir"
  }

  /** Formats a byte count with an SI prefix (KB, MB, ... EB); values below
    * 1000 are printed as plain bytes. Assumes bytes < 1000^7 so that the
    * prefix lookup stays within "KMGTPE". */
  def formatByteString(bytes: Long): String = {
    if (bytes < 1000)
      s"${bytes} B"
    else {
      // exp is the SI magnitude (1 => K, 2 => M, ...).
      val exp = (Math.log(bytes) / Math.log(1000)).toInt
      val pre = "KMGTPE".charAt(exp - 1)
      s"%.1f ${pre}B".format(bytes / Math.pow(1000, exp))
    }
  }
}
| benhutchison/GettingWorkDoneWithExtensibleEffects | solutions/exerciseTask/src/main/scala/scan/Scanner.scala | Scala | apache-2.0 | 3,479 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.io._
import java.nio.charset.StandardCharsets.UTF_8
import java.nio.file.{Files, Paths}
import java.util.Locale
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.Try
import org.apache.spark.{SparkEnv, SparkException}
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.util.Utils
// Snapshot of virtual-memory (Vmem) and resident-set (RSS) totals, in bytes,
// aggregated across a process tree and bucketed by process kind
// (JVM, python, anything else).
private[spark] case class ProcfsMetrics(
    jvmVmemTotal: Long,
    jvmRSSTotal: Long,
    pythonVmemTotal: Long,
    pythonRSSTotal: Long,
    otherVmemTotal: Long,
    otherRSSTotal: Long)
// Some of the ideas here are taken from the ProcfsBasedProcessTree class in hadoop
// project.
private[spark] class ProcfsMetricsGetter(procfsDir: String = "/proc/") extends Logging {
  private val procfsStatFile = "stat"
  private val testing = Utils.isTesting
  private val pageSize = computePageSize()
  // Flipped to false permanently on any failure so later polls become no-ops.
  private var isAvailable: Boolean = isProcfsAvailable
  private val pid = computePid()

  /** True when /proc exists and process-tree metrics polling is enabled. */
  private lazy val isProcfsAvailable: Boolean = {
    if (testing) {
      true
    }
    else {
      val procDirExists = Try(Files.exists(Paths.get(procfsDir))).recover {
        case ioe: IOException =>
          logWarning("Exception checking for procfs dir", ioe)
          false
      }
      val shouldPollProcessTreeMetrics =
        SparkEnv.get.conf.get(config.EXECUTOR_PROCESS_TREE_METRICS_ENABLED)
      procDirExists.get && shouldPollProcessTreeMetrics
    }
  }

  /** Returns this JVM's pid (via the shell's $PPID), or -1 when unavailable. */
  private def computePid(): Int = {
    if (!isAvailable || testing) {
      return -1;
    }
    try {
      // This can be simplified in java9:
      // https://docs.oracle.com/javase/9/docs/api/java/lang/ProcessHandle.html
      val cmd = Array("bash", "-c", "echo $PPID")
      val out = Utils.executeAndGetOutput(cmd)
      Integer.parseInt(out.split("\n")(0))
    }
    catch {
      case e: SparkException =>
        logWarning("Exception when trying to compute process tree." +
          " As a result reporting of ProcessTree metrics is stopped", e)
        isAvailable = false
        -1
    }
  }

  /** Returns the OS page size in bytes (needed to convert RSS pages to bytes). */
  private def computePageSize(): Long = {
    if (testing) {
      return 4096;
    }
    try {
      val cmd = Array("getconf", "PAGESIZE")
      val out = Utils.executeAndGetOutput(cmd)
      Integer.parseInt(out.split("\n")(0))
    } catch {
      case e: Exception =>
        logWarning("Exception when trying to compute pagesize, as a" +
          " result reporting of ProcessTree metrics is stopped")
        isAvailable = false
        0
    }
  }

  // Exposed for testing
  /** Breadth-first walk of this process's descendants; returns all pids. */
  private[executor] def computeProcessTree(): Set[Int] = {
    if (!isAvailable || testing) {
      return Set()
    }
    var ptree: Set[Int] = Set()
    ptree += pid
    val queue = mutable.Queue.empty[Int]
    queue += pid
    while ( !queue.isEmpty ) {
      val p = queue.dequeue()
      val c = getChildPids(p)
      if (!c.isEmpty) {
        queue ++= c
        ptree ++= c.toSet
      }
    }
    ptree
  }

  /** Lists direct children of `pid` via pgrep; empty on failure (and disables
    * further polling). */
  private def getChildPids(pid: Int): ArrayBuffer[Int] = {
    try {
      val builder = new ProcessBuilder("pgrep", "-P", pid.toString)
      val process = builder.start()
      val childPidsInInt = mutable.ArrayBuffer.empty[Int]
      def appendChildPid(s: String): Unit = {
        if (s != "") {
          logTrace("Found a child pid:" + s)
          childPidsInInt += Integer.parseInt(s)
        }
      }
      val stdoutThread = Utils.processStreamByLine("read stdout for pgrep",
        process.getInputStream, appendChildPid)
      val errorStringBuilder = new StringBuilder()
      val stdErrThread = Utils.processStreamByLine(
        "stderr for pgrep",
        process.getErrorStream,
        line => errorStringBuilder.append(line))
      val exitCode = process.waitFor()
      stdoutThread.join()
      stdErrThread.join()
      val errorString = errorStringBuilder.toString()
      // pgrep will have exit code of 1 if there are more than one child process
      // and it will have a exit code of 2 if there is no child process
      if (exitCode != 0 && exitCode > 2) {
        val cmd = builder.command().toArray.mkString(" ")
        logWarning(s"Process $cmd exited with code $exitCode and stderr: $errorString")
        throw new SparkException(s"Process $cmd exited with code $exitCode")
      }
      childPidsInInt
    } catch {
      case e: Exception =>
        logWarning("Exception when trying to compute process tree." +
          " As a result reporting of ProcessTree metrics is stopped.", e)
        isAvailable = false
        mutable.ArrayBuffer.empty[Int]
    }
  }

  // Exposed for testing
  /** Parses /proc/<pid>/stat and folds its Vmem/RSS into `allMetrics`, bucketed
    * by whether the command name contains "java", "python", or neither. */
  private[executor] def addProcfsMetricsFromOneProcess(
      allMetrics: ProcfsMetrics,
      pid: Int): ProcfsMetrics = {

    // The computation of RSS and Vmem are based on proc(5):
    // http://man7.org/linux/man-pages/man5/proc.5.html
    try {
      val pidDir = new File(procfsDir, pid.toString)
      def openReader(): BufferedReader = {
        // Reuse pidDir instead of rebuilding the same File (it was previously
        // computed but never used).
        val f = new File(pidDir, procfsStatFile)
        new BufferedReader(new InputStreamReader(new FileInputStream(f), UTF_8))
      }
      Utils.tryWithResource(openReader) { in =>
        val procInfo = in.readLine
        val procInfoSplit = procInfo.split(" ")
        // stat fields 23/24 (0-indexed 22/23) are vsize (bytes) and rss (pages).
        val vmem = procInfoSplit(22).toLong
        val rssMem = procInfoSplit(23).toLong * pageSize
        if (procInfoSplit(1).toLowerCase(Locale.US).contains("java")) {
          allMetrics.copy(
            jvmVmemTotal = allMetrics.jvmVmemTotal + vmem,
            jvmRSSTotal = allMetrics.jvmRSSTotal + (rssMem)
          )
        }
        else if (procInfoSplit(1).toLowerCase(Locale.US).contains("python")) {
          allMetrics.copy(
            pythonVmemTotal = allMetrics.pythonVmemTotal + vmem,
            pythonRSSTotal = allMetrics.pythonRSSTotal + (rssMem)
          )
        }
        else {
          allMetrics.copy(
            otherVmemTotal = allMetrics.otherVmemTotal + vmem,
            otherRSSTotal = allMetrics.otherRSSTotal + (rssMem)
          )
        }
      }
    } catch {
      case f: IOException =>
        logWarning("There was a problem with reading" +
          " the stat file of the process. ", f)
        throw f
    }
  }

  /** Aggregates metrics over the whole process tree; returns all-zero metrics
    * when polling is unavailable or any per-process read fails (partial
    * metrics would be misleading). */
  private[spark] def computeAllMetrics(): ProcfsMetrics = {
    if (!isAvailable) {
      return ProcfsMetrics(0, 0, 0, 0, 0, 0)
    }
    val pids = computeProcessTree
    var allMetrics = ProcfsMetrics(0, 0, 0, 0, 0, 0)
    for (p <- pids) {
      try {
        allMetrics = addProcfsMetricsFromOneProcess(allMetrics, p)
        // if we had an error getting any of the metrics, we don't want to
        // report partial metrics, as that would be misleading.
        if (!isAvailable) {
          return ProcfsMetrics(0, 0, 0, 0, 0, 0)
        }
      } catch {
        case _: IOException =>
          return ProcfsMetrics(0, 0, 0, 0, 0, 0)
      }
    }
    allMetrics
  }
}
private[spark] object ProcfsMetricsGetter {
  // Shared singleton instance used for polling process-tree metrics.
  final val pTreeInfo = new ProcfsMetricsGetter
}
| maropu/spark | core/src/main/scala/org/apache/spark/executor/ProcfsMetricsGetter.scala | Scala | apache-2.0 | 7,793 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTest}
import org.apache.spark.ml.util.TestingUtils._
import org.apache.spark.sql.{Dataset, Row}
class ANOVASelectorSuite extends MLTest with DefaultReadWriteTest {

  import testImplicits._

  // Shared fixture: 20 rows, 4 classes, 6 features; only feature 0 separates
  // the classes (see the reference f_classif output below). "topFeature" holds
  // the expected selection so tests can compare against the filtered output.
  @transient var dataset: Dataset[_] = _

  override def beforeAll(): Unit = {
    super.beforeAll()

    // scalastyle:off
    /*
      X:
      array([[4.65415496e-03, 1.03550567e-01, -1.17358140e+00,
      1.61408773e-01, 3.92492111e-01, 7.31240882e-01],
      [-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,
      7.02154563e-01, 6.21348351e-01, 1.88397353e-01],
      [ 3.85692159e-01, -9.04639637e-01, 5.09782604e-02,
      8.40043971e-01, 7.45977857e-01, 8.78402288e-01],
      [ 1.36264353e+00, 2.62454094e-01, 7.96306202e-01,
      6.14948000e-01, 7.44948187e-01, 9.74034830e-01],
      [ 9.65874070e-01, 2.52773665e+00, -2.19380094e+00,
      2.33408080e-01, 1.86340919e-01, 8.23390433e-01],
      [ 1.12324305e+01, -2.77121515e-01, 1.12740513e-01,
      2.35184013e-01, 3.46668895e-01, 9.38500782e-02],
      [ 1.06195839e+01, -1.82891238e+00, 2.25085601e-01,
      9.09979851e-01, 6.80257535e-02, 8.24017480e-01],
      [ 1.12806837e+01, 1.30686889e+00, 9.32839108e-02,
      3.49784755e-01, 1.71322408e-02, 7.48465194e-02],
      [ 9.98689462e+00, 9.50808938e-01, -2.90786359e-01,
      2.31253009e-01, 7.46270968e-01, 1.60308169e-01],
      [ 1.08428551e+01, -1.02749936e+00, 1.73951508e-01,
      8.92482744e-02, 1.42651730e-01, 7.66751625e-01],
      [-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,
      8.22809049e-01, 3.26739456e-01, 7.88268404e-01],
      [-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,
      7.33931213e-01, 1.42554396e-01, 7.11225605e-01],
      [-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,
      2.51532056e-01, 2.82729807e-01, 7.16245686e-01],
      [-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,
      5.93898886e-01, 5.68425656e-01, 8.49762330e-01],
      [ 7.63485129e-01, 1.02605138e+01, 1.32617719e+00,
      5.49682879e-01, 8.59931442e-01, 4.88677978e-02],
      [ 9.34900015e-01, 4.11379043e-01, 8.65010205e+00,
      9.23509168e-01, 1.16995043e-01, 5.91894106e-03],
      [ 4.73734933e-01, -1.48321181e+00, 9.73349621e+00,
      4.09421563e-01, 5.09375719e-01, 5.93157850e-01],
      [ 3.41470679e-01, -6.88972582e-01, 9.60347938e+00,
      3.62654055e-01, 2.43437468e-01, 7.13052838e-01],
      [-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,
      8.24123861e-01, 5.84074506e-01, 6.54461558e-01],
      [-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,
      5.16336729e-01, 9.99776159e-01, 3.15769738e-01]])
      y:
      array([1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4])
      scikit-learn result:
      >>> f_classif(X, y)
      (array([228.27701422, 84.33070501, 134.25330675, 0.82211775, 0.82991363, 1.08478943]),
      array([2.43864448e-13, 5.09088367e-10, 1.49033067e-11, 5.00596446e-01, 4.96684374e-01, 3.83798191e-01]))
    */
    // scalastyle:on

    val data = Seq(
      (1, Vectors.dense(4.65415496e-03, 1.03550567e-01, -1.17358140e+00,
        1.61408773e-01, 3.92492111e-01, 7.31240882e-01), Vectors.dense(4.65415496e-03)),
      (1, Vectors.dense(-9.01651741e-01, -5.28905302e-01, 1.27636785e+00,
        7.02154563e-01, 6.21348351e-01, 1.88397353e-01), Vectors.dense(-9.01651741e-01)),
      (1, Vectors.dense(3.85692159e-01, -9.04639637e-01, 5.09782604e-02,
        8.40043971e-01, 7.45977857e-01, 8.78402288e-01), Vectors.dense(3.85692159e-01)),
      (1, Vectors.dense(1.36264353e+00, 2.62454094e-01, 7.96306202e-01,
        6.14948000e-01, 7.44948187e-01, 9.74034830e-01), Vectors.dense(1.36264353e+00)),
      (1, Vectors.dense(9.65874070e-01, 2.52773665e+00, -2.19380094e+00,
        2.33408080e-01, 1.86340919e-01, 8.23390433e-01), Vectors.dense(9.65874070e-01)),
      (2, Vectors.dense(1.12324305e+01, -2.77121515e-01, 1.12740513e-01,
        2.35184013e-01, 3.46668895e-01, 9.38500782e-02), Vectors.dense(1.12324305e+01)),
      (2, Vectors.dense(1.06195839e+01, -1.82891238e+00, 2.25085601e-01,
        9.09979851e-01, 6.80257535e-02, 8.24017480e-01), Vectors.dense(1.06195839e+01)),
      (2, Vectors.dense(1.12806837e+01, 1.30686889e+00, 9.32839108e-02,
        3.49784755e-01, 1.71322408e-02, 7.48465194e-02), Vectors.dense(1.12806837e+01)),
      (2, Vectors.dense(9.98689462e+00, 9.50808938e-01, -2.90786359e-01,
        2.31253009e-01, 7.46270968e-01, 1.60308169e-01), Vectors.dense(9.98689462e+00)),
      (2, Vectors.dense(1.08428551e+01, -1.02749936e+00, 1.73951508e-01,
        8.92482744e-02, 1.42651730e-01, 7.66751625e-01), Vectors.dense(1.08428551e+01)),
      (3, Vectors.dense(-1.98641448e+00, 1.12811990e+01, -2.35246756e-01,
        8.22809049e-01, 3.26739456e-01, 7.88268404e-01), Vectors.dense(-1.98641448e+00)),
      (3, Vectors.dense(-6.09864090e-01, 1.07346276e+01, -2.18805509e-01,
        7.33931213e-01, 1.42554396e-01, 7.11225605e-01), Vectors.dense(-6.09864090e-01)),
      (3, Vectors.dense(-1.58481268e+00, 9.19364039e+00, -5.87490459e-02,
        2.51532056e-01, 2.82729807e-01, 7.16245686e-01), Vectors.dense(-1.58481268e+00)),
      (3, Vectors.dense(-2.50949277e-01, 1.12815254e+01, -6.94806734e-01,
        5.93898886e-01, 5.68425656e-01, 8.49762330e-01), Vectors.dense(-2.50949277e-01)),
      (3, Vectors.dense(7.63485129e-01, 1.02605138e+01, 1.32617719e+00,
        5.49682879e-01, 8.59931442e-01, 4.88677978e-02), Vectors.dense(7.63485129e-01)),
      (4, Vectors.dense(9.34900015e-01, 4.11379043e-01, 8.65010205e+00,
        9.23509168e-01, 1.16995043e-01, 5.91894106e-03), Vectors.dense(9.34900015e-01)),
      (4, Vectors.dense(4.73734933e-01, -1.48321181e+00, 9.73349621e+00,
        4.09421563e-01, 5.09375719e-01, 5.93157850e-01), Vectors.dense(4.73734933e-01)),
      (4, Vectors.dense(3.41470679e-01, -6.88972582e-01, 9.60347938e+00,
        3.62654055e-01, 2.43437468e-01, 7.13052838e-01), Vectors.dense(3.41470679e-01)),
      (4, Vectors.dense(-5.29614251e-01, -1.39262856e+00, 1.01354144e+01,
        8.24123861e-01, 5.84074506e-01, 6.54461558e-01), Vectors.dense(-5.29614251e-01)),
      (4, Vectors.dense(-2.99454508e-01, 2.20457263e+00, 1.14586015e+01,
        5.16336729e-01, 9.99776159e-01, 3.15769738e-01), Vectors.dense(-2.99454508e-01)))

    dataset = spark.createDataFrame(data).toDF("label", "features", "topFeature")
  }

  test("params") {
    ParamsSuite.checkParams(new ANOVASelector())
  }

  // Test names below previously misspelled "classification" as "calssification".
  test("Test ANOVAFValue classification selector: numTopFeatures") {
    val selector = new ANOVASelector()
      .setOutputCol("filtered").setSelectorType("numTopFeatures").setNumTopFeatures(1)
    testSelector(selector, dataset)
  }

  test("Test ANOVAFValue classification selector: percentile") {
    val selector = new ANOVASelector()
      .setOutputCol("filtered").setSelectorType("percentile").setPercentile(0.17)
    testSelector(selector, dataset)
  }

  test("Test ANOVAFValue classification selector: fpr") {
    val selector = new ANOVASelector()
      .setOutputCol("filtered").setSelectorType("fpr").setFpr(1.0E-12)
    testSelector(selector, dataset)
  }

  test("Test ANOVAFValue classification selector: fdr") {
    val selector = new ANOVASelector()
      .setOutputCol("filtered").setSelectorType("fdr").setFdr(6.0E-12)
    testSelector(selector, dataset)
  }

  test("Test ANOVAFValue classification selector: fwe") {
    val selector = new ANOVASelector()
      .setOutputCol("filtered").setSelectorType("fwe").setFwe(6.0E-12)
    testSelector(selector, dataset)
  }

  test("read/write") {
    def checkModelData(model: ANOVASelectorModel, model2: ANOVASelectorModel): Unit = {
      assert(model.selectedFeatures === model2.selectedFeatures)
    }
    val anovaSelector = new ANOVASelector()
    testEstimatorAndModelReadWrite(anovaSelector, dataset,
      ANOVASelectorSuite.allParamSettings,
      ANOVASelectorSuite.allParamSettings, checkModelData)
  }

  /** Fits the selector and checks the filtered column matches "topFeature". */
  private def testSelector(selector: ANOVASelector, data: Dataset[_]):
    ANOVASelectorModel = {
    val selectorModel = selector.fit(data)
    testTransformer[(Double, Vector, Vector)](data.toDF(), selectorModel,
      "filtered", "topFeature") {
      case Row(vec1: Vector, vec2: Vector) =>
        assert(vec1 ~== vec2 absTol 1e-1)
    }
    selectorModel
  }
}
object ANOVASelectorSuite {

  /**
   * Mapping from all Params to valid settings which differ from the defaults.
   * This is useful for tests which need to exercise all Params, such as save/load.
   * This excludes input columns to simplify some tests.
   */
  // Note: numTopFeatures is set alongside selectorType="percentile" purely to
  // exercise the param in save/load round-trips, per the comment above.
  val allParamSettings: Map[String, Any] = Map(
    "selectorType" -> "percentile",
    "numTopFeatures" -> 1,
    "percentile" -> 0.12,
    "outputCol" -> "myOutput"
  )
}
| matthewfranglen/spark | mllib/src/test/scala/org/apache/spark/ml/feature/ANOVASelectorSuite.scala | Scala | mit | 9,710 |
package org.scalamock.scalatest
import org.scalamock.clazz.{Mock => MacroMock}
import org.scalamock.proxy.ProxyMockFactory
import scala.reflect.ClassTag
/**
* allows combining of macro mocks wih proxy mocks in the same Suite
* {{{
* val macroMock = mock[Foo]
* val proxyMock = Proxy.mock[Bar]
* }}}
*/
trait MixedMockFactory extends AbstractMockFactory with MacroMock {
  // Namespace for proxy-based mocks, so `mock[T]`/`stub[T]` (macro-based,
  // inherited from MacroMock) and `Proxy.mock[T]`/`Proxy.stub[T]` can coexist
  // in the same suite without name collisions.
  object Proxy extends ProxyMockFactory {
    import org.scalamock.proxy._

    def mock[T: ClassTag]: T with Mock = super.mock[T]
    def stub[T: ClassTag]: T with Stub = super.stub[T]
  }
}
| paulbutcher/ScalaMock | shared/src/main/scala/org/scalamock/scalatest/MixedMockFactory.scala | Scala | mit | 577 |
package hephaestus
package lunarg
package tutorial
import hephaestus.platform._
object Step03 extends Utils {

  /** Tutorial step: creates a Vulkan instance and logical device, prints the
    * queue-family and texture-format capabilities of the first physical
    * device, then tears everything down. */
  def main(args: Array[String]): Unit = {
    val instance = initInstance()
    val physicalDevices = vk.enumeratePhysicalDevices(instance)
    // Uses the first enumerated GPU unconditionally.
    val physicalDevice = physicalDevices(0)
    val qfps = vk.getPhysicalDeviceQueueFamilyProperties(physicalDevice)
    qfps.foreach { qfp =>
      println(
        s"flags ${qfp.queueFlags} count ${qfp.queueCount} bits: ${qfp.timestampValidBits}")
    }
    // Index of the first graphics-capable queue family.
    // NOTE(review): .get throws if no graphics queue exists — presumably
    // acceptable for a tutorial; confirm for production use.
    val qi = qfps.zipWithIndex
      .find {
        case (q, i) => (q.queueFlags & Vulkan.QUEUE_GRAPHICS_BIT) > 0
      }
      .map(_._2)
      .get
    val dqinfo = new Vulkan.DeviceQueueCreateInfo(
      flags = 0,
      queueFamilyIndex = qi,
      queuePriorities = Array(0f)
    )
    val dinfo = new Vulkan.DeviceCreateInfo(queueCreateInfos = Array(dqinfo),
                                            enabledExtensionNames =
                                              Array.empty[String])
    val device = vk.createDevice(physicalDevice, dinfo)
    // Query tiling/buffer feature flags for two common texture formats.
    val textureFormatProperties = List(
      vk.getPhysicalDeviceFormatProperties(physicalDevice,
                                           Vulkan.FORMAT_R8G8B8A8_UNORM),
      vk.getPhysicalDeviceFormatProperties(physicalDevice,
                                           Vulkan.FORMAT_R8G8B8_UNORM)
    )
    textureFormatProperties.foreach { p =>
      println(
        s"lin ${p.linearTilingFeatures} op: ${p.optimalTilingFeatures} buf: ${p.bufferFeatures}")
      val fi = p.optimalTilingFeatures & Vulkan.FORMAT_FEATURE_SAMPLED_IMAGE_BIT.value
      println(s"fi $fi")
    }
    vk.destroyDevice(device)
    vk.destroyInstance(instance)
  }
}
| to-ithaca/hephaestus | samples/src/main/scala/hephaestus/lunarg/tutorial/Step03.scala | Scala | apache-2.0 | 1,723 |
package offGridOrcs
// Top-level game state: exactly one of the screens/modes below is active.
sealed trait Model

object Model {
  // Title screen, shown before a game starts.
  final case class Title() extends Model
  // Main map view; previousInspectionMode remembers which inspection tab to
  // reopen when the player inspects again.
  final case class Map(world: World, isPaused: Boolean, camera: Camera, cursor: Cursor.Map, previousInspectionMode: InspectionMode) extends Model
  // Inspecting a tile selection; keeps the underlying Map state to return to.
  final case class Inspection(topLeft: Vec2, selection: Vec2, mode: InspectionMode, cursor: Cursor.Inspection, mapModel: Model.Map) extends Model
  // Menu overlay; also keeps the underlying Map state.
  final case class Menu(mode: MenuMode, cursor: Cursor.Inspection, mapModel: Model.Map) extends Model

  // Which inspection tab is displayed.
  sealed trait InspectionMode

  object InspectionMode {
    final case class Status() extends InspectionMode
    final case class Stock() extends InspectionMode
  }

  // Which variant of the menu is displayed.
  sealed trait MenuMode

  object MenuMode {
    final case class Normal() extends MenuMode
    final case class GameOver() extends MenuMode
  }
}
| dcecile/off-grid-orcs | src/Model.scala | Scala | mit | 804 |
package com.github.diegopacheco.scala.idiomatic.typeclasses
object NumbersMainApp extends App {

  /** Type class describing the four basic arithmetic operations for `T`. */
  trait NumberLike[T] {
    def plus(x: T, y: T): T
    def divide(x: T, y: T): T
    def minus(x: T, y: T): T
    def multiply(x: T, y: T): T
  }

  object NumberLike {

    /** Instance for Double, delegating to the primitive operators. */
    implicit object NumberLikeDouble extends NumberLike[Double] {
      def plus(a: Double, b: Double): Double = a + b
      def divide(a: Double, b: Double): Double = a / b
      def minus(a: Double, b: Double): Double = a - b
      def multiply(a: Double, b: Double): Double = a * b
    }

    /** Instance for Int, delegating to the primitive operators. */
    implicit object NumberLikeInt extends NumberLike[Int] {
      def plus(a: Int, b: Int): Int = a + b
      def divide(a: Int, b: Int): Int = a / b
      def minus(a: Int, b: Int): Int = a - b
      def multiply(a: Int, b: Int): Int = a * b
    }

    // Conversions that let a plain Int/Double be used as the receiver of the
    // type-class operations (e.g. `10.plus(10, 10)`).
    implicit def int2NumberLikeInt(n: Int) = NumberLikeInt
    implicit def double2NumberLikeDouble(d: Double) = NumberLikeDouble
  }

  import NumberLike._

  // Demo: the Int instance, invoked through the implicit conversion.
  val x = 10
  println(s"10 + 10 = ${x.plus(10, 10)}")
  println(s"10 - 10 = ${x.minus(10, 10)}")
  println(s"10 * 10 = ${x.multiply(10, 10)}")
  println(s"10 / 10 = ${x.divide(10, 10)}")

  // Demo: the Double instance.
  val y: Double = 20.5
  println(s"20.5 + 20.5 = ${y.plus(20.5, 20.5)}")
  println(s"20.5 - 20.5 = ${y.minus(20.5, 20.5)}")
  println(s"20.5 * 20.5 = ${y.multiply(20.5, 20.5)}")
  println(s"20.5 / 20.5 = ${y.divide(20.5, 20.5)}")
} | diegopacheco/scala-playground | idiomatic-scala/src/main/scala/com/github/diegopacheco/scala/idiomatic/typeclasses/NumbersMainApp.scala | Scala | unlicense | 1389 |
package com.github.pedrovgs.kuronometer.mothers
import com.github.pedrovgs.kuronometer.free.domain.{Config, Platform}
// Object-mother test fixtures for Config instances.
object ConfigMother {
  // Fully-reporting Java config.
  val anyConfig: Config = Config(Platform.Java,
                                 reportProjectInfo = true,
                                 reportDataRemotely = true,
                                 verbose = false)
  // Same as anyConfig but with project-info reporting disabled ("anonymous").
  val anyAnonymousConfig: Config = Config(Platform.Java,
                                          reportProjectInfo = false,
                                          reportDataRemotely = true,
                                          verbose = false)
}
| pedrovgs/Kuronometer | kuronometer-core/src/test/scala/com/github/pedrovgs/kuronometer/mothers/ConfigMother.scala | Scala | apache-2.0 | 617 |
package service.notifications
import java.io.{BufferedWriter, File, FileWriter}
import dao.SundialDaoFactory
import dto.DisplayModels
import model.{EmailNotification, ProcessStatus}
import software.amazon.awssdk.services.ses.SesClient
import scala.sys.process._
class DevelopmentEmailNotifications(daoFactory: SundialDaoFactory,
                                    displayModels: DisplayModels,
                                    sesClient: SesClient)
    extends EmailNotifications(daoFactory,
                               "noreply@yourdomain.com",
                               displayModels,
                               sesClient) {

  /** Development override: instead of sending mail through SES, writes the
    * HTML body to a temp file and opens it with the `open` command
    * (NOTE(review): `open` is macOS-specific — confirm intended dev platform). */
  override def sendEmail(processStatus: ProcessStatus,
                         previousProcessStatus: Option[ProcessStatus],
                         teams: Seq[EmailNotification],
                         subject: String,
                         body: String): Unit = {
    val outfile = File.createTempFile("sundial", ".html")
    val bw = new BufferedWriter(new FileWriter(outfile))
    // Close the writer even if write() throws (the original leaked it on error).
    try {
      bw.write(body)
    } finally {
      bw.close()
    }

    Seq("open", outfile.getAbsolutePath()).!
  }
}
| gilt/sundial | app/service/notifications/DevelopmentEmailNotifications.scala | Scala | mit | 1,123 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.upscan
import play.api.libs.json.{Json, OFormat}
/** Payload sent to the upscan-initiate service to start a file upload.
  *
  * @param callbackUrl     URL upscan calls back with the scan/upload result
  * @param successRedirect URL the user is redirected to after a successful upload
  * @param errorRedirect   URL the user is redirected to when the upload fails
  * @param minimumFileSize optional lower bound on accepted file size (presumably bytes — confirm against upscan docs)
  * @param maximumFileSize optional upper bound on accepted file size (presumably bytes — confirm against upscan docs)
  */
case class UpscanInitiateRequest(
  callbackUrl: String,
  successRedirect: String,
  errorRedirect: String,
  minimumFileSize: Option[Int] = None,
  maximumFileSize: Option[Int] = None
)

object UpscanInitiateRequest {
  // JSON (de)serialisation derived automatically from the case-class fields.
  implicit val format: OFormat[UpscanInitiateRequest] = Json.format[UpscanInitiateRequest]
}
| hmrc/gmp-frontend | app/models/upscan/UpscanInitiateRequest.scala | Scala | apache-2.0 | 1,176 |
package de.htwg.zeta.server.model.modelValidator.validator.rules.nullChecks
import de.htwg.zeta.common.models.project.instance.GraphicalDslInstance
import de.htwg.zeta.server.model.modelValidator.validator.rules.ModelRule
/**
 * This file was created by Tobias Droth as part of his master thesis at HTWG Konstanz (03/2017 - 09/2017).
 */
class EdgeSourcesNotNull extends ModelRule {
  override val name: String = getClass.getSimpleName
  override val description: String = "The source list inside an edge is Null."
  override val possibleFix: String = "Replace the Null value by an empty list."

  /** Passes when no edge carries a null source value.
    *
    * `forall` short-circuits on the first null and avoids the intermediate
    * collection that `map(...).contains(null)` would build; the result is
    * identical.
    */
  override def check(model: GraphicalDslInstance): Boolean =
    model.edges.forall(_.sourceNodeName != null) // scalastyle:ignore null
}
| Zeta-Project/zeta | api/server/app/de/htwg/zeta/server/model/modelValidator/validator/rules/nullChecks/EdgeSourcesNotNull.scala | Scala | bsd-2-clause | 737 |
package io.github.mandar2812.dynaml.kernels
import breeze.linalg._
import io.github.mandar2812.dynaml.analysis.VectorField
import io.github.mandar2812.dynaml.analysis.implicits._
import io.github.mandar2812.dynaml.DynaMLPipe._
import io.github.mandar2812.dynaml.pipes.DataPipe
import org.scalatest.{FlatSpec, Matchers}
/** Unit tests for DynaML covariance (kernel) functions: hyper-parameter
  * blocking, closed-form kernel evaluations, algebraic composition of kernels
  * and kernel-matrix construction.
  */
class KernelSpec extends FlatSpec with Matchers {

  "Covariance Functions " should "be able to block/un-block parameters correctly" in {
    implicit val field = VectorField(1)
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    val hyp = seKernel.hyper_parameters
    // Blocking the first hyper-parameter leaves only the second effective.
    seKernel.block(hyp.head)
    assert(seKernel.effective_hyper_parameters.head == hyp.last)
    seKernel.block_all_hyper_parameters
    assert(seKernel.effective_hyper_parameters.isEmpty)
    // block() with no arguments un-blocks everything again.
    seKernel.block()
    assert(seKernel.effective_hyper_parameters.length == 2)
  }

  "RBF/SE, Cauchy, Laplace, Polynomial Kernels " should "compute correct values" in {
    val epsilon = 1E-5
    implicit val field = VectorField(1)
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    val laplaceKernel = new LaplacianKernel(be = 1.0)
    val polyKernel = new PolynomialKernel(2, 1.0)
    val cauchyKernel = new CauchyKernel(1.0)
    val (x, y, z) = (DenseVector(1.0), DenseVector(0.0), DenseVector(1.5))
    // Expected constants are the closed-form kernel values at the points above
    // (e.g. 0.36787944... = exp(-1) for the Laplacian at distance 1).
    assert(math.abs(laplaceKernel.evaluate(x, y) - 0.36787944117144233) < epsilon)
    assert(math.abs(seKernel.evaluate(x, y) - 0.6065306597126334) < epsilon)
    assert(math.abs(polyKernel.evaluate(x, z) - 6.25) < epsilon)
    assert(math.abs(cauchyKernel.evaluate(x, z) - 0.8) < epsilon)
  }

  "FBM, MLP Kernels " should "compute correct values" in {
    val epsilon = 1E-5
    implicit val field = VectorField(1)
    val (fbmKernel, fbmCovFunc) = (new FBMKernel(hurst = 1.0), new FBMCovFunction(hurst = 1.0))
    val (mlpKernel, mlpCovFunc) = (new MLPKernel(1.0, 0.0), new MLP1dKernel(1.0, 0.0))
    val (x, y, z) = (DenseVector(1.0), DenseVector(0.0), DenseVector(1.5))
    assert(fbmKernel.evaluate(x, y) == 0d)
    assert(fbmCovFunc.evaluate(1d, 0d) == 0d)
    // Hurst gradient follows the analytic expression t^2 log(t^2) - s^2 log(s^2).
    assert(fbmKernel.gradient(x, z)("hurst") == 1.5*1.5*math.log(1.5*1.5) - 0.5*0.5*math.log(0.5*0.5))
    assert(fbmCovFunc.gradient(1.0, 1.5)("hurst") == 1.5*1.5*math.log(1.5*1.5) - 0.5*0.5*math.log(0.5*0.5))
    // At the origin the gradient involves log(0), so NaN is the expected result.
    assert(fbmKernel.gradient(x, y)("hurst").isNaN)
    assert(fbmCovFunc.gradient(1.0, 0.0)("hurst").isNaN)
    assert(math.abs(mlpKernel.evaluate(x, x) - math.Pi/6) < epsilon)
    assert(math.abs(mlpCovFunc.evaluate(1d, 1d) - math.Pi/6) < epsilon)
  }

  "Kernels transformations " should "compute and handle hyper-parameters correctly" in {
    val epsilon = 1E-5
    implicit val field = VectorField(1)
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    seKernel.block("amplitude")
    val laplaceKernel = new LaplacianKernel(be = 1.0)
    val polyKernel = new PolynomialKernel(2, 1.0)
    val (x, y, z) = (DenseVector(1.0), DenseVector(0.0), DenseVector(1.5))
    // Kernel algebra: '+' and '*' build additive/multiplicative composites.
    val k1 = seKernel + polyKernel
    val k2 = laplaceKernel * polyKernel
    val k3 = seKernel + laplaceKernel
    assert(math.abs(k1.evaluate(x, z) - 7.132496902584595) < epsilon)
    assert(math.abs(k2.evaluate(x, z) - 3.790816623203959) < epsilon)
    // Composite kernels namespace hyper-parameters as "<KernelClassName>/<param>".
    val block_hyp2 = Seq(polyKernel.toString.split("\\\\.").last+"/degree")
    k2.block(block_hyp2:_*)
    assert(k2.blocked_hyper_parameters.length == 1 && k2.blocked_hyper_parameters.head == block_hyp2.head)
    // Blocks applied to component kernels propagate into the composites.
    assert(
      k1.blocked_hyper_parameters.length == 1 &&
        k1.blocked_hyper_parameters.head == seKernel.toString.split("\\\\.").last+"/amplitude")
    assert(
      k3.blocked_hyper_parameters.length == 1 &&
        k3.blocked_hyper_parameters.head == seKernel.toString.split("\\\\.").last+"/amplitude")
    assert(polyKernel.blocked_hyper_parameters == Seq("degree"))
  }

  "Decomposable Kernels " should "compute correctly" in {
    val epsilon = 1E-5
    implicit val field = VectorField(1)
    implicit val enc = breezeDVSplitEncoder(1)
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    val polyKernel = new PolynomialKernel(2, 1.0)
    // 2-d inputs are split into 1-d components, one per component kernel.
    val (x, y, z) = (DenseVector(1.0, 1.0), DenseVector(0.0, 0.0), DenseVector(1.5, 1.5))
    val k1 = new DecomposableCovariance[DenseVector[Double]](seKernel, polyKernel)
    assert(math.abs(k1.evaluate(x, z) - 7.132496902584595) < epsilon)
  }

  "Decomposable Kernels " should "handle hyper-perameters in a consistent fashion" in {
    implicit val field = VectorField(1)
    implicit val enc = breezeDVSplitEncoder(1)
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    val polyKernel = new PolynomialKernel(2, 1.0)
    polyKernel.block("degree")
    val (x, y, z) = (DenseVector(1.0, 1.0), DenseVector(0.0, 0.0), DenseVector(1.5, 1.5))
    val k1 = new DecomposableCovariance[DenseVector[Double]](seKernel, polyKernel)
    // All composite hyper-parameters must be namespaced by a component kernel name.
    assert(k1.hyper_parameters.forall(
      h => h.contains(seKernel.toString.split("\\\\.").last) ||
        h.contains(polyKernel.toString.split("\\\\.").last)
    ))
    assert(
      k1.blocked_hyper_parameters.forall(
        _.contains(polyKernel.toString.split("\\\\.").last+"/degree")
      )
    )
  }

  "Kernel Matrices " should "be constructed correctly" in {
    // eval1: identity kernel; eval2: rational-quadratic style similarity.
    val eval1 = (x: Int, y: Int) => if(x == y) 1.0 else 0.0
    val eval2 = (x: Int, y: Int) => 1/(1.0 + math.pow(x - y, 2.0))
    val nPoints = 2
    val data: Seq[Int] = 0 until nPoints
    val k1 = SVMKernel.buildSVMKernelMatrix(data, nPoints, eval1).getKernelMatrix()
    val k2 = SVMKernel.buildSVMKernelMatrix(data, nPoints, eval2).getKernelMatrix()
    assert(k1.rows == nPoints && k1.cols == nPoints && DenseMatrix.eye[Double](nPoints) == k1)
    assert(
      k2.rows == nPoints &&
        k2.cols == nPoints &&
        DenseMatrix.tabulate[Double](nPoints, nPoints)((i, j) => if(i == j) 1.0 else 0.5) == k2)
    // Cross-kernel matrix between the data set and a single point.
    val k3 = SVMKernel.crossKernelMatrix(data, Seq(0), eval2)
    assert(
      k3.rows == nPoints &&
        k3.cols == 1 &&
        DenseMatrix.tabulate[Double](nPoints, 1)((i, j) => if(i == j) 1.0 else 0.5) == k3)
    // Partitioned (blocked) kernel matrices: 1x1 blocks, so 2x2 blocks total.
    val k4 = SVMKernel.buildPartitionedKernelMatrix(
      data, nPoints.toLong,
      numElementsPerRowBlock = 1,
      numElementsPerColBlock = 1,
      eval2)
    assert(k4.rows == 2 && k4.cols == 2 && k4.rowBlocks == 2 && k4.colBlocks == 2)
    assert(k4._data.forall(p =>
      if(p._1._1 == p._1._2) p._2 == DenseMatrix(1.0)
      else p._2 == DenseMatrix(0.5)))
    val k5 = SVMKernel.crossPartitonedKernelMatrix(
      data, Seq(0),
      numElementsPerRowBlock = 1,
      numElementsPerColBlock = 1,
      eval2)
    assert(k5.rows == 2 && k5.cols == 1 && k5.rowBlocks == 2 && k5.colBlocks == 1)
    assert(k5._data.forall(p =>
      if(p._1._1 == p._1._2) p._2 == DenseMatrix(1.0)
      else p._2 == DenseMatrix(0.5)))
  }

  "Covariance functions constructed from Feature Maps" should " compute and compose correctly" in {
    val epsilon = 1E-5
    implicit val field = VectorField(2)
    // Points on the unit circle: phi(0) and phi(pi/2) are orthonormal.
    val data = Seq(0d, math.Pi/2)
    val phi = DataPipe[Double, DenseVector[Double]](x => DenseVector(math.cos(x), math.sin(x)))
    val id = identityPipe[Double]
    val seKernel = new SEKernel(band = 1.0, h = 1.0)
    seKernel.block("amplitude")
    val id_cov = new FeatureMapCovariance[Double, Double](id)
    val cov1 = new FeatureMapCovariance[Double, DenseVector[Double]](phi)
    // '>' composes feature maps (and finally a kernel) left to right.
    val cov2 = id_cov > cov1
    val cov3 = id_cov > cov1 > seKernel
    val k1 = cov1.buildKernelMatrix(data, data.length).getKernelMatrix()
    val k2 = cov2.buildKernelMatrix(data, data.length).getKernelMatrix()
    val k3 = cov3.buildKernelMatrix(data, data.length).getKernelMatrix()
    // Error matrices: deviation from the analytically expected Gram matrices.
    val errMat1 = DenseMatrix.eye[Double](2) - k1
    val errMat2 = DenseMatrix.eye[Double](2) - k2
    val errMat3 = DenseMatrix.tabulate[Double](2, 2)((i, j) => if(i == j) 1.0 else math.exp(-1.0)) - k3
    assert(
      k1.rows == 2 &&
        k1.cols == 2 &&
        trace(errMat1.t*errMat1) < epsilon &&
        trace(errMat2.t*errMat2) < epsilon)
    assert(cov3.blocked_hyper_parameters == Seq("amplitude") && trace(errMat3.t*errMat3) < epsilon)
  }
}
| transcendent-ai-labs/DynaML | dynaml-core/src/test/scala/io/github/mandar2812/dynaml/kernels/KernelSpec.scala | Scala | apache-2.0 | 8,169 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate
import org.scalatest._
/** Shared contract for [[ConcurrentVersions]] implementations: tracking causal
  * vs. concurrent updates and resolving conflicts via vector timestamps.
  * Concrete subclasses supply the implementation through [[create]].
  */
abstract class ConcurrentVersionsSpec extends WordSpec with Matchers with BeforeAndAfterEach {
  // Instance under test; re-created before every test case.
  var versions: ConcurrentVersions[String, String] = null

  override def beforeEach(): Unit =
    versions = create

  // Factory implemented by concrete specs (tree- or list-based implementation).
  def create: ConcurrentVersions[String, String]

  // Convenience constructor for a three-process vector clock.
  def vectorTime(t1: Int, t2: Int, t3: Int): VectorTime =
    VectorTime("p1" -> t1, "p2" -> t2, "p3" -> t3)

  "A ConcurrentVersions instance" must {
    "track causal updates" in {
      // Second update causally dominates the first, so no conflict remains.
      val result = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(2, 0, 0))
      result.conflict should be(false)
      result.all(0) should be(Versioned("b", vectorTime(2, 0, 0)))
    }
    "track concurrent updates" in {
      // Incomparable timestamps: both versions survive as a conflict.
      val result = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(0, 1, 0))
      result.conflict should be(true)
      result.all(0) should be(Versioned("a", vectorTime(1, 0, 0)))
      result.all(1) should be(Versioned("b", vectorTime(0, 1, 0)))
    }
    "resolve concurrent updates" in {
      // resolve(selected, resolveTime) keeps the selected version, stamped
      // with the explicit resolve timestamp.
      val result = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(0, 1, 0))
        .resolve(
          vectorTime(1, 0, 0),
          vectorTime(2, 1, 0))
      result.conflict should be(false)
      result.all(0) should be(Versioned("a", vectorTime(2, 1, 0)))
    }
    "resolve concurrent updates with implicit event timestamp" in {
      // Without an explicit resolve time, the merged timestamp is derived
      // from the conflicting versions.
      val result = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(0, 1, 0))
        .resolve(vectorTime(1, 0, 0))
      result.conflict should be(false)
      result.all(0) should be(Versioned("a", vectorTime(1, 1, 0)))
    }
    "resolve concurrent updates (advanced)" in {
      val updated = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(0, 1, 0))
        .update("c", vectorTime(0, 1, 4))
        .update("d", vectorTime(0, 3, 0))
        .update("e", vectorTime(0, 1, 5))
      // Three mutually concurrent leaves remain: a, e (supersedes b and c), d.
      updated.all.length should be(3)
      updated.all(0) should be(Versioned("a", vectorTime(1, 0, 0)))
      updated.all(1) should be(Versioned("e", vectorTime(0, 1, 5)))
      updated.all(2) should be(Versioned("d", vectorTime(0, 3, 0)))
      val result = updated.resolve(
        vectorTime(0, 3, 0),
        vectorTime(3, 4, 8))
      result.conflict should be(false)
      result.all(0) should be(Versioned("d", vectorTime(3, 4, 8)))
    }
    "only resolve concurrent updates that happened before the resolve" in {
      // "c" is concurrent to the resolve timestamp itself, so it survives.
      val result = versions
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(0, 1, 0))
        .update("c", vectorTime(0, 0, 1))
        .resolve(
          vectorTime(1, 0, 0),
          vectorTime(2, 1, 0))
      result.all.length should be(2)
      result.all(0) should be(Versioned("a", vectorTime(2, 1, 0)))
      result.all(1) should be(Versioned("c", vectorTime(0, 0, 1)))
    }
  }
}
object ConcurrentVersionsTreeSpec {
  /** Test-only enrichment exposing each tree node as a (versioned, rejected) pair. */
  implicit class ConcurrentVersionsTreeHelper(tree: ConcurrentVersionsTree[String, String]) {
    def nodeTuples = tree.nodes.map(n => (n.versioned, n.rejected))
  }
}
/** Runs the shared contract against the tree-based implementation and adds
  * tree-specific behaviour: projections over rejected branches and deep copy.
  */
class ConcurrentVersionsTreeSpec extends ConcurrentVersionsSpec {
  // Projection merges an existing value with an update value.
  type Projection = (String, String) => String

  // "replace" keeps only the newest value; "append" accumulates history.
  val replace: Projection = (a, b) => b
  val append: Projection = (a, b) => if (a == null) b else a + b

  override def create: ConcurrentVersions[String, String] = ConcurrentVersionsTree(replace)

  "A ConcurrentVersionsTree instance" must {
    "support updates on rejected versions (append to leaf)" in {
      // Updates causally following a rejected leaf still append to its branch.
      val result = ConcurrentVersionsTree(append)
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(1, 1, 0))
        .update("c", vectorTime(1, 0, 1))
        .resolve(
          vectorTime(1, 0, 1),
          vectorTime(1, 2, 1))
        .update("d", vectorTime(2, 1, 0))
        .update("e", vectorTime(3, 1, 0))
      result.all.length should be(2)
      result.all(0) should be(Versioned("abde", vectorTime(3, 1, 0)))
      result.all(1) should be(Versioned("ac", vectorTime(1, 2, 1)))
    }
    "support updates on rejected versions (append to non-leaf)" in {
      // Same as above but the update attaches to an interior rejected node.
      val result = ConcurrentVersionsTree(append)
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(1, 1, 0))
        .update("x", vectorTime(1, 2, 0))
        .update("c", vectorTime(1, 0, 1))
        .resolve(
          vectorTime(1, 0, 1),
          vectorTime(1, 3, 1))
        .update("d", vectorTime(2, 1, 0))
        .update("e", vectorTime(3, 1, 0))
      result.all.length should be(2)
      result.all(0) should be(Versioned("abde", vectorTime(3, 1, 0)))
      result.all(1) should be(Versioned("ac", vectorTime(1, 3, 1)))
    }
    "append updates to the closest predecessor" in {
      // "d" follows both branches; it must attach to the closest one ("ab").
      val result = ConcurrentVersionsTree(append)
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(2, 0, 0))
        .update("c", vectorTime(1, 1, 0))
        .resolve(
          vectorTime(2, 0, 0),
          vectorTime(2, 2, 0))
        .update("d", vectorTime(3, 2, 0))
      result.conflict should be(false)
      result.all(0) should be(Versioned("abd", vectorTime(3, 2, 0)))
    }
    "create a deep copy of itself" in {
      val tree = ConcurrentVersionsTree(append)
        .update("a", vectorTime(1, 0, 0))
        .update("b", vectorTime(2, 0, 0))
        .update("c", vectorTime(1, 1, 0))
      // Resolving each copy differently must not affect the original tree.
      val upd1 = tree.copy().resolve(
        vectorTime(2, 0, 0),
        vectorTime(2, 2, 0))
      val upd2 = tree.copy().resolve(
        vectorTime(1, 1, 0),
        vectorTime(2, 2, 0))
      tree.conflict should be(true)
      upd1.conflict should be(false)
      upd2.conflict should be(false)
      tree.all should be(Seq(
        Versioned("ab", vectorTime(2, 0, 0)),
        Versioned("ac", vectorTime(1, 1, 0))))
      upd1.all should be(Seq(Versioned("ab", vectorTime(2, 2, 0))))
      upd2.all should be(Seq(Versioned("ac", vectorTime(2, 2, 0))))
    }
  }
}
/** Runs the shared ConcurrentVersionsSpec contract against the list-based implementation. */
class ConcurrentVersionsListSpec extends ConcurrentVersionsSpec {
  override def create: ConcurrentVersions[String, String] = ConcurrentVersionsList[String]
}
| ianclegg/eventuate | eventuate-core/src/test/scala/com/rbmhtechnology/eventuate/ConcurrentVersionsSpec.scala | Scala | apache-2.0 | 6,856 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.