repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
retronym/scala-sandbox
|
finance-model/src/main/scala/retronym/finance/contract/Contract.scala
|
package retronym.finance.contract
/** An amount of money in a given currency. */
case class Cashflow(amount: Double, currency: Currency) {
  /** A copy of this cashflow with the amount multiplied by `factor`; currency unchanged. */
  def scale(factor: Double): Cashflow = copy(amount = amount * factor)
  /** This cashflow with the sign of the amount reversed. */
  def negate: Cashflow = scale(-1)
}
// The result of evolving a contract one time step: the (possibly simplified)
// residual contract plus the cashflows generated during that step.
case class Evolution(c: Contract, cf: List[Cashflow])
object Evolution {
// Lets a bare Contract stand in for an Evolution with no cashflows
// (used by Contract.evolve's default implementation).
implicit def ContractToEvolution(c: Contract): Evolution = Evolution(c, List[Cashflow]())
// Evolves two sub-contracts at instant i, then recombines the resulting
// residual contracts and cashflow lists with the supplied combiners.
def evolvePair(c1: Contract, c2: Contract, i: Instant,
cashflowCombiner: (List[Cashflow], List[Cashflow]) => List[Cashflow],
contractCombiner: (Contract, Contract) => Contract) = {
(c1.evolve(i), c2.evolve(i)) match {
// NOTE: the pattern variables c1/c2 below deliberately shadow the method
// parameters — from here on they are the *evolved* contracts.
case (Evolution(c1, cf1), Evolution(c2, cf2)) =>
{
val contract: Contract = contractCombiner(c1, c2)
val cashflows: List[Cashflow] = cashflowCombiner(cf1, cf2)
Evolution(contract, cashflows)
}
}
}
// Single-contract variant of evolvePair: evolve c1, then transform its
// residual contract and cashflows independently.
def evolve(c1: Contract, i: Instant,
cashflowCombiner: (List[Cashflow]) => List[Cashflow],
contractCombiner: (Contract) => Contract) = {
c1.evolve(i) match {
// c1 in the pattern shadows the parameter, as in evolvePair.
case Evolution(c1, cf1) => Evolution(contractCombiner(c1), cashflowCombiner(cf1))
}
}
}
// Base of the contract combinator algebra. By default a contract is inert:
// evolving returns itself (lifted to an Evolution with no cashflows via the
// implicit Evolution.ContractToEvolution) and acquisition is a no-op.
trait Contract {
def evolve(i: Instant): Evolution = this
def acquire(i: Instant): Contract = this
}
// A time-varying observable value of type T.
trait Obs[T] {
def observe(i: Instant): T
}
// Marker for a point in time; Timestep below is the only concrete Instant here.
trait Instant
//trait Obs1[M[_], T] extends Obs[T] {
// observe(t: Instant): M[T]
//}
// Discrete time step, counted from an unspecified origin.
case class Timestep(i: Int) extends Instant
// Currency code wrapper, e.g. Currency("USD").
case class Currency(code: String)
// The contract with no rights and no obligations.
case object Zero extends Contract
// One unit of currency c, deliverable on acquisition. Note that acquiring
// currently just collapses to Zero; the cashflow itself is not yet emitted —
// see the TODO comment below about unifying acquire and evolve.
case class One(c: Currency) extends Contract {
// unify acquire and evolve to be able to pass back a cashflow on acquisition.
// def acquire(i: Instant) = Evolution(Zero, List(Cashflow(1, c)))
override def acquire(i: Instant) = Zero
}
// Takes the opposite side of contract c: evolving negates every cashflow the
// underlying produces and keeps the Give wrapper on the residual contract.
case class Give(c: Contract) extends Contract {
override def acquire(i: Instant) = Give(c.acquire(i))
override def evolve(i: Instant) = c.evolve(i) match {
case Evolution(c1, cf) => Evolution(Give(c1), cf.map(_.negate))
}
}
// Holds both sub-contracts; evolving evolves each and concatenates their cashflows.
case class And(c1: Contract, c2: Contract) extends Contract {
override def acquire(i: Instant) = And(c1.acquire(i), c2.acquire(i))
override def evolve(i: Instant) = Evolution.evolvePair(c1, c2, i, _ ::: _, And(_, _))
}
// Holder may choose either sub-contract. Acquisition currently acquires both
// wrapped in Or — see the note about threading the choice through.
case class Or(c1: Contract, c2: Contract) extends Contract {
// need to pass in the choice of contract here...
override def acquire(i: Instant) = Or(c1.acquire(i), c2.acquire(i))
// evolve not needed as this contract will simplify on acquisition.
}
// Acquires c1 or c2 depending on the boolean observable at acquisition time.
case class Cond(o: Obs[Boolean], c1: Contract, c2: Contract) extends Contract {
override def acquire(i: Instant) = (if (o.observe(i)) c1 else c2).acquire(i)
// evolve not needed as this contract will simplify on acquisition.
}
// Multiplies every cashflow of c by the value of observable o at the instant
// of evolution; the Scale wrapper is re-applied to the residual contract.
case class Scale(o: Obs[Double], c: Contract) extends Contract {
override def acquire(i: Instant) = Scale(o, c.acquire(i))
override def evolve(i: Instant) = Evolution.evolve(c, i, _.map(x => x.scale(o.observe(i))), Scale(o, _))
}
// Acquires the underlying as soon as the observable first reads true;
// until then the contract is carried forward unchanged.
case class When(o: Obs[Boolean], c: Contract) extends Contract {
override def evolve(i: Instant) = if (o.observe(i)) c.acquire(i) else this
}
// Holder may acquire the underlying at any time while the observable is true.
// NOTE(review): evolve currently discards the underlying's cashflows
// (`_ => List[Cashflow]()`) and acquire returns the wrapper unchanged —
// the intent-to-acquire signal mentioned below is not yet implemented.
case class Anytime(o: Obs[Boolean], c: Contract) extends Contract {
// add param to signal intent to acquire.
override def evolve(i: Instant) = {
// if (o.isPermanently(i, false)) {
// Zero
// } else {
Evolution.evolve(c, i, _ => List[Cashflow](), Anytime(o, _))
// }
}
override def acquire(i: Instant) = Anytime(o, c)
}
/**
 * Keeps the underlying contract alive only while the observable reads true:
 * the moment it reads false at an evolution step the contract collapses to Zero.
 * While alive, cashflows pass through unchanged and the Until wrapper is
 * re-applied to the residual contract.
 */
case class Until(o: Obs[Boolean], c: Contract) extends Contract {
  override def evolve(i: Instant) = {
    // Idiomatic negation instead of comparing with `== false`.
    if (!o.observe(i)) {
      Zero
    } else {
      // `identity` eta-expands to List[Cashflow] => List[Cashflow] here;
      // the explicit `identity _` of the original is redundant.
      Evolution.evolve(c, i, identity, Until(o, _))
    }
  }
}
/** Convenience function-valued constructors for composing contracts. */
object ContractCombinators {
  /** And as a two-argument function value. */
  def and = (c1: Contract, c2: Contract) => And(c1, c2)
  /** Give as a one-argument function value. */
  def give = (c: Contract) => Give(c)
  /** Acquire `c` together with the opposite side of `d`. */
  def andGive(c: Contract, d: Contract) = and(c, give(d))
}
// Factory methods for building Obs instances.
object Observable {
// An observable that yields the same value at every instant.
def const[A](a: A): Obs[A] = new Obs[A] {
def observe(i: Instant) = a
}
// Maps an observable through f, pointwise in time.
def lift[A, B](f: A => B)(o: Obs[A]): Obs[B] = new Obs[B] {
def observe(i: Instant) = f(o.observe(i))
}
// Combines two observables pointwise with f.
def lift2[A, B, C](f: (A, B) => C)(o1: Obs[A], o2: Obs[B]): Obs[C] = new Obs[C] {
def observe(i: Instant) = f(o1.observe(i), o2.observe(i))
}
// An observable that always yields the fixed instant t, ignoring the query instant.
def date(t: Instant): Obs[Instant] = new Obs[Instant] {
def observe(i: Instant) = t
}
}
// Wrapper intended to give arithmetic operators to observables.
// NOTE(review): the view bound `T <% Numeric[T]` (T viewable *as* its own
// Numeric instance) looks unusual — a context bound was probably intended;
// confirm before building on this. `+` is an unimplemented stub (error("todo")).
class ObsW[T <% Numeric[T]](o1: Obs[T]) {
import Observable._
def +(o2: Obs[T]) = error("todo")
}
// Minimal local numeric abstraction (predates/shadows scala.math.Numeric).
trait Numeric[T] {
def +(t: T): T
}
/*
performance_payoff_option: C
PRE:
Initial Fixing: or(S - K, 0)
Final Fixing : S - K
S - K
forward_start: (t: Instant, f: (X => Contract)
*/
|
retronym/scala-sandbox
|
web-interface/src/main/scala/demo/helloworld/snippet/HelloWorld.scala
|
<gh_stars>1-10
package retronym.snippet
// Lift snippet: `howdy` returns a NodeSeq that the templating engine splices
// into the page, showing the current timestamp.
class HelloWorld {
def howdy = <span>Welcome to helloworld at {new _root_.java.util.Date}</span>
}
|
retronym/scala-sandbox
|
expressions/src/main/scala/retronym/expression/dsl/ExpressionDsl.scala
|
<reponame>retronym/scala-sandbox
package retronym.expression.dsl
import retronym.expression._
// Internal DSL: wraps an Expression so arithmetic-style operators build
// BinaryOp trees. The tilde suffix keeps these distinct from numeric + - * /.
class ExpressionBuilder(e1: Expression) {
def +~(e2: Expression) = new BinaryOp(e1, Plus, e2)
def -~(e2: Expression) = new BinaryOp(e1, Minus, e2)
def *~(e2: Expression) = new BinaryOp(e1, Multiply, e2)
def /~(e2: Expression) = new BinaryOp(e1, Div, e2)
}
object ExpressionBuilder {
// Anything viewable as an Expression (Double/Int/String via Expression's
// implicits) can be used on the left of the DSL operators.
implicit def ExpressionToExpressionBuilder[T <% Expression](e: T): ExpressionBuilder = new ExpressionBuilder(e)
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/collections/ForComprehensions.scala
|
<filename>lessons/src/main/scala/retronym/lessons/collections/ForComprehensions.scala<gh_stars>1-10
package retronym.lessons.collections
import _root_.org.spex.Specification
// Lesson spec: desugaring of for-comprehensions into filter/map/flatMap.
object ForComprehensions extends Specification {
val weekDays = List("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday")
// NOTE(review): daySections is unused in the examples below.
val daySections = List("morning", "afternoon", "evening")
"ForComprehensions" should {
"should allow map, filter of collection" in {
// A guard (`if`) plus a value definition inside the for.
val a = for {w <- weekDays if w startsWith "W"
wu = w.toUpperCase
} yield wu
a must containAll(List("WEDNESDAY"))
}
"List of Lists" in {
// Yielding a List per element produces a List of Lists (single generator).
val a = for {w <- weekDays if w startsWith "T"
weekdaysUpperCaseLetters = w.toList
} yield weekdaysUpperCaseLetters
a must containAll(List('T', 'u', 'e', 's', 'd', 'a', 'y'), List('T', 'h', 'u', 'r', 's', 'd', 'a', 'y'))
}
"Same as 'List of Lists', but without the sugar" in {
val a = weekDays.filter(_ startsWith "T").map(_.toList)
a must containAll(List('T', 'u', 'e', 's', 'd', 'a', 'y'), List('T', 'h', 'u', 'r', 's', 'd', 'a', 'y'))
}
"Flat list by using <- operator twice" in {
// A second generator desugars to flatMap, flattening the result.
val a = for {w <- weekDays if w startsWith "T"
weekdaysUpperCaseLetters <- w
} yield weekdaysUpperCaseLetters
a must containAll(List('T', 'u', 'e', 's', 'd', 'a', 'y', 'T', 'h', 'u', 'r', 's', 'd', 'a', 'y'))
}
"Same as 'Flat list by using <- operator twice', but without the sugar" in {
val a = weekDays.filter(_ startsWith "T").flatMap(_.toList)
a must containAll(List('T', 'u', 'e', 's', 'd', 'a', 'y', 'T', 'h', 'u', 'r', 's', 'd', 'a', 'y'))
}
}
}
|
retronym/scala-sandbox
|
finance-model/src/main/scala/retronym/finance/termsheet/Termsheet.scala
|
package retronym.finance.termsheet
import java.util.Date
// Placeholder for formatted text content.
class RichText
// Placeholder for a payoff diagram.
trait PayoffChart
// One row in the underlyings table of a termsheet.
class UnderlyingRow (
val name: String
)
// The underlyings section: optional intro text, the rows, and any provisions.
class UnderlyingSection(
val introduction: Option[String],
val underlyings: List[UnderlyingRow],
val provisions: List[String]
)
{
}
// Coupon details section (not yet modelled).
class CouponSection(
)
// Identifying/general product information.
class GeneralInfo(
val isin: String,
val coupons: CouponSection
)
// Placeholder for the set of redemption scenarios.
class RedemptionScenarios
// A glossary entry: a term and its rich-text definition.
class Definition(
val term: String,
val definition: RichText)
// The redemption section: intro, scenarios, and term definitions.
class Redemption(
val introduction: String,
val scenarios: RedemptionScenarios,
val definitions: List[Definition]
)
/** Marker for entries appearing in a termsheet's dates section. */
trait DateDefinition
/**
 * A single named date (e.g. "Initial Fixing Date") with an optional provision.
 * Fix: now extends DateDefinition — previously neither concrete class did,
 * so nothing could actually populate DatesSection.dateDefinitions.
 */
class SingleDateDefinition(val name: String, val date: Date, val provision: Option[RichText]) extends DateDefinition
/** A named schedule entry with an optional provision. */
class ScheduleDefinition(val scheduleName: String, val date: Date, val provision: Option[RichText]) extends DateDefinition
/** The dates section, listing all date definitions of the termsheet. */
class DatesSection(
  val dateDefinitions: List[DateDefinition]
)
// Aggregate root: a complete termsheet document assembled from its sections.
class Termsheet(
val title: String,
val description: RichText,
val marketExpectation: String,
val payoff: PayoffChart,
val underlyings: UnderlyingSection,
val generalInfo: GeneralInfo,
val redemption: Redemption,
val dates: DatesSection)
// Placeholder for tabular layout content.
class Table
// Rendering context passed to XSL-FO renderers (currently empty).
class FoRenderContext
// Renders a termsheet as an XSL-FO document (currently a hard-coded skeleton:
// one US-letter page master and a hello-world flow).
trait TermsheetFoRenderer {
// Returns the XSL-FO root element; `context` is not yet consulted.
def render(context: FoRenderContext) = {
val fo =
<root xmlns="http://www.w3.org/1999/XSL/Format">
<layout-master-set>
<simple-page-master page-height="11in" page-width="8.5in" master-name="only">
<region-body region-name="xsl-region-body" margin="0.7in"/>
<region-before region-name="xsl-region-before" extent="0.7in"/>
<region-after region-name="xsl-region-after" extent="0.7in"/>
</simple-page-master>
</layout-master-set>
<page-sequence master-reference="only" format="A">
<flow flow-name="xsl-region-body">
<block>{"Hello " + "world!"}</block>
</flow>
</page-sequence>
</root>
fo
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/annotations/Annotation.scala
|
package retronym.lessons.annotations
import _root_.org.specs.runner.ScalaTestSuite
import _root_.org.specs.Specification
import _root_.org.specs.runner._
import _root_.org.specs.matcher._
import _root_.org.specs.mock._
import _root_.org.specs.specification._
import _root_.org.specs._
import _root_.org.specs.io._
import _root_.org.specs.collection._
import _root_.org.specs.util._
import _root_.org.specs.xml._
import java.lang.reflect.Field
import java.lang.reflect.Method
// Lesson spec: a Java annotation on a Scala var is propagated (per ticket
// #1846, linked below) to the generated field, getter, and setter.
object Annotation extends Specification {
class MyClass {
@Deprecated
var myVal = ""
}
// https://lampsvn.epfl.ch/trac/scala/ticket/1846
"An annotated var" should {
"have an annotated field" in {
val field = classOf[MyClass].getDeclaredField("myVal")
val annotation: Deprecated = field.getAnnotation(classOf[Deprecated])
annotation mustNot beNull
}
"have an annotated accessor" in {
val method = classOf[MyClass].getMethod("myVal")
val annotation: Deprecated = method.getAnnotation(classOf[Deprecated])
annotation mustNot beNull
}
"have an annotated mutator" in {
// The setter is name-mangled: myVal_= becomes myVal_$eq in bytecode.
val found = classOf[MyClass].getMethods().find(_.getName == "myVal_$eq")
found must beSome[Method].which((_: Method).getAnnotation(classOf[Deprecated]) mustNot beNull)
}
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/option/OptionalValues.scala
|
package retronym.lessons.option
import _root_.org.spex.Specification
// Lesson spec: null-propagating Java style vs Option + for-comprehension style.
object OptionalValues extends Specification {
"Java Style" should {
// Boxed Integers so null can represent "missing"; null is propagated manually.
def add(a: java.lang.Integer, b: java.lang.Integer) : java.lang.Integer = {
if (a == null || b == null) return null
return a.intValue + b.intValue
}
"First is null" in {
add(null, 1) must beNull
}
"Second is null" in {
add(1, null) must beNull
}
"Both are null" in {
add(null, null) must beNull
}
"Both are non-null" in {
add(1, 2) must be_==(3)
}
}
"Scala Style" should {
def add(a: Int, b: Int) = a + b
// The for-comprehension short-circuits to None if either input is None.
def addOptionalInputs(a: Option[Int], b: Option[Int]) = {
for(a <- a; b <- b) yield add(a, b)
}
"First is null" in {
addOptionalInputs(None, Some(1)) must beNone
}
"Second is null" in {
addOptionalInputs(Some(1), None) must beNone
}
"Both are null" in {
addOptionalInputs(None, None) must beNone
}
"Both are non-null" in {
addOptionalInputs(Some(1), Some(2)) must beSome[Int].which(_ == 3)
}
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/testing/specs/Sample.scala
|
<gh_stars>1-10
package retronym.lessons.testing.specs
import _root_.org.spex.Specification
// Lesson spec: the same trivial example written with the specs string DSL
// and with the explicit declare/forExample API.
object Sample extends Specification {
"Basic Examples" should {
"this example will pass" in {
1 must be_==(1)
}
}
// Long-hand equivalent of the block above.
val spec = declare("Basic Examples without shorthand")
spec.forExample("this will pass").in({
1 must be_==(1)
})
}
|
retronym/scala-sandbox
|
expressions/src/main/scala/retronym/expression/Expression.scala
|
<reponame>retronym/scala-sandbox
package retronym.expression
import _root_.scalaz.OptionW
import _root_.retronym.commons._
// An arithmetic expression tree node.
trait Expression {
// Human-readable rendering of the expression.
def describe: String
// Number of leaf nodes; RichExpression.simplify picks the shortest refactoring.
def length: Int = 1
}
/** Root of the concrete expression-node hierarchy. */
sealed abstract class BaseExpression extends Expression

/** A numeric literal. */
case class Constant(value: Double) extends BaseExpression {
  def describe = value.toString
}

/** A named variable. */
case class Variable(name: String) extends BaseExpression {
  def describe = name
}

/** Application of a binary operator to two sub-expressions. */
case class BinaryOp(left: Expression, op: Operator, right: Expression) extends BaseExpression {
  def describe = List(left.describe, op.toString, right.describe).mkString(" ")
  /** The same operation with operands swapped (valid rewrite when op is commutative). */
  def flip = BinaryOp(right, op, left)
  // Size is the sum of the operand sizes; leaves count 1 via the trait default.
  override def length = left.length + right.length
}
// Implicit lifts so plain literals can appear in expression DSL positions.
object Expression {
implicit def fromDouble(value: Double) = Constant(value)
implicit def fromInt(value: Int): Constant = Constant(value)
implicit def fromString(value: String) = Variable(value)
}
|
retronym/scala-sandbox
|
expressions/src/test/scala/retronym/expression/ExpressionTest.scala
|
<filename>expressions/src/test/scala/retronym/expression/ExpressionTest.scala
package retronym.expression
import dsl.ExpressionBuilder._
import dsl._
import RichExpression._
import _root_.org.spex.Specification
// Spec for the expression DSL, the implicit literal lifts, and the
// refactoring/simplification engine in RichExpression.
object expressionSpec extends Specification {
"describe" in {
val op = BinaryOp(Constant(1), Plus, Constant(0))
op.describe must be_==("1.0 + 0.0")
}
"implicit conversion" in {
// Int and String literals are lifted via Expression's implicits.
val expr = BinaryOp(1, Plus, "x")
expr must be_==(BinaryOp(Constant(1), Plus, Variable("x")))
}
"arithmetic simplified" in {
(0 +~ "x").simplify must expr_==("x")
("x" +~ 0).simplify must expr_==("x")
(0 +~ "x").simplify must expr_==("x")
("x" *~ 1).simplify must expr_==("x")
(1 *~ "x").simplify must expr_==("x")
((0 +~ 0) +~ "x").simplify must expr_==("x")
}
"complex expression is fully refactored" in {
// refactor enumerates equivalent rewritings, including the trivial one.
val orig = ("a" +~ "b") -~ ("b" +~ "a");
orig.refactor must contain(("b" +~ "a") -~ ("b" +~ "a"))
orig.refactor must contain(("b" +~ "a") -~ ("a" +~ "b"))
orig.refactor must contain(Constant(0))
}
"associative rule" in {
val e1 = ("a" +~ "b") +~ "c";
val e2 = "a" +~ ("b" +~ "c")
e1.refactor must containAll(List[Expression](e1, e2))
e1.refactor must contain(e1)
e2.refactor must contain(e1)
}
"divide" in {
// (a + b) / b distributes to a/b + b/b, which simplifies to a/b + 1.
val e1 = ("a" +~ "b") /~ "b";
val value = ("a" /~ "b") +~ 1
e1.refactor must contain(value)
}
}
|
retronym/scala-sandbox
|
web-interface/src/main/scala/bootstrap/liftweb/Boot.scala
|
<gh_stars>1-10
package bootstrap.liftweb
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import Helpers._
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot {
// Lift bootstrap hook: registers the snippet package and builds the site map.
def boot {
// where to search snippet
LiftRules.addToPackages("retronym")
// Build SiteMap: a single "Home" entry pointing at /index.
val entries = Menu(Loc("Home", List("index"), "Home")) :: Nil
LiftRules.setSiteMap(SiteMap(entries:_*))
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/traits/UsingTraits.scala
|
<reponame>retronym/scala-sandbox
package retronym.lessons.traits
import _root_.org.spex.Specification
// Lesson spec: traits as interfaces, mixin composition, and type aliases
// for intersection types.
object UsingTraits extends Specification {
trait Camera {
def shoot(exposure: Int)
}
trait Zoomable {
def zoomIn(level: Int): Int
}
"Traits as interface contract" should {
"Implement one interface" in {
class CameraImpl extends Camera {
def shoot(exposure: Int) = {
println("shooting...");
Thread.sleep(exposure)
}
}
val c: Camera = new CameraImpl
// shoot returns Unit, so the result compares equal to ().
c.shoot(10) must be_==(())
}
"implement multiple interfaces, anonymous class" in {
// `Camera with Zoomable` is an intersection type usable directly.
val c: Camera with Zoomable = new Camera with Zoomable {
var zoomLevel = 5
def shoot(exposure: Int) = println("shoot")
def zoomIn(level: Int) = {
zoomLevel = zoomLevel + level
zoomLevel
}
}
c.zoomIn(1) must be_==(6)
}
"one trait combining multiple interfaces, anonymous class" in {
trait ZoomableCamera extends Camera with Zoomable
val c: ZoomableCamera = new ZoomableCamera {
var zoomLevel = 5
def shoot(exposure: Int) = println("shoot")
def zoomIn(level: Int) = {
zoomLevel = zoomLevel + level
zoomLevel
}
}
c.zoomIn(1) must be_==(6)
}
"type alias for multiple interfaces, anonymous class" in {
// A type member can name the intersection without declaring a new trait.
object MyTypeHolder {
type ZoomableCamera = Camera with Zoomable
}
import MyTypeHolder.ZoomableCamera
val c: ZoomableCamera = new Camera with Zoomable {
var zoomLevel = 5
def shoot(exposure: Int) = println("shoot")
def zoomIn(level: Int) = {
zoomLevel = zoomLevel + level
zoomLevel
}
}
c.zoomIn(1) must be_==(6)
}
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/overload/Overload.scala
|
package retronym.lessons.overload
import _root_.org.spex.Specification
// Lesson spec: what Scala's overloading rules do and don't allow — implicit
// parameters, type parameters, parameter lists, and the separate namespace
// for types vs terms. Commented-out lines document rejected variants.
object Overload extends Specification {
"Overload" should {
class M {
final def m = "m"
}
case class Wrap1(a: String)
case class Wrap2(a: String)
class IllegalOverride extends M {
// def m() = "m" // can't overload based on no-param list and empty param list.
// def m :Int = 1 // can't overload based on return type
// var m = 1 // one namespace for methods, vals, vars, and objects to support uniform access principle.
// val m = 1
// object m
}
"override with implicit param" in {
class ImplicitParam
class ImplicitOveride extends M {
def m(implicit a: ImplicitParam) = "m(implicit a: Any)"
}
{
implicit val a = new ImplicitParam
// error: errorneous reference to overloaded definition, most specific 'm', alternative 'm(implicit a)'
// new ImplicitOveride().m must be_==("m")
// new ImplicitOveride().m must be_==("m(implicit a: Any)")
()
}
// Passing the argument explicitly disambiguates the overload.
new ImplicitOveride().m(new ImplicitParam) must be_==("m(implicit a: Any)")
}
"override by type param" in {
class TypeParamaterAndReturnTypeOveride extends M {
def m[T] = Wrap1("m[T]")
// def m[T, Y] = Wrap1("m[T, Y]") // error: double definition, same type after erasure
def m[T, Y] = Wrap2("m[T, Y]")
}
new M().m must be_==("m")
// new TypeParamaterAndReturnTypeOveride().m must be_==("m") // error: ambiguous reference to overloaded definition
val x = new TypeParamaterAndReturnTypeOveride()
// Explicit type arguments select the arity-1 vs arity-2 overload.
x.m[Any] must be_==(Wrap1("m[T]"))
x.m[Nothing] must be_==(Wrap1("m[T]"))
x.m[String, Int] must be_==(Wrap2("m[T, Y]"))
}
class ParamListOverride extends M {
def m(a: Int) = "m(a: Int)"
def m(a: String) = "m(a: String)"
def m(a: List[Int]) = "m(a: List[Int])"
// def m(a: List[String]) = 1 // double definition... have same type after erasure.
def m(a: Any, b: Any) = "m(a: Any, b: Any)"
// def m(a: Any)(b: Any) = "m(a: Any)(b: Any)" // double definition
def m(a: Any)(b: Any) = Wrap1("m(a: Any)(b: Any)") // double definition
}
"classes, types, and traits in a separate namespace" in {
// A nested type named M does not shadow the inherited method m.
class NestedClass extends M {
class M
}
class NestedTrait extends M {
trait M
}
class NestedType extends M {
type M = Any
}
new NestedClass().m must be_==("m")
new NestedTrait().m must be_==("m")
new NestedType().m must be_==("m")
}
}
}
|
retronym/scala-sandbox
|
expressions/src/test/scala/retronym/expression/ExpressionMatchers.scala
|
package retronym.expression
import _root_.org.specs.matcher.Matcher
/** specs matcher asserting structural (case-class) equality with the expected expression. */
case class expr_==(e: Expression) extends Matcher[Expression] {
  override def apply(t: =>Expression) = {
    // Evaluate the by-name actual exactly once, as the original did.
    val actual = t
    val ok = e == actual
    (ok, "matched", "expected: " + e.describe)
  }
}
|
retronym/scala-sandbox
|
finance-model/src/main/scala/retronym/finance/Option.scala
|
package retronym.finance
import java.math.BigDecimal
import java.util.Date
import scalaz.control.Monad
// NOTE(review): PayoffStyle is a case class extended by case objects — a
// sealed trait would be the conventional encoding; changing it would alter
// the public constructor, so it is only flagged here.
case class PayoffStyle()
case object Put extends PayoffStyle
case object Call extends PayoffStyle
// NOTE(review): AbsoluteLevel and PercentLevel do not extend Level, yet
// OptionPayoff.strike is typed Level — as written only a bare Level() can be
// a strike. Confirm intent before wiring these together (Level is a case
// class, so they cannot simply extend it as-is).
case class Level()
case class AbsoluteLevel(value: BigDecimal)
case class PercentLevel(value: BigDecimal)
trait Underlying {
def getId: String
}
// How a multi-asset basket's performance is measured.
trait BasketStyle;
case object WorstOf extends BasketStyle;
case object BestOf extends BasketStyle;
case object Average extends BasketStyle;
case object Single extends BasketStyle;
// Option schedule; forwardStart is the optional forward-start date.
case class OptionCalendar(forwardStart: Option[Date])
case class OptionPayoff(
strike: Level,
style: PayoffStyle,
basketStyle: BasketStyle
)
case class OptionInstrument(payoff: OptionPayoff, optionCalendar: OptionCalendar)
trait Instrument
// An instrument with a quantity, as one leg of a basket.
class BasketComponent(val instrument: Instrument, val q: BigDecimal)
class Basket(components: List[BasketComponent]) extends Instrument {
}
trait NamedInstrument {
// NOTE(review): no result type — this infers `def name: Unit`; almost
// certainly `def name: String` was intended. Confirm before changing.
def name;
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/salutation/HelloWorldApp.scala
|
<reponame>retronym/scala-sandbox
package retronym.lessons.salutation
// Obligatory Hello World!
// Obligatory Hello World!
object HelloWorldApp {
// No static methods in Scala. main() is a method on the singleton object that I have
// called HelloWorldApp.
//
def main(args: Array[String]) = {
hello4(args)
}
// This is identical to main. scala.Predef is automatically imported, along with java.lang.
def hello1(args: Array[String]) = println("Hello, world!")
// This is identical to main. scala.Predef is automatically imported, along with java.lang.
def hello2(args: Array[String]) = {
scala.Predef.println("Hello, world!")
}
// Demonstrates import renaming: println is bound locally as printline.
def hello3(args: Array[String]) = {
import scala.Predef.{println => printline}
printline("Hello World")
println
}
// Object Oriented!
def hello4(args: Array[String]) = {
new Salutation("Hello").greet("World")
}
}
/** Greets people with the fixed salutation `word`. */
class Salutation(word: String) {
  /** Prints "<word>, <someone>" to standard output. */
  def greet(someone: String): Unit = {
    val message = word + ", " + someone
    println(message)
  }
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/valsvars/ValsVars.scala
|
package retronym.lessons.valsvars
import _root_.org.specs.runner.ScalaTestSuite
import _root_.org.specs.Specification
import _root_.org.specs.runner._
import _root_.org.specs.matcher._
import _root_.org.specs.mock._
import _root_.org.specs.specification._
import _root_.org.specs._
import _root_.org.specs.io._
import _root_.org.specs.collection._
import _root_.org.specs.util._
import _root_.org.specs.xml._
// Lesson spec: vars can be reassigned; vals cannot.
object ValsVars extends Specification {
"var" should {
"can be reassigned" in {
var a = 1
var b = 2
b = a
b must be_== (a)
}
}
"val" should {
"cannot re-assign" in {
// NOTE(review): bare reference `Compiler` — presumably a placeholder for a
// compile-error demonstration; it is not defined in this file. Verify this
// file still compiles in its project, or replace with a real example.
Compiler
}
}
}
|
retronym/scala-sandbox
|
expressions/src/test/scala/retronym/expression/parse/ExpressionExternalDSLTest.scala
|
<reponame>retronym/scala-sandbox
package retronmym.expression.parse
import _root_.junit.framework.Assert._
import _root_.org.specs.runner._
import _root_.org.specs.matcher._
import _root_.org.specs.mock._
import _root_.org.specs.specification._
import _root_.org.specs._
import _root_.org.specs.io._
import _root_.org.specs.collection._
import _root_.org.specs.util._
import _root_.org.specs.xml._
import _root_.junit.framework.{TestResult, Test, Assert, TestCase}
import _root_.org.junit.runner.notification.RunNotifier
import _root_.org.junit.runner.{Runner, RunWith, Description}
import _root_.org.junit.runners.Suite
import retronym.expression.expr_==
import retronym.expression.parse.{ ExpressionExternalDSL}
import scala.util.parsing.combinator.syntactical._
import _root_.retronym.expression.dsl.ExpressionBuilder._
import _root_.retronym.expression.RichExpression._
import ExpressionExternalDSL.parse
// JUnit adapter so the spec below runs under a JUnit runner.
class expressionDslSpecAdapter extends ScalaTestSuite(expressionDslSpec)
// Spec for the external (string-based) expression DSL parser. Expected values
// are built with the internal DSL's +~ / -~ operators for comparison.
object expressionDslSpec extends Specification {
"Parse" in {
parse("0") must expr_==(0)
parse("x") must expr_==("x")
parse("(0 + 1)") must expr_==(0 +~ 1)
parse("(x + 1)") must expr_==("x" +~ 1)
parse("(x + (x - 1))") must expr_==("x" +~ ("x" -~ 1))
parse("(x + 1)") must expr_==("x" +~ 1)
}
"Parse LeftToRight" in {
// TODO Understand http://cleverlytitled.blogspot.com/2009/04/shunting-yard-algorithm.html to figure out
// how to avoid the infinite recursion of the current parser.
//
// parse("x + x - 1") must expr_==(("x" +~ "x") -~ 1)
}
}
|
retronym/scala-sandbox
|
expressions/src/main/scala/retronym/expression/RichExpression.scala
|
<gh_stars>1-10
package retronym.expression
import _root_.retronym.commons.BooleanW._
import _root_.retronym.commons.PartialFunctionW._
import RichExpression._
object RichExpression {
// Pimp: gives every Expression the simplify/refactor operations below.
implicit def ExpressionToRichExpression(e : Expression) : RichExpression = new RichExpression(e)
}
// Rewrite engine: enumerates equivalent forms of an expression by repeatedly
// applying local algebraic rules, and picks the smallest as the simplification.
class RichExpression(val e: Expression) {
// Smallest (fewest leaves) of all enumerated equivalent expressions.
def simplify: Expression = refactor.sort(_.length < _.length).head
// All equivalent expressions reachable via the rules, to a fixed point.
def refactor : List[Expression] = refactorMultiPass(List(e))
// One application layer of the rules; non-BinaryOp nodes are left unchanged.
def refactorOnePass: List[Expression] = e match {
case b: BinaryOp => refactorBinaryOp(b)
case e => List(e)
}
// Iterates one-pass refactoring until the set of expressions stops growing.
// Termination relies on the pass being monotone (it includes the identity rule).
private def refactorMultiPass(es : List[Expression]) : List[Expression] = {
val es2 = (for (e <- es; e1 <- e.refactorOnePass) yield e1).removeDuplicates
if (es.length == es2.length) {
return es2
}
refactorMultiPass(es2)
}
// Local rewrite rules. Each is a partial function; the final catch-all keeps
// the original expression so every input survives a pass.
private val rules: List[PartialFunction[Expression, Expression]] = List(
{case b@BinaryOp(_, o: Commutative, _) => b.flip},
{case BinaryOp(a, o1: Associative, BinaryOp(b, o2: Associative, c)) if o1 == o2 => BinaryOp(BinaryOp(a, o1, b), o1, c)},
{case BinaryOp(BinaryOp(a, o1: Associative, b), o2: Associative, c) if o1 == o2 => BinaryOp(a, o1, BinaryOp(b, o1, c))},
// TODO Compiler warning because of type param Erasure. Maybe Expression should be Expression[DoubleNumericSystem] instead.
{case BinaryOp(Constant(i), o: HasIdentity[Double], x) if o.isIdentity(i) => x},
{case BinaryOp(x, o: HasIdentity[Double], Constant(i)) if o.isIdentity(i) => x},
{case BinaryOp(x, Multiply, Constant(0)) => Constant(0)},
{case BinaryOp(Constant(0), Multiply, x) => Constant(0)},
{case BinaryOp(Constant(0), Div, x) => Constant(0)},
{case BinaryOp(a, Div, b) if a == b => Constant(1)},
{case BinaryOp(x, Minus, y) if x == y => Constant(0)},
{case BinaryOp(BinaryOp(x, op, y), Div, z) if op == Minus || op == Plus => BinaryOp(new BinaryOp(x, Div, z), op, new BinaryOp(y, Div, z))},
{case BinaryOp(BinaryOp(a, Div, b), op, BinaryOp(c, Div, d)) if b == d => BinaryOp(BinaryOp(a, op, c), Div, b)},
{case b => b})
// Cross-product: refactor both operands, then apply every rule to every combination.
private def refactorBinaryOp(e: BinaryOp): List[Expression] = {
val es = e match {
case BinaryOp(l, o, r) => for (l1 <- l.refactorOnePass; r1 <- r.refactorOnePass) yield BinaryOp(l1, o, r1)
}
for (e1 <- es; r <- rules; e2 <- r.toFunction1(e1)) yield e2
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/control/Control.scala
|
<reponame>retronym/scala-sandbox
package retronym.lessons.control
import _root_.org.specs.runner.ScalaTestSuite
import _root_.org.specs.Specification
import _root_.org.specs.runner._
import _root_.org.specs.matcher._
import _root_.org.specs.mock._
import _root_.org.specs.specification._
import _root_.org.specs._
import _root_.org.specs.io._
import _root_.org.specs.collection._
import _root_.org.specs.util._
import _root_.org.specs.xml._
// Lesson spec: if in Scala is an expression, not just a statement.
object Control extends Specification {
"Basic if/else" should {
"basic if statement" in {
val a = 1
var b = "";
if (a == 1) {
b = "1"
}
b must be_==("1")
}
"if 'statement' is actually an expression" in {
val a = 1
// The if/else result is assigned directly — no var needed.
val b = if (a == 1) {
"1"
} else {
"other"
}
b must be_==("1")
}
"only useful if you have an else, though." in {
// Without an else branch the expression's type widens to Unit.
var b = if (1 == 0) {
"1"
}
// http://en.wikipedia.org/wiki/Unit_type
val unit: Unit = ()
b must be_==(unit)
}
}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/Trail.scala
|
<filename>lessons/src/main/scala/retronym/lessons/Trail.scala
package retronym.lessons
import _root_.org.specs.runner.ScalaTestSuite
import callbyname.CallByName
import collections.VectorAverage
import erasure.Erasure
import control.Control
import option.OptionalValues
import org.spex.Specification
import overload.Overload
import traits.UsingTraits
// Umbrella spec: aggregates the individual lesson specifications.
object Trail extends Specification {
"Learning Trail" isSpecifiedBy(testing.specs.Sample, Control, UsingTraits, CallByName, Erasure, OptionalValues,
VectorAverage, Overload)
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/ducktyping/DuckTyping.scala
|
<reponame>retronym/scala-sandbox
package retronym.lessons.ducktyping
import _root_.org.spex.Specification
// Lesson spec: structural ("duck") typing — accept any value with a matching
// method signature, with or without a named type alias.
object DuckTyping extends Specification {
class A {
def close() = "A.close"
}
class B {
def close() = "B.close"
}
"DuckTyping" should {
"closeIt() accepts anything that has a close method that returns a string" in {
// The parameter type is a structural refinement, checked at compile time
// but dispatched reflectively at runtime.
def closeIt(c: {def close(): String}) = {
c.close
}
closeIt(new A) must be_==("A.close")
closeIt(new B) must be_==("B.close")
}
"Same as above, but declare a type Closable to give a name to this structural type" in {
type Closable = {def close(): String}
def closeIt(c: Closable) = {
c.close
}
closeIt(new A) must be_==("A.close")
closeIt(new B) must be_==("B.close")
}
}
}
|
retronym/scala-sandbox
|
commons/src/main/scala/retronym/commons/BooleanW.scala
|
package retronym.commons
object BooleanW {
  /** Pimp: gives every Boolean the iif/iff combinators below. */
  implicit def BooleanToBooleanW(b: Boolean): BooleanW = new BooleanW(b)
}

/** Functional conditional helpers on a wrapped Boolean. */
class BooleanW(b: Boolean) {
  /** Lazily selects `ifTrue` when the boolean is true, otherwise `ifFalse`. */
  def iif[A](ifTrue: => A, ifFalse: => A) =
    if (b) ifTrue
    else ifFalse

  /** Some(ifTrue) when the boolean is true, otherwise None; `ifTrue` stays unevaluated when false. */
  def iff[A](ifTrue: => A) = iif(Some(ifTrue), None)
}
|
retronym/scala-sandbox
|
expressions/src/main/scala/retronym/expression/parse/ExpressionParser.scala
|
package retronym.expression.parse
import _root_.scala.util.parsing.combinator.syntactical.{StandardTokenParsers, StdTokenParsers}
import scala.util.parsing.combinator.lexical.StdLexical
import retronym.expression.Expression
// Wildcard import needed: BinaryOp, Variable, Constant, Operator and the
// operator objects live in retronym.expression, and since Scala 2.8 a dotted
// `package a.b.c` clause does not bring parent-package members into scope.
import retronym.expression._
// External DSL: parses fully-parenthesised arithmetic like "(x + (y - 1))"
// into the Expression tree using the standard token-parser combinators.
object ExpressionExternalDSL extends StandardTokenParsers {
// Parses the whole string or aborts with the parse failure message.
def parse(s: String): Expression = {
val result: ParseResult[Expression] = phrase(expr)(new lexical.Scanner(s))
result.getOrElse(error(result.toString))
}
// Runs during object construction, before parse can be called.
lexical.delimiters ++= List("(", ")", "+", "-", "*", "/")
def expr: Parser[Expression] = binary_operation | atom | ("(" ~> expr <~ ")")
def atom: Parser[Expression] = variable | constant
def binary_operation: Parser[Expression] = "(" ~> expr ~ operator ~ expr <~ ")" ^^ {case l ~ o ~ r => BinaryOp(l, o, r)}
// NOTE(review): unused, and asymmetric (consumes a ")" with no matching "(") —
// likely a leftover experiment; confirm before removing.
def binary_operation_atoms: Parser[Expression] = expr ~ operator ~ expr <~ ")" ^^ {case l ~ o ~ r => BinaryOp(l, o, r)}
def variable: Parser[Expression] = ident ^^ {s => Variable(s)}
def constant: Parser[Constant] = numericLit ^^ {s => Constant(s.toDouble)}
def operator: Parser[Operator] = plus | minus | mul | div
def plus: Parser[Operator] = "+" ^^ {_ => Plus}
def minus: Parser[Operator] = "-" ^^ {_ => Minus}
def mul: Parser[Operator] = "*" ^^ {_ => Multiply}
def div: Parser[Operator] = "/" ^^ {_ => Div}
}
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/erasure/Erasure.scala
|
<reponame>retronym/scala-sandbox
package retronym.lessons.erasure
import org.spex.Specification
// Lesson spec: type-parameter erasure and recovering type info via Manifests.
object Erasure extends Specification {
"Erasure" should {
"Type parameter 'T' is erased, and the instance of check always returns true" in {
// isInstanceOf[T] on an erased T degenerates to an always-true check.
def instanceOf[T](e: AnyRef) = e.isInstanceOf[T]
instanceOf[Int]("not an integer") must be_==(true)
}
}
"Manifests" should {
"Compiler passed manifest implicitly" in {
def instanceOf2[T] (e: AnyRef)(implicit m: scala.reflect.Manifest[T]) = {
e.getClass == m.erasure
}
instanceOf2[Int]("foo") must beFalse
instanceOf2[String]("foo") must beTrue
}
"Manifest contains the full generic type" in {
def fullType[T](implicit m: scala.reflect.Manifest[T]) = {
(m.erasure, m.toString)
}
val (erasure, manifestString) = fullType[List[Int]]
// NOTE(review): this rendering ("scala.List[int]") is specific to old Scala
// versions; newer compilers print Manifest differently.
manifestString must be_==("scala.List[int]")
}
}
}
|
retronym/scala-sandbox
|
expressions/src/main/scala/retronym/expression/Operator.scala
|
package retronym.expression
// A binary operator identified by its printable symbol.
// NOTE(review): a case class extended by case objects is unconventional;
// equality of the case objects rides on the symbol field.
case class Operator(symbol: String) {
override def toString = symbol
}
// Marker: a op b == b op a (enables the flip rewrite rule).
trait Commutative
// Marker: (a op b) op c == a op (b op c) (enables re-association rules).
trait Associative
// Operators with an identity element, used by the simplification rules.
trait HasIdentity[V] {
def isIdentity(v: V): Boolean
}
case object Plus extends Operator("+") with HasIdentity[Double] with Commutative with Associative {
def isIdentity(v: Double) = v == 0
}
// Minus has a right identity (x - 0 == x) but is neither commutative nor associative.
case object Minus extends Operator("-") with HasIdentity[Double] {
def isIdentity(v: Double) = v == 0
}
case object Multiply extends Operator("x") with HasIdentity[Double] with Commutative with Associative {
def isIdentity(v: Double) = v == 1
}
case object Div extends Operator("/")
|
retronym/scala-sandbox
|
lessons/src/main/scala/retronym/lessons/callbyname/CallByName.scala
|
package retronym.lessons.callbyname
import _root_.org.spex.Specification
/**
* 4.6.1 The type of a value parameter may be prefixed by =>, e.g. x : => T . The type of
* such a parameter is then the parameterless method type => T . This indicates that
* the corresponding argument is not evaluated at the point of function application,
* but instead is evaluated at each use within the function. That is, the argument is
* evaluated using call-by-name.<br/>
* <hr/>
* Practical when: <br/>
* <ul>
* <li>in a logging API, so you don't create the message unless the threshold is set.</li>
* <li>to implement a new control structure, such as a do-while loop</li>
* <li>DSL creation</li>
* </ul>
* Drawbacks:<br/>
* <ul>
* <li>When reading client code, a programmer expects method arguments to be evaluated eagerly. Better tool support may mitigate this.</li>
* </ul>
*/
// Lesson spec: by-name (`=> T`) vs by-value parameter evaluation, verified
// with mocks counting how often the argument expression runs.
object CallByName extends Specification {
"CallByName" should {
trait StringSource {
def make: String
}
// s is by-name: re-evaluated on every use inside the fold.
def repeat(count: Int, s: => String) = {
(0 until count).foldLeft("")((b: String, _: Any) => b + s)
}
"call by name evaluate the parameter lazily and repeatedly" in {
val mockedStringSource = mock[StringSource]
mockedStringSource.make returns "first" thenReturns "second"
val repeated = repeat(2, mockedStringSource.make)
repeated must be_==("firstsecond")
mockedStringSource.make was called.twice
}
// s is by-value: evaluated once at the call site.
def repeatNormal(count: Int, s: String) = {
(0 until count).foldLeft("")((b: String, _: Any) => b + s)
}
"call by value evaluates the parameter eaguerly and once" in {
val mockedStringSource = mock[StringSource]
mockedStringSource.make returns "first"
val repeated = repeatNormal(2, mockedStringSource.make)
repeated must be_==("firstfirst")
mockedStringSource.make was called.once
}
}
}
|
retronym/scala-sandbox
|
commons/src/main/scala/retronym/commons/PartialFunctionW.scala
|
<reponame>retronym/scala-sandbox
package retronym.commons
import BooleanW._
object PartialFunctionW {
  // Implicit view: lets any PartialFunction pick up the PartialFunctionW
  // enrichment methods (e.g. toFunction1) without explicit wrapping.
  implicit def PartialFunctionToPartialFunctionW[A, B](pf: PartialFunction[A, B]) : PartialFunctionW[A, B] = {
    new PartialFunctionW(pf)
  }
}
/**
 * Enrichment over PartialFunction adding a conversion to a total function.
 *
 * Uses the standard library's `PartialFunction.lift` instead of the previous
 * hand-rolled `isDefinedAt`/`apply` pair: `lift` routes through `applyOrElse`,
 * so pattern guards are evaluated once per call rather than twice, and it no
 * longer depends on the BooleanW helper.
 */
class PartialFunctionW[-A, +B](pf: PartialFunction[A, B]) {
  /** Total function: Some(result) where pf is defined, None elsewhere. */
  def toFunction1 : Function1[A, Option[B]] = pf.lift
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/ParsingSpecs.scala
|
package com.github.jancajthaml.money
import org.scalatest.{FlatSpec, Matchers}
class ParsingSpecs extends FlatSpec with Matchers {
val run = true
if (run) "Trivia" should "parse 1" in {
val left = Money("1", "EUR")
assert(left.value == "1")
}
if (run) it should "parse 2" in {
val left = Money("2", "EUR")
assert(left.value == "2")
}
if (run) it should "parse -1" in {
val left = Money("1", "EUR")
assert((-left).value == "-1")
}
if (run) it should s"parse ${Long.MaxValue}${Long.MaxValue}" in {
val left = Money(s"${Long.MaxValue}${Long.MaxValue}", "EUR")
assert(left.value == s"${Long.MaxValue}${Long.MaxValue}")
}
if (run) "Normalized" should "parse 1.0" in {
val left = Money("1.0", "EUR")
assert(left.value == "1.0")
}
if (run) it should "parse 0.1" in {
val left = Money("0.1", "EUR")
assert(left.value == "0.1")
}
if (run) it should "parse 0.01" in {
val left = Money("0.01", "EUR")
assert(left.value == "0.01")
}
if (run) it should "parse 10" in {
val left = Money("10", "EUR")
assert(left.value == "10")
}
if (run) it should "parse 10.0" in {
val left = Money("10.0", "EUR")
assert(left.value == "10.0")
}
if (run) it should "parse 10.1" in {
val left = Money("10.1", "EUR")
assert(left.value == "10.1")
}
if (run) it should "parse 10.01" in {
val left = Money("10.01", "EUR")
assert(left.value == "10.01")
}
if (run) it should "parse 10000000.00000001" in {
val left = Money("10000000.00000001", "EUR")
assert(left.value == "10000000.00000001")
}
if (run) it should "parse 0" in {
val left = Money("0", "EUR")
assert(left.value == "0")
}
if (run) it should "parse -1.0" in {
val left = Money("-1.0", "EUR")
assert(left.value == "-1.0")
}
if (run) it should "parse -0.1" in {
val left = Money("-0.1", "EUR")
assert(left.value == "-0.1")
}
if (run) it should "parse -0.01" in {
val left = Money("-0.01", "EUR")
assert(left.value == "-0.01")
}
if (run) it should "parse -10" in {
val left = Money("-10", "EUR")
assert(left.value == "-10")
}
if (run) it should "parse -10.0" in {
val left = Money("-10.0", "EUR")
assert(left.value == "-10.0")
}
if (run) it should "parse -10.1" in {
val left = Money("-10.1", "EUR")
assert(left.value == "-10.1")
}
if (run) it should "parse -10.01" in {
val left = Money("-10.01", "EUR")
assert(left.value == "-10.01")
}
if (run) it should "parse -10000000.00000001" in {
val left = Money("-10000000.00000001", "EUR")
assert(left.value == "-10000000.00000001")
}
if (run) it should "parse -0" in {
val left = Money("-0", "EUR")
assert(left.value == "-0.0")
}
if (run) "Malformed" should "parse 0.0" in {
val left = Money("0.0", "EUR")
assert(left.value == "0.0")
}
if (run) it should "parse 01.10" in {
val left = Money("01.10", "EUR")
assert(left.value == "01.10")
}
if (run) it should "parse 00000000.00000001" in {
val left = Money("00000000.00000001", "EUR")
assert(left.value == "00000000.00000001")
}
if (run) it should "parse 10000000.00000000" in {
val left = Money("10000000.00000000", "EUR")
assert(left.value == "10000000.00000000")
}
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/SerializationSpecs.scala
|
package com.github.jancajthaml.money
import org.scalatest.{FlatSpec, Matchers}
class SerializationSpecs extends FlatSpec with Matchers {
val run = true
if (run) "Trivia" should "serialize 1" in {
val left = Money("1", "EUR")
assert(left.toString() == "1")
}
if (run) it should "serialize 10000000.00000001" in {
val left = Money("10000000.00000001", "EUR")
assert(left.toString() == "10000000.00000001")
}
/*
it should "parse 2" in {
val left = Money("2")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('2', '0').deep)
}
it should s"parse ${Long.MaxValue}${Long.MaxValue}" in {
val left = Money(s"${Long.MaxValue}${Long.MaxValue}")
assert(left.exponent == s"${Long.MaxValue}${Long.MaxValue}".size)
//assert(left.signum == false)
assert(left.digits.deep == Array('9', '2', '2', '3', '3', '7', '2', '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '7', '9', '2', '2', '3', '3', '7', '2', '0', '3', '6', '8', '5', '4', '7', '7', '5', '8', '0', '7', '0').deep)
}
"Normalized" should "parse 1.0" in {
val left = Money("1.0")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0').deep)
}
it should "parse 0.1" in {
val left = Money("0.1")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '1').deep)
}
it should "parse 0.01" in {
val left = Money("0.01")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '0', '1').deep)
}
it should "parse 10" in {
val left = Money("10")
assert(left.exponent == 2)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '0').deep)
}
it should "parse 10.0" in {
val left = Money("10.0")
assert(left.exponent == 2)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '0').deep)
}
it should "parse 10.1" in {
val left = Money("10.1")
assert(left.exponent == 2)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '1').deep)
}
it should "parse 10.01" in {
val left = Money("10.01")
assert(left.exponent == 2)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '0', '1').deep)
}
it should "parse 10000000.00000001" in {
val left = Money("10000000.00000001")
assert(left.exponent == 8)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1').deep)
}
it should "parse 0" in {
val left = Money("0")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '0').deep)
}
it should "parse -1.0" in {
val left = Money("-1.0")
assert(left.exponent == 1)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0').deep)
}
it should "parse -0.1" in {
val left = Money("-0.1")
assert(left.exponent == 1)
//assert(left.signum == true)
assert(left.digits.deep == Array('0', '1').deep)
}
it should "parse -0.01" in {
val left = Money("-0.01")
assert(left.exponent == 1)
//assert(left.signum == true)
assert(left.digits.deep == Array('0', '0', '1').deep)
}
it should "parse -10" in {
val left = Money("-10")
assert(left.exponent == 2)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0', '0').deep)
}
it should "parse -10.0" in {
val left = Money("-10.0")
assert(left.exponent == 2)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0', '0').deep)
}
it should "parse -10.1" in {
val left = Money("-10.1")
assert(left.exponent == 2)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0', '1').deep)
}
it should "parse -10.01" in {
val left = Money("-10.01")
assert(left.exponent == 2)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0', '0', '1').deep)
}
it should "parse -10000000.00000001" in {
val left = Money("-10000000.00000001")
assert(left.exponent == 8)
//assert(left.signum == true)
assert(left.digits.deep == Array('1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1').deep)
}
it should "parse -0" in {
val left = Money("-0")
assert(left.exponent == 1)
//assert(left.signum == true)
assert(left.digits.deep == Array('0', '0').deep)
}
"Malformed parsing" should "parse 0.0" in {
val left = Money("0.0")
assert(left.exponent == 1)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '0').deep)
}
it should "parse 01.10" in {
val left = Money("01.10")
assert(left.exponent == 2)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '1', '1', '0').deep)
}
it should "parse 00000000.00000001" in {
val left = Money("00000000.00000001")
assert(left.exponent == 8)
//assert(left.signum == false)
assert(left.digits.deep == Array('0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '1').deep)
}
it should "parse 10000000.00000000" in {
val left = Money("10000000.00000000")
assert(left.exponent == 8)
//assert(left.signum == false)
assert(left.digits.deep == Array('1', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0').deep)
}
val randomDecimalL = new java.math.BigDecimal(java.lang.Math.random()).divide(new java.math.BigDecimal(10000 + ".0"), java.math.BigDecimal.ROUND_UP).multiply(new java.math.BigDecimal("100000000000.0")).toBigInteger()
val randomDecimalR = new java.math.BigDecimal(java.lang.Math.random()).divide(new java.math.BigDecimal(10000 + ".0"), java.math.BigDecimal.ROUND_UP).multiply(new java.math.BigDecimal("100000000000.0")).toBigInteger()
val randomDecimal = s"${randomDecimalL}.${randomDecimalR}"
"Random parsing" should s"parse ${randomDecimal}" in {
val left = Money(randomDecimal)
assert(left.exponent == randomDecimal.indexOf('.'))
//assert(left.signum == false)
assert(left.digits.deep == randomDecimal.toCharArray.filterNot(_ == '.').deep)
}
*/
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/AdditionSpecs.scala
|
package com.github.jancajthaml.money
import org.scalatest.{FlatSpec, Matchers}
class AdditionSpecs extends FlatSpec with Matchers {
val run = true
if (run) "Trivia" should "add 1 + 1 = 2.0" in {
val left = Money("1", "EUR")
val right = Money("1", "EUR")
val result = left + right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "2")
}
if (run) it should "add 1 + 1 - 100 + 99 + 2 - 3 = 0.0" in {
val a = Money("1", "EUR")
val b = Money("1", "EUR")
val c = Money("100", "EUR")
val d = Money("99", "EUR")
val e = Money("2", "EUR")
val f = Money("3", "EUR")
val result = a + b - c + d + e - f
assert(result.value == "0")
}
if (run) it should "add 0 + 10001 = 10001.0" in {
val left = Money("0", "EUR")
val right = Money("10001", "EUR")
val result = left + right
//assert(result.decimal == 5)
//assert(result.signum == false)
assert(result.value == "10001")
}
if (run) it should "add 10001 + 0 = 10001.0" in {
val left = Money("10001", "EUR")
val right = Money("0", "EUR")
val result = left + right
//assert(result.decimal == 5)
//assert(result.signum == false)
assert(result.value == "10001")
}
if (run) it should "add -1 + -1 = -2.0" in {
val left = Money("-1", "EUR")
val right = Money("-1", "EUR")
val result = left + right
//assert(result.decimal == 1)
//assert(result.signum == true)
assert(result.value == "-2")
}
if (run) it should "add -0 + -10001 = -10001.0" in {
val left = Money("-0", "EUR")
val right = Money("-10001", "EUR")
val result = left + right
//assert(result.decimal == 5)
//assert(result.signum == true)
assert(result.value == "-10001")
}
if (run) it should "add 1 + 9 = 10.0" in {
val left = Money("1", "EUR")
val right = Money("9", "EUR")
val result = left + right
//assert(result.decimal == 2)
//assert(result.signum == false)
assert(result.value == "10")
}
if (run) it should "add 9 + 9 = 18.0" in {
val left = Money("9", "EUR")
val right = Money("9", "EUR")
val result = left + right
//assert(result.decimal == 2)
//assert(result.signum == false)
assert(result.value == "18")
}
if (run) "Normalized" should "add 1.0 + 0.1 = 1.1" in {
val left = Money("1.0", "EUR")
val right = Money("0.1", "EUR")
val result = left + right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "1.1")
}
if (run) "Normalized" should "add 0.001 + 1000.0 = 1000.001" in {
val left = Money("0.001", "EUR")
val right = Money("1000.0", "EUR")
val result = left + right
//assert(result.decimal == 4)
//assert(result.signum == false)
assert(result.value == "1000.001")
}
if (run) it should "add 1000.0 + 0.001 = 1000.001" in {
val left = Money("1000.0", "EUR")
val right = Money("0.001", "EUR")
val result = left + right
//assert(result.decimal == 4)
//assert(result.signum == false)
assert(result.value == "1000.001")
}
if (run) it should "add 1.01 + 1.00001 = 11.010012" in {
val left = Money("1.010002", "EUR")
val right = Money("10.00001", "EUR")
val result = left + right
//assert(result.decimal == 2)
//assert(result.signum == false)
assert(result.value == "11.010012")
}
if (run) it should "add 0.0000091 + 0.9999909 = 1" in {
val left = Money("0.0000091", "EUR")
val right = Money("0.9999909", "EUR")
val result = left + right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "1")
}
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/ComparationSpecs.scala
|
<gh_stars>1-10
package com.github.jancajthaml.money
import org.scalatest.{FlatSpec, Matchers}
class ComparationSpecs extends FlatSpec with Matchers {
val run = true
if (run) "Trivia" should "parse 1" in {
val left = Money("40", "EUR")
val right = Money("39", "EUR")
assert(left >= right)
assert(left > right)
assert(right <= left)
assert(right < left)
}
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/SubtractionSpecs.scala
|
<filename>src/test/scala/com/github/jancajthaml/money/SubtractionSpecs.scala
package com.github.jancajthaml.money
import org.scalatest.{FlatSpec, Matchers}
class SubtractionSpecs extends FlatSpec with Matchers {
val run = true
if (run) "Trivia" should "subtract 3 - 2 = 1.0" in {
val left = Money("3", "EUR")
val right = Money("2", "EUR")
val result = left - right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "1")
}
if (run) it should "subtract 2 - 3 = -1.0" in {
val left = Money("2", "EUR")
val right = Money("3", "EUR")
val result = left - right
//assert(result.decimal == 1)
//assert(result.signum == true)
assert(result.value == "-1")
}
if (run) it should "subtract 100 - 1 = 99.0" in {
val left = Money("100", "EUR")
val right = Money("1", "EUR")
val result = left - right
//assert(result.decimal == 3)
//assert(result.signum == false)
assert(result.value == "99")
}
if (run) it should "subtract 1 - 0.00001 = 0.99999" in {
val left = Money("1", "EUR")
val right = Money("0.00001", "EUR")
val result = left - right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "0.99999")
}
if (run) it should "subtract 0.00501 - 1 = -0.99499" in {
val left = Money("0.00501", "EUR")
val right = Money("1", "EUR")
val result = left - right
//assert(result.decimal == 1)
//assert(result.signum == true)
assert(result.value == "-0.99499")
}
if (run) it should "subtract 0 - 10001 = -10001.0" in {
val left = Money("0", "EUR")
val right = Money("10001", "EUR")
val result = left - right
//assert(result.decimal == 5)
//assert(result.signum == true)
assert(result.value == "-10001")
}
if (run) it should "subtract 10001 - 0 = 10001.0" in {
val left = Money("10001", "EUR")
val right = Money("0", "EUR")
val result = left - right
//assert(result.decimal == 5)
//assert(result.signum == false)
assert(result.value == "10001")
}
if (run) it should "subtract -1 - -1 = 0.0" in {
val left = Money("-1", "EUR")
val right = Money("-1", "EUR")
val result = left - right
//assert(result.decimal == 1)
//assert(result.signum == false)
assert(result.value == "0")
}
/*
if (run) it should "add -0 + -10001 = -10001.0" in {
val left = Money("-0")
val right = Money("-10001")
val result = left + right
assert(result.exponent == 5)
//assert(result.signum == true)
assert(result.digits.deep == Array('1', '0', '0', '0', '1', '0').deep)
}
if (run) it should "add 1 + 9 = 10.0" in {
val left = Money("1")
val right = Money("9")
val result = left + right
assert(result.exponent == 2)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '0', '0').deep)
}
if (run) it should "add 9 + 9 = 18.0" in {
val left = Money("9")
val right = Money("9")
val result = left + right
assert(result.exponent == 2)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '8', '0').deep)
}
if (run) "Normalized" should "add 1.0 + 0.1 = 1.1" in {
val left = Money("1.0")
val right = Money("0.1")
val result = left + right
assert(result.exponent == 1)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '1').deep)
}
if (run) "Normalized" should "add 0.001 + 1000.0 = 1000.001" in {
val left = Money("0.001")
val right = Money("1000.0")
val result = left + right
assert(result.exponent == 4)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '0', '0', '0', '0', '0', '1').deep)
}
if (run) it should "add 1000.0 + 0.001 = 1000.001" in {
val left = Money("1000.0")
val right = Money("0.001")
val result = left + right
assert(result.exponent == 4)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '0', '0', '0', '0', '0', '1').deep)
}
if (run) it should "add 1.01 + 1.00001 = 11.010012" in {
val left = Money("1.010002")
val right = Money("10.00001")
val result = left + right
assert(result.exponent == 2)
//assert(result.signum == false)
assert(result.digits.deep == Array('1', '1', '0', '1', '0', '0', '1', '2').deep)
}
if (run) it should "add 0.0000091 + 0.9999909 = 1.0000000" in {
val left = Money("0.0000091")
val right = Money("0.9999909")
val result = left + right
assert(result.exponent == 1)
//assert(result.signum == false)
assert(result.digits.deep == Array('1','0', '0', '0', '0', '0', '0', '0').deep)
}
if (run) {
val randomDecimalL = new java.math.BigDecimal(java.lang.Math.random()).divide(new java.math.BigDecimal(10000 + ".0"), java.math.BigDecimal.ROUND_UP).multiply(new java.math.BigDecimal("100000000000.0")).toBigInteger()
val randomDecimalR = new java.math.BigDecimal(java.lang.Math.random()).divide(new java.math.BigDecimal(10000 + ".0"), java.math.BigDecimal.ROUND_UP).multiply(new java.math.BigDecimal("100000000000.0")).toBigInteger()
val randomDecimal = s"${randomDecimalL}.${randomDecimalR}"
val resultDecimal = new java.math.BigDecimal(randomDecimal).add(new java.math.BigDecimal(randomDecimal)).toPlainString()
"Random" should s"add ${randomDecimal} + ${randomDecimal} = ${resultDecimal}" in {
val left = Money(randomDecimal)
val right = Money(randomDecimal)
val result = left + right
assert(result.exponent == resultDecimal.indexOf('.'))
//assert(result.signum == false)
assert(result.digits.deep == resultDecimal.toCharArray.filterNot(_ == '.').deep)
}
}
*/
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/ParsingPerformance.scala
|
<filename>src/test/scala/com/github/jancajthaml/money/ParsingPerformance.scala<gh_stars>1-10
package com.github.jancajthaml.money
import org.scalameter.api.{Measurer, Bench, Gen, exec}
// Benchmarks a Money parse + compact + serialize round-trip against the
// equivalent scala.math.BigDecimal and java.math.BigDecimal pipelines.
object ParsingPerformance extends Bench.OfflineReport {
  // Workload sizes: 0, 1000, 2000, ..., 10000 iterations per measurement.
  val times = Gen.range("times")(0, 10000, 1000)
  performance of "Money" in {
    measure method "construct + compact + serialize" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        (0 to sz).foreach { x => Money("10000000.00000001", "EUR").toString() }
      } }
    }
  }
  performance of "scala.math.BigDecimal" in {
    measure method "construct + serialize" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        (0 to sz).foreach { x => BigDecimal("10000000.00000001").underlying.stripTrailingZeros().toPlainString() }
      } }
    }
  }
  performance of "java.math.BigDecimal" in {
    measure method "construct + serialize" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        (0 to sz).foreach { x => new java.math.BigDecimal("10000000.00000001").stripTrailingZeros().toPlainString() }
      } }
    }
  }
}
|
jancajthaml-scala/money
|
src/test/scala/com/github/jancajthaml/money/SubtractionPerformance.scala
|
package com.github.jancajthaml.money
import org.scalameter.api.{Measurer, Bench, Gen, exec}
// Benchmarks Money subtraction against the equivalent scala.math.BigDecimal
// and java.math.BigDecimal subtract + strip + serialize pipelines.
object SubtractionPerformance extends Bench.OfflineReport {
  // Workload sizes: 0, 1000, 2000, ..., 10000 iterations per measurement.
  val times = Gen.range("times")(0, 10000, 1000)
  performance of "subtraction" in {
    measure method "Money.subtract" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        val a = Money("10000.00001", "EUR")
        val b = Money("0.0000000957", "EUR")
        (0 to sz).foreach { x => (a - b).toString() }
      } }
    }
    measure method "scala.math.BigDecimal.subtract" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        val a = BigDecimal("10000.00001")
        val b = BigDecimal("0.0000000957")
        (0 to sz).foreach { x => (a - b).underlying.stripTrailingZeros().toPlainString() }
      } }
    }
    measure method "java.math.BigDecimal.subtract" in {
      using(times) config (
        exec.minWarmupRuns -> 10,
        exec.maxWarmupRuns -> 10,
        exec.benchRuns -> 20,
        exec.independentSamples -> 1
      ) in { sz => {
        val a = new java.math.BigDecimal("10000.00001")
        val b = new java.math.BigDecimal("0.0000000957")
        (0 to sz).foreach { x => (a.subtract(b)).stripTrailingZeros().toPlainString() }
      } }
    }
  }
}
|
jancajthaml-scala/money
|
src/main/scala/com/github/jancajthaml/money/Money.scala
|
package com.github.jancajthaml.money
import Money._
import java.math.{BigDecimal => BigDec, MathContext}
object Money {
  /** Lazily parses `repr` into the backing java BigDecimal on first use. */
  private def assertUnderlying(x: Money) = {
    if (x.underlying == null) {
      x.underlying = new BigDec(x.repr, MathContext.DECIMAL128)
    }
  }
  // The four operations below mutate `l` in place and flag it dirty so its
  // textual value is recomputed lazily; callers always pass a fresh copy of
  // the left operand (see Money.+ etc.), so shared instances are never mutated.
  private def add(l: Money, r: Money) = {
    l.dirty = true
    l.underlying = l.underlying.add(r.underlying)
    l
  }
  private def sub(l: Money, r: Money) = {
    l.dirty = true
    l.underlying = l.underlying.subtract(r.underlying)
    l
  }
  private def mul(l: Money, r: Money) = {
    l.dirty = true
    l.underlying = l.underlying.multiply(r.underlying)
    l
  }
  private def div(l: Money, r: Money) = {
    l.dirty = true
    // Bound the quotient with DECIMAL128 (same context used when parsing):
    // a bare divide() throws ArithmeticException for any non-terminating
    // decimal expansion, e.g. Money("1") / Money("3").
    l.underlying = l.underlying.divide(r.underlying, MathContext.DECIMAL128)
    l
  }
}
/**
 * A currency-tagged decimal amount, lazily backed by java.math.BigDecimal.
 *
 * Invariants: `repr` holds the exact textual value; `underlying` stays null
 * until arithmetic/comparison first needs it (Money.assertUnderlying); `dirty`
 * means `repr` is stale and must be regenerated from `underlying`.
 *
 * NOTE(review): a case class with mutable private state means equals/hashCode
 * can change over an instance's lifetime — avoid using Money as a hash-map key
 * while arithmetic results are pending; confirm callers accept this.
 */
case class Money(private var repr: String, val currency: String) extends Comparable[Money] {
  // True after an arithmetic op until value() re-derives repr.
  private var dirty = false
  // Parsed numeric value; null until first materialised.
  private var underlying: BigDec = null
  /** Canonical textual value; compacts and re-derives repr after arithmetic. */
  def value() = {
    if (dirty) {
      underlying = underlying.stripTrailingZeros()
      repr = underlying.toPlainString()
      dirty = false
    }
    repr
  }
  override def toString() = value
  /** Numeric ordering; only amounts of the same currency are comparable. */
  override def compareTo(r: Money) = {
    if (currency != r.currency) {
      throw new UnsupportedOperationException(s"cannot compareTo ${this} with ${r}")
    } else {
      assertUnderlying(this)
      assertUnderlying(r)
      underlying.compareTo(r.underlying)
    }
  }
  /** Compare against a scala BigDecimal (no currency check possible here). */
  def compareTo(r: BigDecimal) = {
    assertUnderlying(this)
    underlying.compareTo(r.underlying)
  }
  // Materialises both operands and enforces same-currency; returns r on success.
  private def check(r: Money) = {
    assertUnderlying(this)
    assertUnderlying(r)
    if (currency != r.currency) {
      throw new UnsupportedOperationException(s"${this} and ${r} are of different currencies")
    } else r
  }
  // Per-instance function value backing the ordering operators below.
  val compare = (r: Money) => compareTo(r)
  def <=(r: Money) = compare(r) <= 0
  def <(r: Money) = compare(r) < 0
  def >=(r: Money) = compare(r) >= 0
  def >(r: Money) = compare(r) > 0
  // Each arithmetic operator copies `this` into a fresh `l`, validates both
  // operands, then mutates only the fresh copy (see Money.add et al.), so
  // neither original operand is modified.
  def +(r: Money) = {
    val l = Money(value(), currency)
    check(l)
    check(r)
    add(l, r)
  }
  def -(r: Money) = {
    val l = Money(value(), currency)
    check(l)
    check(r)
    sub(l, r)
  }
  def *(r: Money) = {
    val l = Money(value(), currency)
    check(l)
    check(r)
    mul(l, r)
  }
  def /(r: Money) = {
    val l = Money(value(), currency)
    check(l)
    check(r)
    div(l, r)
  }
  /** Negation; eagerly normalises so the result is never dirty. */
  def unary_- = {
    val x = Money(value(), currency)
    x.underlying = new BigDec(x.repr, MathContext.DECIMAL128).negate().stripTrailingZeros()
    x.repr = x.underlying.toPlainString()
    x.dirty = false
    x
  }
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/service/FileStoreServiceSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.service
import org.mockito.ArgumentCaptor
import org.mockito.ArgumentMatchers.any
import org.mockito.BDDMockito.given
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfterEach
import org.scalatest.concurrent.Eventually
import org.scalatestplus.mockito.MockitoSugar
import play.api.libs.Files.TemporaryFile
import uk.gov.hmrc.bindingtarifffilestore.audit.AuditService
import uk.gov.hmrc.bindingtarifffilestore.config.{AppConfig, FileStoreSizeConfiguration}
import uk.gov.hmrc.bindingtarifffilestore.connector.{AmazonS3Connector, UpscanConnector}
import uk.gov.hmrc.bindingtarifffilestore.model._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan._
import uk.gov.hmrc.bindingtarifffilestore.repository.FileMetadataMongoRepository
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.ExecutionContext
import scala.concurrent.Future.successful
import scala.concurrent.ExecutionContext.Implicits.global
class FileStoreServiceSpec extends UnitSpec with MockitoSugar with BeforeAndAfterEach with Eventually {
private val config = mock[AppConfig]
private val s3Connector = mock[AmazonS3Connector]
private val repository = mock[FileMetadataMongoRepository]
private val upscanConnector = mock[UpscanConnector]
private val auditService = mock[AuditService]
private implicit val hc: HeaderCarrier = HeaderCarrier()
private val service = new FileStoreService(config, s3Connector, repository, upscanConnector, auditService)
private final val emulatedFailure = new RuntimeException("Emulated failure.")
override protected def afterEach(): Unit = {
super.afterEach()
reset(config, s3Connector, repository, upscanConnector, auditService)
}
"Service 'delete all' " should {
"Clear the Database & File Store" in {
when(repository.deleteAll()).thenReturn(successful(()))
await(service.deleteAll()) shouldBe ((): Unit)
verify(repository).deleteAll()
verify(s3Connector).deleteAll()
}
"Propagate any error" in {
when(repository.deleteAll()).thenThrow(emulatedFailure)
val caught = intercept[RuntimeException] {
await(service.deleteAll())
}
caught shouldBe emulatedFailure
}
}
"Service 'delete one' " should {
"Clear the Database & File Store" in {
when(repository.delete("id")).thenReturn(successful(()))
await(service.delete("id")) shouldBe ((): Unit)
verify(repository).delete("id")
verify(s3Connector).delete("id")
}
"Propagate any error" in {
when(repository.deleteAll()).thenThrow(emulatedFailure)
val caught = intercept[RuntimeException] {
await(service.deleteAll())
}
caught shouldBe emulatedFailure
}
}
"Service 'getAll by id' " should {
"Delegate to Connector" in {
val attachment = mock[FileMetadata]
val attachmentSigned = mock[FileMetadata]
given(attachment.published).willReturn(true)
given(repository.get("id")).willReturn(successful(Some(attachment)))
given(s3Connector.sign(attachment)).willReturn(attachmentSigned)
await(service.find("id")) shouldBe Some(attachmentSigned)
}
"Not sign unpublished files" in {
val attachment = mock[FileMetadata]
given(repository.get("id")).willReturn(successful(Some(attachment)))
await(service.find("id")) shouldBe Some(attachment)
verify(s3Connector, never()).sign(any[FileMetadata])
}
}
"Service 'getAll by search' " should {
"delegate to repository" in {
given(repository.get(Search(), Pagination())).willReturn(successful(Paged.empty[FileMetadata]))
await(service.find(Search(), Pagination())) shouldBe Paged.empty[FileMetadata]
}
"return all attachment requested already signed" in {
val attachment1 = mock[FileMetadata]
val attSigned1 = mock[FileMetadata]
val attachment2 = mock[FileMetadata]
given(attachment1.published).willReturn(true)
given(s3Connector.sign(attachment1)).willReturn(attSigned1)
given(attachment2.published).willReturn(false)
given(repository.get(Search(), Pagination())).willReturn(successful(Paged(Seq(attachment1, attachment2))))
await(service.find(Search(), Pagination())) shouldBe Paged(Seq(attSigned1, attachment2))
}
}
"Service 'initiate'" should {
"Delegate to Connector" in {
val fileMetadata = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("text/plain"))
val fileMetaDataCreated = mock[FileMetadata]
val uploadTemplate = UpscanTemplate(href = "href", fields = Map("key" -> "value"))
val initiateResponse = UpscanInitiateResponse("ref", uploadTemplate)
given(config.filestoreUrl).willReturn("host")
given(config.fileStoreSizeConfiguration).willReturn(FileStoreSizeConfiguration(1, 1000))
given(config.authorization).willReturn("auth-token")
given(repository.insertFile(fileMetadata)).willReturn(successful(fileMetaDataCreated))
given(upscanConnector.initiate(any[UploadSettings])(any[HeaderCarrier])).willReturn(successful(initiateResponse))
await(service.initiate(fileMetadata)) shouldBe UploadTemplate(
id = "id",
href = "href",
fields = Map("key" -> "value")
)
verify(auditService, times(1)).auditUpScanInitiated(fileId = "id", fileName = Some("file"), upScanRef = "ref")
verifyNoMoreInteractions(auditService)
theInitatePayload shouldBe UploadSettings(
"http://host/file/id/notify?X-Api-Token=<KEY>
1,
1000
)
}
}
"Service 'initiateV2'" should {
"Delegate to Connector" in {
val initiateRequest = v2.FileStoreInitiateRequest(id = Some("id"))
val fileMetadata = FileMetadata("id", None, None)
val uploadTemplate = v2.UpscanFormTemplate(href = "href", fields = Map("key" -> "value"))
val initiateResponse = v2.UpscanInitiateResponse("ref", uploadTemplate)
given(config.filestoreSSL).willReturn(false)
given(config.filestoreUrl).willReturn("host")
given(config.fileStoreSizeConfiguration).willReturn(FileStoreSizeConfiguration(1, 1000))
given(config.authorization).willReturn("auth-token")
given(repository.insertFile(any[FileMetadata])(any[ExecutionContext])).willReturn(successful(fileMetadata))
given(upscanConnector.initiateV2(any[v2.UpscanInitiateRequest])(any[HeaderCarrier]))
.willReturn(successful(initiateResponse))
await(service.initiateV2(initiateRequest)) shouldBe v2
.FileStoreInitiateResponse("id", "ref", initiateResponse.uploadRequest)
verify(auditService, times(1)).auditUpScanInitiated(fileId = "id", fileName = None, upScanRef = "ref")
verifyNoMoreInteractions(auditService)
theInitiateV2Payload shouldBe v2.UpscanInitiateRequest(
callbackUrl = "http://host/file/id/notify?X-Api-Token=<KEY>
successRedirect = None,
errorRedirect = None,
minimumFileSize = Some(1),
maximumFileSize = Some(1000),
expectedContentType = None
)
}
}
/** Captures and returns the payload most recently passed to `upscanConnector.initiateV2`. */
private def theInitiateV2Payload: v2.UpscanInitiateRequest = {
  val captor: ArgumentCaptor[v2.UpscanInitiateRequest] = ArgumentCaptor.forClass(classOf[v2.UpscanInitiateRequest])
  verify(upscanConnector).initiateV2(captor.capture())(any[HeaderCarrier])
  captor.getValue
}
"Service 'upload' " should {
  "Delegate to Connector" in {
    // Given: a temporary file plus metadata, persisted metadata, and a stubbed
    // UpScan initiate/upload round trip.
    val file                = mock[TemporaryFile]
    val fileMetadata        = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("text/plain"))
    val fileWithMetadata    = FileWithMetadata(file, fileMetadata)
    val fileMetaDataCreated = mock[FileMetadata]
    val uploadTemplate      = mock[UpscanTemplate]
    val initiateResponse    = UpscanInitiateResponse("ref", uploadTemplate)
    given(config.filestoreUrl).willReturn("host")
    given(config.fileStoreSizeConfiguration).willReturn(FileStoreSizeConfiguration(1, 1000))
    given(config.authorization).willReturn("auth-token")
    given(repository.insertFile(fileMetadata)).willReturn(successful(fileMetaDataCreated))
    given(upscanConnector.initiate(any[UploadSettings])(any[HeaderCarrier])).willReturn(successful(initiateResponse))
    given(upscanConnector.upload(any[UpscanTemplate], any[FileWithMetadata])).willReturn(successful((): Unit))

    // When / Then: the persisted metadata is returned.
    await(service.upload(fileWithMetadata)) shouldBe fileMetaDataCreated

    verify(auditService, times(1)).auditUpScanInitiated(fileId = "id", fileName = Some("file"), upScanRef = "ref")
    verifyNoMoreInteractions(auditService)

    // Callback URL carries the stubbed `config.authorization` token.
    // (Restores the string literal truncated by the source dump's redaction.)
    theInitatePayload shouldBe UploadSettings(
      "http://host/file/id/notify?X-Api-Token=auth-token",
      1,
      1000
    )
  }
}
/** Captures and returns the payload most recently passed to `upscanConnector.initiate`.
  * NOTE(review): name is missing an "i" ("Initate" vs "Initiate") — kept as-is
  * because other tests in this spec reference it.
  */
private def theInitatePayload: UploadSettings = {
  val captor: ArgumentCaptor[UploadSettings] = ArgumentCaptor.forClass(classOf[UploadSettings])
  verify(upscanConnector).initiate(captor.capture())(any[HeaderCarrier])
  captor.getValue
}
"Service 'notify' " should {

  "Update the attachment for Successful Scan and Delegate to Connector" in {
    // Given: a successful scan result whose URL/status should be copied onto the metadata.
    val attachment = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"))
    val attachmentUpdated = mock[FileMetadata]("updated")
    val uploadDetails = mock[UploadDetails]
    val scanResult = SuccessfulScanResult("ref", "url", uploadDetails)
    val expectedAttachment = attachment.copy(url = Some("url"), scanStatus = Some(ScanStatus.READY))
    given(uploadDetails.fileName).willReturn("file")
    given(uploadDetails.fileMimeType).willReturn("type")
    given(repository.update(expectedAttachment)).willReturn(successful(Some(attachmentUpdated)))
    // When / Then: the repository's updated metadata is returned and the scan is audited.
    await(service.notify(attachment, scanResult)) shouldBe Some(attachmentUpdated)
    verify(auditService, times(1))
      .auditFileScanned(fileId = "id", fileName = Some("file"), upScanRef = "ref", upScanStatus = "READY")
    verifyNoMoreInteractions(auditService)
  }

  "Call publish when notifying published files" in {
    // Mocks model each stage of the notify -> update -> publish -> update -> sign pipeline.
    val attachment = mock[FileMetadata]("Attachment")
    val scanResult = SuccessfulScanResult("ref", "url", mock[UploadDetails])
    val attachmentUpdating = mock[FileMetadata]("AttachmentUpdating")
    val attachmentUpdated = mock[FileMetadata]("AttachmentUpdated")
    val attachmentUploaded = mock[FileMetadata]("AttachmentUploaded")
    val attachmentUploadedUpdating = mock[FileMetadata]("AttachmentUploadedUpdating")
    val attachmentUploadedUpdated = mock[FileMetadata]("AttachmentUploadedAndUpdated")
    val attachmentSigned = mock[FileMetadata]("AttachmentSigned")
    // The attachment is publishable, not yet published, and still live — so
    // notify should trigger publication.
    given(attachment.withScanResult(scanResult)).willReturn(attachmentUpdating)
    given(attachment.publishable).willReturn(true)
    given(attachment.published).willReturn(false)
    given(attachment.isLive).willReturn(true)
    given(attachment.id).willReturn("id")
    given(attachment.fileName).willReturn(Some("file"))
    given(attachmentUpdating.publishable).willReturn(true)
    given(attachmentUpdating.published).willReturn(false)
    given(attachmentUpdating.isLive).willReturn(true)
    given(attachmentUpdated.publishable).willReturn(true)
    given(attachmentUpdated.published).willReturn(false)
    given(attachmentUpdated.isLive).willReturn(true)
    given(attachmentUpdated.scanStatus).willReturn(Some(ScanStatus.READY))
    given(attachmentUpdated.id).willReturn("id")
    given(attachmentUpdated.fileName).willReturn(Some("file"))
    given(attachmentUploaded.published).willReturn(true)
    given(attachmentUploaded.publishable).willReturn(true)
    given(attachmentUploaded.isLive).willReturn(true)
    given(attachmentUploaded.copy(published = true, publishable = true)).willReturn(attachmentUploadedUpdating)
    given(attachmentUploadedUpdated.published).willReturn(true)
    given(attachmentUploadedUpdated.isLive).willReturn(true)
    given(repository.update(attachmentUpdating)).willReturn(successful(Some(attachmentUpdated)))
    given(s3Connector.upload(attachmentUpdated)).willReturn(attachmentUploaded)
    given(repository.update(attachmentUploadedUpdating)).willReturn(successful(Some(attachmentUploadedUpdated)))
    given(s3Connector.sign(attachmentUploadedUpdated)).willReturn(attachmentSigned)
    // The final, signed metadata comes back; both scan and publish are audited.
    await(service.notify(attachment, scanResult)) shouldBe Some(attachmentSigned)
    verify(auditService, times(1))
      .auditFileScanned(fileId = "id", fileName = Some("file"), upScanRef = "ref", upScanStatus = "READY")
    verify(auditService, times(1)).auditFilePublished(fileId = "id", fileName = "file")
    verifyNoMoreInteractions(auditService)
  }

  "Skip publishing when the file no longer exists" in {
    // The repository update returns None (file deleted meanwhile) — publication
    // must be skipped entirely.
    val attachment = mock[FileMetadata]("Attachment")
    val uploadDetails = mock[UploadDetails]
    val scanResult = SuccessfulScanResult("ref", "url", uploadDetails)
    val attachmentUpdating = mock[FileMetadata]("AttachmentUpdating")
    val attachmentUpdated = mock[FileMetadata]("AttachmentUpdated")
    given(attachment.withScanResult(scanResult)).willReturn(attachmentUpdating)
    given(attachment.publishable).willReturn(true)
    given(attachment.id).willReturn("id")
    given(attachment.fileName).willReturn(Some("file"))
    given(attachmentUpdating.published).willReturn(true)
    given(attachmentUpdated.published).willReturn(true)
    given(attachmentUpdated.scanStatus).willReturn(Some(ScanStatus.READY))
    given(attachmentUpdated.id).willReturn("id")
    given(attachmentUpdated.fileName).willReturn(Some("file"))
    given(repository.update(attachmentUpdating)).willReturn(successful(None))
    await(service.notify(attachment, scanResult)) shouldBe None
    // No S3 interaction and no publish audit event — only the scan is audited.
    verify(s3Connector, never()).upload(any[FileMetadata])
    verify(s3Connector, never()).sign(any[FileMetadata])
    verify(auditService, times(1))
      .auditFileScanned(fileId = "id", fileName = Some("file"), upScanRef = "ref", upScanStatus = "READY")
    verify(auditService, never()).auditFilePublished(fileId = "id", fileName = "file")
    verifyNoMoreInteractions(auditService)
  }

  "Update the attachment for Failed Scan and Delegate to Connector" in {
    // A failed scan only records FAILED status; nothing is published.
    val attachment = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"))
    val scanResult = FailedScanResult("ref", mock[FailureDetails])
    val expectedAttachment = attachment.copy(scanStatus = Some(ScanStatus.FAILED))
    val attachmentUpdated = mock[FileMetadata]("updated")
    given(repository.update(expectedAttachment)).willReturn(successful(Some(attachmentUpdated)))
    await(service.notify(attachment, scanResult)) shouldBe Some(attachmentUpdated)
    verify(auditService, times(1))
      .auditFileScanned(fileId = "id", fileName = Some("file"), upScanRef = "ref", upScanStatus = "FAILED")
    verifyNoMoreInteractions(auditService)
  }
}
"Service 'publish' " should {

  "Delegate to the File Store if Scanned Safe" in {
    // Happy path: READY + unpublished + live => upload to S3, mark published, sign URL.
    val fileUploading = mock[FileMetadata]("Uploading")
    val fileUploaded = mock[FileMetadata]("Uploaded")
    val fileUpdating = mock[FileMetadata]("Updating")
    val fileUpdated = mock[FileMetadata]("Updated")
    val fileSigned = mock[FileMetadata]("Signed")
    given(fileUploading.scanStatus).willReturn(Some(ScanStatus.READY))
    given(fileUploading.published).willReturn(false)
    given(fileUploading.isLive).willReturn(true)
    given(fileUploading.id).willReturn("id")
    given(fileUploading.fileName).willReturn(Some("file"))
    given(fileUploaded.copy(published = true)).willReturn(fileUpdating)
    given(fileUpdated.published).willReturn(true)
    given(s3Connector.upload(fileUploading)).willReturn(fileUploaded)
    given(repository.update(any[FileMetadata])(any[ExecutionContext])).willReturn(successful(Some(fileUpdated)))
    given(s3Connector.sign(fileUpdated)).willReturn(fileSigned)
    await(service.publish(fileUploading)) shouldBe Some(fileSigned)
    verify(auditService, times(1)).auditFilePublished(fileId = "id", fileName = "file")
    verifyNoMoreInteractions(auditService)
  }

  "Clear up unpublished expired files" in {
    // An unpublished file that is no longer live is deleted instead of published.
    val fileUploading = mock[FileMetadata]("Uploading")
    given(fileUploading.scanStatus).willReturn(Some(ScanStatus.READY))
    given(fileUploading.published).willReturn(false)
    given(fileUploading.isLive).willReturn(false)
    given(fileUploading.id).willReturn("id")
    given(repository.delete(any[String])(any[ExecutionContext])).willReturn(successful(()))
    await(service.publish(fileUploading)) shouldBe None
    verify(repository).delete("id")
    verifyZeroInteractions(auditService, s3Connector)
  }

  "Not delegate to the File Store if pre published" in {
    // Already-published files are returned untouched; no S3, repo or audit calls.
    val fileUploading = mock[FileMetadata]("Uploading")
    given(fileUploading.scanStatus).willReturn(Some(ScanStatus.READY))
    given(fileUploading.published).willReturn(true)
    await(service.publish(fileUploading)) shouldBe Some(fileUploading)
    verifyZeroInteractions(auditService, s3Connector, repository)
  }

  "Not delegate to the File Store if Scanned UnSafe" in {
    // FAILED scans are still marked published in the repository but never sent to S3.
    val fileUploading = mock[FileMetadata]("Uploading")
    val fileUpdating = mock[FileMetadata]("Updating")
    val fileUpdated = mock[FileMetadata]("Updated")
    given(fileUploading.scanStatus).willReturn(Some(ScanStatus.FAILED))
    given(fileUploading.copy(published = true)).willReturn(fileUpdating)
    given(repository.update(any[FileMetadata])(any[ExecutionContext])).willReturn(successful(Some(fileUpdated)))
    await(service.publish(fileUploading)) shouldBe Some(fileUpdated)
    verifyZeroInteractions(auditService, s3Connector)
  }

  "Not delegate to the File Store if Unscanned" in {
    // Same behaviour as FAILED when no scan result has arrived yet.
    val fileUploading = mock[FileMetadata]("Uploading")
    val fileUpdating = mock[FileMetadata]("Updating")
    val fileUpdated = mock[FileMetadata]("Updated")
    given(fileUploading.scanStatus).willReturn(None)
    given(fileUploading.copy(published = true)).willReturn(fileUpdating)
    given(repository.update(any[FileMetadata])(any[ExecutionContext])).willReturn(successful(Some(fileUpdated)))
    await(service.publish(fileUploading)) shouldBe Some(fileUpdated)
    verifyZeroInteractions(auditService, s3Connector)
  }
}
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/model/Search.scala
|
<gh_stars>0
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import play.api.mvc.QueryStringBindable
import scala.util.Try
// Query filter for file metadata searches: by explicit ids and/or published state.
// A None field means "no constraint on this attribute".
case class Search(
  ids: Option[Set[String]] = None,
  published: Option[Boolean] = None
)
object Search {

  private val idKey        = "id"
  private val publishedKey = "published"

  /** Binds a [[Search]] from the request's query string. Binding never fails:
    * absent or unparseable parameters simply leave the corresponding filter unset.
    */
  implicit def bindable(
    implicit
    stringBinder: QueryStringBindable[String],
    booleanBinder: QueryStringBindable[Boolean]
  ): QueryStringBindable[Search] = new QueryStringBindable[Search] {

    override def bind(key: String, requestParams: Map[String, Seq[String]]): Option[Either[String, Search]] = {
      // Every successfully parsed, comma-separated value for `name`;
      // None when the parameter is absent or nothing parses.
      def parsedValues[T](name: String, parse: String => T): Option[Set[T]] =
        requestParams.get(name) match {
          case Some(raw) =>
            val values = raw
              .flatMap(_.split(",").filter(_.nonEmpty))
              .flatMap(v => Try(parse(v)).toOption)
              .toSet
            if (values.isEmpty) None else Some(values)
          case None => None
        }

      // A single parsed value for `name`, if any.
      def parsedValue[T](name: String, parse: String => T): Option[T] =
        parsedValues(name, parse).map(_.head)

      Some(
        Right(
          Search(
            ids = parsedValues(idKey, identity),
            published = parsedValue(publishedKey, _.toBoolean)
          )
        )
      )
    }

    override def unbind(key: String, search: Search): String = {
      val boundIds       = search.ids.map(_.map(stringBinder.unbind(idKey, _)).mkString("&"))
      val boundPublished = search.published.map(booleanBinder.unbind(publishedKey, _))
      Seq(boundIds, boundPublished).flatten.mkString("&")
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/controllers/ErrorHandling.scala
|
<gh_stars>0
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.controllers
import play.api.Logging
import play.api.mvc.{Action, AnyContent, BaseController, Request, Result}
import reactivemongo.core.errors.DatabaseException
import uk.gov.hmrc.bindingtarifffilestore.model.{ErrorCode, JsErrorResponse}
import scala.concurrent.{ExecutionContext, Future}
trait ErrorHandling { self: BaseController with Logging =>

  // Mongo error code raised when a unique-index constraint is violated.
  private val DuplicateKeyError = 11000

  /** Maps persistence failures to HTTP results: 409 for duplicate keys, 500 otherwise. */
  private[controllers] def mongoErrorHandler: PartialFunction[Throwable, Result] = {
    case dbError: DatabaseException if dbError.code.contains(DuplicateKeyError) =>
      Conflict(JsErrorResponse(ErrorCode.CONFLICT, "Entity already exists"))
    case error =>
      logger.error(s"An unexpected error occurred: ${error.getMessage}", error)
      InternalServerError(JsErrorResponse(ErrorCode.UNKNOWN_ERROR, "An unexpected error occurred"))
  }

  /** Builds an action that runs `f` and recovers known persistence failures. */
  def withErrorHandling(f: Request[AnyContent] => Future[Result])(implicit ec: ExecutionContext): Action[AnyContent] =
    Action.async { request =>
      f(request).recover(mongoErrorHandler)
    }

  /** Wraps an existing action with the same persistence-failure recovery. */
  def withErrorHandling[A](action: Action[A])(implicit ec: ExecutionContext): Action[A] =
    Action(action.parser).async { request =>
      action(request).recover(mongoErrorHandler)
    }
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/model/PagedTest.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import play.api.libs.json.{JsObject, Json}
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
class PagedTest extends UnitSpec {

  "Paged" should {

    "map" in {
      val page = Paged(Seq("hello"))
      page.map(_.toUpperCase) shouldBe Paged(Seq("HELLO"))
    }

    "calculate size" in {
      Paged.empty.size    shouldBe 0
      Paged(Seq("")).size shouldBe 1
    }

    "serialize to JSON" in {
      val expectedJson = Json.obj(
        "results"     -> Json.arr("Hello"),
        "pageIndex"   -> 1,
        "pageSize"    -> 2,
        "resultCount" -> 3
      )
      Json.toJson(Paged(Seq("Hello"), 1, 2, 3)).as[JsObject] shouldBe expectedJson
    }

    "serialize from JSON" in {
      val json = Json.obj(
        "results"     -> Json.arr("Hello"),
        "pageIndex"   -> 1,
        "pageSize"    -> 2,
        "resultCount" -> 3
      )
      json.as[Paged[String]] shouldBe Paged(Seq("Hello"), 1, 2, 3)
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/audit/AuditService.scala
|
<reponame>hmrc/binding-tariff-filestore
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.audit
import javax.inject.{Inject, Singleton}
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.bootstrap.audit.DefaultAuditConnector
import scala.concurrent.ExecutionContext.Implicits.global
@Singleton
class AuditService @Inject() (auditConnector: DefaultAuditConnector) {

  import AuditPayloadType._

  /** Audits that an UpScan upload was initiated for the given file. */
  def auditUpScanInitiated(fileId: String, fileName: Option[String], upScanRef: String)(
    implicit hc: HeaderCarrier
  ): Unit =
    sendExplicitAuditEvent(
      auditEventType = UpScanInitiated,
      auditPayload = fileDetailsAuditPayload(fileId, fileName) + ("upScanReference" -> upScanRef)
    )

  /** Audits the outcome of an UpScan virus scan for the given file. */
  def auditFileScanned(fileId: String, fileName: Option[String], upScanRef: String, upScanStatus: String)(
    implicit hc: HeaderCarrier
  ): Unit =
    sendExplicitAuditEvent(
      auditEventType = FileScanned,
      auditPayload =
        fileDetailsAuditPayload(fileId, fileName) + ("upScanReference" -> upScanRef, "upScanStatus" -> upScanStatus)
    )

  /** Audits that a scanned file was published to permanent storage. */
  def auditFilePublished(fileId: String, fileName: String)(implicit hc: HeaderCarrier): Unit =
    sendExplicitAuditEvent(
      auditEventType = FilePublished,
      auditPayload = fileDetailsAuditPayload(fileId, Some(fileName))
    )

  // Single payload builder: the file name is included only when known.
  // (Replaces two near-identical overloads that duplicated this logic.)
  private def fileDetailsAuditPayload(fileId: String, fileName: Option[String]): Map[String, String] =
    Map(
      "fileId" -> fileId
    ) ++ fileName.map(name => Map("fileName" -> name)).getOrElse(Map.empty)

  private def sendExplicitAuditEvent(auditEventType: String, auditPayload: Map[String, String])(
    implicit hc: HeaderCarrier
  ): Unit =
    auditConnector.sendExplicitAudit(auditType = auditEventType, detail = auditPayload)
}
// Event-type identifiers sent to the audit connector (one per audited action).
object AuditPayloadType {
  val UpScanInitiated = "upScanInitiated"
  val FileScanned = "fileScanned"
  val FilePublished = "filePublished"
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/connector/AmazonS3Connector.scala
|
<reponame>hmrc/binding-tariff-filestore
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.connector
import java.io.BufferedInputStream
import java.net.URL
import java.util
import com.amazonaws.HttpMethod
import com.amazonaws.auth.{AWSStaticCredentialsProvider, BasicAWSCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion
import com.amazonaws.services.s3.model._
import com.amazonaws.services.s3.{AmazonS3, AmazonS3ClientBuilder}
import com.google.inject.Inject
import javax.inject.Singleton
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadata
import uk.gov.hmrc.bindingtarifffilestore.util.Logging
import scala.collection.JavaConverters
import scala.util.{Failure, Success, Try}
@Singleton
class AmazonS3Connector @Inject() (config: AppConfig) extends Logging {

  private lazy val s3Config    = config.s3Configuration
  private lazy val credentials = new BasicAWSCredentials(s3Config.key, s3Config.secret)
  private lazy val provider    = new AWSStaticCredentialsProvider(credentials)

  // Lazily-built client; targets a custom endpoint (e.g. localstack/WireMock)
  // when one is configured, otherwise the configured AWS region.
  private lazy val s3client: AmazonS3 = {
    // Log only non-sensitive connection details. (Previously the access key and
    // a prefix of the secret were written to the log, which leaks credential
    // material and would also throw if the secret were shorter than 3 chars.)
    log.info(s"Creating S3 client for bucket [${s3Config.bucket}] in region [${s3Config.region}]")
    val builder = AmazonS3ClientBuilder
      .standard()
      .withCredentials(provider)
      .withPathStyleAccessEnabled(true)
    s3Config.endpoint match {
      case Some(endpoint) => builder.withEndpointConfiguration(new EndpointConfiguration(endpoint, s3Config.region))
      case _              => builder.withRegion(s3Config.region)
    }
    builder.build()
  }

  /** Keys of every object currently in the configured bucket. */
  def getAll: Seq[String] =
    sequenceOf(
      s3client.listObjects(s3Config.bucket).getObjectSummaries
    ).map(_.getKey)

  /** Streams the file at `fileMetaData.url` into the bucket and returns the
    * metadata with its URL rewritten to the new S3 location.
    *
    * @throws IllegalArgumentException if the metadata carries no source URL
    */
  def upload(fileMetaData: FileMetadata): FileMetadata = {
    val url: URL = new URL(fileMetaData.url.getOrElse(throw new IllegalArgumentException("Missing URL")))
    val metadata = new ObjectMetadata
    // This .get is scary but our file must have received a positive scan
    // result and received metadata from Upscan if it is being published
    metadata.setContentType(fileMetaData.mimeType.get)
    metadata.setContentLength(contentLengthOf(url))
    val request = new PutObjectRequest(
      s3Config.bucket,
      fileMetaData.id,
      new BufferedInputStream(url.openStream()),
      metadata
    ).withCannedAcl(CannedAccessControlList.Private)
    Try(s3client.putObject(request)) match {
      case Success(_) =>
        fileMetaData.copy(url = Some(s"${s3Config.baseUrl}/${s3Config.bucket}/${fileMetaData.id}"))
      case Failure(e: Throwable) =>
        log.error("Failed to upload to the S3 bucket.", e)
        throw e
    }
  }

  /** Deletes a single object by its key. */
  def delete(id: String): Unit =
    s3client.deleteObject(s3Config.bucket, id)

  /** Deletes every object in the bucket; no S3 delete call is made when empty. */
  def deleteAll(): Unit = {
    val keys: Seq[KeyVersion] = getAll.map(new KeyVersion(_))
    if (keys.nonEmpty) {
      log.info(s"Removing [${keys.length}] files from S3")
      val request = new DeleteObjectsRequest(s3Config.bucket)
        .withKeys(JavaConverters.seqAsJavaList(keys))
        .withQuiet(false)
      s3client.deleteObjects(request)
    } else {
      log.info(s"No files to remove from S3")
    }
  }

  /** Replaces the metadata's URL with a pre-signed GET URL, when a URL is present. */
  def sign(fileMetaData: FileMetadata): FileMetadata =
    if (fileMetaData.url.isDefined) {
      val authenticatedURLRequest = new GeneratePresignedUrlRequest(config.s3Configuration.bucket, fileMetaData.id)
        .withMethod(HttpMethod.GET)
      val authenticatedURL: URL = s3client.generatePresignedUrl(authenticatedURLRequest)
      fileMetaData.copy(url = Some(authenticatedURL.toString))
    } else {
      fileMetaData
    }

  // Opens a connection solely to read Content-Length; the connection is not
  // explicitly disconnected (preserves original behaviour).
  private def contentLengthOf(url: URL): Long =
    url.openConnection.getContentLengthLong

  private def sequenceOf[T](list: util.List[T]): Seq[T] =
    JavaConverters.asScalaIteratorConverter(list.iterator).asScala.toSeq
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/connector/AmazonS3ConnectorSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.connector
import com.amazonaws.services.s3.model.AmazonS3Exception
import com.github.tomakehurst.wiremock.client.WireMock._
import org.mockito.BDDMockito.given
import org.scalatest.BeforeAndAfterEach
import org.scalatestplus.mockito.MockitoSugar
import play.api.http.Status
import play.api.libs.Files.SingletonTemporaryFileCreator
import uk.gov.hmrc.bindingtarifffilestore.config.{AppConfig, S3Configuration}
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadata
import uk.gov.hmrc.bindingtarifffilestore.util.{ResourceFiles, UnitSpec, WiremockTestServer}
class AmazonS3ConnectorSpec
    extends UnitSpec
    with WiremockTestServer
    with MockitoSugar
    with BeforeAndAfterEach
    with ResourceFiles {

  // Points the connector at the local WireMock server instead of real AWS;
  // credentials are dummies used only to check the signed Authorization header.
  private val s3Config = S3Configuration("key", "secret", "region", "bucket", Some(s"http://localhost:$wirePort"))
  private val config = mock[AppConfig]
  private val connector = new AmazonS3Connector(config)

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    given(config.s3Configuration).willReturn(s3Config)
  }

  "Get All" should {
    "Delegate to S3" in {
      // Given
      stubFor(
        get("/bucket/?encoding-type=url")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
              .withBody(fromFile("aws/list-objects_response.xml"))
          )
      )
      // When
      val all: Seq[String] = connector.getAll
      // Then
      all should have size 1
      all.head shouldBe "image.jpg"
    }
  }

  "Upload" should {
    "Delegate to S3" in {
      // Given
      stubFor(
        put("/bucket/id")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .withHeader("Content-Type", equalTo("text/plain"))
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
          )
      )
      // Real temp file on disk, referenced by file:// URL, as the upload source.
      val url = SingletonTemporaryFileCreator.create("example.txt").path.toUri.toURL.toString
      val fileUploading = FileMetadata("id", Some("file.txt"), Some("text/plain"), Some(url))
      // Then: the returned metadata's URL points at the new S3 object.
      val result = connector.upload(fileUploading)
      result.id shouldBe "id"
      result.fileName shouldBe Some("file.txt")
      result.mimeType shouldBe Some("text/plain")
      result.url.get shouldBe s"$wireMockUrl/bucket/id"
    }
    "Throw Exception on missing URL" in {
      // Given: metadata with no source URL at all.
      val fileUploading = FileMetadata("id", Some("file.txt"), Some("text/plain"))
      // Then
      val exception = intercept[IllegalArgumentException] {
        connector.upload(fileUploading)
      }
      exception.getMessage shouldBe "Missing URL"
    }
    "Throw Exception on upload failure" in {
      // Given: S3 responds 502, which the AWS SDK surfaces as AmazonS3Exception.
      stubFor(
        put("/bucket/id")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .withHeader("Content-Type", equalTo("text/plain"))
          .willReturn(
            aResponse()
              .withStatus(Status.BAD_GATEWAY)
          )
      )
      val url = SingletonTemporaryFileCreator.create("example.txt").path.toUri.toURL.toString
      val fileUploading = FileMetadata("id", Some("file.txt"), Some("text/plain"), Some(url))
      // Then
      val exception = intercept[AmazonS3Exception] {
        connector.upload(fileUploading)
      }
      exception.getMessage shouldBe "Bad Gateway (Service: Amazon S3; Status Code: 502; Error Code: 502 Bad Gateway; Request ID: null; S3 Extended Request ID: null; Proxy: null)"
    }
  }

  "Sign" should {
    "append token to URL" in {
      // Given
      val file = FileMetadata("id", Some("file.txt"), Some("text/plain"), Some("url"))
      // When: signing replaces the URL with a pre-signed one.
      connector.sign(file).url.get should startWith(s"$wireMockUrl/bucket/id?X-Amz-Algorithm=AWS4-HMAC-SHA256")
    }
    "not append token to empty URL" in {
      // Given
      val file = FileMetadata("id", Some("file.txt"), Some("text/plain"), None)
      // When: no URL means signing is a no-op.
      connector.sign(file).url shouldBe None
    }
  }

  "Delete All" should {
    "Delegate to S3" in {
      // List returns one object; a bulk delete request must follow.
      stubFor(
        get("/bucket/?encoding-type=url")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
              .withBody(fromFile("aws/list-objects_response.xml"))
          )
      )
      stubFor(
        post("/bucket/?delete")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
              .withBody(fromFile("aws/delete-objects_response.xml"))
          )
      )
      connector.deleteAll()
      verify(
        postRequestedFor(urlEqualTo("/bucket/?delete"))
          .withRequestBody(equalToXml(fromFile("aws/delete-objects_request.xml")))
      )
    }
    "Do nothing for no files" in {
      // Empty listing: no delete call should be made at all.
      stubFor(
        get("/bucket/?encoding-type=url")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
              .withBody(fromFile("aws/empty-list-objects_response.xml"))
          )
      )
      connector.deleteAll()
      verify(0, postRequestedFor(urlEqualTo("/bucket/?delete")))
    }
  }

  "Delete One" should {
    "Delegate to S3" in {
      stubFor(
        delete("/bucket/id")
          .withHeader(
            "Authorization",
            matching(s"AWS4-HMAC-SHA256 Credential=${s3Config.key}/\\d+/${s3Config.region}/s3/aws4_request, .*")
          )
          .willReturn(
            aResponse()
              .withStatus(Status.OK)
          )
      )
      connector.delete("id")
      verify(deleteRequestedFor(urlEqualTo("/bucket/id")))
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/repository/MongoIndexCreator.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.repository
import reactivemongo.api.indexes.IndexType.Ascending
import reactivemongo.api.indexes.{Index, IndexType}
import reactivemongo.bson.BSONDocument
object MongoIndexCreator {

  /** Convenience wrapper: builds an ascending index over a single field. */
  def createSingleFieldAscendingIndex(
    indexFieldKey: String,
    isUnique: Boolean = false,
    options: BSONDocument = BSONDocument()
  ): Index =
    createCompoundIndex(
      indexFieldMappings = Seq(indexFieldKey -> Ascending),
      isUnique = isUnique,
      options = options
    )

  /** Builds a (possibly compound) index over the given field/direction pairs.
    * When no name is supplied, one is derived from the field keys.
    * NOTE(review): the derived name goes through `toMap.keys`, which de-duplicates
    * keys and (beyond four entries) does not guarantee insertion order — confirm
    * generated names match already-deployed indexes before changing this.
    */
  def createCompoundIndex(
    indexFieldMappings: Seq[(String, IndexType)],
    isUnique: Boolean,
    name: Option[String] = None,
    isBackground: Boolean = false,
    options: BSONDocument
  ): Index =
    Index(
      key = indexFieldMappings,
      name = Some(name.getOrElse(s"${indexFieldMappings.toMap.keys.mkString("-")}_Index")),
      unique = isUnique,
      background = isBackground,
      options = options
    )
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/repository/FileMetadataMongoRepository.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.repository
import java.time.Instant
import javax.inject.{Inject, Singleton}
import play.api.libs.json.{JsBoolean, JsObject, JsValue, Json}
import reactivemongo.api.indexes.Index
import reactivemongo.api.{Cursor, QueryOpts}
import reactivemongo.bson.BSONObjectID
import reactivemongo.play.json.ImplicitBSONHandlers._
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadataMongo.format
import uk.gov.hmrc.bindingtarifffilestore.model._
import uk.gov.hmrc.bindingtarifffilestore.repository.MongoIndexCreator.createSingleFieldAscendingIndex
import uk.gov.hmrc.bindingtarifffilestore.util.Logging
import uk.gov.hmrc.mongo.ReactiveRepository
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class FileMetadataMongoRepository @Inject() (mongoDbProvider: MongoDbProvider)
    extends ReactiveRepository[FileMetadata, BSONObjectID](
      collectionName = "fileMetadata",
      mongo = mongoDbProvider.mongo,
      domainFormat = FileMetadataMongo.format
    ) with Logging {

  // All lookups are by the application-level `id` field, so it must be unique.
  override lazy val indexes: Seq[Index] = Seq(
    createSingleFieldAscendingIndex("id", isUnique = true)
  )

  override def ensureIndexes(implicit ec: ExecutionContext): Future[Seq[Boolean]] =
    for {
      status <- Future.sequence(indexes.map(collection.indexesManager.ensure(_)))
      // Fire-and-forget diagnostic listing of the indexes now in place; the
      // returned future is deliberately not awaited.
      _ = collection.indexesManager.list().foreach {
            _.foreach(index => log.info(s"Running with Index: [$index] with options [${Json.toJson(index.options)}]"))
          }
    } yield status

  /** Fetches the metadata with the given application-level id, if present. */
  def get(id: String)(implicit ec: ExecutionContext): Future[Option[FileMetadata]] =
    collection.find(byId(id)).one[FileMetadata]

  /** Pages through metadata matching the given search filters. */
  def get(search: Search, pagination: Pagination)(implicit ec: ExecutionContext): Future[Paged[FileMetadata]] = {
    // Build the Mongo query from only the filters actually supplied.
    val query = JsObject(
      Map[String, JsValue]()
        ++ search.ids.map(ids => "id" -> Json.obj("$in" -> ids))
        ++ search.published.map(pub => "published" -> JsBoolean(pub))
    )
    for {
      results <- collection
                   .find(query)
                   .options(
                     QueryOpts(skipN = (pagination.page - 1) * pagination.pageSize, batchSizeN = pagination.pageSize)
                   )
                   .cursor[FileMetadata]()
                   .collect[Seq](pagination.pageSize, Cursor.FailOnError[Seq[FileMetadata]]())
      count   <- collection.count(Some(query))
    } yield Paged(results, pagination, count)
  }

  /** Inserts or replaces the metadata with the same id, returning the stored document. */
  def insertFile(att: FileMetadata)(implicit ec: ExecutionContext): Future[FileMetadata] =
    collection
      .findAndUpdate(
        selector = byId(att.id),
        update = att,
        fetchNewObject = true,
        upsert = true
      )
      .map(
        _.value
          .map(_.as[FileMetadata](FileMetadataMongo.format))
          // With upsert = true and fetchNewObject = true the driver should always
          // return a document; fail with context rather than a bare
          // NoSuchElementException (previously a naked .get) if it ever does not.
          .getOrElse(throw new IllegalStateException(s"Failed to upsert file metadata [${att.id}]"))
      )

  /** Updates an existing document (refreshing lastUpdated); None if it does not exist. */
  def update(att: FileMetadata)(implicit ec: ExecutionContext): Future[Option[FileMetadata]] =
    collection
      .findAndUpdate(
        selector = byId(att.id),
        update = att.copy(lastUpdated = Instant.now()),
        fetchNewObject = true,
        upsert = false
      )
      .map(_.value.map(_.as[FileMetadata](FileMetadataMongo.format)))

  /** Removes the document with the given id, if present. */
  def delete(id: String)(implicit ec: ExecutionContext): Future[Unit] =
    collection.findAndRemove(byId(id)).map(_ => ())

  /** Removes every document in the collection. */
  def deleteAll()(implicit ec: ExecutionContext): Future[Unit] =
    removeAll().map(_ => ())

  private def byId(id: String): JsObject =
    Json.obj("id" -> id)
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/controllers/FileStoreController.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.controllers
import java.util.UUID
import javax.inject.{Inject, Singleton}
import play.api.Logging
import play.api.libs.Files.TemporaryFile
import play.api.libs.json.Json
import play.api.mvc._
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadataREST._
import uk.gov.hmrc.bindingtarifffilestore.model._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.ScanResult
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.v2._
import uk.gov.hmrc.bindingtarifffilestore.service.FileStoreService
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.bootstrap.backend.controller.BackendController
import scala.concurrent.{ExecutionContext, Future}
import play.api.libs.json.JsValue
/**
  * REST entry points for the file store: initiating Upscan uploads (v1 and v2),
  * receiving Upscan scan-result callbacks, publishing files and serving
  * metadata. Responses use the REST JSON projection [[FileMetadataREST]].
  */
@Singleton
class FileStoreController @Inject() (
  appConfig: AppConfig,
  service: FileStoreService,
  parse: PlayBodyParsers,
  mcc: MessagesControllerComponents
)(implicit ec: ExecutionContext)
    extends BackendController(mcc)
    with ErrorHandling
    with JsonParsing
    with Logging {

  // Canonical 404 body, reused by several endpoints.
  private lazy val FileNotFound =
    NotFound(JsErrorResponse(ErrorCode.NOTFOUND, "File Not Found"))

  // Runs `f` with the metadata for `id`, or answers 404 when the id is unknown.
  private def withFileMetadata(id: String)(f: FileMetadata => Future[Result]): Future[Result] =
    service.find(id).flatMap {
      case Some(meta) =>
        f(meta)
      case None =>
        Future.successful(FileNotFound)
    }

  // Guards destructive endpoints so they only run in test mode (see TestMode).
  lazy private val testModeFilter = TestMode.actionFilter(appConfig, parse.default)

  /**
    * DELETE every stored file. Wrapped in the test-mode filter.
    * NOTE(review): deleteAll is test-mode-guarded while delete(id) below is
    * not — confirm that asymmetry is intentional.
    */
  def deleteAll(): Action[AnyContent] = withErrorHandling {
    testModeFilter.async {
      service
        .deleteAll()
        .map(_ => NoContent)
    }
  }

  /** DELETE a single file by id; 204 regardless of whether it existed. */
  def delete(id: String): Action[AnyContent] = withErrorHandling { _ =>
    service
      .delete(id)
      .map(_ => NoContent)
  }

  /** POST (v2): initiates an Upscan upload and returns the upload form details. */
  def initiate: Action[AnyContent] = withErrorHandling {
    Action.async { implicit request =>
      asJson[FileStoreInitiateRequest] { fileStoreRequest =>
        service
          .initiateV2(fileStoreRequest)
          .map(response => Accepted(Json.toJson(response)))
      }
    }
  }

  /**
    * POST (v1): either a JSON UploadRequest (initiate-only) or a multipart
    * body carrying the file itself; any other content type is a 400.
    */
  def upload: Action[AnyContent] = withErrorHandling { implicit request =>
    if (request.contentType.contains("application/json")) {
      asJson[UploadRequest] { uploadRequest =>
        service
          .initiate(FileMetadata.fromUploadRequest(uploadRequest))
          .map(template => Accepted(Json.toJson(template)))
      }
    } else if (request.contentType.contains("multipart/form-data")) {
      request.body.asMultipartFormData
        .map(upload)
        .getOrElse(Future.successful(BadRequest))
    } else {
      Future.successful(BadRequest("Content-Type must be one of [application/json, multipart/form-data]"))
    }
  }

  /** GET the metadata for a single file, or 404. */
  def get(id: String): Action[AnyContent] = Action.async {
    withFileMetadata(id)(meta => Future.successful(Ok(Json.toJson(meta))))
  }

  /** POST callback from Upscan delivering the virus-scan result for `id`. */
  def notification(id: String): Action[JsValue] = withErrorHandling {
    Action(parse.json).async { implicit req =>
      withJsonBody[ScanResult] { scanResult =>
        withFileMetadata(id) { meta =>
          service
            .notify(meta, scanResult)
            .map(updatedMeta => Created(Json.toJson(updatedMeta)))
        }
      }
    }
  }

  /** POST: publishes a scanned file to permanent storage; 404 if it vanished. */
  def publish(id: String): Action[AnyContent] = withErrorHandling { implicit request =>
    withFileMetadata(id) { meta =>
      service.publish(meta).map {
        case Some(updatedMeta) =>
          Accepted(Json.toJson(updatedMeta))
        case None =>
          FileNotFound
      }
    }
  }

  /**
    * GET files matching `search`. With explicit pagination the full Paged
    * envelope is returned; without it, just the (unbounded) result array —
    * preserving the pre-pagination response shape.
    */
  def getAll(search: Search, pagination: Option[Pagination]): Action[AnyContent] = withErrorHandling { _ =>
    service.find(search, pagination.getOrElse(Pagination.max)).map { pagedResults =>
      if (pagination.isDefined) {
        Ok(Json.toJson(pagedResults))
      } else {
        Ok(Json.toJson(pagedResults.results))
      }
    }
  }

  /**
    * Handles a multipart upload: builds FileMetadata from the form parts and
    * delegates to the service. 400 when the "file" part is missing or unnamed.
    * Throws if the file part has no content type (surfaced via withErrorHandling).
    */
  private def upload(body: MultipartFormData[TemporaryFile])(implicit hc: HeaderCarrier): Future[Result] = {
    // Ignore file parts with an empty filename (browsers send these for empty inputs).
    val formFile = body.file("file").filter(_.filename.nonEmpty)
    val publishable: Boolean = body.dataParts.getOrElse("publish", Seq.empty).contains("true")
    // Caller may supply the id; otherwise generate one.
    val id: String = body.dataParts.getOrElse("id", Seq.empty).headOption.getOrElse(UUID.randomUUID().toString)
    val attachment: Option[FileWithMetadata] = formFile map { file =>
      FileWithMetadata(
        file.ref,
        FileMetadata(
          id = id,
          fileName = Some(file.filename),
          mimeType = Some(file.contentType.getOrElse(throw new RuntimeException("Missing file type"))),
          publishable = publishable
        )
      )
    }
    attachment
      .map(service.upload(_).map(f => Accepted(Json.toJson(f))))
      .getOrElse(Future.successful(BadRequest(JsErrorResponse(ErrorCode.INVALID_REQUEST_PAYLOAD, "Invalid File"))))
  }
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/audit/AuditServiceTest.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.audit
import org.mockito.ArgumentMatchers.refEq
import org.mockito.Mockito.{reset, verify}
import org.scalatest.BeforeAndAfterEach
import org.scalatestplus.mockito.MockitoSugar
import uk.gov.hmrc.bindingtarifffilestore.audit.AuditPayloadType._
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.bootstrap.audit.DefaultAuditConnector
import scala.concurrent.ExecutionContext.Implicits.global
/** Verifies that each AuditService method emits the expected explicit audit event. */
class AuditServiceTest extends UnitSpec with MockitoSugar with BeforeAndAfterEach {

  private implicit val hc: HeaderCarrier = HeaderCarrier()

  private val auditConnector = mock[DefaultAuditConnector]
  private val service        = new AuditService(auditConnector)

  private val fileId       = "id"
  private val fileName     = "name"
  private val upScanStatus = "upscan-status"
  private val upScanRef    = "upscan-ref"

  override protected def afterEach(): Unit = {
    super.afterEach()
    // Clear recorded interactions so each test verifies only its own call.
    reset(auditConnector)
  }

  // Fields common to every audit payload.
  private def basePayload: Map[String, String] =
    Map(
      "fileId"   -> fileId,
      "fileName" -> fileName
    )

  "auditUpScanInitiated()" should {
    "send the expected payload to the audit connector" in {
      service.auditUpScanInitiated(fileId, Some(fileName), upScanRef)

      val expected = basePayload ++ Map("upScanReference" -> upScanRef)
      verify(auditConnector).sendExplicitAudit(refEq(UpScanInitiated), refEq(expected))(refEq(hc), refEq(global))
    }
  }

  "auditFileScanned()" should {
    "send the expected payload to the audit connector" in {
      service.auditFileScanned(fileId, Some(fileName), upScanRef, upScanStatus)

      val expected = basePayload ++ Map("upScanReference" -> upScanRef, "upScanStatus" -> upScanStatus)
      verify(auditConnector).sendExplicitAudit(refEq(FileScanned), refEq(expected))(refEq(hc), refEq(global))
    }
  }

  "auditFilePublished()" should {
    "send the expected payload to the audit connector" in {
      service.auditFilePublished(fileId, fileName)

      verify(auditConnector).sendExplicitAudit(refEq(FilePublished), refEq(basePayload))(refEq(hc), refEq(global))
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/service/FileStoreService.scala
|
<reponame>hmrc/binding-tariff-filestore
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.service
import play.api.Logging
import uk.gov.hmrc.bindingtarifffilestore.audit.AuditService
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.connector.{AmazonS3Connector, UpscanConnector}
import uk.gov.hmrc.bindingtarifffilestore.controllers.routes
import uk.gov.hmrc.bindingtarifffilestore.model.ScanStatus.READY
import uk.gov.hmrc.bindingtarifffilestore.model._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan._
import uk.gov.hmrc.bindingtarifffilestore.repository.FileMetadataMongoRepository
import uk.gov.hmrc.bindingtarifffilestore.util.HashUtil
import uk.gov.hmrc.http.HeaderCarrier
import java.{util => ju}
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
/**
  * Orchestrates the lifecycle of stored files: initiating Upscan uploads
  * (v1 and v2), recording scan results, publishing clean files to permanent
  * S3 storage, and signing download URLs on the way out.
  */
@Singleton()
class FileStoreService @Inject()(
  appConfig: AppConfig,
  fileStoreConnector: AmazonS3Connector,
  repository: FileMetadataMongoRepository,
  upscanConnector: UpscanConnector,
  auditService: AuditService
)(implicit ec: ExecutionContext)
    extends Logging {

  // Hashed API token appended to the Upscan callback URL. The query string only
  // ever carries the hash (never the raw token); AuthFilter validates it.
  private lazy val authToken = HashUtil.hash(appConfig.authorization)

  /**
    * Initiates an upload for a POST direct to Upscan (v1 flow).
    * Stores the metadata first so the callback can find it, then returns the
    * Upscan upload form template.
    */
  def initiate(metadata: FileMetadata)(implicit hc: HeaderCarrier): Future[UploadTemplate] = {
    val fileId = metadata.id
    log(fileId, "Initiating")
    for {
      _                <- repository.insertFile(metadata)
      initiateResponse <- upscanInitiate(metadata)
      template          = initiateResponse.uploadRequest
    } yield UploadTemplate(fileId, template.href, template.fields)
  }

  /**
    * Initiates an upload using the Upscan v2 API. Persists a placeholder
    * metadata record, registers the scan-result callback URL (authenticated via
    * the hashed token query parameter) and audits the initiation.
    */
  def initiateV2(
    request: v2.FileStoreInitiateRequest
  )(implicit hc: HeaderCarrier): Future[v2.FileStoreInitiateResponse] = {
    val fileId = request.id.getOrElse(ju.UUID.randomUUID().toString())
    log(fileId, "Initiating")
    // FIX: the interpolation must reference the hashed `authToken` value
    // (previously corrupted), matching what AuthFilter accepts on the query string.
    val callbackUrl = routes.FileStoreController
      .notification(fileId)
      .absoluteURL(appConfig.filestoreSSL, appConfig.filestoreUrl) + s"?X-Api-Token=$authToken"
    val fileMetadata  = FileMetadata.fromInitiateRequestV2(fileId, request)
    val upscanRequest = v2.UpscanInitiateRequest.fromFileStoreRequest(callbackUrl, appConfig, request)
    for {
      update           <- repository.insertFile(fileMetadata)
      initiateResponse <- upscanConnector.initiateV2(upscanRequest)
      _ = log(
            fileId,
            s"Upscan Initiated with url [${initiateResponse.uploadRequest.href}] and Upscan reference [${initiateResponse.reference}]"
          )
      _ = auditService.auditUpScanInitiated(update.id, update.fileName, initiateResponse.reference)
    } yield v2.FileStoreInitiateResponse.fromUpscanResponse(fileId, initiateResponse)
  }

  /**
    * Initiates an upload and pushes the file to Upscan in one step (v1 flow).
    * The actual upload to Upscan executes asynchronously on purpose: the caller
    * gets the stored metadata back immediately and the scan callback completes
    * the lifecycle later.
    */
  def upload(fileWithMetadata: FileWithMetadata)(implicit hc: HeaderCarrier): Future[FileMetadata] = {
    val fileId = fileWithMetadata.metadata.id
    log(fileId, "Uploading")
    for {
      update           <- repository.insertFile(fileWithMetadata.metadata)
      initiateResponse <- upscanInitiate(fileWithMetadata.metadata)
      // This future (Upload) executes asynchronously intentionally — it is not
      // part of the yielded result.
      _ = log(
            fileId,
            s"Uploading to Upscan url [${initiateResponse.uploadRequest.href}] with Upscan reference [${initiateResponse.reference}]"
          )
      _ = upscanConnector
            .upload(initiateResponse.uploadRequest, fileWithMetadata)
            .recover {
              case e =>
                error(
                  fileId,
                  s"Upload failed to Upscan url [${initiateResponse.uploadRequest.href}] with Upscan reference [${initiateResponse.reference}]",
                  e
                )
            }
            .onComplete { _ =>
              // NOTE(review): this logs "Uploaded" even after the recover above has
              // swallowed a failure — confirm whether success-only logging is wanted.
              log(
                fileId,
                s"Uploaded to Upscan url [${initiateResponse.uploadRequest.href}] with Upscan reference [${initiateResponse.reference}]"
              )
            }
    } yield update
  }

  /** Finds a file by id, signing its permanent URL if it has been published. */
  def find(id: String): Future[Option[FileMetadata]] =
    repository.get(id) map signingPermanentURL

  /** Finds a page of files matching the search, signing published URLs. */
  def find(search: Search, pagination: Pagination): Future[Paged[FileMetadata]] =
    repository.get(search, pagination) map signingPermanentURLs

  /**
    * Handles the Upscan scan-result callback: audits the result, folds it into
    * the stored metadata and — for publishable clean files — publishes straight
    * away. Returns None when the file no longer exists.
    */
  def notify(attachment: FileMetadata, scanResult: ScanResult)(
    implicit hc: HeaderCarrier
  ): Future[Option[FileMetadata]] = {
    log(
      attachment.id,
      s"Scan completed with status [${scanResult.fileStatus}] and Upscan reference [${scanResult.reference}]"
    )
    auditService
      .auditFileScanned(attachment.id, attachment.fileName, scanResult.reference, scanResult.fileStatus.toString)
    val updatedAttachment = attachment.withScanResult(scanResult)
    scanResult match {
      case FailedScanResult(_, details) =>
        log(attachment.id, s"Scan failed because it was [${details.failureReason}] with message [${details.message}]")
        repository.update(updatedAttachment)
      case SuccessfulScanResult(_, _, details) =>
        log(
          attachment.id,
          s"Scan succeeded with details [${details.fileName}, ${details.fileMimeType}, ${details.checksum}, ${details.uploadTimestamp}]"
        )
        if (updatedAttachment.publishable) {
          for {
            updated: Option[FileMetadata] <- repository.update(updatedAttachment)
            published: Option[FileMetadata] <- updated match {
                                                 case Some(metadata) =>
                                                   publish(metadata)
                                                 case _ =>
                                                   log(
                                                     attachment.id,
                                                     s"Scan completed as READY but it couldn't be published as it no longer exists"
                                                   )
                                                   Future.successful(None)
                                               }
          } yield published
        } else {
          repository.update(updatedAttachment)
        }
    }
  }

  /**
    * Publishes a file to permanent storage when it is clean, unpublished and
    * its temporary download URL is still live. Expired unpublished files are
    * deleted; unscanned files are merely marked publishable; already-published
    * files are returned unchanged.
    */
  def publish(att: FileMetadata)(implicit hc: HeaderCarrier): Future[Option[FileMetadata]] = {
    log(att.id, "Publishing")
    (att.scanStatus, att.published) match {
      // File is Safe, unpublished and the download URL is still live
      case (Some(READY), false) if att.isLive =>
        log(att.id, "Publishing file to Permanent Storage")
        val metadata = fileStoreConnector.upload(att)
        // `.get` assumes a READY scan always populated fileName via withScanResult
        // — TODO(review): confirm no READY file can reach here without a name.
        auditService.auditFilePublished(att.id, att.fileName.get)
        log(att.id, "Published file to Permanent Storage")
        repository
          .update(metadata.copy(publishable = true, published = true))
          .map(signingPermanentURL)
      // File is safe, unpublished but the download URL has expired. Clean Up.
      case (Some(READY), false) =>
        log(att.id, s"Removing as it had an expired download URL [${att.url}]")
        repository.delete(att.id).map(_ => None)
      // File not safe yet & is unpublished
      case (_, false) =>
        log(att.id, s"Marking as publishable")
        repository.update(att.copy(publishable = true))
      // File is already published
      case (_, true) =>
        log(att.id, s"Ignoring publish request as it was already published")
        // Already-computed value: no need to schedule work on the EC.
        Future.successful(Some(att))
    }
  }

  /** Deletes all metadata, then all stored objects. */
  def deleteAll(): Future[Unit] =
    repository.deleteAll() map (_ => fileStoreConnector.deleteAll())

  /** Deletes one file's metadata, then its stored object. */
  def delete(id: String): Future[Unit] = {
    log(id, "Deleting")
    repository.delete(id) map (_ => fileStoreConnector.delete(id))
  }

  /**
    * Calls Upscan initiate (v1) with the scan-callback URL for this file and
    * audits the initiation. The callback carries the hashed token so AuthFilter
    * will admit it.
    */
  private def upscanInitiate(fileMetadata: FileMetadata)(implicit hc: HeaderCarrier): Future[UpscanInitiateResponse] = {
    val settings = UploadSettings(
      // FIX: interpolation must reference `authToken` (previously corrupted).
      callbackUrl = routes.FileStoreController
        .notification(fileMetadata.id)
        .absoluteURL(appConfig.filestoreSSL, appConfig.filestoreUrl) + s"?X-Api-Token=$authToken",
      minimumFileSize = appConfig.fileStoreSizeConfiguration.minFileSize,
      maximumFileSize = appConfig.fileStoreSizeConfiguration.maxFileSize
    )
    for {
      initiateResponse <- upscanConnector.initiate(settings)
      _ = log(
            fileMetadata.id,
            s"Upscan Initiated with url [${initiateResponse.uploadRequest.href}] and Upscan reference [${initiateResponse.reference}]"
          )
      _ = auditService.auditUpScanInitiated(fileMetadata.id, fileMetadata.fileName, initiateResponse.reference)
    } yield initiateResponse
  }

  // URL-signing helpers: only published files get a signed permanent URL.
  private def signingPermanentURL: Option[FileMetadata] => Option[FileMetadata] = _ map signingIfPublished

  private def signingPermanentURLs: Paged[FileMetadata] => Paged[FileMetadata] = _ map signingIfPublished

  private def signingIfPublished: FileMetadata => FileMetadata = {
    case file if file.published => fileStoreConnector.sign(file)
    case other                  => other
  }

  private def error(id: String, message: String, error: Throwable): Unit =
    logger.error(s"File [$id]: $message", error)

  private def log(id: String, message: String): Unit =
    logger.info(s"File [$id]: $message")
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/model/SearchTest.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import java.net.URLDecoder
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
/** Round-trip tests for the Search query-string binder. */
class SearchTest extends UnitSpec {

  private val populatedSearch = Search(
    ids = Some(Set("file-id1", "file-id2")),
    published = Some(true)
  )

  private val queryParams = Map[String, Seq[String]](
    "id"        -> Seq("file-id1", "file-id2"),
    "published" -> Seq("true")
  )

  "Search Binder" should {

    "Unbind Unpopulated Search to Query String" in {
      Search.bindable.unbind("", Search()) shouldBe ""
    }

    "Unbind Populated Search to Query String" in {
      val decoded = URLDecoder.decode(Search.bindable.unbind("", populatedSearch), "UTF-8")
      decoded shouldBe "id=file-id1&id=file-id2&published=true"
    }

    "Bind empty query string" in {
      Search.bindable.bind("", Map()) shouldBe Some(Right(Search()))
    }

    "Bind query string with empty values" in {
      // Same keys, but every value blanked out — should bind as an empty Search.
      val blanked = queryParams.map { case (key, values) => key -> values.map(_ => "") }
      Search.bindable.bind("", blanked) shouldBe Some(Right(Search()))
    }

    "Bind populated query string" in {
      Search.bindable.bind("", queryParams) shouldBe Some(Right(populatedSearch))
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/filters/AuthFilter.scala
|
<gh_stars>0
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.filters
import akka.stream.Materializer
import javax.inject.{Inject, Singleton}
import play.api.mvc.{Filter, RequestHeader, Result, Results}
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.util.HashUtil
import scala.concurrent.Future
/**
  * Rejects any request that does not present a valid API token, either as the
  * plain-text `X-Api-Token` header or as its SHA-256 hash in the `X-Api-Token`
  * query parameter. The health-check endpoint is exempt. When BOTH forms are
  * present, both must be valid.
  */
@Singleton
class AuthFilter @Inject() (appConfig: AppConfig)(implicit override val mat: Materializer) extends Filter {

  // FIX: restore the real header/query-parameter name (was corrupted). The
  // Forbidden message below must read "Missing or invalid 'X-Api-Token'",
  // which is what clients (and AuthSpec) expect.
  private lazy val authTokenName     = "X-Api-Token"
  private lazy val healthEndpointUri = "/ping/ping"

  // Query strings appear in logs and URLs, so only the hash of the configured
  // token is accepted there; the header carries the plain token.
  private lazy val hashedTokenValue: String = HashUtil.hash(appConfig.authorization)

  override def apply(f: RequestHeader => Future[Result])(rh: RequestHeader): Future[Result] =
    rh.uri match {
      // Health checks must stay reachable without credentials.
      case uri if uri.endsWith(healthEndpointUri) => f(rh)
      case _                                      => ensureAuthTokenIsPresent(f, rh)
    }

  /**
    * Admits the request only when every supplied credential matches:
    * valid header + valid query param, valid header alone, or valid query
    * param alone. Anything else (including one valid + one invalid) is 403.
    */
  private def ensureAuthTokenIsPresent(f: RequestHeader => Future[Result], rh: RequestHeader) = {
    val headerValue: Option[String]            = rh.headers.get(authTokenName)
    val hashedQueryParamValues: Option[String] = rh.queryString.get(authTokenName).map(_.head)
    (headerValue, hashedQueryParamValues) match {
      // Stable-identifier patterns: equality against the configured token / its hash.
      case (Some(appConfig.authorization), Some(`hashedTokenValue`)) => f(rh)
      case (Some(appConfig.authorization), None)                     => f(rh)
      case (None, Some(`hashedTokenValue`))                          => f(rh)
      case _                                                         => Future.successful(Results.Forbidden(s"Missing or invalid '$authTokenName'"))
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/controllers/JsonParsing.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.controllers
import play.api.libs.json._
import play.api.mvc._
import uk.gov.hmrc.bindingtarifffilestore.model.JsErrorResponse
import uk.gov.hmrc.bindingtarifffilestore.model.ErrorCode._
import uk.gov.hmrc.play.bootstrap.backend.controller.BackendBaseController
import scala.concurrent.Future
import scala.concurrent.Future.successful
import scala.util.{Failure, Success, Try}
/**
  * JSON body-parsing helpers that translate validation failures into 400
  * responses with a structured JsErrorResponse body instead of letting
  * exceptions or JsErrors escape.
  */
trait JsonParsing { self: BackendBaseController =>

  /**
    * Validates an already-parsed JsValue body as T and runs `f` on success.
    * Validation errors become INVALID_REQUEST_PAYLOAD; unexpected exceptions
    * during validation become UNKNOWN_ERROR.
    */
  override protected def withJsonBody[T](
    f: T => Future[Result]
  )(implicit request: Request[JsValue], m: Manifest[T], reads: Reads[T]): Future[Result] =
    Try(request.body.validate[T]) match {
      case Failure(e) =>
        successful(BadRequest(JsErrorResponse(UNKNOWN_ERROR, e.getMessage)))
      case Success(validated) =>
        validated match {
          case JsSuccess(payload, _) => f(payload)
          case JsError(errs)         => successful(BadRequest(JsErrorResponse(INVALID_REQUEST_PAYLOAD, JsError.toJson(errs))))
        }
    }

  /**
    * Attempts to read the AnyContent body as JSON and validate it as T.
    * A non-JSON body yields a bare 400; invalid JSON structure yields
    * INVALID_REQUEST_PAYLOAD; unexpected exceptions yield UNKNOWN_ERROR.
    */
  protected def asJson[T](
    f: T => Future[Result]
  )(implicit request: Request[AnyContent], reads: Reads[T]): Future[Result] =
    Try(request.body.asJson.map(_.validate[T])) match {
      case Failure(e) =>
        successful(BadRequest(JsErrorResponse(UNKNOWN_ERROR, e.getMessage)))
      case Success(None) =>
        successful(BadRequest)
      case Success(Some(JsError(errs))) =>
        successful(BadRequest(JsErrorResponse(INVALID_REQUEST_PAYLOAD, JsError.toJson(errs))))
      case Success(Some(JsSuccess(payload, _))) =>
        f(payload)
    }
}
|
hmrc/binding-tariff-filestore
|
test/it/uk/gov/hmrc/bindingtarifffilestore/AuthSpec.scala
|
<gh_stars>0
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore
import java.security.MessageDigest
import com.google.common.io.BaseEncoding
import play.api.http.HttpVerbs
import play.api.http.Status._
import scalaj.http.Http
import uk.gov.hmrc.bindingtarifffilestore.util.{BaseFeatureSpec, ResourceFiles}
/** End-to-end checks of the AuthFilter: header token, hashed query token, and both. */
class AuthSpec extends BaseFeatureSpec with ResourceFiles {

  private val serviceUrl = s"http://localhost:$port"

  // SHA-256 hash of the configured token, base64-url encoded — the only form
  // the filter accepts on the query string.
  private val hashedTokenValue = BaseEncoding.base64Url().encode(
    MessageDigest.getInstance("SHA-256")
      .digest(appConfig.authorization.getBytes("UTF-8"))
  )

  // Issues a GET to /file with an optional query string and optional auth header.
  private def getFiles(query: String = "", headerToken: Option[String] = None) = {
    val request = Http(s"$serviceUrl/file$query")
    headerToken
      .fold(request)(token => request.header(apiTokenKey, token))
      .method(HttpVerbs.GET)
      .asString
  }

  Feature("Authentication to incoming requests") {

    Scenario("Allowing requests with expected auth header") {
      When("I call an endpoint")
      val result = getFiles(headerToken = Some(appConfig.authorization))
      Then("The response code should not be 403")
      result.code should not be FORBIDDEN
    }

    Scenario("Forbidding requests with incorrect value for the auth header") {
      When("I call an endpoint")
      val result = getFiles(headerToken = Some("WRONG_TOKEN"))
      Then("The response code should be 403")
      result.code shouldBe FORBIDDEN
      result.body shouldBe "Missing or invalid 'X-Api-Token'"
    }

    Scenario("Forbidding requests with incorrect value for the auth header and expected auth token query param") {
      When("I call an endpoint")
      val result = getFiles(s"?X-Api-Token=$hashedTokenValue", Some("WRONG_TOKEN"))
      Then("The response code should be 403")
      result.code shouldBe FORBIDDEN
      result.body shouldBe "Missing or invalid 'X-Api-Token'"
    }

    Scenario("Forbidding requests with expected value for the auth header and incorrect auth token query param") {
      When("I call an endpoint")
      val result = getFiles("?X-Api-Token=WRONG_TOKEN", Some(appConfig.authorization))
      Then("The response code should be 403")
      result.code shouldBe FORBIDDEN
      result.body shouldBe "Missing or invalid 'X-Api-Token'"
    }

    Scenario("Allowing requests with both expected auth header and expected auth query param") {
      When("I call an endpoint")
      val result = getFiles(s"?X-Api-Token=$hashedTokenValue", Some(appConfig.authorization))
      Then("The response code should be 200")
      result.code shouldBe OK
    }

    Scenario("Forbidding requests with no auth header and no auth query param") {
      When("I call an endpoint")
      val result = getFiles()
      Then("The response code should be 403")
      result.code shouldBe FORBIDDEN
      result.body shouldBe "Missing or invalid 'X-Api-Token'"
    }

    Scenario("Allowing requests with no auth header and with expected auth query param") {
      When("I call an endpoint")
      val result = getFiles(s"?X-Api-Token=$hashedTokenValue")
      Then("The response code should not be 403")
      result.code should not be FORBIDDEN
    }

    Scenario("Forbidding requests with incorrect value for the auth token query param") {
      When("I call an endpoint")
      val result = getFiles("?X-Api-Token=WRONG_VALUE")
      Then("The response code should be 403")
      result.code shouldBe FORBIDDEN
      result.body shouldBe "Missing or invalid 'X-Api-Token'"
    }

    Scenario("Calls to the health endpoint do not require auth token") {
      val result = Http(s"$serviceUrl/ping/ping")
        .method(HttpVerbs.GET)
        .asString
      Then("The response code should be 200")
      result.code shouldBe OK
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/model/FileMetadata.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import java.time.{Instant, LocalDateTime, ZoneOffset}
import java.{util => ju}
import play.api.libs.json._
import uk.gov.hmrc.bindingtarifffilestore.model.ScanStatus._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.v2
/**
  * Metadata describing one stored file. `fileName`/`mimeType` start empty in
  * the v2 flow and are filled in when Upscan's scan result arrives; `url` is
  * the temporary Upscan download URL until publication.
  */
case class FileMetadata(
  id: String,
  fileName: Option[String],
  mimeType: Option[String],
  url: Option[String] = None,
  scanStatus: Option[ScanStatus] = None,
  publishable: Boolean = false,
  published: Boolean = false,
  lastUpdated: Instant = Instant.now()
) {

  /**
    * Whether the download URL can still be fetched. Pre-signed URLs carry
    * `X-Amz-Date` (signing time, yyyyMMdd'T'HHmmss) and `X-Amz-Expires`
    * (lifetime in seconds); the URL is live while signing time + lifetime is
    * in the future. A missing URL, or one without both parameters, counts as live.
    */
  def isLive: Boolean = {
    val signedAtPattern = "X-Amz-Date=(\\d{4})(\\d{2})(\\d{2})T(\\d{2})(\\d{2})(\\d{2})".r.unanchored
    val expiresPattern  = "X-Amz-Expires=(\\d+)".r.unanchored
    this.url.forall { candidate =>
      (signedAtPattern.findFirstMatchIn(candidate), expiresPattern.findFirstMatchIn(candidate)) match {
        case (Some(signedAt), Some(expiry)) =>
          val signingTime = LocalDateTime.of(
            signedAt.group(1).toInt, // year
            signedAt.group(2).toInt, // month
            signedAt.group(3).toInt, // day
            signedAt.group(4).toInt, // hour
            signedAt.group(5).toInt, // minute
            signedAt.group(6).toInt  // second
          )
          signingTime
            .plusSeconds(expiry.group(1).toLong)
            .toInstant(ZoneOffset.UTC)
            .isAfter(Instant.now())
        case _ =>
          true
      }
    }
  }

  /**
    * Folds an Upscan scan result into this metadata: a successful scan fills
    * in the verified name/type/url and marks READY; a failed scan only marks FAILED.
    */
  def withScanResult(scanResult: ScanResult): FileMetadata =
    scanResult match {
      case SuccessfulScanResult(_, downloadUrl, uploadDetails) =>
        copy(
          fileName = Some(uploadDetails.fileName),
          mimeType = Some(uploadDetails.fileMimeType),
          url = Some(downloadUrl),
          scanStatus = Some(ScanStatus.READY)
        )
      case FailedScanResult(_, _) =>
        copy(scanStatus = Some(ScanStatus.FAILED))
    }
}
object FileMetadata {

  /** Builds metadata for a v1 direct upload, generating an id when none is supplied. */
  def fromUploadRequest(uploadRequest: UploadRequest): FileMetadata = {
    val fileId = uploadRequest.id.getOrElse(ju.UUID.randomUUID().toString)
    FileMetadata(
      id = fileId,
      fileName = Some(uploadRequest.fileName),
      mimeType = Some(uploadRequest.mimeType),
      publishable = uploadRequest.publishable
    )
  }

  /**
    * Builds placeholder metadata for a v2 initiate request: name and mime type
    * are unknown until the Upscan scan result arrives.
    */
  def fromInitiateRequestV2(id: String, request: v2.FileStoreInitiateRequest): FileMetadata =
    FileMetadata(
      id = id,
      fileName = None,
      mimeType = None,
      publishable = request.publishable
    )
}
/**
  * REST JSON projection of [[FileMetadata]]. The download `url` is exposed
  * only once the scan status is READY, so unscanned or quarantined files never
  * leak a link.
  */
object FileMetadataREST {

  val writes: OWrites[FileMetadata] = OWrites[FileMetadata] { metadata =>
    // Fields that are always present.
    val mandatory: Map[String, JsValue] = Map(
      "id"          -> JsString(metadata.id),
      "publishable" -> JsBoolean(metadata.publishable),
      "published"   -> JsBoolean(metadata.published),
      "lastUpdated" -> JsString(metadata.lastUpdated.toString)
    )
    // Optional fields, included only when populated.
    val optional: Map[String, JsValue] =
      Map.empty[String, JsValue] ++
        metadata.fileName.map("fileName" -> Json.toJson(_)) ++
        metadata.mimeType.map("mimeType" -> Json.toJson(_)) ++
        metadata.scanStatus.map("scanStatus" -> Json.toJson(_)) ++
        metadata.url.filter(_ => metadata.scanStatus.contains(READY)).map("url" -> JsString(_))
    JsObject(mandatory ++ optional)
  }

  implicit val format: OFormat[FileMetadata] = OFormat(Json.reads[FileMetadata], writes)
}
/**
  * Mongo JSON codec for [[FileMetadata]]. Instants are stored as Mongo
  * extended-JSON `{"$date": <epochMillis>}` rather than ISO strings.
  */
object FileMetadataMongo {

  implicit val instantFormat: OFormat[Instant] = new OFormat[Instant] {

    override def writes(instant: Instant): JsObject =
      Json.obj("$date" -> instant.toEpochMilli)

    override def reads(json: JsValue): JsResult[Instant] = json match {
      case JsObject(fields) =>
        fields.get("$date") match {
          case Some(JsNumber(epochMillis)) => JsSuccess(Instant.ofEpochMilli(epochMillis.toLong))
          case Some(_)                     => JsError("Unexpected Instant Format")
          case None                        => JsError("Unexpected Instant Format")
        }
      case _ =>
        JsError("Unexpected Instant Format")
    }
  }

  // WithDefaultValues lets documents written before a field existed deserialise
  // using the case-class defaults.
  private val underlying = Json.using[Json.WithDefaultValues].format[FileMetadata]

  implicit val format: OFormat[FileMetadata] = OFormat(
    r = underlying,
    w = OWrites(fm => underlying.writes(fm).as[JsObject])
  )
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/model/upscan/ScanResultSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model.upscan
import java.time.Instant
import play.api.libs.json.{JsObject, JsString, Json}
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
/** Round-trip serialisation tests for the ScanResult union format. */
class ScanResultSpec extends UnitSpec {

  "Successful Scan Result" should {
    val result       = SuccessfulScanResult("ref", "url", UploadDetails("file", "type", Instant.EPOCH, "checksum"))
    val expectedJson = Json.obj(
      "reference"   -> "ref",
      "downloadUrl" -> "url",
      "uploadDetails" -> Json.obj(
        "fileName"        -> "file",
        "fileMimeType"    -> "type",
        "uploadTimestamp" -> "1970-01-01T00:00:00Z",
        "checksum"        -> "checksum"
      ),
      "fileStatus" -> "READY"
    )

    "Convert Result to JSON" in {
      Json.toJson[ScanResult](result)(ScanResult.format) shouldBe expectedJson
    }

    "Convert JSON to Result" in {
      Json.fromJson[ScanResult](expectedJson)(ScanResult.format).get shouldBe result
    }
  }

  "Failed Scan Result" should {
    val result       = FailedScanResult("ref", FailureDetails(FailureReason.QUARANTINE, "message"))
    val expectedJson = Json.obj(
      "reference" -> "ref",
      "failureDetails" -> Json.obj(
        "failureReason" -> "QUARANTINE",
        "message"       -> "message"
      ),
      "fileStatus" -> "FAILED"
    )

    "Convert Result to JSON" in {
      Json.toJson[ScanResult](result)(ScanResult.format) shouldBe expectedJson
    }

    "Convert JSON to Result" in {
      Json.fromJson[ScanResult](expectedJson)(ScanResult.format).get shouldBe result
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/model/upscan/v2/UpscanInitiateRequest.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model.upscan.v2
import play.api.libs.json.{Json, OFormat}
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
/**
  * Payload sent to Upscan's v2 initiate endpoint.
  * NOTE(review): min/max sizes are presumably bytes — confirm against the Upscan API contract.
  */
case class UpscanInitiateRequest(
  callbackUrl: String,
  successRedirect: Option[String],
  errorRedirect: Option[String],
  minimumFileSize: Option[Long],
  maximumFileSize: Option[Long],
  expectedContentType: Option[String]
)
object UpscanInitiateRequest {
  implicit val format: OFormat[UpscanInitiateRequest] = Json.format[UpscanInitiateRequest]

  /**
    * Translates a filestore initiate request into Upscan's v2 initiate payload,
    * falling back to the service-configured min/max file sizes when the caller omitted them.
    * (Explicit result type added: public members should not rely on inference.)
    */
  def fromFileStoreRequest(
    callbackUrl: String,
    appConfig: AppConfig,
    request: FileStoreInitiateRequest
  ): UpscanInitiateRequest =
    UpscanInitiateRequest(
      callbackUrl = callbackUrl,
      successRedirect = request.successRedirect,
      errorRedirect = request.errorRedirect,
      minimumFileSize = Some(request.minFileSize.getOrElse(appConfig.fileStoreSizeConfiguration.minFileSize).toLong),
      maximumFileSize = Some(request.maxFileSize.getOrElse(appConfig.fileStoreSizeConfiguration.maxFileSize).toLong),
      expectedContentType = request.expectedContentType
    )
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/model/UploadRequestSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import play.api.libs.json.Json
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
/** Serialisation round-trip tests for UploadRequest, covering the `publishable` default. */
class UploadRequestSpec extends UnitSpec {

  val jsonWithoutPublishable = """{"fileName":"demopage.pdf","mimeType":"application/pdf"}"""
  val jsonWithoutPublishableExpected =
    """{"fileName":"demopage.pdf","mimeType":"application/pdf","publishable":false}"""
  val fullJSON = """{"fileName":"demopage.pdf","mimeType":"application/pdf","publishable":true}"""

  // Parses the JSON into an UploadRequest and serialises it straight back out.
  private def roundTrip(json: String): String =
    Json.toJson(Json.parse(json).as[UploadRequest]).toString

  ".format" when {
    "map full json into object" in {
      roundTrip(fullJSON) shouldBe fullJSON
    }
    "map not full json into object with defaults" in {
      // The absent `publishable` flag defaults to false and is written back explicitly.
      roundTrip(jsonWithoutPublishable) shouldBe jsonWithoutPublishableExpected
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/model/Pagination.scala
|
<filename>app/uk/gov/hmrc/bindingtarifffilestore/model/Pagination.scala
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import play.api.mvc.QueryStringBindable
import uk.gov.hmrc.bindingtarifffilestore.model.Pagination.{defaultPageSize, defaultPageStart}
/** 1-based page number and page size for paged repository queries. */
case class Pagination(
  page: Int = defaultPageStart,
  pageSize: Int = defaultPageSize
)
object Pagination {
  val defaultPageStart = 1
  val defaultPageSize  = 100

  /** A single page large enough to hold every result. */
  val max: Pagination = Pagination(1, Integer.MAX_VALUE)

  private val pageKey     = "page"
  private val pageSizeKey = "page_size"

  /** Binds `page`/`page_size` query parameters; absent or unparseable values fall back to defaults. */
  implicit def bindable(implicit intBinder: QueryStringBindable[Int]): QueryStringBindable[Pagination] =
    new QueryStringBindable[Pagination] {

      override def bind(key: String, params: Map[String, Seq[String]]): Option[Either[String, Pagination]] = {
        // Was `.filter(_.isRight).map(_.right.get)`: the Either right-projection is deprecated
        // and `.get` is unsafe; `toOption` expresses the same "ignore bind failures" intent.
        def param(name: String): Option[Int] = intBinder.bind(name, params).flatMap(_.toOption)

        val page: Option[Int] = param(pageKey).filter(_ > 0)
        // NOTE(review): pageSize is not validated for positivity, unlike page — confirm intentional.
        val pageSize: Option[Int] = param(pageSizeKey)
        (page, pageSize) match {
          case (Some(p), Some(s)) => Some(Right(Pagination(page = p, pageSize = s)))
          case (Some(p), _)       => Some(Right(Pagination(page = p)))
          case (_, Some(s))       => Some(Right(Pagination(pageSize = s)))
          case _                  => None
        }
      }

      override def unbind(key: String, query: Pagination): String =
        Seq[String](
          intBinder.unbind(pageKey, query.page),
          intBinder.unbind(pageSizeKey, query.pageSize)
        ).mkString("&")
    }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/connector/UpscanConnector.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.connector
import org.apache.http.HttpResponse
import org.apache.http.client.methods.HttpPost
import org.apache.http.entity.ContentType
import org.apache.http.entity.mime.MultipartEntityBuilder
import org.apache.http.entity.mime.content.{FileBody, StringBody}
import org.apache.http.impl.client.HttpClientBuilder
import org.apache.http.util.EntityUtils
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.model.FileWithMetadata
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.{UploadSettings, UpscanInitiateResponse, UpscanTemplate, v2}
import uk.gov.hmrc.bindingtarifffilestore.util.Logging
import uk.gov.hmrc.http.{HeaderCarrier, HttpClient}
import javax.inject.{Inject, Singleton}
import scala.concurrent.Future.{failed, successful}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try
@Singleton
class UpscanConnector @Inject() (appConfig: AppConfig, http: HttpClient)(implicit executionContext: ExecutionContext)
    extends Logging {

  /** Starts a v1 Upscan upload journey. */
  def initiate(uploadSettings: UploadSettings)(implicit headerCarrier: HeaderCarrier): Future[UpscanInitiateResponse] =
    http.POST[UploadSettings, UpscanInitiateResponse](
      s"${appConfig.upscanInitiateUrl}/upscan/initiate",
      uploadSettings
    )

  /** Starts a v2 Upscan upload journey. */
  def initiateV2(uploadRequest: v2.UpscanInitiateRequest)(implicit hc: HeaderCarrier): Future[v2.UpscanInitiateResponse] =
    http.POST[v2.UpscanInitiateRequest, v2.UpscanInitiateResponse](
      s"${appConfig.upscanInitiateUrl}/upscan/v2/initiate",
      uploadRequest
    )

  /**
    * Uploads the file as multipart form data to the AWS bucket described by the Upscan template.
    *
    * Fixes over the previous version:
    *  - the HTTP client is closed in a `finally`, so it is released even when `execute` throws;
    *  - a synchronous failure is returned as a failed Future instead of being re-thrown by
    *    `Try.get`, preserving this method's Future-based error contract for callers.
    */
  def upload(template: UpscanTemplate, fileWithMetaData: FileWithMetadata): Future[Unit] = {
    log.info(s"Uploading file [${fileWithMetaData.metadata.id}] with template [$template]")

    // Template fields must be forwarded verbatim so AWS accepts the signed POST.
    val builder: MultipartEntityBuilder = MultipartEntityBuilder.create
    template.fields.foreach(entry => builder.addPart(entry._1, new StringBody(entry._2, ContentType.TEXT_PLAIN)))
    builder.addPart(
      "file",
      new FileBody(
        fileWithMetaData.file.file,
        fileWithMetaData.metadata.mimeType
          .flatMap(typ => Option(ContentType.getByMimeType(typ)))
          .getOrElse(ContentType.DEFAULT_BINARY),
        fileWithMetaData.metadata.fileName.getOrElse(fileWithMetaData.file.file.getName())
      )
    )

    val request: HttpPost = new HttpPost(template.href)
    request.setEntity(builder.build())

    val client = HttpClientBuilder.create.build
    try {
      Try(client.execute(request)).map { response: HttpResponse =>
        val code = response.getStatusLine.getStatusCode
        if (code >= 200 && code < 300) {
          log.info(s"Uploaded file [${fileWithMetaData.metadata.id}] successfully to Upscan Bucket [${template.href}]")
          successful((): Unit)
        } else {
          failed(
            new RuntimeException(
              s"Bad AWS response for file [${fileWithMetaData.metadata.id}] with status [$code] body [${EntityUtils
                .toString(response.getEntity)}]"
            )
          )
        }
      }.fold(err => failed(err), identity)
    } finally {
      client.close()
    }
  }
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/model/upscan/ScanResult.scala
|
<reponame>hmrc/binding-tariff-filestore
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model.upscan
import java.time.Instant
import play.api.libs.json._
import uk.gov.hmrc.bindingtarifffilestore.model
import uk.gov.hmrc.bindingtarifffilestore.model.ScanStatus.{FAILED, READY, ScanStatus}
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.FailureReason.FailureReason
import uk.gov.hmrc.play.json.Union
/** Upscan callback payload for a file that passed the virus scan. */
case class SuccessfulScanResult(
  override val reference: String,
  downloadUrl: String,
  uploadDetails: UploadDetails
) extends ScanResult {
  // A successful scan always maps to the READY status (also the union discriminator value).
  override val fileStatus: model.ScanStatus.Value = READY
}
/** Upscan callback payload for a file that failed the virus scan (e.g. quarantined). */
case class FailedScanResult(
  override val reference: String,
  failureDetails: FailureDetails
) extends ScanResult {
  // A failed scan always maps to the FAILED status (also the union discriminator value).
  override val fileStatus: model.ScanStatus.Value = FAILED
}
/**
  * Result of an Upscan virus scan delivered to this service's callback endpoint.
  * Concrete subtypes are discriminated in JSON by `fileStatus`.
  */
sealed trait ScanResult {
  val reference: String
  val fileStatus: ScanStatus
}
object ScanResult {
  implicit val formatSuccess: OFormat[SuccessfulScanResult] = Json.format[SuccessfulScanResult]
  implicit val formatFailed: OFormat[FailedScanResult] = Json.format[FailedScanResult]
  // Discriminated-union format: "fileStatus" selects the concrete subtype
  // (READY -> SuccessfulScanResult, FAILED -> FailedScanResult).
  implicit val format: Format[ScanResult] = Union
    .from[ScanResult]("fileStatus")
    .and[SuccessfulScanResult](READY.toString)
    .and[FailedScanResult](FAILED.toString)
    .format
}
/** File details Upscan reports after a successful scan. */
case class UploadDetails(
  fileName: String,
  fileMimeType: String,
  uploadTimestamp: Instant,
  checksum: String
)
object UploadDetails {
  // Explicit type argument for consistency with the other formats in this file
  // (`Json.format[X]` rather than relying on inference from the declared type).
  implicit val format: OFormat[UploadDetails] = Json.format[UploadDetails]
}
/** Reason and human-readable message for a failed Upscan scan. */
case class FailureDetails(
  failureReason: FailureReason,
  message: String
)
object FailureDetails {
  // Explicit type argument for consistency with the other formats in this file.
  implicit val format: OFormat[FailureDetails] = Json.format[FailureDetails]
}
|
hmrc/binding-tariff-filestore
|
app/uk/gov/hmrc/bindingtarifffilestore/config/AppConfig.scala
|
<filename>app/uk/gov/hmrc/bindingtarifffilestore/config/AppConfig.scala<gh_stars>0
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.config
import javax.inject.{Inject, Singleton}
import play.api.Configuration
import uk.gov.hmrc.play.bootstrap.config._
/** Typed access to this service's configuration. */
@Singleton
class AppConfig @Inject() (
  config: Configuration,
  servicesConfig: ServicesConfig
) {
  lazy val authorization: String = config.get[String]("auth.api-token")

  lazy val s3Configuration: S3Configuration = S3Configuration(
    config.get[String]("s3.accessKeyId"),
    base64Decode(config.get[String]("s3.secretKeyId")),
    config.get[String]("s3.region"),
    config.get[String]("s3.bucket"),
    // `getOptional` instead of `Option(config.get(...))`: `config.get` throws when the key is
    // absent, so the previous wrapping could never actually yield None for a missing key.
    config.getOptional[String]("s3.endpoint").filter(_.nonEmpty)
  )

  lazy val upscanInitiateUrl: String = servicesConfig.baseUrl("upscan-initiate")

  lazy val fileStoreSizeConfiguration: FileStoreSizeConfiguration = FileStoreSizeConfiguration(
    maxFileSize = config.get[Int]("upscan.maxFileSize"),
    minFileSize = config.get[Int]("upscan.minFileSize")
  )

  lazy val filestoreUrl: String = config.get[String]("filestore.url")
  lazy val filestoreSSL: Boolean = config.get[Boolean]("filestore.ssl")

  // The S3 secret is stored base64-encoded in configuration.
  private def base64Decode(text: String) = new String(java.util.Base64.getDecoder.decode(text))

  // Gates destructive test-only endpoints (e.g. delete-all); defaults to off.
  lazy val isTestMode: Boolean = config.getOptional[Boolean]("testMode").getOrElse(false)
}
/**
  * AWS S3 connection settings.
  *
  * @param endpoint optional custom endpoint (e.g. a local stub); when absent the regional
  *                 AWS endpoint is derived from `region`.
  */
case class S3Configuration(
  key: String,
  secret: String,
  region: String,
  bucket: String,
  endpoint: Option[String]
) {

  /** The service base URL: the explicit endpoint when configured, otherwise the regional AWS URL. */
  def baseUrl: String = endpoint match {
    case Some(custom) => custom
    case None         => s"https://s3-$region.amazonaws.com"
  }
}
/** Allowed upload size bounds passed to Upscan on initiate (presumably bytes — confirm). */
case class FileStoreSizeConfiguration(
  minFileSize: Int,
  maxFileSize: Int
)
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/repository/FileMetadataRepositorySpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.repository
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.scalatestplus.mockito.MockitoSugar
import reactivemongo.api.indexes.Index
import reactivemongo.api.indexes.IndexType.Ascending
import reactivemongo.api.{DB, ReadConcern}
import reactivemongo.bson._
import reactivemongo.core.errors.DatabaseException
import reactivemongo.play.json.ImplicitBSONHandlers._
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadataMongo.format
import uk.gov.hmrc.bindingtarifffilestore.model.{FileMetadata, Paged, Pagination, Search}
import uk.gov.hmrc.mongo.MongoSpecSupport
import java.util.UUID
import scala.concurrent.ExecutionContext.Implicits.global
/**
  * Integration-style tests for the FileMetadata Mongo repository, run against the
  * MongoSpecSupport-provided database. The collection is dropped and re-indexed before
  * each test, so every test starts from an empty collection.
  */
class FileMetadataRepositorySpec
    extends BaseMongoIndexSpec
    with BeforeAndAfterAll
    with BeforeAndAfterEach
    with MongoSpecSupport
    with Eventually
    with MockitoSugar {
  self =>

  val readConcern: ReadConcern = ReadConcern.Local

  // Bridges the MongoSpecSupport connection into the repository's provider interface.
  private val mongoDbProvider: MongoDbProvider = new MongoDbProvider {
    override val mongo: () => DB = self.mongo
  }

  // Two random fixtures; each has a unique UUID-based id.
  private val att1 = generateAttachment
  private val att2 = generateAttachment

  private val repository = createMongoRepo

  private def createMongoRepo =
    new FileMetadataMongoRepository(mongoDbProvider)

  override def beforeEach(): Unit = {
    super.beforeEach()
    await(repository.drop)
    await(repository.ensureIndexes)
  }

  override def afterAll(): Unit = {
    super.afterAll()
    await(repository.drop)
  }

  // Total document count in the backing collection.
  private def collectionSize: Int =
    await(repository.collection.count(None, Some(0), 0, None, readConcern = readConcern)).toInt

  "deleteAll()" should {
    "clear the collection" in {
      val size = collectionSize
      await(repository.insertFile(att1))
      await(repository.insertFile(att2))
      collectionSize shouldBe 2 + size
      await(repository.deleteAll) shouldBe ((): Unit)
      collectionSize shouldBe 0
    }
  }

  "insert" should {
    "insert a new document in the collection" in {
      val size = collectionSize
      await(repository.insertFile(att1)) shouldBe att1
      collectionSize shouldBe 1 + size
      await(repository.collection.find(selectorById(att1)).one[FileMetadata]) shouldBe Some(att1)
    }
  }

  "update" should {
    "modify an existing document in the collection" in {
      await(repository.insertFile(att1))
      val size = collectionSize
      val updated = att1.copy(mimeType = Some(generateString), fileName = Some(generateString))
      await(repository.update(updated))
      collectionSize shouldBe size
      val metadata = await(repository.collection.find(selectorById(updated)).one[FileMetadata])
      metadata.map(_.id) shouldBe Some(att1.id)
      metadata.map(_.mimeType) shouldBe Some(updated.mimeType)
      metadata.map(_.fileName) shouldBe Some(updated.fileName)
      // The repository refreshes lastUpdated on write, so it must be newer than the input's.
      metadata.map(_.lastUpdated).get.isAfter(updated.lastUpdated) shouldBe true
    }
    "do nothing when trying to update a non existing document in the collection" in {
      val size = collectionSize
      await(repository.update(att1)) shouldBe None
      collectionSize shouldBe size
    }
  }

  "get" should {
    "retrieve the expected document from the collection" in {
      await(repository.insertFile(att1))
      await(repository.insertFile(att2))
      collectionSize shouldBe 2
      await(repository.get(att1.id)) shouldBe Some(att1)
    }
    "return None when there are no documents in the collection" in {
      await(repository.get(att1.id)) shouldBe None
    }
  }

  "delete" should {
    "delete the expected document from the collection" in {
      await(repository.insertFile(att1))
      await(repository.insertFile(att2))
      collectionSize shouldBe 2
      await(repository.delete(att1.id))
      collectionSize shouldBe 1
      await(repository.get(att1.id)) shouldBe None
    }
  }

  "The collection" should {
    "have a unique index based on the field 'id' " in {
      await(repository.collection.insert(att1))
      val size = collectionSize
      // 11000 is Mongo's duplicate-key error code, proving the unique index is enforced.
      val caught = intercept[DatabaseException] {
        await(repository.collection.insert(att1.copy(url = Some(generateString))))
      }
      caught.code shouldBe Some(11000)
      collectionSize shouldBe size
    }
    "have all expected indexes" in {
      import scala.concurrent.duration._
      val expectedIndexes = List(
        Index(key = Seq("id" -> Ascending), name = Some("id_Index"), unique = true),
        Index(key = Seq("_id" -> Ascending), name = Some("_id_"))
      )
      val repo = createMongoRepo
      await(repo.ensureIndexes)
      // Index creation is asynchronous on the server; poll until it settles.
      eventually(timeout(5.seconds), interval(100.milliseconds)) {
        assertIndexes(expectedIndexes.sorted, getIndexes(repo.collection).sorted)
      }
      await(repo.drop)
    }
  }

  "get many" should {
    "retrieve the expected documents by id" in {
      await(repository.insertFile(att1))
      await(repository.insertFile(att2))
      collectionSize shouldBe 2
      await(repository.get(Search(ids = Some(Set(att1.id))), Pagination())) shouldBe Paged(Seq(att1))
      await(repository.get(Search(ids = Some(Set(att2.id))), Pagination())) shouldBe Paged(Seq(att2))
    }
    "retrieve the expected documents by published" in {
      await(repository.insertFile(att1.copy(published = true)))
      await(repository.insertFile(att2.copy(published = false)))
      collectionSize shouldBe 2
      await(repository.get(Search(published = Some(true)), Pagination())) shouldBe Paged(
        Seq(att1.copy(published = true))
      )
      await(repository.get(Search(published = Some(false)), Pagination())) shouldBe Paged(
        Seq(att2.copy(published = false))
      )
    }
    "retrieve all the files for empty Search" in {
      await(repository.insertFile(att1))
      await(repository.insertFile(att2))
      collectionSize shouldBe 2
      await(repository.get(Search(), Pagination())) shouldBe Paged(Seq(att1, att2))
    }
    "return None when there are no documents matching" in {
      await(repository.get(Search(), Pagination())) shouldBe Paged.empty[FileMetadata]
    }
  }

  // Minimal valid metadata fixture with random id, file name, and MIME type.
  private def generateAttachment = FileMetadata(
    id = generateString,
    fileName = Some(generateString),
    mimeType = Some(generateString)
  )

  private def generateString = UUID.randomUUID().toString

  // Selector matching the business `id` field (not Mongo's `_id`).
  private def selectorById(att: FileMetadata) =
    BSONDocument("id" -> att.id)
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/controllers/FileStoreControllerSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.controllers
import akka.stream.Materializer
import org.mockito.ArgumentCaptor
import org.mockito.ArgumentMatchers.{any, refEq}
import org.mockito.Mockito.{reset, verify, when}
import org.scalatest.BeforeAndAfterEach
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import play.api.http.Status._
import play.api.libs.Files.{SingletonTemporaryFileCreator, TemporaryFile}
import play.api.libs.json.{JsValue, Json, Writes}
import play.api.mvc.MultipartFormData.FilePart
import play.api.mvc._
import play.api.test.FakeRequest
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.model.FileMetadataREST.format
import uk.gov.hmrc.bindingtarifffilestore.model._
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.v2.{FileStoreInitiateRequest, FileStoreInitiateResponse, UpscanFormTemplate}
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.{ScanResult, SuccessfulScanResult, UploadDetails}
import uk.gov.hmrc.bindingtarifffilestore.service.FileStoreService
import uk.gov.hmrc.bindingtarifffilestore.util.{UnitSpec, WithFakeApplication}
import uk.gov.hmrc.http.{HeaderCarrier, HttpVerbs}
import java.time.Instant
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future.{failed, successful}
class FileStoreControllerSpec
extends UnitSpec
with Matchers
with WithFakeApplication
with MockitoSugar
with BeforeAndAfterEach {
private implicit val mat: Materializer = fakeApplication.materializer
private val appConfig = mock[AppConfig]
private val service = mock[FileStoreService]
lazy val playBodyParsers: PlayBodyParsers = fakeApplication.injector.instanceOf[PlayBodyParsers]
lazy val cc: MessagesControllerComponents = fakeApplication.injector.instanceOf[MessagesControllerComponents]
private val controller = new FileStoreController(appConfig, service, playBodyParsers, cc)
private val fakeRequest = FakeRequest()
private def jsonRequest[T](body: T)(implicit writes: Writes[T]): Request[AnyContent] =
fakeRequest
.withJsonBody(Json.toJson(body))
.withHeaders("Content-Type" -> "application/json")
override protected def afterEach(): Unit = {
super.afterEach()
reset(service)
}
"Delete All" should {
val req = FakeRequest(method = HttpVerbs.DELETE, path = "/file")
"return 403 if the test mode is disabled" in {
when(appConfig.isTestMode).thenReturn(false)
val result = await(controller.deleteAll()(req))
status(result) shouldEqual FORBIDDEN
jsonBodyOf(result)
.toString() shouldEqual s"""{"code":"FORBIDDEN","message":"You are not allowed to call ${req.method} ${req.path}"}"""
}
"return 204 if the test mode is enabled" in {
when(appConfig.isTestMode).thenReturn(true)
when(service.deleteAll()).thenReturn(successful(()))
val result = await(controller.deleteAll()(req))
status(result) shouldEqual NO_CONTENT
}
"return 500 when an error occurred" in {
val error = new RuntimeException
when(appConfig.isTestMode).thenReturn(true)
when(service.deleteAll()).thenReturn(failed(error))
val result = await(controller.deleteAll()(req))
status(result) shouldEqual INTERNAL_SERVER_ERROR
jsonBodyOf(result).toString() shouldEqual """{"code":"UNKNOWN_ERROR","message":"An unexpected error occurred"}"""
}
}
"Delete By ID" should {
val id = "ABC-123_000"
val req = FakeRequest(method = HttpVerbs.DELETE, path = s"/file/$id")
"return 204" in {
when(appConfig.isTestMode).thenReturn(true)
when(service.delete(id)).thenReturn(successful((): Unit))
val result = await(controller.delete(id)(req))
status(result) shouldBe NO_CONTENT
}
"return 500 when an error occurred" in {
val error = new RuntimeException
when(appConfig.isTestMode).thenReturn(true)
when(service.delete(id)).thenReturn(failed(error))
val result = await(controller.delete(id)(req))
status(result) shouldEqual INTERNAL_SERVER_ERROR
jsonBodyOf(result).toString() shouldEqual """{"code":"UNKNOWN_ERROR","message":"An unexpected error occurred"}"""
}
}
"Get By ID" should {
"return 200 when found" in {
val attachment = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"))
when(service.find(id = "id")).thenReturn(successful(Some(attachment)))
val result = await(controller.get("id")(fakeRequest))
status(result) shouldBe OK
bodyOf(result) shouldEqual Json.toJson(attachment).toString()
}
"return 404 when not found" in {
when(service.find(id = "id")).thenReturn(successful(None))
val result = await(controller.get("id")(fakeRequest))
status(result) shouldBe NOT_FOUND
}
}
"Get By Search" should {
"return 200 with empty array" in {
when(service.find(Search(ids = Some(Set.empty)), Pagination.max))
.thenReturn(successful(Paged.empty[FileMetadata]))
val result = await(controller.getAll(Search(ids = Some(Set.empty)), None)(fakeRequest))
status(result) shouldBe OK
bodyOf(result) shouldEqual Json.toJson(Seq.empty).toString()
}
"return 200 with non empty array" in {
val attachment1 = FileMetadata(id = "id1", fileName = Some("file1"), mimeType = Some("type1"))
val attachment2 = FileMetadata(id = "id2", fileName = Some("file2"), mimeType = Some("type2"))
when(service.find(Search(ids = Some(Set("id1", "id2"))), Pagination.max))
.thenReturn(successful(Paged(Seq(attachment1, attachment2))))
val result = await(controller.getAll(Search(ids = Some(Set("id1", "id2"))), None)(fakeRequest))
status(result) shouldBe OK
bodyOf(result) shouldEqual Json.toJson(Seq(attachment1, attachment2)).toString()
}
"return 200 with pagination and non empty pager" in {
val attachment1 = FileMetadata(id = "id1", fileName = Some("file1"), mimeType = Some("type1"))
val attachment2 = FileMetadata(id = "id2", fileName = Some("file2"), mimeType = Some("type2"))
when(service.find(Search(ids = Some(Set("id1", "id2"))), Pagination()))
.thenReturn(successful(Paged(Seq(attachment1, attachment2))))
val result = await(controller.getAll(Search(ids = Some(Set("id1", "id2"))), Some(Pagination()))(fakeRequest))
status(result) shouldBe OK
bodyOf(result) shouldEqual Json.toJson(Paged(Seq(attachment1, attachment2))).toString()
}
}
"Notify" should {
"return 201 when found" in {
val scanResult = SuccessfulScanResult("ref", "url", UploadDetails("file", "type", Instant.now(), "checksum"))
val attachment = FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"))
val attachmentUpdated =
FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"), url = Some("url"))
when(service.find(id = "id")).thenReturn(successful(Some(attachment)))
when(service.notify(refEq(attachment), refEq(scanResult))(any[HeaderCarrier]))
.thenReturn(successful(Some(attachmentUpdated)))
val request: FakeRequest[JsValue] = fakeRequest.withBody(Json.toJson[ScanResult](scanResult))
val result: Result = await(controller.notification(id = "id")(request))
status(result) shouldBe CREATED
jsonBodyOf(result) shouldBe Json.toJson(attachmentUpdated)
}
"return 404 when not found" in {
val scanResult = SuccessfulScanResult("ref", "url", UploadDetails("file", "type", Instant.now(), "checksum"))
when(service.find("id")).thenReturn(successful(None))
val request: FakeRequest[JsValue] = fakeRequest.withBody(Json.toJson[ScanResult](scanResult))
val result: Result = await(controller.notification(id = "id")(request))
status(result) shouldBe NOT_FOUND
}
}
"Publish" should {
"return 201 when found" in {
val attachmentExisting =
FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"), scanStatus = Some(ScanStatus.READY))
val attachmentUpdated = FileMetadata(
id = "id",
fileName = Some("file"),
mimeType = Some("type"),
scanStatus = Some(ScanStatus.READY),
url = Some("url")
)
when(service.find(id = "id")).thenReturn(successful(Some(attachmentExisting)))
when(service.publish(refEq(attachmentExisting))(any[HeaderCarrier]))
.thenReturn(successful(Some(attachmentUpdated)))
val result: Result = await(controller.publish(id = "id")(fakeRequest))
status(result) shouldBe ACCEPTED
jsonBodyOf(result) shouldBe Json.toJson(attachmentUpdated)
}
"return 404 when not found" in {
when(service.find(id = "id")).thenReturn(successful(None))
val result: Result = await(controller.publish(id = "id")(fakeRequest))
status(result) shouldBe NOT_FOUND
}
"return 404 when publish returns not found" in {
val attachmentExisting =
FileMetadata(id = "id", fileName = Some("file"), mimeType = Some("type"), scanStatus = Some(ScanStatus.READY))
when(service.find(id = "id")).thenReturn(successful(Some(attachmentExisting)))
when(service.publish(refEq(attachmentExisting))(any[HeaderCarrier])).thenReturn(successful(None))
val result: Result = await(controller.publish(id = "id")(fakeRequest))
status(result) shouldBe NOT_FOUND
}
}
"Initiate" should {
  // Canned response returned by the stubbed service in the success cases.
  def stubbedResponse: FileStoreInitiateResponse =
    FileStoreInitiateResponse("id", "ref", UpscanFormTemplate("href", Map()))

  "return 202 on valid json" in {
    // Given: the service accepts any initiate request
    when(service.initiateV2(any[FileStoreInitiateRequest])(any[HeaderCarrier]))
      .thenReturn(successful(stubbedResponse))

    // When: a valid request without a client-supplied id is posted
    val initiateRequest = FileStoreInitiateRequest(publishable = true)
    val result: Result = await(controller.initiate(jsonRequest(initiateRequest)))

    // Then
    status(result) shouldBe ACCEPTED
  }

  "return 202 on valid json with ID" in {
    // Given
    when(service.initiateV2(any[FileStoreInitiateRequest])(any[HeaderCarrier]))
      .thenReturn(successful(stubbedResponse))

    // When: the client supplies its own id
    val initiateRequest = FileStoreInitiateRequest(id = Some("id"), publishable = true)
    val result: Result = await(controller.initiate(jsonRequest(initiateRequest)))

    // Then
    status(result) shouldBe ACCEPTED
  }

  "return 400 on invalid json" in {
    // When: an empty JSON object fails request validation
    val result: Result = await(controller.initiate(jsonRequest(Json.obj())))

    // Then
    status(result) shouldBe BAD_REQUEST
  }
}
"Upload" should {
  // Common fixture values reused across the multipart tests.
  val fileName = "file.txt"
  val mimeType = "text/plain"
  val tmpFile = SingletonTemporaryFileCreator.create("example-file.txt")

  // Wraps a multipart form body in a fake request with the matching content type.
  def multipartRequest(body: MultipartFormData[TemporaryFile]): Request[AnyContent] =
    fakeRequest
      .withMultipartFormDataBody(body)
      .withHeaders("Content-Type" -> "multipart/form-data")

  "return 202 on valid json" in {
    // Given
    val response = UploadTemplate("id", "href", Map())
    when(service.initiate(any[FileMetadata])(any[HeaderCarrier])).thenReturn(successful(response))
    // When
    val request = UploadRequest(fileName = "file.txt", mimeType = "text/plain", publishable = true)
    val result: Result = await(controller.upload(jsonRequest(request)))
    // Then: accepted, and the metadata handed to the service reflects the request
    status(result) shouldBe ACCEPTED
    val metadata = theFileInitiated
    metadata.publishable shouldBe true
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "return 202 on valid json with ID" in {
    // Given
    val response = UploadTemplate("id", "href", Map())
    when(service.initiate(any[FileMetadata])(any[HeaderCarrier])).thenReturn(successful(response))
    // When: the client supplies its own id in the JSON body
    val request = UploadRequest(id = Some("id"), fileName = "file.txt", mimeType = "text/plain", publishable = true)
    val result: Result = await(controller.upload(jsonRequest(request)))
    // Then: the supplied id is propagated to the service
    status(result) shouldBe ACCEPTED
    val metadata = theFileInitiated
    metadata.id shouldBe "id"
    metadata.publishable shouldBe true
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "return 202 on valid file" in {
    // Given
    val metadataUploaded = FileMetadata(id = "id", fileName = Some(fileName), mimeType = Some(mimeType))
    when(service.upload(any[FileWithMetadata])(any[HeaderCarrier])).thenReturn(successful(metadataUploaded))
    // When: a bare multipart upload with no data parts
    val filePart = FilePart[TemporaryFile](key = "file", fileName, contentType = Some(mimeType), ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](dataParts = Map(), files = Seq(filePart), badParts = Seq.empty)
    val result: Result = await(controller.upload(multipartRequest(form)))
    // Then
    status(result) shouldBe ACCEPTED
    val metadata = theFileUploaded.metadata
    metadata.published shouldBe false
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "return 202 on valid file with id" in {
    // Given
    val metadataUploaded = FileMetadata(id = "id", fileName = Some(fileName), mimeType = Some(mimeType))
    when(service.upload(any[FileWithMetadata])(any[HeaderCarrier])).thenReturn(successful(metadataUploaded))
    // When: an "id" data part lets the caller supply their own identifier
    val filePart = FilePart[TemporaryFile](key = "file", fileName, contentType = Some(mimeType), ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](
      dataParts = Map("id" -> Seq("id")),
      files = Seq(filePart),
      badParts = Seq.empty
    )
    val result: Result = await(controller.upload(multipartRequest(form)))
    // Then
    status(result) shouldBe ACCEPTED
    val metadata = theFileUploaded.metadata
    metadata.id shouldBe "id"
    metadata.published shouldBe false
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "return 202 on valid file with publish=true" in {
    // Given
    val metadataUploaded =
      FileMetadata(id = "id", fileName = Some("name"), mimeType = Some(mimeType), published = true)
    when(service.upload(any[FileWithMetadata])(any[HeaderCarrier])).thenReturn(successful(metadataUploaded))
    // When: the "publish" data part marks the upload as publishable
    val filePart = FilePart[TemporaryFile](key = "file", fileName, contentType = Some(mimeType), ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](
      dataParts = Map("publish" -> Seq("true")),
      files = Seq(filePart),
      badParts = Seq.empty
    )
    val result: Result = await(controller.upload(multipartRequest(form)))
    // Then: the captured metadata carries publishable = true
    status(result) shouldBe ACCEPTED
    val metadata = theFileUploaded.metadata
    metadata.publishable shouldBe true
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "return 202 on valid file with publish=false" in {
    // Given
    val metadataUploaded =
      FileMetadata(id = "id", fileName = Some("name"), mimeType = Some(mimeType), published = true)
    when(service.upload(any[FileWithMetadata])(any[HeaderCarrier])).thenReturn(successful(metadataUploaded))
    // When
    val filePart = FilePart[TemporaryFile](key = "file", fileName, contentType = Some(mimeType), ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](
      dataParts = Map("publish" -> Seq("false")),
      files = Seq(filePart),
      badParts = Seq.empty
    )
    val result: Result = await(controller.upload(multipartRequest(form)))
    // Then
    status(result) shouldBe ACCEPTED
    val metadata = theFileUploaded.metadata
    metadata.published shouldBe false
    metadata.fileName shouldBe Some("file.txt")
    metadata.mimeType shouldBe Some("text/plain")
  }

  "Throw exception on missing mime type" in {
    val filePart = FilePart[TemporaryFile](key = "file", fileName, contentType = None, ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](dataParts = Map(), files = Seq(filePart), badParts = Seq.empty)
    // NOTE(review): the action's result is not awaited here — this relies on the
    // controller failing synchronously while reading the multipart body; confirm.
    val exception = intercept[RuntimeException] {
      controller.upload(multipartRequest(form))
    }
    exception.getMessage shouldBe "Missing file type"
  }

  "return 400 on missing file" in {
    // A multipart body with no file part is rejected.
    val form = MultipartFormData[TemporaryFile](dataParts = Map(), files = Seq(), badParts = Seq.empty)
    val result: Result = await(controller.upload(multipartRequest(form)))
    status(result) shouldBe BAD_REQUEST
  }

  "return 400 on missing filename" in {
    // A file part with an empty filename is rejected.
    val filePart =
      FilePart[TemporaryFile](key = "file", filename = "", contentType = Some("text/plain"), ref = tmpFile)
    val form = MultipartFormData[TemporaryFile](dataParts = Map(), files = Seq(filePart), badParts = Seq.empty)
    val result: Result = await(controller.upload(multipartRequest(form)))
    status(result) shouldBe BAD_REQUEST
  }

  "return 400 on invalid json" in {
    // When: an empty JSON object fails request validation
    val result: Result = await(controller.upload(jsonRequest(Json.obj())))
    // Then
    status(result) shouldBe BAD_REQUEST
  }

  "return 400 on missing content type" in {
    // A request with neither JSON nor multipart content type is rejected.
    val result: Result = await(controller.upload(FakeRequest()))
    status(result) shouldBe BAD_REQUEST
  }

  // Captures the FileWithMetadata that the controller passed to service.upload.
  def theFileUploaded: FileWithMetadata = {
    val captor = ArgumentCaptor.forClass(classOf[FileWithMetadata])
    verify(service).upload(captor.capture())(any[HeaderCarrier])
    captor.getValue
  }

  // Captures the FileMetadata that the controller passed to service.initiate.
  def theFileInitiated: FileMetadata = {
    val captor = ArgumentCaptor.forClass(classOf[FileMetadata])
    verify(service).initiate(captor.capture())(any[HeaderCarrier])
    captor.getValue
  }
}
}
|
hmrc/binding-tariff-filestore
|
test/it/uk/gov/hmrc/bindingtarifffilestore/FileStoreSpec.scala
|
<gh_stars>0
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore
import com.github.tomakehurst.wiremock.client.WireMock._
import org.apache.commons.io.IOUtils
import play.api.Application
import play.api.http.{ContentTypes, HeaderNames, HttpVerbs, Status}
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.libs.Files.SingletonTemporaryFileCreator
import play.api.libs.json._
import reactivemongo.api.ReadConcern
import scalaj.http.{Http, HttpResponse, MultiPart}
import uk.gov.hmrc.bindingtarifffilestore.model.upscan._
import uk.gov.hmrc.bindingtarifffilestore.model.{Pagination, Search, UploadRequest}
import uk.gov.hmrc.bindingtarifffilestore.repository.FileMetadataMongoRepository
import uk.gov.hmrc.bindingtarifffilestore.util.{ResourceFiles, WiremockFeatureTestServer}
import java.io.{File, InputStream}
import java.net.URI
import java.nio.charset.Charset
import java.nio.file.Files
import java.time.Instant
import scala.collection.JavaConverters._
import scala.collection.Map
import scala.concurrent.Await.result
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.util.Try
/**
  * End-to-end feature test for the filestore HTTP API: drives a running
  * application over real HTTP, backed by a real Mongo collection, with
  * WireMock standing in for both the upscan service and S3.
  */
class FileStoreSpec extends WiremockFeatureTestServer with ResourceFiles {
  private val timeout: FiniteDuration = 2.seconds
  private val serviceUrl = s"http://localhost:$port"
  private val filePath = "test/resources/file.txt"
  private lazy val dbFileStore: FileMetadataMongoRepository = app.injector.instanceOf[FileMetadataMongoRepository]
  val readConcern: ReadConcern = ReadConcern.Local

  // Start every scenario from an empty, freshly indexed collection.
  override protected def beforeEach(): Unit = {
    super.beforeEach()
    dropDbFileStore()
    ensureDbFileStoreIndexes()
  }

  // Re-point the S3 endpoint and the upscan-initiate service at WireMock.
  override def fakeApplication(): Application = new GuiceApplicationBuilder()
    .configure(
      "s3.endpoint" -> s"http://localhost:$wirePort",
      "microservice.services.upscan-initiate.port" -> s"$wirePort"
    )
    .build()

  Feature("Delete All") {
    Scenario("Clear collections & files") {
      Given("There are some documents in the collection")
      upload("some-file1.txt", "text/plain")
      upload("some-file2.txt", "text/plain")
      dbFileStoreSize shouldBe 2
      stubS3ListAll()
      stubS3DeleteAll()
      When("I delete all documents")
      val deleteResult = Http(s"$serviceUrl/file")
        .header(apiTokenKey, appConfig.authorization)
        .method(HttpVerbs.DELETE)
        .asString
      Then("The response code should be 204")
      deleteResult.code shouldEqual Status.NO_CONTENT
      And("The response body is empty")
      deleteResult.body shouldBe ""
      And("No documents exist in the mongo collection")
      dbFileStoreSize shouldBe 0
      And("the are no files")
      val files = Http(s"$serviceUrl/file")
        .header(apiTokenKey, appConfig.authorization)
        .method(HttpVerbs.GET)
        .execute(convertingArrayResponseToJS)
      files.code shouldBe 200
      files.body.toString() shouldBe "[]"
    }
  }

  Feature("Delete") {
    Scenario("Delete the file") {
      Given("A file has been uploaded")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      dbFileStoreSize shouldBe 1
      When("I request the file details")
      val response = deleteFile(id)
      Then("The response code should be Ok")
      response.code shouldBe Status.NO_CONTENT
      And("The response body is empty")
      response.body shouldBe ""
      And("No documents exist in the mongo collection")
      dbFileStoreSize shouldBe 0
    }
  }

  Feature("Upload") {
    Scenario("Should persist") {
      Given("A Client of the FileStore has a file")
      val filename = "some-file.txt"
      val contentType = "text/plain"
      When("It is uploaded")
      val response: HttpResponse[Map[String, JsValue]] = upload(filename, contentType)
      Then("The response code should be Accepted")
      response.code shouldBe Status.ACCEPTED
      And("The response body contains the file details")
      response.body("fileName") shouldBe JsString(filename)
      response.body("mimeType") shouldBe JsString(contentType)
      // url/scanStatus only appear once the file has been scanned and published.
      response.body.contains("url") shouldBe false
      response.body.contains("scanStatus") shouldBe false
    }
  }

  Feature("Initiate") {
    Scenario("Should persist") {
      Given("A Client of the FileStore has a file")
      val filename = "some-file.txt"
      val contentType = "text/plain"
      When("It is initiated")
      val response: HttpResponse[Map[String, JsValue]] = initiate(filename, contentType)
      Then("The response code should be Accepted")
      response.code shouldBe Status.ACCEPTED
      And("The response body contains the file upload template")
      response.body("href") shouldBe JsString("http://localhost:20001/upscan/upload")
      response.body("fields") shouldBe Json.obj("key" -> "value")
    }
  }

  Feature("Initiate V2") {
    Scenario("Should accept initiate requests without ID") {
      Given("A Client of the FileStore needs an upload form")
      When("It is requested")
      val response: HttpResponse[Map[String, JsValue]] = initiateV2()
      Then("The response code should be Accepted")
      response.code shouldBe Status.ACCEPTED
      And("The response body contains the file upload template")
      // When no id is supplied the service generates one.
      response.body("id") shouldBe a[JsString]
      response.body("uploadRequest") shouldBe Json.obj(
        "href" -> "http://localhost:20001/upscan/upload",
        "fields" -> Json.obj("key" -> "value")
      )
    }
    Scenario("Should accept initiate requests with client generated ID") {
      Given("A Client of the FileStore needs an upload form")
      When("It is requested")
      val response: HttpResponse[Map[String, JsValue]] = initiateV2(Some("slurm"))
      Then("The response code should be Accepted")
      And("The response body contains the file upload template")
      response.code shouldBe Status.ACCEPTED
      response.body("id") shouldBe JsString("slurm")
      response.body("uploadRequest") shouldBe Json.obj(
        "href" -> "http://localhost:20001/upscan/upload",
        "fields" -> Json.obj("key" -> "value")
      )
    }
  }

  Feature("Get") {
    Scenario("Should show the file is persisted") {
      Given("A file has been uploaded")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      When("I request the file details")
      val response = getFile(id)
      Then("The response code should be Ok")
      response.code shouldBe Status.OK
      And("The response body contains the file details")
      response.body("fileName") shouldBe JsString("some-file.txt")
      response.body("mimeType") shouldBe JsString("text/plain")
      response.body.contains("url") shouldBe false
      response.body.contains("scanStatus") shouldBe false
    }
  }

  Feature("Get files") {
    Scenario("Should return all files matching search") {
      Given("Files have been uploaded")
      val id1 = upload("some-file1.txt", "text/plain").body("id").as[JsString].value
      val id2 = upload("some-file2.txt", "text/plain").body("id").as[JsString].value
      When("I request the file details")
      val response = getFiles(Search(ids = Some(Set(id1, id2))))
      Then("The response code should be Ok")
      response.code shouldBe Status.OK
      And("The response body contains the file details")
      response.body.asInstanceOf[JsArray].value.size shouldBe 2
      (response.body \\ "fileName").map(_.as[String]) should contain only("some-file1.txt", "some-file2.txt")
    }
    Scenario("Should return all files for empty search") {
      Given("Files have been uploaded")
      upload("some-file1.txt", "text/plain").body("id").as[JsString].value
      upload("some-file2.txt", "text/plain").body("id").as[JsString].value
      When("I request the file details")
      val response = getFiles(Search())
      Then("The response code should be Ok")
      response.code shouldBe Status.OK
      And("The response body contains the file details")
      (response.body \\ "fileName").map(_.as[String]) should contain allOf("some-file1.txt", "some-file2.txt")
    }
  }

  Feature("Get files with pagination") {
    Scenario("Should return all files matching search") {
      Given("Files have been uploaded")
      val id1 = upload("some-file1.txt", "text/plain").body("id").as[JsString].value
      val id2 = upload("some-file2.txt", "text/plain").body("id").as[JsString].value
      When("I request the file details")
      val response = getFiles(Search(ids = Some(Set(id1, id2))), Some(Pagination()))
      Then("The response code should be Ok")
      response.code shouldBe Status.OK
      And("The response body contains the file details")
      // Paged responses wrap the results in an object with a resultCount field.
      response.body.asInstanceOf[JsObject].value("resultCount").toString().toInt shouldBe 2
      (response.body \\ "fileName").map(_.as[String]) should contain only("some-file1.txt", "some-file2.txt")
    }
    Scenario("Should return all files for empty search") {
      Given("Files have been uploaded")
      upload("some-file1.txt", "text/plain").body("id").as[JsString].value
      upload("some-file2.txt", "text/plain").body("id").as[JsString].value
      When("I request the file details")
      val response = getFiles(Search(), Some(Pagination()))
      Then("The response code should be Ok")
      response.code shouldBe Status.OK
      And("The response body contains the file details")
      response.body.asInstanceOf[JsObject].value("resultCount").toString().toInt shouldBe 2
      (response.body \\ "fileName").map(_.as[String]) should contain allOf("some-file1.txt", "some-file2.txt")
    }
  }

  Feature("Notify") {
    Scenario("Successful scan should update the status") {
      Given("A File has been uploaded")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      When("Notify is Called")
      val uri = new File(filePath).toURI
      val response = notifySuccess(id, "some-file.txt", uri)
      Then("The response code should be Created")
      response.code shouldBe Status.CREATED
      And("The response body contains the file details")
      response.body("fileName") shouldBe JsString("some-file.txt")
      response.body("mimeType") shouldBe JsString("text/plain")
      response.body("url") shouldBe JsString(uri.toString)
      And("The response shows the file is marked as safe")
      response.body("scanStatus") shouldBe JsString("READY")
    }
    Scenario("Quarantined scan should update the status") {
      Given("A File has been uploaded")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      When("Notify is Called")
      val response = notifyFailure(id)
      Then("The response code should be Created")
      response.code shouldBe Status.CREATED
      And("The response body contains the file details")
      response.body("fileName") shouldBe JsString("some-file.txt")
      response.body("mimeType") shouldBe JsString("text/plain")
      // Quarantined files never expose a download URL.
      response.body.contains("url") shouldBe false
      And("The response shows the file is marked as quarantined")
      response.body("scanStatus") shouldBe JsString("FAILED")
    }
  }

  Feature("Publish") {
    Scenario("Should persist the file to permanent storage") {
      Given("A File has been uploaded and marked as safe")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      notifySuccess(id, "some-file.txt")
      When("It is Published")
      val response = publishSafeFile(id)
      Then("The response code should be Accepted")
      response.code shouldBe Status.ACCEPTED
      And("The response body contains the file details")
      response.body("fileName") shouldBe JsString("some-file.txt")
      response.body("mimeType") shouldBe JsString("text/plain")
      response.body("scanStatus") shouldBe JsString("READY")
      response.body("publishable") shouldBe JsBoolean(true)
      response.body("published") shouldBe JsBoolean(true)
      And("The response shows the file published")
      // Published URLs are S3 pre-signed URLs.
      response.body("url").as[JsString].value should include(s"$id?X-Amz-Algorithm=AWS4-HMAC-SHA256")
    }
    Scenario("Should mark an un-safe file as publishable, but not persist") {
      Given("A File has been uploaded and marked as quarantined")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      notifyFailure(id)
      When("It is Published")
      val publishResponse = publishUnSafeFile(id)
      Then("The response code should be Forbidden")
      publishResponse.code shouldBe Status.ACCEPTED
      And("The response body contains the file details")
      publishResponse.body("fileName") shouldBe JsString("some-file.txt")
      publishResponse.body("mimeType") shouldBe JsString("text/plain")
      publishResponse.body("scanStatus") shouldBe JsString("FAILED")
      publishResponse.body("publishable") shouldBe JsBoolean(true)
      publishResponse.body("published") shouldBe JsBoolean(false)
      And("I can call GET and see the file is unpublished")
      val getResponse = getFile(id)
      getResponse.code shouldBe Status.OK
      getResponse.body("fileName") shouldBe JsString("some-file.txt")
      getResponse.body("mimeType") shouldBe JsString("text/plain")
      getResponse.body("scanStatus") shouldBe JsString("FAILED")
      getResponse.body("publishable") shouldBe JsBoolean(true)
      getResponse.body("published") shouldBe JsBoolean(false)
      getResponse.body.contains("url") shouldBe false
    }
    Scenario("Should remove publishable file which has expired") {
      Given("A File has been uploaded and marked as safe")
      val id = upload("some-file.txt", "text/plain")
        .body("id").as[JsString].value
      val uri = new File(filePath).toURI
      // An already-expired upscan URL (expires in 0s at the epoch) makes the file stale.
      notifySuccess(id, "some-file.txt", uri = new URI(uri.toString + "?X-Amz-Date=19700101T000000Z&X-Amz-Expires=0"))
      When("It is Published")
      val response = publishSafeFile(id)
      Then("The response code should be Not Found")
      response.code shouldBe Status.NOT_FOUND
      And("I can call GET and see the file does not exist")
      val getResponse = getFile(id)
      getResponse.code shouldBe Status.NOT_FOUND
    }
  }

  // GET /file/:id and parse the JSON object response.
  private def getFile(id: String): HttpResponse[Map[String, JsValue]] = {
    Http(s"$serviceUrl/file/$id")
      .header(apiTokenKey, appConfig.authorization)
      .method(HttpVerbs.GET)
      .execute(convertingResponseToJS)
  }

  // DELETE /file/:id, stubbing the matching S3 delete call.
  private def deleteFile(id: String): HttpResponse[String] = {
    stubS3DeleteOne(id)
    Http(s"$serviceUrl/file/$id")
      .header(apiTokenKey, appConfig.authorization)
      .method(HttpVerbs.DELETE)
      .asString
  }

  // GET /file with the given search (and optional pagination) bound into the query string.
  private def getFiles(search: Search, pagination: Option[Pagination] = None): HttpResponse[JsValue] = {
    val queryParams = Search.bindable.unbind("", search) + pagination.map(p => "&" + Pagination.bindable.unbind("", p)).getOrElse("")
    Http(s"$serviceUrl/file?$queryParams")
      .header(apiTokenKey, appConfig.authorization)
      .method(HttpVerbs.GET)
      .execute(convertingArrayResponseToJS)
  }

  // POST /file/:id/publish for a scanned-safe file; stubs the S3 upload it triggers.
  private def publishSafeFile(id: String): HttpResponse[Map[String, JsValue]] = {
    stubS3Upload(id)
    Http(s"$serviceUrl/file/$id/publish")
      .header(apiTokenKey, appConfig.authorization)
      .method(HttpVerbs.POST)
      .execute(convertingResponseToJS)
  }

  // POST /file/:id/publish for a quarantined file; deliberately no S3 stub.
  private def publishUnSafeFile(id: String): HttpResponse[Map[String, JsValue]] = {
    // Should NOT call S3 Upload
    Http(s"$serviceUrl/file/$id/publish")
      .header(apiTokenKey, appConfig.authorization)
      .method(HttpVerbs.POST)
      .execute(convertingResponseToJS)
  }

  // Simulates upscan calling back with a successful scan result for the file.
  private def notifySuccess(id: String, fileName: String, uri: URI = new File(filePath).toURI): HttpResponse[Map[String, JsValue]] = {
    val url = uri.toURL.toString
    val model = SuccessfulScanResult("reference", url, UploadDetails(fileName, "text/plain", Instant.now(), "checksum"))
    Http(s"$serviceUrl/file/$id/notify")
      .postData(Json.toJson[ScanResult](model).toString())
      .header(HeaderNames.CONTENT_TYPE, ContentTypes.JSON)
      .param(apiTokenKey, hash(appConfig.authorization))
      .execute(convertingResponseToJS)
  }

  // Simulates upscan calling back with a quarantined scan result for the file.
  private def notifyFailure(id: String): HttpResponse[Map[String, JsValue]] = {
    val model = FailedScanResult("reference", FailureDetails(FailureReason.QUARANTINE, "message"))
    Http(s"$serviceUrl/file/$id/notify")
      .postData(Json.toJson[ScanResult](model).toString())
      .header(HeaderNames.CONTENT_TYPE, ContentTypes.JSON)
      .param(apiTokenKey, hash(appConfig.authorization))
      .execute(convertingResponseToJS)
  }

  // Uploads a small temp file via the multipart endpoint, with upscan stubbed.
  private def upload(filename: String, contentType: String): HttpResponse[Map[String, JsValue]] = {
    stubUpscanInitiate
    stubUpscanUpload
    val tempFile = SingletonTemporaryFileCreator.create(filename)
    Files.write(tempFile.path, List("foo").asJava)
    val form = MultiPart(
      "file",
      filename,
      contentType,
      Files.readAllBytes(tempFile.path)
    )
    Http(s"$serviceUrl/file")
      .header(apiTokenKey, appConfig.authorization)
      .postMulti(form)
      .execute(convertingResponseToJS)
  }

  // Calls the v1 JSON initiate endpoint, with upscan stubbed.
  private def initiate(filename: String, contentType: String): HttpResponse[Map[String, JsValue]] = {
    stubUpscanInitiate
    Http(s"$serviceUrl/file")
      .header("Content-Type", "application/json")
      .header(apiTokenKey, appConfig.authorization)
      .postData(Json.toJson(UploadRequest(fileName = filename, mimeType = contentType)).toString())
      .execute(convertingResponseToJS)
  }

  // Calls the v2 initiate endpoint, with upscan v2 stubbed.
  private def initiateV2(id: Option[String] = None): HttpResponse[Map[String, JsValue]] = {
    stubUpscanInitiateV2
    Http(s"$serviceUrl/file/initiate")
      .header("Content-Type", "application/json")
      .header(apiTokenKey, appConfig.authorization)
      .postData(Json.toJson(v2.FileStoreInitiateRequest(id = id)).toString())
      .execute(convertingResponseToJS)
  }

  // WireMock stub: upscan accepts any upload.
  private def stubUpscanUpload = {
    stubFor(
      post("/upscan/upload")
        .willReturn(
          aResponse()
            .withStatus(Status.OK)
        )
    )
  }

  // WireMock stub: upscan initiate returns a canned template.
  private def stubUpscanInitiate = {
    stubFor(
      post("/upscan/initiate")
        .willReturn(
          aResponse()
            .withBody(fromFile("upscan/initiate_wiremock-response.json"))
        )
    )
  }

  // WireMock stub: upscan v2 initiate returns the same canned template.
  private def stubUpscanInitiateV2 = {
    stubFor(
      post("/upscan/v2/initiate")
        .willReturn(
          aResponse()
            .withBody(fromFile("upscan/initiate_wiremock-response.json"))
        )
    )
  }

  // WireMock stub: S3 PUT of a single object succeeds.
  private def stubS3Upload(id: String) = {
    stubFor(
      put(s"/digital-tariffs-local/$id")
        .willReturn(
          aResponse()
            .withStatus(Status.OK)
        )
    )
  }

  // WireMock stub: S3 list-objects returns a canned listing.
  private def stubS3ListAll() = {
    stubFor(
      get("/digital-tariffs-local/?encoding-type=url")
        .willReturn(
          aResponse()
            .withStatus(Status.OK)
            .withBody(fromFile("aws/list-objects_response.xml"))
        )
    )
  }

  // WireMock stub: S3 bulk delete succeeds with a canned response.
  private def stubS3DeleteAll() = {
    stubFor(
      post(s"/digital-tariffs-local/?delete")
        .willReturn(
          aResponse()
            .withStatus(Status.OK)
            .withBody(fromFile("aws/delete-objects_response.xml"))
        )
    )
  }

  // WireMock stub: S3 DELETE of a single object succeeds.
  private def stubS3DeleteOne(id: String) = {
    stubFor(
      delete(s"/digital-tariffs-local/$id")
        .willReturn(
          aResponse()
            .withStatus(Status.OK)
        )
    )
  }

  // Parses a response body expected to be a JSON object into a field map.
  private def convertingResponseToJS: InputStream => Map[String, JsValue] = { is =>
    val body = IOUtils.toString(is, Charset.defaultCharset)
    Try(Json.parse(body))
      .map(_.as[JsObject].value)
      .getOrElse(throw new AssertionError(s"The response was not valid JSON object:\n $body"))
  }

  // Parses a response body expected to be any JSON value (typically an array).
  private def convertingArrayResponseToJS: InputStream => JsValue = { is =>
    val body = IOUtils.toString(is, Charset.defaultCharset)
    Try(Json.parse(body))
      .getOrElse(throw new AssertionError(s"The response was not valid JSON array:\n $body"))
  }

  // Counts the documents currently in the filestore collection.
  private def dbFileStoreSize: Int = {
    result(dbFileStore.collection.count(None, Some(0), 0, None, readConcern = readConcern), timeout).toInt
  }

  private def dropDbFileStore(): Unit = {
    result(dbFileStore.drop, timeout)
  }

  private def ensureDbFileStoreIndexes(): Unit = {
    result(dbFileStore.ensureIndexes, timeout)
  }
}
|
hmrc/binding-tariff-filestore
|
project/AppDependencies.scala
|
<reponame>hmrc/binding-tariff-filestore<gh_stars>0
import sbt._
import play.core.PlayVersion.current
/** Library dependencies for the service, split into compile and test scopes. */
object AppDependencies {

  // Both Apache HTTP artifacts must move in lock-step.
  private lazy val apacheHttpVersion = "4.5.13"

  val compile = Seq(
    "com.amazonaws"             % "aws-java-sdk-s3"             % "1.11.882",
    "uk.gov.hmrc"               %% "bootstrap-backend-play-28"  % "5.13.0",
    "uk.gov.hmrc"               %% "simple-reactivemongo"       % "8.0.0-play-28",
    "uk.gov.hmrc"               %% "play-json-union-formatter"  % "1.15.0-play-28",
    "org.apache.httpcomponents" % "httpclient"                  % apacheHttpVersion,
    "org.apache.httpcomponents" % "httpmime"                    % apacheHttpVersion
  )

  val jettyVersion = "9.4.32.v20200930"

  val test: Seq[ModuleID] = Seq(
    "com.github.tomakehurst"  % "wiremock"                   % "2.27.2",
    "com.typesafe.play"       %% "play-test"                 % current,
    "org.mockito"             % "mockito-core"               % "3.11.2",
    "org.jsoup"               % "jsoup"                      % "1.14.1",
    "org.pegdown"             % "pegdown"                    % "1.6.0",
    "org.scalatest"           %% "scalatest"                 % "3.2.9",
    "org.scalatestplus.play"  %% "scalatestplus-play"        % "5.1.0",
    "org.scalatestplus"       %% "mockito-3-4"               % "3.2.9.0",
    "com.vladsch.flexmark"    % "flexmark-all"               % "0.35.10",
    "org.scalacheck"          %% "scalacheck"                % "1.15.4",
    "uk.gov.hmrc"             %% "service-integration-test"  % "1.1.0-play-28",
    "uk.gov.hmrc"             %% "reactivemongo-test"        % "5.0.0-play-28",
    "org.scalaj"              %% "scalaj-http"               % "2.4.2",
    // Need to peg this version for wiremock - try removing this on next lib upgrade
    "org.eclipse.jetty"       % "jetty-server"               % jettyVersion,
    "org.eclipse.jetty"       % "jetty-servlet"              % jettyVersion
  ).map(_ % "test, it")
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/connector/UpscanConnectorSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.connector
import akka.actor.ActorSystem
import com.github.tomakehurst.wiremock.client.WireMock._
import org.mockito.BDDMockito.given
import org.scalatest.BeforeAndAfterEach
import org.scalatestplus.mockito.MockitoSugar
import play.api.http.Status
import play.api.libs.Files.SingletonTemporaryFileCreator
import play.api.libs.ws.WSClient
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.model.upscan.{UploadSettings, UpscanInitiateResponse, UpscanTemplate}
import uk.gov.hmrc.bindingtarifffilestore.model.{FileMetadata, FileWithMetadata}
import uk.gov.hmrc.bindingtarifffilestore.util._
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.play.audit.http.HttpAuditing
import uk.gov.hmrc.play.bootstrap.http.DefaultHttpClient
import scala.concurrent.ExecutionContext.Implicits.global
/**
  * Unit test for the upscan connector, with WireMock standing in for both the
  * upscan-initiate service and the AWS upload endpoint.
  */
class UpscanConnectorSpec
    extends UnitSpec
    with WithFakeApplication
    with WiremockTestServer
    with MockitoSugar
    with BeforeAndAfterEach
    with ResourceFiles {
  private val config = mock[AppConfig]
  private val actorSystem = ActorSystem.create("test")
  private val wsClient: WSClient = fakeApplication.injector.instanceOf[WSClient]
  private val httpAuditing = fakeApplication.injector.instanceOf[HttpAuditing]
  private val hmrcWsClient = new DefaultHttpClient(fakeApplication.configuration, httpAuditing, wsClient, actorSystem)
  private implicit val headers: HeaderCarrier = HeaderCarrier()
  private val connector = new UpscanConnector(config, hmrcWsClient)

  // Re-point the connector at the WireMock server before every test.
  override protected def beforeEach(): Unit = {
    super.beforeEach()
    given(config.upscanInitiateUrl).willReturn(wireMockUrl)
  }

  "Connector" should {
    "Initiate" in {
      // Given: upscan initiate replies with a canned template
      stubFor(
        post("/upscan/initiate")
          .willReturn(
            aResponse()
              .withBody(fromFile("upscan/initiate_response.json"))
          )
      )
      // When / Then: the connector parses the response into the model
      val response = await(connector.initiate(UploadSettings("callback", 1, 1000)))
      response shouldBe UpscanInitiateResponse(
        reference = "reference",
        uploadRequest = UpscanTemplate(
          href = "href",
          fields = Map(
            "key" -> "value"
          )
        )
      )
    }
    "Upload" in {
      // Given: the upload endpoint accepts the file
      stubFor(
        post("/path")
          .willReturn(
            aResponse()
              .withStatus(Status.NO_CONTENT)
          )
      )
      val templateUploading = UpscanTemplate(
        href = s"$wireMockUrl/path",
        fields = Map(
          "key" -> "value"
        )
      )
      val fileUploading = FileWithMetadata(
        SingletonTemporaryFileCreator.create("example-file.json"),
        FileMetadata("id", Some("file.txt"), Some("text/plain"))
      )
      // Then: a successful upload completes without error
      await(connector.upload(templateUploading, fileUploading))
    }
    "Upload with error handling" in {
      // Given: the upload endpoint rejects the file with a 502
      stubFor(
        post("/path")
          .willReturn(
            aResponse()
              .withStatus(Status.BAD_GATEWAY)
              .withBody("content")
          )
      )
      val templateUploading = UpscanTemplate(
        href = s"$wireMockUrl/path",
        fields = Map(
          "key" -> "value"
        )
      )
      val fileUploading = FileWithMetadata(
        SingletonTemporaryFileCreator.create("example-file.json"),
        FileMetadata("id", Some("file.txt"), Some("text/plain"))
      )
      // Then: the connector surfaces the failure with the status and body
      intercept[RuntimeException] {
        await(connector.upload(templateUploading, fileUploading))
      }.getMessage shouldBe "Bad AWS response for file [id] with status [502] body [content]"
    }
  }
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/model/FileMetadataSpec.scala
|
<reponame>hmrc/binding-tariff-filestore<filename>test/unit/uk/gov/hmrc/bindingtarifffilestore/model/FileMetadataSpec.scala<gh_stars>0
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.model
import java.time.Instant
import play.api.libs.json._
import uk.gov.hmrc.bindingtarifffilestore.util.UnitSpec
/**
 * Round-trip serialisation tests for [[FileMetadata]] covering its two JSON
 * representations: the Mongo persistence format and the REST API format.
 */
class FileMetadataSpec extends UnitSpec {

  "File Meta Data" should {

    // Fully-populated model used by every round-trip test below.
    val model = FileMetadata(
      id = "id",
      fileName = Some("fileName"),
      mimeType = Some("type"),
      url = Some("url"),
      publishable = true,
      published = true,
      scanStatus = Some(ScanStatus.READY),
      lastUpdated = Instant.EPOCH
    )

    // Mongo representation: the timestamp uses Mongo extended JSON,
    // i.e. {"$date": <epoch millis>}.
    val jsonMongo: JsObject = Json.obj(
      "id" -> JsString("id"),
      "fileName" -> JsString("fileName"),
      "mimeType" -> JsString("type"),
      "url" -> JsString("url"),
      "scanStatus" -> JsString("READY"),
      "publishable" -> JsBoolean(true),
      "published" -> JsBoolean(true),
      "lastUpdated" -> Json.obj("$date" -> JsNumber(0))
    )

    // Same as jsonMongo but missing the boolean flags, to prove the reader
    // applies defaults (publishable = false, published = false).
    val jsonMongoWithoutDefaults: JsObject = Json.obj(
      "id" -> JsString("id"),
      "fileName" -> JsString("fileName"),
      "mimeType" -> JsString("type"),
      "url" -> JsString("url"),
      "scanStatus" -> JsString("READY"),
      "lastUpdated" -> Json.obj("$date" -> JsNumber(0))
    )

    // REST representation: the timestamp is an ISO-8601 string. Field order
    // here matters because the write-side assertions compare via toString.
    val jsonREST: JsObject = Json.obj(
      "url" -> JsString("url"),
      "lastUpdated" -> JsString("1970-01-01T00:00:00Z"),
      "published" -> JsBoolean(true),
      "scanStatus" -> JsString("READY"),
      "fileName" -> JsString("fileName"),
      "mimeType" -> JsString("type"),
      "id" -> JsString("id"),
      "publishable" -> JsBoolean(true)
    )

    "Convert to Mongo JSON" in {
      val value = Json.toJson(model)(FileMetadataMongo.format)
      // NOTE(review): toString comparison is field-order sensitive;
      // structural equality (value shouldBe jsonMongo) would be more robust.
      value.toString() shouldBe jsonMongo.toString()
    }

    "Convert from Mongo JSON" in {
      val value = Json.fromJson[FileMetadata](jsonMongo)(FileMetadataMongo.format).get
      value shouldBe model
    }

    "Convert from Mongo JSON with defaults" in {
      val value = Json.fromJson[FileMetadata](jsonMongoWithoutDefaults)(FileMetadataMongo.format).get
      // Missing flags must fall back to false on read.
      value shouldBe model.copy(publishable = false, published = false)
    }

    "Convert to REST JSON" in {
      val value = Json.toJson(model)(FileMetadataREST.format)
      value.toString() shouldBe jsonREST.toString()
    }

    "Convert to REST JSON ignoring URL if Un-scanned" in {
      // With no scan status, neither "url" nor "scanStatus" is emitted.
      val value = Json.toJson(model.copy(scanStatus = None))(FileMetadataREST.format)
      value.toString() shouldBe Json
        .obj(
          "lastUpdated" -> JsString("1970-01-01T00:00:00Z"),
          "published" -> JsBoolean(true),
          "fileName" -> JsString("fileName"),
          "mimeType" -> JsString("type"),
          "id" -> JsString("id"),
          "publishable" -> JsBoolean(true)
        )
        .toString()
    }

    "Convert to REST JSON ignoring URL if Failed" in {
      // A FAILED scan keeps the status in the output but drops the "url".
      val value = Json.toJson(model.copy(scanStatus = Some(ScanStatus.FAILED)))(FileMetadataREST.format)
      value.toString() shouldBe Json
        .obj(
          "lastUpdated" -> JsString("1970-01-01T00:00:00Z"),
          "published" -> JsBoolean(true),
          "scanStatus" -> JsString("FAILED"),
          "fileName" -> JsString("fileName"),
          "mimeType" -> JsString("type"),
          "id" -> JsString("id"),
          "publishable" -> JsBoolean(true)
        )
        .toString()
    }

    "Convert from REST JSON" in {
      val value = Json.fromJson[FileMetadata](jsonREST)(FileMetadataREST.format).get
      value shouldBe model
    }

    "Calculate liveness of signed URL" in {
      // Builds metadata carrying the given (possibly pre-signed S3) URL.
      def metadata(url: String): FileMetadata = FileMetadata("id", Some("file"), Some("type"), Some(url))
      // Signed in year 3000 with a 1-day expiry => still live,
      // regardless of query-parameter order.
      metadata("https://s3.amazonaws.com/bucket/abc?X-Amz-Date=30000101T000000Zkey=value&X-Amz-Expires=86400").isLive shouldBe true
      metadata("https://s3.amazonaws.com/bucket/abc?X-Amz-Expires=86400&X-Amz-Date=30000101T000000Zkey=value").isLive shouldBe true
      // Signed in 2019 with zero expiry => expired.
      metadata("https://s3.amazonaws.com/bucket/file?X-Amz-Date=20190101T000000Z&X-Amz-Expires=0").isLive shouldBe false
      // A URL without AWS signature parameters is treated as always live.
      metadata("url").isLive shouldBe true
      // No URL at all is also treated as live.
      FileMetadata("id", Some("file"), Some("type")).isLive shouldBe true
    }
  }
}
|
hmrc/binding-tariff-filestore
|
test/util/uk/gov/hmrc/bindingtarifffilestore/util/BaseFeatureSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.util
import com.google.common.io.BaseEncoding
import org.scalatest._
import org.scalatest.featurespec.AnyFeatureSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.play.guice.GuiceOneServerPerSuite
import uk.gov.hmrc.bindingtarifffilestore.config.AppConfig
import uk.gov.hmrc.bindingtarifffilestore.repository.FileMetadataMongoRepository
import java.security.MessageDigest
import scala.concurrent.Await.result
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
/**
 * Base class for feature tests: boots a Guice application server and gives
 * every test a clean, freshly-indexed file-metadata Mongo collection.
 */
abstract class BaseFeatureSpec
    extends AnyFeatureSpec
    with Matchers
    with GivenWhenThen
    with GuiceOneServerPerSuite
    with BeforeAndAfterEach
    with BeforeAndAfterAll {

  /** Name of the header that carries the API auth token. */
  protected lazy val apiTokenKey = "X-Api-Token"

  /** Application configuration resolved from the running Guice application. */
  protected lazy val appConfig: AppConfig = app.injector.instanceOf[AppConfig]

  /** URL-safe base64 encoding of the SHA-256 digest of the input string. */
  protected def hash: String => String = { s: String =>
    val sha256 = MessageDigest.getInstance("SHA-256")
    BaseEncoding.base64Url().encode(sha256.digest(s.getBytes("UTF-8")))
  }

  // How long we are prepared to block on repository operations.
  private val awaitTimeout = 2.seconds

  private lazy val metadataStore: FileMetadataMongoRepository =
    app.injector.instanceOf[FileMetadataMongoRepository]

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    // Start every test from an empty collection with its indexes in place.
    dropCollection()
    result(metadataStore.ensureIndexes, awaitTimeout)
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    // Leave no test data behind once the whole suite has finished.
    dropCollection()
  }

  private def dropCollection(): Unit =
    result(metadataStore.drop, awaitTimeout)
}
|
hmrc/binding-tariff-filestore
|
test/unit/uk/gov/hmrc/bindingtarifffilestore/config/AppConfigSpec.scala
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.bindingtarifffilestore.config
import org.mockito.ArgumentMatchers.refEq
import org.mockito.Mockito
import org.mockito.Mockito.when
import org.scalatest.BeforeAndAfterEach
import org.scalatestplus.mockito.MockitoSugar
import play.api.Configuration
import uk.gov.hmrc.bindingtarifffilestore.util.{UnitSpec, WithFakeApplication}
import uk.gov.hmrc.play.bootstrap.config.ServicesConfig
/**
 * Unit tests for [[AppConfig]]: S3 credentials/endpoint resolution,
 * filestore host settings, upscan-initiate base URL and file-size limits.
 *
 * Improvements over the previous revision: the helper builders accumulate
 * configuration immutably (`Map ++ overrides`) instead of mutating a `var`
 * in a `foreach`, and the redundant `pairs.map(e => e._1 -> e._2).toMap`
 * is simplified to `pairs.toMap` (a Seq of pairs converts directly).
 */
class AppConfigSpec extends UnitSpec with WithFakeApplication with MockitoSugar with BeforeAndAfterEach {

  // Mocked so each test controls the upscan-initiate base URL lookup.
  val serviceConfig: ServicesConfig = mock[ServicesConfig]

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    // Drop recorded stubbings/invocations so tests stay independent.
    Mockito.reset(serviceConfig)
  }

  /**
   * Builds a FileStoreSizeConfiguration from mandatory size keys (defaulted
   * to 0) overridden by the supplied pairs.
   */
  private def fileStoreSizeConfiguration(pairs: (String, Int)*): FileStoreSizeConfiguration = {
    val config = Map[String, Int](
      "upscan.minFileSize" -> 0,
      "upscan.maxFileSize" -> 0
    ) ++ pairs
    new AppConfig(Configuration.from(config), serviceConfig).fileStoreSizeConfiguration
  }

  /**
   * Builds an S3Configuration from mandatory (blank) S3 keys overridden by
   * the supplied pairs.
   */
  private def s3ConfigWith(pairs: (String, String)*): S3Configuration = {
    val config = Map(
      "s3.secretKeyId" -> "",
      "s3.accessKeyId" -> "",
      "s3.region" -> "",
      "s3.bucket" -> "",
      "s3.endpoint" -> ""
    ) ++ pairs
    new AppConfig(Configuration.from(config), serviceConfig).s3Configuration
  }

  /** AppConfig whose upscan-initiate service base URL resolves to http://host:port. */
  private def upscanConfigWith(host: String, port: String, pairs: (String, String)*): AppConfig = {
    when(serviceConfig.baseUrl(refEq("upscan-initiate"))).thenReturn(s"http://$host:$port")
    new AppConfig(Configuration.from(pairs.toMap), serviceConfig)
  }

  /** AppConfig backed only by the supplied key/value pairs. */
  private def configWith(pairs: (String, String)*): AppConfig =
    new AppConfig(Configuration.from(pairs.toMap), serviceConfig)

  "Config" should {
    "decode AWS S3 Secret" in {
      // "dGVzdA==" is base64 for "test"; the secret is stored encoded.
      s3ConfigWith("s3.secretKeyId" -> "dGVzdA==").secret shouldBe "test"
    }
    "return AWS S3 Access Key" in {
      s3ConfigWith("s3.accessKeyId" -> "key").key shouldBe "key"
    }
    "return AWS S3 region" in {
      s3ConfigWith("s3.region" -> "region").region shouldBe "region"
    }
    "return AWS S3 bucket" in {
      s3ConfigWith("s3.bucket" -> "bucket").bucket shouldBe "bucket"
    }
    "return AWS S3 endpoint" in {
      s3ConfigWith("s3.endpoint" -> "endpoint").endpoint shouldBe Some("endpoint")
    }
    "return AWS S3 blank endpoint as None" in {
      s3ConfigWith("s3.endpoint" -> "").endpoint shouldBe None
    }
    "return AWS S3 base URL" in {
      // An explicit endpoint wins over the region-derived default.
      s3ConfigWith("s3.endpoint" -> "endpoint").baseUrl shouldBe "endpoint"
    }
    "return AWS S3 default base URL" in {
      // Without an endpoint the URL is derived from the region.
      s3ConfigWith(
        "s3.region" -> "region"
      ).baseUrl shouldBe "https://s3-region.amazonaws.com"
    }
    "return application Host" in {
      configWith("filestore.url" -> "url").filestoreUrl shouldBe "url"
    }
    "return application SSL" in {
      configWith("filestore.ssl" -> "true").filestoreSSL shouldBe true
    }
    "return upscan-initiate URL" in {
      upscanConfigWith("host", "123").upscanInitiateUrl shouldBe "http://host:123"
    }
    "return upscan min file size" in {
      fileStoreSizeConfiguration(
        "upscan.minFileSize" -> 12
      ).minFileSize shouldBe 12
    }
    "return upscan max file size" in {
      fileStoreSizeConfiguration(
        "upscan.maxFileSize" -> 123456
      ).maxFileSize shouldBe 123456
    }
  }
}
|
Gressa/PageRankAlgorithm
|
src/main/scala/net/sansa_stack/template/spark/rdf/PageRankImplementation.scala
|
package net.sansa_stack.template.spark.rdf
import org.apache.spark.sql.SparkSession
import java.net.{ URI => JavaURI }
import scala.collection.mutable
import org.apache.spark.graphx.Graph
import net.sansa_stack.rdf.spark.io._
import net.sansa_stack.rdf.spark.model.graph._
import net.sansa_stack.rdf.spark.model._
import org.apache.jena.riot.Lang
import org.apache.jena.graph.NodeFactory
import org.apache.spark.rdd.RDD
import scala.util.control._
import java.io.PrintWriter
import java.util.concurrent.TimeUnit;
import java.io.File
import java.net.URI
import scala.collection.mutable
import org.apache.jena.riot.Lang
import org.apache.jena.graph._
import net.sansa_stack.query.spark.query._
import net.sansa_stack.rdf.spark.kge.triples.Triples
/**
 * Static (fixed-iteration) PageRank over an RDF graph, implemented twice:
 * once by hand on Spark pair-RDDs and once via GraphX's staticPageRank.
 * Each phase (preprocessing, calculation, postprocessing) is timed, and the
 * ranking is exposed as RDF triples queried via SPARQL for printing.
 */
object PageRankImplementation {

  /** CLI entry point: parse the options and run, or print usage on failure. */
  def main(args: Array[String]) {
    parser.parse(args, Config()) match {
      case Some(config) =>
        run(config.in, config.predicate, config.iterations, config.considertotalpages, config.out, config.limit)
      case None =>
        println(parser.usage)
    }
  }

  /**
   * Creates a local SparkSession and runs both PageRank variants on the input.
   *
   * @param input              path to the N-Triples input file
   * @param predicate          URI of the predicate treated as an out-link edge
   * @param iterations         number of PageRank iterations to perform
   * @param considertotalpages if true, the (1 - damping) remainder is divided by the page count
   * @param output             output path -- NOTE(review): passed through but never written to;
   *                           results are only printed to stdout
   * @param limit              LIMIT applied to the SPARQL query that prints the ranking
   */
  def run(input: String, predicate: String, iterations: Int, considertotalpages: Boolean, output: String, limit: Int): Unit = {
    println("================")
    println("| PageRank |")
    println("================")
    val spark = SparkSession.builder
      .appName(s"PageRank ( $input )")
      .master("local[*]")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()
    println("============================================")
    println("Static Page Rank => Num of iterations: " + iterations)
    println("============================================")
    // Run the hand-rolled implementation, then the GraphX one, on the same input.
    staticPageRank(spark, predicate, input, output, iterations, considertotalpages, limit)
    staticGraphXPageRank(spark, predicate, input, iterations, limit)
    spark.stop
  }

  /**
   * Hand-rolled PageRank on pair-RDDs with damping factor 0.85 and a fixed
   * number of iterations. Pages without inbound links are re-added each
   * iteration with the damping remainder as their rank.
   */
  def staticPageRank(spark: SparkSession, outlink: String, input: String, output: String, iterations: Int, considertotalpages: Boolean, limit: Int): Unit = {
    var startTime = System.currentTimeMillis()
    val lang = Lang.NTRIPLES
    val triples = spark.rdf(lang)(input)
    val subjects = triples.getSubjects().distinct()
    // Keep only triples whose predicate is the configured out-link relation.
    val filteredTriples = triples.find(None, Some(NodeFactory.createURI(outlink)), None)
    // (page, outlinked page) pairs. NOTE(review): `pairs` and `links` are
    // never reassigned and could be `val`s.
    var pairs = filteredTriples.flatMap(f => Some(f.getSubject, f.getObject))
    // Adjacency lists <url, Iterable(outlink urls)>; cached because they are
    // re-joined against the ranks on every iteration.
    var links = pairs.distinct().groupByKey().cache()
    // Vertex set = subjects plus objects of the out-link triples.
    val allPages = (subjects.union(filteredTriples.getObjects().distinct())).distinct()
    val numOfAllPages = allPages.distinct().count().toDouble
    val preprocessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0) // in minutes
    println("Num of all pages: " + numOfAllPages)
    println("Static PageRank Preprocessing Time: " + preprocessingTime)
    // NOTE(review): each count() below triggers an extra Spark job.
    println("No of subjects: " + subjects.count())
    println("No of filtered triples: " + filteredTriples.count())
    startTime = System.currentTimeMillis()
    // Set the initial rank of every linked page to 1.0.
    var pageRank = links.mapValues(v => 1.0) // RDD of ranks <url, 1.0>
    // Pages with no outgoing links are absent from `links`; give them rank 1.0 too.
    var leftPages = allPages.subtract(pageRank.map(f => f._1))
    var rankedLeftPages = leftPages.distinct().flatMap(f => Some(f, 1.0))
    pageRank = pageRank.union(rankedLeftPages).distinct()
    // Iteratively propagate rank along out-links.
    for (i <- 1 to iterations) {
      // Each page distributes its current rank equally among its out-links.
      var contributions = links.join(pageRank)
        .values
        .flatMap {
          case (urls, rank) =>
            val size = urls.size.toDouble
            urls.map(url => (url, rank / size))
        }
      // Damping remainder (1 - 0.85), optionally normalised by the page count.
      var leftSum = 1 - 0.85
      if (considertotalpages) {
        leftSum = leftSum / numOfAllPages
      }
      pageRank = contributions.reduceByKey(_ + _).mapValues(leftSum + 0.85 * _)
      // Pages that received no contributions (no inbound links) get the
      // damping remainder as their rank for this iteration.
      val leftPages = allPages.subtract(pageRank.map(f => f._1))
      val rankedLeftPages = leftPages.distinct().flatMap(f => Some(f, leftSum))
      pageRank = pageRank.union(rankedLeftPages).distinct()
    }
    val pageRankCalculationTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("Static Page Rank calculation time: " + pageRankCalculationTime)
    startTime = System.currentTimeMillis()
    // Expose the ranking as RDF triples (page, pageRank-property, rank).
    // NOTE(review): the rank is encoded as a URI node rather than a literal;
    // presumably to keep the SPARQL layer happy -- confirm intent.
    val triplesRank = pageRank
      .sortBy(_._2, ascending = false)
      .map(f => Triple.create(NodeFactory.createURI(f._1.toString()), NodeFactory.createURI("http://property/pageRank"), NodeFactory.createURI(f._2.toString())))
    val sparqlQuery = s"""SELECT *
WHERE { ?s ?p ?o }
LIMIT $limit"""
    val result = triplesRank.sparql(sparqlQuery)
    result.rdd.foreach(println(_))
    val postProcessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("\nStatic Page Rank postprocessing time: " + postProcessingTime)
    val totalRunningTime = preprocessingTime + pageRankCalculationTime + postProcessingTime
    println("Static Page Rank total running time: " + totalRunningTime + " minutes")
    println("=======================================================")
  }

  /**
   * Same computation delegated to GraphX's built-in staticPageRank, used as
   * a reference/benchmark for the hand-rolled version above.
   */
  def staticGraphXPageRank(spark: SparkSession, outlink: String, input: String, iterations: Int, limit: Int): Unit = {
    var startTime = System.currentTimeMillis()
    val lang = Lang.NTRIPLES
    val triples = spark.rdf(lang)(input)
    val subjects = triples.getSubjects().distinct()
    val filteredTriples = triples.find(None, Some(NodeFactory.createURI(outlink)), None)
    // Build a GraphX graph from the filtered out-link triples.
    val graph = filteredTriples.asGraph()
    val preprocessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0) // in minutes
    println("\nStatic GraphX Preprocessing Time: " + preprocessingTime)
    startTime = System.currentTimeMillis()
    // GraphX static PageRank; join ranks back to vertex values:
    // (rank, vertex value, vertex id).
    val staticpagerank = graph.staticPageRank(iterations).vertices
    val spr = staticpagerank.join(graph.vertices)
      .map({ case (k, (r, v)) => (r, v, k) })
    val calculationTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("Static GraphX PageRank calculation time: " + calculationTime)
    startTime = System.currentTimeMillis()
    // Sort by rank (descending) and expose as (page, graphXPageRank, rank) triples.
    val triplesPageRank = spr.sortBy(_._1, ascending = false)
      .map(f => Triple.create(NodeFactory.createURI(f._2.toString()), NodeFactory.createURI("http://property/graphXPageRank"), NodeFactory.createURI(f._1.toString())))
    val sparqlQuery = s"""SELECT *
WHERE { ?s ?p ?o }
LIMIT $limit"""
    val result = triplesPageRank.sparql(sparqlQuery)
    result.rdd.foreach(println(_))
    val postProcessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("\nStatic GraphX Page Rank postprocessing time: " + postProcessingTime)
    val totalRunningTime = preprocessingTime + calculationTime + postProcessingTime
    println("Static GraphX Page Rank total running time: " + totalRunningTime + " minutes")
    println("================================================================")
  }

  /** Parsed command-line options with their defaults. */
  case class Config(in: String = "", predicate: String = "", iterations: Int = 0, considertotalpages: Boolean = false, out: String = "", limit: Int = 0)

  // The scopt CLI parser mapping flags onto Config fields.
  val parser = new scopt.OptionParser[Config]("Page Rank") {
    head(" PageRank ")
    opt[String]('i', "input").required().valueName("<path>")
      .action((x, c) => c.copy(in = x))
      .text("path to file that contains the data (in N-Triples format)")
    help("help").text("prints this usage text")
    opt[Int]('n', "iterations")
      .action((x, c) => c.copy(iterations = x))
      .text("number of iterations")
    opt[Boolean]('c', "considertotalpages")
      .action((x, c) => c.copy(considertotalpages = x))
      .text("consider total pages")
    opt[String]('p', "predicate")
      .action((x, c) => c.copy(predicate = x))
      .text("Predicate")
    opt[Int]('q', "limit")
      .action((x, c) => c.copy(limit = x))
      .text("Query Limit")
    opt[String]('o', "output").valueName("<directory>")
      .action((x, c) => c.copy(out = x))
      .text("path to file that contains the page rank of data (in N-Triples format)")
  }
}
|
Gressa/PageRankAlgorithm
|
src/main/scala/net/sansa_stack/template/spark/rdf/PageRankAlgorithmConvergence.scala
|
package net.sansa_stack.template.spark.rdf
import org.apache.spark.sql.SparkSession
import java.net.{ URI => JavaURI }
import scala.collection.mutable
import org.apache.spark.graphx.Graph
import net.sansa_stack.rdf.spark.io._
import net.sansa_stack.rdf.spark.model.graph._
import net.sansa_stack.rdf.spark.model._
import org.apache.jena.riot.Lang
import org.apache.jena.graph.NodeFactory
import org.apache.spark.rdd.RDD
import scala.util.control._
import java.io.PrintWriter
import java.util.concurrent.TimeUnit;
import java.io.File
import java.net.URI
import scala.collection.mutable
import org.apache.jena.riot.Lang
import org.apache.jena.graph._
import net.sansa_stack.query.spark.query._
import net.sansa_stack.rdf.spark.kge.triples.Triples
/**
 * PageRank over an RDF graph that iterates until the ranks converge within a
 * tolerance, implemented twice: by hand on Spark pair-RDDs and via GraphX's
 * tolerance-based pageRank. Each phase is timed and the results are printed
 * through a SPARQL query over generated rank triples.
 */
object PageRankAlgorithmConvergence {

  /** CLI entry point: parse the options and run, or print usage on failure. */
  def main(args: Array[String]) {
    parser.parse(args, Config()) match {
      case Some(config) =>
        run(config.in, config.predicate, config.tolerance, config.considertotalpages, config.out, config.limit)
      case None =>
        println(parser.usage)
    }
  }

  /**
   * Creates a local SparkSession and runs both convergence-based variants.
   *
   * @param input              path to the N-Triples input file
   * @param predicate          URI of the predicate treated as an out-link edge
   * @param tolerance          convergence threshold on per-page rank change
   * @param considertotalpages if true, the (1 - damping) remainder is divided by the page count
   * @param output             output path -- NOTE(review): never used; results only printed
   * @param limit              LIMIT applied to the SPARQL query that prints the ranking
   */
  def run(input: String, predicate: String, tolerance: Double, considertotalpages: Boolean, output: String, limit: Int): Unit = {
    println("================")
    println("| PageRank |")
    println("================")
    val spark = SparkSession.builder
      .appName(s"PageRank ( $input )")
      .master("local[*]")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .getOrCreate()
    println("\n\n================================================")
    println("PageRank with Convergence => Tolerance: " + tolerance)
    println("================================================")
    // Run the hand-rolled implementation, then the GraphX one, on the same input.
    convergencePageRank(spark, predicate, input, tolerance, considertotalpages, limit)
    convergenceGraphXPageRank(spark, predicate, input, tolerance, limit)
    spark.stop
  }

  /**
   * Hand-rolled PageRank (damping 0.85) iterated until every page's rank
   * changes by less than `tolerance` between successive iterations.
   */
  def convergencePageRank(spark: SparkSession, outlink: String, input: String, tolerance: Double, considertotalpages: Boolean, limit: Int): Unit = {
    var startTime = System.currentTimeMillis()
    val lang = Lang.NTRIPLES
    val triples = spark.rdf(lang)(input)
    val subjects = triples.getSubjects().distinct()
    // Keep only triples whose predicate is the configured out-link relation.
    val filteredTriples = triples.find(None, Some(NodeFactory.createURI(outlink)), None)
    val pairs = filteredTriples.map { f => (f.getSubject, f.getObject) }
    // Adjacency lists, cached because they are re-joined every iteration.
    val links = pairs.distinct().groupByKey().cache()
    // Vertex set = subjects plus objects of the out-link triples.
    val allPages = subjects.union(filteredTriples.getObjects().distinct()).distinct()
    val numOfAllPages = allPages.distinct().count().toDouble
    val preprocessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0) // in minutes
    println("Num of all pages: " + numOfAllPages)
    println("\nPageRank with Convergence Preprocessing Time: " + preprocessingTime)
    startTime = System.currentTimeMillis()
    // Initial rank 1.0 for every page, including those with no out-links.
    var pageRank = links.mapValues(v => 1.0)
    var leftPages = allPages.subtract(pageRank.map(f => f._1))
    var rankedLeftPages = leftPages.distinct().flatMap(f => Some(f, 1.0))
    pageRank = pageRank.union(rankedLeftPages).distinct()
    var didConverge = false
    while (!didConverge) {
      // Each page distributes its current rank equally among its out-links.
      val contributions = links.join(pageRank)
        .values
        .flatMap {
          case (urls, rank) =>
            val size = urls.size.toDouble
            urls.map(url => (url, rank / size))
        }
      // Damping remainder, optionally normalised by the page count.
      var leftSum = 1 - 0.85
      if (considertotalpages) {
        leftSum = leftSum / numOfAllPages
      }
      // Pages with no inbound links are re-added with the damping remainder.
      var newPageRanks = contributions.reduceByKey(_ + _).mapValues(leftSum + 0.85 * _)
      leftPages = allPages.subtract(newPageRanks.map(f => f._1))
      rankedLeftPages = leftPages.distinct().flatMap(f => Some(f, leftSum))
      newPageRanks = newPageRanks.union(rankedLeftPages).distinct()
      // Convergence check: converged only if EVERY page moved less than
      // `tolerance`; break out early at the first page that did not.
      // NOTE(review): this collects all new ranks to the driver and runs a
      // full RDD filter per page -- O(n) Spark jobs per iteration. Joining
      // old and new ranks and taking the max delta would be far cheaper.
      val outer = new Breaks;
      outer.breakable {
        for (newRank <- newPageRanks.collect()) {
          // Look up the previous rank of this page in the old RDD.
          val oldRank = pageRank.filter { case (key, value) => key == newRank._1 }
          if (Math.abs(newRank._2 - oldRank.values.sum()) < tolerance) {
            didConverge = true
            pageRank = newPageRanks
          } else {
            didConverge = false
            pageRank = newPageRanks
            outer.break
          }
        }
      }
    }
    val pageRankCalculationTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("Page Rank with Convergence calculation time: " + pageRankCalculationTime)
    startTime = System.currentTimeMillis()
    // Expose the ranking as (page, pageRank-property, rank) triples.
    // NOTE(review): the rank is encoded as a URI node rather than a literal.
    val triplesRank = pageRank.sortBy(_._2, ascending = false)
      .map(f => Triple.create(NodeFactory.createURI(f._1.toString()), NodeFactory.createURI("http://property/pageRank"), NodeFactory.createURI(f._2.toString())))
    val sparqlQuery = s"""SELECT *
WHERE { ?s ?p ?o }
LIMIT $limit"""
    val result = triplesRank.sparql(sparqlQuery)
    result.rdd.foreach(println(_))
    val postProcessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("\nPage Rank with Convergence postprocessing time: " + postProcessingTime)
    val totalRunningTime = preprocessingTime + pageRankCalculationTime + postProcessingTime
    println("Page Rank with Convergence total running time: " + totalRunningTime + " minutes")
    println("===================================================================")
  }

  /**
   * Same computation delegated to GraphX's tolerance-based pageRank, used as
   * a reference/benchmark for the hand-rolled version above.
   */
  def convergenceGraphXPageRank(spark: SparkSession, outlink: String, input: String, tolerance: Double, limit: Int): Unit = {
    var startTime = System.currentTimeMillis()
    val lang = Lang.NTRIPLES
    val triples = spark.rdf(lang)(input)
    val subjects = triples.getSubjects().distinct()
    val filteredTriples = triples.find(None, Some(NodeFactory.createURI(outlink)), None)
    // Build a GraphX graph from the filtered out-link triples.
    val graph = filteredTriples.asGraph()
    val preprocessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0) // in minutes
    println("\nConvergence GraphX Preprocessing Time: " + preprocessingTime)
    startTime = System.currentTimeMillis()
    val convergenceStartTimePageRank = System.currentTimeMillis()
    // Tuples are (rank, vertex value, vertex id).
    val pagerank = graph.pageRank(tolerance).vertices
    val cpr = pagerank.join(graph.vertices)
      .map({ case (k, (r, v)) => (r, v, k) })
    val calculationTime = (System.currentTimeMillis() - convergenceStartTimePageRank) / (60 * 1000.0)
    println("\nGraphX Page Rank with Convergence calculation time: " + calculationTime)
    startTime = System.currentTimeMillis()
    // NOTE(review): here the RANK (f._1) is used as the triple SUBJECT and the
    // vertex value (f._2) as the OBJECT -- the opposite of
    // PageRankImplementation.staticGraphXPageRank. Looks like a swapped-argument
    // bug; confirm which order is intended.
    val triplesRanked = cpr.sortBy(_._1, ascending = false)
      .map(f => Triple.create(NodeFactory.createURI(f._1.toString()), NodeFactory.createURI("http://property/graphXPageRank"), NodeFactory.createURI(f._2.toString())))
    val sparqlQuery = s"""SELECT *
WHERE { ?s ?p ?o }
LIMIT $limit"""
    val result = triplesRanked.sparql(sparqlQuery)
    result.rdd.foreach(println(_))
    val postProcessingTime = (System.currentTimeMillis() - startTime) / (60 * 1000.0)
    println("\nGraphX Page Rank with Convergence postprocessing time: " + postProcessingTime)
    val totalConvergenceGraphXRunningTime = preprocessingTime + calculationTime + postProcessingTime
    println("GraphX Page Rank with Convergence total running time: " + totalConvergenceGraphXRunningTime + " minutes")
    println("===============================================================================")
  }

  /** Parsed command-line options. NOTE(review): `iterations` is never read here. */
  case class Config(in: String = "", predicate: String = "", iterations: Int = 0, tolerance: Double = 0, considertotalpages: Boolean = false, out: String = "", limit: Int = 0)

  // The scopt CLI parser mapping flags onto Config fields.
  val parser = new scopt.OptionParser[Config]("Page Rank") {
    head(" PageRank ")
    opt[String]('i', "input").required().valueName("<path>")
      .action((x, c) => c.copy(in = x))
      .text("path to file that contains the data (in N-Triples format)")
    help("help").text("prints this usage text")
    opt[Double]('t', "tolerance")
      .action((x, c) => c.copy(tolerance = x))
      .text("tolerance")
    opt[Boolean]('c', "considertotalpages")
      .action((x, c) => c.copy(considertotalpages = x))
      .text("consider total pages")
    opt[String]('p', "predicate")
      .action((x, c) => c.copy(predicate = x))
      .text("Predicate")
    opt[Int]('q', "limit")
      .action((x, c) => c.copy(limit = x))
      .text("Query Limit")
    opt[String]('o', "output").valueName("<directory>")
      .action((x, c) => c.copy(out = x))
      .text("path to file that contains the page rank of data (in N-Triples format)")
  }
}
|
three/scala-maxmind-iplookups
|
src/test/scala/com.snowplowanalytics.maxmind.iplookups/IpLookupsTest.scala
|
/*
* Copyright (c) 2012-2018 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.maxmind.iplookups
import java.net.UnknownHostException
import com.maxmind.geoip2.exception.AddressNotFoundException
import org.specs2.mutable.Specification
import scalaz._
import Scalaz._
import model._
/**
 * Fixtures for [[IpLookupsTest]]: a factory building IpLookups from the
 * bundled MaxMind test databases, and a table of IPs with their expected
 * lookup results (scalaz Validation: .success for a hit, .failure for the
 * expected exception).
 */
object IpLookupsTest {

  /** Builds an IpLookups over the four bundled test .mmdb databases. */
  def ipLookupsFromFiles(memCache: Boolean, lruCache: Int): IpLookups = {
    val geoFile = getClass.getResource("GeoIP2-City-Test.mmdb").getFile
    val ispFile = getClass.getResource("GeoIP2-ISP-Test.mmdb").getFile
    val domainFile = getClass.getResource("GeoIP2-Domain-Test.mmdb").getFile
    val connectionTypeFile =
      getClass.getResource("GeoIP2-Connection-Type-Test.mmdb").getFile
    IpLookups(
      Some(geoFile),
      Some(ispFile),
      Some(domainFile),
      Some(connectionTypeFile),
      memCache,
      lruCache)
  }

  // Databases and test data taken from https://github.com/maxmind/MaxMind-DB/tree/master/test-data
  // Each IpLookupResult holds, in order: geo location, ISP, organization,
  // domain, connection type.
  val testData: Map[String, IpLookupResult] = Map(

    // Present in the city and connection-type databases only.
    "192.168.127.12" -> IpLookupResult(
      IpLocation(
        countryCode = "CN",
        countryName = "China",
        region = Some("22"),
        city = Some("Changchun"),
        latitude = 43.88F,
        longitude = 125.3228F,
        timezone = Some("Asia/Harbin"),
        postalCode = None,
        metroCode = None,
        regionName = Some("<NAME>")
      ).success.some,
      new AddressNotFoundException("The address 192.168.127.12 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.168.127.12 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.168.127.12 is not in the database.").failure.some,
      "Dialup".success.some
    ),

    // Present in city/ISP databases; absent from domain and connection type.
    "172.16.31.10" -> IpLookupResult(
      IpLocation(
        countryCode = "US",
        countryName = "United States",
        region = Some("WA"),
        city = Some("Milton"),
        latitude = 47.2513F,
        longitude = -122.3149F,
        timezone = Some("America/Los_Angeles"),
        postalCode = Some("98354"),
        metroCode = Some(819),
        regionName = Some("Washington")
      ).success.some,
      "Century Link".success.some,
      "Lariat Software".success.some,
      new AddressNotFoundException("The address 172.16.31.10 is not in the database.").failure.some,
      new AddressNotFoundException("The address 172.16.31.10 is not in the database.").failure.some
    ),

    // Present everywhere except the connection-type database.
    "192.168.3.11" -> IpLookupResult(
      IpLocation(
        countryCode = "BT",
        countryName = "Bhutan",
        region = None,
        city = None,
        latitude = 27.5F,
        longitude = 90.5F,
        timezone = Some("Asia/Thimphu"),
        postalCode = None,
        metroCode = None,
        regionName = None
      ).success.some,
      "Loud Packet".success.some,
      "zudoarichikito_".success.some,
      "shoesfin.NET".success.some,
      new AddressNotFoundException("The address 192.168.3.11 is not in the database.").failure.some
    ),

    // Invalid IP address, as per
    // http://stackoverflow.com/questions/10456044/what-is-a-good-invalid-ip-address-to-use-for-unit-tests
    // Expected to miss every database.
    "192.0.2.0" -> IpLookupResult(
      new AddressNotFoundException("The address 192.0.2.0 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.0.2.0 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.0.2.0 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.0.2.0 is not in the database.").failure.some,
      new AddressNotFoundException("The address 192.0.2.0 is not in the database.").failure.some
    )
  )
}
/**
 * specs2 spec: looks up each fixture IP under every combination of
 * memory-cache flag and LRU-cache size and checks the result against
 * [[IpLookupsTest.testData]], plus edge cases (unresolvable host, no
 * databases configured).
 */
class IpLookupsTest extends Specification {

  "Looking up some IP address locations should match their expected locations" should {

    // Helpers producing readable example names for each cache combination.
    val mcf: Boolean => String = mc => if (mc) "using" else "without using"
    val lcf: Int => String =
      lc => if (lc > 0) "LRU cache sized %s".format(lc) else "no LRU cache"
    val formatter: (String, Boolean, Int) => String =
      (ip, mcache, lcache) =>
        "The IP address %s looked up (%s memory cache and with %s)".format(
          ip,
          mcf(mcache),
          lcf(lcache))

    import IpLookupsTest._

    // Generate one example group per (memCache, lruCache) combination
    // for every fixture IP.
    for {
      memCache <- Seq(true, false)
      lruCache <- Seq(0, 1000, 10000)
    } {
      val ipLookups = ipLookupsFromFiles(memCache, lruCache)
      testData foreach {
        case (ip, expected) =>
          formatter(ip, memCache, lruCache) should {
            val actual = ipLookups.performLookups(ip)
            matchIpLookupResult(actual, expected)
          }
      }
    }

    "providing an invalid ip should fail" in {
      // "not" cannot be resolved, so every field fails with UnknownHostException.
      // NOTE(review): the expected message text is OS/resolver dependent;
      // may be brittle on non-glibc platforms.
      val ipLookups = ipLookupsFromFiles(true, 0)
      val expected = IpLookupResult(
        new UnknownHostException("not: Name or service not known").failure.some,
        new UnknownHostException("not: Name or service not known").failure.some,
        new UnknownHostException("not: Name or service not known").failure.some,
        new UnknownHostException("not: Name or service not known").failure.some,
        new UnknownHostException("not: Name or service not known").failure.some
      )
      val actual = ipLookups.performLookups("not")
      matchIpLookupResult(actual, expected)
    }

    "providing no files should return Nones" in {
      // With no databases configured, every field is simply None.
      val ipLookups = IpLookups(None, None, None, None, true, 0)
      val expected = IpLookupResult(None, None, None, None, None)
      val actual = ipLookups.performLookups("192.168.3.11")
      matchIpLookupResult(actual, expected)
    }
  }

  /** Emits one example per IpLookupResult field, comparing actual vs expected. */
  private def matchIpLookupResult(actual: IpLookupResult, expected: IpLookupResult) = {
    s"have iplocation = ${actual.ipLocation}" in {
      matchThrowables(actual.ipLocation, expected.ipLocation)
    }
    s"have isp = ${actual.isp}" in { matchThrowables(actual.isp, expected.isp) }
    s"have org = ${actual.organization}" in {
      matchThrowables(actual.organization, expected.organization)
    }
    s"have domain = ${actual.domain}" in {
      matchThrowables(actual.domain, expected.domain)
    }
    s"have net speed = ${actual.connectionType}" in {
      matchThrowables(actual.connectionType, expected.connectionType)
    }
  }

  // Exceptions have no structural equality (== is reference equality), so
  // Failure values are compared by message instead of by value.
  private def matchThrowables[A](
    actual: Option[Validation[Throwable, A]],
    expected: Option[Validation[Throwable, A]]
  ): Boolean = actual match {
    case None => actual must_== expected
    case Some(r) =>
      r match {
        case Success(_) => actual must_== expected
        case Failure(_) =>
          getErrorMessage(actual) must_== getErrorMessage(expected)
      }
  }

  /** Maps a Throwable failure to its message so Validations can be compared. */
  private def getErrorMessage[A](
    e: Option[Validation[Throwable, A]]): Option[Validation[String, A]] =
    e.map(_.leftMap(_.getMessage))
}
|
three/scala-maxmind-iplookups
|
src/main/scala/com.snowplowanalytics.maxmind.iplookups/IpLookups.scala
|
<reponame>three/scala-maxmind-iplookups
/*
* Copyright (c) 2012-2018 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.maxmind.iplookups
import java.io.File
import java.net.InetAddress
import com.maxmind.db.CHMCache
import com.maxmind.geoip2.DatabaseReader
import com.twitter.util.SynchronizedLruMap
import scalaz._
import model._
/** Companion object to hold alternative constructors. */
object IpLookups {

  /**
   * Alternative constructor taking Strings rather than Files
   *
   * @param geoFile Geographic lookup database filepath
   * @param ispFile ISP lookup database filepath
   * @param domainFile Domain lookup database filepath
   * @param connectionTypeFile Connection type lookup database filepath
   * @param memCache Whether to use MaxMind's CHMCache
   * @param lruCache Maximum size of SynchronizedLruMap cache
   */
  def apply(
    geoFile: Option[String] = None,
    ispFile: Option[String] = None,
    domainFile: Option[String] = None,
    connectionTypeFile: Option[String] = None,
    memCache: Boolean = true,
    lruCache: Int = 10000
  ): IpLookups = {
    // Lift each optional filepath into an optional File handle.
    def asFile(path: Option[String]): Option[File] = path.map(p => new File(p))
    new IpLookups(
      asFile(geoFile),
      asFile(ispFile),
      asFile(domainFile),
      asFile(connectionTypeFile),
      memCache,
      lruCache
    )
  }
}
/**
* IpLookups is a Scala wrapper around MaxMind's own DatabaseReader Java class.
*
* Two main differences:
*
* 1. getLocation(ipS: String) now returns an IpLocation
* case class, not a raw MaxMind Location
* 2. IpLookups introduces an LRU cache to improve
* lookup performance
*
* Inspired by:
* https://github.com/jt6211/hadoop-dns-mining/blob/master/src/main/java/io/covert/dns/geo/IpLookups.java
*
* @param geoFile Geographic lookup database file
* @param ispFile ISP lookup database file
* @param domainFile Domain lookup database file
* @param connectionTypeFile Connection type lookup database file
* @param memCache Whether to use MaxMind's CHMCache
* @param lruCache Maximum size of SynchronizedLruMap cache
*/
class IpLookups(
  geoFile: Option[File] = None,
  ispFile: Option[File] = None,
  domainFile: Option[File] = None,
  connectionTypeFile: Option[File] = None,
  memCache: Boolean = true,
  lruCache: Int = 10000
) {

  // Initialise the cache; lruCache <= 0 disables LRU caching entirely.
  private val lru =
    if (lruCache > 0)
      Some(new SynchronizedLruMap[String, IpLookupResult](lruCache))
    else None // Of type mutable.Map[String, LookupData]

  // Configure the lookup services
  private val geoService = getService(geoFile)
  private val ispService =
    getService(ispFile).map(SpecializedReader(_, ReaderFunctions.isp))
  // NOTE(review): the organization reader is built from ispFile as well (not a
  // separate org file) — presumably org data ships inside the ISP DB; confirm.
  private val orgService =
    getService(ispFile).map(SpecializedReader(_, ReaderFunctions.org))
  private val domainService =
    getService(domainFile).map(SpecializedReader(_, ReaderFunctions.domain))
  private val connectionTypeService =
    getService(connectionTypeFile).map(SpecializedReader(_, ReaderFunctions.connectionType))

  /**
   * Get a LookupService from a database file
   *
   * @param serviceFile The database file
   * @return LookupService
   */
  private def getService(serviceFile: Option[File]): Option[DatabaseReader] =
    serviceFile.map { f =>
      val builder = new DatabaseReader.Builder(f)
      (
        // memCache wraps the reader in MaxMind's CHMCache for repeated lookups.
        if (memCache) builder.withCache(new CHMCache())
        else builder
      ).build()
    }

  /**
   * Returns the MaxMind location for this IP address
   * as an IpLocation, or None if MaxMind cannot find
   * the location.
   */
  // Dispatches to the LRU-backed path when a cache was configured.
  val performLookups: String => IpLookupResult = (s: String) =>
    lru
      .map(performLookupsWithLruCache(_, s))
      .getOrElse(performLookupsWithoutLruCache(s))

  /**
   * This version does not use the LRU cache.
   * Concurrently looks up information
   * based on an IP address from one or
   * more MaxMind LookupServices
   *
   * @param ip IP address
   * @return Tuple containing the results of the
   *         LookupServices
   */
  private def performLookupsWithoutLruCache(ip: String): IpLookupResult = {
    // Parsed once; each service lookup short-circuits on a parse failure.
    val ipAddress = getIpAddress(ip)

    /**
     * Creates a Validation boxing the result of using a lookup service on the ip
     * @param service ISP, domain or connection type LookupService
     * @return the result of the lookup
     */
    def getLookup(service: Option[SpecializedReader]): Option[Validation[Throwable, String]] =
      service.map { s =>
        for {
          ipA <- ipAddress
          v <- s.getValue(ipA)
        } yield v
      }
    val ipLocation: Option[Validation[Throwable, IpLocation]] =
      geoService.map { gs =>
        for {
          ipA <- ipAddress
          v <- Validation.fromTryCatch(gs.city(ipA))
        } yield IpLocation.apply(v)
      }
    IpLookupResult(
      ipLocation,
      getLookup(ispService),
      getLookup(orgService),
      getLookup(domainService),
      getLookup(connectionTypeService)
    )
  }

  /**
   * Returns the MaxMind location for this IP address
   * as an IpLocation, or None if MaxMind cannot find
   * the location.
   *
   * This version uses and maintains the LRU cache.
   *
   * Don't confuse the LRU returning None (meaning that no
   * cache entry could be found), versus an extant cache entry
   * containing None (meaning that the IP address is unknown).
   */
  private def performLookupsWithLruCache(
    lru: SynchronizedLruMap[String, IpLookupResult],
    ip: String
  ): IpLookupResult = lru.get(ip) match {
    case Some(result) => result // In the LRU cache
    case None => // Not in the LRU cache
      val result = performLookupsWithoutLruCache(ip)
      lru.put(ip, result)
      result
  }

  /** Transforms a String into an Validation[Throwable, InetAddress] */
  private def getIpAddress(ip: String): Validation[Throwable, InetAddress] =
    Validation.fromTryCatch(InetAddress.getByName(ip))
}
|
three/scala-maxmind-iplookups
|
project/plugins.sbt
|
<gh_stars>0
// Publishing artifacts to Bintray
addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.3")
// Source formatting (scalafmt)
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.15")
// Unified Scaladoc across subprojects
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.1")
// Project website generation
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.3.2")
// Publishing the site to GitHub Pages
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.2")
// Test coverage reporting
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")
|
algorithmiaio/plugin-sdk
|
build.sbt
|
<filename>build.sbt
// Build-wide metadata applied to every subproject (used for publishing/POM).
inThisBuild(List(
  organization := "com.algorithmia",
  homepage := Some(url("https://github.com/algorithmiaio/plugin-sdk")),
  licenses := List("MIT" -> url("https://github.com/algorithmiaio/plugin-sdk/blob/master/LICENSE")),
  developers := List(
    Developer(
      "jbooth",
      "<NAME>",
      "<EMAIL>",
      url("https://algorithmia.com")
    )
  )
))
name := "plugin-sdk"
organization := "com.algorithmia"
description := "Algorithmia plugin SDK"
// Forbid including Scala related libraries
// (this is a pure-Java artifact: no scala-library dependency is added)
autoScalaLibrary := false
// Do not append Scala versions to generated artifacts
crossPaths := false
|
algorithmiaio/plugin-sdk
|
project/plugins.sbt
|
// S3-backed resolver for dependencies/artifacts
addSbtPlugin("com.frugalmechanic" % "fm-sbt-s3-resolver" % "[0.11.0,1)")
// Fat-jar assembly
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.15.0")
// Git integration (e.g. versioning from git)
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0")
// Automated releases from CI
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7")
|
macrodatalab/spark-bo
|
examples/src/main/scala/com/bigobject/spark/examples/boOption.scala
|
package com.bigobject.spark.examples
/**
 * Minimal command-line option parser.
 *
 * Recognised shapes:
 *   --flag            stored as the boolean true
 *   --key value       stored as the raw string value
 * Tokens that do not start with "--" are skipped.
 */
class BoOption(args : Array[String]) {
  private val options = nextOption(Map(), args.toList)

  // Recursively folds the token list into a key -> value map.
  private def nextOption(opt : Map[String, Any], list: List[String]) : Map[String, Any] = {
    list match {
      case Nil => opt
      // Two consecutive "--" tokens: the first is a boolean flag.
      case opt1 :: opt2 :: tail if (opt1.startsWith("--") && opt2.startsWith("--")) =>
        nextOption(opt ++ Map(opt1.substring(2) -> true), opt2 :: tail)
      // "--key value" pair.
      case opt1 :: opt2 :: tail if (opt1.startsWith("--")) =>
        nextOption(opt ++ Map(opt1.substring(2) -> opt2), tail)
      // Trailing "--" token: boolean flag.
      case opt1 :: Nil if (opt1.startsWith("--")) =>
        nextOption(opt ++ Map(opt1.substring(2) -> true), list.tail)
      // Fix: previously the match was non-exhaustive, so any stray token not
      // starting with "--" raised a MatchError; now it is skipped.
      case _ :: tail =>
        nextOption(opt, tail)
    }
  }

  /** Returns the flag's value, or false when the key is absent. */
  def getBoolean(key: String): Boolean = {
    if (options.contains(key))
      options(key).asInstanceOf[Boolean]
    else
      false
  }

  /** Returns the option's string value, or "" when the key is absent. */
  def getString(key: String): String = {
    if (options.contains(key))
      options(key).asInstanceOf[String]
    else
      ""
  }

  /** Returns the option's value parsed as Int, or 0 when the key is absent. */
  def getInt(key: String): Int = {
    if (options.contains(key))
      options(key).asInstanceOf[String].toInt
    else
      0
  }
}
|
macrodatalab/spark-bo
|
spark-1.5.1/bigobject/src/main/scala/com/bigobject/spark/BORelation.scala
|
<filename>spark-1.5.1/bigobject/src/main/scala/com/bigobject/spark/BORelation.scala
/*
* Copyright 2015 bigobject.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bigobject.spark
import java.util.Properties
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.{Logging, Partition}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, DataFrame, Row, SQLContext}
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
/**
 * Spark SQL data source entry point for BigObject relations. Resolves the
 * "url" (comma-separated BO servers) and "dbtable" options into a BORelation.
 */
class DefaultSource
  extends RelationProvider
  with SchemaRelationProvider
  with CreatableRelationProvider
  with Logging {

  /** Returns a new base relation with the given parameters. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // Schema is resolved lazily from the BO server (see BORelation.schema).
    createRelation(sqlContext, parameters, null)
  }

  /** Returns a new base relation with the given parameters and schema. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      schema: StructType): BaseRelation = {
    val url = parameters.getOrElse("url", sys.error("Option 'url' not specified"))
    val table = parameters.getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
    val properties = new Properties()
    parameters.foreach(kv => properties.setProperty(kv._1, kv._2))
    BORelation(url, table, schema, properties)(sqlContext)
  }

  /**
   * Returns a new base relation with the given parameters and DataFrame.
   * Creates the BO table if necessary (honouring SaveMode) and writes the
   * DataFrame's rows into it.
   */
  override def createRelation(
      sqlContext: SQLContext,
      mode: SaveMode,
      parameters: Map[String, String],
      data: DataFrame): BaseRelation = {
    val url = parameters.getOrElse("url", sys.error("Option 'url' not specified"))
    val urls = url.split(",")
    val table = parameters.getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
    val overwrite = (mode == SaveMode.Overwrite)
    var exist = false
    // Fix: removed the dead `var u = null` (of type Null) that was only ever
    // shadowed by the for-comprehension binding below.
    for (u <- urls) {
      if (BORDD.isTableExist(u, table)) {
        exist = true
        if (overwrite) {
          val boApi = new BOIface(u, "cmd", "post", Array(s"DROP TABLE $table"))
          if (boApi.httpStatus != 200 || boApi.status != 0) {
            logError(s"Failed to delete existing $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
            return null
          }
        }
      }
    }
    if (exist) {
      if (mode == SaveMode.ErrorIfExists) {
        sys.error(s"Table $table exists.")
      }
    }
    if (!exist || overwrite) {
      // Optional "key" option becomes the table's KEY clause.
      val key = parameters.getOrElse("key", "")
      val sb = new StringBuilder(BORDD.schemaString(data))
      if (key.length() > 0)
        sb.append(s", KEY ($key)")
      val schemaStr = sb.toString()
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"CREATE TABLE $table ($schemaStr)"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to create $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return null
      }
    }
    val schema = data.schema
    data.foreachPartition {iter =>
      // TODO: write to different partition (BO server) separately.
      BORDD.writeData(iter, urls(0), table, schema)
    }
    val properties = new Properties()
    parameters.foreach(kv => properties.setProperty(kv._1, kv._2))
    BORelation(url, table, schema, properties)(sqlContext)
  }
}
/**
 * Spark SQL relation backed by one or more BigObject servers.
 *
 * @param url comma-separated list of BO server URLs
 * @param table BO table name
 * @param sch optional pre-resolved schema; resolved via DESC when null
 * @param properties raw data-source options (e.g. fetchSize)
 */
case class BORelation(
    url: String,
    table: String,
    sch: StructType = null,
    properties: Properties = new Properties())(@transient val sqlContext: SQLContext)
  extends BaseRelation
  with PrunedFilteredScan
  with InsertableRelation
  with Logging {

  override val needConversion: Boolean = false
  private val urls = url.split(",")

  // Validates constructor arguments eagerly (called below).
  private def checkParams() = {
    if (urls.length == 0 || urls(0).length == 0 || table.length == 0)
      // Fix: corrected typo "speciffied" in the exception message.
      throw new IllegalArgumentException("No BO server is specified.")
  }
  checkParams()

  // Use the supplied schema when given, otherwise resolve it from the first server.
  override val schema: StructType = {
    if (sch != null)
      sch
    else
      BORDD.resolveTable(urls(0), table)
  }

  /** Builds the pruned/filtered scan RDD over all configured servers. */
  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    logInfo(s"buildScan is called.")
    // Fix: corrected typo "colume" in the log message.
    requiredColumns.foreach(c => logInfo(s"required column: $c"))
    filters.foreach(f => logInfo(s"filter: $f"))
    BORDD.scanTable(
      sqlContext.sparkContext,
      schema,
      properties,
      table,
      requiredColumns,
      filters,
      BORDD.getPartition(urls, new Array[String](urls.length))).asInstanceOf[RDD[Row]]
  }

  // TODO: we should do just "INSERT INTO", not whole table??
  /** Writes the DataFrame into the BO table, optionally dropping/recreating it. */
  override def insert(data: DataFrame, overwrite: Boolean): Unit = {
    logInfo(s"insert is called. data: $data, overwrite: $overwrite.")
    val exist = BORDD.isTableExist(urls(0), table)
    if (exist && overwrite) {
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"DROP TABLE $table"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to delete existing $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return
      }
      // TODO: check status code
    }
    if (!exist || overwrite) {
      // TODO: add key
      val schString = BORDD.schemaString(data)
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"CREATE TABLE $table ($schString)"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to create $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return
      }
      // TODO: check status code
    }
    val sch = data.schema
    data.foreachPartition {iter =>
      BORDD.writeData(iter, urls(0), table, sch)
    }
  }
}
|
macrodatalab/spark-bo
|
examples/src/main/scala/com/bigobject/spark/examples/byBO/calculate-byBO.scala
|
<gh_stars>1-10
package com.bigobject.spark.examples.byBO
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions._
import com.bigobject.spark._
import com.bigobject.spark.examples.BoOption
object SumApp {
  /** Entry point: runs a SUM aggregation on the BO server and prints the result. */
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("Sum by BO")
    val sparkCtx = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sparkCtx)
    val opts = new BoOption(args)
    val boUrl = opts.getString("url")
    if (boUrl.isEmpty) {
      println("Missing BO url.")
      sys.exit(1)
    }
    BORDD
      .sql(sparkCtx, boUrl, "SELECT channel_name, sum(total_price) FROM sales GROUP BY channel_name")
      .show()
  }
}
object AvgApp {
  /** Entry point: runs an AVG aggregation on the BO server and prints the result. */
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("Avg by BO")
    val sparkCtx = new SparkContext(sparkConf)
    val sqlContext = new SQLContext(sparkCtx)
    val opts = new BoOption(args)
    val boUrl = opts.getString("url")
    if (boUrl.isEmpty) {
      println("Missing BO url.")
      sys.exit(1)
    }
    BORDD
      .sql(sparkCtx, boUrl, "SELECT channel_name, avg(total_price) FROM sales GROUP BY channel_name")
      .show()
  }
}
object JoinApp {
  /** Entry point: joins sales with Product on the BO server, with or without FIND. */
  def main(args: Array[String]) {
    val opts = new BoOption(args)
    val boUrl = opts.getString("url")
    val noFind = opts.getBoolean("no-find")
    if (boUrl.isEmpty) {
      println("Missing BO url.")
      sys.exit(1)
    }
    // The --no-find flag selects both the application name and the query shape.
    val appName =
      if (noFind) "Join by BO without FIND"
      else "Join by BO with FIND"
    val sparkCtx = new SparkContext(new SparkConf().setAppName(appName))
    val sqlContext = new SQLContext(sparkCtx)
    val joinQuery =
      if (noFind)
        "SELECT Product.id, Product.name, channel_name, sum(total_price) FROM sales GROUP BY Product.id, Product.name, channel_name ORDER BY sum(total_price) DESC"
      else
        "FIND TOP ALL Product.id, Product.name, channel_name, sum(total_price) FROM sales"
    BORDD.sql(sparkCtx, boUrl, joinQuery).show()
  }
}
|
macrodatalab/spark-bo
|
spark-1.4.1/bigobject/src/main/scala/com/bigobject/spark/BORDD.scala
|
/*
* Copyright 2015 bigobject.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bigobject.spark
import java.sql.{SQLException, Timestamp}
import java.util.Properties
import java.io.ByteArrayOutputStream
import scala.util.control._
import scala.collection.Map
import scala.collection.mutable.ArrayBuffer
import scala.collection.convert.Wrappers.{JListWrapper, JMapWrapper}
import org.apache.commons.lang3.StringUtils
import org.apache.spark.{Logging, Partition, SparkContext, TaskContext}
import org.apache.spark.rdd.RDD
import org.apache.spark.Logging
import org.apache.spark.sql.catalyst.expressions.{Row, SpecificMutableRow}
import org.apache.spark.sql.catalyst.util.DateUtils
import org.apache.spark.sql.types._
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.sql.sources._
import org.apache.http._
import org.apache.http.client._
import org.apache.http.client.methods.{HttpGet, HttpPost, CloseableHttpResponse}
import org.apache.http.impl.client.DefaultHttpClient
import org.apache.http.entity.StringEntity
import org.apache.http.util.EntityUtils
import com.fasterxml.jackson.core.{JsonFactory,JsonGenerator,JsonParser,JsonToken}
import com.fasterxml.jackson.databind.ObjectMapper
// One Spark partition per BO server URL. `preSQL`, when non-null, overrides
// the SQL statement executed for this partition (see BORDD.compute).
case class BOPartition(url: String, idx: Int, preSQL: String = null) extends Partition {
  override def index: Int = idx
}
/* BOIface class
* . wrappers BO RESTful API.
*/
// Thin wrapper around the BO RESTful API: POSTs the statements (or GETs the
// command), then parses the concatenated-JSON response into `content`/`error`.
// `status` is the first non-zero BO status seen; `httpStatus` the HTTP code.
class BOIface(url: String, cmd: String, method: String, stmts: Array[String], async: Boolean = false) extends Logging {
  private val jFactory = new JsonFactory()
  private val mapper = new ObjectMapper()
  var error = new ArrayBuffer[String]
  var content = new ArrayBuffer[Map[String, Any]]
  var httpStatus = 0
  // The request is executed eagerly at construction time.
  val status = exec()

  // Recursively converts Jackson's java.util collections into Scala ones.
  private def convert(obj: Any): Any = obj match {
    case map: java.util.Map[_, _] =>
      JMapWrapper(map).mapValues(convert).map(identity)
    case list: java.util.List[_] =>
      JListWrapper(list).map(convert)
    case atom => atom
  }

  // Sends the request and parses the response; returns the BO status code.
  private def exec() : Int = {
    logInfo(s"exec(url: $url, cmd: $cmd)")
    val client = new DefaultHttpClient
    var httpRsp = null.asInstanceOf[CloseableHttpResponse]
    if (method.equalsIgnoreCase("post")) {
      val post = new HttpPost(s"$url/$cmd")
      // Serialise the statements as a stream of {"Stmt": ...} JSON objects.
      val qStream = new ByteArrayOutputStream(1024)
      val jG = jFactory.createJsonGenerator(qStream)
      val loop = new Breaks
      loop.breakable {
        for (sql <- stmts) {
          // An empty statement terminates the batch.
          if (sql.length() == 0)
            loop.break
          jG.writeStartObject()
          jG.writeStringField("Stmt", sql)
          if (async) {
            // Ask the server for a result handle instead of inline results.
            jG.writeObjectFieldStart("Opts")
            jG.writeBooleanField("Handle", true)
            jG.writeEndObject()
          }
          jG.writeEndObject()
        }
      }
      jG.close()
      val qStr = qStream.toString()
      // Log at most the first 512 characters of the request body.
      var len = qStr.length
      if (len > 512) len = 512
      logInfo("sql: " + qStr.substring(0, len))
      val se = new StringEntity(qStr, "utf-8")
      post.setEntity(se)
      httpRsp = client.execute(post)
    }
    else
    {
      val get = new HttpGet(s"$url/$cmd")
      httpRsp = client.execute(get)
    }
    httpStatus = httpRsp.getStatusLine().getStatusCode()
    val rspStr = EntityUtils.toString(httpRsp.getEntity(), "utf-8")
    client.close()
    var boStatus = 0
    var offset = 0
    var slen = 0
    // The response is a sequence of concatenated JSON documents; walk them by
    // tracking the parser's character offset and re-parsing each slice.
    val jP = jFactory.createParser(rspStr)
    while (jP.nextToken() != null) {
      jP.skipChildren()
      slen = jP.getCurrentLocation().getCharOffset().asInstanceOf[Int]
      val data = convert(mapper.readValue(rspStr.substring(offset, slen), classOf[Object])).asInstanceOf[Map[String, Any]]
      if (data.contains("Content"))
        content += data("Content").asInstanceOf[Map[String, Any]]
      if (data.contains("Err"))
        error += data("Err").asInstanceOf[String]
      if (data.contains("Status"))
        // Only the first non-zero status is retained.
        if (boStatus == 0)
          boStatus = data("Status").asInstanceOf[Int]
      offset = slen + 1
    }
    return boStatus
  }
}
/* BORDD singleton
* . implements some utilities for BORDD class.
* . implements BO specific syntax, e.g., FIND.
*/
object BORDD extends Logging {
  // BO type name -> Catalyst type. Both DATETIME32 and DATETIME64 map to TimestampType.
  val bo2catalyst = Map("STRING" -> StringType,
    "BYTE" -> ByteType,
    "INT8" -> ByteType,
    "INT16" -> ShortType,
    "INT32" -> IntegerType,
    "INT64" -> LongType,
    "FLOAT" -> FloatType,
    "DOUBLE" -> DoubleType,
    "DATE32" -> DateType,
    "DATETIME32" -> TimestampType,
    "DATETIME64" -> TimestampType)
  // Catalyst type -> BO type name. BooleanType is stored as INT8 on the BO side.
  val catalyst2bo: Map[DataType, String] = Map(
    StringType -> "STRING",
    ByteType -> "BYTE",
    ShortType -> "INT16",
    IntegerType -> "INT32",
    LongType -> "INT64",
    FloatType -> "FLOAT",
    DoubleType -> "DOUBLE",
    BooleanType -> "INT8",
    DateType -> "DATE32",
    TimestampType -> "DATETIME64")

  /**
   * Builds one BOPartition per non-empty BO server URL; sqlStr supplies the
   * per-partition statement. Returns null when the arrays differ in length.
   */
  def getPartition(urls: Array[String], sqlStr: Array[String]) : Array[Partition] = {
    if (urls.length != sqlStr.length) {
      logError(s"Number of url (${urls.length}) is different from number of SQL statement (${sqlStr.length}).")
      return null.asInstanceOf[Array[Partition]]
    }
    // Fix: removed the dead `var url = null` that was only shadowed by the
    // for-comprehension binding; `parts` is never reassigned, so val.
    val parts = new ArrayBuffer[Partition]()
    var i = 0
    for (url <- urls) {
      if (url.length > 0)
      {
        // NOTE(review): sqlStr is indexed by the count of non-empty urls seen
        // so far, not by the url's position in `urls`; the two drift apart if
        // an empty url precedes a non-empty one — confirm this is intended.
        parts += BOPartition(url, i, sqlStr(i))
        i += 1
      }
    }
    parts.toArray
  }

  /** True when `tblName` exists on the BO server (i.e. DESC succeeds). */
  def isTableExist(url : String, tblName : String) : Boolean = {
    val boApi = new BOIface(url, "cmd", "post", Array(s"DESC $tblName"))
    boApi.httpStatus == 200 && boApi.status == 0
  }

  /**
   * Runs `sqlString` on every BO server in the comma-separated `url` list.
   * @return 0 on success, or the first failing BO status code.
   */
  def command(
      sc: SparkContext,
      url: String,
      sqlString: String,
      sqlCtx: SQLContext = null) : Int = {
    // NOTE(review): sqlSc is never read after this point; kept only for the
    // SQLContext construction — confirm whether it can be dropped.
    var sqlSc = sqlCtx
    if (sqlCtx == null)
      sqlSc = new SQLContext(sc)
    val urls = url.split(",")
    urls.foreach{u =>
      val boApi = new BOIface(u, "cmd", "post", Array(sqlString))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Run command failed. (Http status code: ${boApi.httpStatus}, BO status code: ${boApi.status})")
        return boApi.status
      }
    }
    0
  }

  /**
   * Runs `sqlString` on the BO servers and returns the result as a DataFrame,
   * or null on any failure. The first server executes the statement
   * asynchronously and its result handle is SCANned; the remaining servers
   * re-run the raw statement.
   */
  def sql(
      sc: SparkContext,
      url: String,
      sqlString: String,
      sqlCtx: SQLContext = null) : DataFrame = {
    var sqlSc = sqlCtx
    if (sqlCtx == null)
      sqlSc = new SQLContext(sc)
    val urls = url.split(",")
    val boApi = new BOIface(urls(0), "cmd", "post", Array(sqlString), true)
    if (boApi.httpStatus != 200 || boApi.status != 0) {
      logError(s"Failed to resolve sql statement. (Http status code: ${boApi.httpStatus}, BO status code: ${boApi.status})")
      return null.asInstanceOf[DataFrame]
    }
    if (!boApi.content(0).contains("res")) {
      logError("Invalid BO output: no BO handle.")
      return null.asInstanceOf[DataFrame]
    }
    val handle = boApi.content(0)("res").asInstanceOf[String]
    if (handle.length == 0) {
      logError("Invalid BO output: BO handle is empty.")
      return null.asInstanceOf[DataFrame]
    }
    // The handle's schema describes the statement's result set.
    val schema = resolveSchema(urls(0), s"HDESC $handle")
    if (schema == null)
      return null.asInstanceOf[DataFrame]
    // Fix: `sqls` is never reassigned, so val (its elements are still mutated).
    val sqls = new Array[String](urls.length)
    sqls(0) = s"SCAN $handle"
    for (i <- 1 to sqls.length - 1)
      sqls(i) = sqlString
    var allRdd = null.asInstanceOf[RDD[Row]]
    val partitions = getPartition(urls, sqls)
    for (part <- partitions) {
      val boPart = part.asInstanceOf[BOPartition]
      val boRdd = new BORDD(
        sc,
        schema,
        null,
        Array[String](null),
        Array[Filter](null),
        Array[Partition](BOPartition(boPart.url, 0, boPart.preSQL)),
        null)
      if (allRdd == null)
        allRdd = boRdd
      else
        allRdd = allRdd.union(boRdd)
    }
    sqlSc.createDataFrame(allRdd, schema)
  }

  /** Batches the iterator's rows into a single INSERT INTO statement and sends it. */
  def writeData(
      iter: Iterator[Row],
      url: String,
      table: String,
      schema: StructType) = {
    val sb = new StringBuilder(s"INSERT INTO $table VALUES ")
    var rcount = 0
    while (iter.hasNext) {
      val row = iter.next()
      sb.append(insertStmt(row, schema)).append(",")
      rcount += 1
    }
    if (rcount > 0) {
      // take(length - 1) drops the trailing comma.
      val boApi = new BOIface(url, "cmd", "post", Array(sb.take(sb.length - 1).toString()))
      if (boApi.httpStatus != 200 || boApi.status != 0)
        logError(s"Failed to insert data into $table table. (Http status code: ${boApi.httpStatus}, BO status code: ${boApi.status})")
      else
        logInfo(s"insert $rcount rows into $table.")
    }
  }

  // Maps a BO type name to its Catalyst type, or throws SQLException.
  private def getCatalystType(colType: String): DataType = {
    if (!bo2catalyst.contains(colType))
      throw new SQLException("Unsupported type: " + colType)
    bo2catalyst(colType)
  }

  // Maps a Catalyst type to its BO type name, or throws SQLException.
  private def getBOType(colType: DataType): String = {
    if (!catalyst2bo.contains(colType))
      throw new SQLException("Unsupported type: " + colType)
    catalyst2bo(colType)
  }

  /** Renders one row as a parenthesised VALUES tuple; strings are single-quoted. */
  def insertStmt(row: Row, schema: StructType): String = {
    val sb = new StringBuilder("(")
    val numFields = schema.fields.length
    var i = 0
    while (i < numFields) {
      if (row.isNullAt(i)) {
        sb.append("NULL")
      }
      else {
        schema.fields(i).dataType match {
          case StringType => sb.append("'").append(StringUtils.replace(row.getString(i), "'", "\\'")).append("'")
          case TimestampType => sb.append("'").append(row.getAs[java.sql.Timestamp](i)).append("'")
          case DateType => sb.append("'").append(row.getAs[java.sql.Date](i)).append("'")
          case IntegerType => sb.append(row.getInt(i))
          case LongType => sb.append(row.getLong(i))
          case DoubleType => sb.append(row.getDouble(i))
          case FloatType => sb.append(row.getFloat(i))
          case ShortType => sb.append(row.getShort(i))
          case ByteType => sb.append(row.getByte(i))
          case BooleanType => sb.append(row.getBoolean(i))
          case _ => throw new IllegalArgumentException(
            s"Can't translate non-null value for field $i")
        }
      }
      i += 1
      if (i < numFields)
        sb.append(",")
    }
    sb.append(")").toString()
  }

  /** Renders the rows as CSV text (null fields become empty, no quoting of commas). */
  def makeCsv(iterator: Iterator[Row], schema: StructType): String = {
    val sb = new StringBuilder()
    while (iterator.hasNext) {
      val row = iterator.next()
      val numFields = schema.fields.length
      var i = 0
      while (i < numFields) {
        if (!row.isNullAt(i)) {
          schema.fields(i).dataType match {
            case StringType => sb.append(StringUtils.replace(row.getString(i), "'", "\\'"))
            case TimestampType => sb.append(row.getAs[java.sql.Timestamp](i))
            case DateType => sb.append(row.getAs[java.sql.Date](i))
            case IntegerType => sb.append(row.getInt(i))
            case LongType => sb.append(row.getLong(i))
            case DoubleType => sb.append(row.getDouble(i))
            case FloatType => sb.append(row.getFloat(i))
            case ShortType => sb.append(row.getShort(i))
            case ByteType => sb.append(row.getByte(i))
            case BooleanType => sb.append(row.getBoolean(i))
            case _ => throw new IllegalArgumentException(
              s"Can't translate non-null value for field $i")
          }
        }
        i += 1
        if (i < numFields)
          sb.append(",")
      }
      sb.append("\n")
    }
    // Drop the trailing newline.
    sb.take(sb.length - 1).toString()
  }

  /** Builds the column-definition part of a CREATE TABLE from a DataFrame's schema. */
  def schemaString(df: DataFrame): String = {
    val sb = new StringBuilder()
    val keys = new StringBuilder()
    df.schema.fields foreach { field => {
      val colName = field.name
      val colType = getBOType(field.dataType)
      // All BO data are not NULL
      // val nullable = if (field.nullable) "" else "NOT NULL"
      val nullable = ""
      sb.append(s", $colName $colType $nullable")
      // Columns flagged with "key" metadata become the table's KEY clause.
      if (field.metadata.contains("key") && field.metadata.getBoolean("key"))
        keys.append(s", $colName")
    }}
    if (keys.length >= 2) {
      val keyStr = keys.substring(2)
      sb.append(s", KEY($keyStr)")
    }
    // substring(2) strips the leading ", " separator.
    if (sb.length < 2) "" else sb.substring(2)
  }

  // Runs a DESC/HDESC command and converts the reply into a StructType, or null.
  private def resolveSchema(url: String, schemaCmd: String): StructType = {
    val boApi = new BOIface(url, "cmd", "post", Array(schemaCmd))
    if (boApi.httpStatus != 200 || boApi.status != 0) {
      logError(s"Failed to resolve schema. (Http status code: ${boApi.httpStatus}, BO status code: ${boApi.status})")
      return null.asInstanceOf[StructType]
    }
    if (!boApi.content(0).contains("schema")) {
      logError("Invalid BO output: no BO table schema.")
      return null.asInstanceOf[StructType]
    }
    val schMap = boApi.content(0)("schema").asInstanceOf[Map[String, Any]]
    if (!schMap.contains("attr")) {
      logError("Invalid BO output: no BO table schema attribute.")
      return null.asInstanceOf[StructType]
    }
    var keys = null.asInstanceOf[ArrayBuffer[String]]
    if (schMap.contains("key"))
      keys = schMap("key").asInstanceOf[ArrayBuffer[String]]
    val columns = schMap("attr").asInstanceOf[ArrayBuffer[Map[String, String]]]
    val fields = new Array[StructField](columns.size)
    // Fix: removed the dead `var i = 0` that was only shadowed by the loop below.
    for (i <- 0 until columns.size) {
      val col = columns(i)
      val colName = col("name")
      val metadata = new MetadataBuilder().putString("name", colName)
      if (keys != null && keys.contains(colName))
        metadata.putBoolean("key", true)
      // BO columns are always non-nullable.
      fields(i) = StructField(colName, getCatalystType(col("type")), false, metadata.build());
    }
    return new StructType(fields)
  }

  /** Resolves the schema of an existing BO table via DESC. */
  def resolveTable(url: String, table: String): StructType = {
    return resolveSchema(url, s"DESC $table")
  }

  // Keeps only the requested columns, preserving their original field metadata.
  private def pruneSchema(schema: StructType, columns: Array[String]): StructType = {
    val fieldMap = Map(schema.fields map { x => x.metadata.getString("name") -> x }: _*)
    new StructType(columns map { name => fieldMap(name) })
  }

  /** Builds the BORDD that scans `fqTable` with pruned columns and pushed-down filters. */
  def scanTable(
      sc: SparkContext,
      schema: StructType,
      properties: Properties,
      fqTable: String,
      requiredColumns: Array[String],
      filters: Array[Filter],
      parts: Array[Partition]): RDD[Row] = {
    val sche = pruneSchema(schema, requiredColumns)
    logInfo(s"scanTable is called. pruned schema: $sche.")
    new BORDD(
      sc,
      sche,
      fqTable,
      requiredColumns,
      filters,
      parts,
      properties)
  }
}
/* BORDD class
* . implements RDD[Row]
*/
class BORDD(
sc: SparkContext,
schema: StructType,
fqTable: String,
columns: Array[String],
filters: Array[Filter],
partitions: Array[Partition],
properties: Properties)
extends RDD[Row](sc, Nil)
with Logging {
override def getPartitions: Array[Partition] = partitions
private val columnList: String = {
val sb = new StringBuilder()
columns.foreach(x => sb.append(",").append(x))
if (sb.length == 0) "*" else sb.substring(1)
}
private def compileValue(value: Any): Any = value match {
case stringValue: UTF8String => s"'${escapeSql(stringValue.toString)}'"
case stringValue: String => s"'${escapeSql(stringValue.toString)}'"
case _ => value
}
private def escapeSql(value: String): String =
if (value == null) null else StringUtils.replace(value, "'", "\\'")
private def compileFilter(f: Filter): String = f match {
case EqualTo(attr, value) => s"$attr = ${compileValue(value)}"
case LessThan(attr, value) => s"$attr < ${compileValue(value)}"
case GreaterThan(attr, value) => s"$attr > ${compileValue(value)}"
case LessThanOrEqual(attr, value) => s"$attr <= ${compileValue(value)}"
case GreaterThanOrEqual(attr, value) => s"$attr >= ${compileValue(value)}"
case StringStartsWith(attr, value) => s"$attr LIKE ${compileValue(value + '%')}"
case StringEndsWith(attr, value) => s"$attr LIKE ${compileValue('%' + value)}"
case StringContains(attr, value) => s"$attr LIKE ${compileValue('%' + value + '%')}"
case In(attr, value) =>
val sb = new StringBuilder("")
value.foreach(x => sb.append(s"${compileValue(x)},"))
s"$attr IN (${sb.substring(0, sb.length - 1)})"
case Not(child) => s"NOT (${compileFilter(child)})"
case And(left, right) => s"${compileFilter(left)} AND ${compileFilter(right)}"
case Or(left, right) => s"${compileFilter(left)} OR ${compileFilter(right)}"
case IsNull(attr) => s"IS NULL"
case _ => null // BO types are not NULL, don't push down "NOT NULL"
}
private val filterWhereClause: String = {
val filterStrings = filters map compileFilter filter (_ != null)
if (filterStrings.size > 0) {
val sb = new StringBuilder("WHERE ")
filterStrings.foreach(x => sb.append(x).append(" AND "))
sb.substring(0, sb.length - 5)
} else ""
}
private def getSqlString(): String = {
var sqlText = s"SELECT $columnList FROM $fqTable $filterWhereClause"
val fetchSize = properties.getProperty("fetchSize", "0").toInt
if (fetchSize > 0) {
sqlText += s" LIMIT $fetchSize"
}
sqlText
}
// Executes one partition of the scan: posts the SQL to the BO server over
// HTTP and exposes the result as an Iterator[Row], converting every raw
// value to the Catalyst type declared in `schema`.
override def compute(thePart: Partition, context: TaskContext): Iterator[Row] = new Iterator[Row]
{
// Iterator state: `finished` latches once the stream is exhausted or the
// request failed; `gotNext`/`nextValue` implement one-element lookahead.
var finished = false
var gotNext = false
var nextValue: Row = null
// TODO: anything to clean up?
// context.addTaskCompletionListener{context => onTaskComplete()}
val part = thePart.asInstanceOf[BOPartition]
// A single mutable row is reused for every element returned by next();
// callers must copy it if they need to retain rows across iterations.
val mutableRow = new SpecificMutableRow(schema.fields.map(x => x.dataType))
var iter = null.asInstanceOf[Iterator[ArrayBuffer[Any]]]
// Prefer the partition's precomputed SQL; otherwise build it from schema,
// pushed-down filters and the fetchSize property.
var sqlStr = part.preSQL
if (sqlStr == null)
sqlStr = getSqlString()
val boApi = new BOIface(part.url, "cmd", "post", Array(sqlStr))
if (boApi.httpStatus != 200 || boApi.status != 0) {
// Request failed — log and mark the iterator empty rather than throwing.
logError(s"Failed to get $fqTable table. (Http status code: ${boApi.httpStatus}, BO status code: ${boApi.status})")
finished = true
}
else {
var rows = new ArrayBuffer[ArrayBuffer[Any]]
// NOTE(review): this local is immediately shadowed by the `content` bound
// in the for-comprehension below and is never read — appears vestigial.
var content = null.asInstanceOf[Map[String, ArrayBuffer[Any]]]
// Accumulate row batches from every response chunk that carries "content".
for (content <- boApi.content
if content.contains("content")) {
rows ++= content("content").asInstanceOf[ArrayBuffer[ArrayBuffer[Any]]]
}
if (rows.isEmpty) {
finished = true
}
else {
iter = rows.toIterator
}
}
// Copies the next raw row field-by-field into `mutableRow`, coercing each
// value to the Catalyst type from `schema`. Returns null (and latches
// `finished`) once the underlying iterator is exhausted.
def getNext(): Row = {
if (!iter.hasNext) {
finished = true
null.asInstanceOf[Row]
}
else {
val row = iter.next()
var i = 0
val fields = schema.fields
for (i <- 0 until fields.size) {
fields(i).dataType match {
// Dates/timestamps arrive as strings and may be null.
case DateType =>
val dateVal = row(i).asInstanceOf[String]
if (dateVal != null) {
mutableRow.update(i, DateUtils.millisToDays(DateUtils.stringToTime(dateVal).getTime()))
} else {
mutableRow.update(i, null)
}
case TimestampType =>
val dateVal = row(i).asInstanceOf[String]
if (dateVal != null) {
mutableRow.update(i, Timestamp.valueOf(dateVal))
} else {
mutableRow.update(i, null)
}
// The server may send narrower numeric types (e.g. Integer) than the
// schema expects, so widen/narrow explicitly per declared type.
case DoubleType =>
val value = row(i)
value match {
case value: java.lang.Integer => mutableRow.setDouble(i, value.asInstanceOf[Int].toDouble)
case _ => mutableRow.setDouble(i, value.asInstanceOf[Double])
}
case FloatType =>
val value = row(i)
value match {
case value: java.lang.Integer => mutableRow.setFloat(i, value.asInstanceOf[Int].toFloat)
case value: java.lang.Double => mutableRow.setFloat(i, value.asInstanceOf[Double].toFloat)
case _ => mutableRow.setFloat(i, value.asInstanceOf[Float])
}
case ByteType =>
val value = row(i)
value match {
case value: java.lang.Integer => mutableRow.setByte(i, value.asInstanceOf[Int].toByte)
case _ => mutableRow.setByte(i, value.asInstanceOf[Byte])
}
case ShortType =>
val value = row(i)
value match {
case value: java.lang.Integer => mutableRow.setShort(i, value.asInstanceOf[Int].toShort)
case _ => mutableRow.setShort(i, value.asInstanceOf[Short])
}
case IntegerType => mutableRow.setInt(i, row(i).asInstanceOf[Int])
case LongType =>
val value = row(i)
value match {
case value: java.lang.Integer => mutableRow.setLong(i, value.asInstanceOf[Int].toLong)
case _ => mutableRow.setLong(i, value.asInstanceOf[Long])
}
// TODO: use getBytes for better performance, if the encoding is UTF-8
case StringType => mutableRow.setString(i, row(i).asInstanceOf[String])
}
}
mutableRow
}
}
// Lookahead hasNext: fetches the next row at most once per next() call.
override def hasNext: Boolean = {
if (!finished) {
if (!gotNext) {
nextValue = getNext()
gotNext = true
}
}
!finished
}
override def next(): Row = {
if (!hasNext) {
throw new NoSuchElementException("End of stream")
}
gotNext = false
nextValue
}
}
}
|
macrodatalab/spark-bo
|
examples/src/main/scala/com/bigobject/spark/examples/byDF/calculate-byDF.scala
|
<filename>examples/src/main/scala/com/bigobject/spark/examples/byDF/calculate-byDF.scala
package com.bigobject.spark.examples.byDF
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.functions._
import com.bigobject.spark._
import com.bigobject.spark.examples.BoOption
// Sums total_price per channel over the BO-backed sales table, using either
// the DataFrame API or an equivalent SQL statement (--use-df-api switch).
object SumApp {
  def main(args: Array[String]) {
    val option = new BoOption(args)
    val url = option.getString("url")
    val isCache = option.getBoolean("cache")
    val isDFAPI = option.getBoolean("use-df-api")
    val appName = if (isDFAPI) "Sum by DataFrame API" else "Sum by Spark SQL"
    val conf = new SparkConf().setAppName(appName)
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    if (url.length == 0) {
      println("Missing BO url.")
      sys.exit(1)
    }
    val sales = sqlContext.load("com.bigobject.spark", Map("url" -> url, "dbtable" -> "sales_tbl"))
    sales.registerTempTable("sales")
    if (isCache) {
      // Cache and force materialization so the query below hits memory.
      sales.cache()
      sales.count()
    }
    // Both branches compute the same aggregation; only the API differs.
    val df =
      if (isDFAPI) sales.groupBy(sales("channel_name")).agg(sum("total_price"))
      else sqlContext.sql("SELECT channel_name, sum(total_price) FROM sales GROUP BY channel_name")
    df.show()
  }
}
// Averages total_price per channel over the BO-backed sales table, using
// either the DataFrame API or an equivalent SQL statement (--use-df-api).
object AvgApp {
  def main(args: Array[String]) {
    val option = new BoOption(args)
    val url = option.getString("url")
    val isCache = option.getBoolean("cache")
    val isDFAPI = option.getBoolean("use-df-api")
    val appName = if (isDFAPI) "Avg by DataFrame API" else "Avg by Spark SQL"
    val conf = new SparkConf().setAppName(appName)
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    if (url.length == 0) {
      println("Missing BO url.")
      sys.exit(1)
    }
    val sales = sqlContext.load("com.bigobject.spark", Map("url" -> url, "dbtable" -> "sales_tbl"))
    sales.registerTempTable("sales")
    if (isCache) {
      // Cache and force materialization so the query below hits memory.
      sales.cache()
      sales.count()
    }
    // Both branches compute the same aggregation; only the API differs.
    val df =
      if (isDFAPI) sales.groupBy(sales("channel_name")).agg(avg("total_price"))
      else sqlContext.sql("SELECT channel_name, avg(total_price) FROM sales GROUP BY channel_name")
    df.show()
  }
}
// Joins sales with Product and reports total sales per product and channel,
// sorted descending, via DataFrame API or SQL depending on --use-df-api.
object JoinApp {
  def main(args: Array[String]) {
    val option = new BoOption(args)
    val url = option.getString("url")
    val isCache = option.getBoolean("cache")
    val isDFAPI = option.getBoolean("use-df-api")
    val appName = if (isDFAPI) "Join by DataFrame API" else "Join by Spark SQL"
    val conf = new SparkConf().setAppName(appName)
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
    if (url.length == 0) {
      println("Missing BO url.")
      sys.exit(1)
    }
    val sales = sqlContext.load("com.bigobject.spark", Map("url" -> url, "dbtable" -> "sales_tbl"))
    val prod = sqlContext.load("com.bigobject.spark", Map("url" -> url, "dbtable" -> "Product"))
    sales.registerTempTable("sales")
    prod.registerTempTable("Product")
    if (isCache) {
      // Cache both sides of the join and force materialization up front.
      sales.cache()
      prod.cache()
      sales.count()
      prod.count()
    }
    // Both branches compute the same join/aggregation; only the API differs.
    val df =
      if (isDFAPI)
        sales.join(prod, sales("ProductID") === prod("id")).groupBy(prod("id"), prod("name"), sales("channel_name")).agg(sum("total_price").alias("total_sale")).orderBy(desc("total_sale"))
      else
        sqlContext.sql("SELECT Product.id, Product.name, sales.channel_name, sum(total_price) AS total_sale FROM Product, sales WHERE Product.id = sales.ProductID GROUP BY Product.id, Product.name, sales.channel_name ORDER BY total_sale DESC")
    df.show()
  }
}
|
scala-steward/caliban-deriving
|
caliban-deriving/src/main/scala-3/caliban/deriving/package.scala
|
package caliban.deriving
import caliban.parsing.adt.Directive
import caliban.schema.Schema
import caliban.schema.Annotations.GQLName
import caliban.schema.Annotations.GQLDescription
import caliban.schema.Annotations.GQLInputName
import caliban.schema.Annotations.GQLDeprecated
import caliban.schema.Annotations.GQLDirective
import scala.quoted.*
import caliban.schema.Types
import caliban.deriving.annotations.GQLExclude
import caliban.schema.Step.ObjectStep
import caliban.schema.Step
import caliban.schema.Step.FunctionStep
import caliban.InputValue
import caliban.schema.ArgBuilder
import caliban.CalibanError.ExecutionError
import caliban.schema.Step.QueryStep
import zio.query.ZQuery
import caliban.wrappers.ApolloTracing.Execution
import java.text.DateFormat.Field
// Public entry point: derives a Caliban Schema[R, T] at compile time by
// expanding the macro implementation below.
inline def deriveSchemaInstance[R, T]: Schema[R, T] =
${ deriveSchemaInstanceImpl[R, T] }
private def deriveSchemaInstanceImpl[R: Type, T: Type](using Quotes): Expr[Schema[R, T]] = {
import quotes.reflect.*
// GraphQL metadata gathered from GQL* annotations on one symbol, already
// lifted into Exprs so it can be spliced directly into the generated Schema.
case class GraphQLInfo(
name: Expr[String],
inputName: Expr[String],
description: Expr[Option[String]],
deprecationReason: Expr[Option[String]],
directives: Expr[List[caliban.parsing.adt.Directive]]
) {
// Deprecated iff a @GQLDeprecated reason was found.
def isDeprecated: Expr[Boolean] = '{ ${ deprecationReason }.isDefined }
// Directive list wrapped in Option, with an empty list collapsed to None.
def directivesOpt: Expr[Option[List[caliban.parsing.adt.Directive]]] = '{ Some($directives).filterNot(_.isEmpty) }
}
// One schema-field candidate: the member symbol, the matching primary
// constructor parameter if any (annotations may live there), and its type.
case class Field(field: Symbol, constructorParameter: Option[Symbol], typ: TypeRepr)
// Looks up annotation `Annotation` on `symbol`, falling back to `altSymbol`
// (e.g. the constructor parameter backing a case-class field).
def extractAnnotation[Annotation: Type](symbol: Symbol, altSymbol: Option[Symbol]): Option[Term] =
symbol
.getAnnotation(TypeRepr.of[Annotation].typeSymbol)
.orElse(altSymbol.flatMap(_.getAnnotation(TypeRepr.of[Annotation].typeSymbol)))
// Extracts the single string-literal argument of annotations shaped like
// GQLName("..."); any other argument shape yields None.
def extractStringAnnotation[Annotation: Type](symbol: Symbol, altSymbol: Option[Symbol]): Option[Expr[String]] =
extractAnnotation[Annotation](symbol, altSymbol).flatMap {
case Apply(_, List(Literal(StringConstant(name)))) => Some(Expr(name))
case _ => None
}
// Collects all GQL* annotation data for `symbol` (falling back to
// `altSymbol` for name/description/etc.) into a GraphQLInfo of Exprs.
def extractInfo(symbol: Symbol, altSymbol: Option[Symbol]): GraphQLInfo = {
val nameAnnotation = extractStringAnnotation[GQLName](symbol, altSymbol)
val inputNameAnnotation = extractStringAnnotation[GQLInputName](symbol, altSymbol)
val descriptionAnnotation = extractStringAnnotation[GQLDescription](symbol, altSymbol)
val deprecatedAnnotation = extractStringAnnotation[GQLDeprecated](symbol, altSymbol)
// @GQLDirective carries an arbitrary Directive expression, not a string
// literal, so it is handled separately from the annotations above.
// NOTE(review): only annotations on `symbol` itself are scanned here —
// altSymbol is not consulted for directives.
val directiveAnnotations: Seq[Expr[Directive]] = symbol.annotations.filter { annotation =>
annotation.tpe =:= TypeRepr.of[GQLDirective]
}.flatMap {
case Apply(_, List(directive)) => Some(directive)
case _ => None
}
.map(_.asExprOf[Directive])
val name = nameAnnotation.getOrElse(Expr(symbol.name)) // TODO: append type parameter names
// Input name defaults to Caliban's standard "...Input" customization.
val inputName = inputNameAnnotation.getOrElse('{ caliban.schema.Schema.customizeInputTypeName($name) })
val description: Expr[Option[String]] = descriptionAnnotation match {
case Some(d) => '{ Some($d) }
case None => '{ None }
}
val deprecationReason: Expr[Option[String]] = deprecatedAnnotation match {
case Some(d) => '{ Some($d) }
case None => '{ None }
}
val directives = '{ List(${ Varargs(directiveAnnotations) }: _*) }
GraphQLInfo(
name,
inputName,
description,
deprecationReason,
directives
)
}
// A member annotated with @GQLExclude is omitted from the derived schema.
def isExcluded(symbol: Symbol): Boolean =
symbol
.getAnnotation(TypeRepr.of[GQLExclude].typeSymbol)
.isDefined
// Builds the lazy __Type thunk for a field from its Schema instance:
// optional schemas stay nullable; everything else is wrapped in NonNull.
def makeCalibanType(schema: Expr[Schema[_, _]], isInput: Boolean): Expr[() => caliban.introspection.adt.__Type] =
'{ () =>
if (${ schema }.optional) {
${ schema }.toType_(${ Expr(isInput) })
} else {
Types.makeNonNull($schema.toType_(${ Expr(isInput) }))
}
}
// Summons the implicit Schema[e, f] for environment `envType` and field
// type `fieldType`; aborts macro expansion with an error when missing.
def summonSchema(envType: TypeRepr, fieldType: TypeRepr) =
(envType.asType match {
case '[e] =>
fieldType.asType match {
case '[f] => Expr.summon[Schema[e, f]]
}
}).getOrElse {
report.throwError(s"Cannot find an instance of Schema for $fieldType")
}
// Summons the implicit ArgBuilder[f] used to decode a resolver argument
// from an InputValue; aborts macro expansion with an error when missing.
def summonArgBuilder(fieldType: TypeRepr) =
(fieldType.asType match {
case '[f] => Expr.summon[ArgBuilder[f]]
}).getOrElse {
// Fixed copy-paste bug: this message previously claimed a missing
// *Schema* instance, sending users after the wrong type class.
report.throwError(s"Cannot find an instance of ArgBuilder for $fieldType")
}
// Derives the __InputValue (input-object field / resolver argument) for one
// Field, using its Schema instance to build the (non-null-wrapped) type.
def deriveParam(envType: TypeRepr, field: Field): Expr[caliban.introspection.adt.__InputValue] = {
val info = extractInfo(field.field, field.constructorParameter)
val schema = summonSchema(envType, field.typ)
'{
caliban.introspection.adt.__InputValue(
${ Expr(field.field.name) },
${ info.description },
${ makeCalibanType(schema, isInput = true) },
None, // TODO
${ info.directivesOpt }
)
}
}
// For a method type, strips the parameter list and keeps only the result
// type; plain value types pass through unchanged. The result is widened
// (singleton/literal types dropped) either way.
def getReturnType(fieldType: TypeRepr) = {
  val resultType = fieldType match {
    case MethodType(_, _, ret) => ret
    case other                 => other
  }
  resultType.widen
}
// Derives the __Field metadata for one output field: its return type's
// Schema supplies the GraphQL type, and the first (and only supported)
// parameter list becomes the field's arguments.
def deriveField(envType: TypeRepr, field: Field): Expr[caliban.introspection.adt.__Field] = {
val info = extractInfo(field.field, field.constructorParameter)
val returnType = getReturnType(field.typ)
val schema = summonSchema(envType, returnType)
val firstParamList = field.field.paramSymss.headOption // NOTE: multiple parameter lists are not supported
val args =
firstParamList.filterNot(_.isEmpty) match {
case Some(params) =>
// Non-empty list of parameters
params.map { param =>
val paramType = Ref(param).tpe.widen
deriveParam(envType, Field(param, None, paramType))
}
case None =>
// Parameterless or empty parameter list
Nil
}
'{
caliban.introspection.adt.__Field(
${ Expr(field.field.name) },
${ info.description },
List(${ Varargs(args) }: _*),
${ makeCalibanType(schema, isInput = false) },
${ info.isDeprecated },
${ info.deprecationReason },
${ info.directivesOpt }
)
}
}
// Derives the runtime resolution Step for one field of `resolveValue`.
// Parameterless members resolve directly through their Schema; members with
// parameters become a FunctionStep that decodes each argument via its
// ArgBuilder and chains the decodes with Either.flatMap before invoking the
// member. Only the first parameter list is supported.
def deriveStep(resolveValue: Expr[T], envType: TypeRepr, field: Field): Expr[Step[R]] = {
val info = extractInfo(field.field, field.constructorParameter)
val returnType = getReturnType(field.typ)
val schema = summonSchema(envType, returnType)
val firstParamList = field.field.paramSymss.headOption // NOTE: multiple parameter lists are not supported
returnType.asType match {
case '[t] =>
firstParamList.filterNot(_.isEmpty) match {
case Some(params) =>
// Non-empty list of parameters
// Builds Either[ExecutionError, t]: decode every argument from the
// incoming map, then call the member with the decoded values.
def buildArgs(args: Expr[Map[String, InputValue]])(using Quotes): Expr[Either[ExecutionError, t]] = {
// One ArgBuilder.build(...) term per parameter, keyed by param name.
// NOTE(review): args(name) will throw on a missing key — presumably
// callers always supply every argument; confirm upstream.
val terms = params.map { param =>
val paramType = param.tree.asInstanceOf[ValDef].tpt.tpe
val argBuilder = summonArgBuilder(paramType)
'{ ${ argBuilder }.build(${ args }(${ Expr(param.name) })) }.asTerm
}
// Recursively nests Either.flatMap lambdas: each step unwraps one
// decoded argument and passes it (via paramRefs) to the final call.
def unwrap(paramRefs: List[Term], remaining: List[(Symbol, Term)])(using
Quotes
): Expr[Either[ExecutionError, t]] =
remaining match {
case Nil =>
// All arguments decoded — invoke the member and wrap in Right.
val call = Apply(Select(resolveValue.asTerm, field.field), paramRefs.reverse).asExprOf[t]
'{ Right[ExecutionError, t]($call) }.asExprOf[Either[ExecutionError, t]]
case (headSym, headTerm) :: tail =>
val headType = headSym.tree.asInstanceOf[ValDef].tpt.tpe
headType.asType match {
case '[ht] =>
// Hand-built lambda symbol for the flatMap continuation,
// typed ht => Either[ExecutionError, t].
val anonFun = Symbol.newMethod(
Symbol.spliceOwner,
"anonFun",
MethodType(List(headSym.name))(
(_: MethodType) => List(headType),
(_: MethodType) => TypeRepr.of[Either[ExecutionError, t]]
)
)
val term = Select
.unique(headTerm, "flatMap")
.appliedToTypes(List(TypeRepr.of[ExecutionError], returnType))
.appliedTo(
Block(
List(
DefDef(
anonFun,
{ case List(List(paramTerm: Term)) =>
Some(
unwrap(paramTerm.asExprOf[ht].asTerm :: paramRefs, tail).asTerm
.changeOwner(anonFun)
)
}
)
),
Closure(Ref(anonFun), None)
)
)
.asExprOf[Either[ExecutionError, t]]
term
}
}
unwrap(Nil, params zip terms)
}
'{
FunctionStep { args =>
${ buildArgs('args) } match {
case Left(error) => QueryStep(ZQuery.fail(error))
case Right(value) => ${ schema.asExprOf[Schema[R, t]] }.resolve(value)
}
}
}
case None =>
// No arguments: distinguish `def f()` (explicit empty application)
// from a plain val/parameterless def (simple selection).
val invoke =
if (firstParamList == Some(Nil)) Apply(Select(resolveValue.asTerm, field.field), List()).asExprOf[t]
else {
Select(resolveValue.asTerm, field.field).asExprOf[t]
}
'{ ${ schema.asExprOf[Schema[R, t]] }.resolve(${ invoke }) }
}
}
}
// Pairs a field's name with its resolution Step as a quoted tuple, ready to
// be collected into the ObjectStep field map.
def deriveStepWithName(resolveValue: Expr[T], envType: TypeRepr, field: Field): Expr[(String, Step[R])] = {
  val nameExpr = Expr(field.field.name)
  val stepExpr = deriveStep(resolveValue, envType, field)
  '{ ($nameExpr, $stepExpr) }
}
// Builds the GraphQL input-object __Type for a product, one __InputValue
// per (case-class) field.
def deriveInput(envType: TypeRepr, info: GraphQLInfo, fields: List[Field]): Expr[caliban.introspection.adt.__Type] = {
val fieldExprs: List[Expr[caliban.introspection.adt.__InputValue]] =
fields.map { case field =>
deriveParam(envType, field)
}
'{
caliban.schema.Types.makeInputObject(
Some(${ info.inputName }),
${ info.description },
List(${ Varargs(fieldExprs) }: _*)
)
}
}
// Builds the GraphQL object __Type for a product, one __Field per member.
def deriveObject(
envType: TypeRepr,
info: GraphQLInfo,
fields: List[Field]
): Expr[caliban.introspection.adt.__Type] = {
val fieldExprs: List[Expr[caliban.introspection.adt.__Field]] =
fields.map { field =>
deriveField(envType, field)
}
'{
caliban.schema.Types.makeObject(
Some(${ info.name }),
${ info.description },
List(${ Varargs(fieldExprs) }: _*),
${ info.directives }
)
}
}
// Links a field to the primary-constructor parameter of the same name (if
// any) so annotations placed on the constructor parameter are visible.
def enrichWithConstructorField(product: Symbol, field: Field): Field =
field.copy(constructorParameter = product.primaryConstructor.paramSymss.flatten.find { param =>
param.name == field.field.name
})
// Fields used for the input-object form: the case-class fields only,
// minus @GQLExclude members, each linked to its constructor parameter.
def getInputFields(targetSym: Symbol, targetType: TypeRepr): List[Field] =
targetSym.caseFields
.filterNot(isExcluded)
.map(field => Field(field, None, targetType.memberType(field)))
.map(field => enrichWithConstructorField(targetSym, field))
// Collects every member to expose as a GraphQL output field, preserving
// source declaration order: the type's own declarations sort first (by
// declaration index), inherited members after them (index + 1000 offset),
// anything unordered last (Int.MaxValue).
def getAllFields(targetSym: Symbol, targetType: TypeRepr): List[Field] = {
val ordering = targetSym.declarations.zipWithIndex.toMap
val supertypeOrdering =
(targetSym.memberFields ++ targetSym.memberMethods)
.filterNot(ordering.contains)
.groupBy(_.owner)
.flatMap { case (owner, _) =>
owner.declarations.zipWithIndex.map { case (k, idx) => k -> (idx + 1000) }
}
.toMap
// Drop compiler-generated, non-public, and @GQLExclude members.
(targetSym.memberFields ++ targetSym.memberMethods).filter { field =>
!field.flags.is(Flags.Artifact) &&
!field.flags.is(Flags.Synthetic) &&
!field.flags.is(Flags.Protected) &&
!field.flags.is(Flags.Private)
}
.filterNot(isExcluded)
// Drop universal members inherited from Object/Any/Product/etc., which
// would otherwise pollute the schema.
.filterNot { member =>
member.owner.fullName == "java.lang.Object" ||
member.owner.fullName == "scala.Any" ||
member.owner.fullName == "scala.Product" ||
member.owner.fullName == "scala.reflect.Enum" ||
member.owner.fullName == "scala.Equals" ||
member.owner.fullName == "scala.deriving.Mirror$.Singleton"
}
.sortBy { member =>
ordering
.get(member)
.orElse(supertypeOrdering.get(member))
.getOrElse(Int.MaxValue)
}
.map(field => Field(field, None, targetType.memberType(field)))
.map(field => enrichWithConstructorField(targetSym, field))
}
// Derives the Schema for a product (case class / open trait): resolve()
// produces an ObjectStep keyed by field name; toType renders either the
// input-object form (case fields only) or the output-object form.
def deriveProduct(envType: TypeRepr, targetSym: Symbol, targetType: TypeRepr): Expr[Schema[R, T]] = {
val inputFields = getInputFields(targetSym, targetType)
val allFields = getAllFields(targetSym, targetType)
val info = extractInfo(targetSym, None)
'{
new Schema[R, T] {
def resolve(value: T): caliban.schema.Step[R] =
ObjectStep.apply[R](
${ info.name },
List(${ Varargs(allFields.map(field => deriveStepWithName('value, envType, field))) }: _*).toMap
)
protected[this] def toType(isInput: Boolean, isSubscription: Boolean): caliban.introspection.adt.__Type =
if (isInput) {
${ deriveInput(envType, info, inputFields) }
} else {
${ deriveObject(envType, info, allFields) }
}
}
}
}
// Recursively collects the concrete (non-trait) leaves of a sealed
// hierarchy; intermediate sealed traits are descended into, not returned.
def findLeafConstructors(of: Symbol): List[Symbol] =
of.children.flatMap { child =>
if (child.flags.is(Flags.Trait)) {
findLeafConstructors(child)
} else {
List(child)
}
}
// Recovers the TypeRepr of a subtype from the tree form its symbol takes:
// class definition, case object (ValDef), or enum case (Bind).
def getSubclassType(subclassTree: Tree): TypeRepr =
subclassTree match {
case cls: ClassDef => cls.constructor.returnTpt.tpe
case ValDef(_, tpt, _) => tpt.tpe
case Bind(_, pattern: Term) => pattern.tpe
}
// Renders a sealed hierarchy with common members as a GraphQL interface.
// Each subtype's object type is rewritten to point back at the interface;
// the quoted `lazy val iface` makes that self-reference well-defined.
def deriveInterface(
info: GraphQLInfo,
envType: TypeRepr,
fields: List[Field],
subtypeInOuts: Map[Symbol, (List[Field], List[Field])]
): Expr[caliban.introspection.adt.__Type] = {
val fieldExprs: List[Expr[caliban.introspection.adt.__Field]] =
fields.map { field =>
deriveField(envType, field)
}
// Object types for the concrete subtypes (output fields only).
val subtypeExprs =
subtypeInOuts.map { case (subtype, (_, outs)) =>
val subtypeInfo = extractInfo(subtype, None)
deriveObject(envType, subtypeInfo, outs)
}.toList
'{
lazy val iface: caliban.introspection.adt.__Type = caliban.schema.Types.makeInterface(
Some(${ info.name }),
${ info.description },
() => List(${ Varargs(fieldExprs) }: _*),
List(${ Varargs(subtypeExprs) }: _*).map(_.copy(interfaces = () => Some(List(iface))))
)
iface
}
}
// Renders a sealed hierarchy with no common members (but non-empty
// subtypes) as a GraphQL union of the subtype object types.
def deriveUnion(
info: GraphQLInfo,
envType: TypeRepr,
subtypeInOuts: Map[Symbol, (List[Field], List[Field])]
): Expr[caliban.introspection.adt.__Type] = {
val subtypeExprs =
subtypeInOuts.map { case (subtype, (_, outs)) =>
val subtypeInfo = extractInfo(subtype, None)
deriveObject(envType, subtypeInfo, outs)
}.toList
'{
caliban.schema.Types.makeUnion(
Some(${ info.name }),
${ info.description },
List(${ Varargs(subtypeExprs) }: _*)
)
}
}
// Renders one field-less subtype as a GraphQL enum value.
def deriveEnumValue(subtype: Symbol): Expr[caliban.introspection.adt.__EnumValue] = {
val subtypeInfo = extractInfo(subtype, None)
'{
caliban.introspection.adt.__EnumValue(
name = ${ subtypeInfo.name },
description = ${ subtypeInfo.description },
isDeprecated = ${ subtypeInfo.isDeprecated },
deprecationReason = ${ subtypeInfo.deprecationReason }
)
}
}
// Renders a sealed hierarchy whose members carry no fields at all as a
// GraphQL enum, one value per subtype.
def deriveEnum(
info: GraphQLInfo,
subtypeInOuts: Map[Symbol, (List[Field], List[Field])]
): Expr[caliban.introspection.adt.__Type] = {
val enumValues =
subtypeInOuts.map { case (subtype, _) =>
deriveEnumValue(subtype)
}.toList
'{
caliban.schema.Types.makeEnum(
Some(${ info.name }),
${ info.description },
List(${ Varargs(enumValues) }: _*),
None
)
}
}
// Derives the Schema for a sealed hierarchy. GraphQL kind is chosen by
// shape: no fields anywhere -> enum; subtypes have fields but the parent
// has none -> union; parent has common fields -> interface. resolve() is a
// hand-built Match tree dispatching on the concrete subtype.
def deriveSum(envType: TypeRepr, targetSym: Symbol, targetType: TypeRepr): Expr[Schema[R, T]] = {
val subclasses = findLeafConstructors(targetSym)
val outputs = getAllFields(targetSym, targetType)
val info = extractInfo(targetSym, None)
// Per subtype: its input fields and output fields.
val subclassInOut =
subclasses.map { subclass =>
val subclassType = getSubclassType(subclass.tree)
val inputs = getInputFields(subclass, subclassType)
val outputs = getAllFields(subclass, subclassType)
(subclass, (inputs, outputs))
}.toMap
val isEnum = outputs.isEmpty && subclassInOut.forall { case (_, (in, out)) => in.isEmpty && out.isEmpty }
val isUnion = !isEnum && outputs.isEmpty
val isInterface = !isEnum && !isUnion
// Builds: value match { case x: Sub1 => ObjectStep(...); ... }
def generateResolveMatch(value: Expr[T])(using Quotes): Expr[caliban.schema.Step[R]] =
Match(
value.asTerm,
subclassInOut.map { case (subclass, (_, outs)) =>
val subclassType = getSubclassType(subclass.tree)
val subclassInfo = extractInfo(subclass, None)
// Typed bind `x: Subtype` so the case body can reference the value.
val sym = Symbol.newBind(Symbol.spliceOwner, "x", Flags.EmptyFlags, subclassType)
val pattern = Bind(sym, Typed(Ref(sym), TypeIdent(subclassType.typeSymbol)))
val rhs = '{
ObjectStep.apply[R](
${ subclassInfo.name },
List(${ Varargs(outs.map(field => deriveStepWithName(Ref(sym).asExprOf[T], envType, field))) }: _*).toMap
)
}
CaseDef(pattern, None, rhs.asTerm)
}.toList
).asExprOf[caliban.schema.Step[R]]
'{
new Schema[R, T] {
def resolve(value: T): caliban.schema.Step[R] =
${ generateResolveMatch('{ value }) }
protected[this] def toType(isInput: Boolean, isSubscription: Boolean): caliban.introspection.adt.__Type =
${
if (isInterface) {
deriveInterface(info, envType, outputs, subclassInOut)
} else if (isUnion) {
deriveUnion(info, envType, subclassInOut)
} else {
deriveEnum(info, subclassInOut)
}
}
}
}
}
// Macro body: inspect T's symbol and dispatch to product or sum derivation.
val envType = TypeRepr.of[R]
val targetType = TypeRepr.of[T]
val targetTree = TypeTree.of[T]
val targetSym = targetTree.symbol
val isCaseClass = targetSym.flags.is(Flags.Case)
// Scala 3 enums count as sealed hierarchies alongside sealed traits.
val isSealedTrait =
(targetSym.flags.is(Flags.Trait) && targetSym.flags.is(Flags.Sealed)) || (targetSym.flags.is(Flags.Enum))
val isOpenTrait = !isSealedTrait && targetSym.flags.is(Flags.Trait)
// Case classes and open traits derive as objects; sealed hierarchies derive
// as interface/union/enum depending on their field shape.
val result =
if (isCaseClass || isOpenTrait) {
deriveProduct(envType, targetSym, targetType)
} else {
deriveSum(envType, targetSym, targetType)
}
result
}
|
scala-steward/caliban-deriving
|
caliban-deriving/src/test/scala/caliban/deriving/InterfaceDerivationSpec.scala
|
package caliban.deriving
import caliban.GraphQL.graphQL
import caliban.schema.Annotations.GQLDescription
import caliban.schema.Schema
import caliban.{GraphQL, RootResolver}
import zio.test._
import zio.test.environment._
// Verifies that deriveSchemaInstance renders a sealed trait with common
// members as a GraphQL interface implemented by its case-class subtypes.
object InterfaceDerivationSpec extends DefaultRunnableSpec {
// Sum type whose members x/y are shared by every case, so the derivation
// should produce an interface (not a union or enum).
sealed trait ExampleSum {
def x: Int
@GQLDescription("the y field")
val y: String
}
object ExampleSum {
case class A(z: Option[Int]) extends ExampleSum {
override def x: Int = z.getOrElse(1)
override val y: String = "A"
}
@GQLDescription("the B")
case class B(x: Int, y: String) extends ExampleSum
}
implicit lazy val exampleSumSchema: Schema[Any, ExampleSum] = deriveSchemaInstance[Any, ExampleSum]
lazy val exampleValue: ExampleSum = ExampleSum.A(Some(10))
lazy val api: GraphQL[Any] = graphQL(RootResolver(exampleValue))
// Expected SDL rendering of the derived schema (compared verbatim below).
val expectedSchema: String =
"""schema {
| query: ExampleSum
|}
|
|interface ExampleSum {
| x: Int!
| "the y field"
| y: String!
|}
|
|type A implements ExampleSum {
| z: Int
| x: Int!
| y: String!
|}
|
|"the B"
|type B implements ExampleSum {
| x: Int!
| y: String!
|}""".stripMargin
override def spec: ZSpec[TestEnvironment, Any] =
suite("Caliban Derivation")(
suite("Sum type with common fields")(
test("schema rendered as expected") {
val rendered = api.render
assertTrue(rendered == expectedSchema)
}
)
)
}
|
scala-steward/caliban-deriving
|
caliban-deriving/src/test/scala/caliban/deriving/ProductDerivationSpec.scala
|
package caliban.deriving
import caliban.GraphQL.graphQL
import caliban.deriving.annotations.GQLExclude
import caliban.schema.Annotations.{GQLDeprecated, GQLDescription, GQLName}
import caliban.schema.{GenericSchema, Schema}
import caliban.{GraphQL, RootResolver}
import zio._
import zio.query.ZQuery
import zio.random.Random
import zio.test._
import zio.test.environment._
// Verifies product derivation: case-class fields, annotated/deprecated
// methods, effectful resolvers, @GQLExclude, and the input-object form.
object ProductDerivationSpec extends DefaultRunnableSpec {
@GQLName("EP")
case class ExampleProduct(
name: String,
@GQLDescription("A list of the character's nicknames") nicknames: List[String]
) {
@GQLDescription("Foo")
def foo(): Int = 5
@GQLDeprecated("Don't use Bar.")
def bar: Int = 42
// ZQuery-returning member: should surface as a plain String! field.
def query: ZQuery[Any, Nothing, String] =
ZQuery.succeed(nicknames.mkString(", "))
// Method with parameters: should surface as a field with arguments.
def op(pattern: String, limit: Int): List[String] =
nicknames.filter(_.matches(pattern)).take(limit)
@GQLDescription("Randomly picks one of the nicknames")
def randomNickname: ZIO[Random, Nothing, Option[String]] =
if (nicknames.nonEmpty) {
random.nextIntBounded(nicknames.size).map { idx =>
Some(nicknames(idx))
}
} else ZIO.none
// This field will not be exposed via GraphQL
@GQLExclude
def internal(): Unit = xyz
// Private fields are also not exposed via GraphQL
private val xyz: Unit = ()
}
// Takes ExampleProduct as an argument, exercising the input-object form.
case class ExampleProduct2() {
def something(p: ExampleProduct): Long = p.bar.toLong
}
object ExampleProduct extends GenericSchema[Random] {
implicit lazy val exampleProductSchema: Schema[Random, ExampleProduct] =
deriveSchemaInstance[Random, ExampleProduct]
val exampleValue: ExampleProduct = ExampleProduct(
"hello",
List("a", "b")
)
lazy val api: GraphQL[Random] = graphQL(RootResolver(exampleValue))
// Expected SDL for the output-object rendering.
val expectedSchema: String =
"""schema {
| query: EP
|}
|
|type EP {
| name: String!
| "A list of the character's nicknames"
| nicknames: [String!]!
| "Foo"
| foo: Int!
| bar: Int! @deprecated(reason: "Don't use Bar.")
| query: String!
| op(pattern: String!, limit: Int!): [String!]!
| "Randomly picks one of the nicknames"
| randomNickname: String
|}""".stripMargin
implicit lazy val exampleProduct2Schema: Schema[Random, ExampleProduct2] =
deriveSchemaInstance[Random, ExampleProduct2]
val exampleValue2: ExampleProduct2 = ExampleProduct2()
lazy val api2: GraphQL[Random] = graphQL(RootResolver(exampleValue2))
// Expected SDL when ExampleProduct is used as an input type (EPInput).
val expectedSchema2: String =
"""schema {
| query: ExampleProduct2
|}
|
|scalar Long
|
|input EPInput {
| name: String!
| "A list of the character's nicknames"
| nicknames: [String!]!
|}
|
|type ExampleProduct2 {
| something(p: EPInput!): Long!
|}""".stripMargin
}
override def spec: ZSpec[TestEnvironment, Any] =
suite("Caliban Derivation")(
suite("Product type")(
test("schema rendered as expected") {
val rendered = ExampleProduct.api.render
assertTrue(rendered == ExampleProduct.expectedSchema)
},
test("schema rendered as expected when used as input") {
val rendered = ExampleProduct.api2.render
assertTrue(rendered == ExampleProduct.expectedSchema2)
}
)
)
}
|
amur-host/node
|
it/src/test/scala/com/amurplatform/it/sync/matcher/TradeBalanceAndRoundingTestSuite.scala
|
package com.amurplatform.it.sync.matcher
import com.typesafe.config.{Config, ConfigFactory}
import com.amurplatform.account.PrivateKeyAccount
import com.amurplatform.api.http.assets.SignedIssueV1Request
import com.amurplatform.it.ReportingTestName
import com.amurplatform.it.api.AssetDecimalsInfo
import com.amurplatform.it.api.SyncHttpApi._
import com.amurplatform.it.api.SyncMatcherHttpApi._
import com.amurplatform.it.sync.CustomFeeTransactionSuite.defaultAssetQuantity
import com.amurplatform.it.sync._
import com.amurplatform.it.transactions.NodesFromDocker
import com.amurplatform.it.util._
import com.amurplatform.transaction.AssetId
import com.amurplatform.transaction.assets.IssueTransactionV1
import com.amurplatform.transaction.assets.exchange.OrderType.{BUY, SELL}
import com.amurplatform.transaction.assets.exchange.{AssetPair, Order, OrderType}
import com.amurplatform.utils.Base58
import org.scalatest.{BeforeAndAfterAll, CancelAfterFailure, FreeSpec, Matchers}
import scala.concurrent.duration._
import scala.math.BigDecimal.RoundingMode
import scala.util.Random
class TradeBalanceAndRoundingTestSuite
extends FreeSpec
with Matchers
with BeforeAndAfterAll
with CancelAfterFailure
with NodesFromDocker
with ReportingTestName {
import TradeBalanceAndRoundingTestSuite._
override protected def nodeConfigs: Seq[Config] = Configs
private def matcherNode = nodes.head
private def aliceNode = nodes(1)
private def bobNode = nodes(2)
matcherNode.signedIssue(createSignedIssueRequest(IssueUsdTx))
matcherNode.signedIssue(createSignedIssueRequest(IssueWctTx))
nodes.waitForHeightArise()
"Alice and Bob trade AMUR-USD" - {
nodes.waitForHeightArise()
val aliceAmurBalanceBefore = matcherNode.accountBalances(aliceNode.address)._1
val bobAmurBalanceBefore = matcherNode.accountBalances(bobNode.address)._1
val price = 238
val buyOrderAmount = 425532L
val sellOrderAmount = 3100000000L
val correctedSellAmount = correctAmount(sellOrderAmount, price)
val adjustedAmount = receiveAmount(OrderType.BUY, price, buyOrderAmount)
val adjustedTotal = receiveAmount(OrderType.SELL, price, buyOrderAmount)
log.debug(s"correctedSellAmount: $correctedSellAmount, adjustedAmount: $adjustedAmount, adjustedTotal: $adjustedTotal")
"place usd-amur order" in {
// Alice wants to sell USD for Amur
val bobOrder1 = matcherNode.prepareOrder(bobNode, amurUsdPair, OrderType.SELL, price, sellOrderAmount)
val bobOrder1Id = matcherNode.placeOrder(bobOrder1).message.id
matcherNode.waitOrderStatus(amurUsdPair, bobOrder1Id, "Accepted", 1.minute)
matcherNode.reservedBalance(bobNode)("AMUR") shouldBe sellOrderAmount + matcherFee
matcherNode.tradableBalance(bobNode, amurUsdPair)("AMUR") shouldBe bobAmurBalanceBefore - (sellOrderAmount + matcherFee)
val aliceOrder = matcherNode.prepareOrder(aliceNode, amurUsdPair, OrderType.BUY, price, buyOrderAmount)
val aliceOrderId = matcherNode.placeOrder(aliceOrder).message.id
matcherNode.waitOrderStatusAndAmount(amurUsdPair, aliceOrderId, "Filled", Some(420169L), 1.minute)
// Bob wants to buy some USD
matcherNode.waitOrderStatusAndAmount(amurUsdPair, bobOrder1Id, "PartiallyFilled", Some(420169L), 1.minute)
// Each side get fair amount of assets
val exchangeTx = matcherNode.transactionsByOrder(aliceOrder.idStr()).headOption.getOrElse(fail("Expected an exchange transaction"))
nodes.waitForHeightAriseAndTxPresent(exchangeTx.id)
}
"get opened trading markets. USD price-asset " in {
val openMarkets = matcherNode.tradingMarkets()
openMarkets.markets.size shouldBe 1
val markets = openMarkets.markets.head
markets.amountAssetName shouldBe "AMUR"
markets.amountAssetInfo shouldBe Some(AssetDecimalsInfo(8))
markets.priceAssetName shouldBe usdAssetName
markets.priceAssetInfo shouldBe Some(AssetDecimalsInfo(Decimals))
}
"check usd and amur balance after fill" in {
val aliceAmurBalanceAfter = matcherNode.accountBalances(aliceNode.address)._1
val aliceUsdBalance = matcherNode.assetBalance(aliceNode.address, UsdId.base58).balance
val bobAmurBalanceAfter = matcherNode.accountBalances(bobNode.address)._1
val bobUsdBalance = matcherNode.assetBalance(bobNode.address, UsdId.base58).balance
(aliceAmurBalanceAfter - aliceAmurBalanceBefore) should be(
adjustedAmount - (BigInt(matcherFee) * adjustedAmount / buyOrderAmount).bigInteger.longValue())
aliceUsdBalance - defaultAssetQuantity should be(-adjustedTotal)
bobAmurBalanceAfter - bobAmurBalanceBefore should be(
-adjustedAmount - (BigInt(matcherFee) * adjustedAmount / sellOrderAmount).bigInteger.longValue())
bobUsdBalance should be(adjustedTotal)
}
"check filled amount and tradable balance" in {
val bobsOrderId = matcherNode.fullOrderHistory(bobNode).head.id
val filledAmount = matcherNode.orderStatus(bobsOrderId, amurUsdPair).filledAmount.getOrElse(0L)
filledAmount shouldBe adjustedAmount
}
"check reserved balance" in {
val reservedFee = BigInt(matcherFee) - (BigInt(matcherFee) * adjustedAmount / sellOrderAmount)
log.debug(s"reservedFee: $reservedFee")
val expectedBobReservedBalance = correctedSellAmount - adjustedAmount + reservedFee
matcherNode.reservedBalance(bobNode)("AMUR") shouldBe expectedBobReservedBalance
matcherNode.reservedBalance(aliceNode) shouldBe empty
}
"check amur-usd tradable balance" in {
val expectedBobTradableBalance = bobAmurBalanceBefore - (correctedSellAmount + matcherFee)
matcherNode.tradableBalance(bobNode, amurUsdPair)("AMUR") shouldBe expectedBobTradableBalance
matcherNode.tradableBalance(aliceNode, amurUsdPair)("AMUR") shouldBe aliceNode.accountBalances(aliceNode.address)._1
val orderId = matcherNode.fullOrderHistory(bobNode).head.id
matcherNode.fullOrderHistory(bobNode).size should be(1)
matcherNode.cancelOrder(bobNode, amurUsdPair, Some(orderId))
matcherNode.waitOrderStatus(amurUsdPair, orderId, "Cancelled", 1.minute)
matcherNode.tradableBalance(bobNode, amurUsdPair)("AMUR") shouldBe bobNode.accountBalances(bobNode.address)._1
}
}
// NOTE(review): "CELLING" in the suite name is likely a typo for "CEILING"
// (the rounding mode exercised by correctAmount); left as-is because test
// names are runtime strings that external tooling may filter on.
"Alice and Bob trade AMUR-USD check CELLING" - {
val price2 = 289
val buyOrderAmount2 = 0.07.amur
val sellOrderAmount2 = 3.amur
val correctedSellAmount2 = correctAmount(sellOrderAmount2, price2)
"place usd-amur order" in {
nodes.waitForHeightArise()
// Alice wants to sell USD for Amur
val bobAmurBalanceBefore = matcherNode.accountBalances(bobNode.address)._1
matcherNode.tradableBalance(bobNode, amurUsdPair)("AMUR")
val bobOrder1 = matcherNode.prepareOrder(bobNode, amurUsdPair, OrderType.SELL, price2, sellOrderAmount2)
val bobOrder1Id = matcherNode.placeOrder(bobOrder1).message.id
matcherNode.waitOrderStatus(amurUsdPair, bobOrder1Id, "Accepted", 1.minute)
// While the sell order is open, both reserved and tradable balances must
// account for the corrected sell amount plus the full matcher fee.
matcherNode.reservedBalance(bobNode)("AMUR") shouldBe correctedSellAmount2 + matcherFee
matcherNode.tradableBalance(bobNode, amurUsdPair)("AMUR") shouldBe bobAmurBalanceBefore - (correctedSellAmount2 + matcherFee)
val aliceOrder = matcherNode.prepareOrder(aliceNode, amurUsdPair, OrderType.BUY, price2, buyOrderAmount2)
val aliceOrderId = matcherNode.placeOrder(aliceOrder).message.id
matcherNode.waitOrderStatus(amurUsdPair, aliceOrderId, "Filled", 1.minute)
// Bob wants to buy some USD
matcherNode.waitOrderStatus(amurUsdPair, bobOrder1Id, "PartiallyFilled", 1.minute)
// Each side get fair amount of assets
val exchangeTx = matcherNode.transactionsByOrder(aliceOrder.idStr()).headOption.getOrElse(fail("Expected an exchange transaction"))
nodes.waitForHeightAriseAndTxPresent(exchangeTx.id)
matcherNode.cancelOrder(bobNode, amurUsdPair, Some(bobOrder1Id))
}
}
// Exercises rounding on a pair of two issued assets (WCT amount asset,
// USD price asset): balance, reserved-balance and fee adjustments must all
// use the rounding-corrected amounts.
"Alice and Bob trade WCT-USD" - {
val wctUsdBuyAmount = 146
val wctUsdSellAmount = 347
val wctUsdPrice = 12739213
"place wct-usd order" in {
val aliceUsdBalance = aliceNode.assetBalance(aliceNode.address, UsdId.base58).balance
val bobUsdBalance = bobNode.assetBalance(bobNode.address, UsdId.base58).balance
val bobOrderId = matcherNode.placeOrder(bobNode, wctUsdPair, SELL, wctUsdPrice, wctUsdSellAmount).message.id
matcherNode.waitOrderStatus(wctUsdPair, bobOrderId, "Accepted", 1.minute)
val aliceOrderId = matcherNode.placeOrder(aliceNode, wctUsdPair, BUY, wctUsdPrice, wctUsdBuyAmount).message.id
matcherNode.waitOrderStatus(wctUsdPair, aliceOrderId, "Filled", 1.minute)
val exchangeTx = matcherNode.transactionsByOrder(aliceOrderId).headOption.getOrElse(fail("Expected an exchange transaction"))
nodes.waitForHeightAriseAndTxPresent(exchangeTx.id)
// Bob's leftover WCT reservation is his sell amount minus the corrected matched amount.
matcherNode.reservedBalance(bobNode)(s"$WctId") should be(wctUsdSellAmount - correctAmount(wctUsdBuyAmount, wctUsdPrice))
matcherNode.tradableBalance(bobNode, wctUsdPair)(s"$WctId") shouldBe defaultAssetQuantity - wctUsdSellAmount
matcherNode.tradableBalance(aliceNode, wctUsdPair)(s"$UsdId") shouldBe aliceUsdBalance - receiveAmount(SELL, wctUsdBuyAmount, wctUsdPrice)
matcherNode.tradableBalance(bobNode, wctUsdPair)(s"$UsdId") shouldBe bobUsdBalance + receiveAmount(SELL, wctUsdBuyAmount, wctUsdPrice)
// Remaining reserved fee is the total fee minus the proportional share already charged.
matcherNode.reservedBalance(bobNode)("AMUR") shouldBe
(matcherFee - (BigDecimal(matcherFee * receiveAmount(OrderType.BUY, wctUsdPrice, wctUsdBuyAmount)) / wctUsdSellAmount).toLong)
matcherNode.cancelOrder(bobNode, wctUsdPair, Some(matcherNode.fullOrderHistory(bobNode).head.id))
}
}
// The WCT-USD market opened above must be listed with both asset names and
// their decimal precision (`Decimals`).
"get opened trading markets. Check WCT-USD" in {
val openMarkets = matcherNode.tradingMarkets()
// Most recently opened market is last in the listing.
val markets = openMarkets.markets.last
markets.amountAssetName shouldBe wctAssetName
markets.amountAssetInfo shouldBe Some(AssetDecimalsInfo(Decimals))
markets.priceAssetName shouldBe usdAssetName
markets.priceAssetInfo shouldBe Some(AssetDecimalsInfo(Decimals))
}
// NOTE(review): "enoght" in the suite name is a typo for "enough"; kept
// verbatim because the test name is a runtime string.
// Scenario: Bob leases away all AMUR except half the matcher fee, so a first
// order is accepted but a second placement must be rejected for lack of
// tradable balance.
"Alice and Bob trade WCT-AMUR on not enoght fee when place order" - {
val wctAmurSellAmount = 2
val wctAmurPrice = 11234560000000L
"bob lease all amur exact half matcher fee" in {
val leasingAmount = bobNode.accountBalances(bobNode.address)._1 - leasingFee - matcherFee / 2
val leaseTxId =
bobNode.lease(bobNode.address, matcherNode.address, leasingAmount, leasingFee).id
nodes.waitForHeightAriseAndTxPresent(leaseTxId)
val bobOrderId = matcherNode.placeOrder(bobNode, wctAmurPair, SELL, wctAmurPrice, wctAmurSellAmount).message.id
matcherNode.waitOrderStatus(wctAmurPair, bobOrderId, "Accepted", 1.minute)
matcherNode.tradableBalance(bobNode, wctAmurPair)("AMUR") shouldBe matcherFee / 2 + receiveAmount(SELL, wctAmurPrice, wctAmurSellAmount) - matcherFee
matcherNode.cancelOrder(bobNode, wctAmurPair, Some(bobOrderId))
// Second placement must fail: remaining tradable AMUR cannot cover the fee.
assertBadRequestAndResponse(matcherNode.placeOrder(bobNode, wctAmurPair, SELL, wctAmurPrice, wctAmurSellAmount / 2),
"Not enough tradable balance")
bobNode.cancelLease(bobNode.address, leaseTxId, leasingFee)
}
}
/**
  * Normalizes an order amount the way the matcher rounds it.
  *
  * First the raw total (`price * a / PriceConstant`) is floored to whole
  * price-asset units, then that total is converted back to the amount asset
  * with a ceiling, yielding the smallest amount that settles to the same total.
  */
def correctAmount(a: Long, price: Long): Long = {
  val flooredTotal =
    (BigDecimal(price) * a / Order.PriceConstant).setScale(0, RoundingMode.FLOOR).toLong
  // Same operation order as the matcher: divide first, then scale back up.
  val backToAmount = BigDecimal(flooredTotal) / price * Order.PriceConstant
  backToAmount.setScale(0, RoundingMode.CEILING).toLong
}
/**
  * Amount of the amount-asset the given side receives from a match:
  * a BUY side gets the rounding-corrected amount, a SELL side gets the
  * floored total in the price asset.
  */
def receiveAmount(ot: OrderType, matchPrice: Long, matchAmount: Long): Long =
  ot match {
    case BUY => correctAmount(matchAmount, matchPrice)
    case _   => (BigInt(matchAmount) * matchPrice / Order.PriceConstant).bigInteger.longValueExact()
  }
}
// Companion object: node/matcher configuration and asset fixtures shared by the suite.
object TradeBalanceAndRoundingTestSuite {
import ConfigFactory._
import com.amurplatform.it.NodeConfigs._
// Asset id blacklisted in the matcher config below.
private val ForbiddenAssetId = "FdbnAsset"
// Decimal precision used for both issued test assets.
val Decimals: Byte = 2
private val minerDisabled = parseString("amur.miner.enable = no")
private val matcherConfig = parseString(s"""
|amur.matcher {
| enable = yes
| account = <KEY>
| bind-address = "0.0.0.0"
| order-match-tx-fee = 300000
| blacklisted-assets = ["$ForbiddenAssetId"]
| balance-watching.enable = yes
|}""".stripMargin)
// Four nodes: the last default node becomes the matcher, two random nodes
// have mining disabled, one keeps defaults.
private val _Configs: Seq[Config] = (Default.last +: Random.shuffle(Default.init).take(3))
.zip(Seq(matcherConfig, minerDisabled, minerDisabled, empty()))
.map { case (n, o) => o.withFallback(n) }
private val aliceSeed = _Configs(1).getString("account-seed")
private val bobSeed = _Configs(2).getString("account-seed")
// NOTE(review): `.right.get` throws on a Left; acceptable for test fixtures
// but will fail at class-initialization time if the seed is invalid.
private val alicePk = PrivateKeyAccount.fromSeed(aliceSeed).right.get
private val bobPk = PrivateKeyAccount.fromSeed(bobSeed).right.get
val usdAssetName = "USD-X"
val wctAssetName = "WCT-X"
// Alice issues the USD test asset.
val IssueUsdTx: IssueTransactionV1 = IssueTransactionV1
.selfSigned(
sender = alicePk,
name = usdAssetName.getBytes(),
description = "asset description".getBytes(),
quantity = defaultAssetQuantity,
decimals = Decimals,
reissuable = false,
fee = 1.amur,
timestamp = System.currentTimeMillis()
)
.right
.get
// Bob issues the WCT test asset.
val IssueWctTx: IssueTransactionV1 = IssueTransactionV1
.selfSigned(
sender = bobPk,
name = wctAssetName.getBytes(),
description = "asset description".getBytes(),
quantity = defaultAssetQuantity,
decimals = Decimals,
reissuable = false,
fee = 1.amur,
timestamp = System.currentTimeMillis()
)
.right
.get
val UsdId: AssetId = IssueUsdTx.id()
val WctId = IssueWctTx.id()
// Trading pairs under test; `None` denotes the native AMUR asset.
val wctUsdPair = AssetPair(
amountAsset = Some(WctId),
priceAsset = Some(UsdId)
)
val wctAmurPair = AssetPair(
amountAsset = Some(WctId),
priceAsset = None
)
val amurUsdPair = AssetPair(
amountAsset = None,
priceAsset = Some(UsdId)
)
// Registers USD as a price asset once its id is known.
private val updatedMatcherConfig = parseString(s"""
|amur.matcher {
| price-assets = [ "$UsdId", "AMUR"]
|}
""".stripMargin)
private val Configs = _Configs.map(updatedMatcherConfig.withFallback(_))
// Converts a signed issue transaction into the REST broadcast request shape.
def createSignedIssueRequest(tx: IssueTransactionV1): SignedIssueV1Request = {
import tx._
SignedIssueV1Request(
Base58.encode(tx.sender.publicKey),
new String(name),
new String(description),
quantity,
decimals,
reissuable,
fee,
timestamp,
signature.base58
)
}
}
|
amur-host/node
|
src/main/scala/com/amurplatform/matcher/smart/MatcherContext.scala
|
package com.amurplatform.matcher.smart
import cats.data.EitherT
import cats.implicits._
import cats.kernel.Monoid
import com.amurplatform.lang.Global
import com.amurplatform.lang.v1.compiler.Types.{FINAL, UNIT}
import com.amurplatform.lang.v1.evaluator.FunctionIds._
import com.amurplatform.lang.v1.evaluator.ctx._
import com.amurplatform.lang.v1.evaluator.ctx.impl.amur.Bindings.orderObject
import com.amurplatform.lang.v1.evaluator.ctx.impl.amur.Types._
import com.amurplatform.lang.v1.evaluator.ctx.impl.{CryptoContext, PureContext}
import com.amurplatform.lang.v1.{CTX, FunctionHeader}
import com.amurplatform.transaction.assets.exchange.Order
import com.amurplatform.transaction.smart.RealTransactionWrapper
import monix.eval.Coeval
// Script-evaluation context used when the matcher validates orders.
// Blockchain state is unavailable here, so every state-reading variable and
// function is replaced with a stub that evaluates to a descriptive error.
object MatcherContext {
// Pure and crypto primitives are always safe to expose.
private val baseContext = Monoid.combine(PureContext.ctx, CryptoContext.build(Global)).evaluationContext
// NOTE(review): the `nByte` (network byte) parameter is never used in this body — confirm whether it can be dropped or should be wired in.
def build(nByte: Byte, in: Coeval[Order]): EvaluationContext = {
// `tx` resolves to the order being matched, wrapped as a script case object.
val inputEntityCoeval: Coeval[Either[String, CaseObj]] =
Coeval.defer(in.map(o => Right(orderObject(RealTransactionWrapper.ord(o)))))
// `height` always errors: there is no chain height during matcher-side evaluation.
val heightCoeval: Coeval[Either[String, Long]] = Coeval.evalOnce(Left("height is inaccessible when running script on matcher"))
val matcherTypes = Seq(addressType, aliasType, orderType, assetPairType)
val matcherVars: Map[String, (FINAL, LazyVal)] = Map(
("height", (com.amurplatform.lang.v1.compiler.Types.LONG, LazyVal(EitherT(heightCoeval)))),
("tx", (orderType.typeRef, LazyVal(EitherT(inputEntityCoeval))))
)
// Stub for a native (id-addressed) function: always evaluates to an error.
def inaccessibleFunction(name: String, internalName: Short): BaseFunction =
NativeFunction(name, 1, internalName, UNIT, Seq.empty: _*) {
case _ =>
s"Function ${name} is inaccessible when running script on matcher".asLeft
}
// Stub for a user (name-addressed) function: same error behaviour.
def inaccessibleUserFunction(name: String): BaseFunction = {
NativeFunction(
name,
1,
FunctionTypeSignature(UNIT, Seq.empty, FunctionHeader.User(name)),
ev = {
case _ =>
s"Function ${name} is inaccessible when running script on matcher".asLeft
}
)
}
// All blockchain-state accessors are stubbed out below.
val getIntegerF: BaseFunction = inaccessibleFunction("getInteger", DATA_LONG_FROM_STATE)
val getBooleanF: BaseFunction = inaccessibleFunction("getBoolean", DATA_BOOLEAN_FROM_STATE)
val getBinaryF: BaseFunction = inaccessibleFunction("getBinary", DATA_BYTES_FROM_STATE)
val getStringF: BaseFunction = inaccessibleFunction("getString", DATA_STRING_FROM_STATE)
val txByIdF: BaseFunction = inaccessibleFunction("txByIdF", GETTRANSACTIONBYID)
val txHeightByIdF: BaseFunction = inaccessibleFunction("txHeightByIdF", TRANSACTIONHEIGHTBYID)
val addressFromPublicKeyF: BaseFunction = inaccessibleUserFunction("addressFromPublicKeyF")
val addressFromStringF: BaseFunction = inaccessibleUserFunction("addressFromStringF")
val addressFromRecipientF: BaseFunction = inaccessibleFunction("addressFromRecipientF", ADDRESSFROMRECIPIENT)
val assetBalanceF: BaseFunction = inaccessibleFunction("assetBalanceF", ACCOUNTASSETBALANCE)
val amurBalanceF: BaseFunction = inaccessibleUserFunction("amurBalanceF")
val functions = Seq(
txByIdF,
txHeightByIdF,
getIntegerF,
getBooleanF,
getBinaryF,
getStringF,
addressFromPublicKeyF,
addressFromStringF,
addressFromRecipientF,
assetBalanceF,
amurBalanceF
)
val matcherContext = CTX(matcherTypes, matcherVars, functions).evaluationContext
// Stubs take precedence by being combined after the base context.
baseContext |+| matcherContext
}
}
|
amur-host/node
|
src/main/scala/com/amurplatform/settings/WalletSettings.scala
|
<gh_stars>1-10
package com.amurplatform.settings
import java.io.File
import com.amurplatform.state.ByteStr
/** Wallet configuration: optional wallet file location, its password, and an optional pre-set seed. */
case class WalletSettings(file: Option[File], password: String, seed: Option[ByteStr])
|
amur-host/node
|
src/main/scala/com/amurplatform/transaction/ChainSpecific.scala
|
<filename>src/main/scala/com/amurplatform/transaction/ChainSpecific.scala<gh_stars>1-10
package com.amurplatform.transaction
/** Mixin for entities bound to a particular blockchain network. */
trait ChainSpecific {
// Network (chain) identifier byte.
val chainId: Byte
}
|
amur-host/node
|
benchmark/src/main/scala/com/amurplatform/state/Settings.scala
|
package com.amurplatform.state
import com.typesafe.config.Config
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
// Benchmark-state extraction settings: for each data kind (aliases, txs,
// accounts, assets, data entries) a target file plus the blockchain height
// at which extraction starts.
case class Settings(networkConfigFile: String,
aliasesFile: String,
aliasesFromHeight: Int,
restTxsFile: String,
restTxsFromHeight: Int,
accountsFile: String,
accountsFromHeight: Int,
assetsFile: String,
assetsFromHeight: Int,
dataFile: String,
dataFromHeight: Int)
object Settings {
  /** Reads benchmark settings from the `amur.benchmark.state` config section,
    * mapping hyphenated HOCON keys onto the camelCase case-class fields. */
  def fromConfig(config: Config): Settings = {
    // Fix: an implicit named `_` is deprecated (and later illegal) and cannot
    // be referenced; give the name mapper a proper name and explicit type.
    implicit val hyphenCase: net.ceedubs.ficus.readers.namemappers.HyphenNameMapper.type =
      net.ceedubs.ficus.readers.namemappers.HyphenNameMapper
    config.as[Settings]("amur.benchmark.state")
  }
}
|
amur-host/node
|
lang/jvm/src/test/scala/com/amurplatform/lang/TypeInferrerTest.scala
|
package com.amurplatform.lang
import com.amurplatform.lang.v1.compiler.Types._
import org.scalatest.{FreeSpec, Matchers}
import Common._
import com.amurplatform.lang.v1.compiler.TypeInferrer
import com.amurplatform.lang.v1.evaluator.ctx.CaseType
// Unit tests for TypeInferrer: matching actual argument types against
// (possibly parameterized) declared types, with and without type parameters.
class TypeInferrerTest extends FreeSpec with Matchers {
val typeparamT = TYPEPARAM('T')
val typeparamG = TYPEPARAM('G')
// Cases with no type parameters: success yields an empty substitution map.
"no types to infer" - {
"all types are correct" in {
TypeInferrer(Seq((STRING, STRING), (STRING, STRING), (CASETYPEREF("User", List()), CASETYPEREF("User", List()))),
Map("User" -> CaseType("User", List.empty))) shouldBe Right(Map.empty)
}
"fails if no simple common type" in {
TypeInferrer(Seq((LONG, BYTEVECTOR))) should produce("Non-matching types")
}
"fails if no obj common type" in {
TypeInferrer(Seq((CASETYPEREF("User", List()), CASETYPEREF("Admin", List()))),
Map("User" -> CaseType("User", List.empty), "Admin" -> CaseType("Admin", List.empty))) should produce("Non-matching types")
}
}
// Cases where type parameters must be resolved to concrete types.
"inferring" - {
"simple type" in {
TypeInferrer(Seq((LONG, typeparamT))) shouldBe Right(Map(typeparamT -> LONG))
}
"many simple types" in {
TypeInferrer(
Seq(
(LONG, typeparamT),
(BYTEVECTOR, typeparamG)
)) shouldBe Right(Map(typeparamT -> LONG, typeparamG -> BYTEVECTOR))
}
"one simple same type" in {
TypeInferrer(Seq((LONG, typeparamT), (LONG, typeparamT), (LONG, typeparamT))) shouldBe Right(Map(typeparamT -> LONG))
}
// Inference through LIST / PARAMETERIZEDLIST containers.
"option" - {
"as plain type" in {
TypeInferrer(Seq((LIST(LONG), typeparamT))) shouldBe Right(Map(typeparamT -> LIST(LONG)))
}
"containing inner type" in {
TypeInferrer(Seq((LIST(LONG), PARAMETERIZEDLIST(typeparamT)))) shouldBe Right(Map(typeparamT -> LONG))
}
"containing same inner type" in {
TypeInferrer(Seq((LIST(LONG), PARAMETERIZEDLIST(typeparamT)), (LIST(LONG), PARAMETERIZEDLIST(typeparamT)))) shouldBe Right(
Map(typeparamT -> LONG))
}
"containing inner and separate type" in {
TypeInferrer(Seq((LONG, typeparamT), (LIST(LONG), PARAMETERIZEDLIST(typeparamT)))) shouldBe Right(Map(typeparamT -> LONG))
}
"containing best common type" in {
TypeInferrer(Seq((LONG, typeparamT), (LIST(NOTHING), PARAMETERIZEDLIST(typeparamT)))) shouldBe Right(Map(typeparamT -> LONG))
}
"fails if no common type" in {
TypeInferrer(Seq((BYTEVECTOR, typeparamT), (BYTEVECTOR, PARAMETERIZEDLIST(typeparamT)))) should produce("Non-matching types")
TypeInferrer(Seq((LONG, typeparamT), (LIST(LIST(NOTHING)), PARAMETERIZEDLIST(typeparamT)))) should produce("Can't match inferred types")
TypeInferrer(Seq((BYTEVECTOR, typeparamT), (LIST(LONG), PARAMETERIZEDLIST(typeparamT)))) should produce("Can't match inferred types")
}
}
// Inference through UNION / PARAMETERIZEDUNION types (Option-like unions with UNIT).
"union" - {
val optionLong = UNION(LONG, UNIT)
"no types to infer" - {
"simple types" in {
TypeInferrer(Seq((LONG, optionLong))) shouldBe Right(Map.empty)
}
"no common simple type" in {
TypeInferrer(Seq((LONG, UNION(BOOLEAN, UNIT)))) should produce("Non-matching types")
}
"inside list" in {
TypeInferrer(Seq((LIST(LONG), LIST(optionLong)))) shouldBe Right(Map.empty)
}
"no common type inside list" in {
TypeInferrer(Seq((LIST(LONG), LIST(UNION(BOOLEAN, UNIT))))) should produce("Non-matching types")
}
"unit in union" in {
TypeInferrer(Seq((UNIT, UNION(BOOLEAN, UNIT)))) shouldBe Right(Map.empty)
}
}
"inferring" - {
val optionT = PARAMETERIZEDUNION(List(typeparamT, UNIT))
"simple types" in {
TypeInferrer(Seq((LONG, optionT))) shouldBe Right(Map(typeparamT -> LONG))
}
"inside list" in {
TypeInferrer(Seq((LIST(LONG), PARAMETERIZEDLIST(optionT)))) shouldBe Right(Map(typeparamT -> LONG))
}
"inside union" in {
TypeInferrer(Seq((optionLong, optionT))) shouldBe Right(Map(typeparamT -> LONG))
}
"Option[Int] matches type parameter" in {
TypeInferrer(Seq((optionLong, typeparamT))) shouldBe Right(Map(typeparamT -> optionLong))
}
"common type of Int and Option[Int] should be Option[Int]" in {
TypeInferrer(Seq((optionLong, typeparamT), (LONG, typeparamT))) shouldBe Right(Map(typeparamT -> optionLong))
}
"ambiguous inference" in {
TypeInferrer(Seq((LONG, PARAMETERIZEDUNION(List(typeparamT, typeparamG))))) should produce("Can't resolve correct type")
}
}
}
}
|
amur-host/node
|
src/test/scala/com/amurplatform/transaction/TransferTransactionV1Specification.scala
|
package com.amurplatform.transaction
import com.amurplatform.TransactionGen
import com.amurplatform.state.{ByteStr, EitherExt2}
import org.scalatest._
import org.scalatest.prop.PropertyChecks
import play.api.libs.json.Json
import com.amurplatform.account.{Address, PublicKeyAccount}
import com.amurplatform.transaction.transfer._
import com.amurplatform.state.diffs._
import com.amurplatform.utils.Base58
// Property tests for TransferTransactionV1: binary round-trip, generic
// parser dispatch, JSON shape, and a negative fee case.
class TransferTransactionV1Specification extends PropSpec with PropertyChecks with Matchers with TransactionGen {
// Serialize then parse must reproduce every field byte-for-byte.
property("Transfer serialization roundtrip") {
forAll(transferV1Gen) { transfer: TransferTransactionV1 =>
val recovered = TransferTransactionV1.parseBytes(transfer.bytes()).get
recovered.sender.address shouldEqual transfer.sender.address
recovered.assetId.map(_ == transfer.assetId.get).getOrElse(transfer.assetId.isEmpty) shouldBe true
recovered.feeAssetId.map(_ == transfer.feeAssetId.get).getOrElse(transfer.feeAssetId.isEmpty) shouldBe true
recovered.timestamp shouldEqual transfer.timestamp
recovered.amount shouldEqual transfer.amount
recovered.fee shouldEqual transfer.fee
recovered.recipient.stringRepr shouldEqual transfer.recipient.stringRepr
recovered.bytes() shouldEqual transfer.bytes()
}
}
// The generic TransactionParsers entry point must also round-trip the bytes.
property("Transfer serialization from TypedTransaction") {
forAll(transferV1Gen) { tx: TransferTransactionV1 =>
val recovered = TransactionParsers.parseBytes(tx.bytes()).get
recovered.bytes() shouldEqual tx.bytes()
}
}
// Pins the exact JSON representation against a fixed, known-good document.
property("JSON format validation") {
val js = Json.parse("""{
"type": 4,
"id": "FLszEaqasJptohmP6zrXodBwjaEYq4jRP2BzdPPjvukk",
"sender": "3N5GRqzDBhjVXnCn44baHcz2GoZy5qLxtTh",
"senderPublicKey": "<KEY>",
"fee": 100000,
"timestamp": 1526552510868,
"signature": "<KEY>",
"proofs": ["<KEY>"],
"version": 1,
"recipient": "<KEY>",
"assetId": null,
"feeAssetId": null,
"feeAsset": null,
"amount": 1900000,
"attachment": "4t2Xazb2SX"
}
""")
val tx = TransferTransactionV1
.create(
None,
PublicKeyAccount.fromBase58String("<KEY>").explicitGet(),
Address.fromString("<KEY>").explicitGet(),
1900000,
1526552510868L,
None,
100000,
Base58.decode("4t2Xazb2SX").get,
ByteStr.decodeBase58("<KEY>").get
)
.right
.get
tx.json() shouldEqual js
}
// NOTE(review): this property builds a Gen via for/yield but never samples it
// (no forAll), so the `should produce(...)` assertion never actually runs.
// It likely needs to be rewritten with forAll over a generator that produces
// an insufficient fee — verify the intended expectation before fixing.
property("negative") {
for {
(_, sender, recipient, amount, timestamp, _, feeAmount, attachment) <- transferParamGen
sender <- accountGen
} yield
TransferTransactionV1.selfSigned(None, sender, recipient, amount, timestamp, None, feeAmount, attachment) should produce("insufficient fee")
}
}
|
amur-host/node
|
lang/shared/src/main/scala/com/amurplatform/lang/v1/task/imports.scala
|
<reponame>amur-host/node
package com.amurplatform.lang.v1.task
/** Single import surface aggregating the TaskM helper functions and type-class instances. */
object imports extends TaskMFunctions with TaskMInstances
|
amur-host/node
|
generator/src/main/scala/com.amurplatform.generator/OracleTransactionGenerator.scala
|
<reponame>amur-host/node
package com.amurplatform.generator
import cats.Show
import com.amurplatform.account.PrivateKeyAccount
import com.amurplatform.generator.OracleTransactionGenerator.Settings
import com.amurplatform.generator.utils.Gen
import com.amurplatform.it.util._
import com.amurplatform.state._
import com.amurplatform.transaction.smart.SetScriptTransaction
import com.amurplatform.transaction.transfer.TransferTransactionV1
import com.amurplatform.transaction.{DataTransaction, Transaction}
// Load generator for an oracle scenario: the first account is guarded by an
// oracle script, the last account is the oracle publishing the required data,
// and the bulk of the load is transfers from the scripted account.
class OracleTransactionGenerator(settings: Settings, val accounts: Seq[PrivateKeyAccount]) extends TransactionGenerator {
override def next(): Iterator[Transaction] = generate(settings).toIterator
def generate(settings: Settings): Seq[Transaction] = {
val oracle = accounts.last
val scriptedAccount = accounts.head
val script = Gen.oracleScript(oracle, settings.requiredData)
// Flat fee used for every transaction emitted here.
val enoughFee = 0.005.amur
// 1) Attach the oracle script to the scripted account.
val setScript: Transaction =
SetScriptTransaction
.selfSigned(1, scriptedAccount, Some(script), enoughFee, System.currentTimeMillis())
.explicitGet()
// 2) Publish the data entries the script requires.
val setDataTx: Transaction = DataTransaction
.selfSigned(1, oracle, settings.requiredData.toList, enoughFee, System.currentTimeMillis())
.explicitGet()
// 3) N transfers from the scripted account; each forces script evaluation.
val transactions: List[Transaction] =
List
.fill(settings.transactions) {
TransferTransactionV1
.selfSigned(
None,
scriptedAccount,
oracle,
1.amur,
System.currentTimeMillis(),
None,
enoughFee,
Array.emptyByteArray
)
.explicitGet()
}
// Setup transactions must precede the transfers.
setScript +: setDataTx +: transactions
}
}
object OracleTransactionGenerator {
  /** Generator settings: number of transfers to emit and the data entries the oracle must publish. */
  final case class Settings(transactions: Int, requiredData: Set[DataEntry[_]])

  object Settings {
    // Human-readable rendering used when the generator prints its configuration.
    implicit val toPrintable: Show[Settings] = { settings =>
      val lines = Seq(
        s"Transactions: ${settings.transactions}",
        s"DataEntries: ${settings.requiredData}"
      )
      lines.map(_ + "\n").mkString
    }
  }
}
|
amur-host/node
|
src/test/scala/com/amurplatform/history/BlockchainUpdaterBurnTest.scala
|
package com.amurplatform.history
import com.amurplatform.TransactionGen
import com.amurplatform.features.BlockchainFeatures
import com.amurplatform.settings.{BlockchainSettings, AmurSettings}
import com.amurplatform.state._
import com.amurplatform.state.diffs.{ENOUGH_AMT, produce}
import org.scalacheck.Gen
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}
import com.amurplatform.transaction.GenesisTransaction
import com.amurplatform.transaction.assets.{BurnTransactionV1, IssueTransactionV1, ReissueTransactionV1}
import com.amurplatform.transaction.transfer.TransferTransactionV1
// Scenario tests: after burning part of a non-reissuable asset, a reissue must
// be rejected — both when the transactions arrive in separate blocks and when
// burn+reissue share one block.
class BlockchainUpdaterBurnTest extends PropSpec with PropertyChecks with DomainScenarioDrivenPropertyCheck with Matchers with TransactionGen {
// 1 AMUR in its smallest units.
val Amur: Long = 100000000
type Setup =
(Long, GenesisTransaction, TransferTransactionV1, IssueTransactionV1, BurnTransactionV1, ReissueTransactionV1)
// Fixture: master funds alice; alice issues a NON-reissuable asset, burns
// half of it, then attempts a reissue (which must fail).
val preconditions: Gen[Setup] = for {
master <- accountGen
ts <- timestampGen
transferAssetAmurFee <- smallFeeGen
alice <- accountGen
(_, assetName, description, quantity, decimals, _, _, _) <- issueParamGen
genesis: GenesisTransaction = GenesisTransaction.create(master, ENOUGH_AMT, ts).explicitGet()
masterToAlice: TransferTransactionV1 = TransferTransactionV1
.selfSigned(None, master, alice, 3 * Amur, ts + 1, None, transferAssetAmurFee, Array.emptyByteArray)
.explicitGet()
issue: IssueTransactionV1 = IssueTransactionV1.selfSigned(alice, assetName, description, quantity, decimals, false, Amur, ts + 100).explicitGet()
burn: BurnTransactionV1 = BurnTransactionV1.selfSigned(alice, issue.assetId(), quantity / 2, Amur, ts + 200).explicitGet()
reissue: ReissueTransactionV1 = ReissueTransactionV1.selfSigned(alice, issue.assetId(), burn.quantity, true, Amur, ts + 300).explicitGet()
} yield (ts, genesis, masterToAlice, issue, burn, reissue)
// Single-block feature activation so NG and DataTransaction are live from height 0.
val localBlockchainSettings: BlockchainSettings = DefaultBlockchainSettings.copy(
functionalitySettings = DefaultBlockchainSettings.functionalitySettings
.copy(
featureCheckBlocksPeriod = 1,
blocksForFeatureActivation = 1,
preActivatedFeatures = Map(BlockchainFeatures.NG.id -> 0, BlockchainFeatures.DataTransaction.id -> 0)
))
val localAmurSettings: AmurSettings = settings.copy(blockchainSettings = localBlockchainSettings)
property("issue -> burn -> reissue in sequential blocks works correctly") {
scenario(preconditions, localAmurSettings) {
case (domain, (ts, genesis, masterToAlice, issue, burn, reissue)) =>
val block0 = customBuildBlockOfTxs(randomSig, Seq(genesis), defaultSigner, 1, ts)
val block1 = customBuildBlockOfTxs(block0.uniqueId, Seq(masterToAlice), defaultSigner, 1, ts + 150)
val block2 = customBuildBlockOfTxs(block1.uniqueId, Seq(issue), defaultSigner, 1, ts + 250)
val block3 = customBuildBlockOfTxs(block2.uniqueId, Seq(burn), defaultSigner, 1, ts + 350)
val block4 = customBuildBlockOfTxs(block3.uniqueId, Seq(reissue), defaultSigner, 1, ts + 450)
domain.appendBlock(block0)
domain.appendBlock(block1)
domain.appendBlock(block2)
// After issue: full quantity, not reissuable.
val assetDescription1 = domain.blockchainUpdater.assetDescription(issue.assetId()).get
assetDescription1.reissuable should be(false)
assetDescription1.totalVolume should be(issue.quantity)
domain.appendBlock(block3)
// After burn: total volume reduced by the burned quantity.
val assetDescription2 = domain.blockchainUpdater.assetDescription(issue.assetId()).get
assetDescription2.reissuable should be(false)
assetDescription2.totalVolume should be(issue.quantity - burn.quantity)
// Reissue of a non-reissuable asset must be rejected.
domain.blockchainUpdater.processBlock(block4) should produce("Asset is not reissuable")
}
}
property("issue -> burn -> reissue in micro blocks works correctly") {
scenario(preconditions, localAmurSettings) {
case (domain, (ts, genesis, masterToAlice, issue, burn, reissue)) =>
val block0 = customBuildBlockOfTxs(randomSig, Seq(genesis), defaultSigner, 1, ts)
val block1 = customBuildBlockOfTxs(block0.uniqueId, Seq(masterToAlice), defaultSigner, 1, ts + 150)
val block2 = customBuildBlockOfTxs(block1.uniqueId, Seq(issue), defaultSigner, 1, ts + 250)
// burn and reissue packed into the same block: whole block must be rejected.
val block3 = customBuildBlockOfTxs(block2.uniqueId, Seq(burn, reissue), defaultSigner, 1, ts + 350)
domain.appendBlock(block0)
domain.appendBlock(block1)
domain.appendBlock(block2)
val assetDescription1 = domain.blockchainUpdater.assetDescription(issue.assetId()).get
assetDescription1.reissuable should be(false)
assetDescription1.totalVolume should be(issue.quantity)
domain.blockchainUpdater.processBlock(block3) should produce("Asset is not reissuable")
}
}
}
|
amur-host/node
|
src/test/scala/com/amurplatform/mining/package.scala
|
package com.amurplatform
import com.amurplatform.state.Blockchain
import com.amurplatform.transaction.Transaction
package object mining {
// Builds a one-dimensional constraint whose estimator charges the same fixed
// size for every transaction, i.e. it admits about maxSize / transactionSize
// transactions. `transactionSize` is by-name: `apply` re-evaluates it on each
// call, while `minEstimate` captures a single value at construction time.
private[mining] def createConstConstraint(maxSize: Long, transactionSize: => Long) = OneDimensionalMiningConstraint(
maxSize,
new com.amurplatform.mining.TxEstimators.Fn {
override def apply(b: Blockchain, t: Transaction) = transactionSize
override val minEstimate = transactionSize
}
)
}
|
amur-host/node
|
src/main/scala/com/amurplatform/transaction/Authorized.scala
|
<filename>src/main/scala/com/amurplatform/transaction/Authorized.scala
package com.amurplatform.transaction
import com.amurplatform.account.PublicKeyAccount
/** Mixin for transactions that carry the public key of the account that authorized them. */
trait Authorized {
// Public key of the authorizing (signing) account.
val sender: PublicKeyAccount
}
|
amur-host/node
|
src/main/scala/com/amurplatform/network/PeerSynchronizer.scala
|
<gh_stars>1-10
package com.amurplatform.network
import java.net.InetSocketAddress
import com.amurplatform.utils.ScorexLogging
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.{ChannelHandlerContext, ChannelInboundHandlerAdapter}
import scala.concurrent.duration.FiniteDuration
/**
  * Netty inbound handler implementing the peer-exchange protocol: periodically
  * requests known peers, answers GetPeers requests, and records the remote
  * peer's declared address once it is verified against the actual connection.
  */
class PeerSynchronizer(peerDatabase: PeerDatabase, peerRequestInterval: FiniteDuration) extends ChannelInboundHandlerAdapter with ScorexLogging {

  private var peersRequested  = false
  private var declaredAddress = Option.empty[InetSocketAddress]

  /** Sends a GetPeers request and re-schedules itself while the channel is active. */
  def requestPeers(ctx: ChannelHandlerContext): Unit = if (ctx.channel().isActive) {
    peersRequested = true
    ctx.writeAndFlush(GetPeers)

    ctx.executor().schedule(peerRequestInterval) {
      requestPeers(ctx)
    }
  }

  override def channelRead(ctx: ChannelHandlerContext, msg: AnyRef): Unit = {
    // Any inbound message counts as activity for the verified declared address.
    declaredAddress.foreach(peerDatabase.touch)
    msg match {
      case hs: Handshake =>
        // Trust the declared address only if it resolves to the same IP the
        // connection actually originates from (spoofing guard).
        val rda = for {
          rda        <- hs.declaredAddress
          rdaAddress <- Option(rda.getAddress)
          ctxAddress <- ctx.remoteAddress.map(_.getAddress)
          if rdaAddress == ctxAddress
        } yield rda

        rda match {
          case None =>
            // Bug fix: the matched value is always None in this branch, so the
            // old message interpolated the literal "None"; log what the peer
            // actually declared instead.
            log.debug(
              s"${id(ctx)} Declared address ${hs.declaredAddress} does not match actual remote address ${ctx.remoteAddress.map(_.getAddress)}")
          case Some(x) =>
            log.trace(s"${id(ctx)} Touching declared address")
            peerDatabase.touch(x)
            declaredAddress = Some(x)
        }

        // Handshake kicks off the periodic peer-request loop, then propagates.
        requestPeers(ctx)
        super.channelRead(ctx, msg)
      case GetPeers =>
        ctx.writeAndFlush(KnownPeers(peerDatabase.knownPeers.keys.toSeq))
      case KnownPeers(peers) if peersRequested =>
        peersRequested = false
        val (added, notAdded) = peers.partition(peerDatabase.addCandidate)
        log.trace(s"${id(ctx)} Added peers: ${format(added)}, not added peers: ${format(notAdded)}")
      case KnownPeers(peers) =>
        // Unsolicited peer lists are logged but ignored.
        log.trace(s"${id(ctx)} Got unexpected list of known peers containing ${peers.size} entries")
      case _ =>
        super.channelRead(ctx, msg)
    }
  }

  private def format[T](xs: Iterable[T]): String = xs.mkString("[", ", ", "]")
}
object PeerSynchronizer {

  /** Handler that silently drops peer-exchange traffic; all other messages
    * continue down the pipeline. Used when peer exchange is disabled. */
  @Sharable
  class NoopPeerSynchronizer extends ChannelInboundHandlerAdapter {
    override def channelRead(ctx: ChannelHandlerContext, msg: AnyRef): Unit = msg match {
      case GetPeers      => // swallow
      case KnownPeers(_) => // swallow
      case other         => super.channelRead(ctx, other)
    }
  }

  val Disabled = new NoopPeerSynchronizer()
}
|
amur-host/node
|
src/main/scala/com/amurplatform/api/http/assets/SignedExchangeRequestV2.scala
|
<filename>src/main/scala/com/amurplatform/api/http/assets/SignedExchangeRequestV2.scala
package com.amurplatform.api.http.assets
import cats.implicits._
import io.swagger.annotations.ApiModelProperty
import play.api.libs.json.{Format, Json}
import com.amurplatform.account.PublicKeyAccount
import com.amurplatform.api.http.BroadcastRequest
import com.amurplatform.transaction.{ValidationError, Proofs}
import com.amurplatform.transaction.assets.exchange.{ExchangeTransaction, ExchangeTransactionV2, Order}
// JSON (de)serialization for the signed V2 exchange request.
object SignedExchangeRequestV2 {
implicit val orderFormat: Format[Order] = com.amurplatform.transaction.assets.exchange.OrderJson.orderFormat
implicit val signedExchangeRequestFormat: Format[SignedExchangeRequestV2] = Json.format
}
// REST request body for broadcasting a pre-signed ExchangeTransaction (version 2,
// proofs-based). `toTx` validates the fields and assembles the transaction.
case class SignedExchangeRequestV2(@ApiModelProperty(value = "Base58 encoded sender public key", required = true)
senderPublicKey: String,
@ApiModelProperty(value = "Buy Order")
order1: Order,
@ApiModelProperty(value = "Sell Order")
order2: Order,
@ApiModelProperty(required = true)
price: Long,
@ApiModelProperty(required = true, example = "1000000")
amount: Long,
@ApiModelProperty(required = true)
fee: Long,
@ApiModelProperty(required = true)
buyMatcherFee: Long,
@ApiModelProperty(required = true)
sellMatcherFee: Long,
@ApiModelProperty(required = true)
timestamp: Long,
@ApiModelProperty(required = true, example = "2")
version: Byte,
@ApiModelProperty(required = true)
proofs: List[String],
) extends BroadcastRequest {
// Validates the sender key and proofs, then builds the V2 exchange transaction.
// Returns a ValidationError for any malformed field.
def toTx: Either[ValidationError, ExchangeTransaction] =
for {
_sender <- PublicKeyAccount.fromBase58String(senderPublicKey)
_proofBytes <- proofs.traverse(s => parseBase58(s, "invalid proof", Proofs.MaxProofStringSize))
_proofs <- Proofs.create(_proofBytes)
_t <- ExchangeTransactionV2.create(order1, order2, price, amount, buyMatcherFee, sellMatcherFee, fee, timestamp, _proofs)
} yield _t
}
|
amur-host/node
|
src/main/scala/com/amurplatform/matcher/model/EventSerializers.scala
|
<filename>src/main/scala/com/amurplatform/matcher/model/EventSerializers.scala
package com.amurplatform.matcher.model
import java.io.NotSerializableException
import akka.serialization._
import com.amurplatform.matcher.market.MatcherActor.OrderBookCreated
import com.amurplatform.matcher.market.OrderBookActor.Snapshot
import com.amurplatform.matcher.market.{MatcherActor, OrderBookActor}
import com.amurplatform.matcher.model.Events._
import com.amurplatform.matcher.model.MatcherModel.{Level, Price}
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json._
import com.amurplatform.transaction.assets.exchange.OrderJson._
import com.amurplatform.transaction.assets.exchange.{AssetPair, Order}
import scala.collection.immutable.TreeMap
// Akka persistence serializer for matcher events and snapshots, using a JSON
// wire format with a string manifest per message type.
class EventSerializers extends SerializerWithStringManifest {
import EventSerializers._
override def identifier: Int = id
// NOTE(review): this match has no default case — an unknown event type throws
// MatchError instead of a serialization error; confirm that is intended.
override def manifest(o: AnyRef): String = o match {
case _: OrderBookActor.Snapshot => Manifest.Snapshot
case _: MatcherActor.OrderBookCreated => Manifest.OrderBookCreated
case _: OrderAdded => Manifest.OrderAdded
case _: OrderExecuted => Manifest.OrderExecuted
case _: OrderCanceled => Manifest.OrderCancelled
case _: MatcherActor.Snapshot => Manifest.MatcherSnapshot
}
// Serializes each supported event to its JSON representation.
override def toBinary(o: AnyRef): Array[Byte] =
Json
.stringify(o match {
case s: OrderBookActor.Snapshot => snapshotFormat.writes(s)
case obc: MatcherActor.OrderBookCreated => orderBookCreatedFormat.writes(obc)
case x: MatcherActor.Snapshot => matcherSnapshot.writes(x)
case oa: OrderAdded => orderAddedFormat.writes(oa)
case oe: OrderExecuted => orderExecutedFormat.writes(oe)
case oc: OrderCanceled => orderCancelledFormat.writes(oc)
})
.getBytes
// Deserializes by manifest. NOTE(review): `.get` on a JsResult throws on
// malformed payloads rather than surfacing a descriptive error.
override def fromBinary(bytes: Array[Byte], manifest: String): AnyRef = manifest match {
case Manifest.Snapshot => snapshotFormat.reads(Json.parse(bytes)).get
case Manifest.OrderBookCreated => orderBookCreatedFormat.reads(Json.parse(bytes)).get
case Manifest.MatcherSnapshot => matcherSnapshot.reads(Json.parse(bytes)).get
case Manifest.OrderAdded => orderAddedFormat.reads(Json.parse(bytes)).get
case Manifest.OrderExecuted => orderExecutedFormat.reads(Json.parse(bytes)).get
case Manifest.OrderCancelled => orderCancelledFormat.reads(Json.parse(bytes)).get
case _ => throw new NotSerializableException(manifest)
}
}
/**
  * Companion holding the serializer id, the manifest strings and all play-json
  * `Format`s used by [[EventSerializers]].
  *
  * NOTE(review): declaration order matters — implicits declared earlier
  * (e.g. `limitFormat`) must be in scope for the formats defined below them.
  * Do not reorder these members.
  */
object EventSerializers {
  // Unique Akka serializer identifier for this binding.
  private[EventSerializers] val id = 4001

  // Stable manifest strings written alongside each payload; `fromBinary` keys on these.
  // They are part of the persisted-data contract and must never change.
  private[EventSerializers] object Manifest {
    val Snapshot = "snapshot"
    val OrderBookCreated = "orderBookCreated"
    val MatcherSnapshot = "matcherSnapshot"
    val OrderAdded = "event.OrderAdded"
    val OrderExecuted = "event.OrderExecuted"
    val OrderCancelled = "event.OrderCancelled"
  }

  // Projection used on the write side of `limitOrderFormatBuilder`:
  // a LimitOrder is serialized as its (price, amount, fee, order) tuple.
  private def dataToSerialize(lo: LimitOrder) = (lo.price, lo.amount, lo.fee, lo.order)

  /**
    * Builds a `Format` for a concrete `LimitOrder` subtype from its 4-arg constructor.
    *
    * The custom `Reads` (instead of the symmetric combinator) exists for backward
    * compatibility: "fee" is optional on read, and when absent it is recomputed
    * via `LimitOrder.getPartialFee(order.matcherFee, order.amount, amount)`.
    */
  private def limitOrderFormatBuilder[T <: LimitOrder](limitOrderBuilder: (Long, Long, Long, Order) => T): Format[T] = Format(
    Reads[T] {
      case js: JsObject =>
        val price = (js \ "price").as[Long]
        val amount = (js \ "amount").as[Long]
        val order = (js \ "order").as[Order]
        // Legacy records may lack "fee"; derive it proportionally from the order's matcherFee.
        val fee = (js \ "fee").asOpt[Long].getOrElse(LimitOrder.getPartialFee(order.matcherFee, order.amount, amount))
        JsSuccess(limitOrderBuilder(price, amount, fee, order))
      case _ => JsError("failed to deserialize LimitOrder")
    },
    // Write side always emits all four fields, including "fee".
    ((__ \ "price").format[Long] and
      (__ \ "amount").format[Long] and
      (__ \ "fee").format[Long] and
      (__ \ "order").format[Order])(limitOrderBuilder, dataToSerialize)
  )

  implicit val limitFormat: Format[LimitOrder] = limitOrderFormatBuilder[LimitOrder](LimitOrder.limitOrder)
  implicit val buyFormat: Format[BuyLimitOrder] = limitOrderFormatBuilder[BuyLimitOrder](BuyLimitOrder.apply)
  implicit val sellFormat: Format[SellLimitOrder] = limitOrderFormatBuilder[SellLimitOrder](SellLimitOrder.apply)

  // A price level map is written as a JSON object keyed by the stringified price,
  // each value being the array of orders at that level.
  implicit val orderMapWrites: Writes[Map[Price, Level[LimitOrder]]] = (tree: Map[Price, Level[LimitOrder]]) =>
    JsObject(tree.map {
      case (k, v) =>
        k.toString -> JsArray(v.map(o => Json.toJson(o)))
    })

  // Read side of the bids book: rebuild the TreeMap with the bid-specific ordering,
  // converting the string keys written by `orderMapWrites` back to Long prices.
  implicit val buyOrderTreeMapReads: Reads[TreeMap[Price, Level[BuyLimitOrder]]] = { jv: JsValue =>
    val a = jv.as[Map[String, Level[BuyLimitOrder]]].map { case (k, v) => (k.toLong, v) }
    JsSuccess(TreeMap.empty[Price, Level[BuyLimitOrder]](OrderBook.bidsOrdering) ++ a)
  }

  // Same as above for the asks book, with the ask-specific ordering.
  implicit val sellOrderTreeMapReads: Reads[TreeMap[Price, Level[SellLimitOrder]]] = { jv: JsValue =>
    val a = jv.as[Map[String, Level[SellLimitOrder]]].map { case (k, v) => (k.toLong, v) }
    JsSuccess(TreeMap.empty[Price, Level[SellLimitOrder]](OrderBook.asksOrdering) ++ a)
  }

  // Macro-derived; relies on the tree-map Reads/Writes defined above being in scope.
  implicit val orderBookFormat: Format[OrderBook] = Json.format

  // OrderAdded is wrapped as {"o": <limit order>}.
  val orderAddedFormat = Format(
    (__ \ "o").read[LimitOrder].map(OrderAdded),
    Writes[OrderAdded](oa => Json.obj("o" -> oa.order))
  )

  // OrderExecuted carries the two matched counter-orders as "o1"/"o2".
  val orderExecutedFormat: Format[OrderExecuted] = ((__ \ "o1").format[LimitOrder] and
    (__ \ "o2").format[LimitOrder])(OrderExecuted.apply, unlift(OrderExecuted.unapply))

  // Custom Reads for backward compatibility: "unmatchable" is optional and defaults to false.
  val orderCancelledFormat = Format(
    Reads[OrderCanceled] {
      case js: JsObject =>
        val o = (js \ "o").as[LimitOrder]
        val u = (js \ "unmatchable").asOpt[Boolean]
        JsSuccess(OrderCanceled(o, unmatchable = u.getOrElse(false)))
      case _ => JsError("failed to deserialize OrderCanceled")
    },
    Writes[OrderCanceled](oc => Json.obj("o" -> oc.limitOrder, "unmatchable" -> oc.unmatchable))
  )

  // NOTE(review): `.get` throws if the persisted asset strings no longer form a
  // valid pair — presumably acceptable for trusted journal data; verify.
  private def mkOrderBookCreated(a1: String, a2: String) = OrderBookCreated(AssetPair.createAssetPair(a1, a2).get)
  private def orderBookToPair(obc: OrderBookCreated) = (obc.pair.amountAssetStr, obc.pair.priceAssetStr)

  // An asset pair is encoded as {"a1": amountAsset, "a2": priceAsset}.
  implicit val orderBookCreatedFormat: Format[OrderBookCreated] = ((__ \ "a1").format[String] and
    (__ \ "a2").format[String])(mkOrderBookCreated, orderBookToPair)

  implicit val assetPair: Format[AssetPair] = ((__ \ "a1").format[String] and
    (__ \ "a2").format[String])((a, b) => AssetPair.createAssetPair(a, b).get, x => (x.amountAssetStr, x.priceAssetStr))

  implicit val matcherSnapshot: Format[MatcherActor.Snapshot] = Json.format[MatcherActor.Snapshot]

  // A (Price, Price) pair is encoded as a 2-element JSON array.
  // (Price is evidently an alias of Long here — the Format[(Long, Long)] assignment compiles.)
  implicit val tuple2Format: Format[(Price, Price)] = new Format[(Long, Long)] {
    def writes(o: (Long, Long)): JsValue = Json.arr(o._1, o._2)

    def reads(json: JsValue): JsResult[(Long, Long)] = {
      // Assumes the array has at least two elements — shorter input throws.
      val a = json.as[JsArray].value
      JsSuccess((a.head.as[Long], a(1).as[Long]))
    }
  }

  // Cache of per-key price pairs, written as {"<key>": [p1, p2], ...}.
  implicit val cacheFormat: Format[Map[String, (Price, Price)]] = new Format[Map[String, (Long, Long)]] {
    def writes(cache: Map[String, (Long, Long)]): JsValue =
      JsObject(cache.mapValues(v => Json.arr(v._1, v._2)))

    // Delegates to tuple2Format (via the implicit in scope) for each value.
    def reads(jv: JsValue): JsResult[Map[String, (Long, Long)]] =
      JsSuccess(jv.as[Map[String, (Long, Long)]])
  }

  // An order-book snapshot is wrapped as {"o": <order book>}.
  implicit val snapshotFormat: Format[Snapshot] =
    Format((JsPath \ "o").read[OrderBook].map(Snapshot), Writes[Snapshot](s => Json.obj("o" -> s.orderBook)))
}
|
amur-host/node
|
src/test/scala/com/amurplatform/http/DebugApiRouteSpec.scala
|
<filename>src/test/scala/com/amurplatform/http/DebugApiRouteSpec.scala
package com.amurplatform.http
import com.amurplatform.TestWallet
import com.amurplatform.settings.AmurSettings
import com.amurplatform.api.http.ApiKeyNotValid
/** Verifies that the /debug/configInfo endpoint rejects requests without a valid api-key. */
class DebugApiRouteSpec extends RouteSpec("/debug") with RestAPISettingsHelper with TestWallet {
  // Load the reference configuration once for the whole spec.
  private val loadedConfig = com.typesafe.config.ConfigFactory.load()
  private val settings     = AmurSettings.fromConfig(loadedConfig)

  // Route under test; collaborators other than the settings and raw config are
  // irrelevant for this check, so they are left null.
  private val route =
    DebugApiRoute(settings, null, null, null, null, null, null, null, null, null, null, null, null, null, loadedConfig.root()).route

  routePath("/configInfo") - {
    "requires api-key header" in {
      Seq(true, false).foreach { full =>
        Get(routePath(s"/configInfo?full=$full")) ~> route should produce(ApiKeyNotValid)
      }
    }
  }
}
|
amur-host/node
|
src/test/scala/com/amurplatform/transaction/smart/script/ScriptV1Test.scala
|
package com.amurplatform.transaction.smart.script
import com.amurplatform.lang.v1.FunctionHeader
import com.amurplatform.lang.v1.compiler.Terms._
import com.amurplatform.lang.v1.testing.TypedScriptGen
import com.amurplatform.state.diffs.produce
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}
import scodec.bits.ByteVector
import com.amurplatform.transaction.smart.script.v1.ScriptV1
import com.amurplatform.lang.v1.evaluator.FunctionIds._
import com.amurplatform.lang.v1.evaluator.ctx.impl.PureContext
class ScriptV1Test extends PropSpec with PropertyChecks with Matchers with TypedScriptGen {

  // Builds an expression that chains `count` sigVerify calls through nested IFs:
  // IF(sigVerify, IF(sigVerify, ..., FALSE), FALSE). Shared by the complexity
  // and size properties below.
  private def sigVerifyChain(count: Int): EXPR = {
    val bytes = CONST_BYTEVECTOR(ByteVector(1))
    val call = FUNCTION_CALL(
      function = FunctionHeader.Native(SIGVERIFY),
      args = List(bytes, bytes, bytes)
    )
    Seq.fill(count)(call).reduceLeft[EXPR](IF(_, _, FALSE))
  }

  property("ScriptV1.apply should permit BOOLEAN scripts") {
    forAll(BOOLEANgen(10))(expr => ScriptV1(expr) shouldBe 'right)
  }

  property("ScriptV1.apply should deny too complex scripts") {
    // 21 chained sigVerify calls exceed the complexity budget.
    ScriptV1(sigVerifyChain(21)) should produce("Script is too complex")
  }

  property("ScriptV1.apply should deny too big scripts") {
    // A 100-term sum, cheap to evaluate but large when serialized.
    val hundredTermSum = (1 to 100).foldLeft[EXPR](CONST_LONG(0)) { (acc, i) =>
      FUNCTION_CALL(
        function = FunctionHeader.Native(SUM_LONG),
        args = List(acc, CONST_LONG(i))
      )
    }
    // Embedding that sum nine times blows past the size limit while staying
    // within the complexity limit.
    val oversized = (1 to 9).foldLeft[EXPR](CONST_LONG(0)) { (acc, _) =>
      FUNCTION_CALL(
        function = PureContext.eq.header,
        args = List(acc, hundredTermSum)
      )
    }
    ScriptV1(oversized) should produce("Script is too large")
  }

  property("19 sigVerify should fit in maxSizeInBytes") {
    // One fewer than the rejection threshold exercised above must still be accepted.
    ScriptV1(sigVerifyChain(19)) shouldBe 'right
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.