code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package bita
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.bita.{ RandomScheduleHelper, Scheduler }
import akka.bita.pattern.Patterns._
import akka.util.duration._
import akka.util.Timeout
import akka.dispatch.Await
import bita.util.FileHelper
import bita.criteria._
import bita.ScheduleOptimization._
import org.scalatest._
import java.util.concurrent.TimeUnit
import akka.testkit.TestProbe
import util._
import java.io.File
import scala.io.Source
/**
 * Base class for a Bita test battery.  Concrete subclasses provide the
 * actor-system scenario; this base wires up two ScalaTest cases: one that
 * drives a random scheduler and one that generates and runs schedules from
 * Bita's coverage criteria, followed by reporting and validation.
 */
abstract class BitaTests extends FunSuite with ImprovedTestHelper with BeforeAndAfterEach {
  // The name of this test battery; also used to build the result directory paths.
  // NOTE(review): "unkown" is a typo for "unknown" — left untouched because the
  // string feeds into `resultDir` and on-disk paths may depend on it.
  def name = "unkown"

  // Are we expecting certain schedules to fail?
  def expectFailures = false

  // The delay used for Futures/Awaits/asks.
  implicit val timeout = Timeout(5000, TimeUnit.MILLISECONDS)

  // Delay between start and end message.
  def delay = 1000

  // Available criterions in Bita: PRCriterion, PCRCriterion, PMHRCriterion.
  def criteria = Array[Criterion](PRCriterion, PCRCriterion, PMHRCriterion)

  // Folders where we need to store the test results.
  val resultDir = "test-results/%s/".format(this.name)
  val randomTracesDir = resultDir + "random/"
  val randomTracesTestDir = resultDir + "random-test/"

  // Runtime configuration; overwritten from the scalatest config map in beforeEach.
  var verbose = 0      // verbosity level (higher = more output; see measure/summary/validate)
  var randomTime = 0   // time budget for random scheduling, 0 = skip (units per testRandomByTime — confirm)
  var randomTraces = 1 // number of random traces to generate/use
  // Keeps generating random schedules until the configured time budget runs
  // out or a bug is found.  Does nothing unless a positive `randomTime` was
  // supplied via the scalatest config (see beforeEach).
  test("Test with random sheduler within a timeout", Tag("random-schedule")) {
    random = true // flag from ImprovedTestHelper — presumably switches on random-scheduling mode
    if (randomTime > 0) {
      testRandomByTime(name, randomTracesTestDir, randomTime)
    }
    random = false
  }
// Generate and test schedules at once.
test("Generate and test schedules with criterion", Tag("test")) {
var randomTrace = FileHelper.getFiles(randomTracesDir, (name ⇒ name.contains("-trace.txt")))
for (criterion ← criteria) {
for (opt ← criterion.optimizations.-(NONE)) {
var scheduleDir = resultDir + "%s-%s/".format(criterion.name, opt)
FileHelper.emptyDir(scheduleDir)
runGenerateSchedulesAndTest(name, scheduleDir, randomTraces, criterion, opt)
}
}
measure();
summary();
validate();
}
  // This will count how many bugs there were found with a certain schedule,
  // giving you an indication of how good a schedule is.
  // Only runs at verbosity >= 3.
  private def measure() = {
    if (verbose >= 3) {
      // The number of traces after which the coverage should be measured.
      var interval = 5
      for (criterion ← criteria) {
        for (opt ← criterion.optimizations.-(NONE)) {
          var scheduleDir = resultDir + "%s-%s/".format(criterion.name, opt)
          if (new java.io.File(scheduleDir).exists) {
            // Copy the random traces next to the generated ones so the coverage
            // measurement sees both.
            // NOTE(review): this local shadows the Int field `randomTraces`
            // with a list of files — consider renaming.
            var randomTraces = FileHelper.getFiles(randomTracesDir, (name ⇒ name.contains("-trace.txt")))
            FileHelper.copyFiles(randomTraces, scheduleDir)
            var resultFile = scheduleDir + "%s-%s-result.txt".format(criterion.name, opt)
            var traceFiles = FileHelper.getFiles(scheduleDir, (name ⇒ name.contains("-trace.txt")))
            traceFiles = FileHelper.sortTracesByName(traceFiles, "-%s-")
            criterion.measureCoverage(traceFiles, resultFile, interval)
          }
        }
      }
    }
  }
// Give a summary of where the bugs
// This is tool dependendant information
private def summary() = {
if (verbose >= 2) {
for (path ← new File(resultDir).listFiles if path.isDirectory()) { // Iterate over all directories
val file: File = new File(path + "\\time-bug-report.txt")
val faulty = Source.fromFile(file).getLines().size
if (file.isFile()) { // Check if they contain a bug report file from Bita
if (faulty <= 4) { // Check if the shedule was faulty shedules (should be more then 4 lines then)
print(Console.GREEN)
} else {
print(Console.RED)
}
Source.fromFile(file).getLines().foreach { // Iterate over the content and print it
println
}
println(Console.RESET)
}
}
}
}
  // Validates whether we have found a valid race condition: at verbosity >= 1
  // a coloured banner is printed, and in all cases two assertions make the
  // outcome visible in the test runner.  "Success" depends on whether
  // failures were expected (`expectFailures`); `numShedules`/`numFaulty` are
  // maintained by ImprovedTestHelper.
  private def validate() = {
    var msg = ""
    if (verbose >= 1) {
      if (numShedules != 0) {
        if (expectFailures) {
          if ((numFaulty == 0)) { // failures were expected but none occurred
            print(Console.RED + Console.BOLD)
            msg = "**FAILURE** Generated %d shedules and %d of them failed (Failures expected).".format(numShedules, numFaulty)
          } else {
            print(Console.GREEN + Console.BOLD)
            msg = "**SUCCESS** Generated %d shedules and %d of them failed.".format(numShedules, numFaulty)
          }
        } else {
          if ((numFaulty == 0)) { // no failures expected and none occurred
            print(Console.GREEN + Console.BOLD)
            msg = "**SUCCESS** Generated %d shedules and %d of them failed.".format(numShedules, numFaulty)
          } else {
            print(Console.RED + Console.BOLD)
            msg = "**FAILURE** Generated %d shedules and %d of them failed (No failures expected).".format(numShedules, numFaulty)
          }
        }
      } else {
        print(Console.RED + Console.BOLD)
        // NOTE(review): the format string has one %d but two arguments; the
        // extra argument is silently ignored by the underlying Formatter.
        msg = "**FAILURE** Something went wrong, generated %d shedules".format(numShedules, numFaulty)
      }
      println("*===========================================================================================*")
      println("| |")
      println("| "+msg.padTo(87, ' ')+" |")
      println("| |")
      println("*===========================================================================================*"+Console.RESET)
    }
    // Assert to make the test fail or succeed, for showing it in the testrunner.
    assert(numShedules != 0, "Generated %d shedules.".format(numShedules))
    assert((numFaulty != 0) == expectFailures, msg)
  }
override def beforeEach(td: TestData) {
val config: Map[String, Any] = td.configMap
verbose = config.getOrElse("verbose", "1").asInstanceOf[String].toInt // read out the config passed via scalatest options
randomTime = config.getOrElse("randomTime", "0").asInstanceOf[String].toInt
randomTraces = config.getOrElse("randomTraces", "1").asInstanceOf[String].toInt
}
} | Tjoene/thesis | Case_Programs/scala-redis-2.9-pre-scala-2.10/src/test/scala/bita/BitaTests.scala | Scala | gpl-2.0 | 6,535 |
package aprsbigdata.maps.kml
/**
 * A labelled geographic marker with optional altitude, bearing and timestamp.
 */
class PlaceMarker(val label: String,
                  val longitude: Double,
                  val latitude: Double,
                  val altitude: Double = 0,
                  val bearing: Double = 0.0,
                  val timestamp: Long = 0) {
  // Rendered as "<label> lon,lat,alt,bearing"; the timestamp is omitted.
  override def toString() =
    s"$label ${List(longitude, latitude, altitude, bearing).mkString(",")}"
}
/**
 * Represents one of the qAR one-hops we identified, with the iGate callsign
 * and the caller information.  The inherited `label` holds the caller's
 * callsign; `distance` is mutable so it can be filled in after construction.
 */
class QARMarker(val iGateCallsign: String, callerCallsign: String, longitude: Double,
                latitude: Double, altitude: Double = 0.0, bearing: Double = 0.0,
                timestamp: Long = 0, var distance: Double = 0.0)
    extends PlaceMarker(callerCallsign, longitude, latitude,
                        altitude = altitude, bearing = bearing, timestamp = timestamp) {
  // Rendered as "<caller>=><iGate> lon,lat,alt,bearing dist=<distance>".
  override def toString() = {
    val coords = List(longitude, latitude, altitude, bearing).mkString(",")
    s"$label=>$iGateCallsign $coords dist=$distance"
  }
}
/** Static information about a digipeater: its callsign and position. */
class DigipeaterInfo(val callsign: String, val longitude: Double, val latitude: Double, val altitude: Double = 0) {
  // Rendered as "<callsign> lon,lat,alt".
  override def toString() =
    s"$callsign ${List(longitude, latitude, altitude).mkString(",")}"
}
| cericyeh/aprsbigdata | src/main/scala/aprsbigdata/maps/kml/PlaceMarker.scala | Scala | lgpl-3.0 | 1,105 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.webapi
import java.util.concurrent.TimeUnit
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.MediaTypes
import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.model.StatusCodes
import com.netflix.atlas.akka.DiagnosticMessage
import com.netflix.atlas.core.db.Database
import com.netflix.atlas.core.db.MemoryDatabase
import com.netflix.atlas.core.model.Datapoint
import com.netflix.atlas.core.model.DefaultSettings
import com.netflix.atlas.core.model.TagKey
import com.netflix.atlas.core.norm.NormalizationCache
import com.netflix.atlas.core.validation.ValidationResult
import com.netflix.spectator.api.Registry
import com.netflix.spectator.api.histogram.BucketCounter
import com.netflix.spectator.api.histogram.BucketFunctions
/**
 * Actor that accepts publish requests and feeds the datapoints into the local
 * in-memory database, replying with an HTTP response that describes how much
 * of the payload was accepted.
 */
class LocalPublishActor(registry: Registry, db: Database) extends Actor with ActorLogging {

  import com.netflix.atlas.webapi.PublishApi._

  // TODO: This actor is only intended to work with MemoryDatabase, but the binding is
  // setup for the Database interface.
  private val processor =
    new LocalPublishActor.DatapointProcessor(registry, db.asInstanceOf[MemoryDatabase])

  // Number of invalid datapoints received
  private val numInvalid = registry.createId("atlas.db.numInvalid")

  // Case order matters: empty payload -> BadRequest; all-invalid -> BadRequest
  // with failure details; all-valid -> OK; mixed -> Accepted with partial
  // failure details.
  def receive: Receive = {
    case req @ PublishRequest(Nil, Nil, _, _) =>
      req.complete(DiagnosticMessage.error(StatusCodes.BadRequest, "empty payload"))
    case req @ PublishRequest(Nil, failures, _, _) =>
      updateStats(failures)
      val msg = FailureMessage.error(failures)
      sendError(req, StatusCodes.BadRequest, msg)
    case req @ PublishRequest(values, Nil, _, _) =>
      processor.update(values)
      req.complete(HttpResponse(StatusCodes.OK))
    case req @ PublishRequest(values, failures, _, _) =>
      processor.update(values)
      updateStats(failures)
      val msg = FailureMessage.partial(failures)
      sendError(req, StatusCodes.Accepted, msg)
  }

  /** Complete the request with the given status and a JSON-encoded failure message. */
  private def sendError(req: PublishRequest, status: StatusCode, msg: FailureMessage): Unit = {
    val entity = HttpEntity(MediaTypes.`application/json`, msg.toJson)
    req.complete(HttpResponse(status = status, entity = entity))
  }

  /** Increment the invalid-datapoint counter, tagged by error type, for each failure. */
  private def updateStats(failures: List[ValidationResult]): Unit = {
    failures.foreach {
      case ValidationResult.Pass => // Ignored
      case ValidationResult.Fail(error, _, _) =>
        registry.counter(numInvalid.withTag("error", error)).increment()
    }
  }
}
object LocalPublishActor {

  /**
   * Process datapoints from publish requests and feed into the in-memory database.
   *
   * @param registry
   *   Registry for keeping track of metrics.
   * @param memDb
   *   Database that will receive the final processed datapoints.
   * @param maxMessageIds
   *   Maximum number of message ids to track. If this limit is exceeded, then it
   *   is possible for duplicate messages to get processed which may lead to over
   *   counting if rollups are being used.
   *   NOTE(review): this parameter is not referenced anywhere in the visible
   *   class body — confirm whether the message-id tracking it documents was
   *   removed or lives elsewhere.
   */
  private[webapi] class DatapointProcessor(
    registry: Registry,
    memDb: MemoryDatabase,
    maxMessageIds: Int = 1000000
  ) {

    // Track the ages of data flowing into the system. Data is expected to arrive quickly and
    // should hit the backend within the step interval used.
    private val numReceived = {
      val f = BucketFunctions.age(DefaultSettings.stepSize, TimeUnit.MILLISECONDS)
      BucketCounter.get(registry, registry.createId("atlas.db.numMetricsReceived"), f)
    }

    // Normalizes incoming values onto step boundaries before forwarding them to the db.
    private val cache = new NormalizationCache(DefaultSettings.stepSize, memDb.update)

    /**
     * Records the receive-age of each datapoint, then routes it to the
     * normalization matching its ds-type tag; unknown/absent types are
     * treated as rates.
     */
    def update(vs: List[Datapoint]): Unit = {
      val now = registry.clock().wallTime()
      vs.foreach { v =>
        numReceived.record(now - v.timestamp)
        v.tags.get(TagKey.dsType) match {
          case Some("counter") => cache.updateCounter(v)
          case Some("gauge")   => cache.updateGauge(v)
          case Some("rate")    => cache.updateRate(v)
          case Some("sum")     => cache.updateSum(v)
          case _               => cache.updateRate(v)
        }
      }
    }
  }
}
| copperlight/atlas | atlas-webapi/src/main/scala/com/netflix/atlas/webapi/LocalPublishActor.scala | Scala | apache-2.0 | 4,820 |
package stgy
object Vec {
  /** Factory for a vector with the given components. */
  def apply(x: Double, y: Double): Vec = new Vec(x, y)

  /** Factory for the zero vector. */
  def apply(): Vec = new Vec(0.0, 0.0)
}
/**
 * A mutable 2-D vector.  The symbolic operators (+, -, unary -, *, /) return
 * fresh vectors, while the assignment variants (*=, /=) and normalize()
 * mutate this instance in place.
 */
class Vec(var x: Double, var y: Double) extends Element {
  /** Component-wise sum (new vector). */
  def +(v: Vec) = Vec(x + v.x, y + v.y)
  /** Negation (new vector). */
  def unary_- = Vec(-x, -y)
  /** Component-wise difference, implemented as addition of the negation (new vector). */
  def -(v: Vec) = this + (-v)
  /** Component-wise (Hadamard) product (new vector). */
  def *(v: Vec) = Vec(x * v.x, y * v.y)
  /** Scalar multiplication (new vector). */
  def *(mul: Double) = Vec(x * mul, y * mul)
  /** Scalar division (new vector). */
  def /(mul: Double) = Vec(x / mul, y / mul)

  /** In-place component-wise multiplication. */
  def *=(v: Vec) = {
    x *= v.x
    y *= v.y
  }
  /** In-place scalar multiplication. */
  def *=(n: Double): Unit = {
    x *= n
    y *= n
  }
  /** In-place scalar division (no guard against n == 0). */
  def /=(n: Double): Unit = {
    x /= n
    y /= n
  }

  /** Squared length; avoids the sqrt of length(). */
  def length2() = x * x + y * y
  /** Euclidean length. */
  def length() = math.sqrt(length2())

  /**
   * Normalizes this vector in place to unit length (a no-op for the zero
   * vector) and returns this instance for chaining.
   */
  def normalize(): Vec = {
    val l = length()
    if (l != 0) {
      x /= l
      y /= l
    }
    this
  }
} | DeltaIMT/Delta | stgy_server/src/main/scala/stgy/Vec.scala | Scala | mit | 754 |
package gapt.proofs.ceres
import gapt.expr._
import gapt.expr.formula.All
import gapt.expr.formula.And
import gapt.expr.formula.Atom
import gapt.expr.formula.Bottom
import gapt.expr.formula.Eq
import gapt.expr.formula.Ex
import gapt.expr.formula.Formula
import gapt.expr.formula.Iff
import gapt.expr.formula.Imp
import gapt.expr.formula.Neg
import gapt.expr.formula.Or
import gapt.expr.formula.Top
import gapt.expr.formula.constants.ExistsC
import gapt.expr.formula.constants.ForallC
import gapt.expr.formula.constants.QuantifierC
import gapt.expr.util.freeVariables
import gapt.logic.hol.toNNF
import gapt.proofs.Sequent
import gapt.proofs.context.mutable.MutableContext
import scala.util.matching.Regex
object Viperize {
  /**
   * Builds a sequent for Viper from the context's rewrite rules plus the goal
   * `top`: rules whose left-hand side carries a generated clause-set-symbol
   * name (shape "...S[TF]*A[TF]*") become universally closed equivalences,
   * other non-Boolean rules become universally closed equations, and the goal
   * is placed in the succedent as `top -> ⊥`, universally closed.
   */
  def apply( top: Expr )( implicit ctx: MutableContext ): Sequent[Formula] = {
    val newAnte = ctx.normalizer.rules.map( x => {
      // Recognizes the clause-set-symbol naming scheme used by structNames.
      val pattern = new Regex( "\\\\S+S[TF]*A[TF]*" )
      if ( ( pattern findAllIn x.lhs.toString ).nonEmpty ) {
        val matrix = Iff( x.lhs, x.rhs )
        All.Block( freeVariables( matrix ).toSeq, matrix )
      } else if ( !( x.lhs.ty.toString.matches( "o" ) ) ) { // non-Boolean: treat as equation
        val matrix = Eq( x.lhs, x.rhs )
        val App( name, args ) = x.lhs
        // NOTE(review): leftover debug output — confirm whether this println
        // (and the unused `args` binding) can be removed.
        println( name )
        All.Block( freeVariables( matrix ).toSeq, matrix )
      } else Bottom() // placeholder for rules matching neither case; filtered below
    } )
    val newSuc = All.Block( freeVariables( top ).toSeq, Imp( top, Bottom() ) )
    Sequent( newAnte.toSeq filterNot ( x => x.alphaEquals( Bottom() ) ), Seq( newSuc ) )
  }
}
/**
 * Characteristic formula (negative form) of a cut-free struct: folds the
 * struct into a formula, simplifying neutral elements on the fly (⊤ in
 * conjunctions, ⊥ in disjunctions), and universally closes the result.
 * CLS terms must not occur.
 */
object CharFormN extends StructVisitor[Formula, Unit] {
  def apply( struct: Struct ): Formula = {
    val csf = recurse( struct, StructTransformer[Formula, Unit](
      { ( x, _ ) => x }, { ( x, y, _ ) =>
        {
          // Conjunction with ⊤-absorption.
          if ( x.equals( Top() ) ) y
          else if ( y.equals( Top() ) ) x
          else And( x, y )
        }
      }, Top(),
      { ( x, y, _ ) =>
        {
          // Disjunction with ⊥-absorption.
          if ( x.equals( Bottom() ) ) y
          else if ( y.equals( Bottom() ) ) x
          else Or( x, y )
        }
      }, Bottom(), { ( x, _ ) => Neg( x ) },
      { ( _, _, _ ) => throw new Exception( "Should not contain CLS terms" ) } ), () )
    All.Block( freeVariables( csf ).toSeq, csf )
  }
}
/** Characteristic formulas (negative form) for proof-schema clause sets. */
object CharFormPRN {
  /** Maps each clause-set symbol's atom to its characteristic formula and free variables. */
  def apply( scs: Map[CLS, ( Struct, Set[Var] )] ): Map[Formula, ( Formula, Set[Var] )] = Support(
    scs, stTN )
  // Negative transformer: plain And/Or with ⊤/⊥ units; CLS terms resolved via Support.cF.
  private val stTN = StructTransformer[Formula, Map[( String, Sequent[Boolean] ), String]](
    { ( x, _ ) => x }, { ( x, y, _ ) => And( x, y ) }, Top(), { ( x, y, _ ) => Or( x, y ) },
    Bottom(), { ( x, _ ) => Neg( x ) }, Support.cF )
  /** Registers the characteristic formulas as universally quantified reduction rules. */
  def PR( chF: Map[Formula, ( Formula, Set[Var] )] )( implicit ctx: MutableContext ): Unit =
    Support.add( chF, ForallC )
}
/**
 * Characteristic formula (positive/dual form) of a cut-free struct: atoms are
 * negated (in NNF), And/Or are swapped relative to CharFormN, neutral
 * elements are simplified, and the result is existentially closed.
 * CLS terms must not occur.
 */
object CharFormP extends StructVisitor[Formula, Unit] {
  def apply( struct: Struct ): Formula = {
    val csf = recurse( struct, StructTransformer[Formula, Unit](
      { ( x, _ ) => toNNF( Neg( x ) ) }, { ( x, y, _ ) =>
        {
          // Dualized: struct-conjunction becomes disjunction, with ⊥-absorption.
          if ( x.equals( Bottom() ) ) y
          else if ( y.equals( Bottom() ) ) x
          else Or( x, y )
        }
      }, Bottom(),
      { ( x, y, _ ) =>
        {
          // Dualized: struct-disjunction becomes conjunction, with ⊤-absorption.
          if ( x.equals( Top() ) ) y
          else if ( y.equals( Top() ) ) x
          else And( x, y )
        }
      }, Top(), { ( x, _ ) => Neg( x ) },
      { ( _, _, _ ) => throw new Exception( "Should not contain CLS terms" ) } ), () )
    Ex.Block( freeVariables( csf ).toSeq, csf )
  }
}
/** Characteristic formulas (positive/dual form) for proof-schema clause sets. */
object CharFormPRP {
  /** Maps each clause-set symbol's atom to its dual characteristic formula and free variables. */
  def apply( scs: Map[CLS, ( Struct, Set[Var] )] ): Map[Formula, ( Formula, Set[Var] )] = Support( scs, stTP )
  // Positive transformer: atoms negated, And/Or swapped w.r.t. stTN; CLS via Support.cF.
  private val stTP = StructTransformer[Formula, Map[( String, Sequent[Boolean] ), String]](
    { ( x, _ ) => Neg( x ) }, { ( x, y, _ ) => Or( x, y ) }, Bottom(),
    { ( x, y, _ ) => And( x, y ) }, Top(), { ( x, _ ) => Neg( x ) }, Support.cF )
  /** Registers the characteristic formulas as existentially quantified reduction rules. */
  def PR( chF: Map[Formula, ( Formula, Set[Var] )] )( implicit ctx: MutableContext ): Unit =
    Support.add( chF, ExistsC )
}
/** Shared machinery for CharFormPRN/CharFormPRP. */
private object Support {
  /**
   * For every clause-set symbol, pairs the atom of its generated predicate
   * name with the characteristic formula obtained by folding its struct with
   * the given transformer.
   */
  def apply(
    scs: Map[CLS, ( Struct, Set[Var] )],
    stT: StructTransformer[Formula, Map[( String, Sequent[Boolean] ), String]] ): Map[Formula, ( Formula, Set[Var] )] = {
    val names = structNames( scs )
    scs.map {
      case ( CLS( Apps( Const( name, _, _ ), vs ), cc ), ( st, vars ) ) =>
        ( Atom( names( ( name, cc ) ), vs ), ( constructingForm( st, names, stT ), vars ) )
    }
  }

  /** Resolves a CLS occurrence to the atom of its generated predicate name. */
  def cF( pn: Expr, cc: Sequent[Boolean], mn: Map[( String, Sequent[Boolean] ), String] ): Formula = {
    val Apps( Const( name, _, _ ), vs ) = pn
    Atom( mn.getOrElse( ( name, cc ), { throw new Exception( "Should be in map" ) } ), vs )
  }

  // Introduces universal quantifiers over `evar` as deeply as possible,
  // while simplifying ⊤/⊥ units and double negations.
  // assuming NNFCNF
  private def QuantIntroForAll( f: Formula, evar: Set[Var] ): Formula = f match {
    case And( x, And( Top(), Top() ) ) => QuantIntroForAll( x, evar )
    case And( And( Top(), Top() ), x ) => QuantIntroForAll( x, evar )
    case And( Top(), x ) => QuantIntroForAll( x, evar )
    case And( x, Top() ) => QuantIntroForAll( x, evar )
    case And( x, y ) => And( QuantIntroForAll( x, evar ), QuantIntroForAll( y, evar ) )
    case Or( x, Or( Bottom(), Bottom() ) ) => QuantIntroForAll( x, evar )
    case Or( Or( Bottom(), Bottom() ), x ) => QuantIntroForAll( x, evar )
    case Or( Bottom(), x ) => QuantIntroForAll( x, evar )
    case Or( x, Bottom() ) => QuantIntroForAll( x, evar )
    case Or( Neg( Neg( x ) ), Neg( Neg( y ) ) ) =>
      All.Block( evar.intersect( freeVariables( Or( x, y ) ) ).toSeq, Or( x, y ) )
    case Or( x, Neg( Neg( y ) ) ) =>
      All.Block( evar.intersect( freeVariables( Or( x, y ) ) ).toSeq, Or( x, y ) )
    case Or( Neg( Neg( x ) ), y ) =>
      All.Block( evar.intersect( freeVariables( Or( x, y ) ) ).toSeq, Or( x, y ) )
    case Or( x, y ) =>
      All.Block( evar.intersect( freeVariables( Or( x, y ) ) ).toSeq, Or( x, y ) )
    case Atom( _, _ ) =>
      All.Block( evar.intersect( freeVariables( f ) ).toSeq, f )
    case Top() => Top()
    case Bottom() => Bottom()
    case Neg( Neg( x ) ) => QuantIntroForAll( x, evar )
    case Neg( Atom( _, _ ) ) => All.Block( evar.intersect( freeVariables( f ) ).toSeq, f )
    case Neg( x ) => Neg( QuantIntroForAll( x, evar ) )
  }

  // Dual of QuantIntroForAll: introduces existential quantifiers over `evar`.
  private def QuantIntroExists( f: Formula, evar: Set[Var] ): Formula = f match {
    case Or( x, Or( Bottom(), Bottom() ) ) => QuantIntroExists( x, evar )
    case Or( Or( Bottom(), Bottom() ), x ) => QuantIntroExists( x, evar )
    case Or( Bottom(), x ) => QuantIntroExists( x, evar )
    case Or( x, Bottom() ) => QuantIntroExists( x, evar )
    case Or( x, y ) => Or( QuantIntroExists( x, evar ), QuantIntroExists( y, evar ) )
    case And( x, And( Top(), Top() ) ) => QuantIntroExists( x, evar )
    case And( And( Top(), Top() ), x ) => QuantIntroExists( x, evar )
    case And( Top(), x ) => QuantIntroExists( x, evar )
    case And( x, Top() ) => QuantIntroExists( x, evar )
    case And( Neg( Neg( x ) ), Neg( Neg( y ) ) ) =>
      Ex.Block( evar.intersect( freeVariables( And( x, y ) ) ).toSeq, And( x, y ) )
    case And( x, Neg( Neg( y ) ) ) =>
      Ex.Block( evar.intersect( freeVariables( And( x, y ) ) ).toSeq, And( x, y ) )
    case And( Neg( Neg( x ) ), y ) =>
      Ex.Block( evar.intersect( freeVariables( And( x, y ) ) ).toSeq, And( x, y ) )
    case And( x, y ) =>
      Ex.Block( evar.intersect( freeVariables( And( x, y ) ) ).toSeq, And( x, y ) )
    case Atom( _, _ ) =>
      Ex.Block( evar.intersect( freeVariables( f ) ).toSeq, f )
    case Top() => Top()
    case Bottom() => Bottom()
    case Neg( Neg( x ) ) => QuantIntroExists( x, evar )
    case Neg( Atom( _, _ ) ) => Ex.Block( evar.intersect( freeVariables( f ) ).toSeq, f )
    case Neg( x ) => Neg( QuantIntroExists( x, evar ) )
  }

  /**
   * Adds the characteristic formulas to the context as reduction rules,
   * grouped by their head predicate, quantifying with ∀ or ∃ depending on
   * `qType`.
   */
  def add( chF: Map[Formula, ( Formula, Set[Var] )], qType: QuantifierC )( implicit ctx: MutableContext ): Unit = {
    import gapt.proofs.context.update.ReductionRuleUpdate.reductionRulesAsReductionRuleUpdate
    val definitions: Map[Const, List[( Atom, Formula )]] = {
      for ( ( f @ Atom( newEx, _ ), ( form, vars ) ) <- chF.toList )
        yield ( newEx.asInstanceOf[Const], ( f,
        if ( qType.equals( ForallC ) ) QuantIntroForAll( form, vars )
        else QuantIntroExists( form, vars ) ) )
    }.groupBy( _._1 ).map { case ( pred, eqns ) => ( pred, eqns.map( _._2 ) ) }
    // First declare the new predicate constants, then register the rules.
    definitions.keys.foreach { ctx += _ }
    ctx += definitions.values.flatten.map {
      case ( lhs, rhs ) => ReductionRule( lhs, rhs )
    }.toSeq
  }

  // Generates a unique name per (symbol, cut-config): the config's sides are
  // encoded as T/F strings after "S" (succedent) and "A" (antecedent).
  private def structNames( sss: Map[CLS, ( Struct, Set[Var] )] ): Map[( String, Sequent[Boolean] ), String] =
    sss.keySet.map {
      case CLS( Apps( Const( name, _, _ ), _ ), cc ) =>
        val cutConfigChars = cc.map( b => if ( b ) 'T' else 'F' )
        ( ( name, cc ), name + "S" + cutConfigChars.succedent.mkString + "A" + cutConfigChars.antecedent.mkString )
    }.toMap

  // Folds a struct with the given transformer, threading the name map so
  // that embedded CLS terms can be resolved.
  private object constructingForm extends StructVisitor[Formula, Map[( String, Sequent[Boolean] ), String]] {
    def apply( struct: Struct, names: Map[( String, Sequent[Boolean] ), String],
               stT: StructTransformer[Formula, Map[( String, Sequent[Boolean] ), String]] ): Formula =
      recurse( struct, stT, names )
  }
}
| gapt/gapt | core/src/main/scala/gapt/proofs/ceres/CharacteristicFormula.scala | Scala | gpl-3.0 | 9,361 |
package hexico.meeple.ui
import scala.swing._
import scala.util.Random
import scala.collection.mutable
import hexico.meeple.game.{Board, Tilesets}
/**
 * Main game window: a board panel in a scroll pane on the left and a side
 * menu with the tile preview and remaining-tile counter on the right.
 * Placing tiles is driven by TileClicked events from the board panel.
 */
class GameWindow extends Frame {
  // Pixel size of one tile edge.
  val TILE_SIZE = 50

  val random = new Random()
  // Draw pile: the fixed start tile(s) first, then the shuffled base set.
  val tileset = new mutable.Queue ++ Tilesets.START ++ random.shuffle(Tilesets.BASE)
  val board: Board = new Board
  val boardPanel = new BoardPanel(board, TILE_SIZE)
  val previewPanel = new TilePreviewPanel(TILE_SIZE)
  // "-1" because at this point the queue still contains the start tile,
  // which does not count as a playable remaining tile.
  val stateLabel = new Label(s"${tileset.size - 1} tiles remaining")

  // Add the start tile
  board.addTile(tileset.dequeue(), (0, 0))
  // Put the next tile into the preview panel
  previewPanel.tile = tileset.dequeue()
  boardPanel.nextTile = Some(previewPanel.tile)

  // Listen for tile clicks and place tiles
  listenTo(boardPanel)
  reactions += {
    case TileClicked(x, y) => {
      // Only place the tile if the board accepts it at (x, y).
      // NOTE(review): selectedTile presumably yields the previewed tile in its
      // chosen orientation — confirm in TilePreviewPanel.
      for (tile <- previewPanel.selectedTile if board.valid(tile, (x, y))) {
        board.addTile(tile, (x, y))
        if (tileset.size == 0) {
          // Draw pile exhausted: clear the preview and end the game.
          stateLabel.text = "Game over!"
          previewPanel.tile = null
          boardPanel.nextTile = None
        } else {
          stateLabel.text = s"${tileset.size} tiles remaining"
          previewPanel.tile = tileset.dequeue()
          boardPanel.nextTile = Some(previewPanel.tile)
        }
      }
    }
  }

  title = "Meeple: Game"
  preferredSize = new Dimension(800, 600)

  // Right-hand side menu: options checkbox, tile preview and status label,
  // stacked at the top with glue below.
  val menu = new GridBagPanel {
    val showMoves = new CheckBox("Show possible moves")
    val c = new Constraints
    c.anchor = GridBagPanel.Anchor.North
    c.gridx = 0
    layout(showMoves) = c
    layout(previewPanel) = c
    layout(stateLabel) = c
    c.weighty = 1
    layout(Swing.VGlue) = c
  }

  contents = new BorderPanel {
    // The board sits centered inside a scrollable panel.
    layout(new ScrollPane {
      contents = new GridBagPanel {
        val c = new Constraints
        c.anchor = GridBagPanel.Anchor.Center
        layout(boardPanel) = c
      }
    }) = BorderPanel.Position.Center
    layout(menu) = BorderPanel.Position.East
  }
}
| alanbriolat/meeple | src/main/scala/hexico/meeple/ui/GameWindow.scala | Scala | gpl-3.0 | 1,985 |
package org.denigma.nlp.annotator
import org.denigma.brat._
import org.denigma.nlp.messages.Annotations
import org.scalajs.dom
import scalajs.js.JSConverters._
import scala.scalajs.js
import scala.scalajs.js.Array
object ReachBratModel {
  /** Factory shortcut for `new ReachBratModel()`. */
  def apply(): ReachBratModel = new ReachBratModel()
}
/**
 * Builds brat visualization data (collection + document data) for mentions
 * produced by the Reach NLP system: entity/event type palettes in
 * `bioColData`, and conversion of Reach mentions into brat entities,
 * triggers, events and comments in `docData`.
 */
class ReachBratModel {
  /** Event type with a single Theme role (used for regulation-style events). */
  protected def regulation(tp: String, labels: List[String]) =
    EventType(tp, labels,
      List(
        LabeledType("Theme", List("Theme", "Th"))
      ))

  /** Event type with a single Theme role (gene-expression family). */
  protected def geneExpression(tp: String, labels: List[String]) = EventType(tp, labels,
    List(
      LabeledType("Theme", List("Theme", "Th"))
    )
  )

  /** Event type with only a Theme role. */
  protected def justEvent(tp: String, labels: List[String]) = EventType(tp, labels,
    List(
      LabeledType("Theme", List("Theme", "Th"))
    )
  )

  /** Event type with Theme, Cause and Site roles (modification events). */
  protected def eventWithCauseAndSite(tp: String, labels: List[String]) = EventType(tp, labels,
    List(
      LabeledType("Theme", List("Theme", "Th")),
      LabeledType("Cause", List("Cause", "Ca")),
      LabeledType("Site", List("Site", "Si"))
    )
  )

  /** Event type with Theme and Site roles. */
  protected def eventWithSite(tp: String, labels: List[String]) = EventType(tp, labels,
    List(
      LabeledType("Theme", List("Theme", "Th")),
      LabeledType("Site", List("Site", "Si"))
    )
  )

  /** Event type with Theme and Product roles. */
  protected def eventWithProduct(tp: String, labels: List[String]) = EventType(tp, labels,
    List(
      LabeledType("Theme", List("Theme", "Th")),
      LabeledType("Product", List("Product", "Prod", "Pr"))
    )
  )

  /** Assembles the brat collection data: entity palette, mention attributes,
   * relations and the full set of biological event types. */
  protected def bioColData() = {
    val types = List(
      new EntityType("Cellular_component", List("Cellular_component", "CellComp", "CC"), "lightgreen", "darken"),
      new EntityType("Simple_chemical", List("Simple_chemical", "Chemical", "Chem"), "pink", "darken"),
      new EntityType("Site", List("Site", "Si"), "gold", "darken"),
      new EntityType("Gene_or_gene_product", List("Gene_or_gene_product", "Gene_GP", "GGP"), "blue", "darken"),
      new EntityType("Protein", List("Protein", "Pro", "P"), "violet", "darken"),
      new EntityType("Complex", List("Protein_Complex", "Complex", "Cplx"), "navy", "darken")
    )
    val attributes = List[EntityAttributeType]( //TODO: fix FORMAT
      new EntityAttributeType("Origin", Map("Target" -> "Rulename")),
      new EntityAttributeType("Negation", Map.empty),
      new EntityAttributeType("Speculation", Map.empty)
    )
    /*
     * type: Origin
     args: [ Target, Rulename ]
     type: Negation
     args: none (attribute present or absent)
     type: Speculation
     args: none (attribute present or absent)
     * */
    val relations = List[RelationType](
      RelationType("Equiv", List("Equivalent", "Equiv", "Eq"))
    )
    val regulationTypes: List[EventType] = List(
      regulation("Regulation", List("Regulation", "Regulat", "Reg")),
      regulation("Positive_regulation", List("Positive_regulation", "+Regulation", "+Reg")),
      regulation("Activation", List("Activation", "Activ", "+Act")),
      regulation("Negative_regulation", List("Negative_regulation", "-Regulation", "-Reg")),
      regulation("Inactivation", List("Inactivation", "Inactiv", "-Act"))
    )
    val geneExpressions: List[EventType] = List(
      geneExpression("Gene_expression", List("Gene_expression", "Gene_expr", "GeneX")),
      geneExpression("Transcription", List("Transcription", "Transcript", "Tsc")),
      geneExpression("Translation", List("Translation", "Translate", "Tsl"))
    )
    val conversions: List[EventType] = List(
      eventWithProduct("Conversion", List("Conversion", "Conver", "Conv")),
      eventWithCauseAndSite("Phosphorylation", List("Phosphorylation", "Phosphor", "+Phos")),
      eventWithCauseAndSite("Dephosphorylation", List("Dephosphorylation", "Dephos", "-Phos")),
      eventWithCauseAndSite("Acetylation", List("Acetylation", "Acetyl", "+Acet")),
      eventWithCauseAndSite("Deacetylation", List("Deacetylation", "Deacetyl", "-Acet")),
      eventWithCauseAndSite("Glycosylation", List("Glycosylation", "Glycosyl", "+Glycos")),
      eventWithCauseAndSite("Deglycosylation", List("Deglycosylation", "Deglycos", "-Glycos")),
      eventWithCauseAndSite("Hydroxylation", List("Hydroxylation", "Hydroxyl", "+Hydr")),
      eventWithCauseAndSite("Dehydroxylation", List("Dehydroxylation", "Dehydrox", "-Hydr")),
      eventWithCauseAndSite("Methylation", List("Methylation", "Methyl", "+Meth")),
      eventWithCauseAndSite("Demethylation", List("Demethylation", "Demethyl", "-Meth")),
      eventWithCauseAndSite("Ubiquitination", List("Ubiquitination", "Ubiquit", "+Ubiq")),
      eventWithCauseAndSite("Deubiquitination", List("Deubiquitination", "Deubiq", "-Ubiq")),
      eventWithCauseAndSite("DNA_methylation", List("DNA_methylation", "DNA_methyl", "+DNAmeth")),
      eventWithCauseAndSite("DNA_demethylation", List("DNA_demethylation", "DNA_demeth", "-DNAmeth")),
      justEvent("Degradation", List("Degradation", "Degrade", "Deg")),
      eventWithCauseAndSite("Binding", List("Binding", "Bind")),
      eventWithProduct("Dissociation", List("Dissociation", "Dissoc")),
      EventType("Localization", List("Localization", "Local", "Loc"),
        List(
          LabeledType("Theme", List("Theme", "Th")),
          LabeledType("AtLoc", List("AtLoc", "At")),
          LabeledType("FromLoc", List("FromLoc", "From")),
          LabeledType("ToLoc", List("ToLoc", "To"))
        )),
      eventWithSite("Protein_with_site", List("Protein_with_site", "Pro_w_site", "PWS")),
      EventType("Transport", List("Transport"),
        List(
          LabeledType("Theme", List("Theme", "Th")),
          LabeledType("FromLoc", List("FromLoc", "From")),
          LabeledType("ToLoc", List("ToLoc", "To"))
        ))
    )
    val eventTypes = regulationTypes ++ geneExpressions ++ conversions
    ColData(types, relationTypes = relations, attributes = attributes, events = eventTypes)
  }

  /** Converts a Reach event mention into a brat event, resolving its trigger
   * and argument mentions through the id map (missing triggers are logged to
   * the browser console and mapped to the empty id). */
  protected def eventMention(doc: Annotations.Document, mention: Annotations.CorefEventMention, id: String, mentions: Map[Annotations.Mention, String]): BratEvent = {
    val trig = mentions.getOrElse(mention.trigger, {
      dom.console.error("cannot find mention for trigger " + mention.trigger)
      ""
    })
    // Keep only arguments whose mention has an id; others are silently dropped.
    val args = mention.arguments.flatMap {
      case (key, vals) =>
        vals.collect {
          case v if mentions.contains(v) =>
            key -> mentions(v)
        }
    }.toList
    //println("ARGUMENTS ARE: "+args)
    BratEvent(id, trig, args)
  }

  /** Converts a (non-event) mention into a brat entity spanning its text. */
  protected def entityMention(doc: Annotations.Document, mention: Annotations.Mention, id: String, mentions: Map[Annotations.Mention, String]): Entity = {
    val pos = mention.span //doc.position(mention)
    val ent = Entity(id, mention.label, List(pos.start -> pos.end))
    ent
  }

  /** Builds the per-document brat data: partitions the mentions into
   * entities, event triggers and events (creating trigger entities on
   * demand), and attaches a comment per mention with its rule and label. */
  def docData(doc: Annotations.Document, mentions: Map[Annotations.Mention, String]): DocData = {
    val (
      entities: Map[String, Entity],
      triggers: Map[String, Entity],
      events: Map[String, BratEvent]
    ) = mentions.foldLeft((
      Map.empty[String, Entity],
      Map.empty[String, Entity],
      Map.empty[String, BratEvent])
    ) {
      // Duplicate event id: keep the first occurrence.
      case ((ents, trigs, evs), (mention: Annotations.CorefEventMention, id)) if evs.contains(id) => (ents, trigs, evs)
      // New event: make sure its trigger exists as a trigger entity
      // (promoting an existing plain entity if necessary).
      case ((ents, trigs, evs), (mention: Annotations.CorefEventMention, id)) =>
        val ev = eventMention(doc, mention, id, mentions)
        if (trigs.contains(ev.trigger)) (ents, trigs, evs.updated(id, ev)) else
        if (ents.contains(ev.trigger)) (ents, trigs.updated(ev.trigger, ents(ev.trigger)), evs.updated(id, ev)) else
        {
          val tr = entityMention(doc, mention, ev.trigger, mentions)
          (ents, trigs.updated(ev.trigger, tr), evs.updated(id, ev))
        }
      // Mention whose id is already a trigger: skip (do not duplicate as entity).
      case ((ents, trigs, evs), (mention, id)) if trigs.contains(id) =>
        //(ents.updated(id, trigs(id)), trigs, evs)
        (ents, trigs, evs)
      // Plain entity mention.
      case ((ents, trigs, evs), (mention, id)) =>
        val ent = entityMention(doc, mention, id, mentions)
        (ents.updated(id, ent), trigs, evs)
    }
    val (entList, trigList, eventList) = (entities.values.toList, triggers.values.toList, events.values.toList)
    //val all: List[BratObject] = entList++trigList++eventList
    // One brat comment per mention: [id, rule that found it, label].
    val comments = mentions.map(m => js.Array(m._2, m._1.foundBy, m._1.label)).toList
    DocData(doc.text, entities = entList, trigs = trigList, events = eventList, comms = comments)
  }

  // Collection data is immutable; build it once on first access.
  lazy val colData: ColData = bioColData()
}
| antonkulaga/bio-nlp | annotator/js/src/main/scala/org.denigma/nlp/annotator/ReachBratModel.scala | Scala | mpl-2.0 | 8,420 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import java.sql.{Date, Timestamp}
import java.text.{DateFormat, SimpleDateFormat}
import java.util.{Calendar, Locale, TimeZone}
import java.util.concurrent.ConcurrentHashMap
import java.util.function.{Function => JFunction}
import javax.xml.bind.DatatypeConverter
import scala.annotation.tailrec
import org.apache.spark.unsafe.types.UTF8String
/**
* Helper functions for converting between internal and external date and time representations.
* Dates are exposed externally as java.sql.Date and are represented internally as the number of
* dates since the Unix epoch (1970-01-01). Timestamps are exposed externally as java.sql.Timestamp
* and are stored internally as longs, which are capable of storing timestamps with microsecond
* precision.
*/
object DateTimeUtils {
// we use Int and Long internally to represent [[DateType]] and [[TimestampType]]:
// a date is the number of days since the Unix epoch, a timestamp the number of
// microseconds since the Unix epoch.
type SQLDate = Int
type SQLTimestamp = Long
// see http://stackoverflow.com/questions/466321/convert-unix-timestamp-to-julian
// it's 2440587.5, rounding up to compatible with Hive
final val JULIAN_DAY_OF_EPOCH = 2440588
final val SECONDS_PER_DAY = 60 * 60 * 24L
final val MICROS_PER_SECOND = 1000L * 1000L
final val MILLIS_PER_SECOND = 1000L
final val NANOS_PER_SECOND = MICROS_PER_SECOND * 1000L
final val MICROS_PER_DAY = MICROS_PER_SECOND * SECONDS_PER_DAY
final val MILLIS_PER_DAY = SECONDS_PER_DAY * 1000L
// number of days in 400 years (the Gregorian calendar repeats every 400 years)
final val daysIn400Years: Int = 146097
// number of days between 1.1.1970 and 1.1.2001
final val to2001 = -11323
// this is year -17999, calculation: 50 * daysIn400Years days before 1.1.2001
final val YearZero = -17999
// number of days between 1.1.1970 and 1.1.-17999 (the artificial "year zero")
final val toYearZero = to2001 + 7304850
final val TimeZoneGMT = TimeZone.getTimeZone("GMT")
// months (1-based) that have 31 days; used by isInvalidDate
final val MonthOf31Days = Set(1, 3, 5, 7, 8, 10, 12)
// name of the option used to pass a time zone id to data sources
val TIMEZONE_OPTION = "timeZone"
def defaultTimeZone(): TimeZone = TimeZone.getDefault()
// Reuse the Calendar object in each thread as it is expensive to create in each method call.
private val threadLocalGmtCalendar = new ThreadLocal[Calendar] {
  override protected def initialValue: Calendar = {
    Calendar.getInstance(TimeZoneGMT)
  }
}
// `SimpleDateFormat` is not thread-safe.
private val threadLocalTimestampFormat = new ThreadLocal[DateFormat] {
  override def initialValue(): SimpleDateFormat = {
    new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.US)
  }
}
/** Returns this thread's timestamp formatter, re-targeted at the given time zone. */
def getThreadLocalTimestampFormat(timeZone: TimeZone): DateFormat = {
  val sdf = threadLocalTimestampFormat.get()
  sdf.setTimeZone(timeZone)
  sdf
}
// `SimpleDateFormat` is not thread-safe.
private val threadLocalDateFormat = new ThreadLocal[DateFormat] {
  override def initialValue(): SimpleDateFormat = {
    new SimpleDateFormat("yyyy-MM-dd", Locale.US)
  }
}
/** Returns this thread's date formatter, re-targeted at the JVM default time zone. */
def getThreadLocalDateFormat(): DateFormat = {
  val sdf = threadLocalDateFormat.get()
  sdf.setTimeZone(defaultTimeZone())
  sdf
}
// `TimeZone.getTimeZone` is relatively expensive, so resolved zones are cached by id.
private val timeZoneCache = new ConcurrentHashMap[String, TimeZone]
private val resolveTimeZone = new JFunction[String, TimeZone] {
  override def apply(timeZoneId: String): TimeZone = TimeZone.getTimeZone(timeZoneId)
}
/** Returns the `TimeZone` for the given zone id, resolving it at most once per id. */
def getTimeZone(timeZoneId: String): TimeZone =
  timeZoneCache.computeIfAbsent(timeZoneId, resolveTimeZone)
/** Creates a strict (non-lenient) `SimpleDateFormat` for the given pattern and time zone. */
def newDateFormat(formatString: String, timeZone: TimeZone): DateFormat = {
  val sdf = new SimpleDateFormat(formatString, Locale.US)
  sdf.setTimeZone(timeZone)
  // Enable strict parsing, if the input date/format is invalid, it will throw an exception.
  // e.g. to parse invalid date '2016-13-12', or '2016-01-12' with invalid format 'yyyy-aa-dd',
  // an exception will be thrown.
  sdf.setLenient(false)
  sdf
}
// we should use the exact day as Int, for example, (year, month, day) -> day
/** Converts UTC epoch milliseconds to days since epoch in the JVM default time zone. */
def millisToDays(millisUtc: Long): SQLDate = {
  millisToDays(millisUtc, defaultTimeZone())
}
/** Converts UTC epoch milliseconds to days since epoch in the given time zone. */
def millisToDays(millisUtc: Long, timeZone: TimeZone): SQLDate = {
  // SPARK-6785: use Math.floor so negative number of days (dates before 1970)
  // will correctly work as input for function toJavaDate(Int)
  val millisLocal = millisUtc + timeZone.getOffset(millisUtc)
  Math.floor(millisLocal.toDouble / MILLIS_PER_DAY).toInt
}
// reverse of millisToDays
def daysToMillis(days: SQLDate): Long = {
  daysToMillis(days, defaultTimeZone())
}
// reverse of millisToDays, in the given time zone
def daysToMillis(days: SQLDate, timeZone: TimeZone): Long = {
  val millisLocal = days.toLong * MILLIS_PER_DAY
  // subtract the zone offset of the *local* wall-clock millis (handles DST edges)
  millisLocal - getOffsetFromLocalMillis(millisLocal, timeZone)
}
/** Formats a date (days since epoch) as 'yyyy-MM-dd' in the default time zone. */
def dateToString(days: SQLDate): String =
  getThreadLocalDateFormat.format(toJavaDate(days))
// Converts Timestamp to string according to Hive TimestampWritable convention.
def timestampToString(us: SQLTimestamp): String = {
  timestampToString(us, defaultTimeZone())
}
// Converts Timestamp to string according to Hive TimestampWritable convention.
def timestampToString(us: SQLTimestamp, timeZone: TimeZone): String = {
  val ts = toJavaTimestamp(us)
  val timestampString = ts.toString
  val timestampFormat = getThreadLocalTimestampFormat(timeZone)
  val formatted = timestampFormat.format(ts)
  // Timestamp.toString always carries a fractional part (".0" when zero); append the
  // fractional digits only when they are non-trivial, mirroring Hive's formatting.
  if (timestampString.length > 19 && timestampString.substring(19) != ".0") {
    formatted + timestampString.substring(19)
  } else {
    formatted
  }
}
/**
 * Parses a date/time string into a java.util.Date, accepting JDBC escape strings
 * ('yyyy-mm-dd', 'yyyy-mm-dd hh:mm:ss') and ISO8601 forms (containing 'T'),
 * including the non-standard "...GMT+hh:mm" zone spelling.
 */
@tailrec
def stringToTime(s: String): java.util.Date = {
  val indexOfGMT = s.indexOf("GMT")
  if (indexOfGMT != -1) {
    // ISO8601 with a weird time zone specifier (2000-01-01T00:00GMT+01:00)
    val s0 = s.substring(0, indexOfGMT)
    val s1 = s.substring(indexOfGMT + 3)
    // Mapped to 2000-01-01T00:00+01:00
    stringToTime(s0 + s1)
  } else if (!s.contains('T')) {
    // JDBC escape string
    if (s.contains(' ')) {
      Timestamp.valueOf(s)
    } else {
      Date.valueOf(s)
    }
  } else {
    DatatypeConverter.parseDateTime(s).getTime()
  }
}
/**
 * Returns the number of days since epoch from java.sql.Date.
 */
def fromJavaDate(date: Date): SQLDate = {
  millisToDays(date.getTime)
}
/**
 * Returns a java.sql.Date from number of days since epoch.
 */
def toJavaDate(daysSinceEpoch: SQLDate): Date = {
  new Date(daysToMillis(daysSinceEpoch))
}
/**
 * Returns a java.sql.Timestamp from number of micros since epoch.
 */
def toJavaTimestamp(us: SQLTimestamp): Timestamp = {
  // setNanos() will overwrite the millisecond part, so the milliseconds should be
  // cut off at seconds; setNanos() also cannot accept a negative value, hence the
  // borrow from the seconds part for timestamps before the epoch.
  val rawSeconds = us / MICROS_PER_SECOND
  val rawMicros = us % MICROS_PER_SECOND
  val (seconds, micros) =
    if (rawMicros < 0) (rawSeconds - 1, rawMicros + MICROS_PER_SECOND) else (rawSeconds, rawMicros)
  val ts = new Timestamp(seconds * 1000)
  ts.setNanos(micros.toInt * 1000)
  ts
}
/**
 * Returns the number of micros since epoch from java.sql.Timestamp; a null input maps to 0L.
 */
def fromJavaTimestamp(t: Timestamp): SQLTimestamp = {
  if (t == null) {
    0L
  } else {
    // getTime already contains the integral-millisecond part of the nanos field,
    // so only the sub-millisecond remainder of getNanos is added back (as micros).
    t.getTime() * 1000L + (t.getNanos().toLong / 1000) % 1000L
  }
}
/**
 * Returns the number of microseconds since epoch from Julian day
 * and nanoseconds in a day
 */
def fromJulianDay(day: Int, nanoseconds: Long): SQLTimestamp = {
  // use Long to avoid rounding errors
  val seconds = (day - JULIAN_DAY_OF_EPOCH).toLong * SECONDS_PER_DAY
  seconds * MICROS_PER_SECOND + nanoseconds / 1000L
}
/**
 * Returns Julian day and nanoseconds in a day from the number of microseconds
 *
 * Note: support timestamp since 4717 BC (without negative nanoseconds, compatible with Hive).
 */
def toJulianDay(us: SQLTimestamp): (Int, Long) = {
  // shift the epoch so the division yields a non-negative day for supported inputs
  val julian_us = us + JULIAN_DAY_OF_EPOCH * MICROS_PER_DAY
  val day = julian_us / MICROS_PER_DAY
  val micros = julian_us % MICROS_PER_DAY
  (day.toInt, micros * 1000L)
}
/*
 * Converts the timestamp to milliseconds since epoch. In Spark timestamp values have
 * microseconds precision, so this conversion is lossy.
 */
def toMillis(us: SQLTimestamp): Long = {
  // When the timestamp is negative i.e. before 1970, we need to adjust the milliseconds portion.
  // Example - 1965-01-01 10:11:12.123456 is represented as (-157700927876544) in micro precision.
  // In millis precision the above needs to be represented as (-157700927877).
  // Math.floorDiv rounds toward negative infinity like Math.floor, but stays in Long
  // arithmetic, avoiding the precision loss a Double incurs for |us| > 2^53.
  Math.floorDiv(us, MILLIS_PER_SECOND)
}
/*
 * Converts milliseconds since epoch to SQLTimestamp (microseconds since epoch).
 */
def fromMillis(millis: Long): SQLTimestamp = {
  millis * 1000L
}
/**
 * Parses a given UTF8 date string to a corresponding [[Long]] value.
 * The return type is [[Option]] in order to distinguish between 0L and null. The following
 * formats are allowed:
 *
 * `yyyy`
 * `yyyy-[m]m`
 * `yyyy-[m]m-[d]d`
 * `yyyy-[m]m-[d]d `
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `yyyy-[m]m-[d]d [h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `yyyy-[m]m-[d]dT[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]Z`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
 * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
 */
def stringToTimestamp(s: UTF8String): Option[SQLTimestamp] = {
  stringToTimestamp(s, defaultTimeZone())
}
def stringToTimestamp(s: UTF8String, timeZone: TimeZone): Option[SQLTimestamp] = {
  if (s == null) {
    return None
  }
  // Sign byte ('+'/'-') of an explicit zone offset; None when no zone was given.
  var tz: Option[Byte] = None
  // Parsed fields: year, month, day, hour, minute, second, microseconds, tz hour, tz minute.
  val segments: Array[Int] = Array[Int](1, 1, 1, 0, 0, 0, 0, 0, 0)
  // i = index of the segment currently being accumulated; j = byte position in the input.
  var i = 0
  var currentSegmentValue = 0
  val bytes = s.getBytes
  var j = 0
  // Number of fractional-second digits seen; used to scale to exactly 6 (microseconds).
  var digitsMilli = 0
  var justTime = false
  while (j < bytes.length) {
    val b = bytes(j)
    val parsedValue = b - '0'.toByte
    if (parsedValue < 0 || parsedValue > 9) {
      // Non-digit byte: a separator whose meaning depends on the current segment.
      if (j == 0 && b == 'T') {
        // Leading 'T': no date part, skip straight to the hour segment.
        justTime = true
        i += 3
      } else if (i < 2) {
        if (b == '-') {
          if (i == 0 && j != 4) {
            // year should have exact four digits
            return None
          }
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
        } else if (i == 0 && b == ':') {
          // ':' before any '-': the input is a bare time, the digits so far are the hour.
          justTime = true
          segments(3) = currentSegmentValue
          currentSegmentValue = 0
          i = 4
        } else {
          return None
        }
      } else if (i == 2) {
        if (b == ' ' || b == 'T') {
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
        } else {
          return None
        }
      } else if (i == 3 || i == 4) {
        if (b == ':') {
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
        } else {
          return None
        }
      } else if (i == 5 || i == 6) {
        if (b == 'Z') {
          // 'Z' denotes UTC; encoded as a '+' sign (byte 43) with a 00:00 offset.
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
          tz = Some(43)
        } else if (b == '-' || b == '+') {
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
          tz = Some(b)
        } else if (b == '.' && i == 5) {
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
        } else {
          return None
        }
        if (i == 6  && b != '.') {
          // No fractional part: jump past the microseconds segment to the tz-hour segment.
          i += 1
        }
      } else {
        if (b == ':' || b == ' ') {
          segments(i) = currentSegmentValue
          currentSegmentValue = 0
          i += 1
        } else {
          return None
        }
      }
    } else {
      if (i == 6) {
        digitsMilli += 1
      }
      currentSegmentValue = currentSegmentValue * 10 + parsedValue
    }
    j += 1
  }
  segments(i) = currentSegmentValue
  if (!justTime && i == 0 && j != 4) {
    // year should have exact four digits
    return None
  }
  // Scale the fractional part up to exactly six digits (microseconds).
  while (digitsMilli < 6) {
    segments(6) *= 10
    digitsMilli += 1
  }
  // We are truncating the nanosecond part, which results in loss of precision
  while (digitsMilli > 6) {
    segments(6) /= 10
    digitsMilli -= 1
  }
  if (!justTime && isInvalidDate(segments(0), segments(1), segments(2))) {
    return None
  }
  // Range-check time-of-day, microseconds, and the explicit zone offset fields.
  if (segments(3) < 0 || segments(3) > 23 || segments(4) < 0 || segments(4) > 59 ||
    segments(5) < 0 || segments(5) > 59 || segments(6) < 0 || segments(6) > 999999 ||
    segments(7) < 0 || segments(7) > 23 || segments(8) < 0 || segments(8) > 59) {
    return None
  }
  // Without an explicit offset, interpret the fields in the caller-supplied time zone.
  val c = if (tz.isEmpty) {
    Calendar.getInstance(timeZone)
  } else {
    Calendar.getInstance(
      getTimeZone(f"GMT${tz.get.toChar}${segments(7)}%02d:${segments(8)}%02d"))
  }
  c.set(Calendar.MILLISECOND, 0)
  if (justTime) {
    c.set(Calendar.HOUR_OF_DAY, segments(3))
    c.set(Calendar.MINUTE, segments(4))
    c.set(Calendar.SECOND, segments(5))
  } else {
    c.set(segments(0), segments(1) - 1, segments(2), segments(3), segments(4), segments(5))
  }
  Some(c.getTimeInMillis * 1000 + segments(6))
}
/**
 * Parses a given UTF8 date string to a corresponding [[Int]] value.
 * The return type is [[Option]] in order to distinguish between 0 and null. The following
 * formats are allowed:
 *
 * `yyyy`,
 * `yyyy-[m]m`
 * `yyyy-[m]m-[d]d`
 * `yyyy-[m]m-[d]d `
 * `yyyy-[m]m-[d]d *`
 * `yyyy-[m]m-[d]dT*`
 */
def stringToDate(s: UTF8String): Option[SQLDate] = {
  if (s == null) {
    return None
  }
  // Parsed fields: year, month, day; missing month/day default to 1.
  val segments: Array[Int] = Array[Int](1, 1, 1)
  var i = 0
  var currentSegmentValue = 0
  val bytes = s.getBytes
  var j = 0
  // Stop at the first ' ' or 'T': any trailing time part is ignored.
  while (j < bytes.length && (i < 3 && !(bytes(j) == ' ' || bytes(j) == 'T'))) {
    val b = bytes(j)
    if (i < 2 && b == '-') {
      if (i == 0 && j != 4) {
        // year should have exact four digits
        return None
      }
      segments(i) = currentSegmentValue
      currentSegmentValue = 0
      i += 1
    } else {
      val parsedValue = b - '0'.toByte
      if (parsedValue < 0 || parsedValue > 9) {
        return None
      } else {
        currentSegmentValue = currentSegmentValue * 10 + parsedValue
      }
    }
    j += 1
  }
  if (i == 0 && j != 4) {
    // year should have exact four digits
    return None
  }
  segments(i) = currentSegmentValue
  if (isInvalidDate(segments(0), segments(1), segments(2))) {
    return None
  }
  // Dates are time-zone independent: always materialize in GMT.
  val c = threadLocalGmtCalendar.get()
  c.clear()
  c.set(segments(0), segments(1) - 1, segments(2), 0, 0, 0)
  c.set(Calendar.MILLISECOND, 0)
  Some((c.getTimeInMillis / MILLIS_PER_DAY).toInt)
}
/**
 * Return true if the (year, month, day) triple does not denote a valid calendar date.
 */
private def isInvalidDate(year: Int, month: Int, day: Int): Boolean = {
  // Reject anything outside the representable range up front.
  if (year < 0 || year > 9999 || month < 1 || month > 12 || day < 1 || day > 31) {
    true
  } else {
    // Largest valid day for this month (February depends on leap years).
    val maxDay =
      if (month == 2) {
        if (isLeapYear(year)) 29 else 28
      } else if (MonthOf31Days.contains(month)) {
        31
      } else {
        30
      }
    day > maxDay
  }
}
/**
 * Returns the microseconds since year zero (-17999) from microseconds since epoch.
 */
private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
  microsec + toYearZero * MICROS_PER_DAY
}
// Local (wall-clock) microseconds in the default time zone, relative to year zero.
private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
  localTimestamp(microsec, defaultTimeZone())
}
// Local (wall-clock) microseconds in the given time zone, relative to year zero.
private def localTimestamp(microsec: SQLTimestamp, timeZone: TimeZone): SQLTimestamp = {
  // TimeZone.getOffset takes and returns milliseconds, hence the /1000 and *1000L.
  absoluteMicroSecond(microsec) + timeZone.getOffset(microsec / 1000) * 1000L
}
/**
 * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
 */
def getHours(microsec: SQLTimestamp): Int = {
  ((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
 * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
 */
def getHours(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
  ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 3600) % 24).toInt
}
/**
 * Returns the minute value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getMinutes(microsec: SQLTimestamp): Int = {
  ((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
 * Returns the minute value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getMinutes(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
  ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 60) % 60).toInt
}
/**
 * Returns the second value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getSeconds(microsec: SQLTimestamp): Int = {
  ((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
}
/**
 * Returns the second value of a given timestamp value. The timestamp is expressed in
 * microseconds.
 */
def getSeconds(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
  ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND) % 60).toInt
}
/** Gregorian leap-year rule: every 4th year, except centuries not divisible by 400. */
private[this] def isLeapYear(year: Int): Boolean = {
  if (year % 100 == 0) year % 400 == 0 else year % 4 == 0
}
/**
 * Return the number of days since the start of 400 year period.
 * The second year of a 400 year period (year 1) starts on day 365.
 */
private[this] def yearBoundary(year: Int): Int = {
  // 365 days per year plus one leap day per 4 years, minus the century exceptions.
  val leapDays = year / 4 - year / 100 + year / 400
  365 * year + leapDays
}
/**
 * Calculates the number of years for the given number of days. This depends
 * on a 400 year period.
 * @param days days since the beginning of the 400 year period
 * @return (number of year, days in year)
 */
private[this] def numYears(days: Int): (Int, Int) = {
  // First guess, then correct it against the exact year boundary (leap days shift it).
  val year = days / 365
  val boundary = yearBoundary(year)
  if (days > boundary) (year, days - boundary) else (year - 1, days - yearBoundary(year - 1))
}
/**
 * Calculates the year and the number of the day in the year for the given
 * number of days. The given days is the number of days since 1.1.1970.
 *
 * The calculation uses the fact that the period 1.1.2001 until 31.12.2400 is
 * equals to the period 1.1.1601 until 31.12.2000.
 */
private[this] def getYearAndDayInYear(daysSince1970: SQLDate): (Int, Int) = {
  // add the difference (in days) between 1.1.1970 and the artificial year 0 (-17999)
  var daysSince1970Tmp = daysSince1970
  // Since Julian calendar was replaced with the Gregorian calendar,
  // the 10 days after Oct. 4 were skipped.
  // (1582-10-04) -141428 days since 1970-01-01
  if (daysSince1970 <= -141428) {
    daysSince1970Tmp -= 10
  }
  val daysNormalized = daysSince1970Tmp + toYearZero
  val numOfQuarterCenturies = daysNormalized / daysIn400Years
  val daysInThis400 = daysNormalized % daysIn400Years + 1
  val (years, dayInYear) = numYears(daysInThis400)
  // 2001 - 20000 = -17999 = YearZero: years are counted from the artificial year zero.
  val year: Int = (2001 - 20000) + 400 * numOfQuarterCenturies + years
  (year, dayInYear)
}
/**
 * Returns the 'day in year' value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getDayInYear(date: SQLDate): Int = {
  val (_, dayInYear) = getYearAndDayInYear(date)
  dayInYear
}
/**
 * Returns the year value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getYear(date: SQLDate): Int = {
  val (year, _) = getYearAndDayInYear(date)
  year
}
/**
 * Returns the quarter for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getQuarter(date: SQLDate): Int = {
  val (year, rawDayInYear) = getYearAndDayInYear(date)
  // Drop Feb 29 so the non-leap quarter boundaries (90/181/273) apply uniformly.
  val dayInYear = if (isLeapYear(year)) rawDayInYear - 1 else rawDayInYear
  if (dayInYear <= 90) 1
  else if (dayInYear <= 181) 2
  else if (dayInYear <= 273) 3
  else 4
}
/**
 * Split date (expressed in days since 1.1.1970) into four fields:
 * year, month (Jan is Month 1), dayInMonth, daysToMonthEnd (0 if it's last day of month).
 */
def splitDate(date: SQLDate): (Int, Int, Int, Int) = {
  var (year, dayInYear) = getYearAndDayInYear(date)
  val isLeap = isLeapYear(year)
  if (isLeap && dayInYear == 60) {
    // Feb 29 is handled specially; afterwards the non-leap cumulative day counts apply.
    (year, 2, 29, 0)
  } else {
    if (isLeap && dayInYear > 60) dayInYear -= 1
    // The thresholds below are cumulative month lengths of a non-leap year.
    if (dayInYear <= 181) {
      if (dayInYear <= 31) {
        (year, 1, dayInYear, 31 - dayInYear)
      } else if (dayInYear <= 59) {
        (year, 2, dayInYear - 31, if (isLeap) 60 - dayInYear else 59 - dayInYear)
      } else if (dayInYear <= 90) {
        (year, 3, dayInYear - 59, 90 - dayInYear)
      } else if (dayInYear <= 120) {
        (year, 4, dayInYear - 90, 120 - dayInYear)
      } else if (dayInYear <= 151) {
        (year, 5, dayInYear - 120, 151 - dayInYear)
      } else {
        (year, 6, dayInYear - 151, 181 - dayInYear)
      }
    } else {
      if (dayInYear <= 212) {
        (year, 7, dayInYear - 181, 212 - dayInYear)
      } else if (dayInYear <= 243) {
        (year, 8, dayInYear - 212, 243 - dayInYear)
      } else if (dayInYear <= 273) {
        (year, 9, dayInYear - 243, 273 - dayInYear)
      } else if (dayInYear <= 304) {
        (year, 10, dayInYear - 273, 304 - dayInYear)
      } else if (dayInYear <= 334) {
        (year, 11, dayInYear - 304, 334 - dayInYear)
      } else {
        (year, 12, dayInYear - 334, 365 - dayInYear)
      }
    }
  }
}
/**
 * Returns the month value for the given date. The date is expressed in days
 * since 1.1.1970. January is month 1.
 */
def getMonth(date: SQLDate): Int = {
  var (year, dayInYear) = getYearAndDayInYear(date)
  if (isLeapYear(year)) {
    // Normalize leap years: Feb 29 maps to month 2, later days shift back by one
    // so the non-leap cumulative thresholds below apply.
    if (dayInYear == 60) {
      return 2
    } else if (dayInYear > 60) {
      dayInYear = dayInYear - 1
    }
  }
  // Thresholds are cumulative month lengths of a non-leap year.
  if (dayInYear <= 31) {
    1
  } else if (dayInYear <= 59) {
    2
  } else if (dayInYear <= 90) {
    3
  } else if (dayInYear <= 120) {
    4
  } else if (dayInYear <= 151) {
    5
  } else if (dayInYear <= 181) {
    6
  } else if (dayInYear <= 212) {
    7
  } else if (dayInYear <= 243) {
    8
  } else if (dayInYear <= 273) {
    9
  } else if (dayInYear <= 304) {
    10
  } else if (dayInYear <= 334) {
    11
  } else {
    12
  }
}
/**
 * Returns the 'day of month' value for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getDayOfMonth(date: SQLDate): Int = {
  var (year, dayInYear) = getYearAndDayInYear(date)
  if (isLeapYear(year)) {
    // Normalize leap years: Feb 29 maps to day 29, later days shift back by one
    // so the non-leap cumulative thresholds below apply.
    if (dayInYear == 60) {
      return 29
    } else if (dayInYear > 60) {
      dayInYear = dayInYear - 1
    }
  }
  // Subtract the cumulative length of the preceding months (non-leap year).
  if (dayInYear <= 31) {
    dayInYear
  } else if (dayInYear <= 59) {
    dayInYear - 31
  } else if (dayInYear <= 90) {
    dayInYear - 59
  } else if (dayInYear <= 120) {
    dayInYear - 90
  } else if (dayInYear <= 151) {
    dayInYear - 120
  } else if (dayInYear <= 181) {
    dayInYear - 151
  } else if (dayInYear <= 212) {
    dayInYear - 181
  } else if (dayInYear <= 243) {
    dayInYear - 212
  } else if (dayInYear <= 273) {
    dayInYear - 243
  } else if (dayInYear <= 304) {
    dayInYear - 273
  } else if (dayInYear <= 334) {
    dayInYear - 304
  } else {
    dayInYear - 334
  }
}
/**
 * The number of days for each month (not leap year)
 */
private val monthDays = Array(31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)
/**
 * Returns the date value for the first day of the given month.
 * The month is expressed in months since year zero (17999 BC), starting from 0.
 */
private def firstDayOfMonth(absoluteMonth: Int): SQLDate = {
  val absoluteYear = absoluteMonth / 12
  var monthInYear = absoluteMonth - absoluteYear * 12
  var date = getDateFromYear(absoluteYear)
  // Months at index >= 2 come after February: account for the leap day.
  if (monthInYear >= 2 && isLeapYear(absoluteYear + YearZero)) {
    date += 1
  }
  // Add the lengths of all preceding months.
  while (monthInYear > 0) {
    date += monthDays(monthInYear - 1)
    monthInYear -= 1
  }
  date
}
/**
 * Returns the date value for January 1 of the given year.
 * The year is expressed in years since year zero (17999 BC), starting from 0.
 */
private def getDateFromYear(absoluteYear: Int): SQLDate = {
  val absoluteDays = (absoluteYear * 365 + absoluteYear / 400 - absoluteYear / 100
    + absoluteYear / 4)
  // Shift from the artificial year-zero origin back to the 1970 epoch.
  absoluteDays - toYearZero
}
/**
 * Add date and year-month interval.
 * Returns a date value, expressed in days since 1.1.1970.
 */
def dateAddMonths(days: SQLDate, months: Int): SQLDate = {
  val (year, monthInYear, dayOfMonth, daysToMonthEnd) = splitDate(days)
  val absoluteMonth = (year - YearZero) * 12 + monthInYear - 1 + months
  // Clamp at the calendar's representable lower bound.
  val nonNegativeMonth = if (absoluteMonth >= 0) absoluteMonth else 0
  val currentMonthInYear = nonNegativeMonth % 12
  val currentYear = nonNegativeMonth / 12
  // currentMonthInYear == 1 is February; add the leap day when applicable.
  val leapDay = if (currentMonthInYear == 1 && isLeapYear(currentYear + YearZero)) 1 else 0
  val lastDayOfMonth = monthDays(currentMonthInYear) + leapDay
  // Hive semantics: adding months to a month-end date yields the target month's end,
  // and day-of-month is capped at the target month's length.
  val currentDayInMonth = if (daysToMonthEnd == 0 || dayOfMonth >= lastDayOfMonth) {
    // last day of the month
    lastDayOfMonth
  } else {
    dayOfMonth
  }
  firstDayOfMonth(nonNegativeMonth) + currentDayInMonth - 1
}
/**
 * Add timestamp and full interval.
 * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
 */
def timestampAddInterval(start: SQLTimestamp, months: Int, microseconds: Long): SQLTimestamp = {
  timestampAddInterval(start, months, microseconds, defaultTimeZone())
}
/**
 * Add timestamp and full interval.
 * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
 */
def timestampAddInterval(
    start: SQLTimestamp,
    months: Int,
    microseconds: Long,
    timeZone: TimeZone): SQLTimestamp = {
  // Apply the month part on the date component (preserving wall-clock time-of-day),
  // then add the remaining microseconds.
  val days = millisToDays(start / 1000L, timeZone)
  val newDays = dateAddMonths(days, months)
  start +
    daysToMillis(newDays, timeZone) * 1000L - daysToMillis(days, timeZone) * 1000L +
    microseconds
}
/**
 * Returns number of months between time1 and time2. time1 and time2 are expressed in
 * microseconds since 1.1.1970.
 *
 * If time1 and time2 having the same day of month, or both are the last day of month,
 * it returns an integer (time under a day will be ignored).
 *
 * Otherwise, the difference is calculated based on 31 days per month, and rounding to
 * 8 digits.
 */
def monthsBetween(time1: SQLTimestamp, time2: SQLTimestamp): Double = {
  monthsBetween(time1, time2, defaultTimeZone())
}
/**
 * Returns number of months between time1 and time2. time1 and time2 are expressed in
 * microseconds since 1.1.1970.
 *
 * If time1 and time2 having the same day of month, or both are the last day of month,
 * it returns an integer (time under a day will be ignored).
 *
 * Otherwise, the difference is calculated based on 31 days per month, and rounding to
 * 8 digits.
 */
def monthsBetween(time1: SQLTimestamp, time2: SQLTimestamp, timeZone: TimeZone): Double = {
  val millis1 = time1 / 1000L
  val millis2 = time2 / 1000L
  val date1 = millisToDays(millis1, timeZone)
  val date2 = millisToDays(millis2, timeZone)
  val (year1, monthInYear1, dayInMonth1, daysToMonthEnd1) = splitDate(date1)
  val (year2, monthInYear2, dayInMonth2, daysToMonthEnd2) = splitDate(date2)
  val months1 = year1 * 12 + monthInYear1
  val months2 = year2 * 12 + monthInYear2
  // Same day-of-month, or both month-ends: whole-month difference, time-of-day ignored.
  if (dayInMonth1 == dayInMonth2 || ((daysToMonthEnd1 == 0) && (daysToMonthEnd2 == 0))) {
    return (months1 - months2).toDouble
  }
  // milliseconds is enough for 8 digits precision on the right side
  val timeInDay1 = millis1 - daysToMillis(date1, timeZone)
  val timeInDay2 = millis2 - daysToMillis(date2, timeZone)
  val timesBetween = (timeInDay1 - timeInDay2).toDouble / MILLIS_PER_DAY
  val diff = (months1 - months2).toDouble + (dayInMonth1 - dayInMonth2 + timesBetween) / 31.0
  // rounding to 8 digits
  math.round(diff * 1e8) / 1e8
}
/*
 * Returns day of week from String. Starting from Thursday, marked as 0.
 * (Because 1970-01-01 is Thursday).
 */
def getDayOfWeekFromString(string: UTF8String): Int = {
  string.toString.toUpperCase(Locale.ROOT) match {
    case "TH" | "THU" | "THURSDAY" => 0
    case "FR" | "FRI" | "FRIDAY" => 1
    case "SA" | "SAT" | "SATURDAY" => 2
    case "SU" | "SUN" | "SUNDAY" => 3
    case "MO" | "MON" | "MONDAY" => 4
    case "TU" | "TUE" | "TUESDAY" => 5
    case "WE" | "WED" | "WEDNESDAY" => 6
    case _ => -1 // unrecognized day-of-week name
  }
}
/**
 * Returns the first date which is later than startDate and is of the given dayOfWeek.
 * dayOfWeek is an integer ranges in [0, 6], and 0 is Thu, 1 is Fri, etc,.
 */
def getNextDateForDayOfWeek(startDate: SQLDate, dayOfWeek: Int): SQLDate = {
  // (x % 7 + 7) % 7 is a non-negative modulo; the +1 makes the result strictly later.
  startDate + 1 + ((dayOfWeek - 1 - startDate) % 7 + 7) % 7
}
/**
 * Returns last day of the month for the given date. The date is expressed in days
 * since 1.1.1970.
 */
def getLastDayOfMonth(date: SQLDate): SQLDate = {
  val (_, _, _, daysToMonthEnd) = splitDate(date)
  date + daysToMonthEnd
}
// Truncation levels understood by truncDate; produced by parseTruncLevel.
private val TRUNC_TO_YEAR = 1
private val TRUNC_TO_MONTH = 2
private val TRUNC_INVALID = -1
/**
 * Returns the trunc date from original date and trunc level.
 * Trunc level should be generated using `parseTruncLevel()`, should only be 1 or 2.
 */
def truncDate(d: SQLDate, level: Int): SQLDate = level match {
  case TRUNC_TO_YEAR => d - DateTimeUtils.getDayInYear(d) + 1
  case TRUNC_TO_MONTH => d - DateTimeUtils.getDayOfMonth(d) + 1
  case _ =>
    // caller make sure that this should never be reached
    sys.error(s"Invalid trunc level: $level")
}
/**
 * Returns the truncate level, could be TRUNC_YEAR, TRUNC_MONTH, or TRUNC_INVALID,
 * TRUNC_INVALID means unsupported truncate level.
 */
def parseTruncLevel(format: UTF8String): Int =
  // A null format is treated the same as an unrecognized one.
  Option(format).map(_.toString.toUpperCase(Locale.ROOT)).fold(TRUNC_INVALID) {
    case "YEAR" | "YYYY" | "YY" => TRUNC_TO_YEAR
    case "MON" | "MONTH" | "MM" => TRUNC_TO_MONTH
    case _ => TRUNC_INVALID
  }
/**
 * Lookup the offset for given millis seconds since 1970-01-01 00:00:00 in given timezone.
 * TODO: Improve handling of normalization differences.
 * TODO: Replace with JSR-310 or similar system - see SPARK-16788
 */
private[sql] def getOffsetFromLocalMillis(millisLocal: Long, tz: TimeZone): Long = {
  // Start from the raw (non-DST) offset and iterate toward a fixed point.
  var guess = tz.getRawOffset
  // the actual offset should be calculated based on milliseconds in UTC
  val offset = tz.getOffset(millisLocal - guess)
  if (offset != guess) {
    guess = tz.getOffset(millisLocal - offset)
    if (guess != offset) {
      // fallback to do the reverse lookup using java.sql.Timestamp
      // this should only happen near the start or end of DST
      val days = Math.floor(millisLocal.toDouble / MILLIS_PER_DAY).toInt
      val year = getYear(days)
      val month = getMonth(days)
      val day = getDayOfMonth(days)
      var millisOfDay = (millisLocal % MILLIS_PER_DAY).toInt
      if (millisOfDay < 0) {
        millisOfDay += MILLIS_PER_DAY.toInt
      }
      val seconds = (millisOfDay / 1000L).toInt
      val hh = seconds / 3600
      val mm = seconds / 60 % 60
      val ss = seconds % 60
      val ms = millisOfDay % 1000
      // Let Calendar resolve the wall-clock fields in tz, then read the offset back.
      val calendar = Calendar.getInstance(tz)
      calendar.set(year, month - 1, day, hh, mm, ss)
      calendar.set(Calendar.MILLISECOND, ms)
      guess = (millisLocal - calendar.getTimeInMillis()).toInt
    }
  }
  guess
}
/**
 * Convert the timestamp `ts` from one timezone to another.
 *
 * TODO: Because of DST, the conversion between UTC and human time is not exactly one-to-one
 * mapping, the conversion here may return wrong result, we should make the timestamp
 * timezone-aware.
 */
def convertTz(ts: SQLTimestamp, fromZone: TimeZone, toZone: TimeZone): SQLTimestamp = {
  // We always use local timezone to parse or format a timestamp
  val localZone = defaultTimeZone()
  // Step 1: reinterpret ts (wall-clock in fromZone) as a true UTC instant.
  val utcTs = if (fromZone.getID == localZone.getID) {
    ts
  } else {
    // get the human time using local time zone, that actually is in fromZone.
    val localTs = ts + localZone.getOffset(ts / 1000L) * 1000L // in fromZone
    localTs - getOffsetFromLocalMillis(localTs / 1000L, fromZone) * 1000L
  }
  // Step 2: re-encode the UTC instant as a wall-clock timestamp in toZone.
  if (toZone.getID == localZone.getID) {
    utcTs
  } else {
    val localTs = utcTs + toZone.getOffset(utcTs / 1000L) * 1000L // in toZone
    // treat it as local timezone, convert to UTC (we could get the expected human time back)
    localTs - getOffsetFromLocalMillis(localTs / 1000L, localZone) * 1000L
  }
}
/**
 * Returns a timestamp of given timezone from utc timestamp, with the same string
 * representation in their timezone.
 */
def fromUTCTime(time: SQLTimestamp, timeZone: String): SQLTimestamp = {
  convertTz(time, TimeZoneGMT, getTimeZone(timeZone))
}
/**
 * Returns a utc timestamp from a given timestamp from a given timezone, with the same
 * string representation in their timezone.
 */
def toUTCTime(time: SQLTimestamp, timeZone: String): SQLTimestamp = {
  convertTz(time, getTimeZone(timeZone), TimeZoneGMT)
}
/**
 * Re-initialize the current thread's thread locals. Exposed for testing.
 */
private[util] def resetThreadLocals(): Unit = {
  threadLocalGmtCalendar.remove()
  threadLocalTimestampFormat.remove()
  threadLocalDateFormat.remove()
}
}
| wangyixiaohuihui/spark2-annotation | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala | Scala | apache-2.0 | 35,516 |
import Array._
object Problem {
  /**
   * Project Euler problem 26: find the denominator d (2 <= d < 1000) for which the
   * decimal fraction of 1/d has the longest recurring cycle.
   *
   * For each candidate d, long division is simulated: the position at which each
   * remainder first appears is recorded, and when a remainder repeats the cycle
   * length is the distance between the two positions. Scanning denominators from
   * large to small allows an early exit: the period of 1/d is always less than d,
   * so once the best cycle found is at least the current d, no smaller d can win.
   *
   * @return the denominator with the longest recurring cycle
   */
  def findMaxPeriod(): Int = {
    var sequenceLength = 0
    var max = 0
    // d = 1000 itself is excluded (the problem asks for d < 1000; 1/1000 terminates anyway).
    for (i <- 999 to 2 by -1) {
      if (sequenceLength >= i) {
        // No remaining denominator can produce a longer cycle.
        return max
      }
      // remainders(r) = division step at which remainder r was first seen; -1 = unseen.
      val remainders = Array.fill(i)(-1)
      var value = 1
      var pos = 0
      while (value != 0 && remainders(value) == -1) {
        remainders(value) = pos
        pos += 1
        value = value * 10 % i
      }
      // value == 0 means the decimal terminates (no recurring cycle at all).
      if (value != 0 && pos - remainders(value) > sequenceLength) {
        sequenceLength = pos - remainders(value)
        max = i
      }
    }
    max
  }

  def main(args: Array[String]): Unit = {
    val start = System.currentTimeMillis
    println(findMaxPeriod())
    val stop = System.currentTimeMillis
    println("Time taken: " + (stop - start) + "ms")
  }
}
| Jiri-Kremser/euler | 026/Problem.scala | Scala | gpl-2.0 | 761 |
package play.api.i18n
import java.io.File
import java.text.MessageFormat
import javax.inject.{Singleton, Inject}
import play.api.mvc._
import play.api._
import play.api.i18n.Messages.UrlMessageSource
import play.mvc.Http
import scala.util.{Success, Try}
import scala.collection.JavaConverters._
// Marker trait so an alternative MessagesApi implementation can be bound for injection.
trait MMessages extends MessagesApi
/**
 * A [[MessagesApi]] implementation that re-uses Play's original
 * MessagesPlugin private[i18n] internals, but discovers the message files to
 * load from a configurable "message listing" resource (one message-file name
 * per line), each resolved per language as "&lt;lang&gt;/&lt;messageFile&gt;"
 * on the classpath.
 *
 * Created by Ruben Lozano Díaz on 11/02/2014.
 */
@Singleton
class MMessagesImpl @Inject() (environment: Environment, configuration: Configuration, langs: Langs) extends MMessages {
  private val config = PlayConfig(configuration)
  // Listing resource used when no `i18n.messagelisting` key is configured.
  val DEFAULT_FILENAME = "messagelisting.properties"
  // Name of the listing resource; any failure while reading the
  // configuration also falls back to the default file name.
  lazy val messageListingName = Try(configuration.getString("i18n.messagelisting").getOrElse(DEFAULT_FILENAME)) match {
    case Success(s) => s case _ => DEFAULT_FILENAME
  }
  // All messages, keyed by language code first and message key second.
  val messages: Map[String, Map[String, String]] = allMessages
  override def preferred(candidates: Seq[Lang]) = Messages(langs.preferred(candidates), this)
  override def preferred(request: mvc.RequestHeader) = {
    // A valid language cookie takes precedence over the Accept-Language
    // header when choosing the language for this request.
    val maybeLangFromCookie = request.cookies.get(langCookieName)
      .flatMap(c => Lang.get(c.value))
    val lang = langs.preferred(maybeLangFromCookie.toSeq ++ request.acceptLanguages)
    Messages(lang, this)
  }
  override def preferred(request: Http.RequestHeader) = preferred(request._underlyingHeader())
  override def setLang(result: Result, lang: Lang) = result.withCookies(Cookie(langCookieName, lang.code, path = Session.path, domain = Session.domain,
    secure = langCookieSecure, httpOnly = langCookieHttpOnly))
  override def clearLang(result: Result) = result.discardingCookies(DiscardingCookie(langCookieName, path = Session.path, domain = Session.domain,
    secure = langCookieSecure))
  // Translates `key`, falling back to the key itself when no translation
  // exists in any candidate language bundle.
  override def apply(key: String, args: Any*)(implicit lang: Lang): String = {
    translate(key, args).getOrElse(noMatch(key, args))
  }
  // Returns the translation of the first key that resolves; falls back to
  // the last key of the sequence when none of them translate.
  override def apply(keys: Seq[String], args: Any*)(implicit lang: Lang): String = {
    keys.foldLeft[Option[String]](None) {
      case (None, key) => translate(key, args)
      case (acc, _) => acc
    }.getOrElse(noMatch(keys.last, args))
  }
  // Lookup order: the exact language, its country-less variant, then the
  // "default" and "default.play" bundles. The first matching pattern is
  // formatted with java.text.MessageFormat in the requested locale.
  override def translate(key: String, args: Seq[Any])(implicit lang: Lang): Option[String] = {
    val langsToTry: List[Lang] =
      List(lang, Lang(lang.language, ""), Lang("default", ""), Lang("default.play", ""))
    val pattern: Option[String] =
      langsToTry.foldLeft[Option[String]](None)((res, lang) =>
        res.orElse(messages.get(lang.code).flatMap(_.get(key))))
    pattern.map(pattern =>
      new MessageFormat(pattern, lang.toLocale).format(args.map(_.asInstanceOf[java.lang.Object]).toArray))
  }
  // True when the key resolves in the language itself or in any fallback
  // bundle (same lookup order as `translate`).
  override def isDefinedAt(key: String)(implicit lang: Lang): Boolean = {
    val langsToTry: List[Lang] = List(lang, Lang(lang.language, ""), Lang("default", ""), Lang("default.play", ""))
    langsToTry.foldLeft[Boolean](false)({ (acc, lang) =>
      acc || messages.get(lang.code).map(_.isDefinedAt(key)).getOrElse(false)
    })
  }
  lazy val langCookieName = config.getDeprecated[String]("play.i18n.langCookieName", "application.lang.cookie")
  lazy val langCookieSecure = config.get[Boolean]("play.i18n.langCookieSecure")
  lazy val langCookieHttpOnly = config.get[Boolean]("play.i18n.langCookieHttpOnly")
  // Missing-translation fallback: the key itself (arguments are ignored).
  private def noMatch(key: String, args: Seq[Any]) = key
  // Reads the listing resource line by line; every line names a message file
  // looked up as "<lang>/<file>" on the classpath. All matching classpath
  // resources are parsed and merged with `++`, so maps later in the
  // (reversed) resource list override earlier ones. Parse failures are
  // rethrown. NOTE(review): throws NPE if the listing resource is missing —
  // getResourceAsStream returns null; confirm this is intended fail-fast.
  private def loadMessagesStr(lang:String): Map[String,String] =
    scala.io.Source.fromInputStream(environment.classLoader.getResourceAsStream(messageListingName)).getLines().map{ messageFile =>
      environment.classLoader.getResources(lang+File.separator+messageFile).asScala.toList.reverse.map{ messageUrl =>
        Messages.parse(UrlMessageSource(messageUrl), messageFile).fold(e => throw e, identity)
      }.foldLeft(Map.empty[String, String]) { _ ++ _ }
    }.foldLeft(Map.empty[String, String]) { _ ++ _ }
  // Eagerly loads the message map for every configured language.
  private def allMessages = {
    langs.availables.map(_.code).map { lang =>
      (lang, loadMessagesStr(lang))
    }.toMap
  }
}
| Department-for-Work-and-Pensions/Multi-Messages | module/src/main/scala/play/api/i18n/MMessage.scala | Scala | mit | 4,066 |
/** Deliberately "wrong" entry point used by the packaging scripted test. */
object Main {
  def main(args: Array[String]): Unit = {
    println("Wrong main")
  }
}
| joescii/sbt-native-packager | src/sbt-test/universal/staging-custom-main/src/main/scala/Main.scala | Scala | bsd-2-clause | 52 |
package Scalisp
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
/**
 * Exercises the built-in procedures of the Scalisp interpreter through a
 * single shared REPL: arithmetic, comparisons, list primitives (car/cdr/
 * append/range/length) and higher-order functions (map/foldl/reduce/filter).
 * Note: tests share the REPL, so the definitions made in the first test
 * (a, b, l) are relied upon by later assertions.
 */
class ProcedureSpec extends FlatSpec with ShouldMatchers {
  val repl = new REPL()
  "Multi-line code" should "not crash" in {
    repl.execute("""
      (define a 1)
      (define b 2)
      (define l '(1 2 3 4))
      """)
  }
  "Addition and substraction" should "work as expected" in {
    repl.executeLine("(+ 4 3 5 3)") should equal (15)
    repl.executeLine("(+ 10 -3 8)") should equal (15)
    repl.executeLine("(- 10 -3 8)") should equal (5)
    repl.executeLine("(+ a b)") should equal (3)
    repl.executeLine("(- a b)") should equal (-1)
  }
  it should "still work if only one element is passed" in {
    repl.executeLine("(+ 7)") should equal (7)
    repl.executeLine("(- 7)") should equal (7)
  }
  // Variadic comparison operators: (< 1 2 3) requires the chain to hold
  // pairwise for every adjacent pair.
  "Comparisons" should "work normally" in {
    repl.executeLine("(< 2 3)") should equal (true)
    repl.executeLine("(< 4 3)") should equal (false)
    repl.executeLine("(< 3 3)") should equal (false)
    repl.executeLine("(< 1 2 3 4 5)") should equal (true)
    repl.executeLine("(< 3 2 1 4)") should equal (false)
    repl.executeLine("(< a b)") should equal (true)
    repl.executeLine("(< b a)") should equal (false)
    repl.executeLine("(> 2 3)") should equal (false)
    repl.executeLine("(> 4 3)") should equal (true)
    repl.executeLine("(> 3 3)") should equal (false)
    repl.executeLine("(> 5 4 3 2 1)") should equal (true)
    repl.executeLine("(> 4 2 3 1)") should equal (false)
    repl.executeLine("(> a b)") should equal (false)
    repl.executeLine("(> b a)") should equal (true)
    repl.executeLine("(= 2 3)") should equal (false)
    repl.executeLine("(= 3 3)") should equal (true)
    repl.executeLine("(= 1 2 3)") should equal (false)
    repl.executeLine("(= 3 3 3 3)") should equal (true)
    repl.executeLine("(= a b)") should equal (false)
    repl.executeLine("(= a a)") should equal (true)
  }
  "car" should "return the head of a list" in {
    repl.executeLine("(car '(1 2 3 4))") should equal (1)
    repl.executeLine("(car l)") should equal (1)
  }
  it should "throw a TypeError on non-lists" in {
    evaluating { repl.executeLine("(car 1)") } should produce [TypeError]
  }
  "cdr" should "return the tail of a list" in {
    repl.executeLine("(cdr '(1 2 3 4))") should equal (List(2, 3, 4))
    repl.executeLine("(cdr l)") should equal (List(2, 3, 4))
  }
  it should "throw a TypeError on non-lists" in {
    evaluating { repl.executeLine("(cdr 1)") } should produce [TypeError]
  }
  "append" should "merge two or more lists" in {
    repl.executeLine("(append '(1 2) '(3 4))") should equal (List(1, 2, 3, 4))
    repl.executeLine("(append '(1 2) '(3 4) '(5 6) '(7 8))") should equal (List(1, 2, 3, 4, 5, 6, 7, 8))
    repl.executeLine("(append l l)") should equal (List(1, 2, 3, 4, 1, 2, 3, 4))
  }
  it should "throw a TypeError on non-lists" in {
    evaluating { repl.executeLine("(append 1 2 3)") } should produce [TypeError]
    evaluating { repl.executeLine("(append '(1 2) 2 3)") } should produce [TypeError]
  }
  // (range n) counts from 0 to n-1; (range a b) from a to b-1.
  "range" should "create a range of numbers" in {
    repl.executeLine("(range 5)") should equal (List(0, 1, 2, 3, 4))
    repl.executeLine("(range 5 10)") should equal (List(5, 6, 7, 8, 9))
  }
  "length" should "return the length of a list" in {
    repl.executeLine("(length (range 10))") should equal (10)
  }
  "map" should "map the values of a list" in {
    repl.executeLine("(map (lambda (a) (+ a 5)) (range 5))") should equal (List(5, 6, 7, 8, 9))
  }
  "foldl" should "fold a list" in {
    repl.executeLine("(foldl + 0 (range 5))") should equal (10)
    repl.executeLine("(foldl * 1 (range 1 5))") should equal (24)
  }
  "reduce" should "should act like fold, but with no initial value" in {
    repl.executeLine("(reduce + (range 5))") should equal (10)
    repl.executeLine("(reduce * (range 1 5))") should equal (24)
  }
  "filter" should "filter a list" in {
    repl.executeLine("(filter (lambda (a) (> a 5)) (range 10))") should equal (
      List(6, 7, 8, 9))
  }
}
| quantintel/Scalisp | src/test/scala/Procedures.scala | Scala | mit | 3,937 |
package sangria.execution
import sangria.ast
import sangria.marshalling.{InputUnmarshaller, ResultMarshaller}
import sangria.parser.SourceMapper
import sangria.schema._
import sangria.validation.QueryValidator
import InputUnmarshaller.emptyMapVars
import sangria.execution.deferred.DeferredResolver
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
case class Executor[Ctx, Root](
schema: Schema[Ctx, Root],
queryValidator: QueryValidator = QueryValidator.default,
deferredResolver: DeferredResolver[Ctx] = DeferredResolver.empty,
exceptionHandler: ExceptionHandler = ExceptionHandler.empty,
deprecationTracker: DeprecationTracker = DeprecationTracker.empty,
middleware: List[Middleware[Ctx]] = Nil,
maxQueryDepth: Option[Int] = None,
queryReducers: List[QueryReducer[Ctx, _]] = Nil
)(implicit executionContext: ExecutionContext) {
  /**
   * Validates the query and resolves everything that can be resolved ahead
   * of execution — operation, unmarshalled variables, root type, top-level
   * fields and query reducers — returning a [[PreparedQuery]] that can then
   * be executed (possibly repeatedly) without repeating this work.
   *
   * Fails the returned future with [[ValidationError]] on validation
   * violations, or with the first error produced while resolving the
   * operation/variables/root type.
   */
  def prepare[Input](
      queryAst: ast.Document,
      userContext: Ctx,
      root: Root,
      operationName: Option[String] = None,
      variables: Input = emptyMapVars
  )(implicit um: InputUnmarshaller[Input]): Future[PreparedQuery[Ctx, Root, Input]] = {
    val (violations, validationTiming) =
      TimeMeasurement.measure(queryValidator.validateQuery(schema, queryAst))
    if (violations.nonEmpty)
      Future.failed(ValidationError(violations, exceptionHandler))
    else {
      val scalarMiddleware = Middleware.composeFromScalarMiddleware(middleware, userContext)
      val valueCollector = new ValueCollector[Ctx, Input](
        schema,
        variables,
        queryAst.sourceMapper,
        deprecationTracker,
        userContext,
        exceptionHandler,
        scalarMiddleware,
        false)(um)
      // Try-based pipeline: each step can fail and short-circuits the rest.
      val executionResult = for {
        operation <- Executor.getOperation(exceptionHandler, queryAst, operationName)
        unmarshalledVariables <- valueCollector.getVariableValues(
          operation.variables,
          scalarMiddleware)
        fieldCollector = new FieldCollector[Ctx, Root](
          schema,
          queryAst,
          unmarshalledVariables,
          queryAst.sourceMapper,
          valueCollector,
          exceptionHandler)
        tpe <- Executor.getOperationRootType(
          schema,
          exceptionHandler,
          operation,
          queryAst.sourceMapper)
        fields <- fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation))
      } yield {
        // Pre-resolve the argument values of the top-level fields; fields
        // whose arguments fail to resolve are omitted from `preparedFields`.
        val preparedFields = fields.fields.flatMap {
          case CollectedField(_, astField, Success(_)) =>
            val allFields =
              tpe.getField(schema, astField.name).asInstanceOf[Vector[Field[Ctx, Root]]]
            val field = allFields.head
            val args = valueCollector.getFieldArgumentValues(
              ExecutionPath.empty.add(astField, tpe),
              Some(astField),
              field.arguments,
              astField.arguments,
              unmarshalledVariables)
            args.toOption.map(PreparedField(field, _))
          case _ => None
        }
        // Run the query reducers now (they may transform the user context),
        // then capture everything in a PreparedQuery whose exec function
        // replays executeOperation with a caller-supplied ctx/root.
        QueryReducerExecutor
          .reduceQuery(
            schema,
            queryReducers,
            exceptionHandler,
            fieldCollector,
            valueCollector,
            unmarshalledVariables,
            tpe,
            fields,
            userContext)
          .map { case (newCtx, timing) =>
            new PreparedQuery[Ctx, Root, Input](
              queryAst,
              operation,
              tpe,
              newCtx,
              root,
              preparedFields,
              (c: Ctx, r: Root, m: ResultMarshaller, scheme: ExecutionScheme) =>
                executeOperation(
                  queryAst,
                  operationName,
                  variables,
                  um,
                  operation,
                  queryAst.sourceMapper,
                  valueCollector,
                  fieldCollector,
                  m,
                  unmarshalledVariables,
                  tpe,
                  fields,
                  c,
                  r,
                  scheme,
                  validationTiming,
                  timing
                ))
          }
      }
      executionResult match {
        case Success(future) => future
        case Failure(error) => Future.failed(error)
      }
    }
  }
  /**
   * Validates and executes the query in one go. Mirrors [[prepare]] but,
   * instead of capturing a [[PreparedQuery]], immediately runs the query
   * reducers and then the operation under the caller's [[ExecutionScheme]].
   *
   * Validation violations and preparation errors are reported through
   * `scheme.failed` rather than by throwing.
   */
  def execute[Input](
      queryAst: ast.Document,
      userContext: Ctx,
      root: Root,
      operationName: Option[String] = None,
      variables: Input = emptyMapVars
  )(implicit
      marshaller: ResultMarshaller,
      um: InputUnmarshaller[Input],
      scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] = {
    val (violations, validationTiming) =
      TimeMeasurement.measure(queryValidator.validateQuery(schema, queryAst))
    if (violations.nonEmpty)
      scheme.failed(ValidationError(violations, exceptionHandler))
    else {
      val scalarMiddleware = Middleware.composeFromScalarMiddleware(middleware, userContext)
      val valueCollector = new ValueCollector[Ctx, Input](
        schema,
        variables,
        queryAst.sourceMapper,
        deprecationTracker,
        userContext,
        exceptionHandler,
        scalarMiddleware,
        false)(um)
      // Try-based pipeline: each step can fail and short-circuits the rest.
      val executionResult = for {
        operation <- Executor.getOperation(exceptionHandler, queryAst, operationName)
        unmarshalledVariables <- valueCollector.getVariableValues(
          operation.variables,
          scalarMiddleware)
        fieldCollector = new FieldCollector[Ctx, Root](
          schema,
          queryAst,
          unmarshalledVariables,
          queryAst.sourceMapper,
          valueCollector,
          exceptionHandler)
        tpe <- Executor.getOperationRootType(
          schema,
          exceptionHandler,
          operation,
          queryAst.sourceMapper)
        fields <- fieldCollector.collectFields(ExecutionPath.empty, tpe, Vector(operation))
      } yield {
        // Query reducers run first and may replace the user context; the
        // operation is then executed with the (possibly new) context.
        val reduced = QueryReducerExecutor.reduceQuery(
          schema,
          queryReducers,
          exceptionHandler,
          fieldCollector,
          valueCollector,
          unmarshalledVariables,
          tpe,
          fields,
          userContext)
        scheme.flatMapFuture(reduced) { case (newCtx, timing) =>
          executeOperation(
            queryAst,
            operationName,
            variables,
            um,
            operation,
            queryAst.sourceMapper,
            valueCollector,
            fieldCollector,
            marshaller,
            unmarshalledVariables,
            tpe,
            fields,
            newCtx,
            root,
            scheme,
            validationTiming,
            timing
          )
        }
      }
      executionResult match {
        case Success(result) => result
        case Failure(error) => scheme.failed(error)
      }
    }
  }
  /**
   * Runs a single, already-prepared operation: builds the middleware query
   * context, notifies every middleware via `beforeQuery`, resolves the
   * fields with the strategy dictated by the operation type, and finally
   * calls `afterQuery` on each middleware once the result completes.
   *
   * Queries resolve their top-level fields in parallel, mutations
   * sequentially (per the GraphQL spec ordering), and subscriptions use the
   * streaming resolver only when the root fields are tagged with a
   * [[SubscriptionField]] stream.
   */
  private def executeOperation[Input](
      queryAst: ast.Document,
      operationName: Option[String],
      inputVariables: Input,
      inputUnmarshaller: InputUnmarshaller[Input],
      operation: ast.OperationDefinition,
      sourceMapper: Option[SourceMapper],
      valueCollector: ValueCollector[Ctx, _],
      fieldCollector: FieldCollector[Ctx, Root],
      marshaller: ResultMarshaller,
      variables: Map[String, VariableValue],
      tpe: ObjectType[Ctx, Root],
      fields: CollectedFields,
      ctx: Ctx,
      root: Root,
      scheme: ExecutionScheme,
      validationTiming: TimeMeasurement,
      queryReducerTiming: TimeMeasurement
  ): scheme.Result[Ctx, marshaller.Node] = {
    val middlewareCtx = MiddlewareQueryContext(
      ctx,
      this,
      queryAst,
      operationName,
      inputVariables,
      inputUnmarshaller,
      validationTiming,
      queryReducerTiming)
    try {
      // Each middleware's beforeQuery value is kept so it can be handed back
      // to the same middleware in afterQuery.
      val middlewareVal = middleware.map(m => m.beforeQuery(middlewareCtx) -> m)
      val deferredResolverState = deferredResolver.initialQueryState
      val resolver = new Resolver[Ctx](
        marshaller,
        middlewareCtx,
        schema,
        valueCollector,
        variables,
        fieldCollector,
        ctx,
        exceptionHandler,
        deferredResolver,
        sourceMapper,
        deprecationTracker,
        middlewareVal,
        maxQueryDepth,
        deferredResolverState,
        scheme.extended,
        validationTiming,
        queryReducerTiming,
        queryAst)
      val result =
        operation.operationType match {
          case ast.OperationType.Query =>
            resolver
              .resolveFieldsPar(tpe, root, fields)(scheme)
              .asInstanceOf[scheme.Result[Ctx, marshaller.Node]]
          case ast.OperationType.Mutation =>
            resolver
              .resolveFieldsSeq(tpe, root, fields)(scheme)
              .asInstanceOf[scheme.Result[Ctx, marshaller.Node]]
          case ast.OperationType.Subscription =>
            tpe.uniqueFields.head.tags.collectFirst { case SubscriptionField(s) => s } match {
              case Some(stream) =>
                // Streaming is supported - resolve as a real subscription
                resolver
                  .resolveFieldsSubs(tpe, root, fields)(scheme)
                  .asInstanceOf[scheme.Result[Ctx, marshaller.Node]]
              case None =>
                // No streaming is supported - resolve as a normal "query" operation
                resolver
                  .resolveFieldsPar(tpe, root, fields)(scheme)
                  .asInstanceOf[scheme.Result[Ctx, marshaller.Node]]
            }
        }
      if (middlewareVal.nonEmpty)
        scheme.onComplete(result)(middlewareVal.foreach { case (v, m) =>
          m.afterQuery(v.asInstanceOf[m.QueryVal], middlewareCtx)
        })
      else result
    } catch {
      // Fatal errors (OOM etc.) are deliberately allowed to propagate.
      case NonFatal(error) =>
        scheme.failed(error)
    }
  }
}
object Executor {
  // Alias for the package-level ExceptionHandler type so callers can keep
  // referring to it as Executor.ExceptionHandler.
  type ExceptionHandler = sangria.execution.ExceptionHandler
  /**
   * Convenience entry point: builds an [[Executor]] from the given settings
   * and immediately executes the query against it.
   */
  def execute[Ctx, Root, Input](
      schema: Schema[Ctx, Root],
      queryAst: ast.Document,
      userContext: Ctx = (),
      root: Root = (),
      operationName: Option[String] = None,
      variables: Input = emptyMapVars,
      queryValidator: QueryValidator = QueryValidator.default,
      deferredResolver: DeferredResolver[Ctx] = DeferredResolver.empty,
      exceptionHandler: ExceptionHandler = ExceptionHandler.empty,
      deprecationTracker: DeprecationTracker = DeprecationTracker.empty,
      middleware: List[Middleware[Ctx]] = Nil,
      maxQueryDepth: Option[Int] = None,
      queryReducers: List[QueryReducer[Ctx, _]] = Nil
  )(implicit
      executionContext: ExecutionContext,
      marshaller: ResultMarshaller,
      um: InputUnmarshaller[Input],
      scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] =
    Executor(
      schema,
      queryValidator,
      deferredResolver,
      exceptionHandler,
      deprecationTracker,
      middleware,
      maxQueryDepth,
      queryReducers)
      .execute(queryAst, userContext, root, operationName, variables)
  /**
   * Convenience entry point: builds an [[Executor]] from the given settings
   * and prepares (validates + pre-resolves) the query for later execution.
   */
  def prepare[Ctx, Root, Input](
      schema: Schema[Ctx, Root],
      queryAst: ast.Document,
      userContext: Ctx = (),
      root: Root = (),
      operationName: Option[String] = None,
      variables: Input = emptyMapVars,
      queryValidator: QueryValidator = QueryValidator.default,
      deferredResolver: DeferredResolver[Ctx] = DeferredResolver.empty,
      exceptionHandler: ExceptionHandler = ExceptionHandler.empty,
      deprecationTracker: DeprecationTracker = DeprecationTracker.empty,
      middleware: List[Middleware[Ctx]] = Nil,
      maxQueryDepth: Option[Int] = None,
      queryReducers: List[QueryReducer[Ctx, _]] = Nil
  )(implicit
      executionContext: ExecutionContext,
      um: InputUnmarshaller[Input]): Future[PreparedQuery[Ctx, Root, Input]] =
    Executor(
      schema,
      queryValidator,
      deferredResolver,
      exceptionHandler,
      deprecationTracker,
      middleware,
      maxQueryDepth,
      queryReducers)
      .prepare(queryAst, userContext, root, operationName, variables)
def getOperationRootType[Ctx, Root](
schema: Schema[Ctx, Root],
exceptionHandler: ExceptionHandler,
operation: ast.OperationDefinition,
sourceMapper: Option[SourceMapper]) = operation.operationType match {
case ast.OperationType.Query =>
Success(schema.query)
case ast.OperationType.Mutation =>
schema.mutation
.map(Success(_))
.getOrElse(
Failure(
OperationSelectionError(
"Schema is not configured for mutations",
exceptionHandler,
sourceMapper,
operation.location.toList)))
case ast.OperationType.Subscription =>
schema.subscription
.map(Success(_))
.getOrElse(
Failure(
OperationSelectionError(
"Schema is not configured for subscriptions",
exceptionHandler,
sourceMapper,
operation.location.toList)))
}
def getOperation(
exceptionHandler: ExceptionHandler,
document: ast.Document,
operationName: Option[String]): Try[ast.OperationDefinition] =
if (document.operations.size != 1 && operationName.isEmpty)
Failure(
OperationSelectionError(
"Must provide operation name if query contains multiple operations",
exceptionHandler))
else {
val unexpectedDefinition = document.definitions.find(d =>
!(d.isInstanceOf[ast.OperationDefinition] || d.isInstanceOf[ast.FragmentDefinition]))
unexpectedDefinition match {
case Some(unexpected) =>
Failure(new ExecutionError(
s"GraphQL cannot execute a request containing a ${unexpected.getClass.getSimpleName}.",
exceptionHandler))
case None =>
operationName match {
case Some(opName) =>
document.operations
.get(Some(opName))
.map(Success(_))
.getOrElse(Failure(
OperationSelectionError(s"Unknown operation name '$opName'", exceptionHandler)))
case None =>
Success(document.operations.values.head)
}
}
}
}
/**
 * A query that has already been validated and prepared by
 * [[Executor.prepare]]; `execute` re-runs it, optionally with a different
 * user context or root value, without repeating validation.
 */
class PreparedQuery[Ctx, Root, Input] private[execution] (
    val queryAst: ast.Document,
    val operation: ast.OperationDefinition,
    val tpe: ObjectType[Ctx, Root],
    val userContext: Ctx,
    val root: Root,
    val fields: Seq[PreparedField[Ctx, Root]],
    execFn: (Ctx, Root, ResultMarshaller, ExecutionScheme) => Any) {
  // Delegates to the function captured at preparation time; the cast mirrors
  // how `execFn` is constructed in Executor.prepare, which always produces
  // the scheme's result type.
  def execute(userContext: Ctx = userContext, root: Root = root)(implicit
      marshaller: ResultMarshaller,
      scheme: ExecutionScheme): scheme.Result[Ctx, marshaller.Node] =
    execFn(userContext, root, marshaller, scheme).asInstanceOf[scheme.Result[Ctx, marshaller.Node]]
}
case class PreparedField[Ctx, Root](field: Field[Ctx, Root], args: Args)
| OlegIlyenko/sangria | modules/core/src/main/scala/sangria/execution/Executor.scala | Scala | apache-2.0 | 14,983 |
package org.apache.spark.ml.parity.feature
import org.apache.spark.ml.Transformer
import org.apache.spark.ml.feature.FeatureHasher
import org.apache.spark.ml.parity.SparkParityBase
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.types._
import scala.util.Random
/**
 * Spark/MLeap parity test for [[FeatureHasher]]: hashes a mix of real,
 * boolean, numeric and string columns (with "int" forced categorical) into a
 * 2^17-dimensional feature vector.
 * NOTE(review): rows are generated from an unseeded [[Random]], so the
 * dataset differs between runs — parity is checked within a single run.
 */
class FeatureHasherParitySpec extends SparkParityBase {
  val categories = Seq(
    "spark",
    "and",
    "mleap",
    "are",
    "super",
    "dope",
    "together"
  )
  // One random row matching `schema` below; the last column is a random
  // pick from `categories`.
  def randomRow(): Row = Row(Random.nextDouble(), Random.nextBoolean(), Random.nextInt(20), Random.nextInt(20).toString,
    Random.shuffle(categories).head)
  val rows = spark.sparkContext.parallelize(Seq.tabulate(100) { _ => randomRow() })
  val schema = new StructType()
    .add("real", DoubleType, nullable = false)
    .add("bool", BooleanType, nullable = false)
    .add("int", IntegerType, nullable = false)
    .add("stringNum", StringType, nullable = true)
    .add("string", StringType, nullable = true)
  override val dataset: DataFrame = spark.sqlContext.createDataFrame(rows, schema)
  override val sparkTransformer: Transformer = new FeatureHasher()
    .setInputCols("real", "bool", "int", "stringNum", "string")
    .setOutputCol("features")
    .setNumFeatures(1 << 17)
    .setCategoricalCols(Array("int"))
}
| combust/mleap | mleap-spark/src/test/scala/org/apache/spark/ml/parity/feature/FeatureHasherParitySpec.scala | Scala | apache-2.0 | 1,296 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.metadata
import org.apache.flink.table.plan.nodes.calcite.{Expand, Rank, WindowAggregate}
import org.apache.flink.table.plan.nodes.physical.batch._
import org.apache.flink.table.plan.util.FlinkRelMdUtil
import org.apache.flink.table.{JArrayList, JDouble}
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.metadata._
import org.apache.calcite.rel.{RelNode, SingleRel}
import org.apache.calcite.rex.{RexNode, RexUtil}
import org.apache.calcite.sql.fun.SqlStdOperatorTable
import org.apache.calcite.util.{BuiltInMethod, ImmutableBitSet, Util}
import scala.collection.JavaConversions._
/**
 * FlinkRelMdSelectivity supplies an implementation of
 * [[RelMetadataQuery#getSelectivity]] for the standard logical algebra.
 */
class FlinkRelMdSelectivity private extends MetadataHandler[BuiltInMetadata.Selectivity] {
  def getDef: MetadataDef[BuiltInMetadata.Selectivity] = BuiltInMetadata.Selectivity.DEF
  // Scans, projections, filters and calcs all estimate the predicate
  // directly via the Flink SelectivityEstimator (see estimateSelectivity).
  def getSelectivity(rel: TableScan, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    estimateSelectivity(rel, mq, predicate)
  def getSelectivity(rel: Project, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    estimateSelectivity(rel, mq, predicate)
  def getSelectivity(rel: Filter, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    estimateSelectivity(rel, mq, predicate)
  def getSelectivity(rel: Calc, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    estimateSelectivity(rel, mq, predicate)
  def getSelectivity(rel: Expand, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue) {
      1.0
    } else if (RelOptUtil.InputFinder.bits(predicate).toList.contains(rel.expandIdIndex)) {
      // The predicate references the expand-id column, which does not exist
      // on the input, so we can only guess. SQL like:
      //   select count(*) as c from emp group by rollup(deptno, gender)
      //   having grouping(deptno) <= grouping_id(deptno, gender)
      // triggers this case.
      RelMdUtil.guessSelectivity(predicate)
    } else {
      // Predicate only touches pass-through columns: delegate to the input.
      mq.getSelectivity(rel.getInput, predicate)
    }
  }
  // Exchange only redistributes rows, so selectivity is the input's.
  def getSelectivity(rel: Exchange, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    mq.getSelectivity(rel.getInput, predicate)
  // Splits the predicate into the part referencing the rank function output
  // (estimated on the Rank itself) and the rest (pushed to the input), and
  // multiplies the two selectivities.
  def getSelectivity(rel: Rank, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue) {
      return 1D
    }
    val (nonRankPred, rankPred) = FlinkRelMdUtil.splitPredicateOnRank(rel, predicate)
    // NOTE(review): mq.getSelectivity may return null, which would NPE on the
    // multiplication below via unboxing — confirm callers never hit this.
    val childSelectivity: JDouble = nonRankPred match {
      case Some(p) => mq.getSelectivity(rel.getInput, p)
      case _ => 1D
    }
    val rankSelectivity: JDouble = rankPred match {
      case Some(p) => estimateSelectivity(rel, mq, p)
      case _ => 1D
    }
    childSelectivity * rankSelectivity
  }
  // Sort does not change row contents, so selectivity is the input's.
  def getSelectivity(rel: Sort, mq: RelMetadataQuery, predicate: RexNode): JDouble =
    mq.getSelectivity(rel.getInput, predicate)
  def getSelectivity(
      rel: Aggregate,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = getSelectivityOfAgg(rel, mq, predicate)
  def getSelectivity(
      rel: BatchExecGroupAggregateBase,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = getSelectivityOfAgg(rel, mq, predicate)
  // Window aggregates rewrite predicates on named window properties before
  // the common aggregate handling.
  def getSelectivity(
      rel: WindowAggregate,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = {
    val newPredicate = FlinkRelMdUtil.makeNamePropertiesSelectivityRexNode(rel, predicate)
    getSelectivityOfAgg(rel, mq, newPredicate)
  }
  def getSelectivity(
      rel: BatchExecWindowAggregateBase,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = {
    // The name-property rewrite is applied only for final aggregates.
    val newPredicate = if (rel.isFinal) {
      FlinkRelMdUtil.makeNamePropertiesSelectivityRexNode(rel, predicate)
    } else {
      predicate
    }
    getSelectivityOfAgg(rel, mq, newPredicate)
  }
  /**
   * Common selectivity logic for (window) aggregates. A final aggregate that
   * merges a local one delegates entirely to its input; otherwise the
   * predicate is split into the part on grouping columns (delegated to the
   * input) and the part on aggregate calls (estimated here), and the two
   * selectivities are multiplied.
   */
  private def getSelectivityOfAgg(
      agg: SingleRel,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue) {
      1.0
    } else {
      // A final aggregate whose input is a local aggregate (isMerge).
      val hasLocalAgg = agg match {
        case _: Aggregate => false
        case rel: BatchExecGroupAggregateBase => rel.isFinal && rel.isMerge
        case rel: BatchExecWindowAggregateBase => rel.isFinal && rel.isMerge
        case _ => throw new IllegalArgumentException(s"Cannot handle ${agg.getRelTypeName}!")
      }
      if (hasLocalAgg) {
        val childPredicate = agg match {
          case rel: BatchExecWindowAggregateBase =>
            // rewrite the predicate so it refers to the local window
            // aggregate's output columns
            FlinkRelMdUtil.setChildPredicateOfWinAgg(predicate, rel)
          case _ => predicate
        }
        return mq.getSelectivity(agg.getInput, childPredicate)
      }
      // childPred covers grouping columns, restPred covers aggregate calls.
      val (childPred, restPred) = agg match {
        case rel: Aggregate =>
          FlinkRelMdUtil.splitPredicateOnAggregate(rel, predicate)
        case rel: BatchExecGroupAggregateBase =>
          FlinkRelMdUtil.splitPredicateOnAggregate(rel, predicate)
        case rel: BatchExecWindowAggregateBase =>
          FlinkRelMdUtil.splitPredicateOnAggregate(rel, predicate)
        case _ => throw new IllegalArgumentException(s"Cannot handle ${agg.getRelTypeName}!")
      }
      val childSelectivity = mq.getSelectivity(agg.getInput(), childPred.orNull)
      if (childSelectivity == null) {
        null
      } else {
        val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
        val aggCallEstimator = new AggCallSelectivityEstimator(agg, fmq)
        // Fall back to Calcite's guess when the estimator cannot evaluate
        // the aggregate-call part of the predicate.
        val restSelectivity = aggCallEstimator.evaluate(restPred.orNull) match {
          case Some(s) => s
          case _ => RelMdUtil.guessSelectivity(restPred.orNull)
        }
        childSelectivity * restSelectivity
      }
    }
  }
  // Both the logical Window and its batch physical counterpart share the
  // over-aggregate selectivity logic below.
  def getSelectivity(
      overWindow: Window,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = getSelectivityOfOverAgg(overWindow, mq, predicate)
  def getSelectivity(
      rel: BatchExecOverAggregate,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = getSelectivityOfOverAgg(rel, mq, predicate)
  /**
   * Selectivity for over-window aggregates: conjuncts that only reference
   * the input's columns are delegated to the input, conjuncts on the
   * window-function outputs are guessed, and the two are multiplied.
   */
  private def getSelectivityOfOverAgg(
      over: SingleRel,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue) {
      1.0
    } else {
      val input = over.getInput
      // Columns available on the input (the over node appends its aggregate
      // output columns after these).
      val childBitmap = over match {
        case _: BatchExecOverAggregate | _: Window =>
          ImmutableBitSet.range(0, input.getRowType.getFieldCount)
        case _ => throw new IllegalArgumentException(s"Unknown node type ${over.getRelTypeName}")
      }
      val notPushable = new JArrayList[RexNode]
      val pushable = new JArrayList[RexNode]
      RelOptUtil.splitFilters(
        childBitmap,
        predicate,
        pushable,
        notPushable)
      val rexBuilder = over.getCluster.getRexBuilder
      val childPreds = RexUtil.composeConjunction(rexBuilder, pushable, true)
      val partSelectivity = mq.getSelectivity(input, childPreds)
      if (partSelectivity == null) {
        null
      } else {
        val rest = RexUtil.composeConjunction(rexBuilder, notPushable, true)
        val restSelectivity = RelMdUtil.guessSelectivity(rest)
        partSelectivity * restSelectivity
      }
    }
  }
  def getSelectivity(rel: Join, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue) {
      1.0
    } else {
      rel.getJoinType match {
        case JoinRelType.SEMI | JoinRelType.ANTI =>
          // create a RexNode representing the selectivity of the
          // semi-join filter and pass it to getSelectivity
          val rexBuilder = rel.getCluster.getRexBuilder
          var newPred = FlinkRelMdUtil.makeSemiAntiJoinSelectivityRexNode(mq, rel)
          // predicate is known non-null on this branch; the guard below is
          // purely defensive.
          if (predicate != null) {
            newPred = rexBuilder.makeCall(SqlStdOperatorTable.AND, newPred, predicate)
          }
          // Semi/anti joins only output left-side columns, so the combined
          // predicate is evaluated against the left input.
          mq.getSelectivity(rel.getLeft, newPred)
        case _ =>
          estimateSelectivity(rel, mq, predicate)
      }
    }
  }
  /**
   * Union selectivity: the row-count-weighted average of the children's
   * selectivities, i.e. sum(selectivity_i * rows_i) / sum(rows_i). Returns
   * null when any child's row count or selectivity is unknown.
   */
  def getSelectivity(rel: Union, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    if (predicate == null || predicate.isAlwaysTrue || rel.getInputs.size == 0) {
      1.0
    } else {
      // convert the predicate to reference the types of the union child
      val rexBuilder = rel.getCluster.getRexBuilder
      val adjustments = new Array[Int](rel.getRowType.getFieldCount)
      var inputRows: Seq[JDouble] = Nil
      var inputSelectedRows: Seq[JDouble] = Nil
      rel.getInputs foreach { input =>
        val inputRowCount = mq.getRowCount(input)
        inputRows = inputRows :+ inputRowCount
        val inputSelectedRow: JDouble = if (inputRowCount == null) {
          null
        } else {
          val modifiedPred = predicate.accept(
            new RelOptUtil.RexInputConverter(
              rexBuilder, null, input.getRowType.getFieldList, adjustments))
          val selectivity = mq.getSelectivity(input, modifiedPred)
          if (selectivity == null) null else selectivity * inputRowCount
        }
        inputSelectedRows = inputSelectedRows :+ inputSelectedRow
      }
      if (inputRows.contains(null) || inputSelectedRows.contains(null)) {
        null
      } else {
        val sumRows = inputRows.reduce(_ + _)
        val sumSelectedRows = inputSelectedRows.reduce(_ + _)
        // Avoid dividing by a (near-)zero total row count.
        if (sumRows < 1.0) sumSelectedRows else sumSelectedRows / sumRows
      }
    }
  }
  // For a planner subset, evaluate against its best (or original) rel.
  def getSelectivity(subset: RelSubset, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    val rel = Util.first(subset.getBest, subset.getOriginal)
    mq.getSelectivity(rel, predicate)
  }
  // TODO only affects BatchPhysicalRel instead of all RelNode now
  def getSelectivity(rel: RelNode, mq: RelMetadataQuery, predicate: RexNode): JDouble = {
    rel match {
      case _: BatchPhysicalRel => estimateSelectivity(rel, mq, predicate)
      case _ => RelMdUtil.guessSelectivity(predicate)
    }
  }
  /**
   * Estimates the predicate with Flink's statistics-based
   * [[SelectivityEstimator]], falling back to Calcite's heuristic guess when
   * the estimator cannot evaluate the predicate.
   */
  private def estimateSelectivity(
      rel: RelNode,
      mq: RelMetadataQuery,
      predicate: RexNode): JDouble = {
    val fmq = FlinkRelMetadataQuery.reuseOrCreate(mq)
    val estimator = new SelectivityEstimator(rel, fmq)
    estimator.evaluate(predicate) match {
      case Some(s) => s
      case _ => RelMdUtil.guessSelectivity(predicate)
    }
  }
}
object FlinkRelMdSelectivity {
  // Single shared handler instance used behind the reflective provider.
  private val INSTANCE = new FlinkRelMdSelectivity
  // Metadata provider to register with Calcite; dispatches SELECTIVITY
  // metadata calls to INSTANCE via reflection.
  val SOURCE: RelMetadataProvider = ReflectiveRelMetadataProvider.reflectiveSource(
    BuiltInMethod.SELECTIVITY.method, INSTANCE)
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/metadata/FlinkRelMdSelectivity.scala | Scala | apache-2.0 | 11,380 |
/*
* AkkaSolrDocumentList.scala
*
* Updated: Sep 26, 2014
*
* Copyright (c) 2014, CodeMettle
*/
package com.codemettle.akkasolr.solrtypes
import org.apache.solr.common.{SolrDocument, SolrDocumentList}
import com.codemettle.akkasolr.CollectionConverters._
/**
 * Wrapper around Solr's [[SolrDocumentList]] response; the derived fields
 * are marked @transient so they are excluded from Java serialization.
 *
 * @author steven
 */
@SerialVersionUID(1L)
case class AkkaSolrDocumentList(original: Option[SolrDocumentList]) {
  // Result summary; (-1, -1, None) stands in when no list is present.
  @transient
  val resultInfo = {
    original.fold(SolrResultInfo(-1, -1, None))(o => SolrResultInfo(o.getNumFound, o.getStart, o.getMaxScore))
  }
  def numFound = resultInfo.numFound
  def start = resultInfo.start
  def maxScore = resultInfo.maxScore
  // Documents wrapped lazily on first access; empty when `original` is None.
  @transient
  lazy val documents = original.fold(Seq.empty[SolrDocument])(o => o.asScala.toSeq) map AkkaSolrDocument.apply
}
| CodeMettle/akka-solr | src/main/scala/com/codemettle/akkasolr/solrtypes/AkkaSolrDocumentList.scala | Scala | apache-2.0 | 787 |
/* Copyright 2015 UniCredit S.p.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package unicredit.example
import org.apache.spark.{ SparkContext, SparkConf }
import unicredit.spark.hbase._
/** Example job: reads selected column families/qualifiers from an HBase
  * table as an RDD and writes tab-separated rows to a text file. */
object Read extends App {
  val name = "Example of read from HBase table"

  lazy val sparkConf = new SparkConf().setAppName(name)
  lazy val sc = new SparkContext(sparkConf)
  implicit val config = HBaseConfig() // Assumes hbase-site.xml is on classpath

  // Column families and the qualifiers to fetch from each.
  val columns = Map(
    "cf1" -> Set("col1", "col2"),
    "cf2" -> Set("col3")
  )
  // Each record is (rowKey, Map[family, Map[qualifier, value]]).
  // NOTE(review): assumes every row contains all four cells — a missing
  // qualifier would throw here; confirm against the table's schema.
  sc.hbase[String]("test-table", columns)
    .map({ case (k, v) =>
      val cf1 = v("cf1")
      val col1 = cf1("col1")
      val col2 = cf1("col2")
      val col3 = v("cf2")("col3")

      List(k, col1, col2, col3) mkString "\\t"
    })
    .saveAsTextFile("test-output")
}
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.concurrent.duration
import DurationConversions._
// Would be nice to limit the visibility of this trait a little bit,
// but it crashes scalac to do so.
/** Mixin giving numeric receivers time-unit factory methods (e.g.
  * `5.seconds`, `2 minutes`). Every method delegates to [[durationIn]];
  * singular and abbreviated names are aliases of the plural forms. */
trait DurationConversions {
  // The single abstract hook implementors provide.
  protected def durationIn(unit: TimeUnit): FiniteDuration

  def nanoseconds = durationIn(NANOSECONDS)
  def nanos = nanoseconds
  def nanosecond = nanoseconds
  def nano = nanoseconds

  def microseconds = durationIn(MICROSECONDS)
  def micros = microseconds
  def microsecond = microseconds
  def micro = microseconds

  def milliseconds = durationIn(MILLISECONDS)
  def millis = milliseconds
  def millisecond = milliseconds
  def milli = milliseconds

  def seconds = durationIn(SECONDS)
  def second = seconds

  def minutes = durationIn(MINUTES)
  def minute = minutes

  def hours = durationIn(HOURS)
  def hour = hours

  def days = durationIn(DAYS)
  def day = days

  // Classified variants: the Classifier evidence converts the duration into
  // its result type R (e.g. `5 seconds fromNow` yields a Deadline).
  def nanoseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(nanoseconds)
  def nanos[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
  def nanosecond[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
  def nano[C](c: C)(implicit ev: Classifier[C]): ev.R = nanoseconds(c)
  def microseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(microseconds)
  def micros[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
  def microsecond[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
  def micro[C](c: C)(implicit ev: Classifier[C]): ev.R = microseconds(c)
  def milliseconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(milliseconds)
  def millis[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
  def millisecond[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
  def milli[C](c: C)(implicit ev: Classifier[C]): ev.R = milliseconds(c)
  def seconds[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(seconds)
  def second[C](c: C)(implicit ev: Classifier[C]): ev.R = seconds(c)
  def minutes[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(minutes)
  def minute[C](c: C)(implicit ev: Classifier[C]): ev.R = minutes(c)
  def hours[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(hours)
  def hour[C](c: C)(implicit ev: Classifier[C]): ev.R = hours(c)
  def days[C](c: C)(implicit ev: Classifier[C]): ev.R = ev.convert(days)
  def day[C](c: C)(implicit ev: Classifier[C]): ev.R = days(c)
}
/**
 * This object just holds some cogs which make the DSL machine work, not for direct consumption.
 * The Classifier instances decide what `5 seconds span` / `5 seconds fromNow`
 * evaluate to.
 */
object DurationConversions {
  /** Evidence mapping a marker value `C` to a result type `R`. */
  trait Classifier[C] {
    type R
    def convert(d: FiniteDuration): R
  }

  // `span` keeps the FiniteDuration unchanged.
  implicit object spanConvert extends Classifier[span.type] {
    type R = FiniteDuration
    def convert(d: FiniteDuration) = d
  }

  // `fromNow` anchors the duration to the current time, giving a Deadline.
  implicit object fromNowConvert extends Classifier[fromNow.type] {
    type R = Deadline
    def convert(d: FiniteDuration) = Deadline.now + d
  }
}
| jrudolph/futures-backport | src/main/scala/scala/concurrent/duration/DurationConversions.scala | Scala | bsd-3-clause | 3,415 |
package com.seremis.geninfusion.model
import com.seremis.geninfusion.api.model.{IAttachmentPoint, IModelPartType}
import net.minecraft.util.math.Vec3d
/** Immutable [[IAttachmentPoint]]: a fixed location plus the set of model
  * part types that may connect at that point. */
class AttachmentPoint(location: Vec3d, partTypes: Array[IModelPartType]) extends IAttachmentPoint {

    override def getLocation: Vec3d = location

    override def getConnectablePartTypes: Array[IModelPartType] = partTypes
}
| Seremis/Genetic-Infusion | src/main/scala/com/seremis/geninfusion/model/AttachmentPoint.scala | Scala | gpl-3.0 | 380 |
package com.arcusys.valamis.content.model
/** Root of a content tree: total count of content items plus the top-level nodes. */
case class ContentTree(contentAmount: Int,
                       nodes: Seq[ContentTreeNode])

/** Common interface of tree nodes; every node wraps one Content item. */
trait ContentTreeNode{
  def item: Content
}

/** Category node: carries the item count of its subtree and its children. */
case class CategoryTreeNode(item: Category,
                            contentAmount: Int,
                            nodes: Seq[ContentTreeNode]) extends ContentTreeNode

/** Leaf node for a plain-text content item. */
case class PlainTextNode(item: PlainText) extends ContentTreeNode

/** Leaf node for a question together with its answers. */
case class QuestionNode(item: Question,
                        answer: Seq[Answer]) extends ContentTreeNode
| igor-borisov/valamis | valamis-questionbank/src/main/scala/com/arcusys/valamis/content/model/ContentTree.scala | Scala | gpl-3.0 | 535 |
package com.gx.observer
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/** Demo entry point for the observer pattern: registers two observers on a
  * Weather subject and pushes three state changes through it, so each
  * observer is notified of every change. */
object App extends App{
  val weather = new Weather()
  weather.addObserver(new Boy)
  weather.addObserver(new Girl)

  weather.changing(WeatherType.COLD)
  weather.changing(WeatherType.RAINY)
  weather.changing(WeatherType.SUNNY)
}
| josephguan/scala-design-patterns | behavioral/observer/src/main/scala/com/gx/observer/App.scala | Scala | apache-2.0 | 866 |
package nest.sparkle.tools
import scala.concurrent.{ExecutionContext, Future}
import scala.language.existentials
import scala.reflect.runtime.universe._
import nest.sparkle.store.Column
import nest.sparkle.store.EventGroup.transposeSlices
import nest.sparkle.store.Store
import nest.sparkle.util.Log
import nest.sparkle.util.ObservableFuture.WrappedObservable
import nest.sparkle.util.StringUtil.lastPart
/** Base trait for exporting data from the column Store in tabular form.
* Subclasses should call leafDatSet() to retrieve a table of data from all
* the columns in a leaf DataSet, or column() to retrieve a table of data
* from a particular column.
*/
trait Exporter extends Log {
  implicit def execution: ExecutionContext
  def store: Store

  /** a table of data along with the name and type of its columns. */
  case class Tabular(rows: Future[Seq[Row]], columns: Seq[NameAndType])
  /** one table row; None marks a cell with no value for that column */
  case class Row(values: Seq[Option[Any]])
  /** column header: display name plus runtime type tag */
  case class NameAndType(name: String, typed: TypeTag[_])

  /** return a table of data from all the columns in a leaf dataSet, where
    * a leaf dataSet is a dataSet with only columns (not other dataSets) as
    * children */
  protected def leafDatSet(dataSet: String): Future[Tabular] = {
    store.leafDataSetColumnPaths(dataSet).flatMap { columnPaths =>
      exportRows(columnPaths)
    }
  }

  /** return a table of data from a particular column */
  protected def column(columnPath: String): Future[Tabular] = {
    exportRows(Seq(columnPath))
  }

  /** return a table of data from the specified columns.
    * The first output column is the shared key, followed by one value
    * column per columnPath (named by the path's last segment).
    * NOTE(review): an empty columnPaths would make keyTypes.head throw —
    * callers appear to guarantee at least one path; confirm. */
  private def exportRows(columnPaths: Seq[String]): Future[Tabular] = {
    val futureNameColumns = fetchColumns(columnPaths)
    futureNameColumns.map { nameColumns =>
      val (names, columns) = nameColumns.unzip
      val columnTypes = {
        val keyTypes = columns.map(_.keyType)
        // all columns must share one key type to be composited into rows
        require(keyTypes.forall { _ == keyTypes(0) })
        val valueTypes = columns.map(_.valueType)
        keyTypes.head +: valueTypes
      }
      val columnNames = {
        val columnValueNames = names.map { lastPart(_) }
        "key" +: columnValueNames
      }
      val namesAndTypes = columnNames zip columnTypes map { case (name, typed) => NameAndType(name, typed) }
      val rows = fetchAndCompositeRows(columns)
      Tabular(rows, namesAndTypes)
    }
  }

  /** return a future containing Column from the store along with their columnPath */
  private def fetchColumns(names: Seq[String]): Future[Seq[(String, Column[_, _])]] = {
    val futureColumns =
      names.map { name =>
        store.column[Any, Any](name).map { column => name -> column }
      }
    Future.sequence(futureColumns)
  }

  /** fetch all the events from a set of columns and composite the column data into
    * into tabular rows
    */
  private def fetchAndCompositeRows(columns: Seq[Column[_, _]]): Future[Seq[Row]] = {
    val events = columns.map { column =>
      val castColumn = column.asInstanceOf[Column[Any, Any]]
      castColumn.readRangeOld().initial.toFutureSeq
    }
    Future.sequence(events).map { allEvents =>
      // convert from columnar form to tabular form.
      transposeSlices(allEvents).map { values => Row(values) }
    }
  }
}
| mighdoll/sparkle | protocol/src/main/scala/nest/sparkle/tools/Exporter.scala | Scala | apache-2.0 | 3,214 |
import scala.util.parsing.combinator._
import java.io.FileReader
/** Minimal JSON grammar built on JavaTokenParsers; parse results are the raw
  * parser output (strings and `~` pairs), not a typed JSON model. */
class JSON extends JavaTokenParsers {
  // A value is an object, array, string, number, or one of the literals.
  def value : Parser[Any] = obj | arr |
                            stringLiteral |
                            floatingPointNumber |
                            "null" | "true" | "false"
  // { "k": v, ... }  — members separated by commas
  def obj : Parser[Any] = "{" ~ repsep(member, ",") ~ "}"
  // [ v, ... ] — values separated by commas
  def arr : Parser[Any] = "[" ~ repsep(value, ",") ~ "]"
  // "key": value
  def member: Parser[Any] = stringLiteral ~ ":" ~ value
}
/** Command-line driver: parses the JSON file named by the first argument and
  * prints the parse result. */
object ParseExpr extends JSON {
  def main(args: Array[String]): Unit = {   // explicit Unit: procedure syntax is deprecated
    val reader = new FileReader(args(0))
    try {
      println(parseAll(value, reader))
    } finally {
      reader.close()  // fix: the reader was previously never closed (resource leak)
    }
  }
}
| Bolt64/my_code | scala/combinator_parser.scala | Scala | mit | 616 |
/*
* Scala classfile decoder (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.scalap
package scalax
package rules
package scalasig
import language.postfixOps
import java.io.IOException
object ByteCode {
  /** Wraps a whole byte array as a ByteCode chunk. */
  def apply(bytes: Array[Byte]) = new ByteCode(bytes, 0, bytes.length)

  /** Loads the .class file backing `clazz` from its own classloader
    * and returns it as a ByteCode chunk. */
  def forClass(clazz: Class[_]) = {
    val name = clazz.getName
    val subPath = name.substring(name.lastIndexOf('.') + 1) + ".class"
    val in = clazz.getResourceAsStream(subPath)

    try {
      // NOTE(review): assumes available() reports the full remaining length;
      // true for typical file/jar resource streams but not guaranteed by the
      // InputStream contract — confirm if other stream types can occur here.
      var rest = in.available()
      val bytes = new Array[Byte](rest)
      // read() may return fewer bytes than requested, so loop until done
      while (rest > 0) {
        val res = in.read(bytes, bytes.length - rest, rest)
        if (res == -1) throw new IOException("read error")
        rest -= res
      }
      ByteCode(bytes)
    } finally {
      in.close()
    }
  }
}
/** Represents a chunk of raw bytecode (a window [pos, pos+length) into a
  * shared byte array). Used as input for the parsers.
  */
class ByteCode(val bytes: Array[Byte], val pos: Int, val length: Int) {
  assert(pos >= 0 && length >= 0 && pos + length <= bytes.length)

  /** First byte of the chunk plus the rest, or Failure when empty. */
  def nextByte = if (length == 0) Failure else Success(drop(1), bytes(pos))
  /** First `n` bytes as a chunk plus the rest, or Failure if too short. */
  def next(n: Int) = if (length >= n) Success(drop(n), take(n)) else Failure

  def take(n: Int) = new ByteCode(bytes, pos, n)
  def drop(n: Int) = new ByteCode(bytes, pos + n, length - n)

  /** Left fold over the bytes of this chunk. */
  def fold[X](x: X)(f: (X, Byte) => X): X = {
    var result = x
    var i = pos
    while (i < pos + length) {
      result = f(result, bytes(i))
      i += 1
    }
    result
  }

  override def toString = length + " bytes"

  // big-endian interpretations of the chunk
  def toInt = fold(0) { (x, b) => (x << 8) + (b & 0xFF)}
  def toLong = fold(0L) { (x, b) => (x << 8) + (b & 0xFF)}

  /**
   * Transforms array subsequence of the current buffer into the UTF8 String and
   * stores and array of bytes for the decompiler
   */
  def fromUTF8StringAndBytes = {
    val chunk: Array[Byte] = new Array[Byte](length)
    System.arraycopy(bytes, pos, chunk, 0, length)
    val str = new String(io.Codec.fromUTF8(bytes, pos, length))

    StringBytesPair(str, chunk)
  }

  /** Unsigned byte at offset `i` within this chunk.
    * Fix: the parameter was previously ignored — the method always returned
    * the byte at offset 0 (`bytes(pos)`), contradicting its signature. */
  def byte(i: Int) = bytes(pos + i) & 0xFF
}
/**
 * Pairs a decoded UTF-8 string with the raw bytes it came from
 * (the bytes are retained for the decompiler).
 */
case class StringBytesPair(string: String, bytes: Array[Byte])
/** Provides rules for parsing byte-code.
 */
trait ByteCodeReader extends RulesWithState {
  type S = ByteCode
  type Parser[A] = Rule[A, String]

  // one raw (signed) byte
  val byte = apply(_.nextByte)

  // unsigned 1-, 2- and 4-byte big-endian integers
  val u1 = byte ^^ (_ & 0xFF)
  val u2 = bytes(2) ^^ (_.toInt)
  val u4 = bytes(4) ^^ (_.toInt) // should map to Long??

  // a chunk of exactly n bytes
  def bytes(n: Int) = apply(_ next n)
}
/** Rule-combinator grammar for the JVM class-file format (JVMS chapter 4). */
object ClassFileParser extends ByteCodeReader {
  def parse(byteCode: ByteCode) = expect(classFile)(byteCode)
  def parseAnnotations(byteCode: ByteCode) = expect(annotations)(byteCode)

  val magicNumber = (u4 filter (_ == 0xCAFEBABE)) | error("Not a valid class file")
  val version = u2 ~ u2 ^^ { case minor ~ major => (major, minor) }
  // u2 gives the pool length; entries are read until the pool is full
  val constantPool = (u2 ^^ ConstantPool) >> repeatUntil(constantPoolEntry)(_.isFull)

  // NOTE currently most constants just evaluate to a string description
  // TODO evaluate to useful values
  val utf8String = (u2 >> bytes) ^^ add1 { raw => pool => raw.fromUTF8StringAndBytes }
  val intConstant = u4 ^^ add1 { x => pool => x }
  val floatConstant = bytes(4) ^^ add1 { raw => pool => "Float: TODO" }
  // 8-byte constants occupy two pool slots, hence add2
  val longConstant = bytes(8) ^^ add2 { raw => pool => raw.toLong }
  val doubleConstant = bytes(8) ^^ add2 { raw => pool => "Double: TODO" }
  val classRef = u2 ^^ add1 { x => pool => "Class: " + pool(x) }
  val stringRef = u2 ^^ add1 { x => pool => "String: " + pool(x) }
  val fieldRef = memberRef("Field")
  val methodRef = memberRef("Method")
  val interfaceMethodRef = memberRef("InterfaceMethod")
  val nameAndType = u2 ~ u2 ^^ add1 { case name ~ descriptor => pool => "NameAndType: " + pool(name) + ", " + pool(descriptor) }
  val methodHandle = u1 ~ u2 ^^ add1 { case referenceKind ~ referenceIndex => pool => "MethodHandle: " + referenceKind + ", " + pool(referenceIndex) }
  val methodType = u2 ^^ add1 { case descriptorIndex => pool => "MethodType: " + pool(descriptorIndex) }
  val invokeDynamic = u2 ~ u2 ^^ add1 { case bootstrapMethodAttrIndex ~ nameAndTypeIndex => pool => "InvokeDynamic: " + "bootstrapMethodAttrIndex = " + bootstrapMethodAttrIndex + ", " + pool(nameAndTypeIndex) }

  // dispatch on the constant-pool tag byte (JVMS table 4.4-A)
  val constantPoolEntry = u1 >> {
    case 1 => utf8String
    case 3 => intConstant
    case 4 => floatConstant
    case 5 => longConstant
    case 6 => doubleConstant
    case 7 => classRef
    case 8 => stringRef
    case 9 => fieldRef
    case 10 => methodRef
    case 11 => interfaceMethodRef
    case 12 => nameAndType
    case 15 => methodHandle
    case 16 => methodType
    case 18 => invokeDynamic
  }

  val interfaces = u2 >> u2.times

  // bytes are parametrizes by the length, declared in u4 section
  val attribute = u2 ~ (u4 >> bytes) ^~^ Attribute

  // parse attributes u2 times
  val attributes = u2 >> attribute.times

  // parse runtime-visible annotations (JVMS 4.7.16)
  abstract class ElementValue
  case class AnnotationElement(elementNameIndex: Int, elementValue: ElementValue)
  case class ConstValueIndex(index: Int) extends ElementValue
  case class EnumConstValue(typeNameIndex: Int, constNameIndex: Int) extends ElementValue
  case class ClassInfoIndex(index: Int) extends ElementValue
  case class Annotation(typeIndex: Int, elementValuePairs: Seq[AnnotationElement]) extends ElementValue
  case class ArrayValue(values: Seq[ElementValue]) extends ElementValue

  // element_value union, dispatched on its tag character
  def element_value: Parser[ElementValue] = u1 >> {
    case 'B'|'C'|'D'|'F'|'I'|'J'|'S'|'Z'|'s' => u2 ^^ ConstValueIndex
    case 'e' => u2 ~ u2 ^~^ EnumConstValue
    case 'c' => u2 ^^ ClassInfoIndex
    case '@' => annotation //nested annotation
    case '[' => u2 >> element_value.times ^^ ArrayValue
  }

  val element_value_pair = u2 ~ element_value ^~^ AnnotationElement
  val annotation: Parser[Annotation] = u2 ~ (u2 >> element_value_pair.times) ^~^ Annotation
  val annotations = u2 >> annotation.times

  val field = u2 ~ u2 ~ u2 ~ attributes ^~~~^ Field
  val fields = u2 >> field.times

  val method = u2 ~ u2 ~ u2 ~ attributes ^~~~^ Method
  val methods = u2 >> method.times

  val header = magicNumber -~ u2 ~ u2 ~ constantPool ~ u2 ~ u2 ~ u2 ~ interfaces ^~~~~~~^ ClassFileHeader
  // !u1 asserts end-of-input after the attributes
  val classFile = header ~ fields ~ methods ~ attributes ~- !u1 ^~~~^ ClassFile

  // TODO create a useful object, not just a string
  def memberRef(description: String) = u2 ~ u2 ^^ add1 {
    case classRef ~ nameAndTypeRef => pool => description + ": " + pool(classRef) + ", " + pool(nameAndTypeRef)
  }

  // register a 1-slot / 2-slot lazy entry in the constant pool
  def add1[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw)
  def add2[T](f: T => ConstantPool => Any)(raw: T)(pool: ConstantPool) = pool add f(raw) add { pool => "<empty>" }
}
/** Parsed class file: header plus fields, methods and class-level attributes. */
case class ClassFile(
  header: ClassFileHeader,
  fields: Seq[Field],
  methods: Seq[Method],
  attributes: Seq[Attribute]) {

  def majorVersion = header.major
  def minorVersion = header.minor

  def className = constant(header.classIndex)
  def superClass = constant(header.superClassIndex)
  def interfaces = header.interfaces.map(constant)

  /** Pool entry at `index`, unwrapping UTF-8 entries to their String. */
  def constant(index: Int) = header.constants(index) match {
    case StringBytesPair(str, _) => str
    case z => z
  }

  /** Pool entry at `index` without unwrapping (keeps the raw bytes). */
  def constantWrapped(index: Int) = header.constants(index)

  /** First attribute with the given (resolved) name, if any. */
  def attribute(name: String) = attributes.find {attrib => constant(attrib.nameIndex) == name }

  val RUNTIME_VISIBLE_ANNOTATIONS = "RuntimeVisibleAnnotations"
  /** Class-level runtime-visible annotations, parsed on demand. */
  def annotations = (attributes.find(attr => constant(attr.nameIndex) == RUNTIME_VISIBLE_ANNOTATIONS)
    .map(attr => ClassFileParser.parseAnnotations(attr.byteCode)))

  /** The annotation whose type name matches `name`, if present. */
  def annotation(name: String) = annotations.flatMap(seq => seq.find(annot => constant(annot.typeIndex) == name))
}
// Raw class-file members; indices refer to constant-pool entries.
case class Attribute(nameIndex: Int, byteCode: ByteCode)
case class Field(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
case class Method(flags: Int, nameIndex: Int, descriptorIndex: Int, attributes: Seq[Attribute])
/** Fixed-layout prefix of a class file: version, constant pool, access
  * flags, this/super class indices and implemented interface indices. */
case class ClassFileHeader(
  minor: Int,
  major: Int,
  constants: ConstantPool,
  flags: Int,
  classIndex: Int,
  superClassIndex: Int,
  interfaces: Seq[Int]) {

  def constant(index: Int) = constants(index)
}
/** Constant pool of `len - 1` entries. Entries are registered as thunks via
  * [[add]] and evaluated lazily and at most once on lookup. */
case class ConstantPool(len: Int) {
  // JVMS counts the pool as len, but it holds len - 1 usable entries.
  val size = len - 1

  // Pending thunks; a slot is nulled out once its value has been memoised.
  private val buffer = new scala.collection.mutable.ArrayBuffer[ConstantPool => Any]
  // Memoised results, one slot per entry.
  private val values = Array.fill[Option[Any]](size)(None)

  /** True once a thunk has been registered for every entry. */
  def isFull = buffer.length >= size

  /** Entry at `index` (1-based), computed and cached on first access. */
  def apply(index: Int) = {
    // Note constant pool indices are 1-based
    val slot = index - 1
    values(slot) match {
      case Some(cached) => cached
      case None =>
        val computed = buffer(slot)(this)
        buffer(slot) = null // drop the thunk; it will never be needed again
        values(slot) = Some(computed)
        computed
    }
  }

  /** Registers the next entry's thunk and returns `this` for chaining. */
  def add(f: ConstantPool => Any) = {
    buffer += f
    this
  }
}
| martijnhoekstra/scala | src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala | Scala | apache-2.0 | 8,985 |
package com.letstalkdata.hexiles.game
import com.letstalkdata.hexiles.graphics.Colors.Color
import com.letstalkdata.hexiles.shapes.Cube
/**
* Author: Phillip Johnson
* Date: 5/30/15
*/
/** Canonical form of a board solution: each piece's cubes keyed by its
  * colour, so two equivalent placements compare equal. */
class Solution(pieces:Seq[Piece]) {
  //TODO: Consider implementing this: require(pieces.size == 5)

  // NOTE(review): assumes piece colours are unique — duplicate colours would
  // silently overwrite one another in toMap; confirm with callers.
  val map:Map[Color,Seq[Cube]] = pieces.map(p => p.color -> p.getCubes).toMap

  override def toString = map.toString()

  def canEqual(other: Any): Boolean = other.isInstanceOf[Solution]

  // Equality and hashCode both delegate to the colour->cubes map.
  override def equals(other: Any): Boolean = other match {
    case that: Solution =>
      (that canEqual this) &&
      this.map.equals(that.map)
    case _ => false
  }

  override def hashCode(): Int = {
    map.hashCode()
  }
}
| phillipjohnson/hexiles-web | src/main/scala/com/letstalkdata/hexiles/game/Solution.scala | Scala | mit | 716 |
package nsmc.mongo
import com.mongodb.{BasicDBObject, DBObject}
/** A key range [min, max) to read from a Mongo shard at `destination`. */
private[nsmc]
case class MongoInterval(min: DBObject, max: DBObject, destination: Destination) extends Serializable {
}

case object MongoInterval {
  /** Unbounded interval (empty min/max documents) for the given destination. */
  def apply(destination: Destination) = new MongoInterval(new BasicDBObject(), new BasicDBObject(), destination)
}
| shotishu/spark-mongodb-connector | src/main/scala/nsmc/mongo/MongoInterval.scala | Scala | apache-2.0 | 331 |
package feh.tec.cvis.gui
import java.awt.image.BufferedImage
import feh.dsl.swing2.Var
import scala.swing.Swing._
import scala.swing._
import scala.swing.event.{Key, MouseMoved}
/** Scrollable panel that paints [[img]] at its natural size. */
trait SimplePreview extends ScrollPane{
  def img: BufferedImage

  horizontalScrollBarPolicy = ScrollPane.BarPolicy.AsNeeded
  verticalScrollBarPolicy = ScrollPane.BarPolicy.AsNeeded

  // Re-read from img on each call, so a changed image keeps sizes current.
  private def w = img.getWidth
  private def h = img.getHeight

  // Inner component fixed to the image's size; the ScrollPane scrolls it.
  val underlying = new Component {
    maximumSize = w -> h
    minimumSize = w -> h
    preferredSize = w -> h

    override def paint(g: Graphics2D): Unit = g.drawImage(img, 0, 0, null)
  }

  this.contents = underlying
}
/** Mixin that forwards mouse movement over the preview's inner component
  * to [[onMouseMovement]] (point plus modifier keys). */
trait PreviewMouseReaction{
  self: SimplePreview =>

  def onMouseMovement: (Point, Key.Modifiers) => Unit

  listenTo(underlying.mouse.moves)
  reactions += {
    // only react to moves on the image component itself
    case MouseMoved(`underlying`, p, k) => onMouseMovement(p, k)
  }
}
/** Mixin that draws small circles at [[highlights]] on top of the image. */
trait PreviewHighlights{
  self: SimplePreview =>

  val highlights: Var[Set[Point]] = Var(Set())

  def highlightColor: Color

  override def paint(g: Graphics2D): Unit = {
    g.drawImage(img, 0, 0, null)
    g.setColor(highlightColor)
    // NOTE(review): p.y/p.x are swapped relative to screen coordinates —
    // presumably the points are stored as (row, col); confirm with callers.
    highlights.get.foreach(p => g.drawOval(p.y - 1, p.x - 1, 2, 2))
  }
}
| fehu/comp-vis | gui/src/main/scala/feh/tec/cvis/gui/Previews.scala | Scala | mit | 1,206 |
package org.questions.arrays
import scala.annotation.tailrec
import scala.collection.immutable.HashSet
/**
* @author maximn
* @since 30-Oct-2015
*/
class TwoSum {
  /** Finds a pair of values in `seq` summing to `sum`, scanning left to
    * right with a set of previously seen values (O(n) time, O(n) space).
    *
    * Returns Some((current, earlier)) for the first qualifying pair, where
    * `current` is the later element in scan order, or None if no pair sums
    * to `sum`.
    *
    * Fix: the original matched with `Nil`/`::`, which only works for List —
    * any other Seq (e.g. Vector) threw a MatchError. The `Seq()`/`+:`
    * extractors below work for every Seq implementation.
    */
  def findPairSum(seq: Seq[Int], sum: Int): Option[(Int, Int)] = {
    @tailrec
    def inner(rest: Seq[Int], seen: HashSet[Int]): Option[(Int, Int)] = rest match {
      case Seq() => None
      case head +: tail =>
        val complement = sum - head
        if (seen.contains(complement))
          Some(head -> complement)
        else inner(tail, seen + head)
    }

    inner(seq, HashSet.empty[Int])
  }
}
} | maximn/coding-interview-questions-scala | src/main/scala/org/questions/arrays/TwoSum.scala | Scala | apache-2.0 | 577 |
package io.kaitai.struct.translators
import io.kaitai.struct.datatype.DataType
import io.kaitai.struct.datatype.DataType._
import io.kaitai.struct.exprlang.{Ast, Expressions}
import io.kaitai.struct.format.{ClassSpec, FixedSized}
import io.kaitai.struct.languages._
import io.kaitai.struct.languages.components.{CppImportList, LanguageCompilerStatic}
import io.kaitai.struct.{ImportList, RuntimeConfig, StringLanguageOutputWriter}
import org.scalatest.{FunSuite, Tag}
import org.scalatest.Matchers._
class TranslatorSpec extends FunSuite {
// Integer literals + unary minus
everybody("123", "123", Int1Type(true))
everybody("223", "223", Int1Type(false))
everybody("1234", "1234")
everybody("-456", "-456")
everybody("0x1234", "4660")
// less and more than 32 Bit signed int
everybody("1000000000", "1000000000")
everybodyExcept("100000000000", "100000000000", Map[LanguageCompilerStatic, String](
CppCompiler -> "100000000000LL",
GoCompiler -> "int64(100000000000)",
JavaCompiler -> "100000000000L"
))
// Float literals
everybody("1.0", "1.0", CalcFloatType)
everybody("123.456", "123.456", CalcFloatType)
everybody("-123.456", "-123.456", CalcFloatType)
// Simple integer operations
everybody("1 + 2", "(1 + 2)")
everybodyExcept("3 / 2", "(3 / 2)", Map(
JavaScriptCompiler -> "Math.floor(3 / 2)",
LuaCompiler -> "math.floor(3 / 2)",
PerlCompiler -> "int(3 / 2)",
PHPCompiler -> "intval(3 / 2)",
PythonCompiler -> "3 // 2"
))
everybody("1 + 2 + 5", "((1 + 2) + 5)")
everybodyExcept("(1 + 2) / (7 * 8)", "((1 + 2) / (7 * 8))", Map(
JavaScriptCompiler -> "Math.floor((1 + 2) / (7 * 8))",
LuaCompiler -> "math.floor((1 + 2) / (7 * 8))",
PerlCompiler -> "int((1 + 2) / (7 * 8))",
PHPCompiler -> "intval((1 + 2) / (7 * 8))",
PythonCompiler -> "(1 + 2) // (7 * 8)"
))
everybody("1 < 2", "1 < 2", CalcBooleanType)
everybody("1 == 2", "1 == 2", CalcBooleanType)
full("2 < 3 ? \\"foo\\" : \\"bar\\"", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "(2 < 3) ? (std::string(\\"foo\\")) : (std::string(\\"bar\\"))",
CSharpCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\"",
GoCompiler -> """var tmp1 string;
|if (2 < 3) {
| tmp1 = "foo"
|} else {
| tmp1 = "bar"
|}
|tmp1""".stripMargin,
JavaCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\"",
JavaScriptCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\"",
LuaCompiler -> "utils.box_unwrap(2 < 3 and utils.box_wrap(\\"foo\\") or \\"bar\\")",
PerlCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\"",
PHPCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\"",
PythonCompiler -> "u\\"foo\\" if 2 < 3 else u\\"bar\\"",
RubyCompiler -> "2 < 3 ? \\"foo\\" : \\"bar\\""
))
everybodyExcept("~777", "~777", Map[LanguageCompilerStatic, String](
GoCompiler -> "^777"
))
everybodyExcept("~(7+3)", "~((7 + 3))", Map[LanguageCompilerStatic, String](
GoCompiler -> "^((7 + 3))"
))
// Simple float operations
everybody("1.2 + 3.4", "(1.2 + 3.4)", CalcFloatType)
everybody("1.2 + 3", "(1.2 + 3)", CalcFloatType)
everybody("1 + 3.4", "(1 + 3.4)", CalcFloatType)
everybody("1.0 < 2", "1.0 < 2", CalcBooleanType)
everybody("3 / 2.0", "(3 / 2.0)", CalcFloatType)
everybody("(1 + 2) / (7 * 8.1)", "((1 + 2) / (7 * 8.1))", CalcFloatType)
// Boolean literals
full("true", CalcBooleanType, CalcBooleanType, Map[LanguageCompilerStatic, String](
CppCompiler -> "true",
CSharpCompiler -> "true",
GoCompiler -> "true",
JavaCompiler -> "true",
JavaScriptCompiler -> "true",
LuaCompiler -> "true",
PerlCompiler -> "1",
PHPCompiler -> "true",
PythonCompiler -> "True",
RubyCompiler -> "true"
))
full("false", CalcBooleanType, CalcBooleanType, Map[LanguageCompilerStatic, String](
CppCompiler -> "false",
CSharpCompiler -> "false",
GoCompiler -> "false",
JavaCompiler -> "false",
JavaScriptCompiler -> "false",
LuaCompiler -> "false",
PerlCompiler -> "0",
PHPCompiler -> "false",
PythonCompiler -> "False",
RubyCompiler -> "false"
))
full("some_bool.to_i", CalcBooleanType, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "some_bool()",
CSharpCompiler -> "(SomeBool ? 1 : 0)",
GoCompiler -> """tmp1 := 0
|if this.SomeBool {
| tmp1 = 1
|}
|tmp1""".stripMargin,
JavaCompiler -> "(someBool() ? 1 : 0)",
JavaScriptCompiler -> "(this.someBool | 0)",
LuaCompiler -> "self.some_bool and 1 or 0",
PerlCompiler -> "$self->some_bool()",
PHPCompiler -> "intval($this->someBool())",
PythonCompiler -> "int(self.some_bool)",
RubyCompiler -> "(some_bool ? 1 : 0)"
))
// Member access
full("foo_str", CalcStrType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "foo_str()",
CSharpCompiler -> "FooStr",
GoCompiler -> "this.FooStr",
JavaCompiler -> "fooStr()",
JavaScriptCompiler -> "this.fooStr",
LuaCompiler -> "self.foo_str",
PerlCompiler -> "$self->foo_str()",
PHPCompiler -> "$this->fooStr()",
PythonCompiler -> "self.foo_str",
RubyCompiler -> "foo_str"
))
full("foo_block", userOwnedType(List("block")), userBorrowedType(List("block")), Map[LanguageCompilerStatic, String](
CppCompiler -> "foo_block()",
CSharpCompiler -> "FooBlock",
GoCompiler -> "this.FooBlock",
JavaCompiler -> "fooBlock()",
JavaScriptCompiler -> "this.fooBlock",
LuaCompiler -> "self.foo_block",
PerlCompiler -> "$self->foo_block()",
PHPCompiler -> "$this->fooBlock()",
PythonCompiler -> "self.foo_block",
RubyCompiler -> "foo_block"
))
full("foo.bar", FooBarProvider, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "foo()->bar()",
CSharpCompiler -> "Foo.Bar",
GoCompiler -> "this.Foo.Bar",
JavaCompiler -> "foo().bar()",
JavaScriptCompiler -> "this.foo.bar",
LuaCompiler -> "self.foo.bar",
PerlCompiler -> "$self->foo()->bar()",
PHPCompiler -> "$this->foo()->bar()",
PythonCompiler -> "self.foo.bar",
RubyCompiler -> "foo.bar"
))
full("foo.inner.baz", FooBarProvider, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "foo()->inner()->baz()",
CSharpCompiler -> "Foo.Inner.Baz",
GoCompiler -> "this.Foo.Inner.Baz",
JavaCompiler -> "foo().inner().baz()",
JavaScriptCompiler -> "this.foo.inner.baz",
LuaCompiler -> "self.foo.inner.baz",
PerlCompiler -> "$self->foo()->inner()->baz()",
PHPCompiler -> "$this->foo()->inner()->baz()",
PythonCompiler -> "self.foo.inner.baz",
RubyCompiler -> "foo.inner.baz"
))
full("_root.foo", userOwnedType(List("top_class", "block")), userBorrowedType(List("top_class", "block")), Map[LanguageCompilerStatic, String](
CppCompiler -> "_root()->foo()",
CSharpCompiler -> "M_Root.Foo",
GoCompiler -> "this._root.Foo",
JavaCompiler -> "_root.foo()",
JavaScriptCompiler -> "this._root.foo",
LuaCompiler -> "self._root.foo",
PerlCompiler -> "$self->_root()->foo()",
PHPCompiler -> "$this->_root()->foo()",
PythonCompiler -> "self._root.foo",
RubyCompiler -> "_root.foo"
))
full("a != 2 and a != 5", CalcIntType, CalcBooleanType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a() != 2 && a() != 5",
CSharpCompiler -> "A != 2 && A != 5",
GoCompiler -> "a != 2 && a != 5",
JavaCompiler -> "a() != 2 && a() != 5",
JavaScriptCompiler -> "this.a != 2 && this.a != 5",
LuaCompiler -> "self.a ~= 2 and self.a ~= 5",
PerlCompiler -> "$self->a() != 2 && $self->a() != 5",
PHPCompiler -> "$this->a() != 2 && $this->a() != 5",
PythonCompiler -> "self.a != 2 and self.a != 5",
RubyCompiler -> "a != 2 && a != 5"
))
// Arrays
full("[0, 1, 100500]", CalcIntType, ArrayTypeInStream(CalcIntType), Map[LanguageCompilerStatic, String](
CSharpCompiler -> "new List<int> { 0, 1, 100500 }",
GoCompiler -> "[]int{0, 1, 100500}",
JavaCompiler -> "new ArrayList<Integer>(Arrays.asList(0, 1, 100500))",
JavaScriptCompiler -> "[0, 1, 100500]",
LuaCompiler -> "{0, 1, 100500}",
PerlCompiler -> "(0, 1, 100500)",
PHPCompiler -> "[0, 1, 100500]",
PythonCompiler -> "[0, 1, 100500]",
RubyCompiler -> "[0, 1, 100500]"
))
full("[34, 0, 10, 64, 65, 66, 92]", CalcIntType, CalcBytesType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\\\x22\\\\x00\\\\x0A\\\\x40\\\\x41\\\\x42\\\\x5C\\", 7)",
CSharpCompiler -> "new byte[] { 34, 0, 10, 64, 65, 66, 92 }",
GoCompiler -> "[]uint8{34, 0, 10, 64, 65, 66, 92}",
JavaCompiler -> "new byte[] { 34, 0, 10, 64, 65, 66, 92 }",
JavaScriptCompiler -> "[34, 0, 10, 64, 65, 66, 92]",
LuaCompiler -> "\\"\\\\034\\\\000\\\\010\\\\064\\\\065\\\\066\\\\092\\"",
PerlCompiler -> "pack('C*', (34, 0, 10, 64, 65, 66, 92))",
PHPCompiler -> "\\"\\\\x22\\\\x00\\\\x0A\\\\x40\\\\x41\\\\x42\\\\x5C\\"",
PythonCompiler -> "b\\"\\\\x22\\\\x00\\\\x0A\\\\x40\\\\x41\\\\x42\\\\x5C\\"",
RubyCompiler -> "[34, 0, 10, 64, 65, 66, 92].pack('C*')"
))
full("[255, 0, 255]", CalcIntType, CalcBytesType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\\\xFF\\\\x00\\\\xFF\\", 3)",
CSharpCompiler -> "new byte[] { 255, 0, 255 }",
GoCompiler -> "[]uint8{255, 0, 255}",
JavaCompiler -> "new byte[] { -1, 0, -1 }",
JavaScriptCompiler -> "[255, 0, 255]",
LuaCompiler -> "\\"\\\\255\\\\000\\\\255\\"",
PerlCompiler -> "pack('C*', (255, 0, 255))",
PHPCompiler -> "\\"\\\\xFF\\\\x00\\\\xFF\\"",
PythonCompiler -> "b\\"\\\\255\\\\000\\\\255\\"",
RubyCompiler -> "[255, 0, 255].pack('C*')"
))
full("[0, 1, 2].length", CalcIntType, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\\\x00\\\\x01\\\\x02\\", 3).length()",
GoCompiler -> "len([]uint8{0, 1, 2})",
JavaCompiler -> "new byte[] { 0, 1, 2 }.length",
LuaCompiler -> "string.len(\\"str\\")",
PerlCompiler -> "length(pack('C*', (0, 1, 2)))",
PHPCompiler -> "strlen(\\"\\\\x00\\\\x01\\\\x02\\")",
PythonCompiler -> "len(b\\"\\\\x00\\\\x01\\\\x02\\")",
RubyCompiler -> "[0, 1, 2].pack('C*').size"
))
full("a[42]", ArrayTypeInStream(CalcStrType), CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a()->at(42)",
CSharpCompiler -> "A[42]",
GoCompiler -> "this.A[42]",
JavaCompiler -> "a().get((int) 42)",
JavaScriptCompiler -> "this.a[42]",
LuaCompiler -> "self.a[43]",
PHPCompiler -> "$this->a()[42]",
PythonCompiler -> "self.a[42]",
RubyCompiler -> "a[42]"
))
full("a[42 - 2]", ArrayTypeInStream(CalcStrType), CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a()->at((42 - 2))",
CSharpCompiler -> "A[(42 - 2)]",
GoCompiler -> "this.A[(42 - 2)]",
JavaCompiler -> "a().get((42 - 2))",
JavaScriptCompiler -> "this.a[(42 - 2)]",
LuaCompiler -> "self.a[(43 - 2)]",
PHPCompiler -> "$this->a()[(42 - 2)]",
PythonCompiler -> "self.a[(42 - 2)]",
RubyCompiler -> "a[(42 - 2)]"
))
full("a.first", ArrayTypeInStream(CalcIntType), CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a()->front()",
CSharpCompiler -> "A[0]",
GoCompiler -> "this.A[0]",
JavaCompiler -> "a().get(0)",
JavaScriptCompiler -> "this.a[0]",
LuaCompiler -> "self.a[1]",
PHPCompiler -> "$this->a()[0]",
PythonCompiler -> "self.a[0]",
RubyCompiler -> "a.first"
))
full("a.last", ArrayTypeInStream(CalcIntType), CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a()->back()",
CSharpCompiler -> "A[A.Count - 1]",
GoCompiler -> "this.A[len(this.A)-1]",
JavaCompiler -> "a().get(a().size() - 1)",
JavaScriptCompiler -> "this.a[this.a.length - 1]",
LuaCompiler -> "self.a[#self.a]",
PHPCompiler -> "$this->a()[count($this->a()) - 1]",
PythonCompiler -> "self.a[-1]",
RubyCompiler -> "a.last"
))
full("a.size", ArrayTypeInStream(CalcIntType), CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "a()->size()",
CSharpCompiler -> "A.Count",
GoCompiler -> "len(this.A)",
JavaCompiler -> "a().size()",
JavaScriptCompiler -> "this.a.length",
LuaCompiler -> "#self.a",
PHPCompiler -> "count($this->a())",
PerlCompiler -> "scalar($self->a())",
PythonCompiler -> "len(self.a)",
RubyCompiler -> "a.length"
))
// Strings
// String literals: C++ wraps in std::string(...), Python emits a u"..." literal,
// everything else passes the literal through unchanged.
full("\\"str\\"", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str\\")",
CSharpCompiler -> "\\"str\\"",
GoCompiler -> "\\"str\\"",
JavaCompiler -> "\\"str\\"",
JavaScriptCompiler -> "\\"str\\"",
LuaCompiler -> "\\"str\\"",
PerlCompiler -> "\\"str\\"",
PHPCompiler -> "\\"str\\"",
PythonCompiler -> "u\\"str\\"",
RubyCompiler -> "\\"str\\""
))
full("\\"str\\\\nnext\\"", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str\\\\nnext\\")",
CSharpCompiler -> "\\"str\\\\nnext\\"",
GoCompiler -> "\\"str\\\\nnext\\"",
JavaCompiler -> "\\"str\\\\nnext\\"",
JavaScriptCompiler -> "\\"str\\\\nnext\\"",
LuaCompiler -> "\\"str\\\\nnext\\"",
PerlCompiler -> "\\"str\\\\nnext\\"",
PHPCompiler -> "\\"str\\\\nnext\\"",
PythonCompiler -> "u\\"str\\\\nnext\\"",
RubyCompiler -> "\\"str\\\\nnext\\""
))
// \u000a (LF) is normalized to \n in most targets; Go keeps the \u000a escape.
full("\\"str\\\\u000anext\\"", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str\\\\nnext\\")",
CSharpCompiler -> "\\"str\\\\nnext\\"",
GoCompiler -> "\\"str\\\\u000anext\\"",
JavaCompiler -> "\\"str\\\\nnext\\"",
JavaScriptCompiler -> "\\"str\\\\nnext\\"",
LuaCompiler -> "\\"str\\\\nnext\\"",
PerlCompiler -> "\\"str\\\\nnext\\"",
PHPCompiler -> "\\"str\\\\nnext\\"",
PythonCompiler -> "u\\"str\\\\nnext\\"",
RubyCompiler -> "\\"str\\\\nnext\\""
))
// Embedded NUL: C++ gets an explicit length argument (8) so the \000 byte survives.
full("\\"str\\\\0next\\"", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str\\\\000next\\", 8)",
CSharpCompiler -> "\\"str\\\\0next\\"",
GoCompiler -> "\\"str\\\\000next\\"",
JavaCompiler -> "\\"str\\\\000next\\"",
JavaScriptCompiler -> "\\"str\\\\000next\\"",
LuaCompiler -> "\\"str\\\\000next\\"",
PerlCompiler -> "\\"str\\\\000next\\"",
PHPCompiler -> "\\"str\\\\000next\\"",
PythonCompiler -> "u\\"str\\\\000next\\"",
RubyCompiler -> "\\"str\\\\000next\\""
))
// String operators: the second argument to everybodyExcept is the default
// expectation; the map lists only the languages that deviate from it.
everybodyExcept("\\"str1\\" + \\"str2\\"", "\\"str1\\" + \\"str2\\"", Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str1\\") + std::string(\\"str2\\")",
LuaCompiler -> "\\"str1\\" .. \\"str2\\"",
PerlCompiler -> "\\"str1\\" . \\"str2\\"",
PHPCompiler -> "\\"str1\\" . \\"str2\\"",
PythonCompiler -> "u\\"str1\\" + u\\"str2\\""
), CalcStrType)
everybodyExcept("\\"str1\\" == \\"str2\\"", "\\"str1\\" == \\"str2\\"", Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str1\\") == (std::string(\\"str2\\"))",
JavaCompiler -> "\\"str1\\".equals(\\"str2\\")",
LuaCompiler -> "\\"str1\\" == \\"str2\\"",
PerlCompiler -> "\\"str1\\" eq \\"str2\\"",
PythonCompiler -> "u\\"str1\\" == u\\"str2\\""
), CalcBooleanType)
everybodyExcept("\\"str1\\" != \\"str2\\"", "\\"str1\\" != \\"str2\\"", Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str1\\") != std::string(\\"str2\\")",
JavaCompiler -> "!(\\"str1\\").equals(\\"str2\\")",
LuaCompiler -> "\\"str1\\" ~= \\"str2\\"",
PerlCompiler -> "\\"str1\\" ne \\"str2\\"",
PythonCompiler -> "u\\"str1\\" != u\\"str2\\""
), CalcBooleanType)
everybodyExcept("\\"str1\\" < \\"str2\\"", "\\"str1\\" < \\"str2\\"", Map[LanguageCompilerStatic, String](
CppCompiler -> "(std::string(\\"str1\\").compare(std::string(\\"str2\\")) < 0)",
CSharpCompiler -> "(\\"str1\\".CompareTo(\\"str2\\") < 0)",
JavaCompiler -> "(\\"str1\\".compareTo(\\"str2\\") < 0)",
LuaCompiler -> "\\"str1\\" < \\"str2\\"",
PerlCompiler -> "\\"str1\\" lt \\"str2\\"",
PythonCompiler -> "u\\"str1\\" < u\\"str2\\""
), CalcBooleanType)
// String pseudo-attributes / methods.
full("\\"str\\".length", CalcIntType, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"str\\").length()",
CSharpCompiler -> "\\"str\\".Length",
GoCompiler -> "utf8.RuneCountInString(\\"str\\")",
JavaCompiler -> "\\"str\\".length()",
JavaScriptCompiler -> "\\"str\\".length",
LuaCompiler -> "string.len(\\"str\\")",
PerlCompiler -> "length(\\"str\\")",
PHPCompiler -> "strlen(\\"str\\")",
PythonCompiler -> "len(u\\"str\\")",
RubyCompiler -> "\\"str\\".size"
))
full("\\"str\\".reverse", CalcIntType, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "kaitai::kstream::reverse(std::string(\\"str\\"))",
CSharpCompiler -> "new string(Array.Reverse(\\"str\\".ToCharArray()))",
GoCompiler -> "kaitai.StringReverse(\\"str\\")",
JavaCompiler -> "new StringBuilder(\\"str\\").reverse().toString()",
JavaScriptCompiler -> "Array.from(\\"str\\").reverse().join('')",
LuaCompiler -> "string.reverse(\\"str\\")",
PerlCompiler -> "scalar(reverse(\\"str\\"))",
PHPCompiler -> "strrev(\\"str\\")",
PythonCompiler -> "u\\"str\\"[::-1]",
RubyCompiler -> "\\"str\\".reverse"
))
full("\\"12345\\".to_i", CalcIntType, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::stoi(std::string(\\"12345\\"))",
CSharpCompiler -> "Convert.ToInt64(\\"12345\\", 10)",
GoCompiler -> "func()(int){i, err := strconv.Atoi(\\"12345\\"); if (err != nil) { panic(err) }; return i}()",
JavaCompiler -> "Long.parseLong(\\"12345\\", 10)",
JavaScriptCompiler -> "Number.parseInt(\\"12345\\", 10)",
LuaCompiler -> "tonumber(\\"12345\\")",
PerlCompiler -> "\\"12345\\"", // Perl coerces strings to numbers implicitly
PHPCompiler -> "intval(\\"12345\\", 10)",
PythonCompiler -> "int(u\\"12345\\")",
RubyCompiler -> "\\"12345\\".to_i"
))
full("\\"1234fe\\".to_i(16)", CalcIntType, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::stoi(std::string(\\"1234fe\\"), 0, 16)",
CSharpCompiler -> "Convert.ToInt64(\\"1234fe\\", 16)",
GoCompiler -> "func()(int64){i, err := strconv.ParseInt(\\"1234fe\\", 16, 64); if (err != nil) { panic(err) }; return i}()",
JavaCompiler -> "Long.parseLong(\\"1234fe\\", 16)",
JavaScriptCompiler -> "Number.parseInt(\\"1234fe\\", 16)",
LuaCompiler -> "tonumber(\\"1234fe\\", 16)",
PerlCompiler -> "hex(\\"1234fe\\")",
PHPCompiler -> "intval(\\"1234fe\\", 16)",
PythonCompiler -> "int(u\\"1234fe\\", 16)",
RubyCompiler -> "\\"1234fe\\".to_i(16)"
))
// casts
// Casting `other` to user types: statically-typed targets emit an explicit
// cast, dynamically-typed targets just access the attribute directly.
full("other.as<block>.bar", FooBarProvider, CalcStrType, Map[LanguageCompilerStatic, String](
CppCompiler -> "static_cast<top_class_t::block_t*>(other())->bar()",
CSharpCompiler -> "((TopClass.Block) (Other)).Bar",
GoCompiler -> "this.Other.(TopClass.Block).Bar",
JavaCompiler -> "((TopClass.Block) (other())).bar()",
JavaScriptCompiler -> "this.other.bar",
LuaCompiler -> "self.other.bar",
PerlCompiler -> "$self->other()->bar()",
PHPCompiler -> "$this->other()->bar()",
PythonCompiler -> "self.other.bar",
RubyCompiler -> "other.bar"
))
full("other.as<block::innerblock>.baz", FooBarProvider, CalcIntType, Map[LanguageCompilerStatic, String](
CppCompiler -> "static_cast<top_class_t::block_t::innerblock_t*>(other())->baz()",
CSharpCompiler -> "((TopClass.Block.Innerblock) (Other)).Baz",
GoCompiler -> "this.Other.(TopClass.Block.Innerblock).Baz",
JavaCompiler -> "((TopClass.Block.Innerblock) (other())).baz()",
JavaScriptCompiler -> "this.other.baz",
LuaCompiler -> "self.other.baz",
PerlCompiler -> "$self->other()->baz()",
PHPCompiler -> "$this->other()->baz()",
PythonCompiler -> "self.other.baz",
RubyCompiler -> "other.baz"
))
// primitive pure types
// Casting to a sized integer: only statically-typed targets emit a narrowing
// cast; the rest leave the expression as-is.
full("(1 + 2).as<s2>", CalcIntType, IntMultiType(true, Width2, None), Map[LanguageCompilerStatic, String](
CppCompiler -> "static_cast<int16_t>((1 + 2))",
CSharpCompiler -> "((short) ((1 + 2)))",
GoCompiler -> "int16((1 + 2))",
JavaCompiler -> "((short) ((1 + 2)))",
JavaScriptCompiler -> "(1 + 2)",
LuaCompiler -> "(1 + 2)",
PerlCompiler -> "(1 + 2)",
PHPCompiler -> "(1 + 2)",
PythonCompiler -> "(1 + 2)",
RubyCompiler -> "(1 + 2)"
))
// empty array casting
full("[].as<bytes>", CalcIntType, CalcBytesType, Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\", 0)",
CSharpCompiler -> "new byte[] { }",
GoCompiler -> "\\"\\"",
JavaCompiler -> "new byte[] { }",
JavaScriptCompiler -> "[]",
LuaCompiler -> "\\"\\"",
PerlCompiler -> "pack('C*', ())",
PHPCompiler -> "\\"\\"",
PythonCompiler -> "b\\"\\"",
RubyCompiler -> "[].pack('C*')"
))
full("[].as<u1[]>", CalcIntType, ArrayTypeInStream(Int1Type(false)), Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\")",
CSharpCompiler -> "new List<byte> { }",
GoCompiler -> "[]uint8{}",
JavaCompiler -> "new ArrayList<Integer>(Arrays.asList())",
JavaScriptCompiler -> "[]",
LuaCompiler -> "{}",
PerlCompiler -> "()",
PHPCompiler -> "[]",
PythonCompiler -> "[]",
RubyCompiler -> "[]"
))
full("[].as<f8[]>", CalcIntType, ArrayTypeInStream(FloatMultiType(Width8, None)), Map[LanguageCompilerStatic, String](
CppCompiler -> "std::string(\\"\\", 0)",
CSharpCompiler -> "new List<double> { }",
GoCompiler -> "[]float64{}",
JavaCompiler -> "new ArrayList<Double>(Arrays.asList())",
JavaScriptCompiler -> "[]",
LuaCompiler -> "{}",
PerlCompiler -> "()",
PHPCompiler -> "[]",
PythonCompiler -> "[]",
RubyCompiler -> "[]"
))
// type enforcement: casting to non-literal byte array
full("[0 + 1, 5].as<bytes>", CalcIntType, CalcBytesType, Map[LanguageCompilerStatic, String](
CppCompiler -> "???", // NOTE(review): "???" placeholder — confirm whether C++ output is intentionally undefined here
CSharpCompiler -> "new byte[] { (0 + 1), 5 }",
GoCompiler -> "string([]byte{(0 + 1), 5})",
JavaCompiler -> "new byte[] { (0 + 1), 5 }",
JavaScriptCompiler -> "new Uint8Array([(0 + 1), 5])",
LuaCompiler -> "???", // NOTE(review): same placeholder as C++ above
PerlCompiler -> "pack('C*', ((0 + 1), 5))",
PHPCompiler -> "pack('C*', (0 + 1), 5)",
PythonCompiler -> "struct.pack('2b', (0 + 1), 5)",
RubyCompiler -> "[(0 + 1), 5].pack('C*')"
))
// type enforcement: casting to array of integers
// NOTE(review): no CppCompiler expectation here — runTest will fail its C++
// test with "no expected result"; confirm this omission is intended.
full("[0, 1, 2].as<u1[]>", CalcIntType, ArrayTypeInStream(Int1Type(false)), Map[LanguageCompilerStatic, String](
CSharpCompiler -> "new List<byte> { 0, 1, 2 }",
GoCompiler -> "[]uint8{0, 1, 2}",
JavaCompiler -> "new ArrayList<Integer>(Arrays.asList(0, 1, 2))",
JavaScriptCompiler -> "[0, 1, 2]",
LuaCompiler -> "{0, 1, 2}",
PerlCompiler -> "(0, 1, 2)",
PHPCompiler -> "[0, 1, 2]",
PythonCompiler -> "[0, 1, 2]",
RubyCompiler -> "[0, 1, 2]"
))
// sizeof of primitive types (bytes; bit types are rounded up to whole bytes)
everybody("sizeof<b1>", "1", CalcIntType)
everybody("sizeof<b7>", "1", CalcIntType)
everybody("sizeof<b8>", "1", CalcIntType)
everybody("sizeof<b9>", "2", CalcIntType)
everybody("sizeof<s1>", "1", CalcIntType)
everybody("sizeof<s2>", "2", CalcIntType)
everybody("sizeof<u4>", "4", CalcIntType)
everybody("sizeof<f8>", "8", CalcIntType)
// sizeof of fixed user type
everybody("sizeof<block>", "7", CalcIntType) // 56 bits (FakeTypeProvider.resolveType) = 7 bytes
// bitsizeof of primitive types
everybody("bitsizeof<b1>", "1", CalcIntType)
everybody("bitsizeof<b7>", "7", CalcIntType)
everybody("bitsizeof<b8>", "8", CalcIntType)
everybody("bitsizeof<b9>", "9", CalcIntType)
everybody("bitsizeof<s1>", "8", CalcIntType)
everybody("bitsizeof<s2>", "16", CalcIntType)
everybody("bitsizeof<u4>", "32", CalcIntType)
everybody("bitsizeof<f8>", "64", CalcIntType)
// bitsizeof of fixed user type
everybody("bitsizeof<block>", "56", CalcIntType)
/**
 * Checks translation of expression `src` into target languages.
 *
 * Registers one ScalaTest test for parsing the expression, plus one test per
 * target language that checks both the type detected for the expression and
 * the exact translated code string.
 *
 * @param src KS expression to translate
 * @param tp Type model that provides information about used user-defined types in expression
 * @param expType Expected type that should be detected by [[TypeDetector]]
 * @param expOut Map with expected outputs for each language
 */
def runTest(src: String, tp: TypeProvider, expType: DataType, expOut: ResultMap) {
// Parse result is shared between the parse test and the per-language tests
// registered below; it stays None if parsing failed (or the parse test has
// not run yet), which makes every language test fail with a clear message.
var eo: Option[Ast.expr] = None
test(s"_expr:$src") {
eo = Some(Expressions.parse(src))
}
// Go emits helper statements into a separate writer; its final result is
// that preamble plus the returned expression (see the match below).
val goOutput = new StringLanguageOutputWriter(" ")
val langs = Map[LanguageCompilerStatic, AbstractTranslator with TypeDetector](
CppCompiler -> new CppTranslator(tp, new CppImportList(), new CppImportList(), RuntimeConfig()),
CSharpCompiler -> new CSharpTranslator(tp, new ImportList()),
GoCompiler -> new GoTranslator(goOutput, tp, new ImportList()),
JavaCompiler -> new JavaTranslator(tp, new ImportList()),
JavaScriptCompiler -> new JavaScriptTranslator(tp),
LuaCompiler -> new LuaTranslator(tp, new ImportList()),
PerlCompiler -> new PerlTranslator(tp, new ImportList()),
PHPCompiler -> new PHPTranslator(tp, RuntimeConfig()),
PythonCompiler -> new PythonTranslator(tp, new ImportList()),
RubyCompiler -> new RubyTranslator(tp)
)
// One test per language, tagged by language name and by the raw expression,
// so individual languages/expressions can be selected from the test runner.
langs.foreach { case (langObj, tr) =>
val langName = LanguageCompilerStatic.CLASS_TO_NAME(langObj)
test(s"$langName:$src", Tag(langName), Tag(src)) {
eo match {
case Some(e) =>
expOut.get(langObj) match {
case Some(expResult) =>
tr.detectType(e) should be(expType)
val actResult1 = tr.translate(e)
// Go: prepend any helper statements accumulated during translation
val actResult2 = langObj match {
case GoCompiler => goOutput.result + actResult1
case _ => actResult1
}
actResult2 should be(expResult)
case None =>
fail(s"no expected result, but actual result is ${tr.translate(e)}")
}
case None =>
fail("expression didn't parse")
}
}
}
}
// Expected translation results, keyed by target language.
type ResultMap = Map[LanguageCompilerStatic, String]
// (expression, type provider, expected detected type, expected per-language outputs)
type TestSpec = (String, TypeProvider, DataType, ResultMap)
/**
 * Base type-provider stub for translator tests: it knows exactly one
 * user-defined type, `block` (fixed size: 56 bits = 7 bytes), and reports
 * everything else as unsupported. Subclasses supply name-based type lookup.
 */
abstract class FakeTypeProvider extends TypeProvider {
  val nowClass = ClassSpec.opaquePlaceholder(List("top_class"))

  override def resolveEnum(inType: Ast.typeId, enumName: String) =
    throw new NotImplementedError

  override def resolveType(typeName: Ast.typeId): DataType = {
    // Guard clause: anything other than a relative `block` is unsupported.
    if (typeName != Ast.typeId(false, List("block"), false))
      throw new NotImplementedError
    val path = List("top_class", "block")
    val spec = ClassSpec.opaquePlaceholder(path)
    // Fixed size drives the sizeof<block> / bitsizeof<block> expectations.
    spec.seqSize = FixedSized(56)
    val resolved = CalcUserType(path, None, Seq())
    resolved.classSpec = Some(spec)
    resolved
  }

  override def isLazy(attrName: String): Boolean = false
  override def isLazy(inClass: ClassSpec, attrName: String): Boolean = false
}
/** Type provider that reports the same fixed type `t` for every name lookup. */
case class Always(t: DataType) extends FakeTypeProvider {
override def determineType(name: String): DataType = t
override def determineType(inClass: ClassSpec, name: String): DataType = t
}
/**
 * Emulates the following system of types:
 *
 * <pre>
 * meta:
 *   id: top_class
 * types:
 *   block:
 *     seq:
 *       - id: bar
 *         type: str
 *       - id: inner
 *         type: innerblock
 *     types:
 *       innerblock:
 *         instances:
 *           baz:
 *             value: 123
 * </pre>
 */
case object FooBarProvider extends FakeTypeProvider {
  // Only `foo` is known at the top level; any other name raises a MatchError
  // (the match is deliberately non-exhaustive, as in the other lookups below).
  override def determineType(name: String): DataType =
    name match {
      case "foo" => userOwnedType(List("top_class", "block"))
    }

  override def determineType(inClass: ClassSpec, name: String): DataType =
    (inClass.name.last, name) match {
      case ("block", "bar") => CalcStrType
      case ("block", "inner") => userOwnedType(List("top_class", "block", "innerblock"))
      case ("innerblock", "baz") => CalcIntType
    }

  // Resolve relative or absolute type names to their full path first,
  // then wrap the path into a user-owned type exactly once.
  override def resolveType(typeName: Ast.typeId): DataType = {
    val path = typeName.names match {
      case Seq("top_class") =>
        List("top_class")
      case Seq("block") |
           Seq("top_class", "block") =>
        List("top_class", "block")
      case Seq("innerblock") |
           Seq("block", "innerblock") |
           Seq("top_class", "block", "innerblock") =>
        List("top_class", "block", "innerblock")
    }
    userOwnedType(path)
  }
}
/** Builds a parse-time user type (`UserTypeInstream`) backed by an opaque placeholder class spec. */
def userOwnedType(lname: List[String]) = {
  val ut = UserTypeInstream(lname, None)
  ut.classSpec = Some(ClassSpec.opaquePlaceholder(lname))
  ut
}

/** Builds a calculated (value-expression) user type backed by an opaque placeholder class spec. */
def userBorrowedType(lname: List[String]) = {
  val ut = CalcUserType(lname, None)
  ut.classSpec = Some(ClassSpec.opaquePlaceholder(lname))
  ut
}
// All known language compiler singletons; used to build "same expected
// output in every language" result maps for everybody/everybodyExcept.
lazy val ALL_LANGS = LanguageCompilerStatic.NAME_TO_CLASS.values
// Shortcut: run the test with a provider that always reports `srcType` for any name.
def full(src: String, srcType: DataType, expType: DataType, expOut: ResultMap) =
runTest(src, Always(srcType), expType, expOut)
// Shortcut: run the test with an explicitly supplied type provider.
def full(src: String, tp: TypeProvider, expType: DataType, expOut: ResultMap) =
runTest(src, tp, expType, expOut)
/** Expects the identical output string `expOut` from every target language. */
def everybody(src: String, expOut: String, expType: DataType = CalcIntType) =
  runTest(src, Always(CalcIntType), expType, ALL_LANGS.map(_ -> expOut).toMap)
/** Expects `commonExpOut` everywhere, except languages listed in `rm`, which override it. */
def everybodyExcept(src: String, commonExpOut: String, rm: ResultMap, expType: DataType = CalcIntType) =
  runTest(
    src,
    Always(CalcIntType),
    expType,
    ALL_LANGS.map(lang => lang -> rm.getOrElse(lang, commonExpOut)).toMap
  )
}
| kaitai-io/kaitai_struct_compiler | jvm/src/test/scala/io/kaitai/struct/translators/TranslatorSpec.scala | Scala | gpl-3.0 | 29,617 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.harness
import org.apache.flink.api.common.time.Time
import org.apache.flink.api.scala._
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord
import org.apache.flink.streaming.util.KeyedOneInputStreamOperatorTestHarness
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.bridge.scala.internal.StreamTableEnvironmentImpl
import org.apache.flink.table.data.RowData
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
import org.apache.flink.table.runtime.typeutils.RowDataSerializer
import org.apache.flink.table.runtime.util.RowDataHarnessAssertor
import org.apache.flink.table.runtime.util.StreamRecordUtils.{binaryrow, row}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.types.Row
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Test}
import java.lang.{Long => JLong}
import java.util.concurrent.ConcurrentLinkedQueue
import scala.collection.mutable
@RunWith(classOf[Parameterized])
class OverAggregateHarnessTest(mode: StateBackendMode) extends HarnessTestBase(mode) {
@Before
override def before(): Unit = {
  super.before()
  // Rebuild the table environment in streaming mode with the test config.
  val settings = EnvironmentSettings.newInstance().inStreamingMode().build()
  this.tEnv = StreamTableEnvironmentImpl.create(env, settings, new TestTableConfig)
}
@Test
def testProcTimeBoundedRowsOver(): Unit = {
  val (testHarness, outputType) = createProcTimeBoundedRowsOver
  val assertor = new RowDataHarnessAssertor(outputType)

  // Shorthand: feed one (currtime, key, value) record into the harness.
  def emit(t: Long, key: String, v: Long): Unit =
    testHarness.processElement(new StreamRecord(
      binaryrow(t: JLong, key, v: JLong, null)))

  testHarness.open()

  // register cleanup timer with 3001
  testHarness.setProcessingTime(1)
  emit(1L, "aaa", 1L)
  emit(1L, "bbb", 10L)
  emit(1L, "aaa", 2L)
  emit(1L, "aaa", 3L)

  // register cleanup timer with 4100
  testHarness.setProcessingTime(1100)
  emit(1L, "bbb", 20L)
  emit(1L, "aaa", 4L)
  emit(1L, "aaa", 5L)
  emit(1L, "aaa", 6L)
  emit(1L, "bbb", 30L)

  // register cleanup timer with 6001
  testHarness.setProcessingTime(3001)
  emit(2L, "aaa", 7L)
  emit(2L, "aaa", 8L)
  emit(2L, "aaa", 9L)

  // trigger cleanup timer and register cleanup timer with 9002
  testHarness.setProcessingTime(6002)
  emit(2L, "aaa", 10L)
  emit(2L, "bbb", 40L)

  val result = testHarness.getOutput

  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Shorthand: expected row = (currtime, key, value, proctime=null, min, max).
  def expect(t: Long, key: String, v: Long, min: Long, max: Long): Unit =
    expectedOutput.add(new StreamRecord(
      row(t: JLong, key, v: JLong, null, min: JLong, max: JLong)))

  expect(1L, "aaa", 1L, 1L, 1L)
  expect(1L, "bbb", 10L, 10L, 10L)
  expect(1L, "aaa", 2L, 1L, 2L)
  expect(1L, "aaa", 3L, 2L, 3L)
  expect(1L, "bbb", 20L, 10L, 20L)
  expect(1L, "aaa", 4L, 3L, 4L)
  expect(1L, "aaa", 5L, 4L, 5L)
  expect(1L, "aaa", 6L, 5L, 6L)
  expect(1L, "bbb", 30L, 20L, 30L)
  expect(2L, "aaa", 7L, 6L, 7L)
  expect(2L, "aaa", 8L, 7L, 8L)
  expect(2L, "aaa", 9L, 8L, 9L)
  expect(2L, "aaa", 10L, 9L, 10L)
  expect(2L, "bbb", 40L, 40L, 40L)

  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)

  testHarness.close()
}
/**
 * Builds the harness for the proc-time ROWS-bounded OVER aggregation
 * (min/max over "1 PRECEDING AND CURRENT ROW" per key), together with
 * the logical types of its output rows.
 */
private def createProcTimeBoundedRowsOver()
  : (KeyedOneInputStreamOperatorTestHarness[RowData, RowData, RowData], Array[LogicalType]) = {
  val data = new mutable.MutableList[(Long, String, Long)]
  val table = env.fromCollection(data).toTable(tEnv, 'currtime, 'b, 'c, 'proctime.proctime)
  tEnv.registerTable("T", table)

  val sql =
    """
|SELECT currtime, b, c,
| min(c) OVER
| (PARTITION BY b ORDER BY proctime ROWS BETWEEN 1 PRECEDING AND CURRENT ROW),
| max(c) OVER
| (PARTITION BY b ORDER BY proctime ROWS BETWEEN 1 PRECEDING AND CURRENT ROW)
|FROM T
""".stripMargin

  val queryResult = tEnv.sqlQuery(sql)
  tEnv.getConfig.setIdleStateRetentionTime(Time.seconds(2), Time.seconds(4))
  val testHarness = createHarnessTester(queryResult.toAppendStream[Row], "OverAggregate")

  // Output schema types (LogicalType is immutable, so a shared instance is fine).
  val bigint = DataTypes.BIGINT().getLogicalType
  val outputType = Array[LogicalType](
    bigint,
    DataTypes.STRING().getLogicalType,
    bigint,
    bigint,
    bigint,
    bigint,
    bigint)

  (testHarness, outputType)
}
/**
 * NOTE: all elements at the same proc timestamp have the same value per key.
 */
@Test
def testProcTimeBoundedRangeOver(): Unit = {
  val data = new mutable.MutableList[(Long, String, Long)]
  val table = env.fromCollection(data).toTable(tEnv, 'currtime, 'b, 'c, 'proctime.proctime)
  tEnv.registerTable("T", table)

  val sql =
    """
|SELECT currtime, b, c,
| min(c) OVER
| (PARTITION BY b ORDER BY proctime
| RANGE BETWEEN INTERVAL '4' SECOND PRECEDING AND CURRENT ROW),
| max(c) OVER
| (PARTITION BY b ORDER BY proctime
| RANGE BETWEEN INTERVAL '4' SECOND PRECEDING AND CURRENT ROW)
|FROM T
""".stripMargin

  val queryResult = tEnv.sqlQuery(sql)
  val testHarness = createHarnessTester(queryResult.toAppendStream[Row], "OverAggregate")
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))

  // Shorthand: feed one (currtime, key, value) record into the harness.
  def emit(t: Long, key: String, v: Long): Unit =
    testHarness.processElement(new StreamRecord(
      binaryrow(t: JLong, key, v: JLong, null)))

  testHarness.open()

  testHarness.setProcessingTime(3)
  emit(0L, "aaa", 1L)
  emit(0L, "bbb", 10L)

  testHarness.setProcessingTime(4)
  emit(0L, "aaa", 2L)

  testHarness.setProcessingTime(3003)
  emit(0L, "aaa", 3L)
  emit(0L, "bbb", 20L)

  testHarness.setProcessingTime(5)
  emit(0L, "aaa", 4L)

  testHarness.setProcessingTime(6002)
  testHarness.setProcessingTime(7002)
  emit(0L, "aaa", 5L)
  emit(0L, "aaa", 6L)
  emit(0L, "bbb", 30L)

  testHarness.setProcessingTime(11002)
  emit(0L, "aaa", 7L)

  testHarness.setProcessingTime(11004)
  emit(0L, "aaa", 8L)
  emit(0L, "aaa", 9L)
  emit(0L, "aaa", 10L)
  emit(0L, "bbb", 40L)

  testHarness.setProcessingTime(11006)
  val result = testHarness.getOutput

  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Shorthand: expected row = (currtime, key, value, proctime=null, min, max).
  def expect(t: Long, key: String, v: Long, min: Long, max: Long): Unit =
    expectedOutput.add(new StreamRecord(
      row(t: JLong, key, v: JLong, null, min: JLong, max: JLong)))

  // all elements at the same proc timestamp have the same value per key
  expect(0L, "aaa", 1L, 1L, 1L)
  expect(0L, "bbb", 10L, 10L, 10L)
  expect(0L, "aaa", 2L, 1L, 2L)
  expect(0L, "aaa", 3L, 1L, 4L)
  expect(0L, "bbb", 20L, 10L, 20L)
  expect(0L, "aaa", 4L, 1L, 4L)
  expect(0L, "aaa", 5L, 3L, 6L)
  expect(0L, "aaa", 6L, 3L, 6L)
  expect(0L, "bbb", 30L, 20L, 30L)
  expect(0L, "aaa", 7L, 5L, 7L)
  expect(0L, "aaa", 8L, 7L, 10L)
  expect(0L, "aaa", 9L, 7L, 10L)
  expect(0L, "aaa", 10L, 7L, 10L)
  expect(0L, "bbb", 40L, 40L, 40L)

  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)

  testHarness.close()
}
@Test
def testProcTimeUnboundedOver(): Unit = {
  val data = new mutable.MutableList[(Long, String, Long)]
  val table = env.fromCollection(data).toTable(tEnv, 'currtime, 'b, 'c, 'proctime.proctime)
  tEnv.registerTable("T", table)

  val sql =
    """
|SELECT currtime, b, c,
| min(c) OVER
| (PARTITION BY b ORDER BY proctime ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW),
| max(c) OVER
| (PARTITION BY b ORDER BY proctime ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)
|FROM T
""".stripMargin

  val queryResult = tEnv.sqlQuery(sql)
  tEnv.getConfig.setIdleStateRetentionTime(Time.seconds(2), Time.seconds(4))
  val testHarness = createHarnessTester(queryResult.toAppendStream[Row], "OverAggregate")
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))

  // Shorthand: feed one (currtime, key, value) record into the harness.
  def emit(t: Long, key: String, v: Long): Unit =
    testHarness.processElement(new StreamRecord(
      binaryrow(t: JLong, key, v: JLong, null)))

  testHarness.open()

  // register cleanup timer with 4003
  testHarness.setProcessingTime(1003)
  emit(0L, "aaa", 1L)
  emit(0L, "bbb", 10L)
  emit(0L, "aaa", 2L)
  emit(0L, "aaa", 3L)
  emit(0L, "bbb", 20L)
  emit(0L, "aaa", 4L)
  emit(0L, "aaa", 5L)
  emit(0L, "aaa", 6L)
  emit(0L, "bbb", 30L)
  emit(0L, "aaa", 7L)
  emit(0L, "aaa", 8L)

  // trigger cleanup timer and register cleanup timer with 8003
  testHarness.setProcessingTime(5003)
  emit(0L, "aaa", 9L)
  emit(0L, "aaa", 10L)
  emit(0L, "bbb", 40L)

  val result = testHarness.getOutput

  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Shorthand: expected row = (currtime, key, value, proctime=null, min, max).
  def expect(t: Long, key: String, v: Long, min: Long, max: Long): Unit =
    expectedOutput.add(new StreamRecord(
      row(t: JLong, key, v: JLong, null, min: JLong, max: JLong)))

  expect(0L, "aaa", 1L, 1L, 1L)
  expect(0L, "bbb", 10L, 10L, 10L)
  expect(0L, "aaa", 2L, 1L, 2L)
  expect(0L, "aaa", 3L, 1L, 3L)
  expect(0L, "bbb", 20L, 10L, 20L)
  expect(0L, "aaa", 4L, 1L, 4L)
  expect(0L, "aaa", 5L, 1L, 5L)
  expect(0L, "aaa", 6L, 1L, 6L)
  expect(0L, "bbb", 30L, 10L, 30L)
  expect(0L, "aaa", 7L, 1L, 7L)
  expect(0L, "aaa", 8L, 1L, 8L)
  // state was cleaned at 5003, so the aggregates restart here
  expect(0L, "aaa", 9L, 9L, 9L)
  expect(0L, "aaa", 10L, 9L, 10L)
  expect(0L, "bbb", 40L, 40L, 40L)

  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)

  testHarness.close()
}
/**
 * all elements at the same row-time have the same value per key
 */
@Test
def testRowTimeBoundedRangeOver(): Unit = {
  val data = new mutable.MutableList[(Long, String, Long)]
  val table = env.fromCollection(data).toTable(tEnv, 'rowtime.rowtime, 'b, 'c)
  tEnv.registerTable("T", table)

  val sql =
    """
|SELECT rowtime, b, c,
| min(c) OVER
| (PARTITION BY b ORDER BY rowtime
| RANGE BETWEEN INTERVAL '4' SECOND PRECEDING AND CURRENT ROW),
| max(c) OVER
| (PARTITION BY b ORDER BY rowtime
| RANGE BETWEEN INTERVAL '4' SECOND PRECEDING AND CURRENT ROW)
|FROM T
""".stripMargin

  val queryResult = tEnv.sqlQuery(sql)
  val testHarness = createHarnessTester(queryResult.toAppendStream[Row], "OverAggregate")
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))

  // Shorthand: feed one (rowtime, key, value) record into the harness.
  def emit(t: Long, key: String, v: Long): Unit =
    testHarness.processElement(new StreamRecord(
      binaryrow(t: JLong, key, v: JLong)))

  testHarness.open()

  testHarness.processWatermark(1)
  emit(2L, "aaa", 1L)

  testHarness.processWatermark(2)
  emit(3L, "bbb", 10L)

  testHarness.processWatermark(4000)
  emit(4001L, "aaa", 2L)

  testHarness.processWatermark(4001)
  emit(4002L, "aaa", 3L)

  testHarness.processWatermark(4002)
  emit(4003L, "aaa", 4L)

  testHarness.processWatermark(4800)
  emit(4801L, "bbb", 25L)

  testHarness.processWatermark(6500)
  emit(6501L, "aaa", 5L)
  emit(6501L, "aaa", 6L)
  emit(6501L, "bbb", 30L)

  testHarness.processWatermark(7000)
  emit(7001L, "aaa", 7L)

  testHarness.processWatermark(8000)
  emit(8001L, "aaa", 8L)

  testHarness.processWatermark(12000)
  emit(12001L, "aaa", 9L)
  emit(12001L, "aaa", 10L)
  emit(12001L, "bbb", 40L)

  testHarness.processWatermark(19000)

  val result = dropWatermarks(testHarness.getOutput.toArray)

  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Shorthand: expected row = (rowtime, key, value, min, max).
  def expect(t: Long, key: String, v: Long, min: Long, max: Long): Unit =
    expectedOutput.add(new StreamRecord(
      row(t: JLong, key, v: JLong, min: JLong, max: JLong)))

  // all elements at the same row-time have the same value per key
  expect(2L, "aaa", 1L, 1L, 1L)
  expect(3L, "bbb", 10L, 10L, 10L)
  expect(4001L, "aaa", 2L, 1L, 2L)
  expect(4002L, "aaa", 3L, 1L, 3L)
  expect(4003L, "aaa", 4L, 2L, 4L)
  expect(4801L, "bbb", 25L, 25L, 25L)
  expect(6501L, "aaa", 5L, 2L, 6L)
  expect(6501L, "aaa", 6L, 2L, 6L)
  expect(7001L, "aaa", 7L, 2L, 7L)
  expect(8001L, "aaa", 8L, 2L, 8L)
  expect(6501L, "bbb", 30L, 25L, 30L)
  expect(12001L, "aaa", 9L, 8L, 10L)
  expect(12001L, "aaa", 10L, 8L, 10L)
  expect(12001L, "bbb", 40L, 40L, 40L)

  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)

  testHarness.close()
}
@Test
def testRowTimeBoundedRowsOver(): Unit = {
  // Over aggregation with a row-count-bounded window (ROWS BETWEEN 2 PRECEDING
  // AND CURRENT ROW): for every input row, min(c)/max(c) over the last (up to)
  // three rows per key 'b, ordered by event time. Also verifies that per-key
  // state is removed according to the idle-state retention time (min 1s, max 2s
  // of processing time).
  val data = new mutable.MutableList[(Long, String, Long)]
  // 'rowtime is the event-time attribute, 'b the partition key, 'c the value.
  val t = env.fromCollection(data).toTable(tEnv, 'rowtime.rowtime, 'b, 'c)
  tEnv.registerTable("T", t)
  val sql =
    """
      |SELECT rowtime, b, c,
      | min(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    ROWS BETWEEN 2 PRECEDING AND CURRENT ROW),
      | max(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    ROWS BETWEEN 2 PRECEDING AND CURRENT ROW)
      |FROM T
    """.stripMargin
  val t1 = tEnv.sqlQuery(sql)
  // Idle key state may be cleaned after 1s and must be cleaned after 2s.
  tEnv.getConfig.setIdleStateRetentionTime(Time.seconds(1), Time.seconds(2))
  val testHarness = createHarnessTester(t1.toAppendStream[Row], "OverAggregate")
  // Output row layout: rowtime, b, c, min(c), max(c).
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))
  testHarness.open()

  // Feed records interleaved with watermarks; a row's result is emitted once
  // the watermark passes its row time.
  testHarness.processWatermark(800)
  testHarness.processElement(new StreamRecord(
    binaryrow(801L: JLong, "aaa", 1L: JLong)))
  testHarness.processWatermark(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(2501L: JLong, "bbb", 10L: JLong)))
  testHarness.processWatermark(4000)
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 2L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 3L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "bbb", 20L: JLong)))
  testHarness.processWatermark(4800)
  testHarness.processElement(new StreamRecord(
    binaryrow(4801L: JLong, "aaa", 4L: JLong)))
  testHarness.processWatermark(6500)
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 5L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 6L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "bbb", 30L: JLong)))
  testHarness.processWatermark(7000)
  testHarness.processElement(new StreamRecord(
    binaryrow(7001L: JLong, "aaa", 7L: JLong)))
  testHarness.processWatermark(8000)
  testHarness.processElement(new StreamRecord(
    binaryrow(8001L: JLong, "aaa", 8L: JLong)))
  testHarness.processWatermark(12000)
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 9L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 10L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "bbb", 40L: JLong)))
  testHarness.processWatermark(19000)
  // test cleanup
  testHarness.setProcessingTime(1000)
  testHarness.processWatermark(20000)
  // check that state is removed after max retention time
  testHarness.processElement(new StreamRecord(
    binaryrow(20001L: JLong, "ccc", 1L: JLong))) // clean-up 3000
  testHarness.setProcessingTime(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(20002L: JLong, "ccc", 2L: JLong))) // clean-up 4500
  testHarness.processWatermark(20010) // compute output
  testHarness.setProcessingTime(4499)
  testHarness.setProcessingTime(4500)
  // check that state is only removed if all data was processed
  testHarness.processElement(new StreamRecord(
    binaryrow(20011L: JLong, "ccc", 3L: JLong))) // clean-up 6500
  testHarness.setProcessingTime(6500) // clean-up attempt but rescheduled to 8500
  testHarness.processWatermark(20020) // schedule emission
  testHarness.setProcessingTime(8499) // nothing happens yet (clean-up timer is at 8500)
  testHarness.setProcessingTime(8500) // clean-up
  val result = dropWatermarks(testHarness.getOutput.toArray)
  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Per row: rowtime, key, value, min over last 3 rows of the key, max over last 3 rows.
  // After state cleanup the "ccc" window restarts (row 20011 sees only itself).
  expectedOutput.add(new StreamRecord(
    row(801L: JLong, "aaa", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(2501L: JLong, "bbb", 10L: JLong, 10L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 2L: JLong, 1L: JLong, 2L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 3L: JLong, 1L: JLong, 3L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "bbb", 20L: JLong, 10L: JLong, 20L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4801L: JLong, "aaa", 4L: JLong, 2L: JLong, 4L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 5L: JLong, 3L: JLong, 5L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 6L: JLong, 4L: JLong, 6L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "bbb", 30L: JLong, 10L: JLong, 30L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(7001L: JLong, "aaa", 7L: JLong, 5L: JLong, 7L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(8001L: JLong, "aaa", 8L: JLong, 6L: JLong, 8L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 9L: JLong, 7L: JLong, 9L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 10L: JLong, 8L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "bbb", 40L: JLong, 20L: JLong, 40L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20001L: JLong, "ccc", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20002L: JLong, "ccc", 2L: JLong, 1L: JLong, 2L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20011L: JLong, "ccc", 3L: JLong, 3L: JLong, 3L: JLong)))
  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)
  testHarness.close()
}
/**
 * Over aggregation with an unbounded RANGE window (RANGE BETWEEN UNBOUNDED
 * PRECEDING AND CURRENT ROW): all elements at the same row-time have the same
 * aggregate value per key. Also exercises idle-state cleanup (min 1s / max 2s)
 * and late-data dropping.
 */
@Test
def testRowTimeUnboundedRangeOver(): Unit = {
  val data = new mutable.MutableList[(Long, String, Long)]
  // 'rowtime is the event-time attribute, 'b the partition key, 'c the value.
  val t = env.fromCollection(data).toTable(tEnv, 'rowtime.rowtime, 'b, 'c)
  tEnv.registerTable("T", t)
  val sql =
    """
      |SELECT rowtime, b, c,
      | min(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    RANGE BETWEEN UNBOUNDED preceding AND CURRENT ROW),
      | max(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    RANGE BETWEEN UNBOUNDED preceding AND CURRENT ROW)
      |FROM T
    """.stripMargin
  val t1 = tEnv.sqlQuery(sql)
  // Idle key state may be cleaned after 1s and must be cleaned after 2s.
  tEnv.getConfig.setIdleStateRetentionTime(Time.seconds(1), Time.seconds(2))
  val testHarness = createHarnessTester(t1.toAppendStream[Row], "OverAggregate")
  // Output row layout: rowtime, b, c, min(c), max(c).
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))
  testHarness.open()
  // First element registers a cleanup timer at 1000 + max retention (2000) = 3000.
  testHarness.setProcessingTime(1000)
  testHarness.processWatermark(800)
  testHarness.processElement(new StreamRecord(
    binaryrow(801L: JLong, "aaa", 1L: JLong)))
  testHarness.processWatermark(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(2501L: JLong, "bbb", 10L: JLong)))
  testHarness.processWatermark(4000)
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 2L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 3L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "bbb", 20L: JLong)))
  testHarness.processWatermark(4800)
  testHarness.processElement(new StreamRecord(
    binaryrow(4801L: JLong, "aaa", 4L: JLong)))
  testHarness.processWatermark(6500)
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 5L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 6L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "bbb", 30L: JLong)))
  testHarness.processWatermark(7000)
  testHarness.processElement(new StreamRecord(
    binaryrow(7001L: JLong, "aaa", 7L: JLong)))
  testHarness.processWatermark(8000)
  testHarness.processElement(new StreamRecord(
    binaryrow(8001L: JLong, "aaa", 8L: JLong)))
  testHarness.processWatermark(12000)
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 9L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 10L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "bbb", 40L: JLong)))
  testHarness.processWatermark(19000)
  // test cleanup
  testHarness.setProcessingTime(2999) // clean up timer is 3000, so nothing should happen
  testHarness.setProcessingTime(3000) // clean up is triggered
  testHarness.processWatermark(20000)
  testHarness.processElement(new StreamRecord(
    binaryrow(20000L: JLong, "ccc", 1L: JLong))) // test for late data
  testHarness.processElement(new StreamRecord(
    binaryrow(20001L: JLong, "ccc", 1L: JLong))) // clean-up 5000
  // NOTE(review): processing time moves backwards here (3000 -> 2500); the
  // harness permits this and timers already fired stay fired.
  testHarness.setProcessingTime(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(20002L: JLong, "ccc", 2L: JLong))) // clean-up 5000
  testHarness.setProcessingTime(5000) // does not clean up, because data left. New timer 7000
  testHarness.processWatermark(20010) // compute output
  testHarness.setProcessingTime(6999) // clean up timer is 7000, so nothing should happen
  testHarness.setProcessingTime(7000) // clean up is triggered
  val result = dropWatermarks(testHarness.getOutput.toArray)
  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // all elements at the same row-time have the same value per key
  // (e.g. both "aaa" rows at 4001 report max = 3). The late row at 20000 is
  // dropped and does not appear below.
  expectedOutput.add(new StreamRecord(
    row(801L: JLong, "aaa", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(2501L: JLong, "bbb", 10L: JLong, 10L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 2L: JLong, 1L: JLong, 3L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 3L: JLong, 1L: JLong, 3L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "bbb", 20L: JLong, 10L: JLong, 20L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4801L: JLong, "aaa", 4L: JLong, 1L: JLong, 4L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 5L: JLong, 1L: JLong, 6L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 6L: JLong, 1L: JLong, 6L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "bbb", 30L: JLong, 10L: JLong, 30L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(7001L: JLong, "aaa", 7L: JLong, 1L: JLong, 7L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(8001L: JLong, "aaa", 8L: JLong, 1L: JLong, 8L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 9L: JLong, 1L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 10L: JLong, 1L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "bbb", 40L: JLong, 10L: JLong, 40L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20001L: JLong, "ccc", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20002L: JLong, "ccc", 2L: JLong, 1L: JLong, 2L: JLong)))
  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)
  testHarness.close()
}
/**
 * Over aggregation with an unbounded ROWS window (ROWS BETWEEN UNBOUNDED
 * PRECEDING AND CURRENT ROW): each row sees its own running min/max per key,
 * so rows sharing a row-time may report different values (unlike the RANGE
 * variant). Also exercises idle-state cleanup (min 1s / max 2s) and
 * late-data dropping.
 */
@Test
def testRowTimeUnboundedRowsOver(): Unit = {
  val data = new mutable.MutableList[(Long, String, Long)]
  // 'rowtime is the event-time attribute, 'b the partition key, 'c the value.
  val t = env.fromCollection(data).toTable(tEnv, 'rowtime.rowtime, 'b, 'c)
  tEnv.registerTable("T", t)
  val sql =
    """
      |SELECT rowtime, b, c,
      | min(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW),
      | max(c) OVER
      |   (PARTITION BY b ORDER BY rowtime
      |    ROWS BETWEEN UNBOUNDED preceding AND CURRENT ROW)
      |FROM T
    """.stripMargin
  val t1 = tEnv.sqlQuery(sql)
  // Idle key state may be cleaned after 1s and must be cleaned after 2s.
  tEnv.getConfig.setIdleStateRetentionTime(Time.seconds(1), Time.seconds(2))
  val testHarness = createHarnessTester(t1.toAppendStream[Row], "OverAggregate")
  // Output row layout: rowtime, b, c, min(c), max(c).
  val assertor = new RowDataHarnessAssertor(
    Array(
      DataTypes.BIGINT().getLogicalType,
      DataTypes.STRING().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType,
      DataTypes.BIGINT().getLogicalType))
  testHarness.open()
  // First element registers a cleanup timer at 1000 + max retention (2000) = 3000.
  testHarness.setProcessingTime(1000)
  testHarness.processWatermark(800)
  testHarness.processElement(new StreamRecord(
    binaryrow(801L: JLong, "aaa", 1L: JLong)))
  testHarness.processWatermark(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(2501L: JLong, "bbb", 10L: JLong)))
  testHarness.processWatermark(4000)
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 2L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "aaa", 3L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(4001L: JLong, "bbb", 20L: JLong)))
  testHarness.processWatermark(4800)
  testHarness.processElement(new StreamRecord(
    binaryrow(4801L: JLong, "aaa", 4L: JLong)))
  testHarness.processWatermark(6500)
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 5L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "aaa", 6L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(6501L: JLong, "bbb", 30L: JLong)))
  testHarness.processWatermark(7000)
  testHarness.processElement(new StreamRecord(
    binaryrow(7001L: JLong, "aaa", 7L: JLong)))
  testHarness.processWatermark(8000)
  testHarness.processElement(new StreamRecord(
    binaryrow(8001L: JLong, "aaa", 8L: JLong)))
  testHarness.processWatermark(12000)
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 9L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "aaa", 10L: JLong)))
  testHarness.processElement(new StreamRecord(
    binaryrow(12001L: JLong, "bbb", 40L: JLong)))
  testHarness.processWatermark(19000)
  // test cleanup
  testHarness.setProcessingTime(2999) // clean up timer is 3000, so nothing should happen
  testHarness.setProcessingTime(3000) // clean up is triggered
  testHarness.processWatermark(20000)
  testHarness.processElement(new StreamRecord(
    binaryrow(20000L: JLong, "ccc", 2L: JLong))) // test for late data
  testHarness.processElement(new StreamRecord(
    binaryrow(20001L: JLong, "ccc", 1L: JLong))) // clean-up 5000
  // NOTE(review): processing time moves backwards here (3000 -> 2500); the
  // harness permits this and timers already fired stay fired.
  testHarness.setProcessingTime(2500)
  testHarness.processElement(new StreamRecord(
    binaryrow(20002L: JLong, "ccc", 2L: JLong))) // clean-up 5000
  testHarness.setProcessingTime(5000) // does not clean up, because data left. New timer 7000
  testHarness.processWatermark(20010) // compute output
  testHarness.setProcessingTime(6999) // clean up timer is 7000, so nothing should happen
  testHarness.setProcessingTime(7000) // clean up is triggered
  val result = dropWatermarks(testHarness.getOutput.toArray)
  val expectedOutput = new ConcurrentLinkedQueue[Object]()
  // Running min/max per key; rows with equal row-time differ (e.g. "aaa" at
  // 4001 reports max 2 then 3). The late row at 20000 is dropped.
  expectedOutput.add(new StreamRecord(
    row(801L: JLong, "aaa", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(2501L: JLong, "bbb", 10L: JLong, 10L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 2L: JLong, 1L: JLong, 2L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "aaa", 3L: JLong, 1L: JLong, 3L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4001L: JLong, "bbb", 20L: JLong, 10L: JLong, 20L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(4801L: JLong, "aaa", 4L: JLong, 1L: JLong, 4L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 5L: JLong, 1L: JLong, 5L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "aaa", 6L: JLong, 1L: JLong, 6L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(6501L: JLong, "bbb", 30L: JLong, 10L: JLong, 30L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(7001L: JLong, "aaa", 7L: JLong, 1L: JLong, 7L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(8001L: JLong, "aaa", 8L: JLong, 1L: JLong, 8L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 9L: JLong, 1L: JLong, 9L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "aaa", 10L: JLong, 1L: JLong, 10L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(12001L: JLong, "bbb", 40L: JLong, 10L: JLong, 40L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20001L: JLong, "ccc", 1L: JLong, 1L: JLong, 1L: JLong)))
  expectedOutput.add(new StreamRecord(
    row(20002L: JLong, "ccc", 2L: JLong, 1L: JLong, 2L: JLong)))
  assertor.assertOutputEqualsSorted("result mismatch", expectedOutput, result)
  testHarness.close()
}
/**
 * Simulates a failover after a failed task open: setting the harness up and
 * opening it again must not raise an exception.
 */
@Test
def testCloseWithoutOpen(): Unit = {
  val (harness, fieldTypes) = createProcTimeBoundedRowsOver
  harness.setup(new RowDataSerializer(fieldTypes: _*))
  harness.open()
}
}
| apache/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/runtime/harness/OverAggregateHarnessTest.scala | Scala | apache-2.0 | 39,141 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.mysql.binary.encoder
import io.netty.buffer.ByteBuf
import com.github.mauricio.async.db.mysql.column.ColumnTypes
/**
 * Binary-protocol encoder for Scala [[Short]] values.
 *
 * Writes the value as a 2-byte integer via [[ByteBuf#writeShort]] and reports
 * the matching MySQL column type (`FIELD_TYPE_SHORT`).
 */
object ShortEncoder extends BinaryEncoder {

  /**
   * Writes `value` to `buffer` as a 2-byte integer.
   *
   * @param value  must be a [[Short]]; anything else fails with a ClassCastException
   * @param buffer target buffer, advanced by two bytes
   */
  // Explicit `: Unit =` replaces the deprecated Scala procedure syntax.
  def encode(value: Any, buffer: ByteBuf): Unit = {
    buffer.writeShort(value.asInstanceOf[Short])
  }

  /** MySQL wire type this encoder produces. */
  def encodesTo: Int = ColumnTypes.FIELD_TYPE_SHORT
}
| leosilvadev/postgresql-async | mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ShortEncoder.scala | Scala | apache-2.0 | 986 |
package net.debasishg.domain.trade
package event
import serialization.Serialization._
import serialization.Util._
import akka.dispatch._
import akka.util.Timeout
import akka.util.duration._
import akka.actor.{ Actor, ActorRef, Props, ActorSystem }
import akka.pattern.Patterns.ask
import Actor._
import com.redis._
import com.redis.serialization._
/**
 * [[EventLog]] backed by a Redis list. All writes and reads go through a single
 * internal `Logger` actor so that log entries keep their append order.
 *
 * @param clients pooled Redis connections used for LPUSH/LRANGE
 * @param as      actor system that hosts the logger actor
 */
class RedisEventLog(clients: RedisClientPool, as: ActorSystem) extends EventLog {

  val loggerActorName = "redis-event-logger"

  // need a pinned dispatcher to maintain order of log entries
  lazy val logger = as.actorOf(Props(new Logger(clients)).withDispatcher("my-pinned-dispatcher"), name = loggerActorName)

  // implicit val timeout = as.settings.ActorTimeout
  implicit val timeout = Timeout(20 seconds)

  /** Iterates over all entries from the beginning of the log. */
  def iterator = iterator(0L)

  // NOTE(review): treats fromEntryId as a positional index into the entry
  // list (drop n), not as a lookup of the entry's id field — confirm callers
  // rely on ids being dense and starting at 1.
  def iterator(fromEntryId: Long) =
    getEntries.drop(fromEntryId.toInt).iterator

  /**
   * Appends an entry asynchronously; the returned future completes with the
   * entry created by the logger actor. The cast is unchecked (type erasure on
   * Future), but the Logger always replies with an EventLogEntry.
   */
  def appendAsync(id: String, state: State, data: Option[Any], event: Event): Future[EventLogEntry] =
    ask(logger, LogEvent(id, state, data, event), timeout.duration).asInstanceOf[Future[EventLogEntry]]

  /** Blocking read of all entries in insertion order (waits up to `timeout`). */
  def getEntries: List[EventLogEntry] = {
    val future = ask(logger, GetEntries(), timeout.duration)
    Await.result(future, timeout.duration).asInstanceOf[List[EventLogEntry]]
  }

  // Protocol messages of the internal Logger actor.
  case class LogEvent(objectId: String, state: State, data: Option[Any], event: Event)
  case class GetEntries()

  /**
   * Single-threaded worker: LPUSHes serialized entries and answers GetEntries
   * with the list reversed back into insertion order.
   */
  class Logger(clients: RedisClientPool) extends Actor {

    // Serialization to/from the Redis byte representation of an entry.
    implicit val format = Format { case l: EventLogEntry => serializeEventLogEntry(l) }
    implicit val parseList = Parse[EventLogEntry](deSerializeEventLogEntry(_))

    def receive = {
      case LogEvent(id, state, data, event) =>
        // Ids are allocated here, inside the single-threaded actor.
        val entry = EventLogEntry(RedisEventLog.nextId(), id, state, data, event)
        clients.withClient { client =>
          client.lpush(RedisEventLog.logName, entry)
        }
        sender ! entry
      case GetEntries() =>
        import Parse.Implicits.parseByteArray
        val entries =
          clients.withClient { client =>
            client.lrange[EventLogEntry](RedisEventLog.logName, 0, -1)
          }
        // lpush stores newest-first, so reverse to restore append order;
        // unparsable/missing elements are silently dropped by the .get on Some.
        val ren = entries.map(_.map(e => e.get)).getOrElse(List.empty[EventLogEntry]).reverse
        sender ! ren
    }
  }
}
import Parse.Implicits.parseDouble
/**
 * Companion holding the Redis list name for the log and a process-wide
 * entry-id sequence.
 */
object RedisEventLog {
  // Last id handed out. Kept as a public var for backward compatibility;
  // mutate it only through nextId().
  var current = 0L

  /** Name of the Redis list the log entries are pushed to. */
  def logName = "events"

  /**
   * Returns the next entry id (1, 2, 3, ...). The read-modify-write is
   * synchronized so ids stay unique even if this is ever called from outside
   * the single-threaded logger actor.
   */
  def nextId() = synchronized {
    current = current + 1
    current
  }
}
| Tjoene/thesis | benchmark/src/main/scala/cqrs/RedisEventLog.scala | Scala | gpl-2.0 | 2,591 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.cache
import play.api.mvc.Result
import play.api.mvc.Results
import play.api.test._
/** Verifies that a [[SerializableResult]] survives a Java-serialization round trip. */
class SerializableResultSpec extends PlaySpecification {

  sequential

  "SerializableResult" should {

    /** Serializes `result` to bytes and reads it back via Java object streams. */
    def serializeAndDeserialize(result: Result): Result = {
      import java.io._
      val wrapped = new SerializableResult(result)
      val byteSink = new ByteArrayOutputStream()
      val objectOut = new ObjectOutputStream(byteSink)
      objectOut.writeObject(wrapped)
      objectOut.flush()
      objectOut.close()
      byteSink.close()
      val byteSource = new ByteArrayInputStream(byteSink.toByteArray)
      val objectIn = new ObjectInputStream(byteSource)
      val roundTripped = objectIn.readObject().asInstanceOf[SerializableResult]
      objectIn.close()
      byteSource.close()
      roundTripped.result
    }

    /** Field-by-field equality check (a dedicated Matcher would be fancier). */
    def compareResults(r1: Result, r2: Result) = {
      r1.header.status must_== r2.header.status
      r1.header.headers must_== r2.header.headers
      r1.body must_== r2.body
    }

    /** Round-trips `r` and checks nothing observable was lost. */
    def checkSerialization(r: Result) = compareResults(r, serializeAndDeserialize(r))

    "serialize and deserialize statūs" in {
      checkSerialization(Results.Ok("x").withHeaders(CONTENT_TYPE -> "text/banana"))
      checkSerialization(Results.NotFound)
    }
    "serialize and deserialize simple Results" in {
      checkSerialization(Results.Ok("hello!"))
      checkSerialization(Results.Ok("hello!").withHeaders(CONTENT_TYPE -> "text/banana"))
      checkSerialization(Results.Ok("hello!").withHeaders(CONTENT_TYPE -> "text/banana", "X-Foo" -> "bar"))
    }
  }
}
| benmccann/playframework | cache/play-ehcache/src/test/scala/play/api/cache/SerializableResultSpec.scala | Scala | apache-2.0 | 1,708 |
package models.query
import org.joda.time.LocalDate
import org.joda.time.format.ISODateTimeFormat
import scalikejdbc._
import scala.util.Try
/**
* Date: 14/11/27.
*/
/**
 * Optional date interval used to filter a timestamp column.
 *
 * @param from    inclusive-side lower bound (exclusive `>` in SQL), if any
 * @param to      upper bound (exclusive `<` in SQL), if any
 * @param default true when the period was built from unparsable input
 */
case class Period(from: Option[LocalDate], to: Option[LocalDate], default: Boolean) {
  import Period._

  /** SQL condition over `target` (epoch millis); `true` when no bound is set. */
  def where(target: SQLSyntax): SQLSyntax =
    whereOpt(target) match {
      case Some(condition) => condition
      case None            => sqls"true"
    }

  /** AND of the bounds that are present; None when neither bound is set. */
  def whereOpt(target: SQLSyntax): Option[SQLSyntax] = {
    val lowerBound = from.map(d => sqls.gt(target, d.toDate.getTime))
    val upperBound = to.map(d => sqls.lt(target, d.toDate.getTime))
    sqls.toAndConditionOpt(lowerBound, upperBound)
  }

  /** ISO-8601 rendering of the lower bound, defaulting to [[Period.DefaultStart]]. */
  def fromStr = from.getOrElse(DefaultStart).toString(ISODateTimeFormat.date())

  /** ISO-8601 rendering of the upper bound, defaulting to today. */
  def toStr = to.getOrElse(LocalDate.now()).toString(ISODateTimeFormat.date())
}
object Period {
  /** Fallback lower bound used when rendering a period with no explicit start. */
  lazy val DefaultStart = new LocalDate(2014, 1, 1)

  /**
   * Parses two ISO-8601 date strings into a [[Period]]. A string that fails to
   * parse yields an open bound; when both fail, the period is flagged `default`.
   */
  def fromStr(from: String, to: String): Period = {
    val lower = Try(LocalDate.parse(from, ISODateTimeFormat.date())).toOption
    val upper = Try(LocalDate.parse(to, ISODateTimeFormat.date())).toOption
    Period(lower, upper, lower.isEmpty && upper.isEmpty)
  }
}
| nekoworkshop/MyFleetGirls | server/app/models/query/Period.scala | Scala | mit | 1,106 |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.github.microburn.integration.support.scrum
import java.util.Date
import net.liftweb.actor.LAFuture
import net.liftweb.common.Box
import org.github.microburn.domain.actors._
import org.github.microburn.domain.{MajorSprintDetails, UserStory}
import org.github.microburn.integration.Integration
/**
 * Scrum-style [[Integration]]: synchronizes the project actor's sprint state
 * with an external tool via the given providers. Independent remote calls are
 * issued in parallel (futures are created before being combined in the
 * for-comprehensions).
 *
 * @param sprintsProvider source of sprint ids and sprint details
 * @param tasksProvider   source of user stories per sprint
 * @param projectActor    owner of the persisted project state
 */
class ScrumIntegration(sprintsProvider: SprintsDataProvider, tasksProvider: TasksDataProvider)(projectActor: ProjectActor)
  extends Integration {

  import org.github.microburn.util.concurrent.FutureEnrichments._
  import org.github.microburn.util.concurrent.ActorEnrichments._

  /**
   * One synchronization round: fetch current state and remote sprint ids in
   * parallel, then create missing sprints and update active ones in parallel.
   */
  override def updateProject(implicit timestamp: Date): LAFuture[_] = {
    for {
      (currentSprints, updatedSprintIds) <- parallelCurrentAndUpdatedSprints
      _ <- parallelCreateAndUpdate(currentSprints, updatedSprintIds)
    } yield Unit
  }

  /** Fetches local project state and remote sprint ids concurrently. */
  private def parallelCurrentAndUpdatedSprints: LAFuture[(ProjectState, Seq[Int])] = {
    // Both futures are started before the for-comprehension so they run in parallel.
    val currentStateFuture = (projectActor ?? GetProjectState).mapTo[ProjectState]
      .withLoggingFinished("current sprint ids: " + _.sprints.map(_.id).mkString(", "))
    val updatedIdsFuture = sprintsProvider.allSprintIds.withLoggingFinished("updated sprints ids: " + _.mkString(", "))
    for {
      currentState <- currentStateFuture
      updatedIds <- updatedIdsFuture
    } yield (currentState, updatedIds)
  }

  /** Runs sprint creation and active-sprint update concurrently. */
  private def parallelCreateAndUpdate(current: ProjectState, updatedSprintIds: Seq[Int])
                                     (implicit timestamp: Date): LAFuture[_] = {
    val createResultFuture = createNewSprints(current, updatedSprintIds).withLoggingFinished("created sprints: " + _.mkString(", "))
    val updateResultFuture = updateActiveSprints(current).withLoggingFinished("updated sprints: " + _.mkString(", "))
    for {
      _ <- createResultFuture
      _ <- updateResultFuture
    } yield Unit
  }

  /**
   * Creates every remote sprint that the local state does not know yet.
   * Returns the ids of sprints actually created (failed creations are dropped
   * via the Box -> Option conversion).
   */
  private def createNewSprints(current: ProjectState, retrieved: Seq[Int])
                              (implicit timestamp: Date): LAFuture[List[Int]] = {
    val currentIds = current.sprintIds
    val missing = retrieved.filterNot(currentIds.contains)
    val createResults = missing.map { sprintId =>
      for {
        (details, userStories) <- parallelSprintDetailsAndUserStories(sprintId)
        createResult <- (projectActor !< CreateNewSprint(sprintId, details, userStories, timestamp)).mapTo[Box[Int]].map(_.toOption)
      } yield createResult
    }
    LAFuture.collect(createResults : _*).map(_.flatten)
  }

  /** Refreshes details and user stories of every sprint marked active. */
  private def updateActiveSprints(current: ProjectState)
                                 (implicit timestamp: Date): LAFuture[List[Int]] = {
    val updateResults = current.sprints.collect {
      case withDetails if withDetails.isActive =>
        for {
          (details, userStories) <- parallelSprintDetailsAndUserStories(withDetails.id)
          updateResult <- (projectActor ?? UpdateSprint(withDetails.id, userStories, details, timestamp)).mapTo[Int]
        } yield updateResult
    }
    LAFuture.collect(updateResults : _*)
  }

  /** Fetches sprint details and its user stories concurrently. */
  private def parallelSprintDetailsAndUserStories(sprintId: Int): LAFuture[(MajorSprintDetails, Seq[UserStory])] = {
    val detailsFuture = sprintsProvider.sprintDetails(sprintId).withLoggingFinished(s"sprint details for sprint $sprintId: " + _)
    val tasksFuture = tasksProvider.userStories(sprintId).withLoggingFinished(s"user stories count for sprint $sprintId: " + _.size)
    for {
      details <- detailsFuture
      tasks <- tasksFuture
    } yield (details, tasks)
  }
}
} | arkadius/micro-burn | src/main/scala/org/github/microburn/integration/support/scrum/ScrumIntegration.scala | Scala | apache-2.0 | 4,124 |
package scalaprops
import scalaz._
import scalaz.std.anyVal._
import scalaz.std.stream._
import ScalapropsScalaz._
/** Property tests for scalaprops' [[Shrink]]. */
object ShrinkTest extends Scalaprops {

  // Equality of Shrink instances by comparing their underlying A => Stream[A]
  // functions (pointwise, via FunctionEqual's sampled comparison).
  private[this] implicit def equal[A: Gen: Equal]: Equal[Shrink[A]] = {
    import FunctionEqual._
    Equal[A => Stream[A]].contramap(_.f)
  }

  // Shrink forms an invariant functor; check all its laws.
  val law = scalazlaws.invariantFunctor.all[Shrink]

  /**
   * Checks that shrinking a falsified Int argument behaves sanely: for a
   * property "i < n" with n in [1, 128] and inputs generated well above n,
   * the counter-example must shrink down to exactly n, using more than m
   * shrink steps but fewer than Integer.SIZE (32).
   */
  val int = {
    val x = 128
    Property.property { (seed: Int) =>
      // Re-run the whole inner check with a deterministic seed.
      val param0 = super.param.copy(
        seed = Seed.IntSeed(seed)
      )
      val m = 6
      // Generated values are at least 2^m = 64, i.e. >= any tested n is not
      // guaranteed, but large values dominate, so falsification is expected.
      val g = Gen.choose(List.fill(m)(2).product, Int.MaxValue)
      Property.forAllG(Gen.choose(1, x)) { (n: Int) =>
        val result =
          Property.property1[Int] { i => Property.prop(i < n) }(g, implicitly).check(param0, () => false, _ => ())
        result match {
          // Expect a falsification whose single shrunk argument equals n,
          // with a shrink count strictly between m and Integer.SIZE.
          case CheckResult.Falsified(_, 0, arg :: Nil, _)
            if (m < arg.shrinks) && (arg.shrinks < Integer.SIZE) && (arg.value == n) =>
            true
          case fail =>
            sys.error(fail.toString)
        }
      }
    }(implicitly, Shrink.empty) // outer seed itself must not be shrunk
  }
}
| scalaprops/scalaprops | scalaz/src/test/scala/scalaprops/ShrinkTest.scala | Scala | mit | 1,081 |
/*
* Copyright (c) 2016 dawid.melewski
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package actyxpoweruseralert.actors
import akka.actor.{ Actor, ActorLogging, ActorRef, Props }
import akka.contrib.throttle.Throttler.SetTarget
import actyxpoweruseralert.model.MachineEndpoint
import actyxpoweruseralert.services.MachineParkApi
import scala.util.{ Failure, Success }
/**
 * Fetches the list of machine endpoints from the MachinePark API on request
 * and forwards it to `mainActor`; failures are only logged.
 */
class MachinesActor(api: MachineParkApi, mainActor: ActorRef)
  extends Actor
  with ActorLogging {

  import MachinesActor._
  import context.dispatcher

  override def receive: Receive = {
    case GetMachinesEndpoints =>
      api.machines().onComplete {
        case Success(endpoints) =>
          mainActor ! MachineEndpoints(endpoints)
        case Failure(cause) =>
          log.error(cause, "Occurred error when retrieving machines endpoints")
      }
  }
}
object MachinesActor {
  /** Asks the actor to fetch the current machine endpoints. */
  case object GetMachinesEndpoints

  /** Reply sent to the main actor with the fetched endpoints. */
  case class MachineEndpoints(list: List[MachineEndpoint])

  /** Props factory so callers don't close over actor construction. */
  def props(api: MachineParkApi, mainActor: ActorRef): Props =
    Props(new MachinesActor(api, mainActor))
}
| meloniasty/ActyxPowerUserAlert | src/main/scala/actyxpoweruseralert/actors/MachinesActor.scala | Scala | mit | 2,088 |
/*
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/agpl.html.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package lancet
package analysis
class TestAnalysis1 extends FileDiffSuite {
val prefix = "test-out/test-analysis-1"
/*
generalizing the store abstraction
(general order of optimizations:)
1) rewrite to constant (3+1 --> 4; x*0 --> 0)
2) rewrite to previously defined value (x+0 --> x; x+y-y --> x)
3) don't perform operation twice (cse)
(requirements:)
1) forward analysis to shortcut and eliminate
reads (propagate values through data structures)
2) generate dependencies between statements,
so that they can be soundly moved around
and dce'd (enable backwards analysis on dep
graph)
concrete semantics:
Frame = Local -> Val (Frame not used here)
Val = Prim Int | Ref Addr
Obj = Class x Field -> Val
Store = Addr -> Obj
eval: Exp => Val
e[ new C ] = { val a = freshAddr
store = store update [a -> (C, fields(C) -> null))]
a }
e[ x.f = y ] = { val Ref a = eval(x)
val y' = eval(y)
val C,fields = store(a)
store = store update [a -> (C, fields update [f -> y'])]
() }
e[ x.f ] = { val Ref a = eval(x)
val C,fields = store(a)
fields(f) }
e[ if (c) a else b ] = { val Prim c' = eval(c)
if (c' != 0) eval(a) else eval(b) }
(threading of store elided)
collecting/abstract semantics:
e[ if (c) a else b ] = { val Prim c' = eval(c)
if (!mayZero(c'))
eval(a)
else if (mustZero(c'))
eval(b)
else eval(a) join eval(b) }
Val^ = Prim Int^ | Ref Addr^
Obj^ = Field -> Val^
Store^ = Addr -> Obj^
so: need to define Int^ and Addr^
Int^: mustZero, mayZero
Addr^:
store.apply and store.update now
need to take Addr^ objects
need to precise what to update: we want
strong updates -- only possible if we know
that the target is a singleton
*/
object Test1 {
  // Concrete evaluation semantics: a straightforward interpreter over a
  // mutable store mapping heap addresses to objects (field maps).

  type Addr = Int
  type Field = String

  // Runtime values: primitive ints or references to heap addresses.
  abstract class Val
  case class Prim(x: Int) extends Val
  case class Ref(x: Addr) extends Val

  type Obj = Map[Field,Val]
  type Store = Map[Addr,Obj]

  // Expression language: constants, static (global) addresses, allocation,
  // field reads/writes, conditionals, and sequencing blocks.
  abstract class Exp
  case class Const(x: Int) extends Exp
  case class Static(x: Int) extends Exp
  case class New() extends Exp
  case class Get(x: Exp, f: Field) extends Exp
  case class Put(x: Exp, f: Field, y: Exp) extends Exp
  case class If(c: Exp, a: Exp, b: Exp) extends Exp
  case class Block(xs: List[Exp]) extends Exp {
    override def toString = "{\\n " + xs.map(_.toString).mkString("\\n").replace("\\n","\\n ") + "\\n}"
  }

  // Address 0 pre-exists as the "static" object; fresh allocations start at 100
  // so they never collide with the small static addresses used by the tests.
  var store: Store = Map(0 -> Map.empty)
  var curAddr = 100
  // Return the current address, then bump the counter (try/finally idiom).
  def freshAddr() = { try curAddr finally curAddr += 1 }

  def eval(e: Exp): Val = e match {
    case Const(x) => Prim(x)
    case Static(x) => Ref(x)
    case If(c,a,b) =>
      val Prim(c1) = eval(c)
      // C-style truthiness: any nonzero value selects the then-branch.
      if (c1 != 0) eval(a) else eval(b)
    // Value of a block is the value of its last expression.
    // NOTE(review): reduceLeft throws on an empty Block — assumed non-empty.
    case Block(xs) => xs map eval reduceLeft ((a,b) => b)
    case New() =>
      val a = freshAddr()
      store = store + (a -> Map.empty)
      Ref(a)
    case Get(x, f) =>
      val Ref(a) = eval(x)
      store(a)(f)
    case Put(x, f, y) =>
      val Ref(a) = eval(x)
      val y1 = eval(y)
      val x1 = store.getOrElse(a, Map.empty)
      store = store + (a -> (x1 + (f -> y1)))
      // Field writes evaluate to 0.
      Prim(0)
  }

  // Writes a counter, then branches on it; each branch allocates a fresh
  // object into static slot 1 and sets a field ("foo" or "bar") on it.
  val testProg1 = Block(List(
    Put(Static(0), "counter", Const(1)),
    If(Get(Static(0), "counter"),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "foo", Const(5))
      )),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "bar", Const(5))
      ))
    )
  ))

  // Print the program, its result, and the final store.
  def run() = {
    println("prog: " + testProg1)
    val res = eval(testProg1)
    println("res: " + res)
    println("store: " + store)
  }
}
object Test2 {
  // Abstract semantics with *sets* as abstract values ('may'-style analysis):
  // an AbsInt/AbsAddr is the set of concrete values it may denote. This loses
  // 'must' information — e.g. whether a location is definitely undefined.

  type Addr = Int
  type Field = String

  type AbsInt = Set[Int]
  type AbsAddr = Set[Addr]

  def mayZero(x: AbsInt) = x contains 0
  def mustZero(x: AbsInt) = x == Set(0)

  abstract class Val {
    // Join of two abstract values: set union on the matching variant.
    // NOTE(review): non-exhaustive — joining a Prim with a Ref is a MatchError.
    def join(that: Val) = (this,that) match {
      case (Prim(x), Prim(y)) => Prim(x ++ y)
      case (Ref(x), Ref(y)) => Ref(x ++ y)
    }
  }
  case class Prim(x: AbsInt) extends Val
  case class Ref(x: AbsAddr) extends Val

  type Obj = Map[Field,Val]
  type Store = Map[Addr,Obj]

  // Field-wise join of two abstract objects; fields present on only one side
  // are kept as-is.
  def objJoin(a: Obj, b: Obj): Obj = {
    val m = (a.keys ++ b.keys).map(k => k -> ((a.get(k),b.get(k)) match {
      case (Some(u),Some(v)) => u join v
      case (Some(u),_) => u
      case (u,Some(v)) => v
    }))
    m.toMap
  }
  def objJoin(os: List[Obj]): Option[Obj] =
    if (os.isEmpty) None else Some(os.reduceLeft(objJoin))

  // Abstract store: a functional view from abstract addresses to objects.
  // Updates and joins are layered as closures rather than materialized.
  abstract class AbsStore { self =>
    // Debug rendering: probe the first 1000 concrete addresses.
    override def toString = List.range(0,1000).flatMap(a=>get(Set(a)).map(a -> _)).mkString("\\n")
    def getOrElse(a: AbsAddr, b: => Obj): Obj = get(a).getOrElse(b)
    def apply(a: AbsAddr): Obj = get(a).get
    def get(a: AbsAddr): Option[Obj]
    // Weak update: the new binding is joined over the previous store.
    def +(p: (AbsAddr,Obj)): AbsStore = join(AbsStore(p._1.map(_ -> p._2).toMap))
    def join(other: AbsStore): AbsStore = new AbsStore {
      def get(a: AbsAddr): Option[Obj] =
        objJoin(self.get(a).toList ++ other.get(a).toList)
    }
  }
  object AbsStore {
    // Lift a concrete store: an abstract address reads as the join of all
    // objects at the concrete addresses it may denote.
    def apply(x: Store) = new AbsStore {
      def get(a: AbsAddr): Option[Obj] =
        objJoin(a.toList.flatMap(x.get(_).toList))
    }
  }

  // Same expression language as Test1.
  abstract class Exp
  case class Const(x: Int) extends Exp
  case class Static(x: Int) extends Exp
  case class New() extends Exp
  case class Get(x: Exp, f: Field) extends Exp
  case class Put(x: Exp, f: Field, y: Exp) extends Exp
  case class If(c: Exp, a: Exp, b: Exp) extends Exp
  case class Block(xs: List[Exp]) extends Exp {
    override def toString = "{\\n " + xs.map(_.toString).mkString("\\n").replace("\\n","\\n ") + "\\n}"
  }

  // Initial store: static object 0 has a counter that may be any of 0..9,
  // so branches on it are genuinely undecided.
  val store0: AbsStore = AbsStore(Map(0 -> Map("counter" -> Prim(Set(0,1,2,3,4,5,6,7,8,9)))))
  var store: AbsStore = _
  var curAddr = 100
  def freshAddr() = { try Set(curAddr) finally curAddr += 1 }

  def eval(e: Exp): Val = e match {
    case Const(x) => Prim(Set(x))
    case Static(x) => Ref(Set(x))
    case If(c,a,b) =>
      val Prim(c1) = eval(c)
      println("c "+c1)
      // Definitely nonzero: take the then-branch; definitely zero: else-branch;
      // otherwise evaluate both on the saved store and join results and stores.
      if (!mayZero(c1)) eval(a) else if (mustZero(c1)) eval(b) else {
        val save = store
        val e1 = eval(a)
        val s1 = store
        store = save
        val e2 = eval(b)
        val s2 = store
        store = s1 join s2
        e1 join e2
      }
    case Block(xs) => xs map eval reduceLeft ((a,b) => b)
    case New() =>
      val a = freshAddr()
      store = store + (a -> Map.empty)
      Ref(a)
    case Get(x, f) =>
      val Ref(a) = eval(x)
      store(a)(f)
    case Put(x, f, y) =>
      val Ref(a) = eval(x)
      val y1 = eval(y)
      val x1 = store.getOrElse(a, Map.empty)
      store = store + (a -> (x1 + (f -> y1)))
      Prim(Set(0))
  }

  val testProg1 = Block(List(
    //Put(Static(0), "counter", Const(1)),
    If(Get(Static(0), "counter"),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "foo", Const(5))
      )),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "bar", Const(5))
      ))
    )
  ))
  val testProg2 = Block(List(
    //Put(Static(0), "counter", Const(1)),
    Put(Static(1), "a", New()),
    Put(Get(Static(1), "a"), "baz", Const(3)),
    If(Get(Static(0), "counter"),
      Block(List(
        Put(Static(1), "a", New()), // strong update, overwrite
        Put(Get(Static(1), "a"), "foo", Const(5))
      )),
      Block(List(
        Put(Static(1), "a", New()), // strong update, overwrite
        Put(Get(Static(1), "a"), "bar", Const(5))
      ))
    ),
    Put(Get(Static(1), "a"), "bar", Const(7)) // this is not a strong update, because 1.a may be one of two allocs
  ))

  // Run a program from the canonical initial store and dump the results.
  def run(testProg: Exp) = {
    println("prog: " + testProg)
    store = store0
    val res = eval(testProg)
    println("res: " + res)
    println("store: \\n" + store)
  }
}
object Test3 {
  // Symbolic abstract values: Undefined/Const/Phi trees that can answer both
  // 'may' and 'must' queries — the 'must' side is what enables strong updates.

  type Addr = Int
  type Field = String

  // Abstract ints as a phi tree; ++ joins two values symbolically.
  abstract class AbsInt {
    def ++(x: AbsInt) = PhiInt(this,x)
  }
  case class UndefinedInt() extends AbsInt
  case class ConstInt(x:Int) extends AbsInt
  case class PhiInt(x:AbsInt,y:AbsInt) extends AbsInt

  // 'may' = union over the phi tree; 'must' = intersection.
  def mayContain(x: AbsInt): Set[Int] = x match {
    case UndefinedInt() => Set()
    case ConstInt(x) => Set(x)
    case PhiInt(x,y) => mayContain(x) ++ mayContain(y)
  }
  def mustContain(x: AbsInt): Set[Int] = x match {
    case UndefinedInt() => Set()
    case ConstInt(x) => Set(x)
    case PhiInt(x,y) => mustContain(x) intersect mustContain(y)
  }
  def mayZero(x: AbsInt): Boolean = x match {
    case UndefinedInt() => false
    case ConstInt(0) => true
    case ConstInt(_) => false
    case PhiInt(x,y) => mayZero(x) || mayZero(y)
  }
  def mustZero(x: AbsInt): Boolean = x match {
    case UndefinedInt() => false
    case ConstInt(0) => true
    case ConstInt(_) => false
    case PhiInt(x,y) => mustZero(x) && mustZero(y)
  }

  // Abstract addresses mirror the AbsInt structure.
  abstract class AbsAddr {
    def ++(x: AbsAddr) = PhiAddr(this,x)
  }
  case class UndefinedAddr() extends AbsAddr
  case class ConstAddr(x:Addr) extends AbsAddr
  case class PhiAddr(x:AbsAddr,y:AbsAddr) extends AbsAddr
  def mayContain(x: AbsAddr): Set[Addr] = x match {
    case UndefinedAddr() => Set()
    case ConstAddr(x) => Set(x)
    case PhiAddr(x,y) => mayContain(x) ++ mayContain(y)
  }
  def mustContain(x: AbsAddr): Set[Addr] = x match {
    case UndefinedAddr() => Set()
    case ConstAddr(x) => Set(x)
    case PhiAddr(x,y) => mustContain(x) intersect mustContain(y)
  }

  abstract class Val {
    // Human-readable must/may summary of the underlying symbolic value.
    def toFlatString = this match {
      case Prim(x) => "must:"+mustContain(x)+"may:"+mayContain(x)
      case Ref(x) => "must:"+mustContain(x)+"may:"+mayContain(x)
    }
    // NOTE(review): the last two cases paper over joining an undefined Ref
    // with a Prim (see inline HACK comments); other Prim/Ref mixes MatchError.
    def join(that: Val) = (this,that) match {
      case (Prim(x), Prim(y)) => Prim(x ++ y)
      case (Ref(x), Ref(y)) => Ref(x ++ y)
      case (Ref(UndefinedAddr()), Prim(y)) => Prim(UndefinedInt() ++ y) // HACK
      case (Prim(x), Ref(UndefinedAddr())) => Prim(x ++ UndefinedInt()) // HACK
    }
  }
  case class Prim(x: AbsInt) extends Val
  case class Ref(x: AbsAddr) extends Val

  type Obj = Map[Field,Val]

  // Abstract objects: layered constants, field updates and phis.
  abstract class AbsObj {
    def toFlatString = mayFields(this).map(f=>f->this(f).toFlatString).toMap.toString
    // Field read, resolving through updates/phis; missing fields default to
    // an undefined reference.
    def apply(f: Field): Val = this match {
      case UndefinedObj() => Ref(UndefinedAddr()) // FIXME: might be addr or int
      case ConstObj(x) => x.getOrElse(f, Ref(UndefinedAddr())) // FIXME: might be addr or int
      case PhiObj(x,y) => x(f) join y(f)
      case UpdateObj(x,f2,y) if f == f2 => y
      case UpdateObj(x,f2,y) => x(f)
    }
    // Field write: collapse redundant updates; otherwise layer an UpdateObj.
    def +(p: (Field,Val)): AbsObj = this match {
      case ConstObj(x) => ConstObj(x + p)
      case UpdateObj(that, f, _) if f == p._1 => that + p
      case PhiObj(x,y) => objJoin(x + p, y + p)
      case _ => UpdateObj(this,p._1,p._2)
    }
  }
  case class UndefinedObj() extends AbsObj
  case class ConstObj(x:Obj) extends AbsObj
  case class PhiObj(x:AbsObj,y:AbsObj) extends AbsObj
  case class UpdateObj(x:AbsObj,f:Field,y:Val) extends AbsObj
  def mayFields(x: AbsObj): Set[Field] = x match {
    case UndefinedObj() => Set()
    case ConstObj(x) => x.keys.toSet
    case PhiObj(x,y) => mayFields(x) ++ mayFields(y)
    case UpdateObj(x,f,y) => mayFields(x) ++ Set(f)
  }

  type Store = Map[Addr,Obj]
  def objJoin(a: AbsObj, b: AbsObj): AbsObj = if (a == b) a else PhiObj(a,b)
  def objJoin(os: List[AbsObj]): Option[AbsObj] =
    if (os.isEmpty) None else Some(os.reduceLeft(objJoin))

  // Abstract store over AbsAddr keys; performs a strong update when both the
  // written and read addresses are known constants.
  abstract class AbsStore { self =>
    override def toString = List.range(0,1000).flatMap(a=>get(ConstAddr(a)).map(a -> _)).mkString("\\n")
    def toPartialString = List.range(0,1000).flatMap(a=>get(ConstAddr(a)).map(b => a -> mayFields(b).map(f => f -> b(f)).toMap)).mkString("\\n")
    def toFlatString = List.range(0,1000).flatMap(a=>get(ConstAddr(a)).map(b => a -> b.toFlatString)).mkString("\\n")
    def getOrElse(a: AbsAddr, b: => AbsObj): AbsObj = get(a).getOrElse(b)
    def apply(a: AbsAddr): AbsObj = get(a).get
    def get(a: AbsAddr): Option[AbsObj]
    def +(p: (AbsAddr,AbsObj)): AbsStore = new AbsStore {
      val (u,o) = p
      def get(a: AbsAddr): Option[AbsObj] =
        // TODO
        // concrete: if (a == u) o else self.get(a)
        // abstract: if (u.mustContain(a)) o else
        // if (u.mayContain(a))
        (u,a) match {
          case (ConstAddr(x),ConstAddr(y)) => // strong update!
            if (x == y) Some(o) else self.get(a)
          case _ =>
            // Weak update: join the old binding with the new one at every
            // address the written address may denote.
            objJoin(self.get(a).toList ++ AbsStore(mayContain(u).map(_ -> o).toMap).get(a).toList)
        }
    }
    def join(other: AbsStore): AbsStore = new AbsStore {
      def get(a: AbsAddr): Option[AbsObj] =
        objJoin(self.get(a).toList ++ other.get(a).toList)
    }
  }
  object AbsStore {
    def apply(x: Map[Addr,AbsObj]) = new AbsStore {
      def get(a: AbsAddr): Option[AbsObj] = {
        val as = mayContain(a)
        objJoin(as.toList.flatMap(x.get(_).toList))
      }
    }
  }

  // Same expression language as Test1/Test2.
  abstract class Exp
  case class Const(x: Int) extends Exp
  case class Static(x: Int) extends Exp
  case class New() extends Exp
  case class Get(x: Exp, f: Field) extends Exp
  case class Put(x: Exp, f: Field, y: Exp) extends Exp
  case class If(c: Exp, a: Exp, b: Exp) extends Exp
  case class Block(xs: List[Exp]) extends Exp {
    override def toString = "{\\n " + xs.map(_.toString).mkString("\\n").replace("\\n","\\n ") + "\\n}"
  }

  // Initial store: counter may be 0..9 (as a phi tree), and static slot 1
  // starts with an undefined "a" reference.
  val store0: AbsStore = AbsStore(Map(0 -> ConstObj(Map("counter" ->
    Prim(PhiInt(ConstInt(0),PhiInt(ConstInt(1),PhiInt(ConstInt(2),PhiInt(ConstInt(3),
    PhiInt(ConstInt(4),PhiInt(ConstInt(5),PhiInt(ConstInt(6),PhiInt(ConstInt(7),
    PhiInt(ConstInt(8),ConstInt(9))))))))))))),
    1 -> ConstObj(Map("a" -> Ref(UndefinedAddr())))))
  var store: AbsStore = _
  var curAddr = 100
  def freshAddr() = { try ConstAddr(curAddr) finally curAddr += 1 }

  def eval(e: Exp): Val = e match {
    case Const(x) => Prim(ConstInt(x))
    case Static(x) => Ref(ConstAddr(x))
    case If(c,a,b) =>
      val Prim(c1) = eval(c)
      println("c "+c1)
      // Same branch logic as Test2, but over symbolic may/must queries.
      if (!mayZero(c1)) eval(a) else if (mustZero(c1)) eval(b) else {
        val save = store
        val e1 = eval(a)
        val s1 = store
        store = save
        val e2 = eval(b)
        val s2 = store
        store = s1 join s2
        e1 join e2
      }
    case Block(xs) => xs map eval reduceLeft ((a,b) => b)
    case New() =>
      val a = freshAddr()
      store = store + (a -> ConstObj(Map.empty))
      Ref(a)
    case Get(x, f) =>
      val Ref(a) = eval(x)
      store(a)(f)
    case Put(x, f, y) =>
      val Ref(a) = eval(x)
      val y1 = eval(y)
      val x1 = store.getOrElse(a, UndefinedObj())
      store = store + (a -> (x1 + (f -> y1)))
      Prim(ConstInt(0))
  }

  val testProg1 = Block(List(
    //Put(Static(0), "counter", Const(1)),
    If(Get(Static(0), "counter"),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "foo", Const(5))
      )),
      Block(List(
        Put(Static(1), "a", New()),
        Put(Get(Static(1), "a"), "bar", Const(5))
      ))
    )
  ))
  val testProg2 = Block(List(
    //Put(Static(0), "counter", Const(1)),
    Put(Static(1), "a", New()),
    Put(Get(Static(1), "a"), "baz", Const(3)),
    If(Get(Static(0), "counter"),
      Block(List(
        Put(Static(1), "a", New()), // strong update, overwrite
        Put(Get(Static(1), "a"), "foo", Const(5))
      )),
      Block(List(
        Put(Static(1), "a", New()), // strong update, overwrite
        Put(Get(Static(1), "a"), "bar", Const(5))
      ))
    ),
    Put(Get(Static(1), "a"), "bar", Const(7)) // this is not a strong update, because 1.a may be one of two allocs
  ))

  // Run a program from the canonical initial store and dump the store in all
  // three renderings (raw, per-field, flattened must/may).
  def run(testProg: Exp) = {
    println("prog: " + testProg)
    store = store0
    val res = eval(testProg)
    println("res: " + res)
    println("store: \\n" + store)
    println("partially flat store: \\n" + store.toPartialString)
    println("flat store: \\n" + store.toFlatString)
  }
}
// TODO:
// loops (finite set of addrs, need to analyze uniqueness)
// abstract GC (we know the baz record is no longer accessible after the if -> remove it)
// construct dependency graph
// run it
// Concrete interpreter; output is diffed against the checked file "<prefix>A".
def testA = withOutFileChecked(prefix+"A") {
  Test1.run()
}
// Set-based 'may' analysis; demonstrates the precision lost without strong updates.
def testB = withOutFileChecked(prefix+"B") {
  Test2.run(Test2.testProg1)
  Test2.run(Test2.testProg2)
  println("imprecision due to lack of strong updates:")
  println("1.a still seems to have a baz field, but cannot!")
}
// Symbolic analysis; strong updates remove the stale "baz" field seen in testB.
def testC = withOutFileChecked(prefix+"C") {
  Test3.run(Test3.testProg1)
  Test3.run(Test3.testProg2)
  println("strong updates: no baz field")
}
} | TiarkRompf/lancet | src/test/scala/lancet/analysis/test1.scala | Scala | agpl-3.0 | 19,336 |
/*******************************************************************************
Copyright (c) 2013, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.models.AbsInternalFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
// Abstract-semantics model of the Tizen NDEFRecordURI API object for the
// JSAF static analyzer: declares the constructor/prototype heap objects and
// the transfer function for `new tizen.NDEFRecordURI(uri)`.
object TIZENNDEFRecordURI extends Tizen {
  private val name = "NDEFRecordURI"

  /* predefined locations */
  val loc_cons = newSystemRecentLoc(name + "Cons")
  val loc_proto = newSystemRecentLoc(name + "Proto")
  // Prototype-chain parent: the generic NDEFRecord prototype.
  val loc_parent = TIZENNDEFRecord.loc_proto

  /* constructor or object*/
  private val prop_cons: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Function")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("@scope", AbsConstValue(PropValue(Value(NullTop)))),
    ("@construct", AbsInternalFunc("tizen.NDEFRecordURI.constructor")),
    ("@hasinstance", AbsConstValue(PropValue(Value(NullTop)))),
    ("prototype", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F))))
  )

  /* prototype */
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_parent), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(BoolTrue)))
  )

  override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_cons, prop_cons), (loc_proto, prop_proto)
  )

  override def getSemanticMap(): Map[String, SemanticFun] = {
    Map(
      // Transfer function for the constructor call.
      ("tizen.NDEFRecordURI.constructor" -> (
        (sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
          // Receiver set and enclosing environment of the call.
          val lset_this = h(SinglePureLocalLoc)("@this")._1._2._2
          val lset_env = h(SinglePureLocalLoc)("@env")._1._2._2
          val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
          if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
          val addr_env = set_addr.head
          // Allocate one fresh (recent) location for the payload array.
          val addr1 = cfg.getAPIAddress(addr_env, 0)
          val l_r1 = addrToLoc(addr1, Recent)
          val (h_2, ctx_2) = Helper.Oldify(h, ctx, addr1)
          // First argument: the URI. A possibly-non-string value raises
          // TypeMismatchError.
          val v_1 = getArgValue(h_2, ctx_2, args, "0")
          val es_1 =
            if (v_1._1._5 </ StrTop)
              Set[WebAPIException](TypeMismatchError)
            else TizenHelper.TizenExceptionBot
          // Abstract byte array used for the type/payload/id properties.
          val o_arr = Helper.NewArrayObject(UInt).
            update("@default_number", PropValue(ObjectValue(Value(NumTop), T, T, T)))
          val h_3 = h_2.update(l_r1, o_arr)
          val o_new = ObjEmpty.
            update("@class", PropValue(AbsString.alpha("Object"))).
            // NOTE(review): @proto points at TIZENNDEFRecordMedia.loc_proto even
            // though this is the NDEFRecordURI constructor — looks like a
            // copy-paste slip; confirm whether this object's loc_proto was meant.
            update("@proto", PropValue(ObjectValue(Value(TIZENNDEFRecordMedia.loc_proto), F, F, F))).
            update("@extensible", PropValue(T)).
            update("tnf", PropValue(ObjectValue(Value(NumTop), F, T, T))).
            update("type", PropValue(ObjectValue(Value(l_r1), F, T, T))).
            update("payload", PropValue(ObjectValue(Value(l_r1), F, T, T))).
            update("id", PropValue(ObjectValue(Value(l_r1), F, T, T))).
            update("uri", PropValue(ObjectValue(Value(v_1._1._5), F, T, T)))
          // Write the new record into every location `this` may denote.
          val h_4 = lset_this.foldLeft(h_3)((_h, l) => _h.update(l, o_new))
          val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1)
          ((Helper.ReturnStore(h_4, Value(lset_this)), ctx_2), (he + h_e, ctxe + ctx_e))
        }
      ))
    )
  }

  override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
  override def getDefMap(): Map[String, AccessFun] = {Map()}
  override def getUseMap(): Map[String, AccessFun] = {Map()}
}
package com.twitter.finagle.http
import com.twitter.conversions.time._
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class CookieTest extends FunSuite {

  test("mutate underlying") {
    // Start from a minimal cookie and mutate every settable attribute once.
    val c = new Cookie("name", "value")
    c.comment = "hello"
    c.commentUrl = "hello.com"
    c.domain = ".twitter.com"
    c.maxAge = 100.seconds
    c.path = "/1/statuses/show"
    c.ports = Seq(1, 2, 3)
    c.value = "value2"
    c.version = 1
    c.httpOnly = true
    c.isDiscard = false
    c.isSecure = true

    // Every mutation must be visible through the corresponding accessor;
    // the name is fixed at construction and stays unchanged.
    assert(c.name == "name")
    assert(c.comment == "hello")
    assert(c.commentUrl == "hello.com")
    assert(c.domain == ".twitter.com")
    assert(c.maxAge == 100.seconds)
    assert(c.path == "/1/statuses/show")
    assert(c.ports == Set(1, 2, 3))
    assert(c.value == "value2")
    assert(c.version == 1)
    assert(c.httpOnly)
    assert(!c.isDiscard)
    assert(c.isSecure)
  }
}
| adriancole/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/CookieTest.scala | Scala | apache-2.0 | 1,215 |
package com.thoughtworks.pact.verify.pact
import com.thoughtworks.pact.verify.json.ResponseBodyJson
import com.thoughtworks.pact.verify.junit.Failure
import org.apache.commons.lang3.exception.ExceptionUtils
import play.api.libs.json.Json
import scala.util.Success
/**
* Created by xfwu on 12/07/2017.
*/
/**
 * A single pact interaction: an expected request/response pair, optionally
 * parameterized via `setParameters`.
 *
 * @param description   human-readable label for the interaction
 * @param request       the request this interaction expects to receive
 * @param response      the response the provider is expected to return
 * @param setParameters optional substitution parameters for the interaction
 */
case class Interaction(description: String,
                       request: PactRequest,
                       response: PactResponse,
                       setParameters: Option[Map[String,String]] = None) {

  /**
   * Compares an actual HTTP response against this interaction's expectation.
   * Checks the status code first, then (if an expected body is defined) parses
   * and matches the body. Returns None on success, or a [[Failure]] describing
   * the first mismatch.
   */
  def assert(request: PactRequest, actual: HttpResponse): Option[Failure] = {
    val expect = this.response
    actual match {
      case _ if expect.status != actual.status =>
        Some(Failure("status code failure",
          generateStatusFailureMessage(request, actual, expect), Some(actual.body)))
      case _ if expect.getBody().isDefined =>
        ResponseBodyJson.tryHardParseJsValue(actual.body) match {
          case Success(jsValue) => expect.isMatch(jsValue) match {
            case Some(err) => Some(Failure("context match failure",
              generateBodyMatchFailureMessage(err, request), Some(Json.stringify(jsValue))))
            case None => None
          }
          case scala.util.Failure(f) => Some(Failure("body parse failure",
            generateBodyParseFailureMessage(f, request), Some(actual.body)))
        }
      case _ => None
    }
  }

  // Message for a response body that could not be parsed as JSON.
  private def generateBodyParseFailureMessage(err: Throwable, request: PactRequest) = {
    val errorDetail = ExceptionUtils.getStackTrace(err)
    s"request url: ${request.path}\\n Parse failure:$errorDetail \\n"
  }

  // Message for a parsed body that does not match the expectation.
  private def generateBodyMatchFailureMessage(err: String, request: PactRequest) = {
    s"request url: ${request.path}\\n Match failure:$err \\n"
  }

  // Message for a status-code mismatch.
  // Renamed from generateStatuesFailureMessage (typo); private, so callers
  // outside this class are unaffected.
  private def generateStatusFailureMessage(request: PactRequest, actual: HttpResponse, expect: PactResponse) = {
    s"request url: ${request.path}\\n Status Do not match: ${expect.status} != ${actual.status}"
  }
}
| XuefengWu/pact_verify | src/main/scala/com/thoughtworks/pact/verify/pact/Interaction.scala | Scala | mit | 1,992 |
package sample.model
// Generally speaking I wouldnt import the whole namespace,
// but as it happens with mapper models you are pretty much using
// most of the types, and it becomes rather verbose to import
// more than 10 or 15 types.
import net.liftweb.mapper._
import net.liftweb.common.Full
/**
 * Meta-mapper / singleton for [[Book]]. Backs the "books" table and seeds it
 * with sample titles when Schemifier creates the table.
 */
object Book extends Book with LongKeyedMetaMapper[Book]{
  override def dbTableName = "books"
  // Schemifier callback: runs `populate` once, right after table creation.
  override def dbAddTable = Full(populate _)

  // Seed the table with sample rows. Declared with `()` and an explicit Unit
  // result because it exists purely for its side effect (replaces the
  // deprecated procedure syntax `def populate { ... }`).
  private def populate(): Unit = {
    val titles =
      "Lift in Action" ::
      "Scala in Depth" ::
      "Scala in Action" ::
      "Hadoop in Action" :: Nil
    titles.foreach(title => Book.create.title(title).save)
  }
}
// Persistent entity: one row of the "books" table. The mixed-in traits supply
// the auto-increment primary key (IdPK) and created/updated timestamps.
class Book extends LongKeyedMapper[Book]
    with CreatedUpdated
    with IdPK {
  def getSingleton = Book
  // Title column, limited to 255 characters.
  object title extends MappedString(this, 255)
}
| timperrett/lift-in-action | chapter-14/src/main/scala/sample/model/Book.scala | Scala | apache-2.0 | 826 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd
import org.apache.spark.SparkContext._
import org.bdgenomics.adam.models.{ ReferenceRegion, SequenceDictionary, SequenceRecord }
import org.bdgenomics.adam.util.ADAMFunSuite
import org.bdgenomics.formats.avro.{ AlignmentRecord, Contig }
// Tests for the broadcast region join: NonoverlappingRegions partitioning
// helpers first, then Spark-backed join behavior, and finally equivalence
// with the brute-force cartesian join.
class BroadcastRegionJoinSuite extends ADAMFunSuite {

  test("alternating returns an alternating seq of items") {
    import NonoverlappingRegions._
    // `alternating` keeps every other element, starting with either the
    // first or the second depending on `includeFirst`.
    assert(alternating(Seq(), includeFirst = true) === Seq())
    assert(alternating(Seq(1), includeFirst = true) === Seq(1))
    assert(alternating(Seq(1, 2), includeFirst = true) === Seq(1))
    assert(alternating(Seq(1, 2, 3), includeFirst = true) === Seq(1, 3))
    assert(alternating(Seq(1, 2, 3, 4), includeFirst = true) === Seq(1, 3))
    assert(alternating(Seq(1, 2, 3, 4, 5), includeFirst = true) === Seq(1, 3, 5))
    assert(alternating(Seq(), includeFirst = false) === Seq())
    assert(alternating(Seq(1), includeFirst = false) === Seq())
    assert(alternating(Seq(1, 2), includeFirst = false) === Seq(2))
    assert(alternating(Seq(1, 2, 3), includeFirst = false) === Seq(2))
    assert(alternating(Seq(1, 2, 3, 4), includeFirst = false) === Seq(2, 4))
    assert(alternating(Seq(1, 2, 3, 4, 5), includeFirst = false) === Seq(2, 4))
    assert(alternating(Seq(1, 2, 3, 4, 5, 6), includeFirst = false) === Seq(2, 4, 6))
  }

  test("Single region returns itself") {
    val region = new ReferenceRegion("chr1", 1, 2)
    val regions = new NonoverlappingRegions(Seq(region))
    val result = regions.findOverlappingRegions(region)
    assert(result.size === 1)
    assert(result.head === region)
  }

  test("Two adjacent regions will be merged") {
    // [10,20) and [20,30) touch, so only the outer endpoints remain.
    val regions = new NonoverlappingRegions(Seq(
      ReferenceRegion("chr1", 10, 20),
      ReferenceRegion("chr1", 20, 30)))
    assert(regions.endpoints === Array(10L, 30L))
  }

  test("Nonoverlapping regions will all be returned") {
    val region1 = new ReferenceRegion("chr1", 1, 2)
    val region2 = new ReferenceRegion("chr1", 3, 5)
    val testRegion3 = new ReferenceRegion("chr1", 1, 4)
    val testRegion1 = new ReferenceRegion("chr1", 4, 5)
    val regions = new NonoverlappingRegions(Seq(region1, region2))
    // this should be 2, not 3, because binaryRegionSearch is (now) no longer returning
    // ReferenceRegions in which no original RR's were placed (i.e. the 'gaps').
    assert(regions.findOverlappingRegions(testRegion3).size === 2)
    assert(regions.findOverlappingRegions(testRegion1).size === 1)
  }

  test("Many overlapping regions will all be merged") {
    // Three mutually overlapping regions collapse into a single bin.
    val region1 = new ReferenceRegion("chr1", 1, 3)
    val region2 = new ReferenceRegion("chr1", 2, 4)
    val region3 = new ReferenceRegion("chr1", 3, 5)
    val testRegion = new ReferenceRegion("chr1", 1, 4)
    val regions = new NonoverlappingRegions(Seq(region1, region2, region3))
    assert(regions.findOverlappingRegions(testRegion).size === 1)
  }

  test("ADAMRecords return proper references") {
    val contig = Contig.newBuilder
      .setContigName("chr1")
      .setContigLength(5L)
      .setReferenceURL("test://chrom1")
      .build
    val built = AlignmentRecord.newBuilder()
      .setContig(contig)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val record1 = built
    val record2 = AlignmentRecord.newBuilder(built).setStart(3L).setEnd(4L).build()
    // baseRecord spans [1,5), covering both record1 and record2.
    val baseRecord = AlignmentRecord.newBuilder(built).setCigar("4M").setEnd(5L).build()
    val baseMapping = new NonoverlappingRegions(Seq(ReferenceRegion(baseRecord)))
    val regions1 = baseMapping.findOverlappingRegions(ReferenceRegion(record1))
    val regions2 = baseMapping.findOverlappingRegions(ReferenceRegion(record2))
    // Both records map into the same (single) nonoverlapping bin.
    assert(regions1.size === 1)
    assert(regions2.size === 1)
    assert(regions1.head === regions2.head)
  }

  sparkTest("Ensure same reference regions get passed together") {
    val contig = Contig.newBuilder
      .setContigName("chr1")
      .setContigLength(5L)
      .setReferenceURL("test://chrom1")
      .build
    val builder = AlignmentRecord.newBuilder()
      .setContig(contig)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
    // Two identical records, one per side of the join.
    val record1 = builder.build()
    val record2 = builder.build()
    val rdd1 = sc.parallelize(Seq(record1)).keyBy(ReferenceRegion(_))
    val rdd2 = sc.parallelize(Seq(record2)).keyBy(ReferenceRegion(_))
    assert(BroadcastRegionJoinSuite.getReferenceRegion(record1) ===
      BroadcastRegionJoinSuite.getReferenceRegion(record2))
    // Every joined pair must overlap...
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      rdd1,
      rdd2).aggregate(true)(
      BroadcastRegionJoinSuite.merge,
      BroadcastRegionJoinSuite.and))
    // ...and there is exactly one pair.
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      rdd1,
      rdd2)
      .aggregate(0)(
        BroadcastRegionJoinSuite.count,
        BroadcastRegionJoinSuite.sum) === 1)
  }

  sparkTest("Overlapping reference regions") {
    val contig = Contig.newBuilder
      .setContigName("chr1")
      .setContigLength(5L)
      .setReferenceURL("test://chrom1")
      .build
    val built = AlignmentRecord.newBuilder()
      .setContig(contig)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val record1 = built
    val record2 = AlignmentRecord.newBuilder(built).setStart(3L).setEnd(4L).build()
    // The base record overlaps both records, so the join yields two pairs.
    val baseRecord = AlignmentRecord.newBuilder(built).setCigar("4M").setEnd(5L).build()
    val baseRdd = sc.parallelize(Seq(baseRecord)).keyBy(ReferenceRegion(_))
    val recordsRdd = sc.parallelize(Seq(record1, record2)).keyBy(ReferenceRegion(_))
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      baseRdd,
      recordsRdd)
      .aggregate(true)(
        BroadcastRegionJoinSuite.merge,
        BroadcastRegionJoinSuite.and))
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      baseRdd,
      recordsRdd).count() === 2)
  }

  sparkTest("Multiple reference regions do not throw exception") {
    // Records on two different contigs; the join must keep contigs separate.
    val contig1 = Contig.newBuilder
      .setContigName("chr1")
      .setContigLength(5L)
      .setReferenceURL("test://chrom1")
      .build
    val contig2 = Contig.newBuilder
      .setContigName("chr2")
      .setContigLength(5L)
      .setReferenceURL("test://chrom2")
      .build
    val builtRef1 = AlignmentRecord.newBuilder()
      .setContig(contig1)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val builtRef2 = AlignmentRecord.newBuilder()
      .setContig(contig2)
      .setStart(1)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val record1 = builtRef1
    val record2 = AlignmentRecord.newBuilder(builtRef1).setStart(3L).setEnd(4L).build()
    val record3 = builtRef2
    val baseRecord1 = AlignmentRecord.newBuilder(builtRef1).setCigar("4M").setEnd(5L).build()
    val baseRecord2 = AlignmentRecord.newBuilder(builtRef2).setCigar("4M").setEnd(5L).build()
    val baseRdd = sc.parallelize(Seq(baseRecord1, baseRecord2)).keyBy(ReferenceRegion(_))
    val recordsRdd = sc.parallelize(Seq(record1, record2, record3)).keyBy(ReferenceRegion(_))
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      baseRdd,
      recordsRdd)
      .aggregate(true)(
        BroadcastRegionJoinSuite.merge,
        BroadcastRegionJoinSuite.and))
    // One pair per record: two on chr1, one on chr2.
    assert(BroadcastRegionJoin.partitionAndJoin[AlignmentRecord, AlignmentRecord](
      baseRdd,
      recordsRdd).count() === 3)
  }

  sparkTest("regionJoin contains the same results as cartesianRegionJoin") {
    // Cross-check: the optimized join must produce exactly the pairs that the
    // brute-force cartesian-product-plus-filter join produces.
    val contig1 = Contig.newBuilder
      .setContigName("chr1")
      .setContigLength(5L)
      .setReferenceURL("test://chrom1")
      .build
    val contig2 = Contig.newBuilder
      .setContigName("chr2")
      .setContigLength(5L)
      .setReferenceURL("test://chrom2")
      .build
    val builtRef1 = AlignmentRecord.newBuilder()
      .setContig(contig1)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val builtRef2 = AlignmentRecord.newBuilder()
      .setContig(contig2)
      .setStart(1L)
      .setReadMapped(true)
      .setCigar("1M")
      .setEnd(2L)
      .build()
    val record1 = builtRef1
    val record2 = AlignmentRecord.newBuilder(builtRef1).setStart(3L).setEnd(4L).build()
    val record3 = builtRef2
    val baseRecord1 = AlignmentRecord.newBuilder(builtRef1).setCigar("4M").setEnd(5L).build()
    val baseRecord2 = AlignmentRecord.newBuilder(builtRef2).setCigar("4M").setEnd(5L).build()
    val baseRdd = sc.parallelize(Seq(baseRecord1, baseRecord2)).keyBy(ReferenceRegion(_))
    val recordsRdd = sc.parallelize(Seq(record1, record2, record3)).keyBy(ReferenceRegion(_))
    // Left-outer-join the cartesian results against the region-join results;
    // every cartesian pair must be matched by an identical region-join pair.
    assert(BroadcastRegionJoin.cartesianFilter(
      baseRdd,
      recordsRdd)
      .leftOuterJoin(
        BroadcastRegionJoin.partitionAndJoin(
          baseRdd,
          recordsRdd))
      .filter({
        case ((_: AlignmentRecord, (cartesian: AlignmentRecord, region: Option[AlignmentRecord]))) =>
          region match {
            case None => false
            case Some(record) => cartesian == record
          }
      })
      .count() === 3)
  }
}
// Helper functions used as RDD aggregate operators by the suite above.
object BroadcastRegionJoinSuite {
  def getReferenceRegion(record: AlignmentRecord): ReferenceRegion =
    ReferenceRegion(record)

  // Seq-op: folds a joined pair into `prev`, requiring the pair to overlap.
  def merge(prev: Boolean, next: (AlignmentRecord, AlignmentRecord)): Boolean =
    prev && getReferenceRegion(next._1).overlaps(getReferenceRegion(next._2))

  // Seq-op: counts joined pairs.
  def count[T](prev: Int, next: (T, T)): Int =
    prev + 1

  // Combine-op for count.
  def sum(value1: Int, value2: Int): Int =
    value1 + value2

  // Combine-op for merge.
  def and(value1: Boolean, value2: Boolean): Boolean =
    value1 && value2
}
| VinACE/adam | adam-core/src/test/scala/org/bdgenomics/adam/rdd/BroadcastRegionJoinSuite.scala | Scala | apache-2.0 | 10,658 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services.testdata.allocation
import javax.inject.{ Inject, Singleton }
import model.exchange.testdata.CreateAssessorAllocationResponse
import model.testdata.CreateAssessorAllocationData
import repositories.AssessorAllocationRepository
import scala.concurrent.Future
@Singleton
class AssessorAllocationGenerator @Inject() (assessorAllocationRepository: AssessorAllocationRepository) {
// NOTE(review): uses the global ExecutionContext; consider injecting an EC so callers control threading.
import scala.concurrent.ExecutionContext.Implicits.global
/** Persists the assessor allocation described by `createData` and echoes it back in a response.
 *  @param generationId id correlating this generated record with its test-data batch
 *  @param createData   test-data description convertible to an AssessorAllocation
 */
def generate(generationId: Int, createData: CreateAssessorAllocationData): Future[CreateAssessorAllocationResponse] = {
val assessorAllocation = createData.toAssessorAllocation
assessorAllocationRepository.save(List(assessorAllocation)).map { _ => CreateAssessorAllocationResponse(generationId, createData) }
}
}
| hmrc/fset-faststream | app/services/testdata/allocation/AssessorAllocationGenerator.scala | Scala | apache-2.0 | 1,386 |
package org.jetbrains.plugins.scala
package worksheet.ui
import java.util
import com.intellij.openapi.editor.impl.{EditorImpl, FoldingModelImpl}
import com.intellij.openapi.editor.{Editor, FoldRegion}
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.newvfs.FileAttribute
import com.intellij.psi.PsiFile
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.worksheet.processor.FileAttributeUtilCache
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
/**
 * User: Dmitry.Naydanov
 * Date: 10.04.14.
 *
 * Keeps the worksheet viewer's fold regions in sync with the original (left) editor:
 * remembers every fold region shown in the right-hand output editor, how many output
 * lines each one hides, and translates line offsets between the two editors.
 */
class WorksheetFoldGroup(private val viewerEditor: Editor, private val originalEditor: Editor, project: Project,
private val splitter: WorksheetDiffSplitters.SimpleWorksheetSplitter) {
private val doc = originalEditor.getDocument
// All known fold regions, in creation order.
private val regions = mutable.ArrayBuffer[FoldRegionInfo]()
// left-editor line -> accumulated output-line offset contributed by expanded regions above it.
private val unfolded = new util.TreeMap[Int, Int]()
// Translates a left-editor offset into the matching right-editor offset.
// NOTE(review): `floorKey` returns a null Integer when no entry precedes `left`; it
// unboxes to 0 and is handled by the `key == 0` branch — confirm line 0 is never a real key.
def left2rightOffset(left: Int) = {
val key: Int = unfolded floorKey left
if (key == 0) left else {
unfolded.get(key) + left
}
}
def addRegion(region: WorksheetFoldRegionDelegate, start: Int, spaces: Int, leftSideLength: Int) {
regions += FoldRegionInfo(region, region.isExpanded, start, spaces, leftSideLength)
}
// FoldRegionInfo equality is by `region` only (see equals below), so dummy counters are fine here.
def removeRegion(region: WorksheetFoldRegionDelegate) {
regions -= FoldRegionInfo(region, expanded = true, 0, 0, 0)
}
def onExpand(expandedRegion: WorksheetFoldRegionDelegate): Boolean = {
traverseAndChange(expandedRegion, expand = true)
}
def onCollapse(collapsedRegion: WorksheetFoldRegionDelegate): Boolean = {
traverseAndChange(collapsedRegion, expand = false)
}
// Exposes (startOffset, endOffset, leftStart, spaces, leftSideLength) tuples for all regions.
def getCorrespondInfo = regions map {
case FoldRegionInfo(region: WorksheetFoldRegionDelegate, _, leftStart, spaces, lsLength) =>
(region.getStartOffset, region.getEndOffset, leftStart, spaces, lsLength)
}
// Applies an expand/collapse of `target`: updates the diff splitter mapping and the
// `unfolded` index. Returns false when the viewer lacks focus or nothing had to change.
private def traverseAndChange(target: WorksheetFoldRegionDelegate, expand: Boolean): Boolean = {
if (!viewerEditor.asInstanceOf[EditorImpl].getContentComponent.hasFocus) return false
val ((fromTo, offsetsSpaces), targetInfo) = traverseRegions(target) match {
case (all, info, _) => (all.unzip, info)
}
if (targetInfo == null || targetInfo.expanded == expand) return false
if (splitter != null) splitter.update(fromTo, offsetsSpaces)
targetInfo.expanded = expand
updateChangeFolded(targetInfo, expand)
true
}
// Persistence format: fields joined with ',' within a region, regions joined with '|'.
protected def serialize() = regions map {
case FoldRegionInfo(region, expanded, trueStart, spaces, lsLength) =>
s"${region.getStartOffset},${region.getEndOffset},$expanded,$trueStart,$spaces,$lsLength"
} mkString "|"
// Inverse of serialize(): re-creates fold regions inside a batch folding operation.
protected def deserialize(elem: String) {
val folding = viewerEditor.getFoldingModel.asInstanceOf[FoldingModelImpl]
folding runBatchFoldingOperation new Runnable {
override def run() {
elem split '|' foreach {
case regionElem =>
regionElem split ',' match {
case Array(start, end, expanded, trueStart, spaces, lsLength) =>
try {
val region = new WorksheetFoldRegionDelegate (
viewerEditor,
start.toInt,
end.toInt,
trueStart.toInt,
spaces.toInt,
WorksheetFoldGroup.this,
lsLength.toInt
)
// The boolean was serialized via toString: "true".length == 4, "false".length == 5.
region.setExpanded(expanded.length == 4)
folding addFoldRegion region
} catch {
// Malformed numeric fields are silently skipped (best-effort restore).
case _: NumberFormatException =>
}
case _ =>
}
}
}
}
}
private def offset2Line(offset: Int) = doc getLineNumber offset
// Walks all regions in order, collecting the line/space mapping pairs the diff splitter
// needs and locating `target`'s info. Returns (pairs, targetInfo, accumulated spaces).
private def traverseRegions(target: WorksheetFoldRegionDelegate): (mutable.Iterable[((Int, Int), (Int, Int))], FoldRegionInfo, Int) = {
if (regions.isEmpty) return (mutable.ArrayBuffer.empty, null, 0)
def numbers(reg: FoldRegionInfo, stored: Int) =
((offset2Line(reg.trueStart) - reg.lsLength, offset2Line(reg.trueStart)),
(offset2Line(reg.trueStart) + stored, reg.spaces))
((mutable.ArrayBuffer[((Int, Int), (Int, Int))](), null: FoldRegionInfo, 0) /: regions) {
case ((res, _, ff), reg) if reg.expanded && reg.region == target => (res, reg, ff)
case ((res, _, ff), reg) if !reg.expanded && reg.region == target =>
res append numbers(reg, ff)
(res, reg, ff + reg.spaces)
case ((res, a, ff), reg) if reg.expanded && reg.region != target =>
res append numbers(reg, ff)
(res, a, ff + reg.spaces)
case (res, _) => res
}
}
// Shifts the stored offsets of every `unfolded` entry at or below `target`'s line by
// its `spaces`, adding or removing the entry for the line itself.
private def updateChangeFolded(target: FoldRegionInfo, expand: Boolean) {
val line = offset2Line(target.trueStart)
val key = unfolded floorKey line
val spaces = target.spaces
if (unfolded.get(key) == 0) {
if (expand) unfolded.put(line, spaces) else unfolded remove line
return
}
val lower = unfolded.tailMap(line).entrySet().iterator()
while (lower.hasNext) {
val t = lower.next()
unfolded.put(t.getKey, if (expand) t.getValue + spaces else t.getValue - spaces)
}
if (expand) unfolded.put(line, unfolded.get(key) + spaces) else unfolded.remove(line)
}
// Identity is the wrapped region only: the counters do not take part in equals/hashCode,
// which is what allows removeRegion to match with dummy values.
private case class FoldRegionInfo(region: FoldRegion, var expanded: Boolean, trueStart: Int, spaces: Int, lsLength: Int) {
override def equals(obj: scala.Any): Boolean = obj match {
case info: FoldRegionInfo => this.region.equals(info.region)
case _ => false
}
override def hashCode(): Int = region.hashCode()
}
}
object WorksheetFoldGroup {
// Foldings survive editor restarts via a persistent VFS file attribute.
private val WORKSHEET_PERSISTENT_FOLD_KEY = new FileAttribute("WorksheetPersistentFoldings", 1, false)
/** Serializes `group` into the persistent attribute of `file`'s virtual file (no-op if invalid). */
def save(file: ScalaFile, group: WorksheetFoldGroup) {
val virtualFile = file.getVirtualFile
if (!virtualFile.isValid) return
FileAttributeUtilCache.writeAttribute(WORKSHEET_PERSISTENT_FOLD_KEY, file, group.serialize())
}
/** Restores previously saved fold regions for `file`, if any, into a fresh group. */
def load(viewerEditor: Editor, originalEditor: Editor, project: Project,
splitter: WorksheetDiffSplitters.SimpleWorksheetSplitter, file: PsiFile) {
val bytes = FileAttributeUtilCache.readAttribute(WORKSHEET_PERSISTENT_FOLD_KEY, file)
if (bytes == null) return
// lazy: the group is only constructed if there is non-empty serialized data to apply.
lazy val group = new WorksheetFoldGroup(viewerEditor, originalEditor, project, splitter)
bytes foreach {
case nonEmpty if nonEmpty.length > 0 => group deserialize nonEmpty
case _ =>
}
}
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/worksheet/ui/WorksheetFoldGroup.scala | Scala | apache-2.0 | 6,543 |
package coffeehaus
/** Verifies CoffeeScript compilation against pre-compiled fixture files under /vanilla. */
class VanillaCompileSpec extends CoffeehausSpec {
  describe("Vanilla Compile") {
    it ("should compile vanilla coffee") {
      assert(Compile.vanilla()("alert 'hello'") ===
        Right(JsSource(file("/vanilla/basic.js"))))
    }
    it ("should compile bare coffee") {
      // bare = true compiles without the usual top-level function wrapper.
      assert(Compile.vanilla(Options(bare = true))("alert 'hello'") ===
        Right(JsSource(file("/vanilla/bare.js"))))
    }
    it ("should compile source maps") {
      // Removed an unused, machine-specific absolute path (`coffeeFile`) that
      // previously shadowed the fixture actually compiled below.
      assert(Compile.vanilla(Options(sourceMap = true, inline = Some(true)))(file("/vanilla/sourcemap.coffee")) ===
        Right(SourceMapping(file("/vanilla/sourcemap.js"), file("/vanilla/sourcemap.map"))))
    }
  }
}
| softprops/coffeehaus | src/test/scala/VanillaCompileSpec.scala | Scala | mit | 810 |
package org.jetbrains.plugins.scala.codeInspection.hashCodeUsesVar
import com.intellij.codeInspection.ProblemsHolder
import com.intellij.psi._
import com.siyeh.ig.psiutils.MethodUtils
import org.jetbrains.plugins.scala.codeInspection.AbstractInspection
import org.jetbrains.plugins.scala.lang.psi.api.ScalaRecursiveElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScReferenceExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
/**
 * Daniyar Itegulov
 * 2016-02-08
 *
 * Flags `hashCode()` implementations that read a `var` field: a hash code computed
 * from mutable state can change after insertion into a hash-based collection.
 */
class HashCodeUsesVarInspection extends AbstractInspection {
override def actionFor(implicit holder: ProblemsHolder): PartialFunction[PsiElement, Any] = {
// Only descend into methods recognized as hashCode overrides.
case hashCodeMethod: PsiMethod if MethodUtils.isHashCode(hashCodeMethod) =>
hashCodeMethod.accept(new ScalaRecursiveElementVisitor {
override def visitReferenceExpression(exp: ScReferenceExpression): Unit = {
super.visitReferenceExpression(exp)
exp.resolve() match {
case field: ScReferencePattern =>
field.nameContext match {
// Only non-local `var`s (class-level fields) are reported; local vars are fine.
case variable: ScVariable if !variable.isLocal =>
holder.registerProblem(exp, "Non-value field is accessed in 'hashCode()'")
case _ =>
}
case _ =>
}
}
})
}
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/codeInspection/hashCodeUsesVar/HashCodeUsesVarInspection.scala | Scala | apache-2.0 | 1,412 |
package org.jetbrains.sbt.project
import com.intellij.ide.projectWizard.ProjectSettingsStep
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.DependencyManagerBase.scalaLibraryDescription
import org.jetbrains.plugins.scala.ScalaVersion
import org.jetbrains.sbt.project.ProjectStructureDsl.{excluded, libraries, libraryDependencies, module, modules, project, sources, testSources, _}
import org.jetbrains.sbt.project.template.techhub.{TechHubModuleBuilder, TechHubProjectTemplate}
/**
 * Checks that projects created from the Lightbend "Hello, Scala!" starter template
 * produce the expected module/library structure for differently-cased project names.
 */
class NewLightbendStarterProjectWizardTest extends NewScalaProjectWizardTestBase with ExactMatch {
def testCreateLightbendStarterProject_LowerCaseNameWithDashes(): Unit =
runCreateLightbendStarterProjectTest("lower-case-name-with-dashes")
def testCreateLightbendStarterProject_LowerCaseName(): Unit =
runCreateLightbendStarterProjectTest("lower_case_name")
def testCreateLightbendStarterProject_UpperCaseName(): Unit =
runCreateLightbendStarterProjectTest("UpperCaseName")
/** Builds the expected project structure for `projectName` and delegates to the
 *  three-argument overload that actually drives the wizard. */
private def runCreateLightbendStarterProjectTest(
projectName: String
): Unit = {
// The smallest Lightbend template with few dependencies
val templateName = "Hello, Scala!"
val scalaVersion = ScalaVersion.fromString("2.12.10").get
//noinspection TypeAnnotation
val expectedProject = new project(projectName) {
// NOTE: we do not test path of scala-library & SDK classpath for "Hello, Scala!"
// template because it uses paths to `.sbt/boot` artifacts: `~/.sbt/boot/scala-2.12.10/lib/scala-reflect.jar`
// (because scala version in the template is the same as used by sbt itself)
lazy val scalaLibrary = new library(s"sbt: ${scalaLibraryDescription(scalaVersion)}:jar") {
scalaSdkSettings := Some(ScalaSdkAttributes(
scalaVersion.languageLevel,
classpath = None
))
}
lazy val scalaTestLibraries = Seq(
"sbt: org.scala-lang.modules:scala-xml_2.12:1.2.0:jar",
"sbt: org.scala-lang:scala-reflect:2.12.10:jar",
"sbt: org.scalactic:scalactic_2.12:3.0.8:jar",
"sbt: org.scalatest:scalatest_2.12:3.0.8:jar",
).map(new library(_))
// NOTE: actually there are much more libraries in the dependencies but we health-check just a single one
lazy val myLibraries = scalaLibrary +: scalaTestLibraries
libraries ++= myLibraries
modules := Seq(
new module(projectName) {
libraryDependencies ++= myLibraries
sources := Seq("src/main/scala")
testSources := Seq("src/test/scala")
excluded := Seq("project/target", "target")
},
// sbt always creates a second "-build" module for the project definition itself.
new module(s"$projectName-build") {
sources := Seq("")
excluded := Seq("project/target", "target")
}
)
}
runCreateLightbendStarterProjectTest(projectName, templateName, expectedProject)
}
/** Drives the new-project wizard with the given template and asserts the resulting
 *  IDEA project matches `expectedProject`. */
private def runCreateLightbendStarterProjectTest(
projectName: String,
templateName: String,
expectedProject: project
): Unit = {
val project: Project = createScalaProject(
new TechHubProjectTemplate().getName,
projectName
) {
// Select the requested Lightbend template on the wizard's settings step.
case projectSettingsStep: ProjectSettingsStep =>
val settingsStep = projectSettingsStep.getSettingsStepTyped[TechHubModuleBuilder#Step]
settingsStep.setTemplate(templateName)
case _ =>
}
assertProjectsEqual(expectedProject, project)
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/sbt/project/NewLightbendStarterProjectWizardTest.scala | Scala | apache-2.0 | 3,418 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import org.joda.time.LocalDateTime
import testkit.UnitSpec
/** Verifies that GBTimeZoneService localizes UTC instants into Europe/London time,
 *  covering both the BST (UTC+1) and GMT (UTC+0) halves of the year. */
class GBTimeZoneServiceSpec extends UnitSpec {
val service = GBTimeZoneService
"Time Zone Service (GB)" should {
"advertise 'Europe/London' as its timezone" in {
service.timeZone.getID mustBe "Europe/London"
}
"convert UTC time to GB time" in {
// UTC time which maps onto a British _SUMMER_ time (UTC+1)
val input = new LocalDateTime(2016, 3, 27, 1, 30)
val expected = new LocalDateTime(2016, 3, 27, 2, 30)
val actual = service.localize(getUtcMillis(input))
actual mustBe expected
}
"make no changes when UTC and GB are equal" in {
// UTC time which maps onto a British _WINTER_ time (UTC+0)
val input = new LocalDateTime(2016, 3, 27, 0, 30)
val expected = new LocalDateTime(2016, 3, 27, 0, 30)
val actual = service.localize(getUtcMillis(input))
actual mustBe expected
}
}
// Interprets the given wall-clock value as UTC and returns its epoch millis.
def getUtcMillis(localDateTime: LocalDateTime): Long =
localDateTime.toDateTime(org.joda.time.DateTimeZone.UTC).getMillis
}
| hmrc/fset-faststream | test/services/GBTimeZoneServiceSpec.scala | Scala | apache-2.0 | 1,691 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras
import com.intel.analytics.bigdl.dllib.nn.{Abs, MeanAbsolutePercentageCriterion}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.serializer.{ModuleLoader, ModulePersister}
import scala.util.Random
/** Compares BigDL's MeanAbsolutePercentageCriterion output and gradients against
 *  Keras' mean_absolute_percentage_error on the same random data. */
class MeanAbsolutePercentageCriterionSpec extends KerasBaseSpec {
"MeanAbsolutePercentageCriterion" should "be ok" in {
val kerasCode =
"""
|input_tensor = Input(shape=[10])
|target_tensor = Input(shape=[10])
|loss = mean_absolute_percentage_error(target_tensor, input_tensor)
|input = np.random.uniform(-1000, 1000, [2, 10])
|Y = np.random.uniform(-1000, 1000, [2, 10])
""".stripMargin
val criterion = MeanAbsolutePercentageCriterion[Float]()
checkOutputAndGradForLoss(criterion, kerasCode)
}
// Tiny predicted values exercise numerical stability of the percentage denominator.
"MeanAbsolutePercentageCriterion" should "be ok with epsilon" in {
val kerasCode =
"""
|input_tensor = Input(shape=[3])
|target_tensor = Input(shape=[3])
|loss = mean_absolute_percentage_error(target_tensor, input_tensor)
|input = np.array([[1e-07, 1e-06, 1e-08]])
|Y = np.array([[1, 2, 3]])
""".stripMargin
val criterion = MeanAbsolutePercentageCriterion[Float]()
checkOutputAndGradForLoss(criterion, kerasCode)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/keras/MeanAbsolutePercentageCriterionSpec.scala | Scala | apache-2.0 | 1,941 |
package omerxi.semantize
import java.io.File
import java.io._
trait HTML2Cells extends Utils {
/**
* convert HTML to simple rows and columns through MarkDown
* TODO : maybe replace system calls to grep with Scala regex
*
* Shells out to pandoc (HTML -> Markdown, written to `<htmlFile>.md`), then egrep-filters
* the Markdown into `<htmlFile>.txt` and returns that file. Non-.html inputs return the
* sentinel File("/tmp/null").
* NOTE(review): requires /usr/bin/pandoc and egrep on the host — TODO confirm deployment.
*/
def convertHTML2Cells(htmlFile: String): File = {
if (htmlFile.endsWith(".html")) {
val usr = "/usr/bin"
val command1 = s"""$usr/pandoc -f html -t markdown $htmlFile -o $htmlFile.md"""
val ret0 = runCommand(command1)
println(s"$htmlFile : pandoc return $ret0")
import scala.sys.process._
// Keeps indented (table-like) lines plus the "Fiche mise à jour le " footer line.
val ret =
Seq("egrep", "^ |Fiche mise à jour le ", htmlFile+".md" ) #>
new File(s"$htmlFile.txt") !
println(s"$htmlFile : egrep return $ret")
new File(s"$htmlFile.txt")
} else new File("/tmp/null")
}
/** Runs `command` synchronously via scala.sys.process and returns its exit code. */
def runCommand(command: String): Int = {
log(command)
// import scala.language.postfixOps
import scala.sys.process._
val ret: Int = command.!
ret
}
package org.denigma.kappa.notebook.views.papers
import org.denigma.binding.binders.{Events, GeneralBinder}
import org.denigma.binding.extensions._
import org.denigma.binding.views.{BindableView, CollectionMapView}
import org.denigma.controls.code.CodeBinder
import org.denigma.controls.papers._
import org.denigma.kappa.messages._
import org.denigma.kappa.notebook._
import org.denigma.kappa.notebook.actions.Commands
import org.denigma.kappa.notebook.circuits.PaperCircuit
import org.denigma.kappa.notebook.parsers.PaperSelection
import org.denigma.kappa.notebook.views.annotations.CommentInserter
import org.denigma.kappa.notebook.views.common.FileTabHeaders
import org.denigma.kappa.notebook.views.editor.KappaCursor
import org.denigma.kappa.parsers.AST
import org.scalajs.dom
import org.scalajs.dom.raw._
import rx.Ctx.Owner.Unsafe.Unsafe
import rx._
import scala.collection.immutable._
import scala.concurrent.duration._
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.util.{Failure, Success}
/**
* This class manages papers
* @param elem Element to which view is bound to
* @param kappaCursor is used to suggest insertions of comments
*/
class PapersView(val elem: Element,
val paperCircuit: PaperCircuit,
val kappaCursor: Var[KappaCursor]
) extends BindableView
with CollectionMapView with CommentInserter {
// Current annotation selections inside the open paper; head is the active one.
val paperSelections: Var[List[PaperSelection]] = Var(Nil)
val currentSelection = paperSelections.map(s => s.headOption)
// URI of the currently displayed paper, shared with the circuit.
val paperURI: Var[String] = paperCircuit.paperURI
// All loaded papers keyed by URI (the CollectionMapView item source).
val items: Var[Map[String, Paper]] = paperCircuit.papers
val empty = items.map(its=>its.isEmpty)
override type Key = String
override type Value = Paper
override type ItemView = PublicationView
val headers = itemViews.map(its=> its.values.map(_.id).toList) // TODO: fix to normal list
// Outgoing port: navigation requests back to the code source, wrapped for animation.
val goTo = paperCircuit.intoOutoingPort[Go.ToSource, Animate](Go.ToSource.empty, true){ v=> Animate(v, true) }
val toSource = Var(Events.createMouseEvent())
// A click on "to source" jumps to the code location of the active selection, if any.
toSource.onChange{
ev => currentSelection.now match {
case Some(s) => goTo() = Go.ToSource(AST.IRI(codeFile.now), lineNumber.now, lineNumber.now)
case None =>
}
}
// Raw DOM selection, refreshed from the browser's selectionchange events.
val selectionOpt: Var[Option[Selection]] = Var(None)
// Reacts to circuit requests to highlight a selection inside a paper view:
// `exc` (exclusive) replaces existing selections instead of adding to them.
paperCircuit.paperSelectionOpt.onChange{ case Some(Go.ToPaperSelection(selection, exc)) =>
itemViews.now.values.foreach(p => p.selections() = Set.empty) //TODO: fix this ugly cleaning workaround
itemViews.now.get(paperURI.now) match {
case Some(v) =>
v.selections() = if(exc) Set(selection) else v.selections.now + selection
additionalComment() = ""
v.scrollTo(selection, 5, 300 millis) //scrolls to loaded paper
case None => dom.console.error(s"Paper URI for ${selection.label} does not exist")
}
case None => itemViews.now.values.foreach(p => p.selections() = Set.empty) //TODO: fix this ugly cleaning workaround
}
// Per-paper, per-page text-layer selections derived from the DOM selection.
lazy val domSelections: Var[Map[Item, (Map[Int, List[TextLayerSelection]])]] = Var(Map.empty[Item, (Map[Int, List[TextLayerSelection]])])
// Wires DOM selection tracking on top of the base view's subscriptions; selection
// changes are debounced (900 ms) before being resolved against each paper's text layer.
override protected def subscribeUpdates(): Unit =
{
super.subscribeUpdates()
selectionOpt.afterLastChange(900 millis){
case Some(sel) =>
val sels: Map[Item, Map[Int, List[TextLayerSelection]]] = itemViews.now.map{ case (item, child) => (item, child.select(sel)) }
domSelections() = sels
case None =>
}
dom.document.addEventListener("selectionchange", onSelectionChange _)
}
// True when either end of the selection lies inside a PDF text layer element.
protected def insideTextLayer(selection: Selection) =
selection.anchorNode.insidePartial{ case el: HTMLElement if el.classList.contains("textLayer") || el.classList.contains("textlayer")=>} ||
selection.focusNode.insidePartial{ case el: HTMLElement if el.classList.contains("textLayer") || el.classList.contains("textlayer")=>}
// Only selections made inside a paper's text layer are propagated to selectionOpt.
protected def onSelectionChange(event: Event) = {
val selection: Selection = dom.window.getSelection()
if (insideTextLayer(selection)) {
selectionOpt.Internal.value = Some(selection)
selectionOpt.recalc()
}
}
// Renders the chosen text-layer selections as annotation comment lines, one
// "#^ ... ." triple-like line per selection, optionally carrying the user's comment text.
protected def makeComment(chosen: Map[Item, (Map[Int, List[TextLayerSelection]])], addComment: String) = {
val com = if(addComment=="") " " else ":comment \\""+ addComment +"\\"; "
chosen.foldLeft(""){
case (acc, (item, mp))=>
val sstr = mp.foldLeft(""){
case (a, (num, list)) =>
val str = list.foldLeft(""){
case (aa, s) =>
aa + s"#^${com}:in_paper ${toURI(item)}; :on_page ${num}; :from_chunk ${s.fromChunk}; :to_chunk ${s.toChunk}; :from_token ${s.fromToken}; :to_token ${s.toToken} .\\n"
}
a + str
}
acc + sstr
}
}
// Reactive comment string rebuilt from the DOM selections and the free-text comment.
override lazy val comment: Rx[String] = Rx{ //TODO: fix this bad unoptimized code
val chosen = domSelections()
val com = additionalComment()
makeComment(chosen, com)
}
lazy val hasComment: Rx[Boolean] = comment.map(c => c != "")
// Creates the per-paper child view; the DOM element id is the paper's name.
override def newItemView(item: Key, paper: Paper): PublicationView = this.constructItemView(item){
case (el, params)=>
el.id = paper.name
val v = new PublicationView(el, paperURI, Var(paper)).withBinder(v=>new CodeBinder(v))
v
}
// Registers the tab-header strip (truncates displayed file names to 30 chars).
override lazy val injector = defaultInjector
.register("headers")((el, args) => new FileTabHeaders(el, headers, paperCircuit.input, paperURI)(FileTabHeaders.prettyPath2Name(30)).withBinder(new GeneralBinder(_)))
// Pushes an updated Paper value into an existing child view.
override def updateView(view: PublicationView, key: Key, old: Paper, current: Paper): Unit = {
view.paper() = current
}
} | antonkulaga/kappa-notebook | app/js/src/main/scala/org/denigma/kappa/notebook/views/papers/PapersView.scala | Scala | mpl-2.0 | 5,618 |
package scala.util
/** A utility object to support command line parsing for @main methods */
/** A utility object to support command line parsing for @main methods */
object CommandLineParser {

  /** An exception raised for an illegal command line
   *  @param idx The index of the argument that's faulty (starting from 0)
   *  @param msg The error message
   */
  class ParseError(val idx: Int, val msg: String) extends Exception

  /** Parse command line argument `s`, which has index `n`, as a value of type `T`
   *  @throws ParseError if argument cannot be converted to type `T`.
   */
  def parseString[T](str: String, n: Int)(using fs: FromString[T]): T = {
    try fs.fromString(str)
    catch {
      // Preserve the faulty argument's index so showError can point at it.
      case ex: IllegalArgumentException => throw ParseError(n, ex.toString)
    }
  }

  /** Parse `n`'th argument in `args` (counting from 0) as a value of type `T`
   *  @throws ParseError if argument does not exist or cannot be converted to type `T`.
   */
  def parseArgument[T](args: Array[String], n: Int)(using fs: FromString[T]): T =
    if n < args.length then parseString(args(n), n)
    else throw ParseError(n, "more arguments expected")

  /** Parse all arguments from `n`'th one (counting from 0) as a list of values of type `T`
   *  @throws ParseError if some of the arguments cannot be converted to type `T`.
   */
  def parseRemainingArguments[T](args: Array[String], n: Int)(using fs: FromString[T]): List[T] =
    // Iterator-based accumulation: the previous implementation recursed once per
    // argument without being tail-recursive, which could overflow the stack on
    // very long command lines.
    (n until args.length).iterator.map(i => parseString(args(i), i)).toList

  /** Print error message explaining given ParserError */
  def showError(err: ParseError): Unit = {
    val where =
      if err.idx == 0 then ""
      else if err.idx == 1 then " after first argument"
      else s" after ${err.idx} arguments"
    println(s"Illegal command line$where: ${err.msg}")
  }

  /** Type class describing how to turn a command line string into a `T`. */
  trait FromString[T] {
    /** Can throw java.lang.IllegalArgumentException */
    def fromString(s: String): T

    /** Like `fromString` but reports failure as `None` instead of throwing. */
    def fromStringOption(s: String): Option[T] =
      try Some(fromString(s))
      catch {
        case ex: IllegalArgumentException => None
      }
  }

  object FromString {
    given FromString[String] with
      def fromString(s: String) = s

    given FromString[Boolean] with
      def fromString(s: String) = s.toBoolean

    given FromString[Byte] with
      def fromString(s: String) = s.toByte

    given FromString[Short] with
      def fromString(s: String) = s.toShort

    given FromString[Int] with
      def fromString(s: String) = s.toInt

    given FromString[Long] with
      def fromString(s: String) = s.toLong

    given FromString[Float] with
      def fromString(s: String) = s.toFloat

    given FromString[Double] with
      def fromString(s: String) = s.toDouble
  }
}
| dotty-staging/dotty | library/src/scala/util/CommandLineParser.scala | Scala | apache-2.0 | 2,691 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
// GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp.
package scala
/** A function of 2 parameters.
*
* In the following example, the definition of `max` is
* shorthand, conceptually, for the anonymous class definition
* `anonfun2`, although the implementation details of how the
* function value is constructed may differ:
*
* {{{
* object Main extends App {
* val max = (x: Int, y: Int) => if (x < y) y else x
*
* val anonfun2 = new Function2[Int, Int, Int] {
* def apply(x: Int, y: Int): Int = if (x < y) y else x
* }
* assert(max(0, 1) == anonfun2(0, 1))
* }
* }}}
*/
trait Function2[@specialized(Specializable.Args) -T1, @specialized(Specializable.Args) -T2, @specialized(Specializable.Return) +R] extends AnyRef { self =>
/** Apply the body of this function to the arguments.
* @return the result of function application.
*/
def apply(v1: T1, v2: T2): R
/** Creates a curried version of this function.
*
* @return a function `f` such that `f(x1)(x2) == apply(x1, x2)`
*/
// @unspecialized: the returned value is a generic function, so no specialized
// variants are generated for this derived method.
@annotation.unspecialized def curried: T1 => T2 => R = {
(x1: T1) => (x2: T2) => apply(x1, x2)
}
/** Creates a tupled version of this function: instead of 2 arguments,
* it accepts a single [[scala.Tuple2]] argument.
*
* @return a function `f` such that `f((x1, x2)) == f(Tuple2(x1, x2)) == apply(x1, x2)`
*/
@annotation.unspecialized def tupled: ((T1, T2)) => R = {
case ((x1, x2)) => apply(x1, x2)
}
override def toString(): String = "<function2>"
}
| scala/scala | src/library/scala/Function2.scala | Scala | apache-2.0 | 1,842 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.data.orm.tool
import org.beangle.commons.collection.Collections
import org.beangle.commons.io.Files./
import org.beangle.commons.io.{IOs, ResourcePatternResolver}
import org.beangle.commons.lang.{Charsets, Locales, Strings, SystemInfo}
import org.beangle.commons.logging.Logging
import org.beangle.data.jdbc.engine.{Engine, Engines}
import org.beangle.data.jdbc.meta._
import org.beangle.data.orm.Mappings
import java.io.{File, FileWriter}
import java.util.Locale
/**
* Generate DDL and Sequences and Comments
*/
object DdlGenerator {
/** Entry point. Usage: dialect(s) [outputDir] [locale]; comma-separated dialects
 *  each get their own sub-directory of DDL files plus a combined warnings.txt. */
def main(args: Array[String]): Unit = {
// NOTE(review): this guard exits unless all 3 args are given, which makes the
// dir/locale defaults below unreachable — confirm whether `args.length < 1` was intended.
if (args.length < 3) {
System.out.println("Usage: DdlGenerator PostgreSQL /tmp zh_CN")
return
}
var dir = SystemInfo.tmpDir
if (args.length > 1) dir = args(1)
var locale = Locale.getDefault
if (args.length > 2) locale = Locales.toLocale(args(2))
val dialectName = args(0)
val warnings = new collection.mutable.ListBuffer[String]
// One output sub-directory per dialect, e.g. <dir>/postgresql.
Strings.split(dialectName) foreach { d =>
warnings ++= gen(d, dir + / + d.toLowerCase, locale)
}
// Deduplicate and sort warnings from all dialects before writing.
val w = warnings.toSet.toBuffer.sorted
writeTo(dir, "warnings.txt", w)
}
/** Generates the full DDL file set for one dialect into `dir`; returns warnings. */
private def gen(dialect: String, dir: String, locale: Locale): List[String] = {
val target = new File(dir)
target.mkdirs()
if (!target.exists()) {
println("Cannot makedir " + target.getAbsolutePath)
return List.empty
}
val engine = Engines.forName(dialect)
// Auto-bind all ORM mappings found on the classpath into an in-memory database model.
val ormLocations = ResourcePatternResolver.getResources("classpath*:META-INF/beangle/orm.xml")
val database = new Database(engine)
val version = System.getProperty("database.version")
if (Strings.isNotBlank(version)) {
database.version = version
}
val mappings = new Mappings(database, ormLocations)
mappings.locale = locale
mappings.autobind()
val scripts = new SchemaExporter(mappings, engine).generate()
//export to files
// Numbered prefixes fix the execution order of the generated scripts.
writeTo(dir, "0-schemas.sql", scripts.schemas)
writeTo(dir, "1-tables.sql", scripts.tables)
writeTo(dir, "2-keys.sql", scripts.keys)
writeTo(dir, "3-indices.sql", scripts.indices)
writeTo(dir, "4-constraints.sql", scripts.constraints)
writeTo(dir, "5-sequences.sql", scripts.sequences)
writeTo(dir, "6-comments.sql", scripts.comments)
writeLinesTo(dir, "7-auxiliaries.sql", scripts.auxiliaries)
writeLinesTo(dir, "database.xml", List(Serializer.toXml(database)))
scripts.warnings
}
/** Writes raw contents verbatim (no statement terminators); skips empty input. */
private def writeLinesTo(dir: String, file: String, contents: List[String]): Unit = {
if (contents.nonEmpty) {
val writer = new FileWriter(dir + "/" + file, Charsets.UTF_8, false)
contents foreach { c =>
if (null != c && c.nonEmpty) {
writer.write(c)
}
}
writer.flush()
writer.close()
}
}
/** Writes one SQL statement per entry, terminated by ";\n"; deletes the file when
 *  there is nothing to write so stale output never survives a re-run. */
private def writeTo(dir: String, file: String, contents: collection.Seq[String]): Unit = {
if (null != contents && contents.nonEmpty) {
val writer = new FileWriter(dir + "/" + file, Charsets.UTF_8, false)
contents foreach { c =>
writer.write(c)
writer.write(";\n")
}
writer.flush()
writer.close()
} else {
new File(dir + "/" + file).delete()
}
}
}
class SchemaExporter(mappings: Mappings, engine: Engine) extends Logging {
// Accumulators for each category of generated DDL, filled by generateTableSql.
private val schemas = new collection.mutable.ListBuffer[String]
private val tables = new collection.mutable.ListBuffer[String]
private val sequences = new collection.mutable.ListBuffer[String]
private val keys = new collection.mutable.ListBuffer[String]
private val comments = new collection.mutable.ListBuffer[String]
private val constraints = new collection.mutable.ListBuffer[String]
private val indexes = new collection.mutable.ListBuffer[String]
private val warnings = new collection.mutable.ListBuffer[String]
// Guards against emitting DDL twice for the same table.
private val processed = new collection.mutable.HashSet[Table]
def generate(): DBScripts = {
val database = mappings.database
database.schemas.values foreach {
schema => schema.tables.values foreach generateTableSql
}
val scripts = new DBScripts()
val uncommentLines = comments.count(_.contains("?"))
if (uncommentLines > 0) {
warnings += s"${engine.name}:find ${uncommentLines} uncomment lines"
}
if (database.hasQuotedIdentifier) {
warnings += s"${engine.name}:find quoted identifiers"
}
schemas ++= database.schemas.keys.filter(i => i.value.length > 0).map(s => s"create schema $s")
scripts.schemas = schemas.sorted.toList
scripts.comments = comments.toSet.toList.sorted
scripts.tables = tables.sorted.toList
scripts.keys = keys.sorted.toList
scripts.indices = indexes.sorted.map(x => Strings.substringAfter(x, "--")).toList
scripts.sequences = sequences.sorted.toList
scripts.constraints = constraints.sorted.toList
val auxiliaries = Collections.newBuffer[String]
val dialectShortName = engine.getClass.getSimpleName.toLowerCase
ResourcePatternResolver.getResources(s"classpath*:META-INF/beangle/ddl/$dialectShortName/*.sql") foreach { r =>
auxiliaries += IOs.readString(r.openStream())
}
scripts.auxiliaries = auxiliaries.toList
scripts.warnings = warnings.toList
scripts
}
private def generateTableSql(table: Table): Unit = {
if (processed.contains(table)) return
processed.add(table)
checkNameLength(table.schema.name.value, table.name)
comments ++= engine.commentsOnTable(table, true)
tables += engine.createTable(table)
table.primaryKey foreach { pk =>
checkNameLength(table.qualifiedName, pk.name)
keys += engine.alterTableAddPrimaryKey(table, pk)
}
table.uniqueKeys foreach { uk =>
checkNameLength(table.qualifiedName, uk.name)
keys += engine.alterTableAddUnique(uk)
}
table.indexes foreach { idx =>
checkNameLength(idx.literalName, idx.name)
//for order by table
indexes += table.qualifiedName + "--" + engine.createIndex(idx)
}
table.foreignKeys foreach { fk =>
constraints += engine.alterTableAddForeignKey(fk)
}
}
def checkNameLength(owner: String, i: Identifier): Unit = {
if (i.value.length > engine.maxIdentifierLength) {
warnings += s"${engine.name}:${owner}.${i.value}'s length is ${i.value.length},greate than ${engine.maxIdentifierLength}"
}
}
}
| beangle/data | orm/src/main/scala/org/beangle/data/orm/tool/DdlGenerator.scala | Scala | lgpl-3.0 | 7,084 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fs.mount
import slamdata.Predef._
import quasar.connector.EnvironmentError
import quasar.fp.ski.κ
import quasar.fs.{BackendEffect, FileSystemType}
import scala.StringContext
import scalaz._
import scalaz.syntax.either._
import scalaz.syntax.monadError._
import BackendDef._
/** A partial definition of a filesystem backend: `run` yields `None` when this
 *  definition does not support the given (filesystem type, URI) configuration.
 */
final case class BackendDef[F[_]](run: FsCfg => Option[DefErrT[F, DefinitionResult[F]]]) {

  /** Runs this definition for `typ`/`uri`; when no definition matches, fails
   *  with a `DefinitionError` describing the unsupported filesystem type.
   */
  def apply(typ: FileSystemType, uri: ConnectionUri)(implicit F: Monad[F]): DefErrT[F, DefinitionResult[F]] =
    run((typ, uri)).getOrElse(NonEmptyList(
      s"Unsupported filesystem type: ${typ.value}, are you sure you enabled the appropriate plugin?"
    ).left[EnvironmentError].raiseError[DefErrT[F, ?], DefinitionResult[F]])

  /** Tries `this` first, falling back to `other` when `this` yields no result. */
  def orElse(other: => BackendDef[F]): BackendDef[F] =
    BackendDef(cfg => run(cfg) orElse other.run(cfg))

  /** Changes the effect type via the natural transformation `f`, translating
   *  both the outer EitherT effect and the resulting backend interpreter.
   */
  def translate[G[_]: Functor](f: F ~> G): BackendDef[G] =
    BackendDef(c => run(c).map(r => EitherT(f(r.run)).map(_ translate f)))
}
object BackendDef {
  /** A backend configuration: filesystem type plus connection URI. */
  type FsCfg = (FileSystemType, ConnectionUri)

  /** Reasons why the configuration is invalid or an environment error. */
  // fix: restore the scalaz disjunction `\/` (the source was garbled as `\\/`,
  // which is not a valid Scala type)
  type DefinitionError = NonEmptyList[String] \/ EnvironmentError

  /** Effect with definition errors on the left. */
  type DefErrT[F[_], A] = EitherT[F, DefinitionError, A]

  /** A materialized backend: an interpreter for BackendEffect plus a cleanup action. */
  final case class DefinitionResult[F[_]](run: BackendEffect ~> F, close: F[Unit]) {
    /** Changes the effect type via the natural transformation `f`. */
    def translate[G[_]](f: F ~> G): DefinitionResult[G] =
      DefinitionResult(f compose run, f(close))
  }

  /** Lifts a partial function over configurations into a BackendDef. */
  def fromPF[F[_]](
    pf: PartialFunction[FsCfg, DefErrT[F, DefinitionResult[F]]]
  ): BackendDef[F] =
    BackendDef(pf.lift)

  /** Monoid: `zero` supports nothing; `append` tries definitions left to right. */
  implicit def backendDefMonoid[F[_]]: Monoid[BackendDef[F]] =
    new Monoid[BackendDef[F]] {
      def zero = BackendDef(κ(None))
      def append(d1: BackendDef[F], d2: => BackendDef[F]) = d1 orElse d2
    }
}
| jedesah/Quasar | connector/src/main/scala/quasar/fs/mount/BackendDef.scala | Scala | apache-2.0 | 2,389 |
package client
import js.Dynamic.{ global => g }
/** Minimal Scala.js demo entry point. */
object HelloWorld {
  /** Logs to the console, pops an alert, then appends an "It works!"
   *  paragraph to the #playground element.
   */
  def main(): Unit = {
    g.console.log("Hello")
    g.alert("Hey!")
    val doc = g.document
    val para = doc.createElement("p")
    para.updateDynamic("innerHTML")("<strong>It works!</strong>")
    doc.getElementById("playground").appendChild(para)
  }
}
| TheBizzle/ScalaJS-Sandbox | assets/javascripts/HelloWorld.scala | Scala | bsd-3-clause | 335 |
package com.arcusys.valamis.web.servlet.lesson
import javax.servlet.http.HttpServletResponse
import com.arcusys.learn.liferay.constants.QueryUtilHelper
import com.arcusys.learn.liferay.services.GroupLocalServiceHelper
import com.arcusys.valamis.course.CourseService
import com.arcusys.valamis.lesson.model._
import com.arcusys.valamis.lesson.scorm.service.ActivityServiceContract
import com.arcusys.valamis.lesson.service._
import com.arcusys.valamis.member.model.MemberTypes
import com.arcusys.valamis.ratings.RatingService
import com.arcusys.valamis.tag.TagService
import com.arcusys.valamis.util.serialization.JsonHelper.StringOpts
import com.arcusys.valamis.web.portlet.util.PlayerPortletPreferences
import com.arcusys.valamis.web.servlet.base.BaseJsonApiController
import com.arcusys.valamis.web.servlet.lesson.serializer.LessonInfoSerializer
import com.arcusys.valamis.web.servlet.user.UserResponse
import org.json4s.ext.{DateTimeSerializer, EnumNameSerializer}
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.servlet.ServletBase
/** JSON servlet exposing lesson ("package") management endpoints: listing and
 *  filtering lessons, composing lesson players, visibility/limits, member
 *  management, tags, ratings and ordering. Most routes on "/packages" are
 *  dispatched on the `action` request parameter.
 */
class LessonServlet
  extends BaseJsonApiController
  with PackagePolicy
  with ServletBase {

  // Request wrapper and injected services (lazy: resolved on first use).
  lazy val req = LessonRequest(this)
  lazy val tagService = inject[TagService[Lesson]]
  lazy val lessonService = inject[LessonService]
  lazy val lessonViewersService = inject[LessonMembersService]
  lazy val lessonRatingService = inject[RatingService[Lesson]]
  lazy val lessonPlayerService = inject[LessonPlayerService]
  lazy val lessonLimitService = inject[LessonLimitService]
  lazy val lessonAttemptService = inject[UserLessonResultService]
  lazy val courseService = inject[CourseService]
  lazy val scormActivityService = inject[ActivityServiceContract]

  // JSON formats: lesson info serializer, LessonType enum by name, Joda dates.
  implicit override val jsonFormats: Formats =
    DefaultFormats + new LessonInfoSerializer + new EnumNameSerializer(LessonType) + DateTimeSerializer

  // Lessons visible to the current user in a player, each with its suspended
  // attempt id (SCORM only; Tincan lessons have none).
  get("/packages(/)", request.getParameter("action") == "VISIBLE") {
    def getSuspendedId(userId: Long, lesson: Lesson): Option[String] = {
      lesson.lessonType match {
        case LessonType.Scorm => scormActivityService.getSuspendedId(userId, lesson.id)
        case LessonType.Tincan => None
      }
    }
    // NOTE(review): companyId is computed but not passed to the service call below.
    val companyId = req.companyId
    val courseId = req.courseIdRequired
    val playerId = req.playerId
    val user = getUser
    val tagId = req.tagId
    val titleFilter = req.textFilter
    lessonPlayerService.getForPlayer(courseId, playerId, user, titleFilter, tagId, req.ascending, req.sortBy, req.skipTake, getSuspendedId)
  }

  // All lessons currently assigned to a player.
  //TODO: rename action
  get("/packages(/)", request.getParameter("action") == "ALL_FOR_PLAYER") {
    lessonPlayerService.getAll(
      req.playerId,
      req.courseIdRequired
    )
  }

  // Visible lessons from other (non-private) sites that could still be added
  // to the player; the player's own course is excluded from the sources.
  get("/packages(/)", request.getParameter("action") == "ALL_AVAILABLE_FOR_PLAYER") {
    val courseId = req.courseIdRequired
    val sourceCourseIds = GroupLocalServiceHelper
      .searchExceptPrivateSites(getCompanyId, QueryUtilHelper.ALL_POS, QueryUtilHelper.ALL_POS)
      .map(i => i.getGroupId)
      .filterNot(_ == courseId)
    val filter = LessonFilter(
      Nil,
      req.lessonType,
      onlyVisible = true,
      req.textFilter,
      req.tagId
    )
    lessonPlayerService.getAvailableToAdd(
      req.playerId,
      req.courseIdRequired,
      sourceCourseIds,
      filter,
      req.ascending,
      req.skipTake
    )
  }

  // All lessons, either instance-wide (every non-private site) or for one course.
  get("/packages(/)", request.getParameter("action") == "ALL") {
    val courseIds = if(req.instanceScope) {
      GroupLocalServiceHelper.searchIdsExceptPrivateSites(req.companyId)
    } else {
      Seq(req.courseIdRequired)
    }
    val filter = LessonFilter(
      courseIds,
      req.lessonType,
      title = req.textFilter,
      tagId = req.tagId
    )
    lessonService.getAll(
      filter,
      req.ascending,
      req.skipTake
    )
  }

  // Serves the lesson's logo as a PNG; 404 when the lesson has no logo.
  get("/packages/:id/logo") {
    val content = lessonService.getLogo(req.id)
      .getOrElse(halt(HttpServletResponse.SC_NOT_FOUND, s"Package with id: ${req.id} doesn't exist"))
    response.reset()
    response.setStatus(HttpServletResponse.SC_OK)
    response.setContentType("image/png")
    content
  }

  // Assigns the given lessons to a player.
  post("/packages(/)", request.getParameter("action") == "ADD_LESSONS_TO_PLAYER") {
    val lessonIds = req.ids
    val playerId = req.playerId
    lessonPlayerService.addLessonsToPlayer(playerId, lessonIds)
  }

  // Removes one lesson from a player.
  post("/packages(/)", request.getParameter("action") == "DELETE_LESSON_FROM_PLAYER") {
    val lessonId = req.id
    val playerId = req.playerId
    lessonPlayerService.deleteLessonFromPlayer(playerId, lessonId)
  }

  // Stores (or clears, when no id is given) the player's default lesson.
  post("/packages(/)", request.getParameter("action") == "SET_PLAYER_DEFAULT") {
    val lessonId = req.idOption
    val playerId = req.playerId
    PlayerPortletPreferences(playerId, getCompanyId)
      .setDefaultLessonId(lessonId)
  }

  // Updates a lesson's metadata, attempt limits and tags in one call.
  post("/packages(/)", request.getParameter("action") == "UPDATE") {
    val lessonId = req.id
    val title = req.title
    val description = req.description.getOrElse("")
    val isVisible = req.isVisible
    val beginDate = req.beginDate
    val endDate = req.endDate
    val limit = new LessonLimit(
      lessonId,
      req.passingLimit,
      req.rerunInterval,
      req.rerunIntervalType
    )
    // Tags are created on the fly if they do not exist yet.
    val tags = tagService.getOrCreateTagIds(req.tags, getCompanyId)
    lessonLimitService.setLimit(limit)
    lessonService.update(lessonId, title, description, isVisible, beginDate, endDate, tags, req.requiredReview, req.scoreLimit)
  }

  // Toggles only the lesson's visibility flag.
  post("/packages(/)", request.getParameter("action") == "UPDATE_VISIBLE") {
    lessonService.updateVisibility(req.id, req.isVisible)
  }

  // Adds viewers (users/groups/etc., per viewerType) to a lesson.
  post("/packages(/)", request.getParameter("action") == "ADD_MEMBERS") {
    lessonViewersService.addMembers(req.id, req.viewerIds, req.viewerType)
  }

  // Removes viewers from a lesson.
  post("/packages(/)", request.getParameter("action") == "REMOVE_MEMBERS") {
    lessonViewersService.removeMembers(req.id, req.viewerIds, req.viewerType)
  }

  // Current members of a lesson; user members get wrapped in UserResponse.
  get("/packages(/)", request.getParameter("action") == "MEMBERS") {
    req.viewerType match {
      case MemberTypes.User =>
        lessonViewersService.getUserMembers(req.id, req.textFilter, req.ascending, req.skipTake, req.organizationId)
          .map(u => new UserResponse(u))
      case _ =>
        lessonViewersService.getMembers(req.id, req.viewerType, req.textFilter, req.ascending, req.skipTake)
    }
  }

  // Candidates that could still be added as members of a lesson.
  get("/packages(/)", request.getParameter("action") == "AVAILABLE_MEMBERS") {
    req.viewerType match {
      case MemberTypes.User =>
        lessonViewersService.getAvailableUserMembers(req.id, req.textFilter, req.ascending, req.skipTake, req.organizationId)
          .map(u => new UserResponse(u))
      case _ =>
        lessonViewersService.getAvailableMembers(req.id, req.viewerType, req.textFilter, req.ascending, req.skipTake)
    }
  }

  // Bulk metadata update; the payload is a JSON array of LessonInfo.
  post("/packages(/)", request.getParameter("action") == "UPDATEPACKAGES") {
    val lessonsInfo = req.packages.parseTo[Seq[LessonInfo]]
    lessonService.updateLessonsInfo(lessonsInfo)
  }

  // Deletes a single lesson.
  delete("/packages/:id(/)") {
    lessonService.delete(req.id)
  }

  // Deletes several lessons in one request.
  post("/packages(/)", request.getParameter("action") == "REMOVEPACKAGES") {
    req.lessonIds foreach lessonService.delete
  }

  // Tags scoped to a player, a course, or the whole instance, in that order
  // of precedence depending on which parameters are present.
  //TODO: move part to player
  get("/packages/tags(/)") {
    val playerId = req.playerIdOption
    val courseId = req.courseId
    if (playerId.isDefined && courseId.isDefined) {
      lessonPlayerService.getTagsFromPlayer(playerId.get, courseId.get)
    } else if (courseId.isDefined) {
      lessonService.getTagsFromCourse(courseId.get)
    } else {
      val courseIds = GroupLocalServiceHelper.searchIdsExceptPrivateSites(req.companyId)
      lessonService.getTagsFromCourses(courseIds)
    }
  }

  // Stores the current user's rating for a lesson.
  post("/packages/rate(/)", request.getParameter("action") == "UPDATERATING") {
    lessonRatingService.updateRating(getUserId, req.ratingScore, req.id)
  }

  // Removes the current user's rating for a lesson.
  post("/packages/rate(/)", request.getParameter("action") == "DELETERATING") {
    lessonRatingService.deleteRating(getUserId, req.id)
  }

  // Persists the display order of lessons in a player.
  post("/packages/order(/)") {
    lessonPlayerService.updateOrder(req.playerId, req.lessonIds)
  }
}
| igor-borisov/JSCORM | valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/lesson/LessonServlet.scala | Scala | gpl-3.0 | 8,077 |
package unfiltered.response
/** Responder that sets the Content-Type header on the response. */
trait BaseContentType extends Responder[Any] {
  /** Writes the computed Content-Type header onto `res`. */
  def respond(res: HttpResponse[Any]): Unit =
    res.header("Content-Type", contentType(res))

  /** The Content-Type value to emit for the given response. */
  def contentType(res: HttpResponse[Any]): String
}
/** Content type that appends the response's charset,
 *  e.g. "text/html; charset=utf-8".
 */
case class CharContentType(contentType: String) extends BaseContentType {
  def contentType(res: HttpResponse[Any]) = {
    val charset = res.charset.name.toLowerCase
    "%s; charset=%s".format(contentType, charset)
  }
}
// Common textual content types; CharContentType appends the response charset.
object CssContent extends CharContentType("text/css")
object HtmlContent extends CharContentType("text/html")
object JsContent extends CharContentType("text/javascript")
object CsvContent extends CharContentType("text/csv")
object TextXmlContent extends CharContentType("text/xml")
object PlainTextContent extends CharContentType("text/plain")
object JsonContent extends CharContentType("application/json")
object ApplicationXmlContent extends CharContentType("application/xml")
// Uses the static ContentType (no charset parameter is appended).
object FormEncodedContent extends ContentType("application/x-www-form-urlencoded")
/** Content type emitted verbatim, without a charset parameter. */
case class ContentType(val staticContentType: String)
  extends BaseContentType {
  def contentType(res: HttpResponse[Any]) = staticContentType
}
object PdfContent extends ContentType("application/pdf")
| omarkilani/unfiltered | library/src/main/scala/response/types.scala | Scala | mit | 1,186 |
package com.twitter.finagle
import scala.annotation.tailrec
import scala.collection.mutable
/**
* Stacks represent stackable elements of type T. It is assumed that
* T-typed elements can be stacked in some meaningful way; examples
* are functions (function composition) [[Filter Filters]] (chaining),
* and [[ServiceFactory ServiceFactories]] (through
* transformers). T-typed values are also meant to compose: the stack
* itself materializes into a T-typed value.
*
* Stacks are persistent, allowing for nondestructive
* transformations; they are designed to represent 'template' stacks
* which can be configured in various ways before materializing the
* stack itself.
*
* Note: Stacks are advanced and sometimes subtle. For expert use
* only!
*/
sealed trait Stack[T] {
  import Stack._

  /**
   * The head field of the Stack composes all associated metadata
   * of the topmost element of the stack.
   *
   * @note `head` does not give access to the value `T`, use `make` instead.
   * @see [[Stack.Head]]
   */
  val head: Stack.Head

  /**
   * Materialize the current stack with the given parameters,
   * producing a `T`-typed value representing the current
   * configuration.
   */
  def make(params: Params): T

  /**
   * Transform one stack to another by applying `fn` on each element;
   * the map traverses on the element produced by `fn`, not the
   * original stack.
   */
  def transform(fn: Stack[T] => Stack[T]): Stack[T] =
    fn(this) match {
      case Node(head, mk, next) => Node(head, mk, next.transform(fn))
      case leaf@Leaf(_, _) => leaf
    }

  /**
   * Insert the given [[Stackable]] before the stack elements matching
   * the argument role. If no elements match the role, then an
   * unmodified stack is returned.
   */
  def insertBefore(target: Role, insertion: Stackable[T]): Stack[T] =
    this match {
      // Insert before every matching element, so recursion continues on `next`.
      case Node(head, mk, next) if head.role == target =>
        insertion +: Node(head, mk, next.insertBefore(target, insertion))
      case Node(head, mk, next) =>
        Node(head, mk, next.insertBefore(target, insertion))
      case leaf@Leaf(_, _) => leaf
    }

  /**
   * Insert the given [[Stackable]] before the stack elements matching
   * the argument role. If no elements match the role, then an
   * unmodified stack is returned. `insertion` must conform to
   * typeclass [[CanStackFrom]].
   */
  def insertBefore[U](target: Role, insertion: U)(implicit csf: CanStackFrom[U, T]): Stack[T] =
    insertBefore(target, csf.toStackable(target, insertion))

  /**
   * Insert the given [[Stackable]] after the stack elements matching
   * the argument role. If no elements match the role, then an
   * unmodified stack is returned.
   */
  def insertAfter(target: Role, insertion: Stackable[T]): Stack[T] = transform {
    case Node(head, mk, next) if head.role == target =>
      Node(head, mk, insertion +: next)
    case stk => stk
  }

  /**
   * Insert the given [[Stackable]] after the stack elements matching
   * the argument role. If no elements match the role, then an
   * unmodified stack is returned. `insertion` must conform to
   * typeclass [[CanStackFrom]].
   */
  def insertAfter[U](target: Role, insertion: U)(implicit csf: CanStackFrom[U, T]): Stack[T] =
    insertAfter(target, csf.toStackable(target, insertion))

  /**
   * Remove all nodes in the stack that match the `target` role.
   * Leaf nodes are not removable.
   */
  def remove(target: Role): Stack[T] =
    this match {
      case Node(head, mk, next) =>
        if (head.role == target) next.remove(target)
        else Node(head, mk, next.remove(target))
      case leaf@Leaf(_, _) => leaf
    }

  /**
   * Replace any stack elements matching the argument role with a
   * given [[Stackable]]. If no elements match the role, then an
   * unmodified stack is returned.
   */
  def replace(target: Role, replacement: Stackable[T]): Stack[T] = transform {
    // cleanup: dropped the unused `n@` binder from the original pattern
    case Node(head, _, next) if head.role == target =>
      replacement +: next
    case stk => stk
  }

  /**
   * Replace any stack elements matching the argument role with a
   * given [[Stackable]]. If no elements match the role, then an
   * unmodified stack is returned. `replacement` must conform to
   * typeclass [[CanStackFrom]].
   */
  def replace[U](target: Role, replacement: U)(implicit csf: CanStackFrom[U, T]): Stack[T] =
    replace(target, csf.toStackable(target, replacement))

  /**
   * Traverse the stack, invoking `fn` on each element.
   */
  @tailrec
  final def foreach(fn: Stack[T] => Unit): Unit = {
    fn(this)
    this match {
      case Node(_, _, next) => next.foreach(fn)
      case Leaf(_, _) =>
    }
  }

  /**
   * Traverse the stack, until you find that pred has been evaluated to true.
   * If `pred` finds an element, return true, otherwise, false.
   */
  @tailrec
  final def exists(pred: Stack[T] => Boolean): Boolean = this match {
    case _ if pred(this) => true
    case Node(_, _, next) => next.exists(pred)
    case Leaf(_, _) => false
  }

  /**
   * Returns whether the stack contains a given role or not.
   */
  def contains(role: Stack.Role): Boolean = exists(_.head.role == role)

  /**
   * Enumerate each well-formed stack contained within this stack.
   */
  def tails: Iterator[Stack[T]] = {
    val buf = new mutable.ArrayBuffer[Stack[T]]
    foreach { buf += _ }
    buf.toIterator
  }

  /**
   * Produce a new stack representing the concatenation of `this`
   * with `right`. Note that this replaces the terminating element of
   * `this`.
   *
   * Alias for [[Stack.++]].
   */
  def concat(right: Stack[T]): Stack[T] =
    this ++ right

  /**
   * Produce a new stack representing the concatenation of `this`
   * with `right`. Note that this replaces the terminating element of
   * `this`.
   */
  def ++(right: Stack[T]): Stack[T] = this match {
    case Node(head, mk, left) => Node(head, mk, left++right)
    case Leaf(_, _) => right
  }

  /**
   * A copy of this Stack with `stk` prepended.
   *
   * An alias for [[Stack.+:]].
   */
  def prepend(stk: Stackable[T]): Stack[T] =
    stk +: this

  /**
   * A copy of this Stack with `stk` prepended.
   */
  def +:(stk: Stackable[T]): Stack[T] =
    stk.toStack(this)

  override def toString: String = {
    val elems = tails map {
      case Node(head, _, _) => s"Node(role = ${head.role}, description = ${head.description})"
      case Leaf(head, _) => s"Leaf(role = ${head.role}, description = ${head.description})"
    }
    // fix: join with a real newline — the source was garbled to "\\n", which
    // would print a literal backslash-n between elements
    elems mkString "\n"
  }
}
/**
* @see [[stack.nilStack]] for starting construction of an
* empty stack for [[ServiceFactory]]s.
*/
object Stack {
  /**
   * Base trait for Stack roles. A stack's role is indicative of its
   * functionality. Roles provide a way to group similarly-purposed stacks and
   * slot stack elements into specific usages.
   *
   * TODO: CSL-869
   */
  case class Role(name: String) {
    // Override `toString` to return the flat, lowercase object name for use in stats.
    private[this] lazy val _toString = name.toLowerCase
    override def toString = _toString
  }

  /**
   * Trait encompassing all associated metadata of a stack element.
   * [[Stackable Stackables]] extend this trait.
   */
  trait Head {
    /**
     * The [[Stack.Role Role]] that the element can serve.
     */
    def role: Stack.Role

    /**
     * The description of the functionality of the element.
     */
    def description: String

    /**
     * The [[Stack.Param Params]] that the element
     * is interested in.
     */
    def parameters: Seq[Stack.Param[_]]
  }

  /**
   * Nodes materialize by transforming the underlying stack in
   * some way.
   */
  case class Node[T](head: Stack.Head, mk: (Params, Stack[T]) => Stack[T], next: Stack[T])
    extends Stack[T]
  {
    def make(params: Params) = mk(params, next).make(params)
  }

  object Node {
    /**
     * A constructor for a 'simple' Node: `mk` transforms only the
     * materialized value of the rest of the stack.
     */
    def apply[T](head: Stack.Head, mk: T => T, next: Stack[T]): Node[T] =
      Node(head, (p, stk) => Leaf(head, mk(stk.make(p))), next)
  }

  /**
   * A static stack element; necessarily the last.
   */
  case class Leaf[T](head: Stack.Head, t: T) extends Stack[T] {
    def make(params: Params) = t
  }

  object Leaf {
    /**
     * If only a role is given when constructing a leaf, then the head
     * is created automatically
     */
    def apply[T](_role: Stack.Role, t: T): Leaf[T] = {
      val head = new Stack.Head {
        val role = _role
        val description = _role.toString
        val parameters = Nil
      }
      Leaf(head, t)
    }
  }

  /**
   * A typeclass representing P-typed elements, eligible as
   * parameters for stack configuration. Note that the typeclass
   * instance itself is used as the key in parameter maps; thus
   * typeclasses should be persistent:
   *
   * {{{
   * case class Multiplier(i: Int) {
   *   def mk(): (Multiplier, Stack.Param[Multiplier]) =
   *     (this, Multiplier.param)
   * }
   * object Multiplier {
   *   implicit val param = Stack.Param(Multiplier(123))
   * }
   * }}}
   *
   * The `mk()` function together with `Parameterized.configured`
   * provides a convenient Java interface.
   */
  trait Param[P] {
    def default: P
  }
  object Param {
    def apply[T](t: => T): Param[T] = new Param[T] {
      // Note, this is lazy to avoid potential failures during
      // static initialization.
      lazy val default = t
    }
  }

  /**
   * A parameter map.
   */
  trait Params extends Iterable[(Param[_], Any)] {
    /**
     * Get the current value of the P-typed parameter.
     */
    def apply[P: Param]: P

    /**
     * Returns true if there is a non-default value for
     * the P-typed parameter.
     */
    def contains[P: Param]: Boolean

    /**
     * Iterator of all `Param`s and their associated values.
     */
    def iterator: Iterator[(Param[_], Any)]

    /**
     * Produce a new parameter map, overriding any previous
     * `P`-typed value.
     */
    def +[P: Param](p: P): Params

    /**
     * Alias for [[addAll(Params)]].
     */
    def ++(ps: Params): Params =
      addAll(ps)

    /**
     * Produce a new parameter map, overriding any previously
     * mapped values.
     */
    def addAll(ps: Params): Params
  }

  object Params {
    // Map-backed implementation; absent keys fall back to the Param's default.
    private case class Prms(map: Map[Param[_], Any]) extends Params {
      def apply[P](implicit param: Param[P]): P =
        map.get(param) match {
          case Some(v) => v.asInstanceOf[P]
          case None => param.default
        }

      def contains[P](implicit param: Param[P]): Boolean =
        map.contains(param)

      def iterator: Iterator[(Param[_], Any)] =
        map.iterator

      def +[P](p: P)(implicit param: Param[P]): Params =
        copy(map + (param -> p))

      def addAll(ps: Params): Params =
        copy(map ++ ps.iterator)
    }

    /**
     * The empty parameter map.
     */
    val empty: Params = Prms(Map.empty)
  }

  /**
   * A mix-in for describing an object that is parameterized.
   */
  trait Parameterized[+T] {
    def params: Stack.Params

    def configured[P: Stack.Param](p: P): T =
      withParams(params+p)

    // Tuple overload for convenient use from Java (see Param scaladoc).
    def configured[P](psp: (P, Stack.Param[P])): T = {
      val (p, sp) = psp
      configured(p)(sp)
    }

    def withParams(ps: Stack.Params): T
  }

  /**
   * Encodes transformations for stacks of
   * [[com.twitter.finagle.ServiceFactory ServiceFactories]] of
   * arbitrary `Req` and `Rep` types. Such transformations must be
   * indifferent to these types in order to typecheck.
   */
  trait Transformer {
    def apply[Req, Rep](stack: Stack[ServiceFactory[Req, Rep]]): Stack[ServiceFactory[Req, Rep]]
  }

  trait Transformable[+T] {
    /**
     * Transform the stack using the given `Transformer`.
     */
    def transformed(t: Transformer): T
  }

  /**
   * A convenience class to construct stackable modules. This variant
   * operates over stacks and the entire parameter map. The `ModuleN` variants
   * may be more convenient for most definitions as they operate over `T` types
   * and the parameter extraction is derived from type parameters.
   *
   * {{{
   * def myNode = new Module[Int=>Int]("myelem") {
   *   val role = "Multiplier"
   *   val description = "Multiplies values by a multiplier"
   *   val parameters = Seq(implicitly[Stack.Param[Multiplier]])
   *   def make(params: Params, next: Stack[Int=>Int]): Stack[Int=>Int] = {
   *     val Multiplier(m) = params[Multiplier]
   *     if (m == 1) next // It's a no-op, skip it.
   *     else Stack.Leaf("multiply", i => next.make(params)(i)*m)
   *   }
   * }
   * }}}
   */
  abstract class Module[T] extends Stackable[T] {
    def make(params: Params, next: Stack[T]): Stack[T]
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => make(prms, next), next)
  }

  /** A module of 0 parameters. */
  abstract class Module0[T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] = Nil
    def make(next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this, make(next.make(prms))), next)
  }

  /** A module of 1 parameter. */
  abstract class Module1[P1: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] =
      Seq(implicitly[Param[P1]])
    def make(p1: P1, next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this, make(prms[P1], next.make(prms))), next)
  }

  /** A module of 2 parameters. */
  abstract class Module2[P1: Param, P2: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] =
      Seq(implicitly[Param[P1]], implicitly[Param[P2]])
    def make(p1: P1, p2: P2, next: T): T
    def toStack(next: Stack[T]) =
      Node(this, (prms, next) => Leaf(this,
        make(prms[P1], prms[P2], next.make(prms))), next)
  }

  /** A module of 3 parameters. */
  abstract class Module3[P1: Param, P2: Param, P3: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] = Seq(
      implicitly[Param[P1]],
      implicitly[Param[P2]],
      implicitly[Param[P3]])
    def make(p1: P1, p2: P2, p3: P3, next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this,
        make(prms[P1], prms[P2], prms[P3], next.make(prms))), next)
  }

  /** A module of 4 parameters. */
  abstract class Module4[P1: Param, P2: Param, P3: Param, P4: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] = Seq(
      implicitly[Param[P1]],
      implicitly[Param[P2]],
      implicitly[Param[P3]],
      implicitly[Param[P4]])
    def make(p1: P1, p2: P2, p3: P3, p4: P4, next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this,
        make(prms[P1], prms[P2], prms[P3], prms[P4], next.make(prms))), next)
  }

  /** A module of 5 parameters. */
  abstract class Module5[P1: Param, P2: Param, P3: Param, P4: Param, P5: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] = Seq(
      implicitly[Param[P1]],
      implicitly[Param[P2]],
      implicitly[Param[P3]],
      implicitly[Param[P4]],
      implicitly[Param[P5]])
    def make(p1: P1, p2: P2, p3: P3, p4: P4, p5: P5, next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this,
        make(prms[P1], prms[P2], prms[P3], prms[P4], prms[P5], next.make(prms))), next)
  }

  /** A module of 6 parameters. */
  abstract class Module6[P1: Param, P2: Param, P3: Param, P4: Param, P5: Param, P6: Param, T] extends Stackable[T] {
    final val parameters: Seq[Stack.Param[_]] = Seq(
      implicitly[Param[P1]],
      implicitly[Param[P2]],
      implicitly[Param[P3]],
      implicitly[Param[P4]],
      implicitly[Param[P5]],
      implicitly[Param[P6]])
    def make(p1: P1, p2: P2, p3: P3, p4: P4, p5: P5, p6: P6, next: T): T
    def toStack(next: Stack[T]): Stack[T] =
      Node(this, (prms, next) => Leaf(this,
        make(prms[P1], prms[P2], prms[P3], prms[P4], prms[P5], prms[P6], next.make(prms))), next)
  }
}
/**
* Produce a stack from a `T`-typed element.
*/
trait Stackable[T] extends Stack.Head {
  /** Prepends this element to `next`, yielding the new stack. */
  def toStack(next: Stack[T]): Stack[T]
}
/**
* A typeclass for "stackable" items. This is used by the
* [[StackBuilder]] to provide a convenient interface for constructing
* Stacks.
*/
@scala.annotation.implicitNotFound("${From} is not Stackable to ${To}")
trait CanStackFrom[-From, To] {
  /** Wraps `el` as a [[Stackable]] fulfilling the given `role`. */
  def toStackable(role: Stack.Role, el: From): Stackable[To]
}
object CanStackFrom {
  /** Any endofunction `T => T` can act as a stack element: it is applied
   *  to the materialized value of the rest of the stack.
   */
  implicit def fromFun[T]: CanStackFrom[T=>T, T] =
    new CanStackFrom[T=>T, T] {
      def toStackable(r: Stack.Role, fn: T => T): Stackable[T] = {
        new Stack.Module0[T] {
          val role = r
          // The role's name doubles as the description.
          val description = r.name
          def make(next: T) = fn(next)
        }
      }
    }
}
/**
* StackBuilders are imperative-style builders for Stacks. It
* maintains a stack onto which new elements can be pushed (defining
* a new stack).
*
* @see [[stack.nilStack]] for starting construction of an
* empty stack for [[ServiceFactory]]s.
*/
class StackBuilder[T](init: Stack[T]) {
  // Convenience constructor: start from a terminating leaf.
  def this(role: Stack.Role, end: T) = this(Stack.Leaf(role, end))

  // The stack built so far; each push prepends an element.
  private[this] var stack = init

  /**
   * Push the stack element `el` onto the stack; el must conform to
   * typeclass [[CanStackFrom]].
   */
  def push[U](role: Stack.Role, el: U)(implicit csf: CanStackFrom[U, T]): this.type = {
    stack = csf.toStackable(role, el) +: stack
    this
  }

  /**
   * Push a [[Stackable]] module onto the stack.
   */
  def push(module: Stackable[T]): this.type = {
    stack = module +: stack
    this
  }

  /**
   * Get the current stack as defined by the builder.
   */
  def result: Stack[T] = stack

  /**
   * Materialize the current stack: equivalent to
   * `result.make()`.
   */
  def make(params: Stack.Params): T = result.make(params)

  override def toString = s"Builder($stack)"
}
| suls/finagle | finagle-core/src/main/scala/com/twitter/finagle/Stack.scala | Scala | apache-2.0 | 17,940 |
package silky.persistence.file
import java.nio.charset.StandardCharsets._
import java.nio.file.Files._
import java.nio.file.StandardCopyOption._
import java.nio.file.StandardOpenOption._
import java.nio.file.{Files, Path}
/** Small helpers over java.nio file operations. */
object Filepath {
  /** Creates or truncates `path` and writes `content` encoded as UTF-8;
   *  returns `path`.
   */
  def save(content: String, path: Path): Path = {
    val bytes = content.getBytes(UTF_8)
    write(path, bytes, CREATE, WRITE, TRUNCATE_EXISTING)
  }

  /** Atomically moves `source` to `target`; returns `target`. */
  def move(source: Path, target: Path): Path =
    Files.move(source, target, ATOMIC_MOVE)
}
| PILTT/silky-persistence | src/main/scala/silky/persistence/file/Filepath.scala | Scala | apache-2.0 | 451 |
/*
* Copyright 2011-2018 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.nta.ntarule.rules_2_02
import java.net.URI
import scala.collection.immutable
import eu.cdevreeze.nta.common.taxonomy.Taxonomy
import eu.cdevreeze.nta.common.validator.Result
import eu.cdevreeze.nta.common.validator.TaxonomyDocumentValidator
import eu.cdevreeze.nta.common.validator.TaxonomyValidatorFactory
import eu.cdevreeze.nta.common.validator.ValidationScope
import eu.cdevreeze.nta.ntarule.NtaRuleConfigWrapper
import eu.cdevreeze.nta.ntarule.NtaRules
import eu.cdevreeze.tqa.ENames
import eu.cdevreeze.tqa.base.dom.TaxonomyDocument
import eu.cdevreeze.tqa.base.dom.XsdSchema
/**
* Validator of rule 2.02.00.08. The rule says that the schema document must have a @targetNamespace attribute.
*
* @author Chris de Vreeze
*/
final class Validator_2_02_00_08(val excludedDocumentUris: Set[URI]) extends TaxonomyDocumentValidator {

  // Rule name is derived from this validator's class name.
  def ruleName: String = NtaRules.extractRuleName(getClass)

  /** Returns a single error result when the schema document lacks a
   *  targetNamespace attribute; returns no results otherwise.
   */
  def validateDocument(
    doc: TaxonomyDocument,
    taxonomy: Taxonomy,
    validationScope: ValidationScope): immutable.IndexedSeq[Result] = {

    require(isTypeOfDocumentToValidate(doc, taxonomy), s"Document ${doc.uri} should not be validated")

    if (doc.documentElement.attributeOption(ENames.TargetNamespaceEName).isDefined) {
      immutable.IndexedSeq()
    } else {
      immutable.IndexedSeq(Result.makeErrorResult(
        ruleName,
        "no-target-namespace",
        s"Target namespace attribute required but found none in '${doc.uri}'"))
    }
  }

  // Only XML Schema documents are subject to this rule.
  def isTypeOfDocumentToValidate(doc: TaxonomyDocument, taxonomy: Taxonomy): Boolean = {
    doc.documentElement.isInstanceOf[XsdSchema]
  }
}
/** Factory for [[Validator_2_02_00_08]] instances. */
object Validator_2_02_00_08 extends TaxonomyValidatorFactory {

  type Validator = Validator_2_02_00_08
  type CfgWrapper = NtaRuleConfigWrapper

  // Rule name derived from the validator class name, mirroring the instance method.
  def ruleName: String =
    NtaRules.extractRuleName(classOf[Validator_2_02_00_08])

  /** Builds a validator, wiring in the document URIs excluded for this rule. */
  def create(configWrapper: NtaRuleConfigWrapper): Validator_2_02_00_08 = {
    val excludedUris = configWrapper.excludedDocumentUrisForRule(ruleName)
    new Validator_2_02_00_08(excludedUris)
  }
}
| dvreeze/nta | src/main/scala/eu/cdevreeze/nta/ntarule/rules_2_02/Validator_2_02_00_08.scala | Scala | apache-2.0 | 2,650 |
package cn.edu.neu.chiewen.roadDemo.road
import scala.math.{pow, sqrt}
/**
* Created by Chiewen on 2015/9/15.
*/
/** An immutable point in the 2D plane. */
case class Position(x: Double, y: Double) {

  /** Euclidean distance between this point and `other`. */
  def distanceTo(other: Position): Double = {
    val dx = x - other.x
    val dy = y - other.y
    sqrt(pow(dx, 2) + pow(dy, 2))
  }
}
| chiewen/CkNN | CkNN/src/main/scala/cn/edu/neu/chiewen/roadDemo/road/Position.scala | Scala | gpl-2.0 | 256 |
/*
* Copyright 2016-2020 47 Degrees Open Source <https://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package github4s.unit
import cats.effect.IO
import cats.syntax.either._
import github4s.GHResponse
import github4s.interpreters.ActivitiesInterpreter
import github4s.domain._
import github4s.utils.BaseSpec
class ActivitiesSpec extends BaseSpec {

  // Implicit auth token picked up by the interpreter under test.
  implicit val token = sampleToken

  "Activity.setThreadSub" should "call to httpClient.put with the right parameters" in {

    // Canned successful response the mocked http client returns.
    val response: IO[GHResponse[Subscription]] =
      IO(GHResponse(subscription.asRight, okStatusCode, Map.empty))

    val request = SubscriptionRequest(true, false)

    // The mock verifies that the interpreter issues a PUT to exactly this URL with this body.
    implicit val httpClientMock = httpClientMockPut[SubscriptionRequest, Subscription](
      url = s"notifications/threads/$validThreadId/subscription",
      req = request,
      response = response
    )

    val activities = new ActivitiesInterpreter[IO]

    activities.setThreadSub(validThreadId, true, false, headerUserAgent)
  }

  "Activity.listStargazers" should "call to httpClient.get with the right parameters" in {
    val response: IO[GHResponse[List[Stargazer]]] =
      IO(GHResponse(List(stargazer).asRight, okStatusCode, Map.empty))

    // The mock verifies that the interpreter issues a GET to exactly this URL.
    implicit val httpClientMock = httpClientMockGet[List[Stargazer]](
      url = s"repos/$validRepoOwner/$validRepoName/stargazers",
      response = response
    )

    val activities = new ActivitiesInterpreter[IO]

    activities.listStargazers(validRepoOwner, validRepoName, false, None, headerUserAgent)
  }

  "Activity.listStarredRepositories" should "call to httpClient.get with the right parameters" in {
    val response: IO[GHResponse[List[StarredRepository]]] =
      IO(GHResponse(List(starredRepository).asRight, okStatusCode, Map.empty))

    implicit val httpClientMock = httpClientMockGet[List[StarredRepository]](
      url = s"users/$validUsername/starred",
      response = response
    )

    val activities = new ActivitiesInterpreter[IO]

    activities.listStarredRepositories(validUsername, false, None, None, None, headerUserAgent)
  }
}
| 47deg/github4s | github4s/src/test/scala/github4s/unit/ActivitiesSpec.scala | Scala | apache-2.0 | 2,602 |
/*
* Copyright 2014 JHC Systems Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sqlest.sql
import sqlest.ast._
/**
 * Shared SQL rendering logic: turns `Column` expression trees into SQL text
 * (with '?' placeholders for literals) and, in a mirrored second pass,
 * collects the literal arguments to bind to those placeholders in the same
 * left-to-right order. Dialect-specific statement builders extend this.
 */
trait BaseStatementBuilder {
  /** Renders a comma-separated, aliased select list. */
  def columnAliasListSql(columns: Seq[Column[_]]): String =
    columns map columnAliasSql mkString ", "

  /** Renders a single column, appending an `as` alias when the column carries one. */
  def columnAliasSql(column: Column[_]): String = column match {
    case column: TableColumn[_] =>
      columnSql(column.column) + " as " +
        identifierSql(column.columnAlias)

    case column: AliasColumn[_] =>
      columnSql(column.column) + " as " +
        identifierSql(column.columnAlias)

    case column =>
      columnSql(column)
  }

  /** Renders a column expression (no alias). Literals become '?' placeholders. */
  def columnSql(column: Column[_]): String = column match {
    case LiteralColumn(literal) => literalSql(literal)
    case column: ConstantColumn[_] => constantSql(column.columnType, column.value)
    case column: PrefixFunctionColumn[_] => prefixSql(column.name, column.parameter)
    case column: InfixFunctionColumn[_] => infixSql(column.name, column.parameter1, column.parameter2)
    case column: PostfixFunctionColumn[_] => postfixSql(column.name, column.parameter)
    case column: DoubleInfixFunctionColumn[_] => doubleInfixSql(column.infix1, column.infix2, column.parameter1, column.parameter2, column.parameter3)
    case column: ScalarFunctionColumn[_] => functionSql(column.name, column.parameters: _*)
    case column: TableColumn[_] => identifierSql(column.tableAlias) + "." + identifierSql(column.columnName)
    case column: AliasColumn[_] => identifierSql(column.columnAlias)
  }

  /** Renders a prefix operator application, e.g. `(not x)`. */
  def prefixSql(op: String, parameter: Column[_]): String =
    s"($op ${columnSql(parameter)})"

  /** Renders an infix operator application, e.g. `(a + b)`. */
  def infixSql(op: String, parameter1: Column[_], parameter2: Column[_]): String =
    s"(${columnSql(parameter1)} $op ${columnSql(parameter2)})"

  /** Renders a postfix operator application, e.g. `(x is null)`. */
  def postfixSql(op: String, parameter: Column[_]): String =
    s"(${columnSql(parameter)} $op)"

  /** Renders a ternary form such as `(x between a and b)`. */
  def doubleInfixSql(op1: String, op2: String, parameter1: Column[_], parameter2: Column[_], parameter3: Column[_]): String =
    s"(${columnSql(parameter1)} $op1 ${columnSql(parameter2)} $op2 ${columnSql(parameter3)})"

  /** Renders a function call, e.g. `upper(x, y)`. */
  def functionSql(op: String, parameters: Column[_]*): String =
    parameters.map(columnSql).mkString(s"$op(", ", ", ")")

  /** Renders a comma-separated ORDER BY list. */
  def orderListSql(order: Seq[Order]) =
    order map orderSql mkString ", "

  // Ascending is SQL's default, so only descending gets an explicit keyword.
  def orderSql(order: Order) = {
    if (order.ascending) {
      columnSql(order.column)
    } else {
      columnSql(order.column) + " desc"
    }
  }

  // Literals are always rendered as bind-parameter placeholders.
  def literalSql[A](literal: A) =
    "?"

  /** Renders a constant inline (not bound); strings are quoted and escaped. */
  def constantSql[A](columnType: ColumnType[A], value: A): String = columnType match {
    case BooleanColumnType => value.toString
    case IntColumnType => value.toString
    case LongColumnType => value.toString
    case DoubleColumnType => value.toString
    case BigDecimalColumnType => value.toString
    case StringColumnType => "'" + escapeSqlString(value.toString) + "'"
    // NOTE(review): date-time constants are rendered unquoted via toString —
    // confirm target dialects accept this form.
    case DateTimeColumnType => value.toString
    case optionType: OptionColumnType[_] =>
      val option = value.asInstanceOf[Option[_]]
      if (option.isEmpty) "null" else constantSql(optionType.baseType, option.get)
    case mappedType: MappedColumnType[A, _] => constantSql(mappedType.baseType, mappedType.write(value.asInstanceOf[A]))
  }

  // No quoting/escaping of identifiers is performed here; dialects may override.
  def identifierSql(identifier: String) =
    identifier

  // Standard SQL escaping: single quotes are doubled.
  def escapeSqlString(string: String) =
    string.replace("'", "''")

  // -------------------------------------------------
  // The *Args methods below mirror the *Sql methods above and must visit
  // columns in the same order, so collected literals line up with the '?'
  // placeholders emitted during rendering.

  def columnAliasListArgs(columns: Seq[Column[_]]): List[LiteralColumn[_]] =
    columns.toList flatMap columnAliasArgs

  def columnAliasArgs(column: Column[_]): List[LiteralColumn[_]] = column match {
    case column: TableColumn[_] => Nil
    case column: AliasColumn[_] => columnArgs(column.column)
    case column => columnArgs(column)
  }

  /** Collects the literal bind arguments appearing in a column expression. */
  def columnArgs(column: Column[_]): List[LiteralColumn[_]] = column match {
    case column: LiteralColumn[_] => List(column)
    case column: ConstantColumn[_] => Nil
    case PrefixFunctionColumn(_, a) => columnArgs(a)
    case InfixFunctionColumn(_, a, b) => columnArgs(a) ++ columnArgs(b)
    case PostfixFunctionColumn(_, a) => columnArgs(a)
    case DoubleInfixFunctionColumn(_, _, a, b, c) => columnArgs(a) ++ columnArgs(b) ++ columnArgs(c)
    case ScalarFunctionColumn(_, parameters) => parameters.toList flatMap columnArgs
    case column: TableColumn[_] => Nil
    case column: AliasColumn[_] => Nil
  }

  def orderListArgs(order: Seq[Order]): List[LiteralColumn[_]] =
    order.toList flatMap orderArgs

  def orderArgs(order: Order): List[LiteralColumn[_]] =
    columnArgs(order.column)
}
| andrewjskatz/sqlest | src/main/scala/sqlest/sql/BaseStatementBuilder.scala | Scala | apache-2.0 | 5,060 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.plans.logical
import java.math.{MathContext, RoundingMode}
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* Estimates of various statistics. The default estimation logic simply lazily multiplies the
* corresponding statistic produced by the children. To override this behavior, override
* `statistics` and assign it an overridden version of `Statistics`.
*
* '''NOTE''': concrete and/or overridden versions of statistics fields should pay attention to the
* performance of the implementations. The reason is that estimations might get triggered in
* performance-critical processes, such as query plan planning.
*
* Note that we are using a BigInt here since it is easy to overflow a 64-bit integer in
* cardinality estimation (e.g. cartesian joins).
*
* @param sizeInBytes Physical size in bytes. For leaf operators this defaults to 1, otherwise it
* defaults to the product of children's `sizeInBytes`.
* @param rowCount Estimated number of rows.
* @param attributeStats Statistics for Attributes.
* @param hints Query hints.
*/
case class Statistics(
    sizeInBytes: BigInt,
    rowCount: Option[BigInt] = None,
    attributeStats: AttributeMap[ColumnStat] = AttributeMap(Nil),
    hints: HintInfo = HintInfo()) {

  override def toString: String = s"Statistics($simpleString)"

  /** Readable string representation for the Statistics. */
  def simpleString: String = {
    val sizePart = s"sizeInBytes=${Utils.bytesToString(sizeInBytes)}"
    // Row counts are rendered in scientific notation with 3 significant digits.
    val rowCountPart = rowCount.fold("") { count =>
      s"rowCount=${BigDecimal(count, new MathContext(3, RoundingMode.HALF_UP)).toString()}"
    }
    val hintsPart = s"hints=$hints"
    // Empty parts (absent row count) are dropped from the rendering.
    Seq(sizePart, rowCountPart, hintsPart).filter(_.nonEmpty).mkString(", ")
  }
}
/**
* Statistics collected for a column.
*
* 1. Supported data types are defined in `ColumnStat.supportsType`.
* 2. The JVM data type stored in min/max is the internal data type for the corresponding
* Catalyst data type. For example, the internal type of DateType is Int, and that the internal
* type of TimestampType is Long.
* 3. There is no guarantee that the statistics collected are accurate. Approximation algorithms
* (sketches) might have been used, and the data collected can also be stale.
*
* @param distinctCount number of distinct values
* @param min minimum value
* @param max maximum value
* @param nullCount number of nulls
* @param avgLen average length of the values. For fixed-length types, this should be a constant.
* @param maxLen maximum length of the values. For fixed-length types, this should be a constant.
*/
case class ColumnStat(
    distinctCount: BigInt,
    min: Option[Any],
    max: Option[Any],
    nullCount: BigInt,
    avgLen: Long,
    maxLen: Long) {

  // We currently don't store min/max for binary/string type. This can change in the future and
  // then we need to remove this require.
  require(min.isEmpty || (!min.get.isInstanceOf[Array[Byte]] && !min.get.isInstanceOf[String]))
  require(max.isEmpty || (!max.get.isInstanceOf[Array[Byte]] && !max.get.isInstanceOf[String]))

  /**
   * Returns a map from string to string that can be used to serialize the column stats.
   * The key is the name of the field (e.g. "distinctCount" or "min"), and the value is the string
   * representation for the value. min/max values are converted to the external data type. For
   * example, for DateType we store java.sql.Date, and for TimestampType we store
   * java.sql.Timestamp. The deserialization side is defined in [[ColumnStat.fromMap]].
   *
   * As part of the protocol, the returned map always contains a key called "version".
   * In the case min/max values are null (None), they won't appear in the map.
   */
  def toMap(colName: String, dataType: DataType): Map[String, String] = {
    val map = new scala.collection.mutable.HashMap[String, String]
    map.put(ColumnStat.KEY_VERSION, "1")
    map.put(ColumnStat.KEY_DISTINCT_COUNT, distinctCount.toString)
    map.put(ColumnStat.KEY_NULL_COUNT, nullCount.toString)
    map.put(ColumnStat.KEY_AVG_LEN, avgLen.toString)
    map.put(ColumnStat.KEY_MAX_LEN, maxLen.toString)
    // min/max are serialized via their external (JDBC-style) string forms.
    min.foreach { v => map.put(ColumnStat.KEY_MIN_VALUE, toExternalString(v, colName, dataType)) }
    max.foreach { v => map.put(ColumnStat.KEY_MAX_VALUE, toExternalString(v, colName, dataType)) }
    map.toMap
  }

  /**
   * Converts the given value from Catalyst data type to string representation of external
   * data type.
   */
  private def toExternalString(v: Any, colName: String, dataType: DataType): String = {
    val externalValue = dataType match {
      // Internal Int (days) / Long (microseconds) become java.sql.Date / Timestamp.
      case DateType => DateTimeUtils.toJavaDate(v.asInstanceOf[Int])
      case TimestampType => DateTimeUtils.toJavaTimestamp(v.asInstanceOf[Long])
      case BooleanType | _: IntegralType | FloatType | DoubleType => v
      case _: DecimalType => v.asInstanceOf[Decimal].toJavaBigDecimal
      // This version of Spark does not use min/max for binary/string types so we ignore it.
      case _ =>
        throw new AnalysisException("Column statistics deserialization is not supported for " +
          s"column $colName of data type: $dataType.")
    }
    externalValue.toString
  }
}
object ColumnStat extends Logging {

  // List of string keys used to serialize ColumnStat
  val KEY_VERSION = "version"
  private val KEY_DISTINCT_COUNT = "distinctCount"
  private val KEY_MIN_VALUE = "min"
  private val KEY_MAX_VALUE = "max"
  private val KEY_NULL_COUNT = "nullCount"
  private val KEY_AVG_LEN = "avgLen"
  private val KEY_MAX_LEN = "maxLen"

  /** Returns true iff we support gathering column statistics on a column of the given type. */
  def supportsType(dataType: DataType): Boolean = dataType match {
    case _: IntegralType => true
    case _: DecimalType => true
    case DoubleType | FloatType => true
    case BooleanType => true
    case DateType => true
    case TimestampType => true
    case BinaryType | StringType => true
    case _ => false
  }

  /**
   * Creates a [[ColumnStat]] object from the given map. This is used to deserialize column stats
   * from some external storage. The serialization side is defined in [[ColumnStat.toMap]].
   * Returns None (with a logged warning) when the map is missing keys or malformed.
   */
  def fromMap(table: String, field: StructField, map: Map[String, String]): Option[ColumnStat] = {
    try {
      Some(ColumnStat(
        distinctCount = BigInt(map(KEY_DISTINCT_COUNT).toLong),
        // Note that flatMap(Option.apply) turns Option(null) into None.
        min = map.get(KEY_MIN_VALUE)
          .map(fromExternalString(_, field.name, field.dataType)).flatMap(Option.apply),
        max = map.get(KEY_MAX_VALUE)
          .map(fromExternalString(_, field.name, field.dataType)).flatMap(Option.apply),
        nullCount = BigInt(map(KEY_NULL_COUNT).toLong),
        // Fall back to the type's default size when lengths were not serialized.
        avgLen = map.getOrElse(KEY_AVG_LEN, field.dataType.defaultSize.toString).toLong,
        maxLen = map.getOrElse(KEY_MAX_LEN, field.dataType.defaultSize.toString).toLong
      ))
    } catch {
      case NonFatal(e) =>
        logWarning(s"Failed to parse column statistics for column ${field.name} in table $table", e)
        None
    }
  }

  /**
   * Converts from string representation of external data type to the corresponding Catalyst data
   * type.
   */
  private def fromExternalString(s: String, name: String, dataType: DataType): Any = {
    dataType match {
      case BooleanType => s.toBoolean
      case DateType => DateTimeUtils.fromJavaDate(java.sql.Date.valueOf(s))
      case TimestampType => DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf(s))
      case ByteType => s.toByte
      case ShortType => s.toShort
      case IntegerType => s.toInt
      case LongType => s.toLong
      case FloatType => s.toFloat
      case DoubleType => s.toDouble
      case _: DecimalType => Decimal(s)
      // This version of Spark does not use min/max for binary/string types so we ignore it.
      case BinaryType | StringType => null
      case _ =>
        throw new AnalysisException("Column statistics deserialization is not supported for " +
          s"column $name of data type: $dataType.")
    }
  }

  /**
   * Constructs an expression to compute column statistics for a given column.
   *
   * The expression should create a single struct column with the following schema:
   * distinctCount: Long, min: T, max: T, nullCount: Long, avgLen: Long, maxLen: Long
   *
   * Together with [[rowToColumnStat]], this function is used to create [[ColumnStat]] and
   * as a result should stay in sync with it.
   */
  def statExprs(col: Attribute, relativeSD: Double): CreateNamedStruct = {
    def struct(exprs: Expression*): CreateNamedStruct = CreateStruct(exprs.map { expr =>
      expr.transformUp { case af: AggregateFunction => af.toAggregateExpression() }
    })
    val one = Literal(1, LongType)

    // the approximate ndv (num distinct value) should never be larger than the number of rows
    val numNonNulls = if (col.nullable) Count(col) else Count(one)
    val ndv = Least(Seq(HyperLogLogPlusPlus(col, relativeSD), numNonNulls))
    val numNulls = Subtract(Count(one), numNonNulls)
    val defaultSize = Literal(col.dataType.defaultSize, LongType)

    def fixedLenTypeStruct(castType: DataType) = {
      // For fixed width types, avg size should be the same as max size.
      struct(ndv, Cast(Min(col), castType), Cast(Max(col), castType), numNulls, defaultSize,
        defaultSize)
    }

    col.dataType match {
      case _: IntegralType => fixedLenTypeStruct(LongType)
      case _: DecimalType => fixedLenTypeStruct(col.dataType)
      case DoubleType | FloatType => fixedLenTypeStruct(DoubleType)
      case BooleanType => fixedLenTypeStruct(col.dataType)
      case DateType => fixedLenTypeStruct(col.dataType)
      case TimestampType => fixedLenTypeStruct(col.dataType)
      case BinaryType | StringType =>
        // For string and binary type, we don't store min/max.
        val nullLit = Literal(null, col.dataType)
        struct(
          ndv, nullLit, nullLit, numNulls,
          // Set avg/max size to default size if all the values are null or there is no value.
          Coalesce(Seq(Ceil(Average(Length(col))), defaultSize)),
          Coalesce(Seq(Cast(Max(Length(col)), LongType), defaultSize)))
      case _ =>
        throw new AnalysisException("Analyzing column statistics is not supported for column " +
          s"${col.name} of data type: ${col.dataType}.")
    }
  }

  /** Convert a struct for column stats (defined in statExprs) into [[ColumnStat]]. */
  def rowToColumnStat(row: Row, attr: Attribute): ColumnStat = {
    ColumnStat(
      distinctCount = BigInt(row.getLong(0)),
      // for string/binary min/max, get should return null
      min = Option(row.get(1))
        .map(v => fromExternalString(v.toString, attr.name, attr.dataType)).flatMap(Option.apply),
      max = Option(row.get(2))
        .map(v => fromExternalString(v.toString, attr.name, attr.dataType)).flatMap(Option.apply),
      nullCount = BigInt(row.getLong(3)),
      avgLen = row.getLong(4),
      maxLen = row.getLong(5)
    )
  }
}
| mzl9039/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Statistics.scala | Scala | apache-2.0 | 12,261 |
package io.hydrosphere.mist.master.interfaces.http
import akka.http.scaladsl.model.ws._
import akka.http.scaladsl.server.Directives
import akka.stream.scaladsl.{Flow, Sink}
import io.hydrosphere.mist.master.EventsStreamer
import io.hydrosphere.mist.master.interfaces.JsonCodecs
import spray.json._
import scala.language.postfixOps
/**
* Router for requests via websocket
*/
class WSApi(streamer: EventsStreamer) {

  import Directives._
  import JsonCodecs._

  /**
   * Routes:
   *   - `v2/api/jobs/ws`          streams every job event
   *   - `v2/api/jobs/{jobId}/ws`  streams only the events of a single job
   */
  val route = CorsDirective.cors() {
    path( "v2" / "api" / "jobs" / "ws") {
      handleWebsocketMessages(allEventsWsFlow())
    } ~
    path( "v2" / "api" / "jobs" / Segment / "ws") { jobId =>
      handleWebsocketMessages(jobWsFlow(jobId))
    }
  }

  /** Flow emitting only the events that belong to the job with the given id. */
  private def jobWsFlow(id: String): Flow[Message, Message, Any] = {
    val source = streamer.eventsSource()
      .filter(_.id == id)
      .map(toTextMessage(_))
    // The socket is publish-only: incoming client messages are discarded.
    Flow.fromSinkAndSource(Sink.ignore, source)
  }

  /** Flow emitting every event seen by the streamer. */
  private def allEventsWsFlow(): Flow[Message, Message, Any] = {
    val source = streamer.eventsSource().map(toTextMessage(_))
    Flow.fromSinkAndSource(Sink.ignore, source)
  }

  // Shared JSON serialization, previously duplicated in both flows above.
  private def toTextMessage[A: JsonWriter](event: A): TextMessage.Strict =
    TextMessage.Strict(event.toJson.toString())
}
| KineticCookie/mist | src/main/scala/io/hydrosphere/mist/master/interfaces/http/WSApi.scala | Scala | apache-2.0 | 1,298 |
package org.typelevel.discipline
/** A reusable boolean test over values of type `A`, composable with `&&`. */
trait Predicate[A] extends (A => Boolean) {
  def apply(a: A): Boolean

  /** Conjunction: holds only when both this predicate and `that` hold. */
  def &&(that: Predicate[A]) = Predicate[A] { value => this(value) && that(value) }
}

object Predicate {

  /** Lifts a plain function into a [[Predicate]]. */
  def apply[A](f: A => Boolean) = new Predicate[A] {
    def apply(a: A) = f(a)
  }

  /** A predicate that ignores its input and always answers `res`. */
  def const[A](res: Boolean) = apply[A](_ => res)
}
// vim: expandtab:ts=2:sw=2
| etorreborre/discipline | src/main/scala/Predicate.scala | Scala | mit | 394 |
/*
* Copyright 2018 Vladimir Konstantinov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.illfaku.korro.dto
import com.github.illfaku.korro.config.HttpInstruction
import com.github.illfaku.korro.dto.HttpHeaders.Names.{AcceptLanguage, Host}
import com.github.illfaku.korro.util.{Locales, QueryStringCodec}
import java.net.URL
import java.util.Locale
/**
* HTTP message representation.
*/
sealed trait HttpMessage {

  /**
   * HTTP version of this message.
   */
  val version: HttpVersion

  /**
   * HTTP headers of this message.
   */
  val headers: HttpParams

  /**
   * HTTP message body. The only implementations of this sealed trait are
   * [[HttpRequest]] and [[HttpResponse]].
   */
  val content: HttpContent
}
/**
* HTTP request representation.
*/
case class HttpRequest(
  version: HttpVersion,
  method: HttpRequest.Method,
  uri: HttpRequest.Uri,
  headers: HttpParams,
  content: HttpContent
) extends HttpMessage {

  /**
   * Locale parsed from `Accept-Language` header using
   * [[com.github.illfaku.korro.util.Locales#parse Locales.parse]];
   * falls back to the JVM default locale when the header is absent.
   */
  implicit val locale: Locale = headers.get(AcceptLanguage).map(Locales.parse).getOrElse(Locale.getDefault)

  /**
   * Creates [[com.github.illfaku.korro.dto.HttpRequest.Outgoing HttpRequest.Outgoing]] command for HTTP client.
   * Adds a `Host` header derived from the provided URL (unless one is already present)
   * and prepends the URL's path to this request's uri.
   *
   * @param url Destination URL.
   * @param instructions Additional instructions.
   */
  def to(url: URL, instructions: List[HttpInstruction] = Nil): HttpRequest.Outgoing = {
    val hostValue =
      if (url.getPort == -1) url.getHost
      else url.getHost + ":" + url.getPort
    val finalHeaders =
      if (headers.contains(Host)) headers
      else headers + (Host -> hostValue)
    val outgoingRequest = copy(uri = uri.withPrefix(url.getPath), headers = finalHeaders)
    new HttpRequest.Outgoing(outgoingRequest, url, instructions)
  }
}
object HttpRequest {

  /**
   * Command for HTTP client created by `HttpRequest#to` methods.
   */
  class Outgoing private[korro] (val req: HttpRequest, val url: URL, val instructions: List[HttpInstruction])

  private[korro] object Outgoing {
    def unapply(out: Outgoing): Option[(HttpRequest, URL, List[HttpInstruction])] = {
      Some(out.req, out.url, out.instructions)
    }
  }

  /**
   * HTTP request methods as constructors and handy extractors of request.
   * <br><br>
   * Extractors usage:
   * {{{
   * val req: HttpRequest = ???
   * req match {
   *   case Get(r) => ???
   *   case Post(r) => ???
   * }
   * }}}
   */
  object Method {
    val Get = new Method("GET")
    val Post = new Method("POST")
    val Put = new Method("PUT")
    val Delete = new Method("DELETE")
    val Head = new Method("HEAD")
    val Connect = new Method("CONNECT")
    val Options = new Method("OPTIONS")
    val Trace = new Method("TRACE")
    def apply(name: String): Method = new Method(name)
  }

  /**
   * HTTP request method representation. Two methods are equal iff their
   * names are equal.
   */
  class Method(val name: String) {

    /** Builds a request with this method from the given parts. */
    def apply(
      path: String = "",
      params: HttpParams = HttpParams.empty,
      content: HttpContent = HttpContent.empty,
      headers: HttpParams = HttpParams.empty,
      version: HttpVersion = HttpVersion.Http11
    ): HttpRequest = {
      HttpRequest(version, this, Uri(path, params), headers, content)
    }

    /** Extractor matching requests that use exactly this method. */
    def unapply(req: HttpRequest): Option[HttpRequest] = if (this == req.method) Some(req) else None

    override def equals(other: Any): Boolean = other match {
      case that: Method => name == that.name
      case _ => false
    }

    override lazy val hashCode = name.hashCode

    override val toString = name
  }

  object Uri {
    def apply(path: String, params: HttpParams): Uri = apply(path, QueryStringCodec encode params.entries)
    def apply(path: String, query: String): Uri = {
      if (query.isEmpty) apply(path)
      else apply(s"$path?$query")
    }
    def apply(uri: String): Uri = new Uri(uri)
    def unapply(req: HttpRequest): Option[(String, HttpParams)] = Some(req.uri.path, req.uri.params)
  }

  /**
   * HTTP request URI representation. Path and query are split (and the query
   * decoded into params) lazily, on first access.
   *
   * @param pathWithQuery Path with URL-encoded query
   */
  class Uri(pathWithQuery: String) {

    // Splits at the first '?'; no '?' means the whole string is the path.
    lazy val (path: String, query: String) = {
      val pos = pathWithQuery.indexOf('?')
      if (pos == -1) (pathWithQuery, "") else (pathWithQuery.substring(0, pos), pathWithQuery.substring(pos + 1))
    }

    lazy val params = new HttpParams(QueryStringCodec decode query)

    def withPrefix(prefix: String): Uri = Uri(prefix + pathWithQuery)

    override val toString = pathWithQuery

    override def equals(other: Any): Boolean = other match {
      case that: Uri => toString == that.toString
      case _ => false
    }

    override def hashCode: Int = pathWithQuery.hashCode
  }

  /**
   * Extracts path from HttpRequest.
   * {{{
   * val req: HttpRequest = ...
   * req match {
   *   case Path("/some/path") => ...
   * }
   * }}}
   */
  object Path {
    def unapply(req: HttpRequest): Option[String] = Some(req.uri.path)
  }

  /**
   * Extracts segments of a path, splitting at the last '/'.
   * {{{
   * Get("/a/b/c/d/e") match {
   *   case Path("/a/b" / x / "d" / y) => x + "-" + y // c-e
   * }
   * }}}
   */
  object / {
    def unapply(path: String): Option[(String, String)] = {
      val pos = path.lastIndexOf('/')
      if (pos >= 0) Some(path.substring(0, pos) -> path.substring(pos + 1))
      else None
    }
  }

  /**
   * Matches path of HttpRequest against a pattern and extracts matching groups from it.
   * {{{
   * val req: HttpRequest = ...
   * val ApiRegex = new PathRegex("/api/(\\\\d\\\\.\\\\d)/.*")
   * req match {
   *   case ApiRegex("1.0") => ...
   * }
   * }}}
   *
   * @param pattern Path prefix to test against HttpRequest.
   */
  class PathRegex(pattern: String) {
    private val re = pattern.r
    def unapplySeq(req: HttpRequest): Option[List[String]] = re.unapplySeq(req.uri.path)
  }

  /**
   * String interpolation for paths. It allows to extract path's segments.
   * Each `$x` hole becomes a `([^/]+)` capture group (one path segment).
   * {{{
   * import HttpRequest.PathInterpolation
   * Get("/a/b/c/d/e") match {
   *   case path"/a/b/$x/d/$y" => x + "-" + y // c-e
   * }
   * }}}
   */
  implicit class PathInterpolation(sc: StringContext) {
    def path = new PathRegex(sc.parts.mkString("([^/]+)"))
  }
}
/**
* HTTP response representation.
*/
case class HttpResponse(
  version: HttpVersion,
  status: HttpResponse.Status,
  headers: HttpParams,
  content: HttpContent
) extends HttpMessage

object HttpResponse {

  /**
   * Builds a redirect response of the given status, pointing clients at `uri`
   * via the `Location` header.
   */
  def Redirect(status: Status, uri: HttpRequest.Uri, headers: HttpParams = HttpParams.empty): HttpResponse = {
    val withLocation = headers + (HttpHeaders.Names.Location -> uri.toString)
    status(headers = withLocation)
  }

  object Status {

    val Ok = new Status(200, "OK")
    val MovedPermanently = new Status(301, "Moved Permanently")
    val Found = new Status(302, "Found")
    val SeeOther = new Status(303, "See Other")
    val TemporaryRedirect = new Status(307, "Temporary Redirect")
    val PermanentRedirect = new Status(308, "Permanent Redirect")
    val BadRequest = new Status(400, "Bad Request")
    val Unauthorized = new Status(401, "Unauthorized")
    val NotFound = new Status(404, "Not Found")
    val RequestTimeout = new Status(408, "Request Timeout")
    val ServerError = new Status(500, "Internal Server Error")
    val ServiceUnavailable = new Status(503, "Service Unavailable")

    // Default reason phrase is derived from the status class (1xx-5xx).
    private def reasonFor(code: Int): String = code match {
      case c if c >= 100 && c < 200 => "Informational"
      case c if c >= 200 && c < 300 => "Successful"
      case c if c >= 300 && c < 400 => "Redirection"
      case c if c >= 400 && c < 500 => "Client Error"
      case c if c >= 500 && c < 600 => "Server Error"
      case _ => "Unknown Status"
    }

    def apply(code: Int): Status = new Status(code, reasonFor(code))

    def apply(code: Int, reason: String): Status = new Status(code, reason)
  }

  /**
   * HTTP response status representation. Statuses compare equal by code only;
   * the reason phrase does not participate in equality or hashing.
   */
  class Status(val code: Int, val reason: String) {

    /** Creates a response carrying this status. */
    def apply(
      content: HttpContent = HttpContent.empty,
      headers: HttpParams = HttpParams.empty,
      version: HttpVersion = HttpVersion.Http11
    ): HttpResponse = HttpResponse(version, this, headers, content)

    /** Extractor matching responses that carry exactly this status. */
    def unapply(res: HttpResponse): Option[HttpResponse] =
      if (res.status == this) Some(res) else None

    override def equals(other: Any): Boolean = other match {
      case that: Status => code == that.code
      case _ => false
    }

    override val hashCode: Int = code

    override lazy val toString = s"$code $reason"
  }
}
| yet-another-cafebabe/korro | src/main/scala/com/github/illfaku/korro/dto/HttpMessage.scala | Scala | lgpl-3.0 | 9,083 |
/*
* Copyright (c) 2018. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
* Morbi non lorem porttitor neque feugiat blandit. Ut vitae ipsum eget quam lacinia accumsan.
* Etiam sed turpis ac ipsum condimentum fringilla. Maecenas magna.
* Proin dapibus sapien vel ante. Aliquam erat volutpat. Pellentesque sagittis ligula eget metus.
* Vestibulum commodo. Ut rhoncus gravida arcu.
*/
package com.wallace.demo.app.actordemo.master_worker
import java.util.UUID
import java.util.concurrent.{ConcurrentHashMap, LinkedBlockingQueue, TimeUnit}
import akka.actor.{Actor, ActorRef, ActorSelection, ActorSystem, Props, UnhandledMessage}
import com.typesafe.config.{Config, ConfigFactory}
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
/**
* Created by wallace on 2018/6/21 0021.
*/
class Worker(val host: String, val port: Int, masterHost: String, masterPort: Int) extends Actor {

  // Selection of the master actor; re-resolved on registration acknowledgement.
  private var master: ActorSelection = _
  // Resources this worker advertises to the master on registration.
  private val memory: Int = 8
  private val cores: Int = 4

  // Unique identifier for this worker.
  val id_Worker: String = UUID.randomUUID().toString

  // Heartbeat interval in milliseconds. (sic: "HEATBEAT" typo kept for binary compatibility)
  val HEATBEAT_INTERVAL: Long = 5000L

  // Registers with the master as soon as the actor starts.
  override def preStart(): Unit = {
    println("Worker-preStart")
    master = context.actorSelection(s"akka.tcp://MasterSystem@$masterHost:$masterPort/user/Master")
    master ! RegisterWorker(id_Worker, memory, cores, s"akka.tcp://WorkerSystem@$host:$port/user/Worker")
  }

  override def receive: Receive = {
    // Master acknowledged registration: switch to the URL it gave us and
    // start the periodic heartbeat (self-message every HEATBEAT_INTERVAL ms).
    case RegisteredWorker(masterUrl) =>
      import context.dispatcher
      master = context.actorSelection(masterUrl)
      context.system.scheduler.schedule(0 millis, HEATBEAT_INTERVAL millis, self, SendHeartBeat)
    // Master asks us to re-register (sentinel values -1/-1/""): resend our resources.
    case RegisterWorker(workerId, -1, -1, "") =>
      master ! RegisterWorker(workerId, 8, 4, s"akka.tcp://WorkerSystem@$host:$port/user/Worker")
    // Periodic self-message: forward a heartbeat to the master.
    case SendHeartBeat =>
      println(s"定时发生心跳给${master.toString()}, ReceivedJob总数: ${Worker.wQueue.size()}")
      master ! HeartBeat(id_Worker)
    // Job offer: accept while fewer than 8 jobs are queued, otherwise reject.
    case PersistJob(job) =>
      if (Worker.wQueue.size() < 8 && Worker.wQueue.add(job)) {
        println(s"Receive Job: $job")
        master ! ReceivedJob(job)
      } else {
        println(s"Failed Receive Job: $job")
        master ! FailedReceiveJob(job)
      }
    case msg: UnhandledMessage => println(msg.message)
  }
}
object Worker {
  // Job queue; capacity 8 enforced by convention in Worker.receive.
  val wQueue: LinkedBlockingQueue[String] = new LinkedBlockingQueue[String]()
  // Job-id -> payload map; not used in this file — presumably read elsewhere.
  val receivedQueue: ConcurrentHashMap[String, String] = new ConcurrentHashMap[String, String]()

  /**
   * Entry point: starts a remote-enabled actor system hosting a single
   * [[Worker]] actor, then blocks until the system terminates.
   *
   * Expected arguments: host, port, masterHost, masterPort.
   */
  def main(args: Array[String]): Unit = {
    if (args.length < 4) {
      // Fail fast with a usage hint instead of exiting silently.
      System.err.println("Usage: Worker <host> <port> <masterHost> <masterPort>")
      System.exit(1)
    }
    val host: String = args(0)
    val port: Int = args(1).toInt
    val masterHost = args(2)
    val masterPort = args(3).toInt
    // Minimal Akka remoting config bound to the requested endpoint.
    val confStr: String =
      s"""
         |akka.actor.provider = "akka.remote.RemoteActorRefProvider"
         |akka.remote.netty.tcp.hostname = "$host"
         |akka.remote.netty.tcp.port = "$port"
         |akka.actor.warn-about-java-serializer-usage = "false"
      """.stripMargin
    val conf: Config = ConfigFactory.parseString(confStr)
    val actorSystem: ActorSystem = ActorSystem("WorkerSystem", conf)
    val workerActor: ActorRef = actorSystem.actorOf(Props(new Worker(host, port, masterHost, masterPort)), "Worker")
    println(workerActor.path.toString)
    // Keep the JVM alive (effectively forever) until the actor system stops.
    Await.ready(actorSystem.whenTerminated, Duration(365, TimeUnit.DAYS))
  }
}
| BiyuHuang/CodePrototypesDemo | demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Worker.scala | Scala | apache-2.0 | 3,427 |
package net.cruhland.gentzen
import scala.util.Random
import org.scalacheck._
import Arbitrary.arbitrary
object GenExtras {

  /**
   * Generates a string guaranteed to contain at least one character drawn
   * from `chars`, interleaved with arbitrary (possibly empty) strings.
   */
  def genStringContainingAnyOf(chars: Set[Char]): Gen[String] = {
    Gen.sized { size =>
      for {
        numChars <- Gen.choose(1, size)
        charSeq <- Gen.listOfN(numChars, Gen.oneOf(chars.toSeq))
        numStrings = numChars + 1
        stringSize = size / numStrings
        sizedStringGen = Gen.resize(stringSize, arbitrary[String])
        stringSeq <- Gen.listOfN(numStrings, sizedStringGen)
      } yield interleave(stringSeq, charSeq).mkString
    }
  }

  /** Like Gen.nonEmptyListOf, but structurally guarantees a non-empty result. */
  def actuallyNonEmptyListOf[A](genElem: Gen[A]): Gen[List[A]] = {
    for {
      head <- genElem
      tail <- Gen.listOf(genElem)
    } yield head :: tail
  }

  /**
   * Alternates the elements of `first` and `second`, starting with `first`;
   * once one list is exhausted the rest of the other is appended.
   * Tail-recursive, so very long inputs cannot overflow the stack.
   */
  def interleave[A, B >: A](first: List[A], second: List[B]): List[B] = {
    @scala.annotation.tailrec
    def go(xs: List[B], ys: List[B], acc: List[B]): List[B] = xs match {
      case a :: as => go(ys, as, a :: acc)
      case _ => acc.reverse ::: ys
    }
    go(first, second, Nil)
  }

  /** Generates a random composition (ordered sum of positive parts) of `n`. */
  def composition(n: Int): Gen[List[Int]] = {
    for {
      bits <- Gen.listOfN(n - 1, Gen.choose(0, 1))
    } yield {
      // Each 0-bit starts a new part; each 1-bit grows the current part.
      bits.foldLeft(List(1)) { (ans, b) =>
        if (b == 0) 1 :: ans else 1 + ans.head :: ans.tail
      }
    }
  }

  /** Generates a random partition of `n` (parts listed with the max part first). */
  def partition(n: Int): Gen[List[Int]] = {
    partition(n, n)
  }

  // TODO: Can this be made tail recursive?
  /**
   * Generates a partition of `n` with all parts <= `k`; the largest part is
   * chosen with probability proportional to the pmax counts.
   */
  def partition(n: Int, k: Int): Gen[List[Int]] = {
    val possibleElements = 1 to k
    val partitionsWithMaxElement =
      possibleElements
        .view
        .map(m => pmax(n, m) -> m)
        .filter { case (count, _) => count > 0 }
        .map { case (count, element) => count -> Gen.const(element) }
    if (partitionsWithMaxElement.isEmpty) Gen.const(Nil)
    else {
      for {
        element <- Gen.frequency(partitionsWithMaxElement: _*)
        subPartition <- partition(n - element, element)
      } yield element :: subPartition
    }
  }

  // Memo table for pmax: the naive double recursion is exponential without it.
  // NOTE(review): mutable.HashMap is not thread-safe; assumed single-threaded
  // generator construction — confirm if generators are built concurrently.
  private val pmaxMemo = scala.collection.mutable.HashMap.empty[(Int, Int), Int]

  /**
   * Number of partitions of `n` whose largest part is exactly `k`.
   * pmax(0, 0) == 1; zero whenever `n` or `k` is otherwise non-positive.
   */
  def pmax(n: Int, k: Int): Int = {
    (n, k) match {
      case (0, 0) => 1
      case _ if n <= 0 || k <= 0 => 0
      case _ => pmaxMemo.getOrElseUpdate((n, k), pmax(n - k, k) + pmax(n - 1, k - 1))
    }
  }

  // TODO: Move this into DerivationSpec once it's complete
  /** Builds a shuffled list of atom values, each repeated per the multiplicities in `p`. */
  def tempGenAtoms(p: List[Int]): Gen[List[AtomValue]] = {
    val buildVariable: String => FormulaVariable = FormulaVariable(_, None)
    val genAtomValueConstructor =
      Gen.oneOf(buildVariable, Constant.buildWithoutValidation _)
    val listOfGens = p.zipWithIndex.map {
      case (n, i) => genAtomValueConstructor.map(f => n -> f(i.toString))
    }
    for {
      list <- Gen.sequence[List[(Int, AtomValue)], (Int, AtomValue)](listOfGens)
    } yield {
      Random.shuffle(list.flatMap { case (n, av) => List.fill(n)(av) })
    }
  }
}
| cruhland/gentzen | src/test/scala/net/cruhland/gentzen/GenExtras.scala | Scala | mit | 2,662 |
// ==> 18b253a4a89a84c5674165c6fc3efafad535eee3.scala <==
object x0 {
def x1[x2 <:_[ // error
// error | lampepfl/dotty | tests/neg/i4373c.scala | Scala | apache-2.0 | 104 |
package scala.meta.tests
package parsers
import scala.meta.dialects.Scala211
/**
 * Parser regression tests for tricky token layouts: an annotation separated
 * from the definition it annotates by a comment or trailing whitespace.
 */
class TrickySuite extends ParseSuite {
  // Annotation, then a line comment, then the annotated definition.
  test("annot > \\n > comment > \\n > defn") {
    templStat("@foo\\n//bar bar\\ndef baz = qux")
  }
  // Annotation followed by trailing whitespace before the newline.
  test("annot > whitespace > \\n > defn") {
    templStat("@foo \\ndef baz = qux")
  }
}
| beni55/scalameta | scalameta/parsers/src/test/scala/scala/meta/tests/parsers/TrickySuite.scala | Scala | bsd-3-clause | 302 |
package jkm.cineclub.raft.DB
import jkm.cineclub.raft.RaftConfig.DBInfo
import com.typesafe.scalalogging.slf4j.Logging
import java.io.File
/**
* Created with IntelliJ IDEA.
* User: cineclub
* Date: 12/26/13
* Time: 9:18 PM
* To change this template use File | Settings | File Templates.
*/
object RaftLevelDB extends RaftDB with Logging {

  /**
   * Checks whether a LevelDB database directory already exists for `dbInfo`.
   *
   * @return false when the directory is absent or empty; true when it holds a
   *         plausible LevelDB layout (both CURRENT and LOCK files present)
   * @throws RuntimeException when the path exists but is not a directory, or
   *                          the directory is non-empty but lacks LevelDB files
   */
  def checkExistDB(dbInfo: DBInfo): Boolean = {
    val dbDir = new File(dbInfo.dbRootPath, dbInfo.dbName)
    if (!dbDir.exists()) {
      false
    } else if (!dbDir.isDirectory) {
      logger.error("Something's wrong with the Level DB Dir. It is not a directory " + dbInfo)
      throw new RuntimeException("Something wrong with the Level DB Dir. It is not a directory " + dbInfo)
    } else {
      // listFiles() returns null on I/O error; treat that like an empty dir
      // instead of hitting a NullPointerException.
      val entries = Option(dbDir.listFiles()).getOrElse(Array.empty[File])
      if (entries.isEmpty) {
        false
      // && (short-circuit) replaces the original non-short-circuit bitwise &.
      } else if (new File(dbDir, "CURRENT").exists() && new File(dbDir, "LOCK").exists()) {
        true
      } else {
        logger.error("Something's wrong with the Level DB Dir. LevelDB Files are not proper. " + dbInfo)
        throw new RuntimeException("Something wrong with the Level DB Dir. LevelDB Files are not proper. " + dbInfo)
      }
    }
  }

  /** Creates a LevelDB-backed log-entry store for `dbInfo`. */
  def getLogEntryDB(dbInfo: DBInfo): LogEntryDB = {
    new LogEntryLevelDB(dbInfo)
  }

  /** Creates a LevelDB-backed persistent-state store for `dbInfo`. */
  def getPersistentStateDB(dbInfo: DBInfo): PersistentStateDB = {
    new PersistentStateLevelDB(dbInfo)
  }
}
| stepist/scalaraft | src/main/scala/jkm/cineclub/raft/DB/RaftLevelDB.scala | Scala | apache-2.0 | 1,378 |
/*
Authors:
Matei Zaharia & Kristal Curtis
Developed as part of the SNAP project (http://snap.cs.berkeley.edu/)
*/
package siren
/**
 * One alignment record (one line) of a SAM file: the eleven mandatory SAM
 * columns plus optional tags.
 */
class SAMEntry(
  val readId: String,
  val flags: Int,
  val piece: String,
  val position: Int,
  val mapQuality: Int,
  val cigar: String,
  val nextPiece: String,
  val nextPosition: Int,
  val templateLen: Int,
  val sequence: String,
  val quality: String,
  val tags: SAMTags) {
  // An entry is displayed by its read ID.
  override def toString(): String = readId
  // Renders the record as a tab-separated SAM line: 11 mandatory columns, then tags.
  def toSAMLine: String = List(readId, flags, piece, position, mapQuality, cigar, nextPiece, nextPosition, templateLen, sequence, quality).mkString("\\t") + "\\t" + tags.toString
  // 1 when the REVERSE flag bit is set (reverse-strand alignment), else 0.
  def direction = if ((flags & SAM.REVERSE) != 0) 1 else 0
  // Flag-bit accessors for the corresponding SAM flag masks.
  def reversed: Boolean = ((flags & SAM.REVERSE) != 0)
  def paired: Boolean = ((flags & SAM.MULTIPLE_SEGMENTS) != 0)
  def unmapped: Boolean = ((flags & SAM.UNMAPPED) != 0)
  // Read length, derived from the stored sequence.
  def readLen = sequence.length
  // Read ID with any trailing "/1" or "/2" mate suffix removed.
  def readIdWithoutSuffix: String = {
    if (readId.endsWith("/1") || readId.endsWith("/2")) {
      readId.split("/")(0)
    } else {
      readId
    }
  }
  // Converts back to an unaligned Read; reverse-strand entries are restored to
  // original orientation (reverse-complement sequence, reversed quality).
  def toRead = {
    if (reversed) new Read(readId.getBytes, DNA.rc(sequence).getBytes, quality.reverse.getBytes)
    else new Read(readId.getBytes, sequence.getBytes, quality.getBytes)
  }
  // Same as toRead, but appends "/<whichRead>" to the read ID.
  def toReadWithSuffix(whichRead: Int) = {
    val id = readId + "/" + whichRead
    if (reversed) new Read(id.getBytes, DNA.rc(sequence).getBytes, quality.reverse.getBytes)
    else new Read(id.getBytes, sequence.getBytes, quality.getBytes)
  }
  // True when both entries align to the same piece at the same position.
  def hasSameAlignmentAs(that: SAMEntry): Boolean = {
    piece == that.piece && position == that.position
  }
  // Legacy helpers below are retained commented-out, unchanged.
  /*
  def forceToUnpaired(idSuffix: String = ""): SAMEntry = {
    // return new SAMEntry, discarding info about its mate
    // that means updating the id (with a suffix), flags, nextPiece, nextPosition, & templateLen fields
    // Bits relating to pairs: 0x1, 0x2, 0x8, 0x20, 0x40, 0x80 => set them all to 0
    // ** could be incorrect **
    var newFlags = flags
    SAM.pairFlags.foreach(f => {
      if ((flags & f) != 0) newFlags -= f
    })
    new SAMEntry(
      readId + idSuffix,
      newFlags,
      piece,
      position,
      mapQuality,
      cigar,
      "*",
      0,
      0,
      sequence,
      quality,
      new SAMTags(List(tags.getTag("RG").toString)) // remove tags besides read group
    )
  }
  def updateAlignment(res: AlignResult, genome: Genome): SAMEntry = {
    res match {
      case RichSingleHit(absPos, isRC, editDistance) => {
        val (truePiece, truePos) = genome.getLocation(absPos)
        val newFlags =
          if (isRC) SAM.REVERSE
          else 0
        val newSequence =
          if (isRC) DNA.rc(sequence)
          else sequence
        val newQuality =
          if (isRC) quality.reverse
          else quality
        // create new sam entry with updated alignment
        new SAMEntry(
          readId,
          newFlags,
          truePiece,
          truePos + 1, // because true position is 0-indexed, but SAM should be 1-indexed
          60, // dummy value
          readLen + "M", // cigar.toString
          "*", // assume unpaired
          0, // assume unpaired
          0, // assume unpaired
          newSequence,
          newQuality,
          new SAMTags(List(tags.getTag("RG").toString)) //, "MD:Z:" + mdTag.toString)) // keep read group of original
        )
      } case _ => {
        println("Cannot update alignment.")
        this
      }
    }
  }
  // sets to unmapped; if reverse aligned (and therefore sequence is RC, quality is reversed), will reset sequence & quality
  // keep read group
  def removeAlignment: SAMEntry = {
    val newSequence =
      if (reversed) DNA.rc(sequence)
      else sequence
    val newQuality =
      if (reversed) quality.reverse
      else quality
    val t = tags.getTag("RG")
    val newTags =
      if (t == null) new SAMTags(Nil)
      else new SAMTags(List(t.toString))
    new SAMEntry(
      readId,
      0, // remove flags
      "*", // do not specify piece
      0, // do not specify position
      0, // do not specify mapq
      "*", // do not specify cigar
      "*", // do not specify next piece
      0, // do not specify next position
      0, // do not specify template len
      newSequence,
      newQuality,
      newTags
    )
  }
  def contigToChrPos: SAMEntry = {
    val contigRange = """(\\w+):(\\d+)-(\\d+)""".r
    // fix piece & position -- initially, it's formatted as a range & offset, e.g. "chr22:42950584-42955394 2980"
    // new format will be piece & position as usual, like "chr22 42953564"
    try{
      val contigRange(newPiece, startStr, endStr) = piece
      val offset = position
      val newEntry = new SAMEntry(
        readId, // read ID
        flags, // flags
        newPiece, // piece
        startStr.toInt + offset, // position
        mapQuality, // map quality
        cigar, // cigar
        nextPiece, // next piece
        nextPosition, // next position
        templateLen, // template len
        sequence, // sequence
        quality, // quality
        tags
      )
      newEntry
    } catch {
      case error: MatchError => println("Match error: " + piece + ", " + position)
      this
    }
  }
  // return new entry with all data same as current entry, except use a sanitized CIGAR (e.g., "101M")
  // will also update tags: only retains RG tag (to avoid confusion with NM or MD tags); all other tags are dropped
  def sanitizeCigar: SAMEntry = {
    new SAMEntry(
      readId,
      flags,
      piece,
      position,
      mapQuality,
      readLen + "M", // new dummy cigar string
      nextPiece,
      nextPosition,
      templateLen,
      sequence,
      quality,
      new SAMTags(List(tags.getTag("RG").toString)) // remove tags besides read group
    )
  }
  */
}
| fnothaft/siren-release | src/main/scala/siren/SAMEntry.scala | Scala | bsd-2-clause | 6,055 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.core
/**
 * Base of serving components: reduces a sequence of predictions for a query
 * to a single prediction.
 *
 * @tparam Q query type
 * @tparam P prediction type
 */
abstract class BaseServing[Q, P]
  extends AbstractDoer {
  // Combines the predictions `ps` produced for query `q` into one result.
  private[prediction]
  def serveBase(q: Q, ps: Seq[P]): P
}
| ydanilenko/PredictionIO | core/src/main/scala/io/prediction/core/BaseServing.scala | Scala | apache-2.0 | 761 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import java.io.File
import java.nio.charset.StandardCharsets.UTF_8
import java.sql.Timestamp
import java.util.Locale
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.orc.OrcConf.COMPRESS
import org.apache.orc.OrcFile
import org.apache.orc.OrcProto.ColumnEncoding.Kind.{DICTIONARY_V2, DIRECT, DIRECT_V2}
import org.apache.orc.OrcProto.Stream.Kind
import org.apache.orc.impl.RecordReaderImpl
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.{SPARK_VERSION_SHORT, SparkException}
import org.apache.spark.sql.{Row, SPARK_VERSION_METADATA_KEY}
import org.apache.spark.sql.execution.datasources.SchemaMergeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{LongType, StructField, StructType}
import org.apache.spark.util.Utils
// Simple two-column row type used to populate the ORC test tables.
case class OrcData(intField: Int, stringField: String)
/**
 * Shared ORC data-source tests; concrete subclasses run them against either
 * the `native` or the `hive` ORC implementation (selected via `orcImp`).
 */
abstract class OrcSuite extends OrcTest with BeforeAndAfterAll {
  import testImplicits._

  var orcTableDir: File = null
  var orcTableAsDir: File = null

  protected override def beforeAll(): Unit = {
    super.beforeAll()

    orcTableAsDir = Utils.createTempDir(namePrefix = "orctests")
    orcTableDir = Utils.createTempDir(namePrefix = "orctests")

    // Seed a temp view with ten (i, "part-i") rows for the tests below.
    sparkContext
      .makeRDD(1 to 10)
      .map(i => OrcData(i, s"part-$i"))
      .toDF()
      .createOrReplaceTempView("orc_temp_table")
  }

  /**
   * Writes a bloom-filtered table and asserts the ORC row index records bloom
   * filters of `bloomFilterKind` for every column.
   * (Explicit `: Unit =` replaces the deprecated procedure syntax.)
   */
  protected def testBloomFilterCreation(bloomFilterKind: Kind): Unit = {
    val tableName = "bloomFilter"
    withTempDir { dir =>
      withTable(tableName) {
        val sqlStatement = orcImp match {
          case "native" =>
            s"""
               |CREATE TABLE $tableName (a INT, b STRING)
               |USING ORC
               |OPTIONS (
               |  path '${dir.toURI}',
               |  orc.bloom.filter.columns '*',
               |  orc.bloom.filter.fpp 0.1
               |)
            """.stripMargin
          case "hive" =>
            s"""
               |CREATE TABLE $tableName (a INT, b STRING)
               |STORED AS ORC
               |LOCATION '${dir.toURI}'
               |TBLPROPERTIES (
               |  orc.bloom.filter.columns='*',
               |  orc.bloom.filter.fpp=0.1
               |)
            """.stripMargin
          case impl =>
            throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
        }

        sql(sqlStatement)
        sql(s"INSERT INTO $tableName VALUES (1, 'str')")

        val partFiles = dir.listFiles()
          .filter(f => f.isFile && !f.getName.startsWith(".") && !f.getName.startsWith("_"))
        assert(partFiles.length === 1)

        val orcFilePath = new Path(partFiles.head.getAbsolutePath)
        val readerOptions = OrcFile.readerOptions(new Configuration())
        val reader = OrcFile.createReader(orcFilePath, readerOptions)
        var recordReader: RecordReaderImpl = null
        try {
          recordReader = reader.rows.asInstanceOf[RecordReaderImpl]

          // BloomFilter array is created for all types; `struct`, int (`a`), string (`b`)
          val sargColumns = Array(true, true, true)
          val orcIndex = recordReader.readRowIndex(0, null, sargColumns)

          // Check the types and counts of bloom filters
          assert(orcIndex.getBloomFilterKinds.forall(_ === bloomFilterKind))
          assert(orcIndex.getBloomFilterIndex.forall(_.getBloomFilterCount > 0))
        } finally {
          if (recordReader != null) {
            recordReader.close()
          }
        }
      }
    }
  }

  /**
   * Writes a table with per-column encoding overrides and asserts the stripe
   * footer shows dictionary vs direct encodings as expected.
   * (Explicit `: Unit =` replaces the deprecated procedure syntax.)
   */
  protected def testSelectiveDictionaryEncoding(
      isSelective: Boolean,
      isHive23: Boolean = false): Unit = {
    val tableName = "orcTable"
    withTempDir { dir =>
      withTable(tableName) {
        val sqlStatement = orcImp match {
          case "native" =>
            s"""
               |CREATE TABLE $tableName (zipcode STRING, uniqColumn STRING, value DOUBLE)
               |USING ORC
               |OPTIONS (
               |  path '${dir.toURI}',
               |  orc.dictionary.key.threshold '1.0',
               |  orc.column.encoding.direct 'uniqColumn'
               |)
            """.stripMargin
          case "hive" =>
            s"""
               |CREATE TABLE $tableName (zipcode STRING, uniqColumn STRING, value DOUBLE)
               |STORED AS ORC
               |LOCATION '${dir.toURI}'
               |TBLPROPERTIES (
               |  orc.dictionary.key.threshold '1.0',
               |  hive.exec.orc.dictionary.key.size.threshold '1.0',
               |  orc.column.encoding.direct 'uniqColumn'
               |)
            """.stripMargin
          case impl =>
            throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
        }

        sql(sqlStatement)
        sql(s"INSERT INTO $tableName VALUES ('94086', 'random-uuid-string', 0.0)")

        val partFiles = dir.listFiles()
          .filter(f => f.isFile && !f.getName.startsWith(".") && !f.getName.startsWith("_"))
        assert(partFiles.length === 1)

        val orcFilePath = new Path(partFiles.head.getAbsolutePath)
        val readerOptions = OrcFile.readerOptions(new Configuration())
        val reader = OrcFile.createReader(orcFilePath, readerOptions)
        var recordReader: RecordReaderImpl = null
        try {
          recordReader = reader.rows.asInstanceOf[RecordReaderImpl]

          // Check the kind
          val stripe = recordReader.readStripeFooter(reader.getStripes.get(0))

          // The encodings are divided into direct or dictionary-based categories and
          // further refined as to whether they use RLE v1 or v2. RLE v1 is used by
          // Hive 0.11 and RLE v2 is introduced in Hive 0.12 ORC with more improvements.
          // For more details, see https://orc.apache.org/specification/
          assert(stripe.getColumns(1).getKind === DICTIONARY_V2)
          if (isSelective || isHive23) {
            assert(stripe.getColumns(2).getKind === DIRECT_V2)
          } else {
            assert(stripe.getColumns(2).getKind === DICTIONARY_V2)
          }
          // Floating point types are stored with DIRECT encoding in IEEE 754 floating
          // point bit layout.
          assert(stripe.getColumns(3).getKind === DIRECT)
        } finally {
          if (recordReader != null) {
            recordReader.close()
          }
        }
      }
    }
  }

  /**
   * Merges schemas over two valid ORC files plus one JSON (i.e. corrupt as
   * ORC) file and checks the merged result.
   */
  protected def testMergeSchemasInParallel(
      ignoreCorruptFiles: Boolean,
      schemaReader: (Seq[FileStatus], Configuration, Boolean) => Seq[StructType]): Unit = {
    withSQLConf(
      SQLConf.IGNORE_CORRUPT_FILES.key -> ignoreCorruptFiles.toString,
      SQLConf.ORC_IMPLEMENTATION.key -> orcImp) {
      withTempDir { dir =>
        val fs = FileSystem.get(spark.sessionState.newHadoopConf())
        val basePath = dir.getCanonicalPath

        val path1 = new Path(basePath, "first")
        val path2 = new Path(basePath, "second")
        val path3 = new Path(basePath, "third")

        spark.range(1).toDF("a").coalesce(1).write.orc(path1.toString)
        spark.range(1, 2).toDF("b").coalesce(1).write.orc(path2.toString)
        spark.range(2, 3).toDF("a").coalesce(1).write.json(path3.toString)

        val fileStatuses =
          Seq(fs.listStatus(path1), fs.listStatus(path2), fs.listStatus(path3)).flatten

        val schema = SchemaMergeUtils.mergeSchemasInParallel(
          spark,
          fileStatuses,
          schemaReader)

        assert(schema.isDefined)
        assert(schema.get == StructType(Seq(
          StructField("a", LongType, true),
          StructField("b", LongType, true))))
      }
    }
  }

  /** Runs the merge test both ignoring and not ignoring corrupt files. */
  protected def testMergeSchemasInParallel(
      schemaReader: (Seq[FileStatus], Configuration, Boolean) => Seq[StructType]): Unit = {
    testMergeSchemasInParallel(true, schemaReader)
    val exception = intercept[SparkException] {
      testMergeSchemasInParallel(false, schemaReader)
    }.getCause
    assert(exception.getCause.getMessage.contains("Could not read footer for file"))
  }

  test("create temporary orc table") {
    checkAnswer(sql("SELECT COUNT(*) FROM normal_orc_source"), Row(10))

    checkAnswer(
      sql("SELECT * FROM normal_orc_source"),
      (1 to 10).map(i => Row(i, s"part-$i")))

    checkAnswer(
      sql("SELECT * FROM normal_orc_source where intField > 5"),
      (6 to 10).map(i => Row(i, s"part-$i")))

    checkAnswer(
      sql("SELECT COUNT(intField), stringField FROM normal_orc_source GROUP BY stringField"),
      (1 to 10).map(i => Row(1, s"part-$i")))
  }

  test("create temporary orc table as") {
    checkAnswer(sql("SELECT COUNT(*) FROM normal_orc_as_source"), Row(10))

    checkAnswer(
      sql("SELECT * FROM normal_orc_source"),
      (1 to 10).map(i => Row(i, s"part-$i")))

    checkAnswer(
      sql("SELECT * FROM normal_orc_source WHERE intField > 5"),
      (6 to 10).map(i => Row(i, s"part-$i")))

    checkAnswer(
      sql("SELECT COUNT(intField), stringField FROM normal_orc_source GROUP BY stringField"),
      (1 to 10).map(i => Row(1, s"part-$i")))
  }

  test("appending insert") {
    sql("INSERT INTO TABLE normal_orc_source SELECT * FROM orc_temp_table WHERE intField > 5")

    checkAnswer(
      sql("SELECT * FROM normal_orc_source"),
      (1 to 5).map(i => Row(i, s"part-$i")) ++ (6 to 10).flatMap { i =>
        Seq.fill(2)(Row(i, s"part-$i"))
      })
  }

  test("overwrite insert") {
    sql(
      """INSERT OVERWRITE TABLE normal_orc_as_source
        |SELECT * FROM orc_temp_table WHERE intField > 5
      """.stripMargin)

    checkAnswer(
      sql("SELECT * FROM normal_orc_as_source"),
      (6 to 10).map(i => Row(i, s"part-$i")))
  }

  test("write null values") {
    sql("DROP TABLE IF EXISTS orcNullValues")

    val df = sql(
      """
        |SELECT
        |  CAST(null as TINYINT) as c0,
        |  CAST(null as SMALLINT) as c1,
        |  CAST(null as INT) as c2,
        |  CAST(null as BIGINT) as c3,
        |  CAST(null as FLOAT) as c4,
        |  CAST(null as DOUBLE) as c5,
        |  CAST(null as DECIMAL(7,2)) as c6,
        |  CAST(null as TIMESTAMP) as c7,
        |  CAST(null as DATE) as c8,
        |  CAST(null as STRING) as c9,
        |  CAST(null as VARCHAR(10)) as c10
        |FROM orc_temp_table limit 1
      """.stripMargin)

    df.write.format("orc").saveAsTable("orcNullValues")

    checkAnswer(
      sql("SELECT * FROM orcNullValues"),
      Row.fromSeq(Seq.fill(11)(null)))

    sql("DROP TABLE IF EXISTS orcNullValues")
  }

  test("SPARK-18433: Improve DataSource option keys to be more case-insensitive") {
    val conf = spark.sessionState.conf
    val option = new OrcOptions(Map(COMPRESS.getAttribute.toUpperCase(Locale.ROOT) -> "NONE"), conf)
    assert(option.compressionCodec == "NONE")
  }

  test("SPARK-21839: Add SQL config for ORC compression") {
    val conf = spark.sessionState.conf
    // Test if the default of spark.sql.orc.compression.codec is snappy
    assert(new OrcOptions(Map.empty[String, String], conf).compressionCodec == "SNAPPY")

    // OrcOptions's parameters have a higher priority than SQL configuration.
    // `compression` -> `orc.compression` -> `spark.sql.orc.compression.codec`
    withSQLConf(SQLConf.ORC_COMPRESSION.key -> "uncompressed") {
      assert(new OrcOptions(Map.empty[String, String], conf).compressionCodec == "NONE")
      val map1 = Map(COMPRESS.getAttribute -> "zlib")
      val map2 = Map(COMPRESS.getAttribute -> "zlib", "compression" -> "lzo")
      assert(new OrcOptions(map1, conf).compressionCodec == "ZLIB")
      assert(new OrcOptions(map2, conf).compressionCodec == "LZO")
    }

    // Test all the valid options of spark.sql.orc.compression.codec
    Seq("NONE", "UNCOMPRESSED", "SNAPPY", "ZLIB", "LZO").foreach { c =>
      withSQLConf(SQLConf.ORC_COMPRESSION.key -> c) {
        val expected = if (c == "UNCOMPRESSED") "NONE" else c
        assert(new OrcOptions(Map.empty[String, String], conf).compressionCodec == expected)
      }
    }
  }

  test("SPARK-23340 Empty float/double array columns raise EOFException") {
    Seq(Seq(Array.empty[Float]).toDF(), Seq(Array.empty[Double]).toDF()).foreach { df =>
      withTempPath { path =>
        df.write.format("orc").save(path.getCanonicalPath)
        checkAnswer(spark.read.orc(path.getCanonicalPath), df)
      }
    }
  }

  test("SPARK-24322 Fix incorrect workaround for bug in java.sql.Timestamp") {
    withTempPath { path =>
      val ts = Timestamp.valueOf("1900-05-05 12:34:56.000789")
      Seq(ts).toDF.write.orc(path.getCanonicalPath)
      checkAnswer(spark.read.orc(path.getCanonicalPath), Row(ts))
    }
  }

  test("Write Spark version into ORC file metadata") {
    withTempPath { path =>
      spark.range(1).repartition(1).write.orc(path.getCanonicalPath)

      val partFiles = path.listFiles()
        .filter(f => f.isFile && !f.getName.startsWith(".") && !f.getName.startsWith("_"))
      assert(partFiles.length === 1)

      val orcFilePath = new Path(partFiles.head.getAbsolutePath)
      val readerOptions = OrcFile.readerOptions(new Configuration())
      Utils.tryWithResource(OrcFile.createReader(orcFilePath, readerOptions)) { reader =>
        val version = UTF_8.decode(reader.getMetadataValue(SPARK_VERSION_METADATA_KEY)).toString
        assert(version === SPARK_VERSION_SHORT)
      }
    }
  }

  test("SPARK-11412 test orc merge schema option") {
    val conf = spark.sessionState.conf
    // Test if the default of spark.sql.orc.mergeSchema is false
    assert(!new OrcOptions(Map.empty[String, String], conf).mergeSchema)

    // OrcOptions's parameters have a higher priority than SQL configuration.
    // `mergeSchema` -> `spark.sql.orc.mergeSchema`
    withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "true") {
      val map1 = Map(OrcOptions.MERGE_SCHEMA -> "true")
      val map2 = Map(OrcOptions.MERGE_SCHEMA -> "false")
      assert(new OrcOptions(map1, conf).mergeSchema)
      assert(!new OrcOptions(map2, conf).mergeSchema)
    }

    withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "false") {
      val map1 = Map(OrcOptions.MERGE_SCHEMA -> "true")
      val map2 = Map(OrcOptions.MERGE_SCHEMA -> "false")
      assert(new OrcOptions(map1, conf).mergeSchema)
      assert(!new OrcOptions(map2, conf).mergeSchema)
    }
  }

  test("SPARK-11412 test enabling/disabling schema merging") {
    def testSchemaMerging(expectedColumnNumber: Int): Unit = {
      withTempDir { dir =>
        val basePath = dir.getCanonicalPath
        spark.range(0, 10).toDF("a").write.orc(new Path(basePath, "foo=1").toString)
        spark.range(0, 10).toDF("b").write.orc(new Path(basePath, "foo=2").toString)
        assert(spark.read.orc(basePath).columns.length === expectedColumnNumber)

        // OrcOptions.MERGE_SCHEMA has higher priority
        assert(spark.read.option(OrcOptions.MERGE_SCHEMA, true)
          .orc(basePath).columns.length === 3)
        assert(spark.read.option(OrcOptions.MERGE_SCHEMA, false)
          .orc(basePath).columns.length === 2)
      }
    }

    withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "true") {
      testSchemaMerging(3)
    }

    withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "false") {
      testSchemaMerging(2)
    }
  }

  test("SPARK-11412 test enabling/disabling schema merging with data type conflicts") {
    withTempDir { dir =>
      val basePath = dir.getCanonicalPath
      spark.range(0, 10).toDF("a").write.orc(new Path(basePath, "foo=1").toString)
      spark.range(0, 10).map(s => s"value_$s").toDF("a")
        .write.orc(new Path(basePath, "foo=2").toString)

      // with schema merging, there should throw exception
      withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "true") {
        val exception = intercept[SparkException] {
          spark.read.orc(basePath).columns.length
        }.getCause

        val innerMessage = orcImp match {
          case "native" => exception.getMessage
          case "hive" => exception.getCause.getMessage
          case impl =>
            throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
        }

        assert(innerMessage.contains("Failed to merge incompatible data types"))
      }

      // it is ok if no schema merging
      withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "false") {
        assert(spark.read.orc(basePath).columns.length === 2)
      }
    }
  }

  test("SPARK-11412 test schema merging with corrupt files") {
    withSQLConf(SQLConf.ORC_SCHEMA_MERGING_ENABLED.key -> "true") {
      withTempDir { dir =>
        val basePath = dir.getCanonicalPath
        spark.range(0, 10).toDF("a").write.orc(new Path(basePath, "foo=1").toString)
        spark.range(0, 10).toDF("b").write.orc(new Path(basePath, "foo=2").toString)
        spark.range(0, 10).toDF("c").write.json(new Path(basePath, "foo=3").toString)

        // ignore corrupt files
        withSQLConf(SQLConf.IGNORE_CORRUPT_FILES.key -> "true") {
          assert(spark.read.orc(basePath).columns.length === 3)
        }

        // don't ignore corrupt files
        withSQLConf(SQLConf.IGNORE_CORRUPT_FILES.key -> "false") {
          val exception = intercept[SparkException] {
            spark.read.orc(basePath).columns.length
          }.getCause
          assert(exception.getCause.getMessage.contains("Could not read footer for file"))
        }
      }
    }
  }
}
/** Runs the shared ORC suite against the native ORC implementation. */
class OrcSourceSuite extends OrcSuite with SharedSparkSession {
  protected override def beforeAll(): Unit = {
    super.beforeAll()
    // Materialize the seeded temp view into a real ORC table, then expose it
    // through the two temp views the shared tests query.
    sql(
      s"""CREATE TABLE normal_orc(
         |  intField INT,
         |  stringField STRING
         |)
         |USING ORC
         |LOCATION '${orcTableAsDir.toURI}'
       """.stripMargin)
    sql(
      s"""INSERT INTO TABLE normal_orc
         |SELECT intField, stringField FROM orc_temp_table
       """.stripMargin)
    spark.sql(
      s"""CREATE TEMPORARY VIEW normal_orc_source
         |USING ORC
         |OPTIONS (
         |  PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}'
         |)
       """.stripMargin)
    spark.sql(
      s"""CREATE TEMPORARY VIEW normal_orc_as_source
         |USING ORC
         |OPTIONS (
         |  PATH '${new File(orcTableAsDir.getAbsolutePath).toURI}'
         |)
       """.stripMargin)
  }
  test("Check BloomFilter creation") {
    testBloomFilterCreation(Kind.BLOOM_FILTER_UTF8) // After ORC-101
  }
  test("Enforce direct encoding column-wise selectively") {
    testSelectiveDictionaryEncoding(isSelective = true)
  }
  test("SPARK-11412 read and merge orc schemas in parallel") {
    testMergeSchemasInParallel(OrcUtils.readOrcSchemasInParallel)
  }
}
| bdrillard/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala | Scala | apache-2.0 | 19,588 |
package io.github.tailhq.dynaml.utils
/**
* @author tailhq date 17/05/2017.
* */
/**
 * Mixin for models that carry a named set of real-valued hyper-parameters,
 * some of which can be "blocked" (excluded from tuning).
 */
abstract class HasHyperParameters {

  // All hyper-parameter names; supplied by the concrete model.
  val hyper_parameters: List[String]

  // Names currently excluded from tuning.
  var blocked_hyper_parameters: List[String] = List()

  // Current name -> value assignment.
  var state: Map[String, Double] = Map()

  // Marks the given hyper-parameter names as blocked.
  def block(h: String*) = blocked_hyper_parameters = h.toList

  // Blocks every declared hyper-parameter.
  def block_all_hyper_parameters: Unit = {
    blocked_hyper_parameters = hyper_parameters
  }

  // State restricted to the non-blocked hyper-parameters.
  def effective_state: Map[String, Double] =
    state.filterNot { case (name, _) => blocked_hyper_parameters.contains(name) }

  // Declared hyper-parameters that are not blocked.
  def effective_hyper_parameters: List[String] =
    hyper_parameters.filterNot(name => blocked_hyper_parameters.contains(name))

  // Assigns values to every non-blocked hyper-parameter and returns `this`.
  def setHyperParameters(h: Map[String, Double]): this.type = {
    assert(effective_hyper_parameters.forall(h.contains),
      "All hyper parameters must be contained in the arguments")
    state ++= effective_hyper_parameters.map(name => name -> h(name))
    this
  }
}
| mandar2812/DynaML | dynaml-core/src/main/scala/io/github/tailhq/dynaml/utils/HasHyperParameters.scala | Scala | apache-2.0 | 946 |
package com.dzegel.DynamockServer.types
// Pairs two header sets for matching — NOTE(review): `included` presumably
// lists headers that must be present and `excluded` those that must be
// absent; inferred from field names, confirm against the matcher code.
case class HeaderParameters(included: HeaderSet, excluded: HeaderSet)
| dzegel/DynamockServer | src/main/scala/com/dzegel/DynamockServer/types/HeaderParameters.scala | Scala | apache-2.0 | 111 |
package net.mrkeks.clave.editor.tools
import net.mrkeks.clave.game.{GameObject, PlaceableObject}
import net.mrkeks.clave.game.GameObjectManagement
import org.denigma.threejs
/**
 * Editor tool for placing new game objects: a preview instance follows the
 * pointer and is committed on activation.
 *
 * @param name    display name of the tool
 * @param factory creates a fresh instance of the object to be placed
 */
class ObjectPlacer[T <: GameObject with PlaceableObject](override val name: String, factory: () => T) extends AbstractEditorTool {
  // Preview instance created but not yet committed, if any.
  private var newObject: Option[T] = None
  // Discards any uncommitted preview object.
  def deactivate(): Unit = {
    for (o <- newObject) {
      o.markForDeletion()
    }
    newObject = None
  }
  // Creates a preview object if needed and tries to place it at the grid cell
  // nearest the pointer intersection (x/z rounded to integers).
  def previewTool(intersection: threejs.Intersection, gameObjectManagement: GameObjectManagement): AbstractEditorTool.Result = {
    createObjectIfNeeded(gameObjectManagement)
    if (newObject.map(_.place(intersection.point.x.round.toInt, intersection.point.z.round.toInt)).getOrElse(false)) {
      AbstractEditorTool.Success()
    } else {
      AbstractEditorTool.Fail()
    }
  }
  // Commits the placement: clears the preview flag and re-places the object.
  // On success the reference is dropped so the next preview creates a fresh
  // instance; on failure the object reverts to preview state.
  def runTool(intersection: threejs.Intersection, gameObjectManagement: GameObjectManagement): AbstractEditorTool.Result = {
    newObject.foreach(_.isPreview = false)
    previewTool(intersection, gameObjectManagement) match {
      case s @ AbstractEditorTool.Success() =>
        newObject = None
        s
      case f @ AbstractEditorTool.Fail() =>
        newObject.foreach(_.isPreview = true)
        f
    }
  }
  // Lazily creates the preview instance and registers it with the game.
  private def createObjectIfNeeded(gameObjectManagement: GameObjectManagement): Unit = {
    if (newObject.isEmpty) {
      val creation = factory()
      creation.isPreview = true
      gameObjectManagement.add(creation)
      newObject = Some(creation)
    }
  }
}
} | benkeks/clave | src/main/scala/net/mrkeks/clave/editor/tools/ObjectPlacer.scala | Scala | gpl-3.0 | 1,554 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.benchmark
import org.apache.hadoop.hbase.util.Bytes
import org.apache.s2graph.core.GraphUtil
import org.apache.s2graph.core.mysqls.ServiceColumn
import org.apache.s2graph.core.types.{HBaseType, InnerVal, SourceVertexId}
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
class GraphUtilSpec extends BenchmarkCommon {

  // True when `bytes` sorts between `startKey` and `endKey` (both inclusive)
  // under HBase's lexicographic byte comparison.
  def between(bytes: Array[Byte], startKey: Array[Byte], endKey: Array[Byte]): Boolean =
    Bytes.compareTo(startKey, bytes) <= 0 && Bytes.compareTo(endKey, bytes) >= 0

  // Inclusive range check on shorts.
  def betweenShort(value: Short, start: Short, end: Short): Boolean =
    start <= value && value <= end

  "GraphUtil" should {
    "test murmur3 hash function distribution" in {
      // Hash 1M sequential string keys into ~40 buckets with both murmur3
      // variants and print the most/least populated buckets to eyeball skew.
      val testNum = 1000000
      val bucketSize = Short.MaxValue / 40
      val countsNew = new mutable.HashMap[Int, Int]()
      val counts = new mutable.HashMap[Int, Int]()
      for {
        i <- (0 until testNum)
      } {
        val h = GraphUtil.murmur3(i.toString) / bucketSize
        val hNew = GraphUtil.murmur3Int(i.toString) / bucketSize
        counts += (h -> (counts.getOrElse(h, 0) + 1))
        countsNew += (hNew -> (countsNew.getOrElse(hNew, 0) + 1))
      }
      // Sort buckets by population, descending.
      val all = counts.toList.sortBy { case (bucket, count) => count }.reverse
      val allNew = countsNew.toList.sortBy { case (bucket, count) => count }.reverse
      val top = all.take(10)
      val bottom = all.takeRight(10)
      val topNew = allNew.take(10)
      val bottomNew = allNew.takeRight(10)
      println(s"Top: $top")
      println(s"Bottom: $bottom")
      println("-" * 50)
      println(s"TopNew: $topNew")
      println(s"Bottom: $bottomNew")
      true
    }

    "test murmur hash skew2" in {
      import HBaseType._
      // Simulate 40 HBase regions split evenly over the positive int space and
      // count how many generated vertex-id row keys land in each region.
      val testNum = 1000000L
      val regionCount = 40
      val window = Int.MaxValue / regionCount
      val rangeBytes = new ListBuffer[(List[Byte], List[Byte])]()
      for {
        i <- (0 until regionCount)
      } yield {
        val startKey = Bytes.toBytes(i * window)
        val endKey = Bytes.toBytes((i + 1) * window)
        rangeBytes += (startKey.toList -> endKey.toList)
      }

      // stats: region index -> (region byte range, hit count)
      // counts: murmur3 short prefix -> occurrences
      val stats = new collection.mutable.HashMap[Int, ((List[Byte], List[Byte]), Long)]()
      val counts = new collection.mutable.HashMap[Short, Long]()
      stats += (0 -> (rangeBytes.head -> 0L))
      for (i <- (0L until testNum)) {
        val vertexId = SourceVertexId(ServiceColumn.Default, InnerVal.withLong(i, HBaseType.DEFAULT_VERSION))
        val bytes = vertexId.bytes
        val shortKey = GraphUtil.murmur3(vertexId.innerId.toIdString())
        val shortVal = counts.getOrElse(shortKey, 0L) + 1L
        counts += (shortKey -> shortVal)
        // Linear scan for the region whose byte range contains this row key.
        var j = 0
        var found = false
        while (j < rangeBytes.size && !found) {
          val (start, end) = rangeBytes(j)
          if (between(bytes, start.toArray, end.toArray)) {
            found = true
          }
          j += 1
        }
        // j was incremented past the matching index, so j - 1 is the region hit.
        val head = rangeBytes(j - 1)
        val key = j - 1
        val value = stats.get(key) match {
          case None => 0L
          case Some(v) => v._2 + 1
        }
        stats += (key ->(head, value))
      }

      // Report regions ordered by hit count, then by region index.
      val sorted = stats.toList.sortBy(kv => kv._2._2).reverse
      println(s"Index: StartBytes ~ EndBytes\\tStartShortBytes ~ EndShortBytes\\tStartShort ~ EndShort\\tCount\\tShortCount")
      sorted.foreach { case (idx, ((start, end), cnt)) =>
        val startShort = Bytes.toShort(start.take(2).toArray)
        val endShort = Bytes.toShort(end.take(2).toArray)
        val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
        println(s"$idx: $start ~ $end\\t${start.take(2)} ~ ${end.take(2)}\\t$startShort ~ $endShort\\t$cnt\\t$count")
      }
      println("\\n" * 10)
      println(s"Index: StartBytes ~ EndBytes\\tStartShortBytes ~ EndShortBytes\\tStartShort ~ EndShort\\tCount\\tShortCount")
      stats.toList.sortBy(kv => kv._1).reverse.foreach { case (idx, ((start, end), cnt)) =>
        val startShort = Bytes.toShort(start.take(2).toArray)
        val endShort = Bytes.toShort(end.take(2).toArray)
        val count = counts.count(t => startShort <= t._1 && t._1 < endShort)
        println(s"$idx: $start ~ $end\\t${start.take(2)} ~ ${end.take(2)}\\t$startShort ~ $endShort\\t$cnt\\t$count")
      }
      true
    }

    "Bytes compareTo" in {
      // Sanity check of HBase byte ordering for a sample key against a range.
      val x = Array[Byte](11, -12, -26, -14, -23)
      val startKey = Array[Byte](0, 0, 0, 0)
      val endKey = Array[Byte](12, -52, -52, -52)
      println(Bytes.compareTo(startKey, x))
      println(Bytes.compareTo(endKey, x))
      true
    }
  }
}
| daewon/incubator-s2graph | s2core/src/test/scala/org/apache/s2graph/core/benchmark/GraphUtilSpec.scala | Scala | apache-2.0 | 5,426 |
package org.geow.geohash.test
import org.geow.geohash._
import org.geow.generator.OsmObjectGenerator
import org.specs2.ScalaCheck
import org.specs2.execute.Result
import org.specs2.mutable.Specification
/**
* Created by janschulte on 12/02/15.
*/
class GeoHashSpec extends Specification with ScalaCheck {

  sequential

  val generator = OsmObjectGenerator()

  private val hashUltraLow = new GeoHash(PrecisionUltraLow_630KM)
  private val hashVeryLow = new GeoHash(PrecisionVeryLow_80KM)
  private val hashLow = new GeoHash(PrecisionLow_20KM)
  private val hashMedium = new GeoHash(PrecisionMedium_5KM)
  private val hashHigh = new GeoHash(PrecisionHigh_100M)
  private val hashVeryHigh = new GeoHash(PrecisionVeryHigh_1M)
  private val hashUltra = new GeoHash(PrecisionUltra_1CM)
  private val hashUltraHigh = new GeoHash(PrecisionUltraHigh_1MM)

  val testCases = 100000

  /**
   * Encodes `testCases` random points with `hash`, decodes them again and
   * checks the round-tripped coordinates stay within 180 / 2^fractionBits
   * (longitude) and 90 / 2^fractionBits (latitude) of the original.
   *
   * The eight test cases below previously duplicated this body verbatim,
   * differing only in the hash instance and the bit count.
   *
   * @param hash         the codec under test
   * @param fractionBits number of precision bits used for the tolerance bound
   * @return the aggregated specs2 [[org.specs2.execute.Result]]
   */
  private def roundTrip(hash: GeoHash, fractionBits: Int): Result = Result.unit {
    (1 to testCases) foreach { _ =>
      val expectedPoint = generator.generatePoint
      val encoded = hash.encodeParallel(expectedPoint.lon, expectedPoint.lat)
      val (lon, lat) = hash.decodeParallel(encoded)
      lon must beCloseTo(expectedPoint.lon, 180 / Math.pow(2, fractionBits))
      lat must beCloseTo(expectedPoint.lat, 90 / Math.pow(2, fractionBits))
    }
  }

  "The GeoHash" should {
    s"encode/decode $testCases points at ultra low precision" in roundTrip(hashUltraLow, 5)
    s"encode/decode $testCases points at very low precision" in roundTrip(hashVeryLow, 8)
    s"encode/decode $testCases points at low precision" in roundTrip(hashLow, 10)
    s"encode/decode $testCases points at medium precision" in roundTrip(hashMedium, 13)
    s"encode/decode $testCases points at high precision" in roundTrip(hashHigh, 18)
    s"encode/decode $testCases points at very high precision" in roundTrip(hashVeryHigh, 24)
    s"encode/decode $testCases points at ultra precision" in roundTrip(hashUltra, 30)
    s"encode/decode $testCases points at ultra high precision" in roundTrip(hashUltraHigh, 32)
  }
}
package castalia.actors
import akka.actor.{ActorSystem, Props}
import akka.actor.Status.Failure
import akka.http.scaladsl.model.{HttpMethods, HttpProtocols, HttpRequest}
import akka.testkit.TestProbe
import castalia.StubConfigParser._
import castalia.matcher.RequestMatch
import castalia.model.Messages.{EndpointMetricsInit, EndpointCalled}
import castalia.model.Model.StubResponse
import scala.concurrent.duration._
class JsonResponseProviderEndpointActorSpec(_system: ActorSystem) extends ActorSpecBase(_system) {

  def this() = this(ActorSystem("StubServerSystem"))

  "JsonResponseProviderEndpointActorTest" should {
    val metricsCollector = new TestProbe(_system)

    "execute successfully with an response" in {
      val request = new HttpRequest(method = HttpMethods.GET, uri = "somepath/1/with/2", protocol = HttpProtocols.`HTTP/1.1`)
      val config = parseStubConfig("jsonprogrammedstub1.json")
      val endpoint = system.actorOf(Props(
        new JsonResponseProviderEndpointActor(config.endpoint, config.responseprovider.get, metricsCollector.ref)))

      endpoint ! new RequestMatch(request, List("1" -> "1", "2" -> "2"), Nil)

      // The programmed stub answers with a 200 and the templated JSON body.
      expectMsg(StubResponse(200, """{"result":"1 with 2"}"""))
      // Metrics bookkeeping: initialisation first, then one recorded call.
      metricsCollector.expectMsg(EndpointMetricsInit(config.endpoint))
      metricsCollector.expectMsg(EndpointCalled(config.endpoint))
    }

    "execute successfully with an exception" in {
      val request = new HttpRequest(method = HttpMethods.GET, uri = "somepath/3/with/4", protocol = HttpProtocols.`HTTP/1.1`)
      val config = parseStubConfig("jsonprogrammedstub2.json")
      val endpoint = system.actorOf(Props(
        new JsonResponseProviderEndpointActor(config.endpoint, config.responseprovider.get, metricsCollector.ref)))

      endpoint ! new RequestMatch(request, List("1" -> "3", "2" -> "4"), Nil)

      // The exact failure payload is provider-specific, so only assert the type.
      expectMsgClass(classOf[Failure])
    }
  }
}
| TimSoethout/stubserver | src/test/scala/castalia/actors/JsonResponseProviderEndpointActorSpec.scala | Scala | mit | 2,022 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package bootstrap.liftweb
package checks
import com.normation.rudder.repository.ItemArchiveManager
import com.normation.rudder.services.user.PersonIdentService
import com.normation.rudder.domain.eventlog.RudderEventActor
import net.liftweb.common._
import com.normation.utils.StringUuidGenerator
import com.normation.eventlog.ModificationId
import com.normation.rudder.rule.category.RoRuleCategoryRepository
import java.io.File
import net.liftweb.util.ControlHelpers.tryo
/**
*
* Check that the rules archive directory in
 * configuration-repository exists.
*
*/
class CheckRootRuleCategoryExport(
    itemArchiveManager : ItemArchiveManager
  , categoryDirectory  : File
  , personIdentService : PersonIdentService
  , uuidGen            : StringUuidGenerator
) extends BootstrapChecks {

  override val description = "Check rules archive directory in configuration-repository"

  /**
   * If the Rule categories directory is missing, initialise it by exporting
   * the current Rules; otherwise just log at trace level. All failures are
   * only logged: a bootstrap check must not abort application start-up.
   */
  override def checks() : Unit = {
    (for {
      exists <- tryo { categoryDirectory.exists() }
      ident  <- personIdentService.getPersonIdentOrDefault(RudderEventActor.name)
    } yield {
      if (!exists) {
        logger.info(s"Directory '${categoryDirectory.getAbsolutePath()}' is missing, initialize it by exporting Rules")
        itemArchiveManager.exportRules(ident, ModificationId(uuidGen.newUuid), RudderEventActor, Some("Initialising configuration-repository Rule categories directory"), false)
      } else {
        logger.trace(s"Directory '${categoryDirectory.getAbsolutePath()}' exists")
        Full("OK")
      }
    }) match {
      case eb: EmptyBox =>
        // The existence check or the committer lookup itself failed.
        val fail = eb ?~! s"Error when checking '${categoryDirectory}' directory existence"
        logger.error(fail.msg)
        fail.rootExceptionCause.foreach { t =>
          logger.error("Root exception was:", t)
        }
      case Full(eb: EmptyBox) =>
        // The directory was missing and the archive export failed.
        val fail = eb ?~! "Initialising configuration-repository Rule categories directory with a Rule archive"
        logger.error(fail.msg)
        fail.rootExceptionCause.foreach { t =>
          logger.error("Root exception was:", t)
        }
      case Full(Full(_)) =>
        // Bug fix: the previous message ("Creating directory '...' exists, done")
        // garbled the "created" and "already exists" outcomes into one sentence.
        logger.info(s"Check of Rule categories directory '${categoryDirectory.getAbsolutePath()}' done")
    }
  }
}
| Kegeruneku/rudder | rudder-web/src/main/scala/bootstrap/liftweb/checks/CheckRuleCategoryExport.scala | Scala | agpl-3.0 | 3,887 |
package com.twitter.inject.server
import com.google.common.net.{HttpHeaders, MediaType}
import com.google.inject.Stage
import com.twitter.conversions.time._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.http._
import com.twitter.finagle.service.Backoff._
import com.twitter.finagle.service.RetryPolicy
import com.twitter.finagle.service.RetryPolicy._
import com.twitter.finagle.stats.{NullStatsReceiver, InMemoryStatsReceiver, StatsReceiver}
import com.twitter.finagle.{ChannelClosedException, Service}
import com.twitter.inject.app.Banner._
import com.twitter.inject.app.{Banner, EmbeddedApp, App}
import com.twitter.inject.modules.InMemoryStatsReceiverModule
import com.twitter.inject.server.EmbeddedTwitterServer._
import com.twitter.util._
import java.net.{InetSocketAddress, URI}
import java.util.concurrent.TimeUnit._
import org.jboss.netty.handler.codec.http.{HttpMethod, HttpResponseStatus}
object EmbeddedTwitterServer {
  /**
   * Augments the supplied client flags with loopback socks-proxy settings when
   * tunneled external service discovery is requested; otherwise returns the
   * flags unchanged.
   */
  private def resolveClientFlags(useSocksProxy: Boolean, clientFlags: Map[String, String]) =
    if (!useSocksProxy) clientFlags
    else clientFlags ++ Map(
      "com.twitter.server.resolverZkHosts" -> PortUtils.loopbackAddressForPort(2181),
      "com.twitter.finagle.socks.socksProxyHost" -> PortUtils.loopbackAddress,
      "com.twitter.finagle.socks.socksProxyPort" -> "50001")
}
/**
* EmbeddedTwitterServer allows a twitter-server serving http or thrift endpoints to be started
 * locally (on ephemeral ports), and tested through its http/thrift interfaces.
*
* Note: All initialization fields are lazy to aid running multiple tests inside Intellij at the same time
* since Intellij "pre-constructs" ALL the tests before running each one.
*
* @param twitterServer The twitter server to be started locally for integration testing
* @param clientFlags Command line flags (e.g. "foo"->"bar" is translated into -foo=bar)
* @param extraArgs Extra command line arguments
* @param waitForWarmup Once the app is started, wait for App warmup to be completed
* @param stage Guice Stage used to create the server's injector. Since EmbeddedTwitterServer is used for testing, we default to Stage.DEVELOPMENT.
* This makes it possible to only mock objects that are used in a given test, at the expense of not checking that the entire
 * object graph is valid. As such, you should always have at least one Stage.PRODUCTION test for your service (which eagerly
* creates all Guice classes at startup)
* @param useSocksProxy Use a tunneled socks proxy for external service discovery/calls (useful for manually run external integration tests that connect to external services)
* @param skipAppMain Skip the running of appMain when the app starts. You will need to manually call app.appMain() later in your test.
*/
class EmbeddedTwitterServer(
  val twitterServer: Ports,
  clientFlags: Map[String, String] = Map(),
  extraArgs: Seq[String] = Seq(),
  waitForWarmup: Boolean = true,
  stage: Stage = Stage.DEVELOPMENT,
  useSocksProxy: Boolean = false,
  skipAppMain: Boolean = false,
  defaultRequestHeaders: Map[String, String] = Map(),
  streamResponse: Boolean = false)
  extends EmbeddedApp(
    app = twitterServer,
    clientFlags = resolveClientFlags(useSocksProxy, clientFlags),
    resolverMap = Map(),
    extraArgs = extraArgs,
    waitForWarmup = waitForWarmup,
    skipAppMain = skipAppMain,
    stage = stage) {

  /* Constructor */

  // Add framework override modules: bind the stats receiver to the in-memory
  // implementation so tests can inspect counters/stats/gauges directly.
  if (isGuiceApp) {
    guiceApp.addFrameworkOverrideModules(InMemoryStatsReceiverModule)
  }

  /* Lazy Fields */

  // Client pointed at the server's HTTP admin interface.
  // Forces the server to start on first use.
  lazy val httpAdminClient = {
    start()
    createHttpClient(
      "httpAdminClient",
      twitterServer.httpAdminPort)
  }

  // For Guice apps, use the injector-provided receiver (bound in-memory by the
  // module added above); otherwise create a standalone in-memory receiver.
  lazy val statsReceiver = if (isGuiceApp) injector.instance[StatsReceiver] else new InMemoryStatsReceiver
  lazy val inMemoryStatsReceiver = statsReceiver.asInstanceOf[InMemoryStatsReceiver]
  lazy val adminHostAndPort = PortUtils.loopbackAddressForPort(twitterServer.httpAdminPort)

  // Thrift port of the started server.
  // NOTE(review): `.get` assumes the server actually serves thrift — confirm callers.
  def thriftPort: Int = {
    start()
    twitterServer.thriftPort.get
  }

  def thriftHostAndPort: String = {
    PortUtils.loopbackAddressForPort(thriftPort)
  }

  /* Protected */

  // A non-Guice twitter-server counts as started once its admin port is bound (non-zero).
  override protected def nonGuiceAppStarted(): Boolean = {
    twitterServer.httpAdminPort != 0
  }

  override protected def logAppStartup() {
    Banner.banner("Server Started: " + appName)
    println(s"AdminHttp -> http://$adminHostAndPort/admin")
  }

  /* Public */

  lazy val isGuiceTwitterServer = twitterServer.isInstanceOf[App]

  // Idempotent close: only delegates to super the first time.
  override def close() {
    if (!closed) {
      super.close()
      closed = true
    }
  }

  // Reset all in-memory metrics, e.g. between test cases.
  def clearStats() = {
    inMemoryStatsReceiver.counters.clear()
    inMemoryStatsReceiver.stats.clear()
    inMemoryStatsReceiver.gauges.clear()
  }

  // Dump all collected stats, counters and gauges to stdout for debugging.
  def printStats() {
    // Stat keys are hierarchical sequences; render them as a slash-joined path.
    def prettyKeys(keys: Seq[String]): String = {
      keys.mkString("/")
    }
    banner(appName + " Stats")
    for ((keys, values) <- inMemoryStatsReceiver.stats.iterator.toSeq.sortBy {_._1.head}) {
      val avg = values.sum / values.size
      println(prettyKeys(keys) + "\\t = Avg " + avg + " with values " + values.mkString(", "))
    }
    for ((keys, value) <- inMemoryStatsReceiver.counters.iterator.toSeq.sortBy {_._1.head}) {
      println(prettyKeys(keys) + "\\t = " + value)
    }
    for ((keys, value) <- inMemoryStatsReceiver.gauges.iterator.toSeq.sortBy {_._1.head}) {
      println(prettyKeys(keys) + "\\t = " + value)
    }
  }

  // Assert the admin /health endpoint reports the expected state.
  def assertHealthy(healthy: Boolean = true) {
    val expectedBody = if (healthy) "OK\\n" else ""
    httpGetAdmin(
      "/health",
      andExpect = Status.Ok,
      withBody = expectedBody)
  }

  /**
   * Issue a GET against the admin interface and verify the response.
   *
   * @param path         admin path to request
   * @param accept       optional Accept header media type
   * @param headers      additional request headers
   * @param suppress     when true, skip request/response logging
   * @param andExpect    expected HTTP status (null disables the check)
   * @param withLocation expected Location header suffix (null disables the check)
   * @param withBody     expected exact response body (null disables the check)
   */
  def httpGetAdmin(
    path: String,
    accept: MediaType = null,
    headers: Map[String, String] = Map(),
    suppress: Boolean = false,
    andExpect: HttpResponseStatus = Status.Ok,
    withLocation: String = null,
    withBody: String = null): Response = {
    start()
    val request = createApiRequest(path, HttpMethod.GET)
    httpExecute(httpAdminClient, request, addAcceptHeader(accept, headers), suppress, andExpect, withLocation, withBody)
  }

  // Prepend the admin-port and log-level flags to the inherited argument list.
  override protected def combineArgs() = {
    adminAndLogArgs ++ super.combineArgs
  }

  /**
   * Send `request` through `client`, optionally log request/response, and
   * assert on status/body/location for every non-null expectation.
   */
  protected def httpExecute(
    client: Service[Request, Response],
    request: Request,
    headers: Map[String, String] = Map(),
    suppress: Boolean = false,
    andExpect: HttpResponseStatus = Status.Ok,
    withLocation: String = null,
    withBody: String = null): Response = {

    /* Pre - Execute */
    printRequest(request, suppress)

    /* Execute */
    val response = handleRequest(request, client = client, additionalHeaders = headers)

    /* Post - Execute */
    printResponseMetadata(response, suppress)
    printResponseBody(response, suppress)

    if (andExpect != null && response.status != andExpect) {
      response.status should equal(andExpect)
    }

    if (withBody != null) {
      response.contentString should equal(withBody)
    }

    if (withLocation != null) {
      response.location.get should endWith(withLocation)
    }

    response
  }

  /**
   * Build a finagle HTTP client bound to the loopback address on `port`.
   * Retries use `httpRetryPolicy` by default; chunk aggregation is disabled
   * when `streamResponse` is set on the server.
   */
  protected def createHttpClient(
    name: String,
    port: Int,
    tcpConnectTimeout: Duration = 60.seconds,
    connectTimeout: Duration = 60.seconds,
    requestTimeout: Duration = 300.seconds,
    retryPolicy: RetryPolicy[Try[Any]] = httpRetryPolicy,
    secure: Boolean = false): Service[Request, Response] = {

    val host = new InetSocketAddress(PortUtils.loopbackAddress, port)
    val builder = ClientBuilder()
      .name(name)
      .codec(RichHttp[Request](Http(), aggregateChunks = !streamResponse))
      .tcpConnectTimeout(tcpConnectTimeout)
      .connectTimeout(connectTimeout)
      .requestTimeout(requestTimeout)
      .hosts(host)
      .hostConnectionLimit(75)
      .retryPolicy(retryPolicy)
      .reportTo(NullStatsReceiver)
      .failFast(false)

    if (secure)
      builder.tlsWithoutValidation().build()
    else
      builder.build()
  }

  // Apply default and per-call headers, execute the request and block for the result.
  private def handleRequest(request: Request, client: Service[Request, Response], additionalHeaders: Map[String, String] = Map()): Response = {
    // Don't overwrite request.headers set by RequestBuilder in httpFormPost.
    val defaultNewHeaders = defaultRequestHeaders filterKeys {!request.headerMap.contains(_)}
    addOrRemoveHeaders(request, defaultNewHeaders)
    addOrRemoveHeaders(request, additionalHeaders) //additional headers get added second so they can overwrite defaults

    val futureResponse = client(request)
    val elapsed = Stopwatch.start()
    try {
      Await.result(futureResponse)
    } catch {
      case e: Throwable =>
        println("ERROR in request: " + request + " " + e + " in " + elapsed().inUnit(MILLISECONDS) + " ms")
        throw e
    }
  }

  /* Private */

  // Retry only on ChannelClosedException, up to 15 times with a 1s constant backoff.
  protected def httpRetryPolicy: RetryPolicy[Try[Any]] = {
    backoff(
      constant(1.second) take 15) {
      case Throw(e: ChannelClosedException) =>
        println("Retrying ChannelClosedException")
        true
    }
  }

  // Log method, uri, headers and (when non-empty) the request body.
  private def printRequest(request: Request, suppress: Boolean) {
    if (!suppress) {
      val headers = request.headerMap.mkString(
        "[Header]\\t",
        "\\n[Header]\\t",
        "")
      val msg = "HTTP " + request.method + " " + request.uri + "\\n" + headers
      if (request.contentString.isEmpty)
        banner(msg)
      else
        banner(msg + "\\n" + prettyRequestBody(request))
    }
  }

  // Hook for subclasses to pretty-print request bodies; default is the raw content.
  protected def prettyRequestBody(request: Request): String = {
    request.contentString
  }

  private def printResponseMetadata(response: Response, suppress: Boolean) {
    if (!suppress) {
      println("-" * 75)
      println("[Status]\\t" + response.status)
      println(response.headerMap.mkString(
        "[Header]\\t",
        "\\n[Header]\\t",
        ""))
    }
  }

  private def printResponseBody(response: Response, suppress: Boolean) {
    if (!suppress) {
      if (response.contentString.isEmpty) {
        println("*EmptyBody*")
      }
      else {
        printNonEmptyResponseBody(response)
      }
    }
  }

  // Hook for subclasses to pretty-print response bodies; default is the raw content.
  protected def printNonEmptyResponseBody(response: Response): Unit = {
    println(response.contentString)
    println()
  }

  private def adminAndLogArgs = Array(
    "-admin.port=" + PortUtils.ephemeralLoopback,
    "-log.level=INFO")

  // Deletes request headers with null-values in map.
  private def addOrRemoveHeaders(request: Request, headers: Map[String, String]): Unit = {
    for ((key, value) <- headers) {
      if (value == null) {
        request.headers.remove(key)
      } else {
        request.headers.set(key, value)
      }
    }
  }

  // Build a request for `path`, stripping scheme/host when a full URL is passed.
  protected def createApiRequest(path: String, method: HttpMethod = Method.Get) = {
    val pathToUse = if (path.startsWith("http"))
      URI.create(path).getPath
    else
      path

    Request(method, pathToUse)
  }

  // Add an Accept header when a media type was supplied.
  private def addAcceptHeader(accept: MediaType, headers: Map[String, String]): Map[String, String] = {
    if (accept != null)
      headers + (HttpHeaders.ACCEPT -> accept.toString)
    else
      headers
  }
}
| tom-chan/finatra | inject/inject-server/src/test/scala/com/twitter/inject/server/EmbeddedTwitterServer.scala | Scala | apache-2.0 | 11,082 |
package reddit_bot.service
import java.util.HashSet
import scala.jdk.CollectionConverters._
import org.apache.commons.lang3.time.DateUtils
import org.slf4j.{Logger, LoggerFactory}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import reddit_bot.repository.LinkSendingRepository
import java.util.Calendar
import java.util.Date
import java.util.GregorianCalendar
import reddit_bot.entity.Subreddit
@Service
class SentLinksCounting(@Autowired linkSendingRepository: LinkSendingRepository){
def countLinksSentRecently( subreddit:Subreddit):Int =
linkSendingRepository.countLinksSentAfter(subreddit, DateUtils.truncate(new Date(), Calendar.DATE))
def feedsSentRecently(subreddit: Subreddit): Set[Long] = {
Option(subreddit.getRecentFeedsWindow())
.map(feedsWindow => {
var windowCalendar = new GregorianCalendar()
windowCalendar.add(Calendar.DATE, -1 * feedsWindow)
windowCalendar.getTime
})
.map( DateUtils.truncate(_, Calendar.DATE) )
.map( linkSendingRepository.feedsSentAfter(subreddit, _).asScala.map(Long2long(_) ).toSet )
.getOrElse( Set[Long]() )
}
def feedsSentToday( subreddit: Subreddit ) : Set[Long] =
linkSendingRepository.feedsSentAfter( subreddit, DateUtils.truncate( new Date(), Calendar.DATE ) ).asScala.map(Long2long).toSet
} | vitalijzad/java-rss-to-reddit | src/main/scala/reddit_bot/service/SentLinksCounting.scala | Scala | apache-2.0 | 1,430 |
package org.reactress
package bench
import scala.collection._
import org.scalameter.api._
/** Memory-footprint regression benchmark comparing reactive and standard mutable hash tables. */
class HashTableMemoryBench extends PerformanceTest.Regression {

  // Results are not persisted between runs.
  def persistor = Persistor.None

  // Measure retained heap size instead of running time.
  override def measurer = new Executor.Measurer.MemoryFootprint

  val hashTableSizes = Gen.range("size")(10000, 50000, 10000)

  performance of "memory" config(
    exec.minWarmupRuns -> 10,
    exec.maxWarmupRuns -> 30,
    exec.benchRuns -> 30,
    exec.independentSamples -> 1
  ) in {

    using(hashTableSizes) curve("ReactMap") in { size =>
      val reactMap = ReactMap[Int, String]
      (0 until size).foreach(i => reactMap(i) = "value")
      reactMap
    }

    using(hashTableSizes) curve("ReactTable") in { size =>
      val reactTable = ReactTable[Int, Int]
      (0 until size).foreach(i => reactTable(i) = i)
      reactTable
    }

    using(hashTableSizes) curve("ReactSet") in { size =>
      val reactSet = ReactSet[Int]
      (0 until size).foreach(i => reactSet += i)
      reactSet
    }

    using(hashTableSizes) curve("mutable.HashMap") in { size =>
      val hashMap = mutable.Map[Int, String]()
      (0 until size).foreach(i => hashMap(i) = "value")
      hashMap
    }

    using(hashTableSizes) curve("mutable.HashSet") in { size =>
      val hashSet = mutable.Set[Int]()
      (0 until size).foreach(i => hashSet += i)
      hashSet
    }
  }
}
| axel22/reactive-collections | src/test/scala/org/reactress/bench/HashTableMemoryBench.scala | Scala | bsd-3-clause | 1,271 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.race.Race
import io.truthencode.ddo.support.requisite.{
FeatRequisiteImpl,
RaceRequisite,
RequiresAllOfFeat,
RequiresAnyOfRace
}
/**
* Created by adarr on 2/20/2017.
*/
trait MithralFluidity
  extends FeatRequisiteImpl with RaceRequisite with Passive with RequiresAnyOfRace
    with RequiresAllOfFeat with ArtificerBonusFeat {
  self: RacialFeat =>

  // Requisite race pairs: Warforged or Bladeforged, each paired with 1.
  override def anyOfRace: Seq[(Race, Int)] =
    Seq(Race.Warforged -> 1, Race.Bladeforged -> 1)

  // Mithral Body is a prerequisite feat.
  override def allOfFeats: Seq[Feat] = List(RacialFeat.MithralBody)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/MithralFluidity.scala | Scala | apache-2.0 | 1,281 |
package com.twitter.finagle.loadbalancer
import com.twitter.finagle._
import com.twitter.finagle.service.FailingFactory
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.finagle.util.Rng
import com.twitter.util._
import java.util.concurrent.atomic.AtomicInteger
import org.junit.runner.RunWith
import org.scalactic.Tolerance
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import scala.collection.mutable
private trait ApertureTesting {
  val N = 100000

  class Empty extends Exception

  // Test balancer: an Aperture balancer with deterministic randomness and
  // protected internals re-exposed via *x methods for assertions.
  protected trait TestBal extends Balancer[Unit, Unit] with Aperture[Unit, Unit] {
    protected val rng = Rng(12345L)
    protected val emptyException = new Empty
    protected val maxEffort = 10
    protected def statsReceiver = NullStatsReceiver
    protected val minAperture = 1

    // Acquire n services concurrently, then close them all; used to drive load
    // through the balancer so the Counts below observe the distribution.
    def applyn(n: Int): Unit = {
      val factories = Await.result(Future.collect(Seq.fill(n)(apply())))
      Await.result(Closable.all(factories:_*).close())
    }

    // Expose some protected methods for testing
    def adjustx(n: Int) = adjust(n)
    def aperturex: Int = aperture
    def unitsx: Int = units
  }

  // Instrumented factory: n counts total acquisitions, p counts currently
  // outstanding (open) services.
  class Factory(val i: Int) extends ServiceFactory[Unit, Unit] {
    var n = 0
    var p = 0

    def clear() { n = 0 }

    def apply(conn: ClientConnection) = {
      n += 1
      p += 1
      Future.value(new Service[Unit, Unit] {
        def apply(unit: Unit) = ???
        override def close(deadline: Time) = {
          p -= 1
          Future.Done
        }
      })
    }

    // Mutable status so tests can simulate hosts going down/up.
    @volatile var _status: Status = Status.Open
    override def status = _status
    def status_=(v: Status) { _status = v }

    def close(deadline: Time) = ???
  }

  // Collection of instrumented factories, keyed by index; the observed
  // aperture is the number of factories that received any traffic.
  class Counts extends Iterable[Factory] {
    val factories = new mutable.HashMap[Int, Factory]

    def iterator = factories.values.iterator

    def clear() {
      factories.values.foreach(_.clear())
    }

    def aperture = nonzero.size

    def nonzero = factories.filter({
      case (_, f) => f.n > 0
    }).keys.toSet

    def apply(i: Int) = factories.getOrElseUpdate(i, new Factory(i))

    def range(n: Int): Traversable[ServiceFactory[Unit, Unit]] =
      Traversable.tabulate(n) { i => apply(i) }
  }
}
@RunWith(classOf[JUnitRunner])
private class ApertureTest extends FunSuite with ApertureTesting {
  import Tolerance._

  protected class Bal extends TestBal with LeastLoaded[Unit, Unit]

  test("Balance only within the aperture") {
    val counts = new Counts
    val bal = new Bal

    bal.update(counts.range(10))
    assert(bal.unitsx === 10)
    bal.applyn(100)
    // Minimum aperture: only one factory sees traffic.
    assert(counts.aperture === 1)

    // Widening by one admits a second factory.
    bal.adjustx(1)
    bal.applyn(100)
    assert(counts.aperture === 2)

    counts.clear()
    bal.adjustx(-1)
    bal.applyn(100)
    assert(counts.aperture === 1)
  }

  test("Don't operate outside of aperture range") {
    val counts = new Counts
    val bal = new Bal

    bal.update(counts.range(10))
    // Adjustments are clamped to [1, units].
    bal.adjustx(10000)
    bal.applyn(1000)
    assert(counts.aperture === 10)

    counts.clear()
    bal.adjustx(-100000)
    bal.applyn(1000)
    assert(counts.aperture === 1)
  }

  test("Increase aperture to match available hosts") {
    val counts = new Counts
    val bal = new Bal

    bal.update(counts.range(10))
    bal.adjustx(1)
    bal.applyn(100)
    assert(counts.aperture === 2)

    // Since tokens are assigned, we don't know apriori what's in the
    // aperture*, so figure it out by observation.
    //
    // *Ok, technically we can, since we're using deterministic
    // randomness.
    val keys2 = counts.nonzero
    counts(keys2.head).status = Status.Closed

    bal.applyn(100)
    // A closed host inside the aperture causes it to widen by one.
    assert(counts.aperture === 3)

    // Apertures are additive.
    assert(keys2.forall(counts.nonzero.contains))

    // When we shrink again, we should use the same keyset.
    counts(keys2.head).status = Status.Open
    counts.clear()
    bal.applyn(100)
    assert(counts.nonzero === keys2)
  }

  test("Empty vectors") {
    val bal = new Bal
    // With no factories registered, acquisition fails with the test exception.
    intercept[Empty] { Await.result(bal.apply()) }
  }

  test("Nonavailable vectors") {
    val counts = new Counts
    val bal = new Bal

    bal.update(counts.range(10))
    for (f <- counts)
      f.status = Status.Closed

    bal.applyn(1000)
    // The correctness of this behavior could be argued either way.
    assert(counts.aperture === 1)
    val Seq(badkey) = counts.nonzero.toSeq
    val goodkey = (badkey + 1) % 10
    counts(goodkey).status = Status.Open

    counts.clear()
    bal.applyn(1000)
    // Traffic shifts entirely to the single re-opened host.
    assert(counts.nonzero === Set(goodkey))
  }
}
@RunWith(classOf[JUnitRunner])
private class LoadBandTest extends FunSuite with ApertureTesting {
  import Tolerance._

  val rng = Rng()

  // Aperture balancer whose width is driven by a load band: widen when the
  // average load exceeds `highLoad`, narrow when it drops below `lowLoad`.
  class Bal(protected val lowLoad: Double, protected val highLoad: Double)
    extends TestBal with LoadBand[Unit, Unit] {
    def this() = this(0.5, 2.0)
    // No smoothing window: the controller reacts instantaneously, which
    // keeps the assertions below deterministic.
    protected def smoothWin = Duration.Zero
  }

  // Simple running arithmetic mean over Int samples.
  // NOTE(review): apply() divides by n and yields NaN before any update()
  // call; all usages below update at least once first -- confirm if reused.
  class Avg {
    var n = 0
    var sum = 0

    def update(v: Int) {
      n += 1
      sum += v
    }

    def apply(): Double = sum.toDouble/n
  }

  test("Aperture tracks concurrency") {
    val counts = new Counts
    val low = 0.5
    val high = 2.0
    val bal = new Bal(lowLoad = low, highLoad = high)

    val numNodes = rng.nextInt(100)
    bal.update(counts.range(numNodes))

    // Ramp concurrency up to numNodes and back down again.
    val start = (high+1).toInt
    val concurrency = (start to numNodes) ++ ((numNodes-1) to start by -1)

    for (c <- concurrency) {
      var ap = 0

      // We load the balancer with `c` outstanding requests each
      // run and sample the load. However, because the aperture
      // distributor employs P2C we are forced to take a
      // statistical view of things.
      val avgLoad = new Avg

      for (i <- 0 to 1000) {
        counts.clear()
        val factories = Seq.fill(c) { Await.result(bal.apply()) }
        for (f <- counts if f.n > 0) { avgLoad.update(f.p) }
        // no need to avg ap, it's independent of the load distribution
        ap = bal.aperturex
        Await.result(Closable.all(factories:_*).close())
      }

      // The controller tracks the avg concurrency over
      // the aperture. For every `high` we detect, we widen
      // the aperture.
      // TODO: We should test this with a smoothWin to get a more
      // accurate picture of the lag in our adjustments.
      assert(math.abs(c/high - ap) <= 1)

      // The changes to the aperture should correlate to
      // the avg load per node but note that the distributor
      // and controller work independently.
      assert(math.abs(avgLoad() - high) <= 1)

      // Closing every checked-out factory must return all loads to zero.
      assert(counts.forall(_.p == 0))
    }
  }
}
| suls/finagle | finagle-core/src/test/scala/com/twitter/finagle/loadbalancer/ApertureTest.scala | Scala | apache-2.0 | 6,597 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.utils
object Platform {

  /** Version number string of the Scala library on the classpath. */
  def scalaVersion: String =
    scala.util.Properties.versionNumberString

  /** Returns `true` if and only if the code is executing on a JVM.
   *  Note: Returns `false` when executing on any JS VM.
   *  (This is the JVM variant of the platform probe, so it is fixed.)
   */
  final val executingInJVM = true

  def executingInJVMOnJDK6: Boolean = jdkVersion == 6
  def executingInJVMOnJDK7OrLower: Boolean = jdkVersion <= 7
  def executingInJVMOnJDK8OrLower: Boolean = jdkVersion <= 8

  /** Major JDK version: "1.x..." (JDK <= 8) parses to x, "9+" parses the
   *  leading digits of the version string.
   */
  private lazy val jdkVersion: Int = {
    val raw = System.getProperty("java.version")
    val normalized = if (raw.startsWith("1.")) raw.substring(2) else raw
    normalized.takeWhile(_.isDigit).toInt
  }

  // Linker/semantics switches are all fixed on the JVM: there is no
  // fullOpt stage, and every runtime check behaves compliantly.
  def isInFullOpt: Boolean = false
  def hasCompliantAsInstanceOfs: Boolean = true
  def hasCompliantArrayIndexOutOfBounds: Boolean = true
  def hasCompliantModule: Boolean = true
  def hasStrictFloats: Boolean = true
}
| SebsLittleHelpers/scala-js | test-suite/jvm/src/main/scala/org/scalajs/testsuite/utils/Platform.scala | Scala | apache-2.0 | 1,185 |
package ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.persistence
/**
* Created by Mattia on 19.01.2015.
*/
import ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.integrationtest.console.ConsoleIntegrationTest._
import ch.uzh.ifi.pdeboer.pplib.util.LazyLogger
import scalikejdbc._
import scalikejdbc.config.DBs
// Mix-in that triggers database initialization on construction; the
// companion's initialize() is idempotent, so multiple mix-ins are safe.
trait DBSettings {
  DBSettings.initialize()
}
object DBSettings extends LazyLogger {
private var isInitialized = false
/**
 * Initializes the database layer exactly once (connection pools, SQL error
 * logging, schema bootstrap). Synchronized and idempotent, so it is safe to
 * call from every DBSettings mix-in.
 */
def initialize(): Unit = this.synchronized {
  if (!isInitialized) {
    DBs.setupAll()
    GlobalSettings.loggingSQLErrors = true
    DBInitializer.run()
    isInitialized = true
    logger.debug("Database initialized")
  } else {
    // Bug fix: previously this was logged unconditionally, i.e. also on the
    // very first initialization. It only applies to repeat calls.
    logger.debug("Database already initialized")
  }
}
// Loads question permutations from the CSV at `path`, but only when `init`
// is the literal string "init" (case-insensitive).
// NOTE(review): the actual load is commented out, so this currently only
// logs -- confirm whether the dao call should be restored.
def loadPermutations(init: String, path: String): Unit = {
  if (init.equalsIgnoreCase("init")) {
    logger.info("Loading permutations...")
    //dao.loadPermutationsCSV(path)
  }
}
} | manuelroesch/PaperValidator | app/helper/questiongenerator/persistence/DBSettings.scala | Scala | mit | 879 |
package es.own3dh2so4.ch6
import java.sql.Timestamp
import java.text.SimpleDateFormat

import es.own3dh2so4.Properties
import es.own3dh2so4.model.Order
import org.apache.spark._
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming._
import org.apache.spark.streaming.dstream.{DStream, PairDStreamFunctions}

import scala.util.control.NonFatal
/**
* Created by david on 27/05/17.
*/
object SparkStreaming extends App {

  val prop = Properties()

  // Spark session configuration, with a local-mode fallback.
  val sparkMaster = prop("spark.session.master").getOrElse("local[*]")
  val sparkAppName = prop("spark.session.appName").getOrElse("Spark App")

  // Folder layout: streaming input, results output and checkpoint scratch.
  val inputFiles = prop("input.folder").getOrElse("") + "orders-streaming/"
  val outputFiles = prop("output.folder").getOrElse("") + "orders-streaming/output"
  val tmpFiles = prop("tmp.folder").getOrElse("") + "orders-streaming/"

  val spark = SparkSession.builder.
    master(sparkMaster).appName(sparkAppName).getOrCreate()
  val sc = spark.sparkContext
  // updateStateByKey below requires a checkpoint directory.
  sc.setCheckpointDir(tmpFiles)

  val ssc = new StreamingContext(sc, Seconds(5))
  val filestream = ssc.textFileStream(inputFiles)

  // Parse each CSV line into an Order. Column 6 must be "B" (buy) or
  // "S" (sell); malformed lines are logged and dropped.
  val orders = filestream.flatMap { line =>
    // SimpleDateFormat is not thread-safe, so build one per record.
    val dateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss")
    val s = line.split(",")
    try {
      assert(s(6) == "B" || s(6) == "S")
      List(Order(new Timestamp(dateFormat.parse(s(0)).getTime), s(1).toLong, s(2).toLong, s(3), s(4).toInt, s(5).toDouble,
        s(6) == "B"))
    } catch {
      // Bug fix: previously caught Throwable, which also swallowed fatal
      // JVM errors. NonFatal lets those propagate while still dropping
      // recoverable parse failures.
      case NonFatal(_) =>
        println("Wrong line format")
        List()
    }
  }

  println("Contar el numero de orders de cada tipo")
  // Per-batch count of orders per type (buy = true, sell = false).
  val numPerType = orders.map(o => (o.buy, 1L)).reduceByKey(_ + _)
  //numPerType.repartition(1).saveAsTextFiles(outputFiles)

  // Running total (across batches) of the amount traded per client.
  val amountPerClient = orders.map(o => (o.clientId, o.amount * o.price))
  val amountState = amountPerClient.updateStateByKey((vals, totalOpt: Option[Double]) => {
    totalOpt match {
      case Some(total) => Some(vals.sum + total)
      case None => Some(vals.sum)
    }
  })

  // Top 5 clients by accumulated amount, recomputed each batch.
  val top5Clients = amountState.transform(_.sortBy(_._2, ascending = false).
    zipWithIndex().filter(_._2 < 5).map(_._1))

  // Tag the three result streams and merge them into a single output.
  val buySellList = numPerType.map(x =>
    if (x._1) ("BUYS", List(x._2.toString))
    else ("SELLS", List(x._2.toString)))
  val top5clList = top5Clients.repartition(1).map(_._1.toString).
    glom.map(arr => ("TOP5CLIENTS", arr.toList))

  val finalStream = buySellList.union(top5clList)
  finalStream.repartition(1).saveAsTextFiles(outputFiles)

  ssc.start()
  ssc.awaitTermination()
}
| own3dh2so4/spark-in-action-book | src/main/scala/es/own3dh2so4/ch6/SparkStreaming.scala | Scala | apache-2.0 | 2,573 |
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
import scala.tools.reflect._
object Test extends dotty.runtime.LegacyApp {
  val tb = cm.mkToolBox()
  val t1 = tb.parse("1 to 3 map (_+1)")
  // Re-evaluating the same parsed tree must keep producing the same result.
  for (_ <- 1 to 3)
    println(tb.eval(t1))
}
| yusuke2255/dotty | tests/pending/run/t6287.scala | Scala | bsd-3-clause | 304 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.io
import com.spotify.scio.{registerSysProps, SysProp}
import org.apache.beam.sdk.util.{BackOff, BackOffUtils, FluentBackoff, Sleeper}
import org.joda.time.Duration
import org.slf4j.LoggerFactory
import scala.concurrent.{Future, Promise}
/** Exception used to fail a tap future when the tap is not (yet) available. */
class TapNotAvailableException(msg: String) extends Exception(msg)
/** Utility for managing `Future[Tap[T]]`s. */
trait Taps {

  /** Get a `Future[Tap[String]]` for a text file. */
  def textFile(path: String): Future[Tap[String]] =
    mkTap(s"Text: $path", () => isPathDone(path), () => TextTap(path))

  // A path counts as ready once the underlying FileStorage reports it done.
  private[scio] def isPathDone(path: String): Boolean = FileStorage(path).isDone

  /**
   * Make a tap, to be implemented by concrete classes.
   *
   * @param name
   *   unique name of the tap
   * @param readyFn
   *   function to check if the tap is ready
   * @param tapFn
   *   function to create the tap
   */
  private[scio] def mkTap[T](
    name: String,
    readyFn: () => Boolean,
    tapFn: () => Tap[T]
  ): Future[Tap[T]]
}
/** Taps implementation that fails immediately if tap not available. */
final private class ImmediateTaps extends Taps {
  override private[scio] def mkTap[T](
    name: String,
    readyFn: () => Boolean,
    tapFn: () => Tap[T]
  ): Future[Tap[T]] = {
    // Guard-clause form: fail fast when the tap is not ready.
    if (!readyFn()) Future.failed(new TapNotAvailableException(name))
    else Future.successful(tapFn())
  }
}
private object PollingTaps {
  private val logger = LoggerFactory.getLogger(this.getClass)

  // One outstanding poll: how to check readiness, how to build the tap once
  // ready, and the promise to complete with the result.
  final case class Poll(
    name: String,
    readyFn: () => Boolean,
    tapFn: () => Tap[Any],
    promise: Promise[AnyRef]
  )
}
/** Taps implementation that polls for tap availability in the background. */
final private class PollingTaps(private[this] val backOff: BackOff) extends Taps {
  import PollingTaps._

  // Pending polls, guarded by `this`. Lazily initialized so the background
  // polling future is only started by the first mkTap call.
  private[this] var polls: List[Poll] = _

  override private[scio] def mkTap[T](
    name: String,
    readyFn: () => Boolean,
    tapFn: () => Tap[T]
  ): Future[Tap[T]] =
    this.synchronized {
      val p = Promise[AnyRef]()
      val init = if (polls == null) {
        polls = Nil
        true
      } else {
        false
      }

      logger.info(s"Polling for tap $name")
      polls +:= Poll(name, readyFn, tapFn.asInstanceOf[() => Tap[Any]], p)

      if (init) {
        import scala.concurrent.ExecutionContext.Implicits.global
        Future {
          val sleeper = Sleeper.DEFAULT
          do {
            if (polls.nonEmpty) {
              val tap = if (polls.size > 1) "taps" else "tap"
              logger.info(s"Polling for ${polls.size} $tap")
            }
            this.synchronized {
              val (ready, pending) = polls.partition(_.readyFn())
              ready.foreach { p =>
                logger.info(s"Tap available: ${p.name}")
                // Bug fix: previously completed with `tapFn()`, the factory
                // captured by whichever mkTap call started this loop, so
                // every ready poll got the first tap's result. Each poll
                // must be completed with its own tap factory.
                p.promise.success(p.tapFn())
              }
              polls = pending
            }
          } while (BackOffUtils.next(sleeper, backOff))
          // Backoff exhausted: fail every poll that never became ready.
          polls.foreach(p => p.promise.failure(new TapNotAvailableException(p.name)))
        }
      }

      p.future.asInstanceOf[Future[Tap[T]]]
    }
}
/** Companion object for [[Taps]]. */
object Taps extends {
  import TapsSysProps._

  /** Default taps algorithm. */
  val AlgorithmDefault = "immediate"

  /** Default polling taps maximum interval. */
  val PollingMaximumIntervalDefault = "600000"

  /** Default polling taps initial interval. */
  val PollingInitialIntervalDefault = "10000"

  /** Default polling taps maximum number of attempts. */
  val PollingMaximumAttemptsDefault = "0"

  /**
   * Create a new [[Taps]] instance.
   *
   * Taps algorithm can be set via the `taps.algorithm` property. Available algorithms are
   * `immediate` (default) and `polling`.
   *
   * Additional properties can be set for the `polling` algorithm.
   *
   * - `taps.polling.maximum_interval`: maximum interval between polls.
   *
   * - `taps.polling.initial_interval`: initial interval between polls.
   *
   * - `taps.polling.maximum_attempts`: maximum number of attempts, unlimited if <= 0. Default is
   * 0.
   */
  def apply(): Taps =
    Algorithm.value(AlgorithmDefault) match {
      case "immediate" => new ImmediateTaps
      case "polling" =>
        val maxAttempts =
          PollingMaximumAttempts.value(PollingMaximumAttemptsDefault).toInt
        val initInterval =
          PollingInitialInterval.value(PollingInitialIntervalDefault).toLong
        // Unlimited attempts: bound the backoff interval instead of the
        // retry count; otherwise bound the number of retries.
        val backOff = if (maxAttempts <= 0) {
          val maxInterval =
            PollingMaximumInterval.value(PollingMaximumIntervalDefault).toLong
          FluentBackoff.DEFAULT
            .withInitialBackoff(Duration.millis(initInterval))
            .withMaxBackoff(Duration.millis(maxInterval))
            .backoff()
        } else {
          FluentBackoff.DEFAULT
            .withInitialBackoff(Duration.millis(initInterval))
            .withMaxRetries(maxAttempts)
            .backoff()
        }
        new PollingTaps(backOff)
      case t => throw new IllegalArgumentException(s"Unsupported Taps $t")
    }
}
// System-property keys that configure [[Taps]]; annotated for registration
// in the generated sys-prop documentation.
@registerSysProps
object TapsSysProps {
  val Algorithm: SysProp = SysProp("taps.algorithm", "System property key for taps algorithm")
  val PollingMaximumInterval: SysProp = SysProp(
    "taps.polling.maximum_interval",
    "System property key for polling taps maximum interval in milliseconds"
  )
  val PollingInitialInterval: SysProp = SysProp(
    "taps.polling.initial_interval",
    "System property key for polling taps initial interval in milliseconds"
  )
  val PollingMaximumAttempts: SysProp = SysProp(
    "taps.polling.maximum_attempts",
    "System property key for polling taps maximum number of attempts, unlimited if <= 0. " +
      "Default is 0"
  )
}
| spotify/scio | scio-core/src/main/scala/com/spotify/scio/io/Taps.scala | Scala | apache-2.0 | 6,358 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
// AC456: "Current assets (previous PoA)" -- an optional monetary input box
// on the FRS 105 accounts.
case class AC456(value: Option[Int]) extends CtBoxIdentifier(name = "Current assets (previous PoA)")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs105AccountsBoxRetriever] {

  // Valid when absent or a non-negative amount (min = 0).
  override def validate(boxRetriever: Frs105AccountsBoxRetriever): Set[CtValidation] = {
    collectErrors(
      validateMoney(value, min = 0)
    )
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC456.scala | Scala | apache-2.0 | 1,103 |
package com.enkidu.lignum.parsers.ast.expression.discardable.literals
/** Boolean literal AST node; `value` retains the raw source text. */
case class BooleanLiteral(value: String) extends Literal
| marek1840/java-parser | src/main/scala/com/enkidu/lignum/parsers/ast/expression/discardable/literals/BooleanLiteral.scala | Scala | mit | 128 |
import org.apache.spark.rdd.RDD
import org.scalatest._
import techniques.PeakComparison._
// Tests for the peak-detection comparison techniques: each test feeds
// hand-crafted frequency curves into the detectors/metrics and checks the
// detected peaks (as (year, leftDelta, rightDelta) triples) or the
// similarity score (1.0 == identical peak structure).
class PeakTests extends SparkTestUtils with ShouldMatchers {

  // A curve compared against itself must score a perfect 1.0.
  test("testMetricDerivative1") {
    val word1 = Array[Double](1, 1, 1, 199, 1, 1, 1)
    peakDerivativeMetric(("whatever", word1), ("plop", word1), 1) should be(1.0)
  }

  test("windowPeakDerivativeDetection1") {
    val word1 = Array[Double](1, 1, 1, 199, 1, 1, 1)
    windowPeakDerivative(("a", word1), 1, 1) should be(List((4, 198, -198)))
  }

  test("testMetricDerivative2") {
    val word1 = Array[Double](3, 1, 1, 1, 199, 1, 1, 1, 200, 1, 6)
    peakDerivativeMetric(("whatever", word1), ("plop", word1), 1) should be(1.0)
  }

  // Two separated spikes must both be reported.
  test("windowPeakDerivativeDetection2") {
    val word1 = Array[Double](3, 1, 1, 1, 199, 1, 1, 1, 200, 1, 6)
    windowPeakDerivative(("a", word1), 1, 1) should be(List((5, 198, -198), (9, 199, -199)))
  }

  test("testMetricMinMax") {
    val word1 = Array[Double](2, 2, 1, 199, 1, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3, 2)
    peakMaxMinMetric(("whatever", word1), ("plop", word1), 6, 3, 5) should be(1.0)
  }

  // Small oscillations around the baseline must not register as peaks.
  test("windowPeakMinMaxDetection") {
    val word1 = Array[Double](2, 2, 1, 199, 1, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3, 2)
    filterDuplicateYears(windowPeakMinMax(("a", word1), 5, 2)) should be(List((3, 198, 198)))
  }

  test("testMetricMean") {
    val word1 = Array[Double](1, 1, 1, 199, 1, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3, 2)
    peakMeanMetric(("whatever", word1), ("plop", word1), 4, 3, 0) should be(1.0)
  }

  test("windowPeakMeanDetection") {
    val word1 = ("whatever", Array[Double](2, 4, 5, 199, 3, 2, 1))
    windowPeakMean(word1, 1, 0) should be(List((3, 197, 198)))
  }

  test("windowPeakMeanDetection1") {
    val word1 = ("whatever", Array[Double](2, 4, 5, 199, 1, 3, 2, 3, 2, 2, 2, 2, 3, 2, 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2,
      3, 2))
    windowPeakMean(word1, 1, 10) should be(List((3, 197, 198)))
  }

  test("windowPeakMeanDetection2") {
    val word2 = ("bis", Array(2.0, 4.0, 5.0, 199.0, 1.0, 10.0, 2.0, 4.0, 30.0, 300.0, 2.0, 1.0))
    windowPeakMean(word2, 1, 0) should be(List((3, 197.0, 198.0), (5, 9.0, 8.0), (9, 298.0, 299.0)))
  }

  // A wider window merges nearby bumps; only the dominant peak remains.
  test("windowPeakMeanDetection3") {
    val word2 = ("bis", Array(2.0, 4.0, 5.0, 199.0, 1.0, 10.0, 2.0, 4.0, 30.0, 300.0, 2.0, 1.0))
    windowPeakMean(word2, 10, 0) should be(List((9, 299.0, 299.0)))
  }

  test("windowPeakMeanDerivativeDetection") {
    val word2 = ("bis", Array(1, 1, 1, 1, 1, 2.2, 2.3, 2.4, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 16.1, 16.2,
      20, 21, 22, 23, 24, 2.0, 4.0, 5.0, 199.0, 1.0, 10.0, 2.0, 4.0, 30.0, 300.0, 2.0, 1.0))
    windowPeakMeanDerivative(word2, 2, 1) should
      be(List((32, 65.66666666666667, 198.0), (34, 9.0, 8.0), (38, 99.33333333333333, 149.5)))
  }

  // The tests below require the word-frequency input corpus and are kept
  // disabled; they document the intended end-to-end behavior.
  /*sparkTest("Coupabe - Crime ") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("crime", "coupable")).collect()
    println(peakMeanMetric(words(0), words(1)))
  }
  sparkTest("Coupabe - Crime 2") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("crime", "coupable")).collect()
    println(peakMaxMinMetric(words(0), words(1), 10, 10))
  }
  sparkTest("Coupabe - Crime (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("crime", "coupable")).collect()
    val met = peakDerivativeMetric(words(0), words(1))
    println(met)
    met should be > 0.5
  }
  sparkTest("Coupabe - Droite (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("crime", "droite")).collect()
    val met = peakDerivativeMetric(words(0), words(1))
    println(met)
    met should be < 0.5
  }
  sparkTest("Gauche - Droite (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("gauche", "droite")).collect()
    val met = peakDerivativeMetric(words(0), words(1), 5)
    println(met)
    met should be > 0.5
  }
  sparkTest("Landsgemeinde - Appenzell (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("landsgemeinde", "appenzell")).collect()
    val met = peakDerivativeMetric(words(0), words(1), 3)
    println(met)
    met should be > 0.5
  }
  sparkTest("Landsgemeinde - Crime (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("landsgemeinde", "crime")).collect()
    val met = peakDerivativeMetric(words(0), words(1), 3)
    println(met)
    met should be < 0.5
  }
  sparkTest("Rire - Sourire (derivative)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("rire", "sourire")).collect()
    val met = peakDerivativeMetric(words(0), words(1), 3)
    println(met)
    met should be > 0.5
  }
  sparkTest("Similar words to rire") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("rire", "sourire", "crime", "vin", "EPFL", "avocat"))
    val testedWord = words.filter(w => w._1 == "rire").collect().head
    val derivativeWords = peakComparisonWithDerivative(words, testedWord, List(0.5, 10, 1, 1))
    derivativeWords.collect should be(Array("rire", "sourire"))
  }
  sparkTest("Similar words to crime") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("rire", "sourire", "crime", "vin", "EPFL", "avocat"))
    val testedWord = words.filter(w => w._1 == "crime").collect().head
    val derivativeWords = peakComparisonWithDerivative(words, testedWord, List(0.5, 10, 2, 1))
    derivativeWords.collect should be(Array("crime", "coupable"))
  }
  sparkTest("Similar words rire from data") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("rire"))
    val testedWord = words.filter(w => w._1 == "rire").collect().head
    val derivativeWords = peakComparisonWithMeanDerivative(data, testedWord, List(0.5, 5, 2, 1))
    derivativeWords.collect should be(Array("rire", "sourire"))
  }
  sparkTest("Rire - Sourire (derivative-mean)") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    val words = searchWordFormatter(data, List("rire", "crime")).collect()
    val met = peakMeanDerivativeMetric(words(0), words(1), 5, 1, 1)
    println(met)
    met should be < 0.5
  }*/

  // Smoke test: the corpus in `input` parses to a non-trivial RDD.
  sparkTest("count") {
    val inputDir = "input"
    val data = dataFormatter(sc.textFile(inputDir)) //parse data
    println(data.collect().size)
  }
}
| SidneyBovet/smargn | smargn/test/PeakTests.scala | Scala | gpl-2.0 | 7,204 |
package com.arcusys.valamis.persistence.impl.settings
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.settings.model.{Setting, SettingType}
import com.arcusys.valamis.settings.storage.SettingStorage
import scala.slick.driver.JdbcProfile
import scala.slick.jdbc.JdbcBackend
/**
* Created by Igor Borisov on 04.09.15.
*/
// Slick-backed implementation of SettingStorage: persists key/value settings
// in the `settings` table (from SettingTableComponent).
class SettingStorageImpl(db: JdbcBackend#DatabaseDef, val driver: JdbcProfile)
  extends SettingStorage
  with SlickProfile
  with SettingTableComponent {

  import driver.simple._

  // Stores the SettingType enum as its string name and parses it back on read.
  implicit val SettingTypeTypeMapper = MappedColumnType.base[SettingType.SettingType, String](
    s => s.toString,
    s => SettingType.withName(s)
  )

  /** Looks up the setting stored under `key`, if any. */
  override def getByKey(key: SettingType.Value): Option[Setting] = db.withSession { implicit s =>
    settings.filter(_.datakey === key).firstOption
  }

  /** Upserts `value` under `key`: update in place, insert when no row matched. */
  override def modify(key: SettingType.Value, value: String): Unit = db.withTransaction { implicit s =>
    val setting = Setting(key, value)

    val updatedCount = settings
      .filter(_.datakey === key)
      .update(setting)

    // Zero rows updated -> first write for this key.
    if (updatedCount == 0)
      settings.insert(setting)
  }
}
| igor-borisov/valamis | valamis-slick-persistence/src/main/scala/com/arcusys/valamis/persistence/impl/settings/SettingStorageImpl.scala | Scala | gpl-3.0 | 1,153 |
// NOTE: this is a compiler negative test -- each trailing "// error" marker
// is a directive consumed by the test framework asserting that the line
// must fail to typecheck. Do not reformat or move those lines.
class Foo {
  import java.util.ArrayList
  // Test that as we extract return values, we're missing the |UncheckedNull in the return type.
  // i.e. test that the nullability is propagated to nested containers.
  val ll = new ArrayList[ArrayList[ArrayList[String]]]
  val level1: ArrayList[ArrayList[String]] = ll.get(0) // error
  val level2: ArrayList[String] = ll.get(0).get(0) // error
  val level3: String = ll.get(0).get(0).get(0) // error
  val ok: String = ll.get(0).get(0).get(0) // error
}
| som-snytt/dotty | tests/explicit-nulls/neg/interop-propagate.scala | Scala | apache-2.0 | 501 |
package org.scalatest.events
import org.scalatest.junit.JUnit3Suite
import org.scalatest.DoNotDiscover
@DoNotDiscover
class TestLocationMethodJUnit3Suite extends JUnit3Suite with TestLocationMethodServices {
// Expected location-reporting events for this suite, checked by the
// TestLocationMethodServices harness: a JUnit3 suite emits one starting
// pair and one succeeded result, and no scope events.
val suiteTypeName = "org.scalatest.events.TestLocationMethodJUnit3Suite"
val expectedStartingList = List(TestStartingPair("testSucceed(org.scalatest.events.TestLocationMethodJUnit3Suite)", "org.scalatest.events.TestLocationMethodJUnit3Suite", "testSucceed()"))
val expectedResultList = List(TestResultPair(classOf[TestSucceeded], "org.scalatest.events.TestLocationMethodJUnit3Suite", "testSucceed()"))
val expectedScopeOpenedList = Nil
val expectedScopeClosedList = Nil
// Intentionally empty: only the events this test emits are under test.
def testSucceed() {
}
} | hubertp/scalatest | src/test/scala/org/scalatest/events/TestLocationMethodJUnit3Suite.scala | Scala | apache-2.0 | 726 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.