code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* -------------------------------------------------------------------------------------------------
* - Project: Objectify -
* - Copyright: ©2014 Matygo Educational Incorporated operating as Learndot -
* - Author: Arthur Gonigberg (arthur@learndot.com) and contributors (see contributors.txt) -
* - License: Licensed under MIT license (see license.txt) -
* -------------------------------------------------------------------------------------------------
*/
package org.objectify.executor
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
/**
* This class is responsible for invoking instances of classes.
*/
private[executor] object Invoker {

  /**
   * Constructor-injects a resolver class, assuming every constructor parameter
   * is resolved from the single `resolverParam` passed in.
   *
   * Steps: find the (assumed single) constructor, resolve its parameters via
   * the Injector, and instantiate — falling back to the no-arg constructor
   * when nothing needed injecting.
   *
   * @param clazz         the class to instantiate
   * @param resolverParam the value handed to each resolver for injection
   * @param prefix        optional prefix forwarded to the Injector
   * @return a dependency-injected instance of `clazz`
   */
  def invoke[T, P: ClassTag](clazz: Class[_ <: T], resolverParam: P, prefix: String = ""): T = {
    // Assume only one constructor.
    val constructor = clazz.getConstructors.head
    val injectedValues = Injector.getInjectedResolverParams(constructor, resolverParam, prefix)

    if (injectedValues.isEmpty) {
      // No parameterized constructor: use the default no-arg one.
      clazz.newInstance()
    }
    else {
      // Spread the resolved values as var-args into the constructor.
      val args = injectedValues.map(_.asInstanceOf[AnyRef])
      constructor.newInstance(args: _*).asInstanceOf[T]
    }
  }

  /** Instantiates a class through its no-arg constructor. */
  def invoke[T](klass: Class[_ <: T]): T = klass.newInstance()
}
| learndot/Objectify.scala | src/main/scala/org/objectify/executor/Invoker.scala | Scala | mit | 2,273 |
package com.dominikgruber.fpinscala.chapter03
import org.scalatest._
// Exercise 3.19 from "Functional Programming in Scala": implement `filter` on the
// chapter's hand-rolled list.
// NOTE(review): `List` here resolves to the chapter's own List companion (same
// package), not scala.collection.immutable.List — confirm against chapter03 sources.
class Exercise19Spec extends FlatSpec with Matchers {

  "filter" should "remove odd numbers from a list" in {
    val l = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
    // Keep only even elements; relative order of survivors must be preserved.
    List.filter(l)(x => x % 2 == 0) should be (List(2, 4, 6, 8, 10))
  }
}
import controllers.ViewSta
import play.api.test.Helpers._
import play.api.test._
import scala.concurrent.Await
import scala.util.Try
import scala.concurrent.duration._
/**
*
* @author ponkotuy
* Date: 15/02/13.
*/
object DropStage extends App {
  import Common._

  // First CLI argument is the request count; malformed or missing input falls back to 100.
  val count = Try { args(0).toInt }.getOrElse(100)

  running(FakeApplication(additionalConfiguration = Settings.dbMap)) {
    // Kick off all requests up-front so they execute concurrently.
    val futures = (1 to count).map { _ =>
      ViewSta.dropStage().apply(FakeRequest())
    }
    // Drain each response body through printIteratee. This is purely
    // side-effecting, so use foreach — the original used `map`, building and
    // discarding a mapped collection.
    futures.foreach { f =>
      Await.result(f, 10.second).body.run(printIteratee)
    }
  }
}
| ttdoda/MyFleetGirls | profiler/src/main/scala/DropStage.scala | Scala | mit | 578 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package src.main.scala
import src.main.java.Unsigned16
import src.main.java.Random16
import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import org.apache.hadoop.util.PureJavaCrc32
import com.google.common.primitives.UnsignedBytes
/**
* An application that reads sorted data according to the terasort spec and
* reports if it's indeed sorted.
* This is an example program to validate TeraSort results
*
* See http://sortbenchmark.org/
*/
object TeraValidate {

  /** Entry point: checks that the TeraSort output under args(0) is globally sorted. */
  def main(args: Array[String]): Unit = {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    if (args.length < 1) {
      println("Usage:")
      println("DRIVER_MEMORY=[mem] spark-submit " +
        "com.github.ehiggs.spark.terasort.TeraValidate " +
        "spark-terasort-1.0-SNAPSHOT-with-dependencies.jar " +
        "[input-directory]")
      println(" ")
      println("Example:")
      println("DRIVER_MEMORY=50g spark-submit " +
        "com.github.ehiggs.spark.terasort.TeraValidate " +
        "spark-terasort-1.0-SNAPSHOT-with-dependencies.jar " +
        "file:///scratch/username/terasort_in ")
      System.exit(0)
    }

    // Process command line arguments
    val inputFile = args(0)
    val conf = new SparkConf()
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .setAppName("TeraValidate")
    val sc = new SparkContext(conf)
    val dataset = sc.newAPIHadoopFile[Array[Byte], Array[Byte], TeraInputFormat](inputFile)
    validate(sc, dataset)
  }

  /**
   * Validates the dataset in two steps:
   *  1. per partition: assert keys are locally sorted, accumulating a CRC32-based
   *     checksum plus the partition's min and max keys;
   *  2. on the driver: assert each partition's min <= max and that every
   *     partition's min is >= the previous partition's max (global order).
   */
  def validate(sc : SparkContext, dataset: RDD[(Array[Byte], Array[Byte])]) : Unit = {
    val output : RDD[(Unsigned16, Array[Byte], Array[Byte])] =
      dataset.mapPartitions( (iter) => {
        val sum = new Unsigned16
        val checksum = new Unsigned16
        val crc32 = new PureJavaCrc32()
        val min = new Array[Byte](10)
        val max = new Array[Byte](10)
        val cmp = UnsignedBytes.lexicographicalComparator()
        var pos = 0L
        var prev = new Array[Byte](10)
        // NOTE(review): `prev = key` keeps a reference to the key array; this is only
        // safe if TeraInputFormat does not reuse its key buffer between records — confirm.
        while (iter.hasNext) {
          val key = iter.next()._1
          // Local sortedness: each key must be >= its predecessor.
          assert(cmp.compare(key, prev) >= 0)
          crc32.reset()
          crc32.update(key, 0, key.length)
          checksum.set(crc32.getValue)
          sum.add(checksum)
          if (pos == 0) {
            // First record of the partition supplies the partition minimum.
            key.copyToArray(min, 0, 10)
          }
          pos += 1
          prev = key
        }
        // Last seen key is the partition maximum.
        prev.copyToArray(max, 0, 10)
        Iterator((sum, min, max))
      }, preservesPartitioning = true)

    val checksumOutput = output.collect()
    val cmp = UnsignedBytes.lexicographicalComparator()
    val sum = new Unsigned16
    val numRecords = dataset.count
    // Fold all per-partition checksums into a single global checksum.
    checksumOutput.foreach { case (partSum, _, _) =>
      sum.add(partSum)
    }
    println("num records: " + numRecords)
    println("checksum: " + sum.toString)

    // Verify the partitions are sorted relative to each other.
    var lastMax = new Array[Byte](10)
    checksumOutput.map{ case (partSum, min, max) =>
      (partSum, min.clone(), max.clone())
    }.zipWithIndex.foreach { case ((partSum, min, max), i) =>
      println(s"part $i")
      println(s"lastMax" + lastMax.toSeq.map(x => if (x < 0) 256 + x else x))
      println(s"min " + min.toSeq.map(x => if (x < 0) 256 + x else x))
      println(s"max " + max.toSeq.map(x => if (x < 0) 256 + x else x))
      assert(cmp.compare(min, max) <= 0, "min >= max")
      assert(cmp.compare(lastMax, min) <= 0, "current partition min < last partition max")
      lastMax = max
    }

    println("num records: " + numRecords)
    println("checksum: " + sum.toString)
    println("partitions are properly sorted")
  }
}
| ElfoLiNk/spark-bench | Terasort/src/main/scala/TerasortValidator.scala | Scala | apache-2.0 | 4,535 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.data.media;
import com.google.xml.combinators.~
import com.google.xml.combinators.Picklers._
import com.google.gdata.data.util.NormalPlayTime
import com.google.gdata.data.Uris.mediaNs
/**
* A media:credit element, as defined by Media RSS
*
* @see http://search.yahoo.com/mrss
* @author Iulian Dragos
*/
case class Credit(scheme: String, role: Option[String] = None, value: String)
object Credit {
  /** Default credit scheme is European Broadcasting Union Role Codes. */
  val DEFAULT_SCHEME = "urn:ebu"

  // Pickler for <media:credit scheme="..." role="...">value</media:credit>.
  // The scheme attribute falls back to DEFAULT_SCHEME when absent; role is optional.
  val pickler: Pickler[Credit] =
    (wrap (elem("credit", default(attr("scheme", text), DEFAULT_SCHEME) ~ opt(attr("role", text)) ~ text)(mediaNs))
        (Credit.apply)
        (fromCredit))

  // Inverse of Credit.apply for the `wrap` pickler: rebuilds the ~-chain in field order.
  private def fromCredit(c: Credit) = new ~(c.scheme, c.role) ~ c.value
}
package breeze.util
import breeze.linalg.{DenseVector, DenseMatrix}
import scala.reflect.ClassTag
import breeze.math.Complex
/**This utility class facilitates transparent access of breeze data objects from plain Java without Scala-related Generic complications.
* @author ktakagaki
* @date 03/20/2014.
*/
object JavaArrayOps {

  // Monomorphic DenseVector -> Array aliases: Java callers avoid Scala generics/ClassTags.
  def dvCToArray(data: DenseVector[Complex]): Array[Complex] = data.toArray
  def dvDToArray(data: DenseVector[Double]): Array[Double] = data.toArray
  def dvFToArray(data: DenseVector[Float]): Array[Float] = data.toArray
  def dvIToArray(data: DenseVector[Int]): Array[Int] = data.toArray
  def dvLToArray(data: DenseVector[Long]): Array[Long] = data.toArray

  // Monomorphic DenseMatrix -> Array[Array[_]] aliases (row-major output).
  def dmCToArray2(data: DenseMatrix[Complex]): Array[Array[Complex]] = dmToArray2(data)
  def dmDToArray2(data: DenseMatrix[Double]): Array[Array[Double]] = dmToArray2(data)
  def dmFToArray2(data: DenseMatrix[Float]): Array[Array[Float]] = dmToArray2(data)
  def dmIToArray2(data: DenseMatrix[Int]): Array[Array[Int]] = dmToArray2(data)
  def dmLToArray2(data: DenseMatrix[Long]): Array[Array[Long]] = dmToArray2(data)

  // Monomorphic Array -> DenseVector aliases.
  def arrayCToDv(array: Array[Complex]): DenseVector[Complex] = arrayToDv( array )
  def arrayDToDv(array: Array[Double]): DenseVector[Double] = arrayToDv( array )
  def arrayFToDv(array: Array[Float]): DenseVector[Float] = arrayToDv( array )
  def arrayIToDv(array: Array[Int]): DenseVector[Int] = arrayToDv( array )
  def arrayLToDv(array: Array[Long]): DenseVector[Long] = arrayToDv( array )

  // Monomorphic Array[Array[_]] -> DenseMatrix aliases (row-major input).
  def array2CToDm(array: Array[Array[Complex]]): DenseMatrix[Complex] = array2ToDm( array )
  def array2DToDm(array: Array[Array[Double]]): DenseMatrix[Double] = array2ToDm( array )
  def array2FToDm(array: Array[Array[Float]]): DenseMatrix[Float] = array2ToDm( array )
  def array2IToDm(array: Array[Array[Int]]): DenseMatrix[Int] = array2ToDm( array )
  def array2LToDm(array: Array[Array[Long]]): DenseMatrix[Long] = array2ToDm( array )

  // <editor-fold defaultstate="collapsed" desc=" implementations ">

  /** Copies a DenseVector into a fresh Array. */
  def dvToArray[@specialized(Int, Double, Long, Float) V: ClassTag](dv: DenseVector[V]): Array[V] = dv.toArray

  /**
   * Copies a DenseMatrix into a row-major Array[Array[V]].
   * Element-wise while loops are used deliberately: they stay specialized and
   * avoid the Transpose[] wrapper that slicing rows would return.
   */
  def dmToArray2[@specialized(Int, Double, Long, Float) V: ClassTag](dm: DenseMatrix[V]): Array[Array[V]] = {
    val ret = new Array[Array[V]](dm.rows)
    var rowI = 0
    while (rowI < dm.rows) {
      ret(rowI) = new Array[V](dm.cols)
      var colI = 0
      while (colI < dm.cols) {
        ret(rowI)(colI) = dm(rowI, colI)
        colI += 1
      }
      rowI += 1
    }
    ret
  }

  /** Wraps an Array in a DenseVector (no copy). */
  def arrayToDv[@specialized(Int, Double, Long, Float) V: ClassTag](array: Array[V]): DenseVector[V] = new DenseVector(array)

  /** Constructs DenseMatrix from Array[Array[V]] input. Input is in row-major like
    * format, similar to DenseMatrix( (1,2 3), (4,5,6),... ) syntax, which is defined
    * in [[breeze.linalg.Matrix]]. This constructor was written for Java compatibility.
    *
    * @param values row-major input; must be non-empty and rectangular
    * @return a DenseMatrix with the same shape and contents
    */
  def array2ToDm[@specialized(Int, Double, Long, Float) V: ClassTag](values: Array[Array[V]]): DenseMatrix[V] = {
    // Fail fast with a clear message instead of the bare ArrayIndexOutOfBoundsException
    // the previous version threw on an empty outer array.
    require(values.nonEmpty, "Input Array[Array[V]] must contain at least one row!")
    val tempRows = values.length
    val tempCols = values(0).length

    // Raggedness check up-front, before any allocation or copying.
    var rowIndex = 0
    while (rowIndex < tempRows) {
      require(values(rowIndex).length == tempCols, "Input Array[Array[V]] is ragged!")
      rowIndex += 1
    }

    // Copy in column-major order, matching DenseMatrix's internal layout.
    val tempret = new Array[V]( tempRows * tempCols )
    var tempretIndex = 0
    var colIndex = 0
    while (colIndex < tempCols) {
      rowIndex = 0
      while (rowIndex < tempRows) {
        tempret(tempretIndex) = values(rowIndex)(colIndex)
        tempretIndex += 1
        rowIndex += 1
      }
      colIndex += 1
    }
    new DenseMatrix(tempRows, tempCols, tempret)
  }

  // </editor-fold>
}
| chen0031/breeze | math/src/main/scala/breeze/util/JavaArrayOps.scala | Scala | apache-2.0 | 3,810 |
import pl.iterators.kebs.tagged.slick.SlickSupport
import slick.lifted.Isomorphism
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
class TaggedTypeIsomorphismTests extends AnyFunSuite with Matchers with SlickSupport {
  import pl.iterators.kebs.tagged._

  // Phantom tag: Simple is an Int at runtime, distinguished only at the type level.
  trait Tag1
  type Simple = Int @@ Tag1
  object Simple {
    def apply(i: Int) = i.@@[Tag1]
  }

  test("implicit isomorphism between bare type and type with tag") {
    val iso = implicitly[Isomorphism[Int @@ Tag1, Int]]
    // map strips the tag, comap re-applies it; both sides compare equal to the raw Int.
    iso.map(Simple(10)) shouldBe 10
    iso.comap(10) shouldBe Simple(10)
  }

  test("implicit isomorphism between bare type and type with tag (alias)") {
    // Same check through the `Simple` type alias instead of the explicit @@ form.
    val iso = implicitly[Isomorphism[Simple, Int]]
    iso.map(Simple(10)) shouldBe 10
    iso.comap(10) shouldBe Simple(10)
  }
}
| theiterators/kebs | tagged/src/test/scala/TaggedTypeIsomorphismTests.scala | Scala | mit | 809 |
/**
* Copyright 2015 Devon Miller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package im
package events
import scalajs.js
import _root_.org.scalajs.dom
import scala.language._
/**
* When processing events using a Delegate, Matcher is used to determine if the
* delegate function should be called on the node in the traversal
* from the node where the event occurred to the Delegate's root.
* Return true to allow a Delegate to process the current node in the
* traversal.
*/
trait Matcher extends Function2[dom.EventTarget, dom.EventTarget, Boolean] {
  // Boolean-algebra combinators over matchers; the symbolic forms
  // (&&, ||, unary_!) are aliases for the word forms.
  def and(rhs: Matcher) = AndMatch(this, rhs)
  def &&(rhs: Matcher) = and(rhs)
  def or(rhs: Matcher) = OrMatch(this, rhs)
  def ||(rhs: Matcher) = or(rhs)
  def not = NotMatch(this)
  def unary_! = not
}
// Logical negation of a matcher.
private[events] case class NotMatch(matcher: Matcher) extends Matcher {
  def apply(root: dom.EventTarget, current: dom.EventTarget) = !matcher(root, current)
}
// Conjunction of two matchers; && short-circuits, so rhs is not evaluated when lhs is false.
private[events] case class AndMatch(lhs: Matcher, rhs: Matcher) extends Matcher {
  def apply(root: dom.EventTarget, current: dom.EventTarget) = lhs(root, current) && rhs(root, current)
}
// Disjunction of two matchers; || short-circuits, so rhs is not evaluated when lhs is true.
private[events] case class OrMatch(lhs: Matcher, rhs: Matcher) extends Matcher {
  def apply(root: dom.EventTarget, current: dom.EventTarget) = lhs(root, current) || rhs(root, current)
}
/**
* Convenience constructors.
*/
object Matcher {
  // Lift a plain function into a Matcher so the combinators (and/or/not) become available.
  def apply(f: Function2[dom.EventTarget, dom.EventTarget, Boolean]) =
    new Matcher { def apply(root: dom.EventTarget, current: dom.EventTarget) = f(root, current) }

  /**
   * Match nothing.
   */
  val NoMatch = Matcher { (_, _) => false }

  /**
   * Match everything.
   */
  val MatchAll = Matcher { (_, _) => true }

  /**
   * Match on the tag. Current node must be an Element.
   * Comparison is case-insensitive on the tag name.
   */
  // NOTE(review): `(_, _) match { ... }` relies on placeholder expansion producing a
  // Function2 whose tupled arguments are matched — confirm it still compiles as
  // intended on the project's Scala version.
  def MatchTag(tagName: String) = Matcher {
    (_, _) match {
      case (_, el: dom.Element) if (el.tagName.toLowerCase == tagName.toLowerCase) => true
      case _ => false
    }
  }

  /**
   * True if the current node is the root.
   */
  def MatchRoot = Matcher { (root, target) => root == target }

  /**
   * Polyfill but not quite as robust as the reference below.
   * Scans the whole document for `selector` and checks membership, so each call is
   * linear in the number of document-wide matches and is not scoped to the
   * delegate's root.
   *
   * @see [matches](https://developer.mozilla.org/en-US/docs/Web/API/Element/matches)
   */
  private[this] def matches(el: dom.EventTarget, selector: String): Boolean = {
    val ns = dom.document
    val qmatches = ns.querySelectorAll(selector)
    for (i <- 0 until qmatches.length)
      if (qmatches(i) == el) return true
    false
  }

  /**
   * True if the potential target matches a selector.
   *
   * @see [HTML selectors](http://www.w3.org/TR/CSS21/selector.html%23id-selectors)
   */
  def MatchSelector(selector: String) = Matcher { (_, el: dom.EventTarget) => matches(el, selector) }

  /**
   * True if the target node is an Element and has a matching id.
   */
  def MatchId(id: String) = Matcher {
    (_, _) match {
      case (_, el: dom.Element) => el.id == id
      case _ => false
    }
  }
}
/**
* A listener receives a dom.Event and the current Node that
* is allowed to process the event. Return true if the propagation up
* the tree should continue and false if it should stop. The handler
* is a scala function because the scala machinery eventually
* retrieves the handler and executes it.
*
*/
trait Handler extends scala.Function2[dom.Event, dom.Node, Boolean]
/**
* Importing Handler.Implicits into scope brings in some implicits for automatic
* conversion of scala functions to Handler objects.
*
*/
object Handler {
  // Lift a (event, node) => Boolean function; the Boolean signals "continue propagation".
  def apply(f: scala.Function2[dom.Event, dom.Node, Boolean]) = new Handler {
    def apply(event: dom.Event, node: dom.Node) = f(event, node)
  }
  // Lift a function that ignores the current node.
  def apply(f: scala.Function1[dom.Event, Boolean]) = new Handler {
    def apply(event: dom.Event, node: dom.Node) = f(event)
  }
  object Implicits {
    implicit def toHandler(f: (dom.Event, dom.Node) => Boolean) = Handler(f)
    // Unit-returning functions always continue propagation (the handler returns true).
    implicit def toHandlerUnit(f: (dom.Event, dom.Node) => Unit) = new Handler {
      def apply(event: dom.Event, node: dom.Node) = {
        f(event, node)
        true
      }
    }
    implicit def toHandler1(f: dom.Event => Boolean) = Handler(f)
  }
}
/**
* A handler, a matcher and a capture flag. The matcher and capture flag
* are used to qualify whether a handler is used in processing an event.
* Capture refers to the capturing or bubbling phases of event processing.
*/
private[events] case class QualifiedHandler(handler: Handler, matcher: Matcher = Matcher.MatchRoot, capture: Boolean = false)
/**
* An object that allows a side-effecting call to `cancel`.
* `delegate` is stuck in there for convienence.
*/
trait Cancelable {
  def cancel(): Unit       // deregister the handler this Cancelable was returned for
  def delegate(): Delegate // the Delegate that produced this Cancelable (for chaining)
}
/**
* Delegate all event calls on the root to registered handlers.
* You can change the root object at any time and the handlers
* are properly deregistered/registered. The delegate is *not*
* attached to the DOM element. The calling program should do that
* if desired. A Delegate is mutable so you can add and remove
* handlers for specific event types and change the root
* object--this follows the design pattern in ftdomdelegate.
*
* The approach used is standard logic in java swing programs
* with jgoodies.
*
* @see [UI EVents](http://www.w3.org/TR/DOM-Level-3-Events/#interface-EventListener)
*/
case class Delegate(private[events] var root: Option[dom.EventTarget] = None,
                    private[events] val handlers: collection.mutable.Map[String, collection.mutable.Set[QualifiedHandler]] = collection.mutable.Map.empty) {
  self =>

  /**
   * Construct with a specific root.
   */
  def this(root: dom.EventTarget) = this(Some(root))

  /**
   * The single, stable JS listener registered with the DOM for every event type.
   *
   * BUGFIX: the original passed `handler _` directly to addEventListener and
   * removeEventListener at each call site. Every such eta-expansion is implicitly
   * converted to a *fresh* js.Function instance, so removeEventListener never
   * matched the listener added earlier and handlers could not actually be
   * detached. Converting once and reusing this reference makes add/remove pair
   * up correctly, and makes repeated addEventListener calls with the same
   * (type, listener, capture) triple no-ops per the DOM spec.
   */
  private val universalListener: js.Function1[dom.Event, Unit] = handler _

  /**
   * Handle an event. This is the universal listener attached to the root.
   * A marker property (__DELEGATEIGNORE) lets a handler stop processing by
   * *all* Delegate instances up the tree, not just this one. Handlers return
   * false to set that marker and halt propagation.
   *
   * @see [eventPhase](https://developer.mozilla.org/en-US/docs/Web/API/Event/eventPhase)
   */
  @js.annotation.JSExport
  protected def handler(event: dom.Event): Unit = {
    if (root.isEmpty) {
      println("Delegate root is empty but a handler is firing")
      assert(root.isDefined)
    }
    import js.DynamicImplicits.truthValue

    // If a special marker is found, other instances of Delegate
    // found up the chain should ignore this event as well.
    if (truthValue(event.asInstanceOf[js.Dynamic].__DELEGATEIGNORE))
      return

    // Text nodes (nodeType 3) delegate to their parent element.
    var target =
      event.target match {
        case d: dom.Node if (d.nodeType == 3 && d.parentNode != null) => d.parentNode
        case n @ _ => n.asInstanceOf[dom.Node]
      }

    // If eventPhase is defined, use it; otherwise infer it from the targets.
    val phase =
      if (truthValue(event.eventPhase.asInstanceOf[js.Dynamic])) event.eventPhase
      else if (event.target != event.currentTarget) 3 // bubbling
      else 2 // at target

    // Keep only the handlers for this event type whose capture flag is compatible
    // with the current phase: capture handlers fire in phases 1-2, bubble handlers in 2-3.
    val registeredHandlers = handlers.getOrElse(event.`type`, Set.empty).filter { qhandler =>
      if (qhandler.capture && (phase == 1 || phase == 2)) true
      else if (!qhandler.capture && (phase == 2 || phase == 3)) true
      else false
    }

    // Walk from the event target up to the root, invoking matching handlers.
    // A handler returning false marks the event for all Delegates, cancels the
    // default action, and stops processing immediately.
    var cont = true
    root.foreach { rt =>
      while (target != null) {
        registeredHandlers.foreach { qhandler =>
          if (qhandler.matcher(rt, target)) {
            cont = qhandler.handler(event, target)
          }
          if (!cont) {
            event.asInstanceOf[js.Dynamic].__DELEGATEIGNORE = true
            event.preventDefault
            return // ouch! rework logic so this is not needed in the middle
          }
        }
        if (target == rt)
          return
        target = target.parentNode
      }
    }
  }

  /**
   * List of events that by default should be captured versus bubbled.
   */
  private[events] val captureList = Seq("abort", "blur",
    "error", "focus", "load",
    "mouseenter", "mouseleave",
    "resize", "scroll", "unload")

  /**
   * Whether the event should by default, be processed in the capture phase or not.
   */
  protected def isDefaultCapture(eventName: String) = captureList.contains(eventName)

  /**
   * Apply this configured Delegate to the root and return this Delegate.
   * Detaches the universal listener from the previous root (if any) and attaches
   * it to the new one. You can attach to `Document.documentElement` to listen to
   * events globally.
   */
  def root(el: Option[dom.EventTarget]): Delegate = {
    root.foreach(stopListeningTo(_))
    root = el
    el.foreach(startListeningTo(_))
    this
  }

  /** Detach the universal listener from `el` for every registered (type, capture) pair. */
  protected def stopListeningTo(el: dom.EventTarget): Unit = {
    require(el != null)
    for {
      (et, qhandlers) <- handlers
      capture <- qhandlers.map(_.capture) // Set => each capture flag handled once
    } el.removeEventListener(et, universalListener, capture)
  }

  /** Attach the universal listener to `el` for every registered (type, capture) pair. */
  protected def startListeningTo(el: dom.EventTarget): Unit = {
    require(el != null)
    for {
      (et, qhandlers) <- handlers
      capture <- qhandlers.map(_.capture)
    } el.addEventListener(et, universalListener, capture)
  }

  /**
   * Turn off listening for events for a specific eventType. Individual
   * handlers should be cancelled using the Cancelable returned from `on`.
   *
   * @param eventType The event type or None indicating all handlers for all event types.
   */
  def off(eventType: Option[String] = None): Delegate = {
    import vdom.OptionOps
    val removals = collection.mutable.Set.empty[(String, Boolean)]
    for {
      (et, setOfQL) <- handlers
      ql <- setOfQL.toList // snapshot: we mutate setOfQL inside the loop
    } {
      if (Some(et) wildcardEq eventType) {
        removals += ((et, ql.capture))
        setOfQL -= ql
      }
    }
    // Detach the universal listener for each (type, capture) pair that lost its handlers.
    for ((et, capture) <- removals)
      root.foreach(_.removeEventListener(et, universalListener, capture))
    this
  }

  /**
   * Add a handler for a specific event. The same handler can be added multiple times.
   *
   * @return A Cancelable used to cancel the listening of the handler.
   */
  def on(eventType: String,
    handler: Handler,
    matcher: Matcher = Matcher.MatchRoot,
    useCapture: Option[Boolean] = None): Cancelable = {
    val capture = useCapture.getOrElse(isDefaultCapture(eventType))

    // Attach the universal listener the first time this (eventType, capture) pair is
    // requested. BUGFIX: the original keyed this decision on handlers.contains(eventType)
    // alone, so a second registration with a different capture flag never got a listener
    // for its phase, and a fully-cancelled event type was never re-attached.
    val alreadyListening = handlers.get(eventType).exists(_.exists(_.capture == capture))
    if (!alreadyListening)
      root.foreach(_.addEventListener(eventType, universalListener, capture))

    val qhandler = QualifiedHandler(handler, matcher, capture)
    handlers.getOrElseUpdate(eventType, collection.mutable.Set.empty) += qhandler

    new Cancelable {
      private val _eventType = eventType
      private val _qhandler = qhandler
      def cancel(): Unit = {
        handlers.get(_eventType).foreach(_.remove(_qhandler))
        // BUGFIX: the original condition was inverted (`size > 0`), detaching the
        // universal listener while handlers were still registered and never when the
        // last one was cancelled. Detach only when no handler remains for this
        // event type in this capture phase.
        val stillNeeded = handlers.get(_eventType).exists(_.exists(_.capture == _qhandler.capture))
        if (!stillNeeded)
          root.foreach(_.removeEventListener(_eventType, universalListener, _qhandler.capture))
      }
      def delegate() = self
    }
  }
}
| aappddeevv/scala-vdom | js/src/main/scala/im/events/Delegate.scala | Scala | apache-2.0 | 12,207 |
package org.clulab.numeric.actions
import org.clulab.odin.{Actions, Mention, State}
import org.clulab.numeric.mentions._
import org.clulab.numeric._
import scala.collection.mutable.ArrayBuffer
class NumericActions extends Actions {
  //
  // local actions
  //

  /** Constructs a NumberRange mention from a token pattern */
  def mkNumberRangeMention(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toNumberRangeMention)

  /** Constructs a MeasurementMention from a token pattern */
  def mkMeasurementMention(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toMeasurementMention)

  /** Constructs a MeasurementMention whose value is a range */
  def mkMeasurementWithRangeMention(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toMeasurementWithRangeMention)

  /** Constructs a DateRangeMention from a token pattern */
  def mkDateRangeMention(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateRangeMention)

  /** Constructs a DateRangeMention from a token pattern */
  def mkDateRangeMentionWithNumber(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateRangeMentionWithNumber)

  /** Constructs a DateMention from a token pattern */
  def mkDateMention(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMention)

  /** Constructs a DateMention from the yyyy-mm-dd single token */
  def mkDateMentionYyyyMmDd(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionYyyyMmDd)

  /** Constructs a DateMention from the dd-mm-yyyy single token */
  def mkDateMentionDdMmYyyy(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionDdMmYyyy)

  /** Constructs a DateMention from the mm-yyyy single token */
  def mkDateMentionMmYyyy(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionMmYyyy)

  /** Constructs a DateMention from the yyyy-mm single token */
  def mkDateMentionYyyyMm(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionYyyyMm)

  /** Constructs a DateMention from the yy-mm single token */
  def mkDateMentionYyMm(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionYyMm)

  /** Constructs a DateMention from the yy-mm-dd single token */
  def mkDateMentionYyMmDd(mentions: Seq[Mention], state: State): Seq[Mention] =
    mentions.map(_.toDateMentionYyMmDd)

  //
  // global actions below this point
  //

  /** Global action for the numeric grammar */
  def cleanupAction(mentions: Seq[Mention], state: State): Seq[Mention] =
    cleanupAction(mentions)

  def cleanupAction(mentions: Seq[Mention]): Seq[Mention] =
    keepLongestDates(mentions)

  /**
   * Keeps a date (or date range) mention only if it is not contained in another.
   * Non-date mentions pass through untouched, appended after the surviving dates
   * (same output order as the original implementation).
   */
  def keepLongestDates(mentions: Seq[Mention]): Seq[Mention] = {
    def isDate(m: Mention) = m.isInstanceOf[DateMention] || m.isInstanceOf[DateRangeMention]
    val (dates, others) = mentions.partition(isDate)
    // A date is dropped when some *other* date in the same sentence covers its span.
    // (Replaces the original mutable-flag O(n^2) scan with an equivalent `exists`;
    // note two dates with identical spans still eliminate each other, as before.)
    val longestDates = dates.filterNot { date =>
      dates.exists(m =>
        m != date &&
          m.sentence == date.sentence &&
          m.tokenInterval.contains(date.tokenInterval))
    }
    longestDates ++ others
  }
}
| sistanlp/processors | main/src/main/scala/org/clulab/numeric/actions/NumericActions.scala | Scala | apache-2.0 | 3,768 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component
import akka.actor.{ActorRef, Status}
import akka.pattern.ask
import akka.util.Timeout
import com.webtrends.harness.HarnessConstants
import com.webtrends.harness.app.HActor
import com.webtrends.harness.app.HarnessActor.{ConfigChange, PrepareForShutdown, SystemReady}
import scala.concurrent.duration._
import scala.util.{Failure, Success}
// Protocol messages understood by Component actors. Note: StartComponent and
// StopComponent are matched as their companion objects in Component.receive,
// so senders dispatch the companions themselves (e.g. `ref ! StartComponent`).
sealed class ComponentMessages()
case class StartComponent() extends ComponentMessages
case class StopComponent() extends ComponentMessages
// Ask-style request routed to a child actor; `name` selects the child (None = default child).
case class ComponentRequest[T](msg: T, name: Option[String] = None, timeout: Timeout = 5 seconds) extends ComponentMessages
// Fire-and-forget message routed to a child actor.
case class ComponentMessage[T](msg: T, name: Option[String] = None) extends ComponentMessages
// Reply wrapper sent back for a successful ComponentRequest.
case class ComponentResponse[T](resp: T)
/**
* Each system component needs to extend this class so that it is loaded up correctly
*/
abstract class Component(name: String) extends HActor with ComponentHelper {
  import context.dispatcher

  /** Child actor targeted when a ComponentRequest/ComponentMessage carries no name. */
  protected def defaultChildName: Option[String] = None

  /**
   * Components will typically receive a couple type of messages
   * 1. Start - Will execute after all components have loaded
   * 2. Stop - Will execute prior to shutdown of the harness
   */
  override def receive = health orElse {
    case StartComponent => start
    case StopComponent => stop
    case ConfigChange() => // User can receive to do something
    case ComponentRequest(msg, name, timeout) =>
      // Capture the sender before the Future completes on another thread.
      val caller = sender
      getChildActor(name) match {
        case Some(a) => (a ? msg)(timeout) onComplete {
          case Success(s) => caller ! ComponentResponse(s)
          case Failure(f) => caller ! Status.Failure(f)
        }
        case None => sender ! Status.Failure(ComponentException(this.getClass.getName, s"$name actor not found"))
      }
    case ComponentMessage(msg, name) =>
      getChildActor(name) match {
        case Some(a) => a ! msg
        // BUGFIX: the original message lacked the `s` interpolator, so the
        // literal text "[$name]" was logged instead of the requested name.
        case None => log.warn(s"Failed to send message to child actor [$name] for Component")
      }
    case SystemReady => systemReady
    case PrepareForShutdown => prepareForShutdown
  }

  /**
   * Gets the child actor based on a name; if name is None then checks the default
   * child name; if the default child name is also None, returns the single child
   * when there is exactly one, otherwise None.
   * ComponentManager.ComponentRef is a special name that resolves to this actor itself.
   */
  protected def getChildActor(name: Option[String]): Option[ActorRef] = {
    name match {
      case Some(ComponentManager.ComponentRef) => Some(self)
      case Some(n) => context.child(n)
      case None =>
        defaultChildName match {
          case Some(dn) => context.child(dn)
          case None =>
            // Unambiguous only when exactly one child exists.
            if (context.children.size == 1) Some(context.children.head)
            else None
        }
    }
  }

  /**
   * Starts the component. Overriders should still notify the parent
   * (ComponentStarted) when initialization completes.
   */
  def start = {
    // after completion of this, we need to send the started message from the component
    context.parent ! ComponentStarted(self.path.name)
  }

  /** Any logic to stop the component. */
  def stop = {}

  /** Any logic to run once all components and services are up. */
  def systemReady = {}

  /** Any logic to run once we get the shutdown message but before we begin killing actors. */
  def prepareForShutdown = {}
}
object Component {
  // Relative actor-path prefix for components: HarnessConstants.ComponentName plus
  // a trailing slash, ready for a component name to be appended.
  def getActorPath(): String = {
    s"${HarnessConstants.ComponentName}/"
  }
}
| pcross616/wookiee | wookiee-core/src/main/scala/com/webtrends/harness/component/Component.scala | Scala | apache-2.0 | 4,173 |
package de.zalando.beard.renderer
import de.zalando.beard.ast._
import de.zalando.beard.parser.BeardTemplateParser
import scala.annotation.tailrec
import scala.collection.GenTraversableOnce
import scala.collection.immutable.Seq
import scala.util.{Success, Try}
/**
* @author dpersa
*/
case class TemplateName(name: String) extends AnyVal // value class: typed wrapper with no runtime allocation
trait TemplateCompiler {
  /**
   * Compiles the named template into a renderable [[BeardTemplate]].
   *
   * @param templateName         name of the template to load and compile
   * @param yieldStatements      statements substituted for yield statements in a layout
   * @param contentForStatements accumulated contentFor blocks, keyed by identifier
   * @return the compiled template wrapped in Try
   */
  def compile(templateName: TemplateName,
              yieldStatements: Seq[Statement] = Seq.empty,
              contentForStatements: Map[Identifier, Seq[Statement]] = Map.empty): Try[BeardTemplate]
}
class CustomizableTemplateCompiler(templateLoader: TemplateLoader = new ClasspathTemplateLoader(),
                                   templateCache: BeardTemplateCache = new BeardTemplateCache(),
                                   templateParser: BeardTemplateParser = new BeardTemplateParser())
  extends TemplateCompiler {
  /**
   * Compiles a template: loads and parses on cache miss, merges yield /
   * contentFor / parameterless render statements, then recursively compiles
   * the layout named by an extends statement, threading the current
   * statements through as the layout's yield content.
   *
   * NOTE(review): despite the Try return type, a missing template is raised
   * here as an IllegalStateException rather than returned as a Failure —
   * confirm callers expect the exception.
   */
  def compile(templateName: TemplateName,
              yieldedStatements: Seq[Statement] = Seq.empty,
              contentForStatementsMap: Map[Identifier, Seq[Statement]] = Map.empty): Try[BeardTemplate] = {
    val beardTemplate = templateCache.get(templateName) match {
      case Some(template) => template
      case None =>
        val templateFileSource = templateLoader.load(templateName) match {
          case Some(content) => content
          case _ => throw new IllegalStateException(s"Could not find template with name ${templateName}")
        }
        val rawTemplate = templateParser.parse(templateFileSource.mkString)
        templateCache.add(templateName, rawTemplate)
        // TODO maybe do this in parallel
        // The Try results of these sub-compilations are discarded here; their
        // purpose is to warm the cache for createMergedTemplate below.
        compileRenderedTemplates(rawTemplate.renderStatements)
        rawTemplate
    }
    // Outer (caller-supplied) contentFor entries win over this template's own.
    val newContentForStatementsMap = addContentForStatementsToMap(contentForStatementsMap,
      beardTemplate.contentForStatements)
    val mergedBeardTemplate = createMergedTemplate(beardTemplate, yieldedStatements, newContentForStatementsMap)
    mergedBeardTemplate.extended match {
      case Some(extendsStatement) =>
        // This template extends a layout: recurse, passing our statements as
        // the layout's yield content.
        val currentYieldedStatements = mergedBeardTemplate.statements
        compile(TemplateName(extendsStatement.template), currentYieldedStatements, newContentForStatementsMap)
      case None =>
        // we need to merge the texts and new lines
        val concatTextsTemplate = mergedBeardTemplate.copy(statements = concatTexts((mergedBeardTemplate.statements)))
        Success(concatTextsTemplate)
    }
  }
  // Folds a run of text-bearing statements into a single Text node.
  // Note: an empty input still yields Seq(Text("")) — an empty text statement.
  private[renderer] def concatTextsSeq(existingTexts: Seq[HasText]): Seq[Text] =
    Seq(existingTexts.foldLeft(Text(""))((text, next) => Text(text.text + next.text)))
  /**
   * Given a sequence of Statements, some of them will be of type Text and some of them of type NewLine (instance of HasText)
   *
   * In case we have two consecutive Statements with text, we concat them into one text
   *
   * @param statements initial statements
   * @param mergedStatements here we put the result
   * @param existingTexts if we find a text, we stack it here until we concat them
   * @return
   */
  @tailrec
  private[renderer] final def concatTexts(statements: Seq[Statement],
                                          mergedStatements: Seq[Statement] = Seq.empty,
                                          existingTexts: Seq[HasText] = Seq.empty): Seq[Statement] = statements match {
    case Nil => mergedStatements ++ concatTextsSeq(existingTexts)
    case (head: HasText) :: tail => concatTexts(tail, mergedStatements, existingTexts :+ head)
    case (head: ForStatement) :: tail => {
      // Recurse (via the non-tailrec wrapper) into the loop body.
      val concatTextsForStatement = head.copy(statements = concatTextsRec(head.statements))
      concatTexts(tail, mergedStatements ++ concatTextsSeq(existingTexts) :+ concatTextsForStatement, Seq.empty)
    }
    case (head: IfStatement) :: tail => {
      // Recurse into both branches of the conditional.
      val concatTextsIfStatement = head.copy(
        head.condition,
        ifStatements = concatTextsRec(head.ifStatements),
        elseStatements = concatTextsRec(head.elseStatements))
      concatTexts(tail, mergedStatements ++ concatTextsSeq(existingTexts) :+ concatTextsIfStatement, Seq.empty)
    }
    // TODO concat the texts for the other statements
    case head :: tail => concatTexts(tail, mergedStatements ++ concatTextsSeq(existingTexts) :+ head, Seq.empty)
  }
  // Non-tailrec trampoline: lets concatTexts (which is @tailrec) recurse into
  // nested statement lists without violating the tail-call restriction.
  private[renderer] def concatTextsRec(statements: Seq[Statement],
                                       mergedStatements: Seq[Statement] = Seq.empty,
                                       existingTexts: Seq[HasText] = Seq.empty): Seq[Statement] = {
    concatTexts(statements, mergedStatements, existingTexts)
  }
  // Adds contentFor blocks to the accumulator; first definition wins — an
  // identifier already present in the map is never overwritten.
  private[renderer] def addContentForStatementsToMap(contentForStatementsMap: Map[Identifier, Seq[Statement]],
                                                     newContentForStatements: Seq[ContentForStatement]): Map[Identifier, Seq[Statement]] = {
    newContentForStatements match {
      case Nil => {
        contentForStatementsMap
      }
      case head :: tail =>
        if (!contentForStatementsMap.contains(head.identifier)) {
          addContentForStatementsToMap(contentForStatementsMap + (head.identifier -> head.statements), tail)
        } else {
          addContentForStatementsToMap(contentForStatementsMap, tail)
        }
    }
  }
  /**
   * Compiles the templates that are rendered inside of a template
   * @param renderStatements the statements which render templates inside of an existing template
   */
  private def compileRenderedTemplates(renderStatements: Seq[RenderStatement]) = {
    for {
      statement <- renderStatements
    } yield compile(TemplateName(statement.template))
  }
  /**
   * Replaces the yield statements in the beard template with the ones specified as the argument
   * Replaces the render statements without parameters with the content of the template
   * Replaces blocks with the corresponding contentFor and in case the content for is missing, it replaces the block with it's statements
   * @return a new BeardTemplate with the replaced statements
   */
  private def createMergedTemplate(beardTemplate: BeardTemplate,
                                   yieldStatements: Seq[Statement],
                                   contentForStatementsMap: Map[Identifier, Seq[Statement]]): BeardTemplate = {
    val newStatements: Seq[Statement] = beardTemplate.statements.flatMap {
      case YieldStatement() => if (yieldStatements.nonEmpty) yieldStatements else Seq(YieldStatement())
      case BlockStatement(identifier, statements) =>
        if (contentForStatementsMap.contains(identifier))
          contentForStatementsMap(identifier)
        else
          statements
      // inline the render statements without parameters
      case renderStatement@RenderStatement(templateName, Seq()) =>
        // Relies on compileRenderedTemplates having warmed the cache; a cache
        // miss leaves the render statement in place unexpanded.
        templateCache.get(TemplateName(templateName)) match {
          case Some(renderedTemplate) => renderedTemplate.statements
          case None => Seq(renderStatement)
        }
      case statement => Seq(statement)
    }
    val yieldedBeardTemplate = BeardTemplate(newStatements,
      beardTemplate.extended,
      beardTemplate.renderStatements,
      beardTemplate.contentForStatements)
    yieldedBeardTemplate
  }
}
// Ready-to-use compiler instance: classpath template loading, in-memory caching,
// and the default parser.
object DefaultTemplateCompiler extends CustomizableTemplateCompiler(new ClasspathTemplateLoader(),
  new BeardTemplateCache(),
  new BeardTemplateParser())
| danpersa/beard | src/main/scala/de/zalando/beard/renderer/DefaultTemplateCompiler.scala | Scala | apache-2.0 | 7,361 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.infer
import cc.factorie.la.DenseTensor1
import scala.collection.mutable.ArrayBuffer
import cc.factorie.variable._
import cc.factorie.model.{Model, Factor}
/**
* User: apassos
* Date: 5/29/13
* Time: 8:46 AM
*/
/**
 * MAP inference by message passing: each factor keeps one dual vector
 * ("lambda") per varying neighbor, and per-variable updates average the
 * factors' max-marginals until every factor agrees on the argmax for every
 * variable (or maxIterations is reached).
 */
class MPLP(variables: Seq[DiscreteVar], model: Model, maxIterations: Int = 100) extends cc.factorie.util.GlobalLogging {
  // Variables whose values we infer; all other neighbors are held fixed.
  val varying = variables.toSet
  // Wraps a model factor together with its per-variable dual vectors.
  class MPLPFactor(val factor: Factor) {
    val thisVariables = factor.variables.toSet
    val varyingVariables = thisVariables.filter(v => v.isInstanceOf[DiscreteVar]).map(_.asInstanceOf[DiscreteVar]).filter(varying.contains).toSet
    // One dual vector per varying neighbor, sized by that variable's domain.
    val lambdas = varyingVariables.map(v => v -> new DenseTensor1(v.domain.size)).toMap
    // This factor's contribution to the dual objective.
    def mapScore: Double = getMaxMarginals(varyingVariables.head).max
    // Max-marginal of v under this factor: for each value of v, the best
    // score over the other varying neighbors (factor score + their lambdas),
    // plus v's own lambda (added at the end).
    def getMaxMarginals(v: DiscreteVar): DenseTensor1 = {
      assert(varyingVariables.contains(v))
      val marginals = new DenseTensor1(v.domain.size)
      varyingVariables.size match {
        case 1 =>
          // we're the only varying neighbor, get the score
          val assignment = new Assignment1[v.type](v, v.domain(0).asInstanceOf[v.Value])
          for (i <- 0 until v.domain.size) {
            assignment.value1 = v.domain(i).asInstanceOf[v.Value]
            marginals(i) = factor.assignmentScore(assignment)
          }
        case 2 =>
          // there is one other varying neighbor we have to max over
          val other = if (varyingVariables.head eq v) varyingVariables.drop(1).head else varyingVariables.head
          val otherLambda = lambdas(other)
          val assignment = new Assignment2[v.type,other.type](v, v.domain(0).asInstanceOf[v.Value], other, other.domain(0).asInstanceOf[other.Value])
          for (value <- 0 until v.domain.size) {
            assignment.value1 = v.domain(value).asInstanceOf[v.Value]
            var maxScore = Double.NegativeInfinity
            for (otherValue <- 0 until other.domain.size) {
              assignment.value2 = other.domain(otherValue).asInstanceOf[other.Value]
              val s = factor.assignmentScore(assignment) + otherLambda(otherValue)
              if (s > maxScore) maxScore = s
            }
            marginals(value) = maxScore
          }
        case _ =>
          // General case: enumerate all joint assignments of the OTHER varying
          // neighbors with an odometer-style counter.
          // Returns false once the counter has wrapped past its last value.
          def increment(a: Array[Int], domains: Array[Int]): Boolean = {
            var i = a.length-1
            var done = false
            while (i >= 0 && !done) {
              a(i) = (a(i) + 1) % domains(i)
              if (a(i) != 0) done = true
              i -= 1
            }
            !done
          }
          // NOTE(review): unlike cases 1 and 2, `v` itself is never set to
          // `value` before scoring, so factor.currentScore appears to use v's
          // current value and be independent of the loop index — verify.
          for (value <- 0 until v.domain.size) {
            val others = varyingVariables.filterNot(_ eq v).map(_.asInstanceOf[MutableDiscreteVar]).toSeq
            val domainSizes = others.map(_.domain.size).toArray
            val values = (0 until others.size).map(i => 0).toArray
            var maxScore = Double.NegativeInfinity
            do {
              implicit val d = new DiffList
              // Temporarily set the other variables, score, then roll back.
              for (i <- 0 until others.length) others(i).set(others(i).domain(values(i)).intValue)(d)
              var score = factor.currentScore
              d.undo()
              for (i <- 0 until others.length) score += lambdas(others(i))(values(i))
              if (score > maxScore) maxScore = score
            } while (increment(values, domainSizes))
            marginals(value) = maxScore
          }
      }
      // Add v's own dual vector for this factor.
      marginals += lambdas(v)
      marginals
    }
  }
  // Relative-tolerance float comparison.
  @inline final def near(a: Double, b: Double, eps: Double = 0.000001): Boolean = math.abs(a - b) < (math.abs(a)*eps + eps)
  // Converged when all factors agree on the argmax (or the disagreeing
  // entries are numerically tied).
  def isConverged(maxMarginals: Seq[DenseTensor1]): Boolean = {
    val maxIndex0 = maxMarginals.head.maxIndex
    for (i <- 1 until maxMarginals.length) {
      val maxIndex = maxMarginals(i).maxIndex
      if (maxIndex0 != maxIndex && !near(maxMarginals(i)(maxIndex), maxMarginals(i)(maxIndex0))) {
        return false
      }
    }
    true
  }
  // One coordinate step for variable v: if the factors disagree, reset each
  // factor's lambda so its max-marginal equals the average of all of them.
  // Returns whether v was already converged BEFORE the update.
  def updateMessages(v: DiscreteVar, factors: Seq[MPLPFactor]): Boolean = {
    val maxMarginals = factors.map(_.getMaxMarginals(v))
    val converged = isConverged(maxMarginals)
    if (!converged) {
      val sumMarginals = new DenseTensor1(maxMarginals.head.length)
      maxMarginals.foreach(sumMarginals += _)
      sumMarginals *= 1.0/maxMarginals.length
      for (i <- 0 until factors.length) {
        val lambda = factors(i).lambdas(v)
        for (j <- 0 until lambda.length)
          lambda(j) += sumMarginals(j) - maxMarginals(i)(j)
      }
      // After the update every factor's max-marginal equals the average.
      assert(isConverged(factors.map(_.getMaxMarginals(v))))
    }
    converged
  }
  def infer: MAPSummary = {
    val factors = model.factors(variables).map(new MPLPFactor(_))
    // LinkedHashMap keeps a deterministic variable ordering across sweeps.
    val variableFactors = collection.mutable.LinkedHashMap[Var,ArrayBuffer[MPLPFactor]]()
    for (f <- factors) {
      for (v <- f.varyingVariables) {
        val list = variableFactors.getOrElseUpdate(v, ArrayBuffer[MPLPFactor]())
        list += f
      }
    }
    var converged = true
    var i = 0
    do {
      converged = true
      // NOTE(review): `&&` short-circuits, so once one variable reports
      // non-converged the remaining variables get no update this sweep —
      // confirm this is intended rather than `converged & update...`.
      for (v <- variables) converged = converged && updateMessages(v.asInstanceOf[DiscreteVar], variableFactors(v))
      logger.debug("Dual is: " + factors.map(_.mapScore).sum)
      i += 1
    } while (!converged && i < maxIterations)
    val assignment = new HashMapAssignment(ignoreNonPresent=false)
    for (v <- variables) {
      val value = v.domain(variableFactors(v).head.getMaxMarginals(v).maxIndex).asInstanceOf[DiscreteVar#Value]
      assignment.update(v, value)
      // go over the factors and "set" this variable to this value. This will avoid bad behavior when
      // the LP relaxation is not tight
      for (factor <- variableFactors(v)) {
        val lambda = factor.lambdas(v)
        for (i <- 0 until lambda.length)
          lambda(i) = Double.NegativeInfinity
        lambda(value.intValue) = 0
      }
    }
    new MAPSummary(assignment, factors.map(_.factor).toSeq)
  }
}
object MaximizeByMPLP extends Maximize[Iterable[DiscreteVar],Model] {
  /**
   * Finds the MAP assignment with MPLP using the default iteration budget.
   * Marginalizing summaries are not supported and raise an Error.
   */
  override def infer(variables:Iterable[DiscreteVar], model:Model, marginalizing:Summary = null): MAPSummary = {
    // Fixed message: previously read "Multivariate case yet implemented."
    if (marginalizing ne null) throw new Error("Multivariate case not yet implemented.")
    new MPLP(variables.toSeq, model).infer
  }
}
/** Same as the companion object, but with a configurable iteration budget. */
class MaximizeByMPLP(maxIterations: Int) extends Maximize[Iterable[DiscreteVar],Model] {
  override def infer(variables:Iterable[DiscreteVar], model:Model, marginalizing:Summary = null) = {
    // Fixed message: previously read "Multivariate case yet implemented."
    if (marginalizing ne null) throw new Error("Multivariate case not yet implemented.")
    new MPLP(variables.toSeq, model, maxIterations=maxIterations).infer
  }
}
| zxsted/factorie | src/main/scala/cc/factorie/infer/MPLP.scala | Scala | apache-2.0 | 7,313 |
import java.security.KeyPairGenerator
import org.scalatestplus.play.PlaySpec
import pdi.jwt.{JwtAlgorithm, JwtCirce, JwtClaim, JwtJson, JwtOptions}
class JwtSpec extends PlaySpec {
  "jwt-scala" should {
    "be able to encode claim and decode it" in {
      // Generate a throwaway RSA key pair (512-bit is fine for a unit test).
      val generator = KeyPairGenerator.getInstance("RSA")
      generator.initialize(512)
      val pair = generator.genKeyPair()
      val claim = JwtClaim(
        issuer = Some("test")
      )
      // Encode with Circe and decode with the Play-JSON flavor to check
      // cross-library compatibility.
      val token = JwtCirce.encode(claim, pair.getPrivate, JwtAlgorithm.RS256)
      val decoded =
        JwtJson.decode(token, pair.getPublic, Seq(JwtAlgorithm.RS256), JwtOptions.DEFAULT)
      println((token, decoded))
      assert(decoded.toOption.contains(claim))
    }
  }
}
| j5ik2o/forseti | app/open-id-provider/test/JwtSpec.scala | Scala | mit | 823 |
/**
* Utilities.
*
* @author Yujian Zhang <yujian{dot}zhang[at]gmail(dot)com>
*
* License:
* GNU General Public License v2
* http://www.gnu.org/licenses/gpl-2.0.html
* Copyright (C) 2013 Yujian Zhang
*/
package net.whily.android.worldmetro
import android.app.Activity
import net.whily.scaland.Util
object Misc {
  /** Applies the Material Dark or Light theme to the activity based on the stored preference. */
  def setMaterialTheme(activity: Activity) {
    // Preference value 0 means the Dark theme; anything else means Light.
    activity.setTheme(
      if (Util.getThemePref(activity) == 0) R.style.AppTheme_Dark
      else R.style.AppTheme_Light
    )
  }
}
| whily/worldmetro | src/main/scala/Misc.scala | Scala | gpl-2.0 | 618 |
package com.seanshubin.detangler.console
import java.nio.file.Path
import java.time.Clock
import com.seanshubin.detangler.analysis._
import com.seanshubin.detangler.bytecode.{ClassParser, ClassParserImpl}
import com.seanshubin.detangler.contract.{FilesContract, FilesDelegate}
import com.seanshubin.detangler.domain._
import com.seanshubin.detangler.model.{Detangled, Standalone}
import com.seanshubin.detangler.report.ReportResult
import com.seanshubin.detangler.scanner._
import com.seanshubin.detangler.timer.{Timer, TimerImpl}
import com.seanshubin.devon.domain.{DevonMarshaller, DevonMarshallerWiring}
/**
 * Manual dependency-injection wiring for everything constructed after the
 * configuration has been loaded. Concrete values come from the abstract
 * members at the bottom, which a subclass (or the configuration layer)
 * must supply. All collaborators are lazy so construction order follows use.
 */
trait AfterConfigurationWiring {
  // Output sink for notifications; defaults to stdout.
  lazy val emitLine: String => Unit = println
  lazy val clock: Clock = Clock.systemUTC()
  // Factory used by the runner to build a reporter per analysis result.
  lazy val createReporter: (Detangled, Path, Seq[Standalone], Notifications) =>
    () => ReportResult = (theDetangled, theReportDir, theAllowedCycles, theNotifications) => new ReporterWiring {
    override def detangled: Detangled = theDetangled
    override def reportDir: Path = theReportDir
    override def allowedCycles: Seq[Standalone] = theAllowedCycles
    override def configurationWriter: ConfigurationWriter = AfterConfigurationWiring.this.configurationWriter
    override def notifications: Notifications = theNotifications
  }.reporter
  lazy val filesContract: FilesContract = FilesDelegate
  // --- scanning pipeline: directories -> archives/class files -> bytecode ---
  lazy val directoryScanner: DirectoryScanner =
    new DirectoryScannerImpl(filesContract, searchPaths, ignoreFiles)
  lazy val stringToStandaloneFunction: String => Option[Standalone] = new StringToStandaloneFunction(
    level,
    startsWithInclude,
    startsWithExclude,
    startsWithDrop,
    ignoreJavadoc)
  lazy val acceptNameFunction: String => Boolean = new AcceptNameFunction(stringToStandaloneFunction)
  lazy val zipScanner: ZipScanner = new ZipScannerImpl(
    filesContract,
    FileTypes.isCompressed,
    acceptNameFunction,
    notifications.warnNoRelevantClassesInPath)
  lazy val classScanner: ClassScanner = new ClassScannerImpl(filesContract)
  lazy val timer: Timer = new TimerImpl(clock, logTiming, notifications.startTiming, notifications.endTiming)
  lazy val fileScanner: FileScanner = new FileScannerImpl(zipScanner, classScanner, timer)
  lazy val classParser: ClassParser = new ClassParserImpl
  lazy val classBytesScanner: ClassBytesScanner = new ClassBytesScannerImpl(classParser)
  lazy val devonMarshaller: DevonMarshaller = DevonMarshallerWiring.Default
  lazy val notifications: Notifications = new LineEmittingNotifications(devonMarshaller, emitLine)
  lazy val scanner: Scanner = new ScannerImpl(
    directoryScanner,
    fileScanner,
    classBytesScanner,
    timer)
  // --- analysis: cycle detection over the scanned dependency graph ---
  lazy val cycleFinder: CycleFinder[Standalone] = new CycleFinderWarshall[Standalone]
  lazy val detangler: Detangler = new DetanglerImpl(cycleFinder)
  // Top-level entry point combining scan, analysis and reporting.
  lazy val analyzer: Runnable = new AfterConfigurationRunnerImpl(
    scanner,
    detangler,
    createReporter,
    reportDir,
    allowedCycles,
    stringToStandaloneFunction,
    timer,
    notifications,
    canFailBuild)
  // --- abstract configuration values supplied by the implementor ---
  def searchPaths: Seq[Path]
  def reportDir: Path
  def level: Int
  def startsWithInclude: Seq[Seq[String]]
  def startsWithExclude: Seq[Seq[String]]
  def startsWithDrop: Seq[Seq[String]]
  def allowedCycles: Seq[Seq[String]]
  def configurationWriter: ConfigurationWriter
  def ignoreFiles: Seq[Path]
  def canFailBuild: Boolean
  def ignoreJavadoc: Boolean
  def logTiming: Boolean
}
| SeanShubin/detangler | console/src/main/scala/com/seanshubin/detangler/console/AfterConfigurationWiring.scala | Scala | unlicense | 3,434 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.view.test
import play.api.libs.json.Json
// Test-support payload carrying SIC codes and labour-compliance answers as raw
// strings — presumably consumed by a test-only setup endpoint; confirm usage.
case class SicAndComplianceTestSetup(businessActivityDescription: Option[String],
                                     sicCode1: Option[String],
                                     sicCode2: Option[String],
                                     sicCode3: Option[String],
                                     sicCode4: Option[String],
                                     labourCompanyProvideWorkers: Option[String],
                                     labourWorkers: Option[String],
                                     labourTemporaryContracts: Option[String],
                                     mainBusinessActivityId: Option[String] ,
                                     mainBusinessActivityDescription: Option[String] ,
                                     mainBusinessActivityDisplayDetails: Option[String]
                                    )
object SicAndComplianceTestSetup {
  // Explicit type annotation on the implicit: avoids implicit-search
  // surprises and is mandatory in Scala 3. Fully qualified to avoid a new import.
  implicit val format: play.api.libs.json.OFormat[SicAndComplianceTestSetup] =
    Json.format[SicAndComplianceTestSetup]
}
| hmrc/vat-registration-frontend | app/models/view/test/SicAndComplianceTestSetup.scala | Scala | apache-2.0 | 1,619 |
package deburnat.transade.core.storages
import deburnat.transade.core.admins.CoreAdmin.{platform, st, _st}
/**
* An algorithm for dynamic programming. It uses internally a two-dimensional
* matrix to store the previous results.
* Project name: deburnat
* Date: 7/27/13
* Time: 12:10 AM
* @author Patrick Meppe (tapmeppe@gmail.com)
*/
protected[core] object Storage {
  //st =: Storage, _st = storage
  // Maps a format name (e.g. "CSV", "xml") to the capitalised storage class
  // name and instantiates it reflectively; any failure yields null.
  def getStorage(format: String): IStorage =
    try {
      // "cSv" -> "Csv": lower-case everything, then upper-case the first letter.
      val className = platform(_st, false) + format.toLowerCase.capitalize + st
      Class.forName(className).newInstance.asInstanceOf[IStorage]
    } catch {
      case _: Exception => null //ClassNotFoundException | InstantiationException | IllegalAccessException
    }
}
| deburnatshazem/deburnat | core/src/main/scala/deburnat/transade/core/storages/interfaces/Storage.scala | Scala | apache-2.0 | 742 |
package com.jeff.chaser.models.components.ai.states
/**
* State component to represent patrolling state
*/
class PatrolComponent extends AiStateComponent{
  // Patrol direction/side indicator, 0 initially — exact semantics are set by
  // the systems that read it; TODO confirm.
  var side = 0
  // Time budget for this state in seconds (Float), starting at 5f — presumably
  // a countdown managed externally; verify against the state machine.
  var timeInState = 5f
} | jregistr/Academia | CSC455-Game-Programming/Chaser/core/src/com/jeff/chaser/models/components/ai/states/PatrolComponent.scala | Scala | mit | 199 |
package edu.gemini.model.p1.immutable
import edu.gemini.model.p1.{mutable => M}
import org.specs2.mutable._
// Each example builds a fresh imaging blueprint and checks that the filter
// introduced by the referenced REL ticket is present in its filter list.
class GmosSBlueprintSpec extends Specification with SemesterProperties {
  "The GmosS Blueprint" should {
    "include the Lya395 filter, REL-1236" in {
      val blueprint = GmosSBlueprintImaging(GmosSFilter.IMAGING)
      blueprint.filters must contain(M.GmosSFilter.Lya395_G0342)
    }
    "include the Z (876 nm) filter, REL-1723" in {
      val blueprint = GmosSBlueprintImaging(GmosSFilter.IMAGING)
      blueprint.filters must contain(M.GmosSFilter.Z_G0343)
    }
    "include the Y (1010 nm) filter, REL-1723" in {
      val blueprint = GmosSBlueprintImaging(GmosSFilter.IMAGING)
      blueprint.filters must contain(M.GmosSFilter.Y_G0344)
    }
  }
}
| fnussber/ocs | bundle/edu.gemini.model.p1/src/test/scala/edu/gemini/model/p1/immutable/GmosSBlueprintSpec.scala | Scala | bsd-3-clause | 772 |
package com.github.j5ik2o.reactive.redis.parser.model
/** Expr node carrying a signed 64-bit integer value (presumably a Redis integer reply — confirm). */
final case class NumberExpr(value: Long) extends Expr
| j5ik2o/reactive-redis | core/src/main/scala/com/github/j5ik2o/reactive/redis/parser/model/NumberExpr.scala | Scala | mit | 109 |
/*
* Copyright 2016 Uncharted Software Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package software.uncharted.sparkpipe.ops.community.twitter
import org.apache.spark.sql.{SQLContext, DataFrame}
import org.apache.spark.sql.types.{StructType, StructField, BooleanType, StringType, LongType, ArrayType, DoubleType, IntegerType}
import scala.collection.mutable.WrappedArray
import software.uncharted.sparkpipe.ops
import org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.functions.{col, udf}
/**
* This package contains twitter pipeline operations for Spark
* See the README or {@link software.uncharted.sparkpipe.Pipe} for more information.
*/
package object tweets {
  // NOTE(review): "CONTRIBUTER" is misspelled, but the name is public API —
  // renaming would break callers, so it is documented here instead.
  // scalastyle:off multiple.string.literals
  val CONTRIBUTER_SCHEMA = StructType(Seq(
    StructField("id", entities.SIZE_SCHEMA, false),
    StructField("id_str", StringType, true),
    StructField("screen_name", StringType, true)
  ))
  // scalastyle:on
  // scalastyle:off multiple.string.literals
  val COORDINATE_SCHEMA = StructType(Seq(
    StructField("coordinates", ArrayType(DoubleType, true), true), // collection of float
    StructField("type", StringType, true)
  ))
  // scalastyle:on
  // Schema shared by top-level tweets and nested tweet objects (retweets,
  // quoted tweets); the nested variants below adjust a few field types.
  // scalastyle:off multiple.string.literals
  val TWEET_SCHEMA_BASE:StructType = StructType(Seq(
    StructField("contributors",CONTRIBUTER_SCHEMA,true), // StringType inferred in sample tweet, ArrayType(CONTRIBUTER_SCHEMA) in docs
    StructField("coordinates",COORDINATE_SCHEMA,true), // StringType inferred in sample tweet, COORDINATE_SCHEMA in docs
    StructField("created_at",StringType,true),
    StructField("current_user_retweet", StructType(Seq(
      StructField("id", LongType, true),
      StructField("id_str", StringType, true)
    )),true),
    StructField("entities",entities.ENTITY_SCHEMA,true),
    StructField("favorite_count",LongType,true),
    StructField("favorited",BooleanType,true),
    StructField("filter_level", StringType, true), // Depricated, uses coordinate instead.
    StructField("geo",StringType,true),
    StructField("id",LongType,true),
    StructField("id_str",StringType,true),
    StructField("in_reply_to_screen_name",StringType,true),
    StructField("in_reply_to_status_id",StringType,true),
    StructField("in_reply_to_status_id_str",StringType,true),
    StructField("in_reply_to_user_id",StringType,true),
    StructField("in_reply_to_user_id_str",StringType,true),
    StructField("is_quote_status",BooleanType,true), // Not in docs, but found in tweet
    StructField("lang",StringType,true),
    StructField("metadata",StructType(Seq( // Not in docs, but found in tweet
      StructField("iso_language_code",StringType,true),
      StructField("result_type",StringType,true))
    ),true),
    StructField("place",StringType,true),
    StructField("possibly_sensitive",BooleanType,true),
    StructField("quoted_status_id", entities.SIZE_SCHEMA, true),
    StructField("quoted_status_id_str", StringType, true),
    StructField("scopes", StructType(Seq(
      StructField("followers", BooleanType, true)
    )), true),
    StructField("retweet_count",LongType,true),
    StructField("retweeted",BooleanType,true),
    StructField("source",StringType,true),
    StructField("text",StringType,true),
    StructField("truncated",BooleanType,true),
    StructField("user",users.USER_SCHEMA,true), // tweet.user.entities.description.urls must be string type
    StructField("withheld_copyright", BooleanType, true),
    StructField("withheld_in_countries", ArrayType(StringType, true), true),
    StructField("withheld_scope", StringType, true)
  ))
  // scalastyle:on
  // NOTE(review): "EXENDED" is misspelled but public API — kept for compatibility.
  // scalastyle:off multiple.string.literals
  val TWEET_SCHEMA_BASE_WITH_EXENDED_USER:StructType = util.addFields(
    StructType(TWEET_SCHEMA_BASE.filterNot(s => {s.name == "user"})),
    Seq(StructField("user", users.USER_SCHEMA_WITH_EXTENDED_ENTITY, true)))
  // scalastyle:on
  // Full top-level tweet schema: upgrades place/reply fields and adds the
  // nested retweeted_status and quoted_status tweet objects.
  // scalastyle:off multiple.string.literals
  val TWEET_SCHEMA = util.addFields(
    StructType(TWEET_SCHEMA_BASE.filterNot(s => {s.name == "place" || s.name == "in_reply_to_status_id" || s.name == "in_reply_to_user_id"})),
    Seq(
      StructField("place",places.PLACE_SCHEMA,true),
      StructField("in_reply_to_status_id",LongType,true),
      StructField("in_reply_to_user_id",LongType,true),
      StructField("retweeted_status",TWEET_SCHEMA_BASE_WITH_EXENDED_USER,true), // tweet.user.retweeted_status.user.entities.description.urls must be StructType
      StructField("quoted_status", TWEET_SCHEMA_BASE, true)))
  // scalastyle:on
  /**
   * Identity on a nested string-array column; used with addColumn to copy a
   * nested field out to a top-level column.
   *
   * @return the Row (wrapped array) that was passed to it, unchanged
   */
  val columnExtractor: WrappedArray[String] => WrappedArray[String] = row => {row}
  /**
   * Create a DataFrame from an input data source
   *
   * @param path A format-specific location String for the source data
   * @param format Specifies the input data source format (parquet by default)
   * @param options A Map[String, String] of options
   * @return a DataFrame created from the specified source, using TWEET_SCHEMA
   */
  def read(
    path: String,
    format: String = "parquet",
    options: Map[String, String] = Map[String, String]()
  )(sqlContext: SQLContext): DataFrame = {
    ops.core.dataframe.io.read(path, format, options, TWEET_SCHEMA)(sqlContext)
  }
  /**
   * Create a new column in the given dataframe of hashtags present in the tweet text
   *
   * @param newCol The column into which to put the hashtags
   * @param sourceCol The column from which to get the hashtags
   * @param input Input pipeline data to transform
   * @return the dataframe with a new column containing the hashtags in the tweet
   **/
  def extractHashtags(newCol: String = "hashtags", sourceCol: String = "entities.hashtags.text")(input: DataFrame): DataFrame = {
    ops.core.dataframe.addColumn(newCol, columnExtractor, sourceCol)(input)
  }
  /**
   * Create a new column in the given dataframe of user mentions present in the tweet text
   *
   * @param newCol The column into which to put the user mentions
   * @param sourceCol The column from which to get the user mentions
   * @param input Input pipeline data to transform
   * @return the dataframe with a new column containing the user mentions in the tweet
   **/
  def extractMentions(newCol: String = "mentions", sourceCol: String = "entities.user_mentions.screen_name")(input: DataFrame): DataFrame = {
    ops.core.dataframe.addColumn(newCol, columnExtractor, sourceCol)(input)
  }
  /**
   * Create a new column in the given dataframe of the tweets coordinates
   *
   * @param newCol The column into which to put the coordinates
   * @param sourceCol The column from which to get the coordinates
   * @param input Input pipeline data to transform
   * @return the dataframe with a new column containing the coordinates of the tweet
   **/
  def extractGeo(newCol: String = "coordinates", sourceCol: String = "coordinates.coordinates")(input: DataFrame): DataFrame = {
    // Identity on a double-array column (coordinates are doubles, not strings).
    val hashtagExtractor: WrappedArray[Double] => WrappedArray[Double] = row => {row}
    ops.core.dataframe.addColumn(newCol, hashtagExtractor, sourceCol)(input)
  }
  /**
   * Create a new column of all URLs present in the tweet object, not including those in the retweet object
   *
   * @param newCol The column into which to put the urls
   * @param input Input pipeline data to transform
   * @return the dataframe with a new column containing all urls in the tweet
   **/
  // scalastyle:off null method.length
  def extractURLs(newCol: String = "urls")(input: DataFrame): DataFrame = {
    // List of columns to extract from. Not including 17 more present in the retweet object
    val sourceColsString = Array(
      "user.profile_background_image_url", // string
      "user.profile_background_image_url_https", // string
      "user.profile_banner_url", // string
      "user.profile_image_url", // string
      "user.profile_image_url_https", // string
      "user.url" // string
    )
    val sourceColsArray = Array(
      "entities.media.url", // array<string>
      "entities.media.display_url", // array<string>
      "entities.media.expanded_url", // array<string>
      "entities.media.media_url", // array<string>
      "entities.media.media_url_https", // array<string>
      "entities.urls.display_url", // array<string>
      "entities.urls.expanded_url", // array<string>
      "entities.urls.url", // array<string>
      "user.entities.description.urls", // array<string>
      "user.entities.url.urls.display_url", // array<string>
      "user.entities.url.urls.expanded_url", // array<string>
      "user.entities.url.urls.url" // array<string>
    )
    // Create extraction function for string columns
    // (null source values are skipped rather than appended)
    val appenderString = (sourceUrlCol: String, urlCol: WrappedArray[String]) => {
      if (sourceUrlCol != null) {urlCol :+ sourceUrlCol}
      else {urlCol}
    }
    val sqlfuncString = udf(appenderString)
    // Create extraction function for array<string> columns
    val appenderArray = (sourceUrlCol: WrappedArray[String], urlCol: WrappedArray[String]) => {
      if (sourceUrlCol != null) {urlCol.union(sourceUrlCol)}
      else {urlCol}
    }
    val sqlfuncArray = udf(appenderArray)
    // Create empty column of Array[String]
    val coder: () => Array[String] = () => {Array()}
    val sqlfunc = udf(coder)
    var df = input.withColumn("urls", sqlfunc())
    // Add each url source to the new column
    // (rename-append-drop dance because a column cannot reference itself)
    sourceColsArray.foreach(sourceCol => {
      df = df
        .withColumnRenamed("urls", "urlsTemp")
        .withColumn("urls", sqlfuncArray(col(sourceCol), col("urlsTemp")))
        .drop("urlsTemp")
    })
    // Add each url source to the new column
    sourceColsString.foreach(sourceCol => {
      df = df
        .withColumnRenamed("urls", "urlsTemp")
        .withColumn("urls", sqlfuncString(col(sourceCol), col("urlsTemp")))
        .drop("urlsTemp")
    })
    df
  }
  // scalastyle:on
}
| unchartedsoftware/sparkpipe-twitter-ops | src/main/scala/software/uncharted/sparkpipe/ops/community/twitter/tweets/package.scala | Scala | apache-2.0 | 10,608 |
package example
import diode._
import scalatags.JsDom.all._
/**
* Counter view renders the counter value and provides interaction through various buttons affecting the counter value.
*
* @param counter
* Model reader for the counter value
* @param dispatch
* Dispatcher
*/
class CounterView(counter: ModelRO[Int], dispatch: Dispatcher) {
  // Builds the counter panel: a heading, the current model value, and three
  // buttons that dispatch Increase(2)/Decrease(1)/Reset into the circuit.
  def render = {
    div(
      h3("Counter"),
      p("Value = ", b(counter())),
      div(
        cls := "btn-group",
        button(cls := "btn btn-default", onclick := (() => dispatch(Increase(2))), "Increase"),
        button(cls := "btn btn-default", onclick := (() => dispatch(Decrease(1))), "Decrease"),
        button(cls := "btn btn-default", onclick := (() => dispatch(Reset)), "Reset")
      )
    )
  }
}
| ochrons/diode | examples/simple/src/main/scala/example/CounterView.scala | Scala | mit | 777 |
package org.jetbrains.sbt
package settings
import java.util
import com.intellij.openapi.components._
import com.intellij.openapi.externalSystem.settings.{AbstractExternalSystemSettings, ExternalSystemSettingsListener}
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.util.io.FileUtil
import com.intellij.psi.PsiElement
import com.intellij.util.containers.ContainerUtilRt
import com.intellij.util.xmlb.annotations.AbstractCollection
import org.jetbrains.sbt.project.settings.{SbtProjectSettings, SbtProjectSettingsListener, SbtProjectSettingsListenerAdapter, SbtTopic}
import scala.beans.BeanProperty
import scala.collection.JavaConverters._
/**
* @author Pavel Fatin
*/
/**
 * Project-level persistent settings for the SBT external-system integration.
 *
 * Holds the global launcher/JVM options shown in the SBT settings UI (persisted
 * to `sbt.xml`) and resolves the per-project [[SbtProjectSettings]] linked to a
 * given module, PSI element, or project path.
 */
@State(
  name = "ScalaSbtSettings",
  storages = Array(new Storage("sbt.xml"))
)
class SbtSystemSettings(project: Project)
  extends AbstractExternalSystemSettings[SbtSystemSettings, SbtProjectSettings, SbtProjectSettingsListener](SbtTopic, project)
  with PersistentStateComponent[SbtSystemSettingsState]{

  // Whether to launch sbt with a custom launcher jar instead of the bundled one.
  @BeanProperty
  var customLauncherEnabled: Boolean = false

  // Path to the custom sbt launcher jar (only used when customLauncherEnabled).
  @BeanProperty
  var customLauncherPath: String = ""

  // Maximum heap size (in MB, kept as a string for the settings UI) for the sbt process.
  @BeanProperty
  var maximumHeapSize: String = "768"

  // Extra JVM parameters passed to the sbt process.
  @BeanProperty
  var vmParameters: String = "-XX:MaxPermSize=384M"

  // Whether to run sbt on a custom JVM instead of the default one.
  @BeanProperty
  var customVMEnabled: Boolean = false

  // Home path of the custom JVM (only used when customVMEnabled).
  @BeanProperty
  var customVMPath: String = ""

  // Optional path to a custom sbt-structure extractor.
  @BeanProperty
  var customSbtStructurePath: String = ""

  /** Publishes a change notification for every per-project setting that differs between `old` and `current`. */
  def checkSettings(old: SbtProjectSettings, current: SbtProjectSettings) {
    if (old.jdkName != current.jdkName) {
      getPublisher.onJdkChanged(old.jdk, current.jdk)
    }
    if (old.resolveClassifiers != current.resolveClassifiers) {
      getPublisher.onResolveClassifiersChanged(old.resolveClassifiers, current.resolveClassifiers)
    }
    if (old.resolveSbtClassifiers != current.resolveSbtClassifiers) {
      getPublisher.onResolveSbtClassifiersChanged(old.resolveSbtClassifiers, current.resolveSbtClassifiers)
    }
    if (old.sbtVersion != current.sbtVersion) {
      getPublisher.onSbtVersionChanged(old.sbtVersion, current.sbtVersion)
    }
  }

  /** Snapshots the current mutable settings into a serializable state bean for persistence. */
  def getState = {
    val state = new SbtSystemSettingsState()
    fillState(state)
    state.customLauncherEnabled = customLauncherEnabled
    state.customLauncherPath = customLauncherPath
    state.maximumHeapSize = maximumHeapSize
    state.vmParameters = vmParameters
    state.customVMEnabled = customVMEnabled
    state.customVMPath = customVMPath
    // Note the field-name asymmetry: the state bean calls it customSbtStructureDir.
    state.customSbtStructureDir = customSbtStructurePath
    state
  }

  /** Restores settings from a previously persisted state bean. */
  def loadState(state: SbtSystemSettingsState) {
    super[AbstractExternalSystemSettings].loadState(state)
    customLauncherEnabled = state.customLauncherEnabled
    customLauncherPath = state.customLauncherPath
    maximumHeapSize = state.maximumHeapSize
    vmParameters = state.vmParameters
    customVMEnabled = state.customVMEnabled
    customVMPath = state.customVMPath
    customSbtStructurePath = state.customSbtStructureDir
  }

  /** Subscribes `listener` to per-project settings changes via the project's message bus. */
  def subscribe(listener: ExternalSystemSettingsListener[SbtProjectSettings]) {
    val adapter = new SbtProjectSettingsListenerAdapter(listener)
    getProject.getMessageBus.connect(getProject).subscribe(SbtTopic, adapter)
  }

  // No extra application-level settings beyond what the base class already copies.
  def copyExtraSettingsFrom(settings: SbtSystemSettings) {}

  /** Returns the settings of the linked SBT project that owns `module`, if any. */
  def getLinkedProjectSettings(module: Module): Option[SbtProjectSettings] =
    Option(ExternalSystemApiUtil.getExternalRootProjectPath(module)).safeMap(getLinkedProjectSettings)

  /** Returns the settings of the linked SBT project containing the given PSI element, if any. */
  def getLinkedProjectSettings(element: PsiElement): Option[SbtProjectSettings] =
    for {
      virtualFile <- Option(element.getContainingFile).safeMap(_.getVirtualFile)
      projectFileIndex = ProjectRootManager.getInstance(element.getProject).getFileIndex
      module <- Option(projectFileIndex.getModuleForFile(virtualFile))
      // Only answer for elements belonging to this settings instance's own project.
      if project == element.getProject
      projectSettings <- getLinkedProjectSettings(module)
    } yield projectSettings

  /** Looks up linked-project settings by path, retrying with a normalized path when the raw path misses. */
  override def getLinkedProjectSettings(linkedProjectPath: String): SbtProjectSettings =
    Option(super.getLinkedProjectSettings(linkedProjectPath))
      .getOrElse(super.getLinkedProjectSettings(ExternalSystemApiUtil.normalizePath(linkedProjectPath)))
}
/** Companion providing service-style access to the per-project settings instance. */
object SbtSystemSettings {
  // Settings are registered as a project service; this fetches the singleton for `project`.
  def getInstance(project: Project) = ServiceManager.getService(project, classOf[SbtSystemSettings])
}
/**
 * Serializable bean holding the persisted (XML) form of [[SbtSystemSettings]].
 */
class SbtSystemSettingsState extends AbstractExternalSystemSettings.State[SbtProjectSettings] {
  // Per-project settings in a sorted set.
  // NOTE(review): the setter below only *adds* to this set and never clears it;
  // presumably a fresh state instance is used on every deserialization — confirm
  // before reusing instances across loads.
  private val projectSettings = ContainerUtilRt.newTreeSet[SbtProjectSettings]()

  // The fields below mirror SbtSystemSettings; see that class for their meaning.
  @BeanProperty
  var customLauncherEnabled: Boolean = false

  @BeanProperty
  var customLauncherPath: String = ""

  @BeanProperty
  var maximumHeapSize: String = "768"

  @BeanProperty
  var vmParameters: String = "-XX:MaxPermSize=384M"

  @BeanProperty
  var customVMEnabled: Boolean = false

  @BeanProperty
  var customVMPath: String = ""

  @BeanProperty
  var customSbtStructureDir: String = ""

  // Serialized without a surrounding tag; each element is an SbtProjectSettings entry.
  @AbstractCollection(surroundWithTag = false, elementTypes = Array(classOf[SbtProjectSettings]))
  def getLinkedExternalProjectsSettings: util.Set[SbtProjectSettings] = {
    projectSettings
  }

  // Merges the given settings into the existing set (null-safe).
  def setLinkedExternalProjectsSettings(settings: util.Set[SbtProjectSettings]) {
    if (settings != null) {
      projectSettings.addAll(settings)
    }
  }
}
| whorbowicz/intellij-scala | src/org/jetbrains/sbt/settings/SbtSystemSettings.scala | Scala | apache-2.0 | 5,437 |
package com.rasterfoundry.api.tool
import com.rasterfoundry.api.utils.queryparams._
import com.rasterfoundry.datamodel._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.directives.ParameterDirectives.parameters
/** Akka HTTP directive mix-in for extracting tool-related query parameters. */
trait ToolQueryParameterDirective extends QueryParametersCommon {

  // Tool-only parameters: an optional `singleSource` boolean flag.
  val toolSpecificQueryParams = parameters(
    'singleSource.as[Boolean].?
  ).as(ToolQueryParameters.apply _)

  // Tool-specific parameters combined with the shared org/user/timestamp/search/
  // owner/ownership-type/group parameter sets from QueryParametersCommon.
  def combinedToolQueryParams =
    (
      toolSpecificQueryParams &
        orgQueryParams &
        userQueryParameters &
        timestampQueryParameters &
        searchParams &
        ownerQueryParameters &
        ownershipTypeQueryParameters &
        groupQueryParameters
    ).as(CombinedToolQueryParameters.apply _)
}
| raster-foundry/raster-foundry | app-backend/api/src/main/scala/tools/QueryParameters.scala | Scala | apache-2.0 | 754 |
import leon.lang._
object ObjectAliasing5 {
  case class A(var x: Int)
  case class B(a: A)

  // Returns the mutable field `b.a` directly, creating an alias to state owned by `b`.
  // NOTE(review): this lives under regression/xlang/error, so presumably Leon's
  // aliasing checker is *expected to reject* this — do not "fix" the aliasing here.
  def f1(b: B): A = {
    b.a
  }
}
| epfl-lara/leon | src/test/resources/regression/xlang/error/ObjectAliasing5.scala | Scala | gpl-3.0 | 132 |
package shop
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import cart.Item
import com.typesafe.config.ConfigFactory
import shop.ProductCatalogMessages.TopMatching
/**
 * Entry point: boots the remote product-catalog actor system and a local "main"
 * system, then fires a sample query for the top 10 matches of "gillette".
 */
object Main extends App {
  val config = ConfigFactory.load()
  // Each actor system reads its own config section, falling back to the shared config
  // (the sections presumably set distinct remoting ports — see application.conf).
  val catalogSystem = ActorSystem("productCatalog", config.getConfig("productcatalog").withFallback(config))
  val mainSystem = ActorSystem("main", config.getConfig("main").withFallback(config))

  // The router is registered under the name MainActor looks up via actorSelection.
  catalogSystem.actorOf(
    Props[ProductCatalogRouter],
    "productCatalog"
  )

  val mainActor = mainSystem.actorOf(Props[MainActor])
  mainActor ! TopMatching("gillette", 10)
}
/**
 * Forwards [[TopMatching]] queries to the remote product catalog and prints each
 * item of the returned result list.
 */
class MainActor extends Actor {
  // Remote selection of the catalog router started by Main's catalogSystem.
  private val catalog = context.system.actorSelection("akka.tcp://productCatalog@127.0.0.1:2553/user/productCatalog")

  def receive: Receive = {
    case msg: TopMatching => catalog ! msg
    // Generic type arguments are erased at runtime, so the original pattern
    // `List[Item]` matched *any* List and produced an unchecked-match warning.
    // Match List[_] instead — behavior is identical (printing does not need the
    // element type) and the misleading type claim is gone.
    case result: List[_] => result.foreach(println)
  }
}
| apisarek/reactivescala-course | src/main/scala/shop/Main.scala | Scala | mit | 897 |
/*
* Copyright 2016 Carlo Micieli
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.hascalator
package typeclasses
import Prelude._
import io.hascalator.data.NonEmpty
/** Behavioral tests for the [[Semigroup]] type class, using Int under addition as the instance. */
class SemigroupSpec extends AbstractTestSpec {
  // Additive Int semigroup shared by every test below.
  implicit val intSemigroup: Semigroup[Int] = new Semigroup[Int] {
    override def mAppend(x: Int, y: Int): Int = x + y
  }

  describe("SemiGroup") {
    describe("mAppend") {
      it("should append two values") {
        Semigroup[Int].mAppend(1, 2) shouldBe 3
      }
    }

    describe("sConcat") {
      it("should apply <> to NonEmpty lists") {
        // 1 + 2 + 3 + 4 = 10 under the additive instance.
        Semigroup[Int].sConcat(NonEmpty(1, 2, 3, 4)) shouldBe 10
      }
    }

    describe("sTimes") {
      it("should apply <> n times") {
        // Repeated addition of 2, four times.
        Semigroup[Int].sTimes(4)(2) shouldBe 4 * 2
      }

      it("should throw an exception when times == 0") {
        // sTimes requires a strictly positive repetition count.
        the[ApplicationException] thrownBy {
          Semigroup[Int].sTimes(0)(2)
        } should have message "*** Exception: Semigroup.sTimes: invalid value"
      }
    }
  }
}
| CarloMicieli/hascalator | core/src/test/scala/io/hascalator/typeclasses/SemigroupSpec.scala | Scala | apache-2.0 | 1,536 |
package org.randi3.schema
import scala.slick.driver.{BasicProfile, ExtendedProfile}
import scala.slick.lifted.DDL
/**
 * Slick schema for the block-randomization method: the `BlockRandomization`
 * configuration table plus the `Blocks` table holding per-stratum block entries,
 * both referencing tables from the shared [[DatabaseSchema]].
 */
class BlockRandomizationSchema(val driver: ExtendedProfile) {

  import driver.Implicit._
  import driver.simple._

  // Base schema providing the referenced RandomizationMethods and TreatmentArms tables.
  val schema = new DatabaseSchema(driver)

  import schema._

  // Configuration of a block randomization: (id, version, randomizationMethodId,
  // blocksize, minBlockSize, maxBlockSize).
  object BlockRandomizations extends Table[(Int, Int, Option[Int], Option[Int], Option[Int], Option[Int])]("BlockRandomization") {
    def id = column[Int]("id", O PrimaryKey, O AutoInc)

    def version = column[Int]("Version", O NotNull)

    def randomizationMethodId = column[Option[Int]]("RandomizationMethodId")

    def blocksize = column[Option[Int]]("Blocksize", O Nullable)

    def minBlockSize = column[Option[Int]]("MinBlocksize")

    def maxBlockSize = column[Option[Int]]("MaxBlocksize")

    def * = id ~ version ~ randomizationMethodId ~ blocksize ~ minBlockSize ~ maxBlockSize

    // Projection without the auto-increment id, for inserts.
    def noId = version ~ randomizationMethodId ~ blocksize ~ minBlockSize ~ maxBlockSize

    def randomizationMethod = foreignKey("BlockRandomizationFK_RandomizationMethod", randomizationMethodId, schema.RandomizationMethods)(_.id)
  }

  // One block entry per (method, treatment arm, stratum) combination.
  object Blocks extends Table[(Int, Option[Int], Int, String)]("Blocks") {
    def id = column[Int]("id", O PrimaryKey, O AutoInc)

    def randomizationMethodId = column[Option[Int]]("RandomizationMethodId")

    def treatmentArmId = column[Int]("TreatmentArmId")

    def stratum = column[String]("Stratum")

    def * = id ~ randomizationMethodId ~ treatmentArmId ~ stratum

    // Projection without the auto-increment id, for inserts.
    def noId = randomizationMethodId ~ treatmentArmId ~ stratum

    def randomizationMethod = foreignKey("BlockFK_RandomizationMethod", randomizationMethodId, schema.RandomizationMethods)(_.id)

    def treatmentArm = foreignKey("BlockFK_TreatmentArm", treatmentArmId, schema.TreatmentArms)(_.id)
  }

  /** Returns the combined DDL for both tables of this schema. */
  def getDatabaseTables: DDL = {
    (BlockRandomizations.ddl ++ Blocks.ddl)
  }
}
| dschrimpf/randi3-method-block | src/main/scala/org/randi3/schema/BlockRandomizationSchema.scala | Scala | gpl-3.0 | 2,119 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import org.apache.spark.util.collection.OpenHashSet
/**
* <span class="badge" style="float: right;">ALPHA COMPONENT</span>
* GraphX is a graph processing framework built on top of Spark.
*/
package object graphx {
  /**
   * A 64-bit vertex identifier that uniquely identifies a vertex within a graph. It does not need
   * to follow any ordering or any constraints other than uniqueness.
   */
  type VertexId = Long

  /** Integer identifier of a graph partition. Must be less than 2^30. */
  // TODO: Consider using Char.
  type PartitionID = Int

  // Internal alias: vertex-id sets are backed by an open-addressing hash set.
  private[graphx] type VertexSet = OpenHashSet[VertexId]
}
| sjtu-iiot/graphx-algorithm | src/main/scala/org/apache/spark/graphx/package.scala | Scala | gpl-2.0 | 1,441 |
// Exercise 4: Collections and operations.
// For this exercise, you'll want to open up the Scaladocs
// for the immutable List and Map types.
//
// NOTE: For a list of fun problems to practice these
// collection transformations, see http://projecteuler.net/.
import CheapTests._
// NOTE(review): `is` below is presumably an assertion helper supplied by
// CheapTests — confirm against that file.
// Here are collections we'll use:
val list = List("now", "is", "the", "time")
val names = Map(
  "Martin" -> "Odersky",
  "Joe" -> "Armstrong",
  "Simon" -> "Peyton Jones"
)
// To make the tests pass, replace the definitions of
//   val expectedN = list
// etc. with the correct calls to one (or more!) List or
// Map methods. For example, the first one should be
//   val expected1 = list.reverse
// (Do as many as you want.)
// NOTE(review): the `expectedN = list` placeholders are intentional — this is an
// exercise file; the assertions are supposed to fail until the student fixes them.
val expected1 = list
expected1 is List("time", "the", "is", "now")
val expected2 = list
expected2 is List("NOW", "IS", "THE", "TIME")
// Return the first element
val expected3 = list
expected3 is "now"
// Return the tail, all but the first element
val expected4 = list
expected4 is List("is", "the", "time")
// Return the last element. (Warning: O(n) time)
val expected5 = list
expected5 is "time"
// Sort the elements by length
val expected6 = list
expected6 is List("is", "now", "the", "time")
// "Partition" the collection elements into two collections,
// where one has the words that start with "t" and the other
// collection has the rest of the words.
val expected7 = list
expected7 is Tuple2(List("the", "time"),List("now", "is"))
// "Zip" the collection elements with their lengths.
val expected8 = list
expected8 is List(("now", 3), ("is", 2), ("the", 3), ("time", 4))
// Make a string from the list that matches the expected
// string shown
val expected9 = list
expected9 is "[now-is-the-time]"
// Map the "names" above to a 3-element list where each name is "first last".
// The anonymous function passed to map() can be written several ways. Note that
// the anonymous function expects a Tuple2 argument for the key and value, OR
// it can also be written as PartialFunction, which we'll discuss later:
val expected10 = names
expected10 is List("Martin Odersky", "Joe Armstrong", "Simon Peyton Jones")
// If you know SQL, you know the GROUP BY operation. Do the same thing using
// a single Map API call to group by the numbers used as keys.
// NOTE(review): a Map literal with duplicate keys keeps only the last value, so
// `stuff` is actually Map(1 -> "c", 2 -> "e", 3 -> "d"); the duplicate pairs here
// and in the expected values below are misleading — confirm the intended
// exercise semantics.
val stuff = Map(1 -> "a", 2 -> "b", 1 -> "c", 3 -> "d", 2 -> "e")
val expected11 = stuff
expected11 is Map(
  1 -> Map(1 -> "a", 1 -> "c"),
  2 -> Map(2 -> "b", 2 -> "e"),
  3 -> Map(3 -> "d")
)
// To anticipate the next section, note what happens when we retrieve
// map elements:
val expected12 = expected11
expected12 is Some(Map(1 -> "a", 1 -> "c"))
val expected13 = expected11
expected13 is None
val expected14 = expected11
expected14 is Map(4 -> "unknown")
// Final example:
import scala.collection.immutable.TreeSet
// Extract all the unique characters (respecting case) into a list.
// You could also just return the TreeSet, which is a Red-Black tree.
// Note the three function signatures. I'll explain why in class...
// Extract all unique characters (case-sensitive) from the input string(s),
// returned sorted in ascending Char order. The single-string and varargs
// overloads both delegate to the Seq[String] version.
def uniques(arg: String): List[Char] = uniques(List(arg))
def uniques(arg: String, args: String*): List[Char] = uniques(arg +: (args.toSeq))
def uniques(args: Seq[String]): List[Char] = {
  val allChars = args.flatMap(_.toList)         // every character, duplicates included
  val sortedUnique = new TreeSet[Char] ++ allChars // Red-Black tree de-dupes and sorts
  sortedUnique.toList
}
// Expected: the ten distinct characters; uppercase letters sort before lowercase.
val expected15 = List('N', 'T', 'e', 'h', 'i', 'm', 'o', 's', 't', 'w')
// All three calling styles resolve to the same computation.
uniques("Now", "is", "the", "Time") is expected15
uniques("Now" :: "is" :: "the" :: "Time" :: Nil) is expected15
uniques(List("Now", "is", "the", "Time")) is expected15
uniques("Now") is List('N', 'o', 'w')
println("Success!")
| deanwampler/SeductionsOfScalaTutorial | tutorial-exercises/ex4-collections.scala | Scala | apache-2.0 | 3,791 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.TableDrivenPropertyChecks
/** Base trait for suite property tests: FunSuite plus shared suite examples, table-driven checks, and matchers. */
trait SuiteProp extends FunSuite with SuiteExamples with TableDrivenPropertyChecks with Matchers
| travisbrown/scalatest | src/test/scala/org/scalatest/SuiteProp.scala | Scala | apache-2.0 | 774 |
/* Copyright 2017-18, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.ops.control_flow
import org.platanios.tensorflow.api.core.Shape
import org.platanios.tensorflow.api.core.exception._
import org.platanios.tensorflow.api.implicits.Implicits._
import org.platanios.tensorflow.api.ops._
import org.platanios.tensorflow.api.ops.Gradients.{Registry => GradientsRegistry}
import org.platanios.tensorflow.api.tensors.Tensor
import org.platanios.tensorflow.api.types.{INT32, RESOURCE}
import org.platanios.tensorflow.api.utilities.using
import org.platanios.tensorflow.jni.{TensorFlow => NativeLibrary}
import org.tensorflow.framework.AttrValue
import scala.reflect.ClassTag
/** Contains functions for constructing ops related to control flow.
*
* @author Emmanouil Antonios Platanios
*/
private[api] trait ControlFlow {
  /** Creates an op that produces the content of `input` only after all ops in `dependencies` have finished executing.
    *
    * In some cases, a user may want the output of an op to be consumed externally only after some other dependencies
    * have run first. This function ensures returns `input`, but only after all ops in `dependencies` have run. Note
    * that this means that there is no guarantee that `input` will be evaluated after any `dependencies` have run.
    *
    * @group ControlFlowOps
    * @param dependencies Set of ops to be executed before `input`.
    * @param input        Op output to be computed after all ops in `dependencies` have finished executing.
    * @param name         Name for the created op (used mainly as a name scope).
    * @return Created op output.
    */
  private[api] def withControlDependencies[T <: OutputLike](
      dependencies: Set[Op], input: T, name: String = "WithControlDependencies"): T = {
    Op.createWithNameScope(name, dependencies + input.op) {
      // Colocate the identity with the input so the control edge does not force a device copy.
      Op.colocateWith(Set[Op](input.op)) {
        Op.createWith(controlDependencies = dependencies) {
          // The identity op is what actually carries the control dependencies.
          Basic.identity(input)
        }
      }
    }
  }

  /** $OpDocControlFlowGroup
    *
    * @group ControlFlowOps
    * @param inputs Ops to group.
    * @param name   Name for the created op (used mainly as a name scope).
    * @return Created op output, which in this case is the result of a `noOp`.
    */
  def group(inputs: Set[Op], name: String = "Group"): Op = Op.createWith(Op.getGraphFromInputs(inputs)) {
    val inputsByDevice = inputs.groupBy(_.device)
    if (inputsByDevice.size == 1) {
      // 1-level tree. The root node is the returned no-op node.
      val (device, ops) = inputsByDevice.head
      if (device != null)
        Op.createWith(device = device, controlDependencies = ops)(noOp(name))
      else
        Op.createWith(controlDependencies = ops)(noOp(name))
    } else {
      // 2-level tree. The root node is the returned no-op node. `dependencies` contains 1 NoOp node for each device.
      // Sorting by device name keeps the generated graph deterministic.
      val dependencies = inputsByDevice.toSeq.sortBy(_._1).map {
        case (device, ops) =>
          if (device != null)
            Op.createWith(device = device, controlDependencies = ops)(noOp(name))
          else
            Op.createWith(controlDependencies = ops)(noOp(name))
      }
      Op.createWith(controlDependencies = dependencies.toSet)(noOp(name))
    }
  }

  /** $OpDocControlFlowTuple
    *
    * @group ControlFlowOps
    * @param inputs        Op outputs being grouped.
    * @param controlInputs Set of additional ops that have to finish before this op finishes, but whose outputs are not
    *                      returned.
    * @param name          Name for the created ops (used mainly as a name scope).
    * @return Created op outputs, which in this case are the values of `inputs`.
    */
  def tuple[T <: OutputLike](
      inputs: Array[T], controlInputs: Set[Op] = Set.empty, name: String = "Tuple")
      (implicit tag: ClassTag[T]): Array[T] = {
    // `null` entries pass through untouched; only real outputs gate on the group op.
    val gatingOps = inputs.filter(_ != null).map(_.op).toSet
    if (gatingOps.isEmpty) {
      inputs
    } else {
      Op.createWithNameScope(name, gatingOps) {
        val gate = group(gatingOps ++ controlInputs)
        inputs.map(input => if (input == null) input else withControlDependencies(Set[Op](gate), input))
      }
    }
  }

  /** $OpDocControlFlowNoOp
    *
    * @group ControlFlowOps
    * @param name Name for the created op.
    * @return Created op output.
    */
  def noOp(name: String = "NoOp"): Op = {
    Op.Builder(opType = "NoOp", name = name).build()
  }

  /** Creates an op that raises an exception to abort the process when called.
    *
    * @group ControlFlowOps
    * @param errorMessage     Error message associated with the exception.
    * @param exitWithoutError If `true`, the process will exit normally. Otherwise, it will exit with a `SIGABORT`
    *                         signal.
    * @param name             Name for the created op.
    * @return Created op output.
    */
  private[api] def abort(
      errorMessage: String = "", exitWithoutError: Boolean = false, name: String = "Abort"): Output = {
    Op.Builder(opType = "Abort", name = name)
        .setAttribute("error_message", errorMessage)
        .setAttribute("exit_without_error", exitWithoutError)
        .build().outputs(0)
  }

  /** $OpDocControlFlowCond
    *
    * @group ControlFlowOps
    * @param predicate `BOOLEAN` scalar determining whether to return the result of `trueFn` or `falseFn`.
    * @param trueFn    Function returning the computation to be performed if `predicate` is `true`.
    * @param falseFn   Function returning the computation to be performed if `predicate` is `false`.
    * @param name      Name prefix for the created ops.
    * @return Created op output structure, mirroring the return structure of `trueFn` and `falseFn`.
    * @throws InvalidDataTypeException If the data types of the tensors returned by `trueFn` and `falseFn` do not match.
    */
  @throws[InvalidDataTypeException]
  def cond[T, R](predicate: Output, trueFn: () => T, falseFn: () => T, name: String = "Cond")(implicit
      ev: CondOutput.Aux[T, R]
  ): T = {
    Op.createWithNameScope(name) {
      // If the predicate is a compile-time constant, skip graph construction for the dead branch entirely.
      Output.constantValue(predicate) match {
        case Some(predicateValue) if predicateValue.scalar == true => trueFn()
        case Some(predicateValue) if predicateValue.scalar == false => falseFn()
        case None =>
          // Add the switch to the graph.
          val (pFalse, pTrue) = ControlFlow.switch(predicate, predicate)
          val pivotTrue = Basic.identity(pTrue, "SwitchTrue")
          val pivotFalse = Basic.identity(pFalse, "SwitchFalse")
          val predicateId = Basic.identity(predicate, "PredicateIdentity")
          // Disable the fetching of tensors that are only on one branch of the cond.
          pTrue.op.graph.preventFetching(pTrue.op)
          pFalse.op.graph.preventFetching(pFalse.op)
          pivotTrue.op.graph.preventFetching(pivotTrue.op)
          pivotFalse.op.graph.preventFetching(pivotFalse.op)
          predicateId.op.graph.preventFetching(predicateId.op)

          // Build the graph for the true branch in a new context.
          val contextTrue = CondContext(predicateId, pivotTrue, TrueBranch)
          contextTrue.enter()
          val (originalResultTrue, resultTrue) = contextTrue.buildCondBranch(trueFn)
          contextTrue.exitResult(resultTrue)
          contextTrue.exit()

          // Build the graph for the false branch in a new context.
          val contextFalse = CondContext(predicateId, pivotFalse, FalseBranch)
          contextFalse.enter()
          val (_, resultFalse) = contextFalse.buildCondBranch(falseFn)
          contextFalse.exitResult(resultFalse)
          contextFalse.exit()

          // Check that the return values of the two branches have matching data types.
          resultTrue.zip(resultFalse).foreach(pair => {
            if (pair._1.dataType != pair._2.dataType)
              throw InvalidDataTypeException(
                s"The outputs of `trueFn` (dataType = ${pair._1.dataType}) and " +
                    s"`falseFn` (dataType = ${pair._2.dataType}) must have the same data type.")
          })

          // Add to collections.
          Op.currentGraph.addToCollection(contextTrue, CondContext.COND_CONTEXTS)
          Op.currentGraph.addToCollection(contextFalse, CondContext.COND_CONTEXTS)

          // Add the final merge to the graph.
          val merges = resultFalse.zip(resultTrue).map(p => ControlFlow.merge(Seq(p._1, p._2))._1)
          ev.unflatten(originalResultTrue, merges)
      }
    }
  }

  /** $OpDocControlFlowCases
    *
    * @group ControlFlowOps
    * @param predicateFnPairs Contains pairs of predicates and value functions for those predicates.
    * @param default          Default return value function, in case all predicates evaluate to `false`.
    * @param exclusive        If `true`, only one of the predicates is allowed to be `true` at the same time.
    * @param name             Name prefix for the created ops.
    * @return Created op output structure, mirroring the return structure of the provided predicate functions.
    * @throws InvalidDataTypeException If the data types of the tensors returned by the provided predicate functions
    *                                  do not match.
    */
  @throws[InvalidDataTypeException]
  def cases[T, R](
      predicateFnPairs: Seq[(Output, () => T)],
      default: () => T,
      exclusive: Boolean = false,
      name: String = "Cases"
  )(implicit
      ev: CondOutput.Aux[T, R]
  ): T = {
    Op.createWithNameScope(name) {
      // To evaluate the conditions in the correct order, we create nested conditions in reverse.
      // Folding backwards makes the first pair the outermost cond, so it is tested first.
      val fn = predicateFnPairs.reverse.foldLeft(default) {
        case (falseFn, predicateFnPair) => () =>
          cond(
            predicate = predicateFnPair._1,
            trueFn = predicateFnPair._2,
            falseFn = falseFn)
      }
      if (exclusive) {
        // Guard with a runtime assertion that at most one predicate holds.
        Op.createWith(controlDependencies = Set(Checks.assertAtMostNTrue(
          predicateFnPairs.map(_._1), n = 1, message = "'cases' was created with 'exclusive = true'."))) {
          fn()
        }
      } else {
        fn()
      }
    }
  }

  /** $OpDocControlFlowWhileLoop
    *
    * @group ControlFlowOps
    * @param predicateFn           Function returning the computation to be performed to determine whether to continue
    *                              looping or terminate.
    * @param bodyFn                Function returning the computation to be performed in the loop body.
    * @param loopVariables         Loop variables (possibly a structure over tensors).
    * @param shapeInvariants       Shape invariants for the loop variables.
    * @param parallelIterations    Number of iterations allowed to run in parallel.
    * @param enableBackPropagation If `true`, back-propagation support is enabled for this while-loop context.
    * @param swapMemory            If `true`, GPU-CPU memory swapping support is enabled for this while-loop context.
    * @param maximumIterations     Optional `INT32` scalar specifying the maximum number of iterations to loop for. If
    *                              `null` (the default), no iteration limit is enforced.
    * @param name                  Name prefix for the created ops.
    * @return Created op output structure containing the loop variables values after the loop finishes, mirroring the
    *         return structure of `bodyFn`.
    */
  def whileLoop[T, TS](
      predicateFn: T => Output, bodyFn: T => T, loopVariables: T, shapeInvariants: Option[TS] = None,
      parallelIterations: Int = 10, enableBackPropagation: Boolean = true, swapMemory: Boolean = false,
      maximumIterations: Output = null, name: String = "WhileLoop"
  )(implicit
      ev: WhileLoopVariable.Aux[T, TS]
  ): T = {
    require(parallelIterations > 0, "'parallelIterations' must be a positive integer.")
    Op.createWithNameScope(name) {
      val loopContext = WhileLoopContext(
        Option(maximumIterations), parallelIterations, enableBackPropagation, swapMemory)
      Op.currentGraph.addToCollection(loopContext, WhileLoopContext.WHILE_LOOP_CONTEXTS)
      if (maximumIterations == null) {
        loopContext.buildLoop(predicateFn, bodyFn, loopVariables, shapeInvariants)
      } else {
        require(maximumIterations.rank == 0 || maximumIterations.rank == -1,
          s"'maximumIterations' must be a scalar, but has shape ${maximumIterations.shape}.")
        // Wrap the user's loop state in a (counter, state) tuple: the counter starts at
        // zero, is incremented each iteration, and the predicate additionally requires
        // counter < maximumIterations. Only the user's state (`._2`) is returned.
        val zero = Basic.constant(0, name = "Zero")
        val one = Basic.constant(1, name = "One")
        // Building a loop involves mutating ops and thus we need to lock on the graph.
        Op.currentGraph.synchronized {
          loopContext.buildLoop[(Output, T), (Shape, TS)](
            (v: (Output, T)) => Math.logicalAnd(v._1 < maximumIterations, predicateFn(v._2)),
            (v: (Output, T)) => (v._1 + one, bodyFn(v._2)),
            (zero, loopVariables),
            shapeInvariants.map((Shape.scalar(), _)))._2
        }
      }
    }
  }
}
private[api] object ControlFlow extends ControlFlow {
  /** Convenience wrapper exposing control-flow predicates for a single op. */
  case class ControlFlowOps(op: Op) {
    /** Returns `true` if the provided op is within a cond statement. */
    def isInCond: Boolean = op.controlFlowContext.flatMap(_.condContext).isDefined

    /** Returns `true` if the provided op is within a while loop statement. */
    def isInWhileLoop: Boolean = op.controlFlowContext.flatMap(_.whileLoopContext()).isDefined

    /** Returns `true` if the provided op is within an XLA control flow context. */
    def isInXLAContext: Boolean = {
      val xlaCompile = {
        try {
          op.booleanAttribute("_XlaCompile")
        } catch {
          // Most ops do not carry the "_XlaCompile" attribute; a failed lookup
          // simply means the op is not marked for XLA compilation.
          case _: IllegalArgumentException => false
        }
      }
      xlaCompile || op.controlFlowContext.flatMap(_.xlaContext).isDefined
    }
  }
/** Returns `true` if and only if the provided op is a switch op. */
private[ops] def isSwitch(op: Op): Boolean = op.opType == "Switch" || op.opType == "RefSwitch"
/** Returns `true` if and only if the provided op is a loop invariant. */
private[ops] def isLoopEnter(op: Op): Boolean = op.opType == "Enter" || op.opType == "RefEnter"
  /** Returns `true` if and only if the provided op is a constant loop invariant. */
  private[ops] def isLoopConstantEnter(op: Op): Boolean = {
    // An enter op whose "is_constant" attribute is set injects a value that is invariant across iterations.
    isLoopEnter(op) && op.booleanAttribute("is_constant")
  }
/** Returns `true` if and only if the provided op is a loop exit op. */
private[ops] def isLoopExit(op: Op): Boolean = op.opType == "Exit" || op.opType == "RefExit"
/** Returns `true` if and only if the provided op is a switch op for a while loop. */
private[ops] def isLoopSwitch(op: Op): Boolean = {
isSwitch(op) && op.controlFlowContext.isDefined && op.controlFlowContext.get.isInstanceOf[WhileLoopContext]
}
/** Returns the enter op if we can infer `value` to be a loop invariant. Otherwise, returns [[None]]. */
private[control_flow] def getLoopConstantEnter(value: Output): Option[Op] = {
val identityOpTypes = Set("Identity", "RefIdentity", "Switch", "RefSwitch")
var op = value.op
while (identityOpTypes.contains(op.opType))
op = op.inputs(0).op
Some(op).filter(isLoopConstantEnter)
}
/** Returns the control flow context for the outputs of an op. */
private[ops] def getOutputContext(op: Op): Option[Context] = {
val context = op.controlFlowContext
if (isLoopExit(op))
context.flatMap(_.outerContext)
else
context
}
  /** Returns `true` if `maybeContainingContext` is or contains `context`. */
  private[ops] def isContainingContext(context: Context, maybeContainingContext: Option[Context]): Boolean = {
    if (maybeContainingContext.isEmpty && context == null) {
      // Both sides denote the outermost (no-context) frame, which trivially contains itself.
      true
    } else {
      maybeContainingContext.exists(containingContext => {
        // Walk outward from `context` until we either reach the candidate container or run out of contexts.
        var currentContext = Option(context)
        while (currentContext.exists(_ != containingContext))
          currentContext = currentContext.flatMap(_.outerContext)
        currentContext.contains(containingContext)
      })
    }
  }
  /** Checks whether `inputOp` can be used from within the `op`'s context. Conceptually, only inputs from an op's while
    * loop context or any ancestor while loop context (including outside of any context) are valid. In practice, there
    * are many other edge cases as well.
    *
    * @param  op      Op that consumes the input.
    * @param  inputOp Op that produces the input being checked.
    * @throws InvalidArgumentException If `inputOp` cannot legally be used as an input to `op` given their respective
    *                                  control flow contexts.
    */
  @throws[InvalidArgumentException]
  private[ops] def checkInputFromValidContext(op: Op, inputOp: Op): Unit = {
    val opContext = op.controlFlowContext
    val inputContext = getOutputContext(inputOp)
    // `errorMessage` stays `null` for every valid case, and carries a human-readable explanation otherwise.
    val errorMessage = inputContext match {
      case None => null // `inputOp` is not in a control flow context.
      case Some(context) if context == opContext.orNull => null // `inputOp` is in the same control flow context.
      case Some(context) =>
        val whileContext = opContext.flatMap(_.whileLoopContext())
        val inputWhileContext = context.whileLoopContext()
        whileContext match {
          case None =>
            if (inputWhileContext.isEmpty) {
              // Neither `op` nor `inputOp` is in a while loop, but one or both are in conditionals. We allow this,
              // although execution will fail if the branch corresponding to the `inputOp`'s conditional context is not
              // taken.
              null
            } else if (isLoopEnter(op) || isSwitch(op)) {
              // The while loop building code clears the context for enter nodes, and the conditional context add value
              // code clears the context for switch nodes.
              null
            } else {
              s"Cannot use '${inputOp.name}' as input to '${op.name}' because '${inputOp.name}' is in a while loop."
            }
          case Some(whileLoopContext) if isContainingContext(whileLoopContext, inputWhileContext) =>
            // `inputOp` is in a while loop which contains `op`'s while loop (or not in a while loop at all).
            null
          case Some(whileLoopContext) if whileLoopContext.gradientLoopState.isDefined &&
              isContainingContext(whileLoopContext.gradientLoopState.get.forwardContext, inputWhileContext) =>
            // `op` is in a gradient context and `inputOp` is in the associated forward pass context or an ancestor
            // thereof. This case is needed to build while loop gradients. Note that we theoretically also need this
            // case for custom gradient functions that close over tensors from ancestor contexts, but this has not been
            // verified yet.
            null
          case Some(whileLoopContext) if whileLoopContext.gradientLoopState.isDefined &&
              whileLoopContext.gradientLoopState.get.forwardContext ==
                  inputWhileContext.flatMap(_.outerContext).orNull =>
            // `op` is in a gradient context and `inputOp` is in a child of the associated forward pass context. This
            // case is needed for the gradients of while loops with conditionals.
            null
          case Some(whileLoopContext) if inputWhileContext.flatMap(_.gradientLoopState).isDefined &&
              inputWhileContext.flatMap(_.gradientLoopState).get.forwardContext == whileLoopContext =>
            // `inputOp` is in the gradient context of `op`'s context. This case is needed when the gradient of a while
            // loop gradient is requested (this will eventually fail unless there is a `stopGradient` op or similar).
            null
          case Some(whileLoopContext) if inputWhileContext.flatMap(_.gradientLoopState).isDefined &&
              context.gradientLoopState.flatMap(_.forwardContext.gradientLoopState).isDefined &&
              context.gradientLoopState
                  .flatMap(_.forwardContext.gradientLoopState).get.forwardContext == whileLoopContext =>
            // `inputOp` is in the gradient gradient context of `op`'s context. This case is needed when the gradient of
            // a while loop gradient is requested (this will eventually fail unless there is a `stopGradient` op or
            // similar).
            null
          case _ =>
            s"Cannot use '${inputOp.name}' as input to '${op.name}' because they are in different while loops."
        }
    }
    if (errorMessage != null)
      throw InvalidArgumentException(errorMessage)
  }
  /** Calculates a maximum size for use by stack ops inside XLA while loops.
    *
    * @param value            Value inside the while loop forward context. Used for printing error messages.
    * @param whileLoopContext Forward context inside which value resides. This does not always match the value's
    *                         immediate context, as `value` may be inside e.g., a cond context, inside the while loop.
    * @return Tensor containing the `maxSize` to feed to a stack initializer.
    * @throws InvalidArgumentException If `value` is nested inside a while loop that either lacks a `maximumIterations`
    *                                  parameter, or whose `maximumIterations` parameter is inside a while loop that is
    *                                  a parent of the calling context, and cannot be evaluated at graph build time
    *                                  (i.e., statically) to a constant value.
    */
  @throws[InvalidArgumentException]
  private[control_flow] def getMaxSizeFromNestedMaximumIterations(
      value: Output,
      whileLoopContext: WhileLoopContext
  ): Output = {
    val valueName = value.name
    // `currentContext` is the context that `tf.gradients()` was called in.
    val currentContext = Op.currentControlFlowContext
    val currentContextName = currentContext.map(_.name).getOrElse("")
    // Loop through all containing while-loop contexts between the value and the current context, multiplying together
    // each context's `maxIterations`, in order to get the maximum stack size.
    var maxSize = Basic.constant(1)
    var currentWhileLoopContext: Option[WhileLoopContext] = Some(whileLoopContext)
    while (currentWhileLoopContext.isDefined) {
      currentWhileLoopContext.get.maximumIterations match {
        case None => throw InvalidArgumentException(
          s"Cannot create a gradient accumulator for tensor '$valueName', inside an XLA while loop, because " +
              "'maximumIterations' was not passed to the `tf.whileLoop()` call " +
              s"('${currentWhileLoopContext.get.name}').")
        case Some(maximumIterations) =>
          val maximumIterationsContext = maximumIterations.op.controlFlowContext
          // If `maximumIterationsContext` (non-strictly) contains `currentContext`, then it is ok to use.
          if (isContainingContext(currentContext.orNull, maximumIterationsContext)) {
            // Note that this multiplication creates graph ops, accumulating the product of all the
            // `maximumIterations` of the nested while loops into `maxSize`.
            maxSize *= maximumIterations
          } else {
            // We cannot use `maximumIterations` because it is defined in a nested while-loop or cond context, and so
            // an error will be thrown if we try to use it as input to any ops in `currentContext` (e.g., `maxSize` or
            // the final accumulator stack). We attempt to get a constant value out to use instead.
            Output.constantValue(maximumIterations) match {
              case Some(constantMaximumIterations) => maxSize *= constantMaximumIterations
              case None => throw InvalidArgumentException(
                s"Cannot create a gradient accumulator for tensor '$valueName', inside an XLA while loop, because " +
                    s"the 'maximumIterations' tensor ('${maximumIterations.name}') for while-loop context " +
                    s"'${currentWhileLoopContext.get.name}' must be statically known (e.g., a constant value or " +
                    "known shape dimension), or must be defined at or outside the while-loop context " +
                    s"'$currentContextName' (currently defined in '${maximumIterationsContext.get.name}').")
            }
          }
      }
      // Find the next outer while-loop context, or stop if we have reached the `tf.gradients()` context.
      currentWhileLoopContext = currentWhileLoopContext
          .flatMap(_.outerContext.flatMap(_.whileLoopContext(currentContext)))
    }
    maxSize
  }
/** Creates an op that forwards `input` to the output port determined by `predicate`, while making sure the new op is
* colocated with `input`.
*
* If `predicate` is `true`, then `input` is forwarded to `outputTrue`. Otherwise, it goes to `outputFalse`.
*
* @param input Tensor to be forwarded to the appropriate output.
* @param predicate Scalar boolean tensor that specifies which output port will receive `input`.
* @param name Name for the created op.
* @return Tuple containing `outputFalse` and `outputTrue`, in that order.
*/
private[control_flow] def colocatedSwitch[T <: OutputLike](
input: T,
predicate: Output,
name: String = "Switch"
): (T, T) = {
// The device colocation below addresses the following scenario:
//
// Assume you execute Optimizer.applyGradients() in a branch of a cond() and:
// 1. The update op is created inside a `Op.colocateWith(Set(var.op)) { }` block.
// 2. Some tensor `data` is captured and a switch is created in a `Op.colocateWith(Set(data.op)) { }` block.
//
// Op.colocateWith(Set(var.op)) {
// Op.colocateWith(Set(data.op)) {
// op = ...
// }
// }
//
// `var` and `data` may be pinned to different devices and so we want the ops created within the
// `Op.colocateWith(Set(data.op)) { }` block to ignore the existing stack.
Op.colocateWith(Set(input.op), ignoreExisting = true)(switch(input, predicate, name))
}
/** Returns an `assert` op that checks that the provided predicates are exclusive (i.e., not more than one of them can
* be `true` at the same time). */
private[ControlFlow] def assertExclusive(predicates: Seq[Output]): Op = {
val stacked = Basic.stack(predicates, name = "StackedPredicates")
val numTrue = Math.sum(Math.cast(stacked, INT32), name = "NumTruePredicates")
val atMostOneTrue = Math.less(numTrue, Basic.constant(2, name = "TwoTruePredicates"))
val errorData =
Seq(
Basic.constant(Tensor(
"More than one condition evaluated as 'true' but 'exclusive = true'. " +
s"Conditions: (${predicates.map(_.name).mkString(", ")}), Values: ")),
stacked)
Checks.assert(atMostOneTrue, errorData, summarize = predicates.size)
}
//region Low Level Ops
/** Creates an op that does nothing and serves as a control trigger for scheduling. The created op is only useful as
* a placeholder for control edges.
*
* @param name Name for the created op.
* @return Created op output.
*/
private[control_flow] def controlTrigger(name: String = "ControlTrigger"): Op = {
Op.Builder(opType = "ControlTrigger", name = name).build()
}
/** Creates an op that forwards its input to the output.
*
* The op represents the loop termination condition used by the "pivot" switches of a loop.
*
* @param input Boolean scalar tensor, representing the branch predicate of the switch op.
* @param name Name for the created op.
* @return Created op output, which has the same value as the input tensor.
*/
private[control_flow] def loopCond(input: Output, name: String = "LoopCond"): Output = {
Op.Builder(opType = "LoopCond", name = name)
.addInput(input)
.build().outputs(0)
}
/** Creates an op that makes its input available to the next iteration.
*
* @param input Tensor to make available to the next iteration.
* @param name Name for the created op.
* @return Created op output, which is the same as `input`.
*/
private[control_flow] def nextIteration[T <: OutputLike](input: T, name: String = "NextIteration"): T = {
val result = {
input match {
case i: Output =>
Op.Builder("NextIteration", name)
.addInput(i)
.build().outputs(0)
case i: OutputIndexedSlices => Op.createWithNameScope(name) {
val values = nextIteration(i.values, "Values")
val indices = nextIteration(i.indices, "Indices")
val denseShape = {
if (i.denseShape != null)
nextIteration(i.denseShape, "DenseShape")
else
null
}
OutputIndexedSlices(indices = indices, values = values, denseShape = denseShape)
}
case i: SparseOutput => Op.createWithNameScope(name) {
val values = nextIteration(i.values, "Values")
val indices = nextIteration(i.indices, "Indices")
val denseShape = nextIteration(i.denseShape, "DenseShape")
SparseOutput(indices = indices, values = values, denseShape = denseShape)
}
}
}
result.asInstanceOf[T]
}
/** Creates an op that creates or finds a child frame, and makes `input` available to that child frame.
*
* The op is used together with `exit` to create loops in the graph. The unique `frameName` is used by the `Executor`
* to identify frames. If `isConstant` is `true`, then the output is a constant in the child frame. Otherwise, it may
* be changed in the child frame. At most `parallelIterations` iterations are run in parallel in the child frame.
*
* @param input Tensor to be made available to the child frame.
* @param frameName Name of the child frame.
* @param isConstant If `true`, the output is constant within the child frame.
* @param parallelIterations Number of iterations allowed to run in parallel.
* @param useInputShape If `true`, the output tensor's shape is manually set to the input tensor's shape.
* @param name Name for the created op.
* @return Created op output, which is the same as `input`.
*/
private[control_flow] def enter[T <: OutputLike](
input: T, frameName: String, isConstant: Boolean = false, parallelIterations: Int = 10,
useInputShape: Boolean = true, name: String = "Enter"): T = {
val result = {
input match {
case i: Output =>
val result = Op.Builder("Enter", name)
.addInput(i)
.setAttribute("frame_name", frameName)
.setAttribute("is_constant", isConstant)
.setAttribute("parallel_iterations", parallelIterations)
.build().outputs(0)
if (useInputShape)
result.setShape(i.shape)
result
case i: OutputIndexedSlices => Op.createWithNameScope(name) {
val values = enter(i.values, frameName, isConstant, parallelIterations, useInputShape, "Values")
val indices = enter(i.indices, frameName, isConstant, parallelIterations, useInputShape, "Indices")
val denseShape = {
if (i.denseShape != null)
enter(i.denseShape, frameName, isConstant, parallelIterations, useInputShape, "DenseShape")
else
null
}
OutputIndexedSlices(indices = indices, values = values, denseShape = denseShape)
}
case i: SparseOutput => Op.createWithNameScope(name) {
val values = enter(i.values, frameName, isConstant, parallelIterations, useInputShape, "Values")
val indices = enter(i.indices, frameName, isConstant, parallelIterations, useInputShape, "Indices")
val denseShape = enter(
i.denseShape, frameName, isConstant, parallelIterations, useInputShape, "DenseShape")
SparseOutput(indices = indices, values = values, denseShape = denseShape)
}
}
}
result.asInstanceOf[T]
}
/** Creates an op that exits from the current frame to its parent frame.
*
* The op makes `input` available to the parent frame.
*
* @param input Tensor to be made available to the parent frame.
* @param name Name for the created op.
* @return Created op output, which is the same as `input`.
*/
private[control_flow] def exit[T <: OutputLike](input: T, name: String = "Exit"): T = {
val result = {
input match {
case i: Output =>
Op.Builder("Exit", name)
.addInput(i)
.build().outputs(0)
case i: OutputIndexedSlices => Op.createWithNameScope(name) {
val values = exit(i.values, "Values")
val indices = exit(i.indices, "Indices")
val denseShape = {
if (i.denseShape != null)
exit(i.denseShape, "DenseShape")
else
null
}
OutputIndexedSlices(indices = indices, values = values, denseShape = denseShape)
}
case i: SparseOutput => Op.createWithNameScope(name) {
val values = exit(i.values, "Values")
val indices = exit(i.indices, "Indices")
val denseShape = {
if (i.denseShape != null)
exit(i.denseShape, "DenseShape")
else
null
}
SparseOutput(indices = indices, values = values, denseShape = denseShape)
}
}
}
result.asInstanceOf[T]
}
/** Creates an op that forwards `input` to the output port determined by `predicate`.
*
* If `predicate` is `true`, then `input` is forwarded to `outputTrue`. Otherwise, it goes to `outputFalse`.
*
* @param input Tensor to be forwarded to the appropriate output.
* @param predicate Scalar boolean tensor that specifies which output port will receive `input`.
* @param name Name for the created op.
* @return Tuple containing `outputFalse` and `outputTrue`, in that order.
*/
private[control_flow] def switch[T <: OutputLike](input: T, predicate: Output, name: String = "Switch"): (T, T) = {
val result = {
input match {
case i: Output =>
val outputs = Op.Builder("Switch", name)
.addInput(i)
.addInput(predicate)
.build().outputs
(outputs(0), outputs(1))
case i: OutputIndexedSlices => Op.createWithNameScope(name) {
val (valuesFalse, valuesTrue) = switch(i.values, predicate, "Values")
val (indicesFalse, indicesTrue) = switch(i.indices, predicate, "Indices")
val (denseShapeFalse, denseShapeTrue) = {
if (i.denseShape != null)
switch(i.denseShape, predicate, "DenseShape")
else
(null, null)
}
(OutputIndexedSlices(indices = indicesFalse, values = valuesFalse, denseShape = denseShapeFalse),
OutputIndexedSlices(indices = indicesTrue, values = valuesTrue, denseShape = denseShapeTrue))
}
case i: SparseOutput => Op.createWithNameScope(name) {
val (valuesFalse, valuesTrue) = switch(i.values, predicate, "ValuesSwitch")
val (indicesFalse, indicesTrue) = switch(i.indices, predicate, "IndicesSwitch")
val (denseShapeFalse, denseShapeTrue) = {
if (i.denseShape != null)
switch(i.denseShape, predicate, "DenseShape")
else
(null, null)
}
(SparseOutput(indices = indicesFalse, values = valuesFalse, denseShape = denseShapeFalse),
SparseOutput(indices = indicesTrue, values = valuesTrue, denseShape = denseShapeTrue))
}
}
}
result.asInstanceOf[(T, T)]
}
/** Creates an op that forwards the value of an available tensor from `inputs` to `output`.
*
* The op tests each of the tensors in `inputs` in turn to determine if any of them is available. If it finds an
* available tensor, it returns it and its index, `outputIndex`, in `inputs`.
*
* No more than one tensor in `inputs` should be available. If no tensor in `inputs` is available, the returned
* tensor and index are not set.
*
* This op is usually combined with `switch` to implement branching.
*
* IMPORTANT NOTE: The input tensors can either all be of type [[Output]] or [[SparseOutput]] or of mixed types that
* extend [[OutputLike]]. If they are all of type [[Output]] or [[SparseOutput]], then that is also the return op
* type. Otherwise, they will all be converted to [[OutputIndexedSlices]] first.
*
* @param inputs Input tensors.
* @param name Name for the created op.
* @return Tuple containing `output` and `outputIndex`, in that order.
*/
@throws[IllegalArgumentException]
private[control_flow] def merge[T <: OutputLike](inputs: Seq[T], name: String = "Merge"): (T, Output) = {
val result = {
inputs match {
case i if i.forall(_.isInstanceOf[Output]) =>
val outputs = Op.Builder("Merge", name)
.addInputList(i.map(_.asInstanceOf[Output]))
.build().outputs
(outputs(0), outputs(1))
case i if i.forall(_.isInstanceOf[SparseOutput]) => Op.createWithNameScope(name) {
val ii = i.map(_.asInstanceOf[SparseOutput])
val (indices, chosenIndex) = merge(ii.map(_.indices), "Indices")
val (values, _) = merge(ii.map(_.values), "Values")
val (denseShape, _) = if (ii.map(_.denseShape).exists(_ != null)) {
if (ii.map(_.denseShape).contains(null))
throw new IllegalArgumentException(
"Either all merged 'SparseOutput's must have a known dense shape, or none of them.")
merge(ii.map(_.denseShape), "DenseShape")
} else {
null
}
(SparseOutput(indices = indices, values = values, denseShape = denseShape), chosenIndex)
}
case i => Op.createWithNameScope(name) {
val ii = i.map(_.toOutputIndexedSlices(optimize = false))
val (indices, chosenIndex) = merge(ii.map(_.indices), "Indices")
val (values, _) = merge(ii.map(_.values), "Values")
val (denseShape, _) = if (ii.map(_.denseShape).exists(_ != null)) {
if (ii.map(_.denseShape).contains(null))
throw new IllegalArgumentException(
"Either all merged 'OutputIndexedSlices' must have a known dense shape, or none of them.")
merge(ii.map(_.denseShape), "DenseShape")
} else {
null
}
(OutputIndexedSlices(indices = indices, values = values, denseShape = denseShape), chosenIndex)
}
}
}
result.asInstanceOf[(T, Output)]
}
//endregion Low Level Ops
//region Native Library Functions
/** Replaces the `index`th input of `op` with `newInput`. */
private[control_flow] def updateInput(op: Op, index: Int, newInput: Output): Unit = {
using(op.graph.reference)(r => {
NativeLibrary.updateInput(r.nativeHandle, op.nativeHandle, index, newInput.op.nativeHandle, newInput.index)
})
op.reloadNumInputs()
op.reloadInputs()
}
/** Adds `inputOp` as a control input of `op`. */
private[control_flow] def addControlInput(op: Op, inputOp: Op): Unit = {
using(op.graph.reference)(r => {
NativeLibrary.addControlInput(r.nativeHandle, op.nativeHandle, inputOp.nativeHandle)
})
op.reloadNumControlInputs()
op.reloadControlInputs()
}
/** Clears the control inputs of `op` (i.e., removes all of them). */
private[control_flow] def clearControlInputs(op: Op): Unit = {
using(op.graph.reference)(r => {
NativeLibrary.clearControlInputs(r.nativeHandle, op.nativeHandle)
})
op.reloadNumControlInputs()
op.reloadControlInputs()
}
/** Sets attribute `name` of `op` to the provided value. */
private[control_flow] def setAttribute(op: Op, name: String, value: AttrValue): Unit = {
using(op.graph.reference)(r => {
NativeLibrary.setAttributeProto(r.nativeHandle, op.nativeHandle, name, value.toByteArray)
})
}
//endregion Native Library Functions
//region Gradients
  /** Contains the gradient functions for the low-level control flow ops defined in this file, along with their
    * registrations in the gradients registry. Each gradient function takes the forward op and the gradients flowing
    * into its outputs, and returns the gradients for its inputs (using `null` for non-differentiable inputs). */
  private[ops] object Gradients {
    GradientsRegistry.registerNonDifferentiable("ControlTrigger")
    GradientsRegistry.register("LoopCond", loopCondGradient)
    GradientsRegistry.register("NextIteration", nextIterationGradient)
    GradientsRegistry.register("RefNextIteration", nextIterationGradient)
    GradientsRegistry.register("Enter", enterGradient)
    GradientsRegistry.register("RefEnter", enterGradient)
    GradientsRegistry.register("Exit", exitGradient)
    GradientsRegistry.register("RefExit", exitGradient)
    GradientsRegistry.register("Switch", switchGradient)
    GradientsRegistry.register("RefSwitch", switchGradient)
    GradientsRegistry.register("Merge", mergeGradient)
    GradientsRegistry.register("RefMerge", mergeGradient)

    /** We stop back-propagation for the predicate of a while loop. */
    private[this] def loopCondGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      Seq(null)
    }

    /** A forward next-iteration op is translated into a back-propagation identity op. Note that the back-propagation
      * next-iteration op is added in switch op gradient. */
    private[this] def nextIterationGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      outputGradients
    }

    /** Gradients for an enter op are calculated using an exit op. For loop variables, `outputGradients` is the gradient
      * and so we just add an exit op. For loop invariants, we need to add an accumulator loop. */
    private[this] def enterGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      Op.currentControlFlowContext.map(gradientContext => {
        if (!gradientContext.backPropagate) {
          // We skip gradient computation in this case.
          outputGradients
        } else if (gradientContext.gradientLoopState.isEmpty) {
          // We pass the gradient through if we are not in a gradient while-loop context.
          outputGradients
        } else if (op.booleanAttribute("is_constant")) {
          // We add a gradient accumulator for each while-loop invariant.
          Seq(gradientContext.asInstanceOf[WhileLoopContext].addBackwardAccumulator(op, outputGradients.head))
        } else {
          val gradientWhileLoopContext = gradientContext.asInstanceOf[WhileLoopContext]
          val result = Seq(exit(outputGradients.head))
          // Record the exit (and each of its component tensors) as a loop exit of the gradient while-loop context.
          result(0) match {
            case o: Output => gradientWhileLoopContext.loopExits += o
            case o: OutputIndexedSlices =>
              gradientWhileLoopContext.loopExits += o.indices
              gradientWhileLoopContext.loopExits += o.values
              if (o.denseShape != null)
                gradientWhileLoopContext.loopExits += o.denseShape
            case o: SparseOutput =>
              gradientWhileLoopContext.loopExits += o.indices
              gradientWhileLoopContext.loopExits += o.values
              if (o.denseShape != null)
                gradientWhileLoopContext.loopExits += o.denseShape
          }
          gradientContext.exitResult(result)
          result
        }
      }).get
    }

    /** Gradients for an exit op are calculated using an enter op. */
    @throws[UnimplementedException]
    private[this] def exitGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      Op.currentControlFlowContext.map(gradientContext => {
        if (!gradientContext.backPropagate) {
          // We skip gradient computation in this case.
          Seq(null)
        } else if (op.controlFlowContext.flatMap(_.gradientLoopState).isDefined) {
          throw UnimplementedException("Second-order gradients are not supported for while loops.")
        } else {
          // Register the incoming gradient (and each of its component tensors) as a value of the gradient context.
          outputGradients.head match {
            case o: Output => gradientContext.values += o.name
            case o: OutputIndexedSlices =>
              gradientContext.values += o.indices.name
              gradientContext.values += o.values.name
              if (o.denseShape != null)
                gradientContext.values += o.denseShape.name
            case o: SparseOutput =>
              gradientContext.values += o.indices.name
              gradientContext.values += o.values.name
              if (o.denseShape != null)
                gradientContext.values += o.denseShape.name
          }
          val gradientWhileLoopContext = gradientContext.asInstanceOf[WhileLoopContext]
          gradientContext.enter()
          val result = Seq(enter(
            outputGradients.head,
            gradientContext.name,
            isConstant = false,
            parallelIterations = gradientWhileLoopContext.parallelIterations,
            name = "ExitGradient"))
          // Record the enter (and each of its component tensors) as a loop enter of the gradient while-loop context.
          result(0) match {
            case o: Output => gradientWhileLoopContext.loopEnters += o
            case o: OutputIndexedSlices =>
              gradientWhileLoopContext.loopEnters += o.indices
              gradientWhileLoopContext.loopEnters += o.values
              if (o.denseShape != null)
                gradientWhileLoopContext.loopEnters += o.denseShape
            case o: SparseOutput =>
              gradientWhileLoopContext.loopEnters += o.indices
              gradientWhileLoopContext.loopEnters += o.values
              if (o.denseShape != null)
                gradientWhileLoopContext.loopEnters += o.denseShape
          }
          gradientContext.exit()
          result
        }
      }).get
    }

    /** Gradients for a switch op are calculated using a merge op. If the switch is a loop switch, it will be visited
      * twice. We create the merge op on the first visit, and we update the second input of the merge on the second
      * visit. A next-iteration op is also added in the second visit. */
    private[this] def switchGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      val gradientContext = Op.currentControlFlowContext
      op.controlFlowContext match {
        case Some(opContext: CondContext) =>
          if (outputGradients(1 - opContext.branch.value) != null) {
            Seq(merge(outputGradients, name = "CondGradient")._1, null)
          } else if (op.inputs(0).dataType == RESOURCE) {
            // At this point, we have created `zeroGradient` guarded by the right switch. Unfortunately, we may still
            // get `null` here for non-trainable data types or for some types of ops (e.g., `ResourceGather`) created
            // within only one branch.
            // TODO: !!! This may be inefficient. What if one branch of the switch is not differentiable?
            val goodGradient = outputGradients(opContext.branch.value)
            val zeros = goodGradient match {
              case o: Output => Basic.zerosLike(o)
              case o: OutputIndexedSlices =>
                OutputIndexedSlices(
                  Basic.zeros(o.indices.dataType, Shape(1)),
                  Basic.zeros(o.values.dataType, Shape(1, o.values.shape(1))),
                  o.denseShape)
              case o: SparseOutput =>
                SparseOutput(
                  Basic.zeros(o.indices.dataType, Shape(1, o.indices.shape(1))),
                  Basic.zeros(o.values.dataType, Shape(1)),
                  o.denseShape)
            }
            val zeroGradient = opContext.branch.other.selectSwitchResult(
              ControlFlow.colocatedSwitch(zeros, opContext.predicate))
            // The merge inputs must be ordered by branch value (false branch first, then true branch).
            if (opContext.branch.value == 0)
              Seq(merge(Seq(goodGradient, zeroGradient), name = "CondGradient")._1, null)
            else
              Seq(merge(Seq(zeroGradient, goodGradient), name = "CondGradient")._1, null)
          } else {
            Seq(null, null)
          }
        case Some(_: WhileLoopContext) =>
          gradientContext.flatMap(_.gradientLoopState).flatMap(_.switchMap.get(op)) match {
            case Some(mergeGradient) =>
              // This is the second time this switch node is visited. It comes from the non-exit branch of the switch,
              // and so we update the second input to the merge node.
              if (outputGradients(1) != null)
                WhileLoopContext.addNextIterationAndBackEdge(
                  mergeGradient, outputGradients(1), enforceShapeInvariant = false)
              Seq(null, null)
            case None if outputGradients.head != null =>
              // This is the first time this switch node is visited. It comes from the exit branch of the switch, which
              // is `outputGradients(0)`. `outputGradients(1)` is empty at this point. We use `outputGradients(0)` for
              // both inputs to the merge for now, but we update the second input of the merge node when we visit this
              // switch node for a second time.
              val mergeGradient = merge(Seq(outputGradients(0), outputGradients(0)), name = "SwitchGradient")._1
              gradientContext.flatMap(_.gradientLoopState).map(_.switchMap).foreach(_ += op -> mergeGradient)
              Seq(mergeGradient, null)
            case _ =>
              // This is the first time this switch node is visited. It comes from the identity branch. Such a switch
              // has `null` gradient for the exit branch, meaning that the output is not differentiable.
              Seq(null, null)
          }
        case _ =>
          val falseGradient = switch(outputGradients(0), op.inputs(1))._1
          val trueGradient = switch(outputGradients(1), op.inputs(1))._2
          Seq(merge(Seq(falseGradient, trueGradient))._1, null)
      }
    }

    /** Gradients for a merge op are calculated using a switch op. */
    private[this] def mergeGradient(op: Op, outputGradients: Seq[OutputLike]): Seq[OutputLike] = {
      val gradientContext = Op.currentControlFlowContext
      ControlFlow.getOutputContext(op.inputs(0).op) match {
        case Some(opContext: CondContext) =>
          val predicate = gradientContext.flatMap(_.gradientLoopState).map(gradientLoopState => {
            // This merge node is part of a conditional structure within a loop. The back-propagation needs to have the
            // value of this predicate for every iteration and so, we must have its values accumulated in the forward,
            // and use the accumulated values as the predicate for this back-propagation switch.
            gradientLoopState.historyMap.getOrElse(opContext.predicate.name, {
              // We want to remember the value of the predicate for every iteration.
              gradientLoopState.backwardContext.exit()
              val historyPredicate = gradientLoopState.addForwardAccumulator(opContext.predicate)
              gradientLoopState.backwardContext.enter()
              // We now add the stack pop op. If `opContext.predicate.op` is in a (possibly outer) `CondContext`, then
              // the stack pop op will be guarded with a switch.
              val realPredicate = gradientLoopState.addBackwardAccumulatedValue(historyPredicate, opContext.predicate)
              gradientLoopState.historyMap += opContext.predicate.name -> realPredicate
              realPredicate
            })
          }).getOrElse(opContext.predicate)
          val switch = colocatedSwitch(outputGradients.head, predicate)
          Seq(switch._1, switch._2)
        case Some(_: WhileLoopContext) =>
          val switch = colocatedSwitch(outputGradients.head, gradientContext.get.asInstanceOf[WhileLoopContext].pivot)
          Seq(switch._1, switch._2)
        case _ =>
          // No special control flow context: route the incoming gradient to each input, guarded by whether that input
          // was the one chosen by the merge (`op.outputs(1)` holds the chosen index).
          (0 until op.numInputs).map(i => {
            colocatedSwitch(outputGradients.head, Math.equal(op.outputs(1), i))._2
          })
      }
    }
  }
//endregion Gradients
/** @define OpDocControlFlowGroup
* The `group` op groups multiple ops together.
*
* When the op finishes, all ops in `inputs` have finished. The op has no output.
*
* @define OpDocControlFlowTuple
* The `tuple` op groups op outputs together.
*
* The op creates a tuple of op outputs with the same values as `inputs`, except that the value of each output is
* only returned after the values of all outputs in `inputs` have been computed.
*
* This op can be used as a "join" mechanism for parallel computations: all the argument tensors can be computed in
* parallel, but the values of any tensor returned by `tuple` are only available after all the parallel
* computations are done.
*
* @define OpDocControlFlowNoOp
* The `noOp` op does nothing. The created op is only useful as a placeholder for control edges.
*
* @define OpDocControlFlowCond
* The `cond` op returns `trueFn()` if the predicate `predicate` is true, else `falseFn()`.
*
* `trueFn` and `falseFn` both return structures of tensors (e.g., lists of tensors). `trueFn` and `falseFn` must
* have the same non-zero number and type of outputs. Note that the conditional execution applies only to the ops
* defined in `trueFn` and `falseFn`.
*
* For example, consider the following simple program:
* {{{
* val z = tf.multiply(a, b)
* val result = tf.cond(x < y, () => tf.add(x, z), () => tf.square(y))
* }}}
* If `x < y`, the `tf.add` operation will be executed and the `tf.square` operation will not be executed. Since
* `z` is needed for at least one branch of the `cond`, the `tf.multiply` operation is always executed,
* unconditionally. Although this behavior is consistent with the data-flow model of TensorFlow, it has
* occasionally surprised some users who expected lazier semantics.
*
* Note that `cond` calls `trueFn` and `falseFn` *exactly once* (inside the call to `cond`, and not at all during
* `Session.run()`). `cond` stitches together the graph fragments created during the `trueFn` and `falseFn` calls
* with some additional graph nodes to ensure that the right branch gets executed depending on the value of
* `predicate`.
*
* `cond` supports nested tensor structures, similar to `Session.run()`. Both `trueFn` and `falseFn` must return
* the same (possibly nested) value structure of sequences, tuples, and/or maps.
*
* '''NOTE:''' If the predicate always evaluates to some constant value and that can be inferred statically, then
* only the corresponding branch is built and no control flow ops are added. In some cases, this can significantly
* improve performance.
*
* @define OpDocControlFlowCases
* The `cases` op creates a case operation.
*
* The `predicateFnPairs` parameter is a sequence of pairs. Each pair contains a boolean scalar tensor and a
* function that takes no parameters and creates the tensors to be returned if the boolean evaluates to `true`.
* `default` is a function that returns the default value, used when all provided predicates evaluate to `false`.
*
* All functions in `predicateFnPairs` as well as `default` (if provided) should return the same structure of
* tensors, and with matching data types. If `exclusive == true`, all predicates are evaluated, and an exception is
* thrown if more than one of the predicates evaluates to `true`. If `exclusive == false`, execution stops at the
* first predicate which evaluates to `true`, and the tensors generated by the corresponding function are returned
* immediately. If none of the predicates evaluate to `true`, the operation returns the tensors generated by
* `default`.
*
* Example 1:
* {{{
* // r = if (x < y) 17 else 23.
* val r = tf.cases(
* Seq(x < y -> () => tf.constant(17)),
* default = () => tf.constant(23))
* }}}
*
* Example 2:
* {{{
* // if (x < y && x > z) throw error.
* // r = if (x < y) 17 else if (x > z) 23 else -1.
* val r = tf.cases(
  *     Seq(x < y -> () => tf.constant(17), x > z -> () => tf.constant(23)),
* default = () => tf.constant(-1),
* exclusive = true)
* }}}
*
* @define OpDocControlFlowWhileLoop
* The `whileLoop` op repeats the result of `bodyFn` while the condition returned by `predicateFn` is `true`.
*
* `predicateFn` is a function returning a `BOOLEAN` scalar tensor. `bodyFn` is a function returning a structure
* over tensors mirroring that of `loopVariables`. `loopVariables` is a structure over tensors that is passed to
* both `predicateFn` and `bodyFn`. `predicateFn` and `bodyFn` both take as many arguments as there are
* `loopVariables`.
*
* In addition to regular tensors, indexed slices, or sparse tensors, the body function may accept and return
* tensor array objects. The flows of the tensor array objects will be appropriately forwarded between loops and
* during gradient calculations.
*
* Note that `whileLoop()` calls `predicateFn` and `bodyFn` *exactly once* (inside the call to `whileLoop`, and not
* at all during `Session.run()`). `whileLoop()` stitches together the graph fragments created during the
* `predicateFn` and `bodyFn` calls with some additional graph nodes to create the graph flow that repeats `bodyFn`
* until `predicateFn` returns `false`.
*
* For correctness, `whileLoop()` strictly enforces shape invariants for the loop variables. A shape invariant is a
* (possibly partial) shape that is unchanged across the iterations of the loop. An error will be raised if the
* shape of a loop variable after an iteration is determined to be more general than or incompatible with its shape
* invariant. For example, a shape of `[11, -1]` is more general than a shape of `[11, 17]`, and `[11, 21]` is not
* compatible with `[11, 17]`. By default, (if the argument `shapeInvariants` is not specified), it is assumed that
* the initial shape of each tensor in `loopVariables` is the same in every iteration. The `shapeInvariants`
* argument allows the caller to specify a less specific shape invariant for each loop variable, which is needed if
* the shape varies between iterations. The `Output.setShape()` function may also be used in the `bodyFn` function
* to indicate that the output loop variable has a particular shape. The shape invariants for indexed slices and
* sparse tensors are treated specially as follows:
*
* a) If a loop variable is an indexed slices, the shape invariant must be a shape invariant of the values tensor
* of the indexed slices. This means that the shapes of the three tensors of the indexed slices are `[shape(0)]`,
* `shape`, and `[shape.rank]`.
*
* b) If a loop variable is a sparse tensor, the shape invariant must be a shape `[r]`, where `r` is the rank of
* the dense tensor represented by the sparse tensor. This means that the shapes of the three tensors of the
* sparse tensor are `[-1, r]`, `[-1]`, and `[r]`. Note that the shape invariant here is the shape of the sparse
* tensor `denseShape` field. It must be the shape of a vector.
*
* `whileLoop()` implements non-strict semantics, enabling multiple iterations to run in parallel. The maximum
* number of parallel iterations can be controlled by `parallelIterations`, which gives users some control over
* memory consumption and execution order. For correct programs, `whileLoop()` should return the same result for
* any value `parallelIterations > 0`.
*
* For training, TensorFlow stores the tensors that are produced in the forward pass and are needed in
* back-propagation. These tensors are a main source of memory consumption and often cause out-of-memory errors
* when training on GPUs. When the flag `swapMemory` is set to `true`, we swap out these tensors from the GPU to
* the CPU. This, for example, allows us to train RNN models with very long sequences and large batch sizes.
*
* For example:
* {{{
* val i = tf.constant(0)
* val p = (i: Output) => tf.less(i, 10)
* val b = (i: Output) => tf.add(i, 1)
* val r = tf.whileLoop(p, b, i)
* }}}
*
* Or, using more involved tensor structures:
* {{{
* val ijk0 = (tf.constant(0), (tf.constant(1), tf.constant(2)))
* val p = (i: Output, (j: Output, k: Output)) => i < 10
* val b = (i: Output, (j: Output, k: Output)) => (i + 1, (j + k, j - k))
* val r = tf.whileLoop(p, b, ijk0)
* }}}
*
* Also, using shape invariants:
* {{{
* val i0 = tf.constant(0)
* val m0 = tf.ones(Shape(2, 2))
* val p = (i: Output, m: Output) => i < 10
* val b = (i: Output, m: Output) => (i + 1, tf.concatenate(Seq(m, m), axis = 0))
* val r = tf.whileLoop(p, b, (i0, m0), (i0.shape, Shape(-1, 2)))
* }}}
*
* Example which demonstrates non-strict semantics:
*
* In the following example, the final value of the counter `i` does not depend on `x`. So, the `whileLoop` can
* increment the counter parallel to updates of `x`. However, because the loop counter at one loop iteration
* depends on the value at the previous iteration, the loop counter itself cannot be incremented in parallel.
* Hence, if we just want the final value of the counter, then `x` will never be incremented, but the counter will
* be updated on a single thread. Conversely, if we want the value of the output, then the counter may be
* incremented on its own thread, while `x` can be incremented in parallel on a separate thread.
* In the extreme case, it is conceivable that the thread incrementing the counter runs until completion before `x`
* is incremented even a single time. The only thing that can never happen is that the thread updating `x` can
* never get ahead of the counter thread because the thread incrementing `x` depends on the value of the counter.
* {{{
* val n = 10000
* val x = tf.constant(Tensor.zeros(INT32, Shape(n)))
* val p = (i: Output, x: Output) => i < n
* val b = (i: Output, x: Output) => (tf.print(i + 1, Seq(i)), tf.print(x + 1, Seq(x), "x: "))
* val r = tf.whileLoop(p, b, (0, x))
*
* val session = tf.Session()
*
  *     // The following line prints [0] to [9999]
  *     session.run(r._1)
  *
* // The following line may increment the counter and x in parallel. The counter thread may get ahead of the
* // other thread, but not the other way around. So you may see things like "[9996] x: [9987]", meaning that
* // the counter thread is on iteration 9996, while the other thread is on iteration 9987.
* session.run(r._2)
* }}}
*/
private[ops] trait Documentation
}
| eaplatanios/tensorflow | tensorflow/scala/api/src/main/scala/org/platanios/tensorflow/api/ops/control_flow/ControlFlow.scala | Scala | apache-2.0 | 63,334 |
package lila.tournament
import org.joda.time.format.ISODateTimeFormat
import play.api.libs.json._
import scala.concurrent.duration._
import lila.common.LightUser
import lila.common.PimpedJson._
import lila.game.{ Game, GameRepo }
import lila.user.User
/** Renders tournaments, standings, pairings, and recent games as JSON for the client.
  *
  * @param getLightUser resolves a user id to a lightweight user (name/title), if known
  */
final class JsonView(
    getLightUser: String => Option[LightUser]) {

  // Viewer-independent data that is expensive to compute, grouped so it can be cached per tournament id.
  private case class CachableData(pairings: JsArray, games: JsArray, podium: Option[JsArray])

  /** Builds the full tournament JSON payload.
    *
    * @param tour the tournament to render
    * @param page explicitly requested standing page, if any
    * @param me   the viewing user's id, if logged in (used to include "me" info and auto-select their page)
    */
  def apply(tour: Tournament, page: Option[Int], me: Option[String]): Fu[JsObject] = for {
    data <- cachableData(tour.id)
    myInfo <- me ?? { PlayerRepo.playerInfo(tour.id, _) }
    // Page priority: explicit request, then the viewer's own page, then page 1.
    stand <- (myInfo, page) match {
      case (_, Some(p)) => standing(tour, p)
      case (Some(i), _) => standing(tour, i.page)
      case _ => standing(tour, 1)
    }
  } yield Json.obj(
    "id" -> tour.id,
    "createdBy" -> tour.createdBy,
    "system" -> tour.system.toString.toLowerCase,
    "fullName" -> tour.fullName,
    "nbPlayers" -> tour.nbPlayers,
    "private" -> tour.`private`.option(true),
    "variant" -> tour.variant.key,
    "isStarted" -> tour.isStarted,
    "isFinished" -> tour.isFinished,
    "schedule" -> tour.schedule.map(scheduleJson),
    // Countdown fields are only present in the relevant tournament phase.
    "secondsToFinish" -> tour.isStarted.option(tour.secondsToFinish),
    "secondsToStart" -> tour.isCreated.option(tour.secondsToStart),
    "startsAt" -> tour.isCreated.option(ISODateTimeFormat.dateTime.print(tour.startsAt)),
    "pairings" -> data.pairings,
    "lastGames" -> data.games,
    "standing" -> stand,
    "me" -> myInfo.map(myInfoJson),
    "podium" -> data.podium
  ).noNull // drop all absent (None) fields from the final object

  /** Returns one page of the standing; page 1 is served from a short-lived cache. */
  def standing(tour: Tournament, page: Int): Fu[JsObject] =
    if (page == 1) firstPageCache(tour.id)
    else computeStanding(tour, page)

  /** Evicts both per-tournament caches, e.g. after the tournament state changes. */
  def clearCache(id: String) =
    firstPageCache.remove(id) >> cachableData.remove(id)

  // Computes a standing page: 10 players per page, with each player's score sheet.
  private def computeStanding(tour: Tournament, page: Int): Fu[JsObject] = for {
    rankedPlayers <- PlayerRepo.bestByTourWithRankByPage(tour.id, 10, page max 1)
    sheets <- rankedPlayers.map { p =>
      tour.system.scoringSystem.sheet(tour, p.player.userId) map p.player.userId.->
    }.sequenceFu.map(_.toMap)
  } yield Json.obj(
    "page" -> page,
    "players" -> rankedPlayers.map(playerJson(sheets, tour))
  )

  // Page 1 is requested by every viewer, so it is cached briefly (1 second TTL).
  private val firstPageCache = lila.memo.AsyncCache[String, JsObject](
    (id: String) => TournamentRepo byId id flatten s"No such tournament: $id" flatMap { computeStanding(_, 1) },
    timeToLive = 1 second)

  // Caches the viewer-independent data (recent pairings, featured games, podium) per tournament id.
  private val cachableData = lila.memo.AsyncCache[String, CachableData](id =>
    for {
      pairings <- PairingRepo.recentByTour(id, 40)
      games <- GameRepo games pairings.take(4).map(_.gameId)
      podium <- podiumJson(id)
    } yield CachableData(
      JsArray(pairings map pairingJson),
      JsArray(games map gameJson),
      podium),
    timeToLive = 1 second)

  // The viewing player's own rank and withdrawal status.
  private def myInfoJson(i: PlayerInfo) = Json.obj(
    "rank" -> i.rank,
    "withdraw" -> i.withdraw)

  // One side of a featured game; name/title are omitted for anonymous players via noNull.
  private def gameUserJson(player: lila.game.Player) = {
    val light = player.userId flatMap getLightUser
    Json.obj(
      "name" -> light.map(_.name),
      "title" -> light.map(_.title),
      "rating" -> player.rating
    ).noNull
  }

  // Compact representation of a recently finished/ongoing game for the tournament page.
  private def gameJson(g: Game) = Json.obj(
    "id" -> g.id,
    "fen" -> (chess.format.Forsyth exportBoard g.toChess.board),
    "color" -> g.firstColor.name,
    "lastMove" -> ~g.castleLastMoveTime.lastMoveString,
    "user1" -> gameUserJson(g.firstPlayer),
    "user2" -> gameUserJson(g.secondPlayer))

  private def scheduleJson(s: Schedule) = Json.obj(
    "freq" -> s.freq.name,
    "speed" -> s.speed.name)

  // Serializes an arena score sheet: normal scores as plain numbers, flagged
  // scores as [value, flagId] pairs; a "fire" marker is added when the streak is on.
  private def sheetJson(sheet: ScoreSheet) = sheet match {
    case s: arena.ScoringSystem.Sheet =>
      val o = Json.obj(
        "scores" -> s.scores.reverse.map { score =>
          if (score.flag == arena.ScoringSystem.Normal) JsNumber(score.value)
          else Json.arr(score.value, score.flag.id)
        },
        "total" -> s.total)
      s.onFire.fold(o + ("fire" -> JsBoolean(true)), o)
  }

  // One row of the standing table for a ranked player.
  private def playerJson(sheets: Map[String, ScoreSheet], tour: Tournament)(rankedPlayer: RankedPlayer) = {
    val p = rankedPlayer.player
    val light = getLightUser(p.userId)
    Json.obj(
      "rank" -> rankedPlayer.rank,
      "name" -> light.fold(p.userId)(_.name),
      "title" -> light.map(_.title),
      "rating" -> p.rating,
      "provisional" -> p.provisional.option(true),
      "withdraw" -> p.withdraw.option(true),
      "score" -> p.score,
      "perf" -> p.perf,
      // "opposition" -> none[Int], //(tour.isFinished && rankedPlayer.rank <= 3).option(opposition(tour, p)),
      "sheet" -> sheets.get(p.userId).map(sheetJson)
    ).noNull
  }

  // Top-3 players with their sheets; None while the tournament is not finished.
  private def podiumJson(id: String): Fu[Option[JsArray]] =
    TournamentRepo finishedById id flatMap {
      _ ?? { tour =>
        for {
          rankedPlayers <- PlayerRepo.bestByTourWithRank(id, 3)
          sheets <- rankedPlayers.map { p =>
            tour.system.scoringSystem.sheet(tour, p.player.userId) map p.player.userId.->
          }.sequenceFu.map(_.toMap)
        } yield JsArray(rankedPlayers.map(playerJson(sheets, tour))).some
      }
    }

  // private def opposition(tour: Tournament, p: Player): Int =
  //   tour.userPairings(p.id).foldLeft((0, 0)) {
  //     case ((count, sum), pairing) => (
  //       count + 1,
  //       (pairing opponentOf p.id flatMap tour.playerByUserId).fold(sum)(_.rating + sum)
  //     )
  //   } match {
  //     case (0, _)       => 0
  //     case (count, sum) => sum / count
  //   }

  private def pairingUserJson(userId: String) = getLightUser(userId).fold(userId)(_.name)

  // Minimal pairing record: game id, the two user names, and a status code
  // (0 = ongoing, 1 = draw, 2 = first player won, 3 = second player won).
  private def pairingJson(p: Pairing) = Json.obj(
    "id" -> p.gameId,
    "u" -> Json.arr(pairingUserJson(p.user1), pairingUserJson(p.user2)),
    "s" -> (if (p.finished) p.winner match {
      case Some(w) if w == p.user1 => 2
      case Some(w) => 3
      case _ => 1
    }
    else 0))
}
| Happy0/lila | modules/tournament/src/main/JsonView.scala | Scala | mit | 5,924 |
package org.bitcoins.core.hd
/**
* Address chain (external vs. change) used by
* [[https://github.com/bitcoin/bips/blob/master/bip-0044.mediawiki#change BIP44]],
* [[https://github.com/bitcoin/bips/blob/master/bip-0084.mediawiki BIP84]]
* and
* [[https://github.com/bitcoin/bips/blob/master/bip-0049.mediawiki BIP49]]
*
* @see
*/
sealed abstract class HDChainType {
  // BIP44 chain index: 0 for the external chain, 1 for the change chain
  // (see the `External` and `Change` members of the companion object).
  def index: Int
}
object HDChainType {

  /**
   * The external chain, holding addresses that are
   * visible outside the wallet (e.g. for receiving
   * payments). BIP44 chain index `0`.
   */
  final case object External extends HDChainType {
    override val index: Int = 0
  }

  /**
   * The internal chain, holding addresses that stay
   * private to the wallet and receive transaction
   * change. BIP44 chain index `1`.
   */
  final case object Change extends HDChainType {
    override val index: Int = 1
  }

  /**
   * Resolves a BIP44 chain index to its [[HDChainType]].
   *
   * @throws IllegalArgumentException if `int` is neither the external nor the change index
   */
  def fromInt(int: Int): HDChainType =
    if (int == External.index) External
    else if (int == Change.index) Change
    else
      throw new IllegalArgumentException(
        s"$int is not a valid BIP44 change type!")
}
| bitcoin-s/bitcoin-s-core | core/src/main/scala/org/bitcoins/core/hd/HDChainType.scala | Scala | mit | 1,200 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval.instances
import cats.{Applicative, CommutativeApplicative, Monad, Parallel, ~>}
import monix.eval.Task
/** `cats.Parallel` type class instance for [[monix.eval.Task Task]].
*
* A `cats.Parallel` instances means that `Task` can be used for
* processing tasks in parallel (with non-deterministic effects
* ordering).
*
* References:
*
* - [[https://typelevel.org/cats/ typelevel/cats]]
* - [[https://github.com/typelevel/cats-effect typelevel/cats-effect]]
*/
/** `cats.Parallel` type class instance for [[monix.eval.Task Task]].
  *
  * A `cats.Parallel` instances means that `Task` can be used for
  * processing tasks in parallel (with non-deterministic effects
  * ordering).
  *
  * References:
  *
  *  - [[https://typelevel.org/cats/ typelevel/cats]]
  *  - [[https://github.com/typelevel/cats-effect typelevel/cats-effect]]
  */
class CatsParallelForTask extends Parallel[Task] {
  // The "parallel" representation is the `Task.Par` newtype.
  override type F[A] = Task.Par[A]

  // Applicative used when combining tasks in parallel; defined in the companion object.
  override def applicative: Applicative[Task.Par] = CatsParallelForTask.NondetApplicative

  // Sequential (monadic) composition delegates to the standard `Task` instance.
  override def monad: Monad[Task] = CatsConcurrentForTask

  // Natural transformations between the sequential `Task` and the parallel `Task.Par` newtype:
  // both are zero-cost wraps/unwraps.
  override val sequential: Task.Par ~> Task = new (Task.Par ~> Task) {
    def apply[A](fa: Task.Par[A]): Task[A] = Task.Par.unwrap(fa)
  }

  override val parallel: Task ~> Task.Par = new (Task ~> Task.Par) {
    def apply[A](fa: Task[A]): Task.Par[A] = Task.Par.apply(fa)
  }
}
object CatsParallelForTask extends CatsParallelForTask {
  /** `CommutativeApplicative` for `Task.Par` that combines both tasks with
    * `Task.mapBoth`, yielding non-deterministic (parallel) effect ordering.
    */
  private[eval] object NondetApplicative extends CommutativeApplicative[Task.Par] {
    import Task.Par.unwrap
    import Task.Par.{apply => par}

    // All binary combinators unwrap both sides, run them via `Task.mapBoth`,
    // and re-wrap the result in the `Par` newtype.
    override def ap[A, B](ff: Task.Par[A => B])(fa: Task.Par[A]): Task.Par[B] =
      par(Task.mapBoth(unwrap(ff), unwrap(fa))(_(_)))

    override def map2[A, B, Z](fa: Task.Par[A], fb: Task.Par[B])(f: (A, B) => Z): Task.Par[Z] =
      par(Task.mapBoth(unwrap(fa), unwrap(fb))(f))

    override def product[A, B](fa: Task.Par[A], fb: Task.Par[B]): Task.Par[(A, B)] =
      par(Task.mapBoth(unwrap(fa), unwrap(fb))((_, _)))

    // Pure lifts use `Task.now`, i.e. already-evaluated values with no effects.
    override def pure[A](a: A): Task.Par[A] =
      par(Task.now(a))

    override val unit: Task.Par[Unit] =
      par(Task.now(()))

    override def map[A, B](fa: Task.Par[A])(f: A => B): Task.Par[B] =
      par(unwrap(fa).map(f))
  }
}
| alexandru/monifu | monix-eval/shared/src/main/scala/monix/eval/instances/CatsParallelForTask.scala | Scala | apache-2.0 | 2,558 |
import com.thesamet.proto.e2e.no_box._
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.must.Matchers
import com.google.protobuf.InvalidProtocolBufferException
/** Tests for the `no_box` ScalaPB option: message fields marked `no_box` are generated
  * as plain values instead of being wrapped in `Option`.
  */
class NoBoxSpec extends AnyFlatSpec with Matchers {
  // `tyre1` is a no_box field (plain value), `tyre2` is a regular optional field.
  val car = Car(tyre1 = Tyre(size = 10), tyre2 = Some(Tyre(size = 20)))

  "no_box" should "create correct methods" in {
    car.tyre1 must be(Tyre(size = 10))
    car.tyre2 must be(Some(Tyre(size = 20)))
  }

  "fields with no_box" should "parseFrom byte array correctly" in {
    // Round-trip: serialization followed by parsing yields an equal message.
    val serialized = car.toByteArray
    Car.parseFrom(serialized) must be(car)
  }

  "Java representation of Scala message with a no_box field with default value" should "not have that field" in {
    val scalaCar = Car(tyre1 = Tyre.defaultInstance)
    scalaCar.tyre1 must be(Tyre.defaultInstance)
  }

  "Scala message with a no_box field with null value" should "throw exception when being serialized" in {
    // no_box removes the Option wrapper, so `null` is representable but must fail at serialization time.
    val car = Car(tyre1 = null)
    a[Exception] shouldBe thrownBy(car.toByteArray)
  }

  "Scala message with a no_box reference" should "generate correct types" in {
    // no_box can be set at the message level and overridden per field.
    val car = Car()
    car.dontBoxMeDef mustBe (DontBoxMe.defaultInstance)
    car.dontBoxMeOverrideTrue mustBe (DontBoxMe.defaultInstance)
    car.dontBoxMeOverrideFalse mustBe (None)
    car.nameNoBox mustBe (com.thesamet.pb.FullName("", ""))
  }

  "RequiredCar" should "have unboxed message field" in {
    RequiredCar(tyre1 = Tyre(size = 12))
  }

  "RequiredCar" should "fail validation if required field is missing" in {
    // An empty byte array lacks the required field, so parsing must reject it.
    intercept[InvalidProtocolBufferException] {
      RequiredCar.parseFrom(Array.empty[Byte])
    }.getMessage must be("Message missing required fields.")
  }

  "RequiredCar" should "fail parsing from text if field is empty" in {
    RequiredCar.fromAscii("tyre1 { size: 12 }")
    intercept[NoSuchElementException] {
      RequiredCar.fromAscii("")
    }
  }

  // Issue 1198
  "Non-total type" should "serialize and parse correctly" in {
    val p = Person("", Money(BigDecimal("123.123")))
    Person.parseFrom(p.toByteArray) must be(p)
  }

  it should "throw an exception when missing data" in {
    // Parsing empty bytes leaves the non-total field unset, which surfaces as a
    // NumberFormatException when the BigDecimal is constructed.
    intercept[NumberFormatException] {
      Person.parseFrom(Array.empty[Byte])
    }
  }
}
| scalapb/ScalaPB | e2e/src/test/scala/NoBoxSpec.scala | Scala | apache-2.0 | 2,219 |
/**
* FILE: BuildingPossessionTest.scala
* PERCORSO /Codice/sgad/servertier/src/test/scala/sgad/servertier/dataaccess/data/userdata
* DATA CREAZIONE: 20 Febbraio 2014
* AUTORE: ProTech
* EMAIL: protech.unipd@gmail.com
*
* Questo file è proprietà del gruppo ProTech, viene rilasciato sotto licenza Apache v2.
*
* DIARIO DELLE MODIFICHE:
* 2014-02-20 - Creazione della classe - Nessi Alberto
*/
import org.scalatest._
import sgad.servertier.dataaccess.data.shareddata._
import sgad.servertier.dataaccess.data.userdata.{BuildingPossession, UnitInProgress, Position}
/**
* Classe per il Test della classe BuildingPossession
*/
/**
 * Test suite for the BuildingPossession class: verifies that constructor arguments are
 * retained, that invalid constructor arguments are rejected, equality semantics,
 * unit queueing, and key formatting.
 */
class BuildingPossessionTest extends FlatSpec {
  // Shared fixture data: resources, costs, a building type, positions, and units in progress.
  var timeNow = 1392902385004L
  val quantityResource = Array[QuantityResource]()
  var preconditions = Vector[BuildingWithLevel]()
  var bonus = new Bonus("bonus1", 2, 3)
  var gold = new Resource("oro")
  var potion = new Resource("pozione")
  var resourceQuantityVector = Vector(new QuantityResource(gold, 100), new QuantityResource(potion, 300))
  var cost = new Cost(1000, resourceQuantityVector)
  var productedResource1 = new ProductedResource(new Resource("risorsaDiProva"), 1223, 1, 2)
  var productedUnit1 = Vector[`Unit`]()
  var productedUnit2 = new `Unit`("soldato2", 1, 3, cost, true)
  var productedUnit3 = new `Unit`("soldato3", 1, 3, cost, true)
  var buildingWithLevel1 = new BuildingWithLevel(true, bonus, cost, 2, "Torre", preconditions, productedResource1, productedUnit1, 2, false)
  var position = new Position(3, 8)
  var position2 = new Position(1, 12)
  var unitInProgress = new UnitInProgress(productedUnit2, 1392902384789L, 1)
  var unitInProgress2 = new UnitInProgress(productedUnit3, 1392902384789L, 1)
  var buildingPossession = new BuildingPossession(buildingWithLevel1, position, true, 1392902385004L, unitInProgress)

  // Getters must return exactly the values passed to the constructor.
  "Un BuildingPossession " must " mantenere il tipo di costruzione così come viene espresso nel costruttore" in {
    assert(buildingPossession.getBuilding.equals(buildingWithLevel1))
  }

  "Un BuildingPossession " must " mantenere le coordinate di un edificio di gioco così come viene espresso nel costruttore" in {
    assert(buildingPossession.getPosition.equals(position))
  }

  "Un BuildingPossession " must " mantenere lo stato di isFinished così come viene espresso nel costruttore" in {
    assert(buildingPossession.getIsFinished.equals(true))
  }

  "Un BuildingPossession " must " mantenere l'attributo time così come viene espresso nel costruttore" in {
    assert(buildingPossession.getTime.equals(1392902385004L))
  }

  "Un BuildingPossession " must " mantenere l'attributo unitInProgress così come viene espresso nel costruttore" in {
    assert(buildingPossession.getUnitInProgress.equals(unitInProgress))
  }

  // Null building or position arguments must be rejected by the constructor.
  "Il costruttore " must " throw illegalArgument exception con costruttore illegale" in {
    intercept[IllegalArgumentException] {
      // building
      new BuildingPossession(null, position, true, 1392902385004L, unitInProgress)
    }
    intercept[IllegalArgumentException] {
      // position
      new BuildingPossession(buildingWithLevel1, null, true, 1392902385004L, unitInProgress)
    }
  }

  // Equality must hold only for possessions with identical attributes, and never
  // against instances of unrelated types.
  it must " essere uguale solo ad uno stesso BuildingPossession " in {
    val buildingPossession1 = new BuildingPossession(buildingWithLevel1, position, true, 1392902385004L, unitInProgress)
    val buildingPossession2 = new BuildingPossession(buildingWithLevel1, position, true, 1392902385004L, unitInProgress)
    val buildingPossession3 = new BuildingPossession(buildingWithLevel1, position2, false, 1392902385123L, unitInProgress)
    assert(buildingPossession1 equals buildingPossession2)
    assert(buildingPossession1 == buildingPossession2)
    assert(!buildingPossession1.equals(buildingPossession3))
    assert(!buildingPossession2.equals(buildingPossession3))
    assert(buildingPossession != buildingWithLevel1)
  }

  // addUnit accepts units up to the building's queue capacity and rejects further ones.
  it must "inserire correttamente le unità in coda" in {
    buildingPossession.addUnit(unitInProgress)
    assert(buildingPossession.addUnit(unitInProgress))
    assert(!buildingPossession.addUnit(unitInProgress2))
  }

  // Key format: "<buildingName>L<level>X<x>Y<y>".
  "Il metodo getKey" must " restituire la chiave nel formato valido" in {
    assert(buildingPossession.getKey.equals(buildingWithLevel1.getNameBuilding + "L" + buildingWithLevel1.getLevel + "X" + position.getX + "Y" + position.getY))
  }
}
} | protechunipd/SGAD | Codice/sgad/servertier/src/test/scala/sgad/servertier/dataaccess/data/userdata/BuildingPossessionTest.scala | Scala | apache-2.0 | 4,284 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.spark.sql.api.java
import java.util.{ Map => JMap }
import scala.collection.JavaConverters.mapAsScalaMapConverter
import scala.collection.{ Map => SMap }
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.Dataset
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.SparkSession
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_QUERY
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_RESOURCE_READ
import org.elasticsearch.spark.sql.EsSparkSQL
/** Java-friendly facade over [[EsSparkSQL]]: every overload converts `java.util.Map`
  * configuration to a Scala map and delegates to the Scala API.
  */
object JavaEsSparkSQL {
  // specify the return types to make sure the bytecode is generated properly (w/o any scala.collections in it)

  // SQLContext-based readers. Overloads cover: no arguments (configuration taken from the
  // context), an explicit resource, an explicit resource plus query, and each of those with
  // an additional configuration map.
  def esDF(sc: SQLContext): DataFrame = EsSparkSQL.esDF(sc, SMap.empty[String, String])
  def esDF(sc: SQLContext, resource: String): DataFrame = EsSparkSQL.esDF(sc, Map(ES_RESOURCE_READ -> resource))
  def esDF(sc: SQLContext, resource: String, query: String): DataFrame = EsSparkSQL.esDF(sc, Map(ES_RESOURCE_READ -> resource, ES_QUERY -> query))
  def esDF(sc: SQLContext, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(sc, cfg.asScala)
  def esDF(sc: SQLContext, resource: String, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(sc, resource, cfg.asScala)
  def esDF(sc: SQLContext, resource: String, query: String, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(sc, resource, query, cfg.asScala)

  // SparkSession-based readers, mirroring the SQLContext overloads above.
  def esDF(ss: SparkSession): DataFrame = EsSparkSQL.esDF(ss, SMap.empty[String, String])
  def esDF(ss: SparkSession, resource: String): DataFrame = EsSparkSQL.esDF(ss, Map(ES_RESOURCE_READ -> resource))
  def esDF(ss: SparkSession, resource: String, query: String): DataFrame = EsSparkSQL.esDF(ss, Map(ES_RESOURCE_READ -> resource, ES_QUERY -> query))
  def esDF(ss: SparkSession, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(ss, cfg.asScala)
  def esDF(ss: SparkSession, resource: String, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(ss, resource, cfg.asScala)
  def esDF(ss: SparkSession, resource: String, query: String, cfg: JMap[String, String]): DataFrame = EsSparkSQL.esDF(ss, resource, query, cfg.asScala)

  // Writers: persist a Dataset to Elasticsearch, optionally with an explicit resource
  // and/or a configuration map.
  def saveToEs[T](ds: Dataset[T], resource: String): Unit = EsSparkSQL.saveToEs(ds , resource)
  def saveToEs[T](ds: Dataset[T], resource: String, cfg: JMap[String, String]): Unit = EsSparkSQL.saveToEs(ds, resource, cfg.asScala)
  def saveToEs[T](ds: Dataset[T], cfg: JMap[String, String]): Unit = EsSparkSQL.saveToEs(ds, cfg.asScala)
}
package org.jetbrains.plugins.scala
package lang
package psi
package types
package api
package designator
import com.intellij.psi._
import org.jetbrains.plugins.scala.caches.{BlockModificationTracker, RecursionManager}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScBindingPattern
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScTypeParam, TypeParamIdOwner}
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScTypeAlias, ScTypeAliasDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.ScSyntheticClass
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.ScTypePolymorphicType
import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.resolve.processor.ResolveProcessor
import org.jetbrains.plugins.scala.lang.resolve.{ResolveTargets, ScalaResolveResult, ScalaResolveState}
import org.jetbrains.plugins.scala.macroAnnotations.CachedWithRecursionGuard
import org.jetbrains.plugins.scala.util.HashBuilder._
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil
/**
* @author ilyas
*/
/**
* This type means type projection:
* SomeType#member
* member can be class or type alias
*/
final class ScProjectionType private(val projected: ScType,
override val element: PsiNamedElement) extends DesignatorOwner {
// Computes the alias expansion (with bounds) when the projected element resolves to a
// type alias; returns `None` for projections onto classes or other members.
override protected def calculateAliasType: Option[AliasType] = {
  actualElement match {
    case ta: ScTypeAlias if ta.typeParameters.isEmpty =>
      // Non-parameterized alias: apply the actual substitutor to its declared bounds.
      val subst: ScSubstitutor = actualSubst
      Some(AliasType(ta, ta.lowerBound.map(subst), ta.upperBound.map(subst)))
    case ta: ScTypeAlias => //higher kind case
      ta match {
        case ta: ScTypeAliasDefinition => //hack for simple cases, it doesn't cover more complicated examples
          ta.aliasedType match {
            case Right(tp) if tp == this => // recursive type alias
              return Some(AliasType(ta, Right(this), Right(this)))
            case Right(tp) =>
              actualSubst(tp) match {
                case target @ ParameterizedType(des, typeArgs) =>
                  val tParams = ta.typeParameters
                  // Check whether the alias merely re-applies its own type parameters, unchanged
                  // and in order (i.e. `type T[A] = F[A]`); if so it can be reduced to `F` itself.
                  val sameParams = tParams.length == typeArgs.length && tParams.zip(typeArgs).forall {
                    case (tParam: ScTypeParam, TypeParameterType.ofPsi(param)) if tParam.typeParamId == param.typeParamId => true
                    case _ => false
                  }
                  if (sameParams) return Some(AliasType(ta, Right(des), Right(des)))
                  else {
                    // Otherwise represent the alias as an explicit polymorphic type constructor.
                    val typeConsuctor = ScTypePolymorphicType(target, tParams.map(TypeParameter.apply))
                    return Option(AliasType(ta, Right(typeConsuctor), Right(typeConsuctor)))
                  }
                case _ =>
              }
            case _ =>
          }
        case _ =>
      }
      // Fallback for parameterized aliases (declarations, or definitions not handled above):
      // replace each type parameter with a fresh existential argument and existentially
      // quantify the substituted bounds.
      val existentialArgs = ta.typeParameters
        .map(tp => ScExistentialArgument(tp.name + "$$", Nil, Nothing, Any))
        .toList
      val genericSubst = ScSubstitutor.bind(ta.typeParameters, existentialArgs)
      val s = actualSubst.followed(genericSubst)
      Some(AliasType(ta,
        ta.lowerBound.map(scType => ScExistentialType(s(scType))),
        ta.upperBound.map(scType => ScExistentialType(s(scType)))))
    case _ => None
  }
}
/** A projection is stable only when both the projected type and the member itself are stable. */
override def isStable: Boolean = {
  val projectedIsStable = projected match {
    case owner: DesignatorOwner => owner.isStable
    case _ => false
  }
  projectedIsStable && super.isStable
}
override private[types] def designatorSingletonType: Option[ScType] = super.designatorSingletonType.map(actualSubst)
@CachedWithRecursionGuard(element, None, BlockModificationTracker(element))
private def actualImpl(projected: ScType, updateWithProjectionSubst: Boolean): Option[(PsiNamedElement, ScSubstitutor)] = {
val resolvePlace = {
def fromClazz(definition: ScTypeDefinition): PsiElement =
definition.extendsBlock.templateBody
.flatMap(_.lastChildStub)
.getOrElse(definition.extendsBlock)
projected.tryExtractDesignatorSingleton.extractClass match {
case Some(definition: ScTypeDefinition) => fromClazz(definition)
case _ =>
projected match {
case ScThisType(definition: ScTypeDefinition) => fromClazz(definition)
case _ => element
}
}
}
import org.jetbrains.plugins.scala.lang.resolve.ResolveTargets._
def processType(kinds: Set[ResolveTargets.Value] = ValueSet(CLASS)): Option[(PsiNamedElement, ScSubstitutor)] = {
def elementClazz: Option[PsiClass] = element match {
case named: ScBindingPattern => Option(named.containingClass)
case member: ScMember => Option(member.containingClass)
case _ => None
}
projected match {
case ScDesignatorType(clazz: PsiClass)
if elementClazz.exists(ScEquivalenceUtil.areClassesEquivalent(_, clazz)) =>
return Some(element, ScSubstitutor(projected))
case p @ ParameterizedType(ScDesignatorType(clazz: PsiClass), _)
if elementClazz.exists(ScEquivalenceUtil.areClassesEquivalent(_, clazz)) =>
return Some(element, ScSubstitutor(projected).followed(p.substitutor))
case p: ScProjectionType =>
p.actualElement match {
case `element` if element.is[ScTypeAlias] => //rare case of recursive projection, see SCL-15345
return Some(element, p.actualSubst)
case clazz: PsiClass
if elementClazz.exists(ScEquivalenceUtil.areClassesEquivalent(_, clazz)) =>
return Some(element, ScSubstitutor(projected).followed(p.actualSubst))
case _ => //continue with processor :(
}
case ScThisType(clazz)
if elementClazz.exists(ScEquivalenceUtil.areClassesEquivalent(_, clazz)) =>
//for this type we shouldn't put this substitutor because of possible recursions
//and we don't need that, because all types are already calculated with proper this type
return Some(element, ScSubstitutor.empty)
case ScCompoundType(_, _, typesMap) =>
typesMap.get(element.name) match {
case Some(taSig) => return Some(taSig.typeAlias, taSig.substitutor)
case _ =>
}
case _ => //continue with processor :(
}
val processor = new ResolveProcessor(kinds, resolvePlace, element.name) {
doNotCheckAccessibility()
override protected def addResults(results: Iterable[ScalaResolveResult]): Boolean = {
candidatesSet ++= results
true
}
}
processor.processType(projected, resolvePlace, ScalaResolveState.empty, updateWithProjectionSubst)
processor.candidates match {
case Array(candidate) => candidate.element match {
case candidateElement: PsiNamedElement =>
val thisSubstitutor = ScSubstitutor(projected)
val defaultSubstitutor =
projected match {
case _: ScThisType => candidate.substitutor
case _ => thisSubstitutor.followed(candidate.substitutor)
}
val needSuperSubstitutor = element match {
case _: PsiClass => element != candidateElement
case _ => false
}
if (needSuperSubstitutor) {
Some(element,
ScalaPsiUtil.superTypeSignatures(candidateElement)
.find(_.namedElement == element)
.map(typeSig => typeSig.substitutor.followed(defaultSubstitutor))
.getOrElse(defaultSubstitutor))
} else {
Some(candidateElement, defaultSubstitutor)
}
case _ => None
}
case _ => None
}
}
element match {
case d: ScTypedDefinition if d.isStable => //val's, objects, parameters
processType(ValueSet(VAL, OBJECT))
case _: ScTypeAlias | _: PsiClass =>
processType(ValueSet(CLASS))
case _ => None
}
}
private def actual(updateWithProjectionSubst: Boolean = true): (PsiNamedElement, ScSubstitutor) =
actualImpl(projected, updateWithProjectionSubst).getOrElse(element, ScSubstitutor.empty)
def actualElement: PsiNamedElement = actual()._1
def actualSubst: ScSubstitutor = actual()._2
override def equivInner(r: ScType, constraints: ConstraintSystem, falseUndef: Boolean): ConstraintsResult = {
def isEligibleForPrefixUnification(proj: ScType): Boolean = proj.subtypeExists {
case _: UndefinedType => true
case _ => false
}
def checkDesignatorType(e: PsiNamedElement): ConstraintsResult = e match {
case td: ScTypedDefinition if td.isStable =>
val tp = actualSubst(td.`type`().getOrAny)
tp match {
case designatorOwner: DesignatorOwner if designatorOwner.isSingleton =>
tp.equiv(r, constraints, falseUndef)
case lit: ScLiteralType => lit.equiv(r, constraints, falseUndef)
case _ => ConstraintsResult.Left
}
case _ => ConstraintsResult.Left
}
val desRes = checkDesignatorType(actualElement)
if (desRes.isRight) return desRes
r match {
case tpt: ScTypePolymorphicType =>
return ScEquivalenceUtil
.isDesignatorEqiuivalentToPolyType(this, tpt, constraints, falseUndef)
.getOrElse(ConstraintsResult.Left)
case _ => ()
}
val res = r match {
case t: StdType =>
element match {
case synth: ScSyntheticClass => synth.stdType.equiv(t, constraints, falseUndef)
case _ => ConstraintsResult.Left
}
case ParameterizedType(ScProjectionType(_, _), _) =>
r match {
case AliasType(_: ScTypeAliasDefinition, Right(lower), _) =>
this.equiv(lower, constraints, falseUndef)
case _ => ConstraintsResult.Left
}
case proj2 @ ScProjectionType(p1, _) =>
val desRes = checkDesignatorType(proj2.actualElement)
if (desRes.isRight) return desRes
val lElement = actualElement
val rElement = proj2.actualElement
val sameElements = lElement == rElement || {
lElement.name == rElement.name &&
(isEligibleForPrefixUnification(projected) || isEligibleForPrefixUnification(p1))
}
if (sameElements) projected.equiv(p1, constraints, falseUndef)
else
r match {
case AliasType(_: ScTypeAliasDefinition, Right(lower), _) =>
this.equiv(lower, constraints, falseUndef)
case _ => ConstraintsResult.Left
}
case ScThisType(_) =>
element match {
case _: ScObject => ConstraintsResult.Left
case t: ScTypedDefinition if t.isStable =>
t.`type`() match {
case Right(singleton: DesignatorOwner) if singleton.isSingleton =>
val newSubst = actualSubst.followed(ScSubstitutor(projected))
r.equiv(newSubst(singleton), constraints, falseUndef)
case _ => ConstraintsResult.Left
}
case _ => ConstraintsResult.Left
}
case _ => ConstraintsResult.Left
}
res match {
case cs: ConstraintSystem => cs
case ConstraintsResult.Left =>
this match {
case AliasType(_: ScTypeAliasDefinition, Right(lower), _) =>
lower.equiv(r, constraints, falseUndef)
case _ => ConstraintsResult.Left
}
}
}
override def isFinalType: Boolean = actualElement match {
case cl: PsiClass if cl.isEffectivelyFinal => true
case alias: ScTypeAliasDefinition => alias.aliasedType.exists(_.isFinalType)
case _ => false
}
override def visitType(visitor: ScalaTypeVisitor): Unit = visitor.visitProjectionType(this)
def canEqual(other: Any): Boolean = other.is[ScProjectionType]
override def equals(other: Any): Boolean = other match {
case that: ScProjectionType =>
(that canEqual this) &&
projected == that.projected &&
element == that.element
case _ => false
}
private var hash: Int = -1
//noinspection HashCodeUsesVar
override def hashCode: Int = {
if (hash == -1)
hash = projected #+ element
hash
}
override def typeDepth: Int = projected.typeDepth
}
object ScProjectionType {
  // Guards against infinite recursion while expanding alias projections.
  private val guard = RecursionManager.RecursionGuard[ScType, Nothing]("aliasProjectionGuard")

  /** Replaces a projection onto a simple (non-parameterized) type alias
   *  definition by the alias's upper bound, provided the expansion does not
   *  recurse and does not increase type depth; otherwise returns `p` unchanged.
   */
  def simpleAliasProjection(p: ScProjectionType): ScType = {
    p.actual() match {
      case (td: ScTypeAliasDefinition, subst) if td.typeParameters.isEmpty =>
        val upper = guard.doPreventingRecursion(p) {
          td.upperBound.map(subst).toOption
        }
        upper
          .flatten
          .filter(_.typeDepth < p.typeDepth)
          .getOrElse(p)
      case _ => p
    }
  }

  /** Factory: builds the projection and, for simple alias definitions,
   *  substitutes the cached alias expansion (via ScalaPsiManager) when available.
   */
  def apply(projected: ScType, element: PsiNamedElement): ScType = {
    val simple = new ScProjectionType(projected, element)
    simple.actualElement match {
      case td: ScTypeAliasDefinition if td.typeParameters.isEmpty =>
        val manager = ScalaPsiManager.instance(element.getProject)
        manager.simpleAliasProjectionCached(simple).nullSafe.getOrElse(simple)
      case _ => simple
    }
  }

  def unapply(proj: ScProjectionType): Option[(ScType, PsiNamedElement)] = {
    Some(proj.projected, proj.element)
  }

  // Extractor yielding the resolved (actual) element and substitutor,
  // with the projection substitutor applied (updateWithProjectionSubst = true).
  object withActual {
    private[this] val extractor = new withActual(true)

    def unapply(proj: ScProjectionType): Option[(PsiNamedElement, ScSubstitutor)] = extractor.unapply(proj)
  }

  // Configurable variant of the extractor above.
  class withActual(updateWithProjectionSubst: Boolean) {
    def unapply(proj: ScProjectionType): Option[(PsiNamedElement, ScSubstitutor)] =
      Option(proj.actual(updateWithProjectionSubst))
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/types/api/designator/ScProjectionType.scala | Scala | apache-2.0 | 14,523 |
package com.arcusys.valamis.lesson.scorm.model.sequencing
/** Result of evaluating SCORM sequencing rules: an optional termination
 *  request and/or an optional sequencing request — either or both may be absent.
 */
case class SequencingRulesResponse(terminationRequest: Option[TerminationRequestType.Value] = None,
  sequencingRequest: Option[SequencingRequestType.Value] = None)
object SequencingRulesResponse {
  // Named single-argument factories are used (instead of overloaded `apply`
  // methods taking an Option) because such overloads would clash after type erasure.

  /** Response carrying only a termination request. */
  def termination(terminationRequest: TerminationRequestType.Value): SequencingRulesResponse =
    new SequencingRulesResponse(Some(terminationRequest), None)

  /** Response carrying only a sequencing request. */
  def sequencing(sequencingRequest: SequencingRequestType.Value): SequencingRulesResponse =
    new SequencingRulesResponse(None, Some(sequencingRequest))

  /** Response carrying both a termination and a sequencing request. */
  def apply(terminationRequest: TerminationRequestType.Value, sequencingRequest: SequencingRequestType.Value) =
    new SequencingRulesResponse(Some(terminationRequest), Some(sequencingRequest))
}
| ViLPy/Valamis | valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/model/sequencing/SequencingRulesResponse.scala | Scala | lgpl-3.0 | 837 |
package io.seldon.spark.tags
import org.apache.log4j.Logger
import org.apache.log4j.Level
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import io.seldon.spark.SparkUtils
import org.apache.spark.mllib.feature.HashingTF
import org.apache.spark.mllib.feature.IDF
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}
import java.sql.ResultSet
import scala.collection.mutable.ListBuffer
import org.apache.spark.mllib.feature.IDF
import io.seldon.spark.rdd.FileUtils
import io.seldon.spark.rdd.DataSourceMode
/** Configuration for the UserTagAffinityCluster Spark job. Defaults may be
 *  overridden by JSON stored in Zookeeper and/or by command-line options.
 */
case class ClusterTagAffinityConfig(
    client : String = "",                  // client name (used as db and folder suffix)
    inputPath : String = "/seldon-models", // path prefix for input actions
    outputPath : String = "/seldon-models",// path prefix for job output
    startDay : Int = 1,                    // start day (unix day number)
    days : Int = 1,                        // number of past days of actions to read
    awsKey : String = "",                  // AWS access key (for s3n paths)
    awsSecret : String = "",               // AWS secret key
    local : Boolean = false,               // run with a local Spark master
    zkHosts : String = "",                 // comma-separated zookeeper hosts
    activate : Boolean = false,            // publish the resulting model path to zookeeper

    tagFilterPath : String = "",           // CSV of tag,group,cluster definitions
    jdbc : String = "",                    // JDBC url for reading item tags
    tagAttr : String = "",                 // db attribute name containing item tags
    minActionsPerUser : Int = 10,          // users with fewer actions are dropped
    minTagCount : Int = 4,                 // min occurrences of a tag in a user's actions
    minPcIncrease : Double = 0.2)          // min relative lift over global tag frequency
/** Spark job that computes, per user, the tag clusters/groups the user shows an
 *  affinity for, based on how much more often a tag occurs in the user's viewed
 *  items than in the overall corpus. Output is one JSON record per
 *  (user, group, cluster) plus a tags.csv mapping file.
 */
class UserTagAffinityCluster(private val sc : SparkContext,config : ClusterTagAffinityConfig) {

  /** Parse action JSON lines into (userId, itemId) pairs. */
  def parseJsonActions(path : String) = {

    val rdd = sc.textFile(path).map{line =>
      import org.json4s._
      import org.json4s.jackson.JsonMethods._
      implicit val formats = DefaultFormats

      val json = parse(line)
      val user = (json \ "userid").extract[Int]
      val item = (json \ "itemid").extract[Int]
      (user,item)
    }

    rdd
  }

  /** Read (itemId, tags) from the db, where tags come from whichever typed
   *  item_map_* table holds the attribute named `attr`.
   *  NOTE(review): `attr` is spliced into the SQL by string concatenation —
   *  a SQL-injection risk if the attribute name ever comes from untrusted input.
   */
  def getItemTagsFromDb(jdbc : String,attr : String) =
  {
    val sql = "select * from (SELECT i.item_id,i.client_item_id,unix_timestamp(first_op),CASE WHEN imi.value IS NOT NULL THEN cast(imi.value as char) WHEN imd.value IS NOT NULL THEN cast(imd.value as char) WHEN imb.value IS NOT NULL THEN cast(imb.value as char) WHEN imboo.value IS NOT NULL THEN cast(imboo.value as char) WHEN imt.value IS NOT NULL THEN imt.value WHEN imdt.value IS NOT NULL THEN cast(imdt.value as char) WHEN imv.value IS NOT NULL THEN imv.value WHEN e.value_name IS NOT NULL THEN e.value_name END" +
      " tags FROM items i INNER JOIN item_attr a ON a.name in ('"+attr+"') and i.type=a.item_type LEFT JOIN item_map_int imi ON i.item_id=imi.item_id AND a.attr_id=imi.attr_id LEFT JOIN item_map_double imd ON i.item_id=imd.item_id AND a.attr_id=imd.attr_id LEFT JOIN item_map_enum ime ON i.item_id=ime.item_id AND a.attr_id=ime.attr_id LEFT JOIN item_map_bigint imb ON i.item_id=imb.item_id AND a.attr_id=imb.attr_id LEFT JOIN item_map_boolean imboo ON i.item_id=imboo.item_id AND a.attr_id=imboo.attr_id LEFT JOIN item_map_text imt ON i.item_id=imt.item_id AND a.attr_id=imt.attr_id LEFT JOIN item_map_datetime imdt ON i.item_id=imdt.item_id AND a.attr_id=imdt.attr_id LEFT JOIN item_map_varchar imv ON i.item_id=imv.item_id AND a.attr_id=imv.attr_id LEFT JOIN item_attr_enum e ON ime.attr_id =e.attr_id AND ime.value_id=e.value_id " +
      " where i.item_id>? and i.item_id<? order by imv.pos) t where not t.tags is null"
    // JdbcRDD fills the two '?' placeholders with the 0..999999999 item-id range.
    val rdd = new org.apache.spark.rdd.JdbcRDD(
      sc,
      () => {
        Class.forName("com.mysql.jdbc.Driver")
        java.sql.DriverManager.getConnection(jdbc)
      },
      sql,
      0, 999999999, 1,
      (row : ResultSet) => (row.getInt("item_id"),row.getString("tags").toLowerCase().trim())
    )
    rdd
  }

  /** Drop users with fewer than `minActions` actions, then re-key by item. */
  def getFilteredActions(minActions : Int,actions : org.apache.spark.rdd.RDD[(Int,Int)]) = {
    actions.groupBy(_._1).filter(_._2.size >= minActions).flatMap(_._2).map(v => (v._2,v._1)) // filter users with no enough actions and transpose to item first
  }

  /** Serialize (user, group, cluster, tags) tuples as one JSON object per line. */
  def convertJson(affinity : org.apache.spark.rdd.RDD[(Int,Int,Int,String)]) = {
    import org.json4s._
    import org.json4s.JsonDSL._
    import org.json4s.jackson.JsonMethods._

    val userJson = affinity.map{case (user,group,cluster,tags) =>
      val json = (("user" -> user ) ~
        ("group" -> group ) ~
        ("cluster" -> cluster ) ~
        ("tags" -> tags)
        )
      val jsonText = compact(render(json))
      jsonText
    }
    userJson
  }

  /** Publish the model location to zookeeper so the serving layer picks it up. */
  def activate(location : String)
  {
    import io.seldon.spark.zookeeper.ZkCuratorHandler
    import org.apache.curator.utils.EnsurePath
    val curator = new ZkCuratorHandler(config.zkHosts)
    if(curator.getCurator.getZookeeperClient.blockUntilConnectedOrTimedOut())
    {
      val zkPath = "/all_clients/"+config.client+"/tagcluster"
      val ensurePath = new EnsurePath(zkPath)
      ensurePath.ensure(curator.getCurator.getZookeeperClient)
      curator.getCurator.setData().forPath(zkPath,location.getBytes())
    }
    else
      println("Failed to get zookeeper! Can't activate model")
  }

  /** Main pipeline: load actions and item tags, compute per-user tag affinities
   *  against global tag frequencies, and write JSON + tags.csv to outputPath.
   */
  def run()
  {
    val actionsGlob = config.inputPath + "/" + config.client+"/actions/"+SparkUtils.getS3UnixGlob(config.startDay,config.days)+"/*"

    println("loading actions from "+actionsGlob)
    println("Loading tags from "+config.jdbc)

    val rddActions = getFilteredActions(config.minActionsPerUser, parseJsonActions(actionsGlob))

    // tag, group, cluster within group
    //e.g.
    // Liverpool FC,football teams,liverpool
    // Everton FC,football teams,everton
    val tagFilterDefns = sc.textFile(config.tagFilterPath).map { x =>
      val tagClass = x.split(",")
      (tagClass(0).trim().toLowerCase(),(tagClass(1).trim().toLowerCase(),tagClass(2).trim().toLowerCase()))
    }.collect().toMap[String,(String,String)]

    for(k <- tagFilterDefns.keys)
    {
      val (group,cluster) = tagFilterDefns(k)
      println(k+","+tagFilterDefns(k))
    }

    // create idx for clusters and groups
    val groupIdx = tagFilterDefns.map{case (tag,(group,cluster)) => group}.toSet.zipWithIndex.toMap
    val clusterIdx = tagFilterDefns.map{case (tag,(group,cluster)) => cluster}.toSet.zipWithIndex.toMap
    val clusterToGroupIdx = tagFilterDefns.map{case (tag,(group,cluster)) => (clusterIdx(cluster),groupIdx(group))}.toMap

    // create broadcast data
    val bc_groupIdx = sc.broadcast(groupIdx)
    val bc_clusterIdx = sc.broadcast(clusterIdx)
    val bc_clusterToGroupIdx = sc.broadcast(clusterToGroupIdx)
    val bc_tagFilterDefns = sc.broadcast(tagFilterDefns)

    // get item tags from db
    val rddItems = getItemTagsFromDb(config.jdbc, config.tagAttr)

    // Calculate for each tag the percentage of articles in which it appears
    val rddCombined = rddActions.join(rddItems)
    val numActions = rddCombined.count()
    val tagCounts = rddCombined.flatMap(_._2._2.split(",")).map { x => (x.trim().toLowerCase(),1) }.reduceByKey(_ + _).collectAsMap
    val tagPercent = scala.collection.mutable.Map[String,Float]()
    for((t,c) <- tagCounts) tagPercent(t) = c/numActions.toFloat

    println("tagCounts size is "+tagCounts.size)

    // extract tags and filter to desired tags; per user: (comma-joined tags over
    // all viewed items, number of items viewed)
    val rddFeatures = rddCombined.map{ case (item,(user,tags)) => (user,(item,tags))}.groupByKey()
      .mapValues{v =>
        var doc = new StringBuilder()
        var allTags = ListBuffer[String]()
        val tagFilter = bc_tagFilterDefns.value
        for ((item,tags) <- v)
        {
          for(tag <- tags.split(","))
          {
            val tagToken = tag.trim().toLowerCase()//.replaceAll("[ :;'\",]", "_")
            if (tagToken.size > 0)
            {
              if (tagFilter.size > 0)
              {
                if (tagFilter.contains(tagToken))
                  allTags.append(tagToken)
              }
              else
                allTags.append(tagToken)
            }
          }
        }
        (allTags.mkString(","),v.size)
      }

    val bc_tagPercent = sc.broadcast(tagPercent)
    val minTagCount = config.minTagCount
    val minPcIncrease = config.minPcIncrease
    val minUserActions = config.minActionsPerUser
    // For each user, keep tags whose in-user frequency exceeds the global
    // frequency by minPcIncrease, and emit one row per affected cluster.
    val tagAffinity = rddFeatures.flatMap{case (user,(tags,numDocs)) =>
      var allTags = ListBuffer[(Int,Int,Int,String)]()
      var clusterToCount = collection.mutable.Map[Int, Int]().withDefaultValue(0)
      var tagsFound = collection.mutable.Map[Int, String]().withDefaultValue("")
      if (numDocs >= minUserActions)
      {
        val tagPercent = bc_tagPercent.value
        val tagDefns = bc_tagFilterDefns.value
        val clusterIdx = bc_clusterIdx.value
        val groupIdx = bc_groupIdx.value
        val clusterToGroupIdx = bc_clusterToGroupIdx.value
        val tagCounts = tags.split(",").groupBy { l => l }.map(t => (t._1, t._2.length))
        for (tag <- tags.split(",").toSet[String])
        {
          val tag_tf = tagCounts(tag)
          if (tag_tf > minTagCount)
          {
            val tagPc = tag_tf/numDocs.toFloat
            val tagPcGlobal = tagPercent(tag)
            val pc_increase = (tagPc - tagPcGlobal)/tagPcGlobal
            if (pc_increase > minPcIncrease)
            {
              val affinity = pc_increase
              val (group,cluster) = tagDefns(tag)
              val groupId = groupIdx(group)
              val clusterId = clusterIdx(cluster)
              clusterToCount(clusterId) += 1
              // NOTE(review): this leaves a trailing comma in the tags field of
              // the JSON output — presumably tolerated downstream; confirm.
              tagsFound(clusterId) += (tag+ ",")
            }
          }
        }
      }
      for(k <- clusterToCount.keys)
      {
        allTags.append((user,clusterToGroupIdx(k),k,tagsFound(k)))
      }
      allTags
    }

    val jsonRdd = convertJson(tagAffinity)

    val outPath = config.outputPath + "/" + config.client + "/tagcluster/"+config.startDay

    jsonRdd.coalesce(1, false).saveAsTextFile(outPath)

    /* Not Needed?
    val clusterToGroupCSV = clusterToGroupIdx.map{case (cluster,group) => cluster.toString()+","+group.toString()}
    FileUtils.outputModelToFile(clusterToGroupCSV.toArray, outPath, DataSourceMode.fromString(outPath), "clusterToGroup.csv")

    val clusterIdxCSV = clusterIdx.map{case (cluster,idx) => cluster +","+idx.toString()}
    FileUtils.outputModelToFile(clusterIdxCSV.toArray, outPath, DataSourceMode.fromString(outPath), "cluster.csv")

    val groupIdxCSV = groupIdx.map{case (group,idx) => group+","+idx.toString()}
    FileUtils.outputModelToFile(groupIdxCSV.toArray, outPath, DataSourceMode.fromString(outPath), "group.csv")
    */
    // Write the tag -> (groupId, clusterId) mapping used by the serving layer.
    val tagFilterDefnsCSV = tagFilterDefns.map{case (tag,(groupName,clusterName)) =>
      val groupId = groupIdx(groupName)
      val clusterId = clusterIdx(clusterName)
      tag+","+groupId.toString()+","+clusterId.toString()
    }
    FileUtils.outputModelToFile(tagFilterDefnsCSV.toArray, outPath, DataSourceMode.fromString(outPath), "tags.csv")

    if (config.activate)
      activate(outPath)
  }
}
/** Entry point: merges default config, zookeeper config and command-line
 *  options, then runs the UserTagAffinityCluster job.
 */
object UserTagAffinityCluster
{
  /** Overlay JSON config stored at /all_clients/&lt;client&gt;/offline/tagcluster in
   *  zookeeper onto the given config; returns the config unchanged when no
   *  zookeeper hosts are configured or the path does not exist.
   */
  def updateConf(config : ClusterTagAffinityConfig) =
  {
    import io.seldon.spark.zookeeper.ZkCuratorHandler
    var c = config.copy()
    if (config.zkHosts.nonEmpty)
    {
      val curator = new ZkCuratorHandler(config.zkHosts)
      val path = "/all_clients/"+config.client+"/offline/tagcluster"
      if (curator.getCurator.checkExists().forPath(path) != null)
      {
        val bytes = curator.getCurator.getData().forPath(path)
        val j = new String(bytes,"UTF-8")
        println("Confguration from zookeeper -> "+j)
        import org.json4s._
        import org.json4s.jackson.JsonMethods._
        implicit val formats = DefaultFormats
        val json = parse(j)
        import org.json4s.JsonDSL._
        import org.json4s.jackson.Serialization.write
        type DslConversion = ClusterTagAffinityConfig => JValue
        val existingConf = write(c) // turn existing conf into json
        val existingParsed = parse(existingConf) // parse it back into json4s internal format
        val combined = existingParsed merge json // merge with zookeeper value
        c = combined.extract[ClusterTagAffinityConfig] // extract case class from merged json
        c
      }
      else
      {
        println("Warning: using default configuaration - path["+path+"] not found!");
        c
      }
    }
    else
    {
      println("Warning: using default configuration - no zkHost!");
      c
    }
  }

  def main(args: Array[String])
  {
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    // Mutable config accumulated by scopt side-effecting option handlers.
    var c = new ClusterTagAffinityConfig()
    val parser = new scopt.OptionParser[Unit]("UserTagAffinityCluster") {
    head("UserTagAffinityCluster", "1.0")
       opt[Unit]('l', "local") foreach { x => c = c.copy(local = true) } text("local mode - use local Master")
        opt[String]('c', "client") required() valueName("<client>") foreach { x => c = c.copy(client = x) } text("client name (will be used as db and folder suffix)")
        opt[String]('i', "inputPath") valueName("path url") foreach { x => c = c.copy(inputPath = x) } text("path prefix for input")
        opt[String]('o', "outputPath") valueName("path url") foreach { x => c = c.copy(outputPath = x) } text("path prefix for output")
        opt[Int]('r', "days") foreach { x =>c = c.copy(days = x) } text("number of days in past to get foreachs for")
        opt[Int]("startDay") foreach { x =>c = c.copy(startDay = x) } text("start day in unix time")
        opt[String]('a', "awskey") valueName("aws access key") foreach { x => c = c.copy(awsKey = x) } text("aws key")
        opt[String]('s', "awssecret") valueName("aws secret") foreach { x => c = c.copy(awsSecret = x) } text("aws secret")
        opt[String]('z', "zookeeper") valueName("zookeeper hosts") foreach { x => c = c.copy(zkHosts = x) } text("zookeeper hosts (comma separated)")
        opt[Unit]("activate") foreach { x => c = c.copy(activate = true) } text("activate the model in the Seldon Server")

        opt[String]('j', "jdbc") valueName("<JDBC URL>") foreach { x => c = c.copy(jdbc = x) } text("jdbc url (to get dimension for all items)")
        opt[Int]('m', "minActionsPerUser") foreach { x => c = c.copy(minActionsPerUser = x) } text("min number of actions per user")
        opt[String]("tagFilterPath") valueName("path url") foreach { x => c = c.copy(tagFilterPath = x) } text("tag filter path")
        opt[String]("tagAttr") valueName("tag attr") foreach { x => c = c.copy(tagAttr = x) } text("db attribute name containing tags")
        opt[Int]("minTagCount") foreach { x => c = c.copy(minTagCount = x) } text("min count for tags in user actions")
        opt[Double]("minPcIncrease") foreach { x => c = c.copy(minPcIncrease = x) } text("min percentage increase for affinity to be included")
    }

    // Parse twice: first to learn client/zkHosts, then (after the zookeeper
    // merge) so explicit command-line options win over zookeeper values.
    if (parser.parse(args)) // Parse to check and get zookeeper if there
    {
      c = updateConf(c) // update from zookeeper args
      parser.parse(args) // overrride with args that were on command line

      val conf = new SparkConf().setAppName("UserTagAffinityCluster")

      if (c.local)
        conf.setMaster("local")
     //   .set("spark.akka.frameSize", "300")

      val sc = new SparkContext(conf)
      try
      {
        sc.hadoopConfiguration.set("fs.s3.impl", "org.apache.hadoop.fs.s3native.NativeS3FileSystem")
        if (c.awsKey.nonEmpty && c.awsSecret.nonEmpty)
        {
          sc.hadoopConfiguration.set("fs.s3n.awsAccessKeyId", c.awsKey)
          sc.hadoopConfiguration.set("fs.s3n.awsSecretAccessKey", c.awsSecret)
        }
        println(c)
        val cu = new UserTagAffinityCluster(sc,c)
        cu.run()
      }
      finally
      {
        println("Shutting down job")
        sc.stop()
      }
    }
    else
    {
      // argument parsing failed; scopt has already printed usage
    }
  }
}
| michaelshing/seldon-server | offline-jobs/spark/src/main/scala/io/seldon/spark/tags/UserTagAffinityCluster.scala | Scala | apache-2.0 | 15,637 |
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.3
* @date Wed Feb 20 17:39:57 EST 2013
* @see LICENSE (MIT style license file).
*/
package scalation.analytics.par
import math.pow
import scalation.linalgebra.{Fac_QR_H, VectoD}
import scalation.linalgebra.par._
import scalation.plot.Plot
import scalation.util.{Error, time}
import scalation.analytics.Predictor
import scalation.analytics.RegTechnique._
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Regression` class supports multiple linear regression. In this case,
* 'x' is multi-dimensional [1, x_1, ... x_k]. Fit the parameter vector 'b' in
* the regression equation
* <p>
* y = b dot x + e = b_0 + b_1 * x_1 + ... b_k * x_k + e
* <p>
* where 'e' represents the residuals (the part not explained by the model).
* Use Least-Squares (minimizing the residuals) to fit the parameter vector
* <p>
* b = x_pinv * y [ alternative: b = solve (y) ]
* <p>
* where 'x_pinv' is the pseudo-inverse. Three techniques are provided:
* <p>
* 'Fac_QR' // QR Factorization: slower, more stable (default)
* 'Fac_Cholesky' // Cholesky Factorization: faster, less stable (reasonable choice)
* 'Inverse' // Inverse/Gaussian Elimination, classical textbook technique (outdated)
* <p>
* This version uses parallel processing to speed up execution.
* @see see.stanford.edu/materials/lsoeldsee263/05-ls.pdf
* @param x the input/design m-by-n matrix augmented with a first column of ones
* @param y the response vector
* @param technique the technique used to solve for b in x.t*x*b = x.t*y
*/
class Regression (x: MatrixD, y: VectorD, technique: RegTechnique = QR)
      extends Predictor with Error
{
    if (y != null && x.dim1 != y.dim) flaw ("constructor", "dimensions of x and y are incompatible")
    if (x.dim1 <= x.dim2) flaw ("constructor", "not enough data rows in matrix to use regression")

    private val DEBUG      = false                             // debug flag (currently unused)
    private val k          = x.dim2 - 1                        // number of variables (k = n-1)
    private val m          = x.dim1.toDouble                   // number of data points (rows)
    private val r_df       = (m-1.0) / (m-k-1.0)               // ratio of degrees of freedom
    private var rSquared   = -1.0                              // coefficient of determination (quality of fit)
    private var rBarSq     = -1.0                              // Adjusted R-squared
    private var fStat      = -1.0                              // F statistic (quality of fit)

    type Fac_QR = Fac_QR_H [MatrixD]                           // change as needed

    private val fac = technique match {                        // select the factorization technique
        case QR       => new Fac_QR (x)                        // QR Factorization
        case Cholesky => new Fac_Cholesky (x.t * x)            // Cholesky Factorization
        case _        => null                                  // don't factor, use inverse
    } // match

    private val x_pinv = technique match {                     // pseudo-inverse of x
        case QR       => val (q, r) = fac.factor12 (); r.inverse * q.t
        case Cholesky => fac.factor (); null                   // don't compute it directly (solve later)
        case _        => (x.t * x).inverse * x.t               // classic textbook technique
    } // match

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Train the predictor by fitting the parameter vector (b-vector) in the
     *  multiple regression equation
     *      y  =  b dot x + e  =  [b_0, ... b_k] dot [1, x_1 , ... x_k] + e
     *  using the least squares method. Also updates the fit statistics
     *  (rSquared, rBarSq, fStat) as a side effect.
     */
    def train ()
    {
        b        = if (x_pinv == null) fac.solve (y)
                   else x_pinv * y                             // parameter vector [b_0, b_1, ... b_k]
        val e    = y - x * b                                   // residual/error vector
        val sse  = e dot e                                     // residual/error sum of squares
        val sst  = (y dot y) - pow (y.sum, 2) / m              // total sum of squares
        val ssr  = sst - sse                                   // regression sum of squares
        rSquared = ssr / sst                                   // coefficient of determination (R-squared)
        rBarSq   = 1.0 - (1.0-rSquared) * r_df                 // R-bar-squared (adjusted R-squared)
        fStat    = ssr * (m-k-1.0)  / (sse * k)                // F statistic (msr / mse)
    } // train

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Retrain the predictor by fitting the parameter vector (b-vector) in the
     *  multiple regression equation
     *      yy  =  b dot x + e  =  [b_0, ... b_k] dot [1, x_1 , ... x_k] + e
     *  using the least squares method.
     *  @param yy  the new response vector
     */
    def train (yy: VectorD)
    {
        b        = if (x_pinv == null) fac.solve (yy)
                   else x_pinv * yy                            // parameter vector [b_0, b_1, ... b_k]
        val e    = yy - x * b                                  // residual/error vector
        val sse  = e dot e                                     // residual/error sum of squares
        val sst  = (yy dot yy) - pow (yy.sum, 2) / m           // total sum of squares
        val ssr  = sst - sse                                   // regression sum of squares
        rSquared = ssr / sst                                   // coefficient of determination
        rBarSq   = 1.0 - (1.0-rSquared) * r_df                 // R-bar-squared (adjusted R-squared)
        fStat    = ssr * (m-k-1.0)  / (sse * k)                // F statistic (msr / mse)
    } // train

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the quality of the fit as (rSquared, rBarSq, fStat).
     *  Only meaningful after 'train' has been called.
     */
    def fit: VectorD = VectorD (rSquared, rBarSq, fStat)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of y = f(z) by evaluating the formula y = b dot z,
     *  e.g., (b_0, b_1, b_2) dot (1, z_1, z_2).
     *  @param z  the new vector to predict
     */
    def predict (z: VectoD): Double = b dot z

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Perform backward elimination to remove the least predictive variable
     *  from the model, returning the variable to eliminate, the new parameter
     *  vector, the new R-squared value and the new F statistic.
     *  Note: trains k candidate sub-models, one per removed column.
     */
    def backElim (): Tuple3 [Int, VectoD, VectorD] =
    {
        var j_max   = -1                               // index of variable to eliminate
        var b_max: VectoD = null                       // parameter values for best solution
        var ft_max = VectorD (3); ft_max.set (-1.0)    // optimize on quality of fit (ft(0) is rSquared)

        for (j <- 1 to k) {
            val keep = m.toInt                         // i-value large enough to not exclude any rows in slice
            val rg_j = new Regression (x.sliceExclude (keep, j), y)       // regress with x_j removed
            rg_j.train ()
            val b  = rg_j.coefficient
            val ft = rg_j.fit
            if (ft(0) > ft_max(0)) { j_max = j; b_max = b; ft_max = ft }
        } // for
        (j_max, b_max, ft_max)
    } // backElim

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the Variance Inflation Factor 'VIF' for each variable to test
     *  for multi-collinearity by regressing 'xj' against the rest of the variables.
     *  A VIF over 10 indicates that over 90% of the variance of 'xj' can be predicted
     *  from the other variables, so 'xj' is a candidate for removal from the model.
     */
    def vif: VectorD =
    {
        val vifV = new VectorD (k)                     // VIF vector
        for (j <- 1 to k) {
            val keep = m.toInt                         // i-value large enough to not exclude any rows in slice
            val x_j  = x.col(j)                                           // x_j is jth column in x
            val rg_j = new Regression (x.sliceExclude (keep, j), x_j)     // regress with x_j removed
            rg_j.train ()
            vifV(j-1) =  1.0 / (1.0 - rg_j.fit(0))     // store vif for x_1 in vifV(0)
        } // for
        vifV
    } // vif

} // Regression class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RegressionTest` object tests `Regression` class using the following
* regression equation.
* <p>
* y = b dot x = b_0 + b_1*x_1 + b_2*x_2.
* <p>
* Test regression and backward elimination.
* @see http://statmaster.sdu.dk/courses/st111/module03/index.html
*/
/** Drives `Regression` on a small 5-point data set (two predictors plus an
 *  intercept), prints the fit, a prediction at one point, and the result of
 *  backward elimination.
 */
object RegressionTest extends App
{
    // design matrix: intercept column followed by x_1 and x_2
    val x = new MatrixD ((5, 3), 1.0, 36.0,  66.0,
                                 1.0, 37.0,  68.0,
                                 1.0, 47.0,  64.0,
                                 1.0, 32.0,  53.0,
                                 1.0,  1.0, 101.0)
    val y = VectorD (745.0, 895.0, 442.0, 440.0, 1598.0)        // responses
    val z = VectorD (1.0, 20.0, 80.0)                           // point to predict at

    println (s"x = $x")
    println (s"y = $y")

    val model = new Regression (x, y)
    model.train ()
    println (s"fit = ${model.fit}")

    val yPredicted = model.predict (z)                          // predict y at the single point z
    println (s"predict ($z) = $yPredicted")

    // drop the least predictive variable and report the reduced model's fit
    println (s"reduced model: fit = ${model.backElim ()}")

} // RegressionTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RegressionTest2` object tests `Regression` class using the following
* regression equation.
* <p>
* y = b dot x = b_0 + b_1*x1 + b_2*x_2.
* <p>
* Test regression using QR Decomposition and Gaussian Elimination for computing
* the pseudo-inverse.
*/
/** Drives `Regression` on a 4-point data set, fitting the same model three
 *  ways — QR factorization, Cholesky factorization and matrix inversion —
 *  and printing fit and prediction for each.
 */
object RegressionTest2 extends App
{
    // design matrix: intercept column followed by x_1 and x_2
    val x = new MatrixD ((4, 3), 1.0, 1.0, 1.0,
                                 1.0, 1.0, 2.0,
                                 1.0, 2.0, 1.0,
                                 1.0, 2.0, 2.0)
    val y = VectorD (6.0, 8.0, 7.0, 9.0)                        // responses
    val z = VectorD (1.0, 2.0, 3.0)                             // point to predict at

    println (s"x = $x")
    println (s"y = $y")

    println ("-------------------------------------------------")
    println ("Fit the parameter vector b using QR Factorization")
    val qrModel = new Regression (x, y)                         // QR is the default technique
    qrModel.train ()
    println (s"fit = ${qrModel.fit}")

    val yPredicted = qrModel.predict (z)                        // predict y at the single point z
    println (s"predict ($z) = $yPredicted")

    println ("-------------------------------------------------")
    println ("Fit the parameter vector b using Cholesky Factorization")
    val cholModel = new Regression (x, y, Cholesky)
    cholModel.train ()
    println (s"fit = ${cholModel.fit}")
    println (s"predict ($z) = ${cholModel.predict (z)}")

    println ("-------------------------------------------------")
    println ("Fit the parameter vector b using Matrix Inversion")
    val invModel = new Regression (x, y, Inverse)
    invModel.train ()
    println (s"fit = ${invModel.fit}")
    println (s"predict ($z) = ${invModel.predict (z)}")

} // RegressionTest2 object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `RegressionTest3` object tests the multi-collinearity method in the
* `Regression` class using the following regression equation.
* <p>
* y = b dot x = b_0 + b_1*x_1 + b_2*x_2 + b_3*x_3 + b_4 * x_4
* <p>
* @see online.stat.psu.edu/online/development/stat501/12multicollinearity/05multico_vif.html
* @see online.stat.psu.edu/online/development/stat501/data/bloodpress.txt
*/
object RegressionTest3 extends App
{
// 20 data points: Constant x_1 x_2 x_3 x_4
// Age Weight Dur Stress
val x = new MatrixD ((20, 5), 1.0, 47.0, 85.4, 5.1, 33.0,
1.0, 49.0, 94.2, 3.8, 14.0,
1.0, 49.0, 95.3, 8.2, 10.0,
1.0, 50.0, 94.7, 5.8, 99.0,
1.0, 51.0, 89.4, 7.0, 95.0,
1.0, 48.0, 99.5, 9.3, 10.0,
1.0, 49.0, 99.8, 2.5, 42.0,
1.0, 47.0, 90.9, 6.2, 8.0,
1.0, 49.0, 89.2, 7.1, 62.0,
1.0, 48.0, 92.7, 5.6, 35.0,
1.0, 47.0, 94.4, 5.3, 90.0,
1.0, 49.0, 94.1, 5.6, 21.0,
1.0, 50.0, 91.6, 10.2, 47.0,
1.0, 45.0, 87.1, 5.6, 80.0,
1.0, 52.0, 101.3, 10.0, 98.0,
1.0, 46.0, 94.5, 7.4, 95.0,
1.0, 46.0, 87.0, 3.6, 18.0,
1.0, 46.0, 94.5, 4.3, 12.0,
1.0, 48.0, 90.5, 9.0, 99.0,
1.0, 56.0, 95.7, 7.0, 99.0)
// response BP
val y = VectorD (105.0, 115.0, 116.0, 117.0, 112.0, 121.0, 121.0, 110.0, 110.0, 114.0,
114.0, 115.0, 114.0, 106.0, 125.0, 114.0, 106.0, 113.0, 110.0, 122.0)
// fit the full model and report timing for the training step
val rg = new Regression (x, y)
time { rg.train () }
println ("fit = " + rg.fit) // fit model y = b_0 + b_1*x_1 + b_2*x_2 + b_3*x_3 + b_4*x_4
println ("vif = " + rg.vif) // test multi-collinearity (VIF)
} // RegressionTest3 object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/analytics/par/Regression.scala | Scala | mit | 14,748 |
package info.armado.ausleihe.admin.transport.responses
import javax.xml.bind.annotation.{XmlAccessType, XmlAccessorType, XmlRootElement}
object AddGamesResponseDTO {
/** Creates a response carrying only the success flag; all barcode arrays are empty. */
def apply(success: Boolean): AddGamesResponseDTO = new AddGamesResponseDTO(success)
/**
* Creates a new [[AddGamesResponseDTO]] instance based on the given `alreadyExistingBarcodes`,
* `duplicateBarcodes` and `emptyTitleBarcodes` arrays.
* The created response will be marked as successful, iff all three arrays are empty.
*
* @param alreadyExistingBarcodes An array containing all barcodes belonging to already existing entities in the database
* @param duplicateBarcodes An array of barcodes reported as duplicates (presumably barcodes occurring more than once in the request — TODO confirm against caller)
* @param emptyTitleBarcodes An array containing all barcodes whose entries have no title set
* @return The created [[AddGamesResponseDTO]] instance
*/
def apply(alreadyExistingBarcodes: Array[String], duplicateBarcodes: Array[String], emptyTitleBarcodes: Array[String]): AddGamesResponseDTO =
AddGamesResponseDTO(
alreadyExistingBarcodes.isEmpty && duplicateBarcodes.isEmpty && emptyTitleBarcodes.isEmpty,
alreadyExistingBarcodes, duplicateBarcodes, emptyTitleBarcodes
)
}
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
// NOTE(review): case-class equals/hashCode over Array fields compares array *references*,
// not contents, so two responses with identical barcodes are not `==`. If structural
// equality is ever needed, compare with `sameElements` — TODO confirm no caller relies
// on case-class equality here. Fields are vars to support JAXB field access.
case class AddGamesResponseDTO(var success: Boolean,
var alreadyExistingBarcodes: Array[String],
var duplicateBarcodes: Array[String],
var emptyTitleBarcodes: Array[String]) {
// No-arg constructor — presumably required by JAXB unmarshalling (see @XmlRootElement); TODO confirm
def this() = this(false, Array(), Array(), Array())
// Convenience constructor for a bare success/failure response without barcode details
def this(success: Boolean) = this(success, Array(), Array(), Array())
}
| Spielekreis-Darmstadt/lending | lending-admin-interfaces/src/main/scala/info/armado/ausleihe/admin/transport/responses/AddGamesResponseDTO.scala | Scala | apache-2.0 | 1,631 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.loadBalancer
import akka.actor.ActorRef
import akka.actor.ActorRefFactory
import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.openwhisk.core.connector.ActivationMessage
import org.apache.openwhisk.core.connector.MessageProducer
import org.apache.openwhisk.core.connector.MessagingProvider
import org.apache.openwhisk.core.entity.InvokerInstanceId
import scala.concurrent.Future
/**
* Factory abstraction for creating the invoker-pool actor, so alternative load
* balancers (and tests) can supply their own wiring of messaging and monitoring.
*/
trait InvokerPoolFactory {
def createInvokerPool(
actorRefFactory: ActorRefFactory,
messagingProvider: MessagingProvider,
messagingProducer: MessageProducer,
sendActivationToInvoker: (MessageProducer, ActivationMessage, InvokerInstanceId) => Future[RecordMetadata],
monitor: Option[ActorRef]): ActorRef
}
| starpit/openwhisk | core/controller/src/main/scala/org/apache/openwhisk/core/loadBalancer/InvokerPoolFactory.scala | Scala | apache-2.0 | 1,565 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.fr.FormRunner.{dropTrailingSlash => _, _}
import org.orbeon.oxf.util.DateUtils
import org.orbeon.oxf.util.ScalaUtils._
import org.orbeon.oxf.util.StringReplacer._
import org.orbeon.oxf.xforms.action.XFormsAPI.{insert, _}
import org.orbeon.saxon.om.{NodeInfo, SequenceIterator}
import org.orbeon.scaxon.XML._
import scala.collection.{immutable ⇒ i}
import scala.util.Try
trait FormRunnerHome {
// Splits an "app/form" string into an (app, form) pair
private def appForm(s: String) = {
val parts = split[List](s, "/")
parts(0) → parts(1)
}
// Availability flag plus last-modified timestamp for one side (local or remote)
private case class AvailableAndTime(available: Boolean, time: Long)
// One form definition as seen from the Home page: its local and/or remote state plus the user's operations
private case class Form(app: String, form: String, local: Option[AvailableAndTime], remote: Option[AvailableAndTime], ops: Set[String]) {
import Form._
def isLocalAvailable = local exists (_.available)
def isRemoteAvailable = remote exists (_.available)
def isLocalUnavailable = local exists (! _.available)
def isRemoteUnavailable = remote exists (! _.available)
def isLocal = local.isDefined
def isRemote = remote.isDefined
// "Newer" comparisons only apply when the form exists on both sides
def isLocalNewer = isLocal && isRemote && local.get.time > remote.get.time
def isRemoteNewer = isLocal && isRemote && remote.get.time > local.get.time
def isSummaryAllowed = ops intersect SummaryOps nonEmpty
def isNewAllowed = ops intersect NewOps nonEmpty
def isAdmin = ops(AdminOp)
}
private object Form {
// Operations granting access to the Summary page / to creating new form data
private val SummaryOps = Set("*", "update", "read", "delete")
private val NewOps = Set("*", "create")
private val AdminOp = "admin"
// Builds a Form from its XML metadata element
def apply(form: NodeInfo): Form = {
val localTime = form elemValueOpt "last-modified-time"
val remoteTime = form elemValueOpt "remote-last-modified-time"
Form(
form elemValue "application-name",
form elemValue "form-name",
localTime map (v ⇒ AvailableAndTime((form elemValue "available") != "false", DateUtils.parseISODateOrDateTime(v))),
remoteTime map (v ⇒ AvailableAndTime((form elemValue "remote-available") != "false", DateUtils.parseISODateOrDateTime(v))),
stringToSet(form /@ "operations")
)
}
}
// Converts the raw iterator of form elements into Form values, keeping those matching `p`
private def collectForms(forms: SequenceIterator, p: NodeInfo ⇒ Boolean = _ ⇒ true) =
asScalaIterator(forms) collect { case form: NodeInfo if p(form) ⇒ Form(form) }
// Keeps only the forms whose app/form pair appears in `selection` (whitespace-separated "app/form" tokens)
private def formsForSelection(selection: String, forms: SequenceIterator) = {
val appFormsSet = split[List](selection) map appForm toSet
def formIsSelected(form: NodeInfo) =
appFormsSet((form elemValue "application-name") → (form elemValue "form-name"))
collectForms(forms, formIsSelected)
}
def isLocal(form: NodeInfo) = Form(form).local.isDefined
def isRemote(form: NodeInfo) = Form(form).remote.isDefined
//@XPathFunction
def isLocalAvailable(form: NodeInfo) = Form(form).local exists(_.available)
//@XPathFunction
def isRemoteAvailable(form: NodeInfo) = Form(form).remote exists(_.available)
//@XPathFunction
def isLocalUnavailable(form: NodeInfo) = Form(form).local exists(! _.available)
//@XPathFunction
def isRemoteUnavailable(form: NodeInfo) = Form(form).remote exists(! _.available)
//@XPathFunction
def isLocalNewer(form: NodeInfo) = Form(form).isLocalNewer
//@XPathFunction
def isRemoteNewer(form: NodeInfo) = Form(form).isRemoteNewer
// NOTE(review): the `canSelect*` functions below scan *all* forms and ignore their
// `selection` argument, while the `can*` functions after them test only the selected
// forms — TODO confirm this asymmetry is intentional
//@XPathFunction
def canSelectUnpublishedLocal(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isLocalUnavailable)
//@XPathFunction
def canSelectPublishedLocal(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isLocalAvailable)
//@XPathFunction
def canSelectUnpublishedRemote(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isRemoteUnavailable)
//@XPathFunction
def canSelectPublishedRemote(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isRemoteAvailable)
//@XPathFunction
def canSelectLocalNewer(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isLocalNewer)
//@XPathFunction
def canSelectRemoteNewer(selection: String, forms: SequenceIterator) =
collectForms(forms) exists (f ⇒ f.isAdmin && f.isRemoteNewer)
//@XPathFunction
def canPublishLocal(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isLocalUnavailable)
//@XPathFunction
def canUnpublishLocal(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isLocalAvailable)
//@XPathFunction
def canPublishRemote(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isRemoteUnavailable)
//@XPathFunction
def canUnpublishRemote(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isRemoteAvailable)
//@XPathFunction
def canPublishLocalToRemote(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isLocal)
//@XPathFunction
def canPublishRemoteToLocal(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isRemote)
//@XPathFunction
def canNavigateSummary(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) exists (f ⇒ f.isLocalAvailable && f.isSummaryAllowed)
//@XPathFunction
def canNavigateNew(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) exists (f ⇒ f.isLocalAvailable && f.isNewAllowed)
//@XPathFunction
def canUpgradeLocal(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isLocal)
//@XPathFunction
def canUpgradeRemote(selection: String, forms: SequenceIterator) =
formsForSelection(selection, forms) forall (_.isRemote)
// Publishes the given form definition (with attachments) under the standard
// form-definition base path; version "next" requests a new version on the target.
//@XPathFunction
def publish(
xhtml : NodeInfo,
toBaseURI : String,
app : String,
form : String,
username : String,
password : String,
forceAttachments : Boolean
): Unit =
putWithAttachments(
data = xhtml.root,
toBaseURI = toBaseURI,
fromBasePath = createFormDefinitionBasePath(app, form),
toBasePath = createFormDefinitionBasePath(app, form),
filename = "form.xhtml",
commonQueryString = "",
forceAttachments = forceAttachments,
username = nonEmptyOrNone(username),
password = nonEmptyOrNone(password),
formVersion = Some("next")
)
// NOTE: It would be great if we could work on typed data, whether created from XML, JSON or an object
// serialization. Here we juggle between XML and typed data.
// Merges local and remote form metadata keyed on (app, form): local-only entries pass
// through unchanged; remote-only entries are wrapped with `remote-` prefixed elements;
// entries on both sides get the remote elements appended to the local node.
//@XPathFunction
def joinLocalAndRemoteMetadata(
local : SequenceIterator,
remote : SequenceIterator,
permissionInstance : NodeInfo
): SequenceIterator = {
val combinedIndexIterator = {
def makeAppFormKey(node: NodeInfo) =
(node elemValue "application-name", node elemValue "form-name")
def createIndex(it: SequenceIterator) = asScalaIterator(it) collect {
case node: NodeInfo ⇒ makeAppFormKey(node) → node
} toMap
val localIndex = createIndex(local)
val remoteIndex = createIndex(remote)
(localIndex.keySet ++ remoteIndex.keySet).iterator map { key ⇒
key →(localIndex.get(key), remoteIndex.get(key))
}
}
def createNode(localAndOrRemote: (Option[NodeInfo], Option[NodeInfo])): NodeInfo = {
def remoteElements(remoteNode: NodeInfo) = {
def remoteElement(name: String) =
elementInfo("remote-" + name, stringToStringValue(remoteNode elemValue name))
List(
remoteElement("title"),
remoteElement("available"),
remoteElement("last-modified-time")
)
}
localAndOrRemote match {
case (Some(localNode), None) ⇒
localNode
case (None, Some(remoteNode)) ⇒
// Don't just use remoteNode, because we need `remote-` prefixes for remote data
elementInfo("form", (remoteNode / "application-name" head) :: (remoteNode / "form-name" head) :: remoteElements(remoteNode))
case (Some(localNode), Some(remoteNode)) ⇒
insert(origin = remoteElements(remoteNode), into = localNode, after = localNode / *, doDispatch = false)
localNode
case (None, None) ⇒
throw new IllegalStateException
}
}
for (((app, form), localAndOrRemote) ← combinedIndexIterator)
yield createNode(localAndOrRemote)
}
// Return remote servers information:
//
// 1. If the backward compatibility property (oxf.fr.production-server-uri) is present and not empty, try to use it
// and return a sequence of one string containing the server URL configured.
// 2. Else try the JSON property (oxf.fr.home.remote-servers). If the property exists and is well-formed, return
// a flattened sequence of label/url pairs.
// 3. Otherwise the empty sequence is returned.
//@XPathFunction
def remoteServersXPath: SequenceIterator = {
import FormRunnerHome._
def fromCompatibility =
remoteServerFromCompatibilityProperty map (List(_))
def fromJSON =
remoteServersFromJSONProperty map { values ⇒
values flatMap { case (label, uri) ⇒ label :: uri :: Nil }
}
def fromEither =
fromCompatibility orElse fromJSON getOrElse List.empty[String]
fromEither
}
}
object FormRunnerHome {
import spray.json._
// Parses a JSON string into label/URL pairs (see tryRemoteServersFromJSON)
def tryRemoteServersFromString(json: String) =
Try(json.parseJson) flatMap tryRemoteServersFromJSON
// Accepts a JSON array of objects with non-empty string "label" and "url" members;
// trailing slashes are dropped from URLs. Any other shape fails the Try.
def tryRemoteServersFromJSON(json: JsValue) = Try {
json match {
case JsArray(elements) ⇒
elements collect {
case JsObject(fields) ⇒
def stringValueOrThrow(v: JsValue) = (
collectByErasedType[JsString](v)
map (_.value)
flatMap nonEmptyOrNone
getOrElse (throw new IllegalArgumentException)
)
stringValueOrThrow(fields("label")) → dropTrailingSlash(stringValueOrThrow(fields("url")))
}
case other ⇒
throw new IllegalArgumentException
}
}
// Reads `oxf.fr.home.remote-servers`; malformed JSON logs a warning and yields Nil
private def remoteServersFromJSONProperty: Option[i.Seq[(String, String)]] =
Option(properties.getProperty("oxf.fr.home.remote-servers")) map { property ⇒
Try(property.associatedValue(_.value.toString.parseJson)) flatMap tryRemoteServersFromJSON getOrElse {
implicit val logger = containingDocument.getIndentedLogger("form-runner")
warn(
s"incorrect JSON configuration for property `oxf.fr.home.remote-servers`",
Seq("JSON" → property.value.toString)
)
Nil
}
}
// Backward-compatibility property holding a single remote server URL
private def remoteServerFromCompatibilityProperty: Option[String] = (
Option(properties.getStringOrURIAsString("oxf.fr.production-server-uri"))
flatMap nonEmptyOrNone
map dropTrailingSlash
)
}
| wesley1001/orbeon-forms | src/main/scala/org/orbeon/oxf/fr/FormRunnerHome.scala | Scala | lgpl-2.1 | 11,891 |
package org.jetbrains.plugins.scala.lang.transformation.types
import org.jetbrains.plugins.scala.lang.transformation.TransformerTest
/**
 * Exercises the `ExpandTupleType` transformation: tuple-type sugar such as
 * `(A, B)` is rewritten to the explicit `TupleN[...]` form, while parenthesized
 * single types, function-type parameter lists and already-explicit `TupleN`
 * references are left unchanged.
 *
 * @author Pavel Fatin
 */
class ExpandTupleTypeTest extends TransformerTest(ExpandTupleType) {

  // A pair type expands to Tuple2
  def testTuple2(): Unit = check("val v: (A, B)", "val v: Tuple2[A, B]")

  // A triple type expands to Tuple3
  def testTuple3(): Unit = check("val v: (A, B, C)", "val v: Tuple3[A, B, C]")

  // Parentheses around a single type are not a tuple — no change
  def testParens(): Unit = check("val v: (A)", "val v: (A)")

  // The parameter list of a function type is not a tuple — no change
  def testInsideFunctionType(): Unit = check("val v: (A, B) => C", "val v: (A, B) => C")

  // An already-explicit Tuple2 stays as is
  def testExplicit(): Unit = check("val v: Tuple2[A, B]", "val v: Tuple2[A, B]")
}
| whorbowicz/intellij-scala | test/org/jetbrains/plugins/scala/lang/transformation/types/ExpandTupleTypeTest.scala | Scala | apache-2.0 | 656 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
import kafka.utils.{Time, SystemTime, ZkUtils, Logging}
import org.I0Itec.zkclient.{IZkChildListener, ZkClient}
import scala.collection.JavaConverters._
/**
* Handles the data payload of a single change-notification znode.
*/
trait NotificationHandler {
def processNotification(notificationMessage: String)
}
/**
* A listener that subscribes to seqNodeRoot for any child changes where all children are assumed to be sequence nodes
* with seqNodePrefix. When a child is added under seqNodeRoot this class gets notified, it looks at lastExecutedChange
* number to avoid duplicate processing and if it finds an unprocessed child, it reads its data and calls the supplied
* notificationHandler's processNotification() method with the child's data as argument. As part of processing these changes it also
* purges any children with currentTime - createTime > changeExpirationMs.
*
* The caller/user of this class should ensure that they use zkClient.subscribeStateChanges and call processAllNotifications
* method of this class from ZkStateChangeListener's handleNewSession() method. This is necessary to ensure that if zk session
* is terminated and reestablished any missed notification will be processed immediately.
* @param zkUtils ZooKeeper accessor used for all reads, deletes and subscriptions
* @param seqNodeRoot parent znode under which the sequence children are created
* @param seqNodePrefix prefix of every sequence child; the remainder of the name is the change number
* @param notificationHandler callback invoked with each unprocessed child's data
* @param changeExpirationMs age (relative to znode creation time) after which a child is purged
* @param time clock source, injectable for testing
*/
class ZkNodeChangeNotificationListener(private val zkUtils: ZkUtils,
private val seqNodeRoot: String,
private val seqNodePrefix: String,
private val notificationHandler: NotificationHandler,
private val changeExpirationMs: Long = 15 * 60 * 1000,
private val time: Time = SystemTime) extends Logging {
// Highest change number visited so far; children with a number <= this are skipped
private var lastExecutedChange = -1L
/**
* create seqNodeRoot and begin watching for any new children nodes.
*/
def init() {
zkUtils.makeSurePersistentPathExists(seqNodeRoot)
zkUtils.zkClient.subscribeChildChanges(seqNodeRoot, NodeChangeListener)
processAllNotifications()
}
/**
* Process all changes currently present under seqNodeRoot, in change-number order.
*/
def processAllNotifications() {
val changes = zkUtils.zkClient.getChildren(seqNodeRoot)
processNotifications(changes.asScala.sorted)
}
/**
* Process the given list of notifications. Callers pass the list sorted ascending,
* so lastExecutedChange can simply be advanced to each visited change number.
*/
private def processNotifications(notifications: Seq[String]) {
if (notifications.nonEmpty) {
info(s"Processing notification(s) to $seqNodeRoot")
val now = time.milliseconds
for (notification <- notifications) {
val changeId = changeNumber(notification)
if (changeId > lastExecutedChange) {
val changeZnode = seqNodeRoot + "/" + notification
// Data may be null if the znode was deleted between listing and reading
val (data, stat) = zkUtils.readDataMaybeNull(changeZnode)
data map (notificationHandler.processNotification(_)) getOrElse(logger.warn(s"read null data from $changeZnode when processing notification $notification"))
}
lastExecutedChange = changeId
}
purgeObsoleteNotifications(now, notifications)
}
}
/**
* Purges expired notifications.
* @param now current time in ms, compared against each znode's creation time
* @param notifications candidate child names to examine for expiry
*/
private def purgeObsoleteNotifications(now: Long, notifications: Seq[String]) {
for (notification <- notifications.sorted) {
val notificationNode = seqNodeRoot + "/" + notification
val (data, stat) = zkUtils.readDataMaybeNull(notificationNode)
// A missing znode (None) was already deleted — nothing to purge
if (data.isDefined) {
if (now - stat.getCtime > changeExpirationMs) {
debug(s"Purging change notification $notificationNode")
zkUtils.deletePath(notificationNode)
}
}
}
}
/* get the change number from a change notification znode: the name is seqNodePrefix + number */
private def changeNumber(name: String): Long = name.substring(seqNodePrefix.length).toLong
/**
* A listener that gets invoked when a node is created to notify changes.
* Exceptions are logged rather than propagated so the watcher thread stays alive.
*/
object NodeChangeListener extends IZkChildListener {
override def handleChildChange(path: String, notifications: java.util.List[String]) {
try {
import scala.collection.JavaConverters._
if (notifications != null)
processNotifications(notifications.asScala.sorted)
} catch {
case e: Exception => error(s"Error processing notification change for path = $path and notification= $notifications :", e)
}
}
}
}
| eljefe6a/kafka | core/src/main/scala/kafka/common/ZkNodeChangeNotificationListener.scala | Scala | apache-2.0 | 5,237 |
package org.jetbrains.plugins.scala.testingSupport.specs2.specs2_2_12_4_0_0
import org.jetbrains.plugins.scala.SlowTests
import org.jetbrains.plugins.scala.testingSupport.specs2.SCL7228Test
import org.junit.experimental.categories.Category
/**
* Runs the shared SCL7228 regression scenario against the Specs2 2.12 / 4.0.0
* combination by mixing in the version-specific base trait.
* @author Roman.Shein
* @since 11.01.2015.
*/
@Category(Array(classOf[SlowTests]))
class Specs2_2_12_4_0_0_SCL7228Test extends SCL7228Test with Specs2_2_12_4_0_0_Base
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/specs2/specs2_2_12_4_0_0/Specs2_2_12_4_0_0_SCL7228Test.scala | Scala | apache-2.0 | 416 |
package com.equalinformation.bpm.poc.consumer.scala
import vaadin.scala._
import vaadin.scala.server.ScaladinRequest
/**
* Created by bpupadhyaya on 11/28/15.
*/
class ActivitiConsumerUI extends UI(title = "Activiti Consumer") {
// Root layout: header block on top, navigator-managed content below
val layout = new VerticalLayout {
sizeFull()
}
// Fixed-height header area holding the title banner and menu bar
val headerLayout = new VerticalLayout {
width = 100 pct;
height = 70 px;
}
// Content area the Navigator swaps views into
val contentLayout = new VerticalLayout {
sizeFull()
margin = true
}
override def init(request: ScaladinRequest) {
// All four view names currently map to fresh SampleView instances
val navigator = new Navigator(this, contentLayout) {
addView(SampleView.VIEW1, new SampleView)
//addView(SampleView.VIEW2, classOf[SampleView])
addView(SampleView.VIEW2, new SampleView)
addView(SampleView.VIEW3, new SampleView)
addView(SampleView.VIEW4, new SampleView)
}
navigator_=(navigator)
content_=(layout)
//Check what headers needed
headerLayout.add(buildApplicationHeader)
headerLayout.add(buildApplicationMenu(navigator))
layout.add(headerLayout)
// ratio = 1 lets the content area take all remaining vertical space
layout.add(contentLayout, ratio = 1)
}
// Top banner showing the application title
private def buildApplicationHeader: HorizontalLayout = new HorizontalLayout {
width = 100 pct;
height = 45 px;
add(alignment = Alignment.MiddleLeft, component = new Label {
value = "Activiti Consumer Application"
})
// add(alignment = Alignment.MiddleCenter, component = new Label {
// value = "Activiti Consumer"
// })
// add(alignment = Alignment.MiddleRight, component = new Label {
// value = "Activiti Message"
// })
}
// Menu bar: only "Reports" and "Manage" navigate; most task/process items are stubs
private def buildApplicationMenu(navigator: Navigator): HorizontalLayout = new HorizontalLayout {
width = 100 pct;
height = 25 px;
val menuBar = new MenuBar {
// addItem("Tasks", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW1))
// addItem("Processes", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW2))
// addItem("Reports", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW3))
// addItem("Manage", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW4))
val tasks = addItem("Tasks")
val processes = addItem("Processes")
val reports = addItem("Reports", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW3))
val manage = addItem("Manage", (e: MenuBar.MenuItem) => navigator.navigateTo(SampleView.VIEW4))
// NOTE(review): createTaskTables() builds a layout but its result is discarded here,
// so clicking "Inbox" shows nothing — TODO confirm where the tables should be attached
val inbox = tasks.addItem("Inbox", (e: MenuBar.MenuItem) => createTaskTables())
val queued = tasks.addItem("Queued")
val involved = tasks.addItem("Involved")
val archived = tasks.addItem("Archived")
val myInstances = processes.addItem("My instances")
val deployedProcessDefinition = processes.addItem("Deployed process definitions")
}
addComponent(menuBar)
}
// Builds (but does not attach) a layout holding the inbox summary and detail tables
private def createTaskTables(): VerticalLayout = new VerticalLayout {
//TODO
val inboxTaskSummaryTable = new Table() {
}
addComponent(inboxTaskSummaryTable)
val inboxTaskDetailTable = new Table() {
}
addComponent(inboxTaskDetailTable)
}
}
object SampleView {
// Navigator view names; VIEW2 and INBOX_VIEW share the "ClassBasedView" id,
// the others are the empty (default) view name
val VIEW1 = ""
val VIEW2 = "ClassBasedView"
val VIEW3 = ""
val VIEW4 = ""
val INBOX_VIEW = "ClassBasedView"
// Number of times VIEW2 has been entered (incremented from SampleView.enter)
private var count = 1
// Increments the counter and returns the new value
private def inc = {
count += 1; count
}
}
class SampleView extends VerticalLayout with Navigator.View {
// val label = Label("Label for SampleView")
// Placeholder label; its text is currently never set (see commented line in enter)
val label = Label("")
// Builds the view's layout once, at construction time
def init() {
val layout = new VerticalLayout() {
sizeFull()
add(label)
}
layout.margin = true
add(layout)
}
init()
// Called by the Navigator each time this view becomes active;
// bumps the shared counter only for VIEW2 and pops a notification
override def enter(event: Navigator.ViewChangeEvent) {
val viewName = event.viewName.getOrElse("")
if (viewName == SampleView.VIEW2) {
SampleView.inc
}
// label.value = "Test message from view " + viewName + ", the view has been created " + SampleView.count + " times."
Notification.show("Entering view " + viewName)
}
} | bpupadhyaya/ActivitiConsumerScala | src/main/java/com/equalinformation/bpm/poc/consumer/scala/ActivitiConsumerUI.scala | Scala | mit | 3,921 |
package controllers
import dao.{AuthorityDao, RoomDao}
import database.{RoomDb, RoomTable}
import models.{Room, RoomProtocol}
import org.joda.time.DateTime
import play.api.libs.json.{Reads, Writes}
import play.api.mvc.ControllerComponents
import security.LWMRole.{Admin, EmployeeRole, StudentRole}
import security.SecurityActionChain
import java.util.UUID
import javax.inject.{Inject, Singleton}
import scala.util.{Failure, Try}
object RoomController {
// Query-parameter name accepted by makeTableFilter for filtering rooms by label
lazy val labelAttribute = "label"
}
@Singleton
final class RoomController @Inject()(
cc: ControllerComponents,
val authorityDao: AuthorityDao,
val abstractDao: RoomDao,
val securedAction: SecurityActionChain
) extends AbstractCRUDController[RoomProtocol, RoomTable, RoomDb, Room](cc) {
import RoomController._
override protected implicit val writes: Writes[Room] = Room.writes
override protected implicit val reads: Reads[RoomProtocol] = RoomProtocol.reads
// Only filtering by label is supported; any other attribute yields a Failure
override protected def makeTableFilter(attribute: String, value: String): Try[TableFilterPredicate] =
(attribute, value) match {
case (`labelAttribute`, l) => l.makeLabelEqualsFilter
case _ => Failure(new Throwable(s"Unknown attribute $attribute"))
}
// Maps the incoming protocol to a DB row; keeps the existing id on update, otherwise generates one
override protected def toDbModel(protocol: RoomProtocol, existingId: Option[UUID]): RoomDb = {
import utils.date.DateTimeOps.DateTimeConverter
RoomDb(protocol.label, protocol.description, protocol.capacity, DateTime.now.timestamp, None, existingId getOrElse UUID.randomUUID)
}
// Read access for students/employees; every other (mutating) action requires Admin
override protected def contextFrom: PartialFunction[Rule, SecureContext] = {
case Get => PartialSecureBlock(List(StudentRole, EmployeeRole))
case GetAll => PartialSecureBlock(List(EmployeeRole))
case _ => PartialSecureBlock(List(Admin))
}
// Id-scoped (restricted) access is never granted for rooms
override protected def restrictedContext(restrictionId: String): PartialFunction[Rule, SecureContext] = forbiddenAction()
} | THK-ADV/lwm-reloaded | app/controllers/RoomController.scala | Scala | mit | 1,879 |
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package patterns
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 28.02.2008
*/
/*
 * Pattern ::= Pattern1 {'|' Pattern1}
 */
object Pattern extends Pattern {
override protected def pattern1 = Pattern1
}
trait Pattern {
  protected def pattern1: Pattern1

  /** Parses `Pattern ::= Pattern1 {'|' Pattern1}`.
   *
   * Wraps the alternatives in a PATTERN composite node when at least one `|`
   * separator was consumed; otherwise the marker is dropped so a lone Pattern1
   * keeps its own node. A failed alternative after `|` reports an error but
   * parsing of further alternatives continues.
   *
   * @return true iff at least the first Pattern1 parsed successfully
   */
  def parse(builder: ScalaPsiBuilder): Boolean = {
    val compositeMarker = builder.mark
    if (!pattern1.parse(builder)) {
      compositeMarker.drop()
      false
    } else {
      var sawAlternative = false
      while (builder.getTokenText == "|") {
        sawAlternative = true
        builder.advanceLexer() // consume the '|'
        if (!pattern1.parse(builder)) {
          builder error ScalaBundle.message("wrong.pattern")
        }
      }
      if (sawAlternative) compositeMarker.done(ScalaElementTypes.PATTERN)
      else compositeMarker.drop()
      true
    }
  }
} | loskutov/intellij-scala | src/org/jetbrains/plugins/scala/lang/parser/parsing/patterns/Pattern.scala | Scala | apache-2.0 | 946 |
package org.http4s
package server
import java.net.{Inet4Address, Inet6Address, InetSocketAddress}
import org.log4s.getLogger
abstract class Server[F[_]] {
private[this] val logger = getLogger
// Local socket address the server is bound to
def address: InetSocketAddress
// True when the server itself terminates TLS (https)
def isSecure: Boolean
/** Base URI derived from `address` and `isSecure`: scheme http/https, host
* rendered from the bound IP (IPv4 or IPv6; any other address type is logged
* with a warning and rendered as a registered name), path "/".
*/
def baseUri: Uri = Uri(
scheme = Some(if (isSecure) Uri.Scheme.https else Uri.Scheme.http),
authority = Some(
Uri.Authority(
host = address.getAddress match {
case ipv4: Inet4Address =>
Uri.Ipv4Address.fromInet4Address(ipv4)
case ipv6: Inet6Address =>
Uri.Ipv6Address.fromInet6Address(ipv6)
case weird =>
logger.warn(s"Unexpected address type ${weird.getClass}: $weird")
Uri.RegName(weird.getHostAddress)
},
port = Some(address.getPort)
)),
path = "/"
)
}
| ChristopherDavenport/http4s | server/src/main/scala/org/http4s/server/Server.scala | Scala | apache-2.0 | 842 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.counter.util
object CartesianProduct {
  /** Returns the n-ary Cartesian product of the given lists.
   *
   * Each result element picks one value from each input list, in input order;
   * the product of zero lists is `List(Nil)` (one empty combination), and any
   * empty input list makes the whole product empty.
   */
  def apply[T](xss: List[List[T]]): List[List[T]] =
    xss.foldRight(List(List.empty[T])) { (xs, tails) =>
      for (x <- xs; tail <- tails) yield x :: tail
    }
}
| daewon/incubator-s2graph | s2counter_core/src/main/scala/org/apache/s2graph/counter/util/CartesianProduct.scala | Scala | apache-2.0 | 1,037 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.remote.serialization
import scala.collection.immutable
import org.spark_project.protobuf.ByteString
import akka.actor.ActorSelectionMessage
import akka.actor.ExtendedActorSystem
import akka.actor.SelectChildName
import akka.actor.SelectChildPattern
import akka.actor.SelectParent
import akka.actor.SelectionPathElement
import akka.remote.ContainerFormats
import akka.serialization.SerializationExtension
import akka.serialization.Serializer
class MessageContainerSerializer(val system: ExtendedActorSystem) extends Serializer {
// Serializer id this serializer is registered under
def identifier: Int = 6
// No manifest needed: the envelope itself carries serializer id and optional manifest
def includeManifest: Boolean = false
// Only ActorSelectionMessage envelopes are handled; anything else is a programming error
def toBinary(obj: AnyRef): Array[Byte] = obj match {
case sel: ActorSelectionMessage ⇒ serializeSelection(sel)
case _ ⇒ throw new IllegalArgumentException(s"Cannot serialize object of type [${obj.getClass.getName}]")
}
import ContainerFormats.PatternType._
private def serializeSelection(sel: ActorSelectionMessage): Array[Byte] = {
val builder = ContainerFormats.SelectionEnvelope.newBuilder()
val message = sel.msg.asInstanceOf[AnyRef]
val serializer = SerializationExtension(system).findSerializerFor(message)
builder.
setEnclosedMessage(ByteString.copyFrom(serializer.toBinary(message))).
setSerializerId(serializer.identifier).
setWildcardFanOut(sel.wildcardFanOut)
if (serializer.includeManifest)
builder.setMessageManifest(ByteString.copyFromUtf8(message.getClass.getName))
sel.elements.foreach {
case SelectChildName(name) ⇒
builder.addPattern(buildPattern(Some(name), CHILD_NAME))
case SelectChildPattern(patternStr) ⇒
builder.addPattern(buildPattern(Some(patternStr), CHILD_PATTERN))
case SelectParent ⇒
builder.addPattern(buildPattern(None, PARENT))
}
builder.build().toByteArray
}
private def buildPattern(matcher: Option[String], tpe: ContainerFormats.PatternType): ContainerFormats.Selection.Builder = {
val builder = ContainerFormats.Selection.newBuilder().setType(tpe)
matcher foreach builder.setMatcher
builder
}
def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = {
val selectionEnvelope = ContainerFormats.SelectionEnvelope.parseFrom(bytes)
val msg = SerializationExtension(system).deserialize(
selectionEnvelope.getEnclosedMessage.toByteArray,
selectionEnvelope.getSerializerId,
if (selectionEnvelope.hasMessageManifest)
Some(system.dynamicAccess.getClassFor[AnyRef](selectionEnvelope.getMessageManifest.toStringUtf8).get) else None).get
import scala.collection.JavaConverters._
val elements: immutable.Iterable[SelectionPathElement] = selectionEnvelope.getPatternList.asScala.map { x ⇒
x.getType match {
case CHILD_NAME ⇒ SelectChildName(x.getMatcher)
case CHILD_PATTERN ⇒ SelectChildPattern(x.getMatcher)
case PARENT ⇒ SelectParent
}
}(collection.breakOut)
val wildcardFanOut = if (selectionEnvelope.hasWildcardFanOut) selectionEnvelope.getWildcardFanOut else false
ActorSelectionMessage(msg, elements, wildcardFanOut)
}
}
| Fincore/org.spark-project.akka | remote/src/main/scala/akka/remote/serialization/MessageContainerSerializer.scala | Scala | mit | 3,240 |
package net.sansa_stack.rdf.flink.stats
import java.io.StringWriter
import net.sansa_stack.rdf.flink.utils.{Logging, NodeKey}
import org.apache.flink.api.scala.{DataSet, _}
import org.apache.flink.core.fs.FileSystem
import org.apache.jena.graph.{Node, Triple}
import org.apache.jena.vocabulary.{OWL, RDF, RDFS}
/**
* A Distributed implementation of RDF Statisctics using Apache Flink.
*
* @author Gezim Sejdiu
*/
object RDFStatistics extends Serializable with Logging {
  val env = ExecutionEnvironment.getExecutionEnvironment
  /**
   * Compute distributed RDF dataset statistics.
   *
   * @param triples DataSet graph
   * @return VoID description of the given dataset
   */
  def run(triples: DataSet[Triple]): DataSet[String] = {
    // Consistently use the shared `env` member: the original passed a freshly
    // obtained execution environment to Used_Classes only, which was
    // inconsistent with every other criterion below.
    Used_Classes(triples, env).Voidify
      .union(DistinctEntities(triples, env).Voidify)
      .union(DistinctSubjects(triples, env).Voidify)
      .union(DistinctObjects(triples, env).Voidify)
      .union(PropertyUsage(triples, env).Voidify)
      .union(SPO_Vocabularies(triples, env).Voidify)
  }
  /**
   * Assembles the full VoID document (prefixes + dataset header + collected
   * statistics fragments + closing triple). Shared by [[voidify]] and
   * [[print]], which previously duplicated this code verbatim.
   *
   * Note: this collects `stats` to the driver, so it must only be used for
   * small summary datasets.
   */
  private def buildVoid(stats: DataSet[String], source: String): String = {
    val prefix =
      """@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
      @prefix void: <http://rdfs.org/ns/void#> .
      @prefix void-ext: <http://stats.lod2.eu/rdf/void-ext/> .
      @prefix qb: <http://purl.org/linked-data/cube#> .
      @prefix dcterms: <http://purl.org/dc/terms/> .
      @prefix ls-void: <http://stats.lod2.eu/rdf/void/> .
      @prefix ls-qb: <http://stats.lod2.eu/rdf/qb/> .
      @prefix ls-cr: <http://stats.lod2.eu/rdf/qb/criteria/> .
      @prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
      @prefix xstats: <http://example.org/XStats#> .
      @prefix foaf: <http://xmlns.com/foaf/0.1/> .
      @prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> ."""
    val src = "\\n<http://stats.lod2.eu/rdf/void/?source=" + source + ">\\n"
    val end = "\\na void:Dataset ."
    prefix.concat(src).concat(stats.setParallelism(1).collect().mkString).concat(end)
  }
  /**
   * Voidify RDF dataset based on the Vocabulary of Interlinked Datasets (VoID) [[https://www.w3.org/TR/void/]]
   *
   * @param stats given RDF dataset statistics
   * @param source name of the Dataset:source--usually the file's name
   * @param output the directory to save RDF dataset summary
   */
  def voidify(stats: DataSet[String], source: String, output: String): Unit = {
    val voidify = buildVoid(stats, source)
    println("\\n" + voidify)
    // Write the summary as a single file (parallelism 1). The original also
    // round-tripped the string through a StringWriter, which was a no-op.
    val vidifyStats = env.fromCollection(Seq(voidify))
    vidifyStats.writeAsText(output, writeMode = FileSystem.WriteMode.OVERWRITE).setParallelism(1)
  }
  /**
   * Prints the Voidify version of the given RDF dataset
   *
   * @param stats given RDF dataset statistics
   * @param source name of the Dataset:source--usually the file's name
   */
  def print(stats: DataSet[String], source: String): Unit =
    println("\\n" + buildVoid(stats, source))
  /**
   * 17. Literals criterion
   *
   * @param triples Dataset of triples
   * @return triples whose object is a literal.
   */
  def literals(triples: DataSet[Triple]): DataSet[Triple] =
    triples.filter(_.getObject.isLiteral())
  /**
   * 18. Blanks as subject criterion
   *
   * @param triples DataSet of triples
   * @return triples where blank nodes are used as subjects.
   */
  def blanksAsSubject(triples: DataSet[Triple]): DataSet[Triple] =
    triples.filter(_.getSubject.isBlank())
  /**
   * 19. Blanks as object criterion
   *
   * @param triples DataSet of triples
   * @return triples where blank nodes are used as objects.
   */
  def blanksAsObject(triples: DataSet[Triple]): DataSet[Triple] =
    triples.filter(_.getObject.isBlank())
  /**
   * 20. Datatypes criterion
   *
   * @param triples DataSet of triples
   * @return histogram of datatypes used for literals.
   */
  def dataTypes(triples: DataSet[Triple]): DataSet[(String, Int)] = {
    // Guard against a null datatype before dereferencing it; for typed
    // literals the behavior is unchanged.
    triples.filter { triple =>
      triple.getObject.isLiteral &&
        triple.getObject.getLiteralDatatype != null &&
        !triple.getObject.getLiteralDatatype.getURI.isEmpty
    }
      .map(triple => (triple.getObject.getLiteralDatatype.getURI, 1))
      .groupBy(0)
      .sum(1)
  }
  /**
   * 21. Languages criterion
   *
   * @param triples DataSet of triples
   * @return histogram of languages used for literals.
   */
  def languages(triples: DataSet[Triple]): DataSet[(String, Int)] = {
    // getLiteralLanguage returns "" when no language tag is present.
    triples.filter(triple => (triple.getObject.isLiteral && !triple.getObject.getLiteralLanguage.isEmpty))
      .map(triple => (triple.getObject.getLiteralLanguage, 1))
      .groupBy(0)
      .sum(1)
  }
  /**
   * 24. Typed subjects criterion.
   *
   * @param triples DataSet of triples
   * @return list of typed subjects (subjects of rdf:type triples).
   */
  def typedSubjects(triples: DataSet[Triple]): DataSet[Node] =
    triples.filter(triple => triple.predicateMatches(RDF.`type`.asNode())).map(_.getSubject)
  /**
   * 24. Labeled subjects criterion.
   *
   * @param triples DataSet of triples
   * @return list of labeled subjects (subjects of rdfs:label triples).
   */
  def labeledSubjects(triples: DataSet[Triple]): DataSet[Node] =
    triples.filter(triple => triple.predicateMatches(RDFS.label.asNode())).map(_.getSubject)
  /**
   * 25. SameAs criterion.
   *
   * @param triples DataSet of triples
   * @return list of triples with owl#sameAs as predicate
   */
  def sameAs(triples: DataSet[Triple]): DataSet[Triple] =
    triples.filter(_.predicateMatches(OWL.sameAs.asNode()))
  /**
   * 26. Links criterion.
   *
   * Computes the frequencies of links between entities of different namespaces. This measure is directed, i.e.
   * a link from `ns1 -> ns2` is different from `ns2 -> ns1`.
   *
   * @param triples DataSet of triples
   * @return list of namespace combinations and their frequencies.
   */
  def links(triples: DataSet[Triple]): DataSet[(String, String, Int)] = {
    triples
      .filter(triple => (triple.getSubject.isURI && triple.getObject.isURI) && triple.getSubject.getNameSpace != triple.getObject.getNameSpace)
      .map(triple => ((triple.getSubject.getNameSpace, triple.getObject.getNameSpace), 1))
      .groupBy(0)
      .sum(1)
      .map(e => (e._1._1, e._1._2, e._2))
  }
}
/**
 * Criterion: used classes — objects of rdf:type triples, with usage counts,
 * rendered as a VoID class partition.
 */
class Used_Classes(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  // ?p=rdf:type && isIRI(?o)
  def Filter(): DataSet[Triple] = triples.filter(f =>
    f.getPredicate.matches(RDF.`type`.asNode()) && f.getObject.isURI())
  // M[?o]++  — counts per class IRI; NodeKey wraps the Node so Flink can key on it.
  def Action(): DataSet[(Node, Int)] = Filter().map(f => NodeKey(f.getObject))
    .map(f => (f, 1))
    .groupBy(0)
    .sum(1)
    .map(f => (f._1.node, f._2))
  // top(M,100)
  // NOTE(review): sortBy(_._2) sorts ascending, so take(100) keeps the 100
  // *least* used classes — looks contrary to "top"; confirm intended order.
  def PostProc(): Seq[(Node, Int)] = Action().collect().sortBy(_._2).take(100)
  // Renders a void:classPartition plus a void:classes count fragment.
  def Voidify(): DataSet[String] = {
    var triplesString = new Array[String](1)
    triplesString(0) = "\\nvoid:classPartition "
    val classes = env.fromCollection(PostProc())
    val vc = classes.map(t => "[ \\nvoid:class " + "<" + t._1 + ">; \\nvoid:triples " + t._2 + ";\\n], ")
    var cl_a = new Array[String](1)
    cl_a(0) = "\\nvoid:classes " + Action().map(f => f._1).distinct().count
    val c_p = env.fromCollection(triplesString)
    val c = env.fromCollection(cl_a)
    c.union(c_p).union(vc)
  }
}
object Used_Classes {
  // Factory mirroring the other criterion companions.
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): Used_Classes = new Used_Classes(triples, env)
}
/**
 * Criterion: classes defined — distinct IRI subjects declared as
 * rdfs:Class or owl:Class.
 */
class Classes_Defined(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  // ?p=rdf:type && isIRI(?s) && (?o=rdfs:Class || ?o=owl:Class)
  //
  // The previous filter had two defects w.r.t. the criterion above:
  //  * `&&` binds tighter than `||`, so the subject test guarded only the
  //    owl:Class disjunct;
  //  * the subject test was negated (`!f.getSubject.isURI()`), contradicting
  //    the stated `isIRI(?s)`.
  def Filter(): DataSet[Triple] = triples.filter { f =>
    f.getPredicate.matches(RDF.`type`.asNode()) &&
      f.getSubject.isURI() &&
      (f.getObject.matches(RDFS.Class.asNode()) || f.getObject.matches(OWL.Class.asNode()))
  }
  // Distinct subjects that are declared as classes (keyed by hash code).
  def Action(): DataSet[Node] = Filter().map(_.getSubject).distinct(f => f.hashCode())
  // Number of distinct declared classes.
  def PostProc(): Long = Action().count()
  // Renders the count as a void:classes fragment.
  def Voidify(): DataSet[String] =
    env.fromCollection(Seq("\\nvoid:classes " + PostProc() + ";"))
}
object Classes_Defined {
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): Classes_Defined = new Classes_Defined(triples, env)
}
/**
 * Criterion: properties defined — distinct IRI subjects declared as
 * owl:ObjectProperty or rdf:Property.
 */
class PropertiesDefined(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  // ?p=rdf:type && isIRI(?s) && (?o=owl:ObjectProperty || ?o=rdf:Property)
  //
  // Fixed w.r.t. the previous filter: `&&` binds tighter than `||` (the
  // subject test guarded only the rdf:Property disjunct) and the subject
  // test was negated, excluding exactly the IRI subjects it should keep.
  def Filter(): DataSet[Triple] = triples.filter { f =>
    f.getPredicate.matches(RDF.`type`.asNode()) &&
      f.getSubject.isURI() &&
      (f.getObject.matches(OWL.ObjectProperty.asNode()) || f.getObject.matches(RDF.Property.asNode()))
  }
  // The defined properties are the *subjects* of the declaration triples.
  // (The previous code mapped getPredicate, which after the filter is always
  // rdf:type, so the distinct count could never exceed 1.)
  def Action(): DataSet[Node] = Filter().map(_.getSubject).distinct(_.hashCode())
  // Number of distinct declared properties.
  def PostProc(): Long = Action().count()
  // Renders the count as a void:properties fragment.
  def Voidify(): DataSet[String] =
    env.fromCollection(Seq("\\nvoid:properties " + PostProc() + ";"))
}
object PropertiesDefined {
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): PropertiesDefined = new PropertiesDefined(triples, env)
}
/**
 * Criterion: property usage — frequency of each predicate, rendered as a
 * VoID property partition.
 */
class PropertyUsage(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  // Every triple contributes; no filtering for this criterion.
  def Filter(): DataSet[Triple] = triples
  // M[?p]++  — counts per predicate; NodeKey wraps the Node so Flink can key on it.
  def Action(): DataSet[(Node, Int)] = Filter().map(f => NodeKey(f.getPredicate))
    .map(f => (f, 1))
    .groupBy(0)
    .sum(1)
    .map(f => (f._1.node, f._2))
  // top(M,100)
  // NOTE(review): sortBy(_._2) sorts ascending, so take(100) keeps the 100
  // *least* used properties — looks contrary to "top"; confirm intended order.
  def PostProc(): Seq[(Node, Int)] = Action().collect().sortBy(_._2).take(100)
  // Renders a void:propertyPartition plus a void:properties count fragment.
  def Voidify(): DataSet[String] = {
    var triplesString = new Array[String](1)
    triplesString(0) = "\\nvoid:propertyPartition "
    val properties = env.fromCollection(PostProc())
    val vp = properties.map(t => "[ \\nvoid:property " + "<" + t._1 + ">; \\nvoid:triples " + t._2 + ";\\n], ")
    var pl_a = new Array[String](1)
    pl_a(0) = "\\nvoid:properties " + Action().map(f => f._1).distinct().count
    val c_p = env.fromCollection(triplesString)
    val p = env.fromCollection(pl_a)
    p.union(c_p).union(vp)
  }
}
object PropertyUsage {
  // Factory mirroring the other criterion companions.
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): PropertyUsage = new PropertyUsage(triples, env)
}
/**
 * Criterion: distinct entities — triples whose subject, predicate and
 * object are all IRIs, de-duplicated by hash code.
 */
class DistinctEntities(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  /** Keeps only triples made exclusively of IRIs. */
  def Filter(): DataSet[Triple] =
    triples.filter { t =>
      t.getSubject.isURI() && t.getPredicate.isURI() && t.getObject.isURI()
    }
  /** De-duplicates the filtered triples, keyed by hash code. */
  def Action(): DataSet[Triple] = Filter().distinct(t => t.hashCode())
  /** Counts the distinct all-IRI triples. */
  def PostProc(): Long = Action().count()
  /** Renders the count as a void:entities fragment. */
  def Voidify(): DataSet[String] = {
    val fragment = "\\nvoid:entities " + PostProc() + ";"
    env.fromCollection(Array(fragment))
  }
}
object DistinctEntities {
  /** Factory mirroring the other criterion companions. */
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): DistinctEntities = new DistinctEntities(triples, env)
}
/**
 * Criterion: distinct subjects — triples with an IRI subject,
 * de-duplicated by hash code.
 */
class DistinctSubjects(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  /** Keeps only triples whose subject is an IRI. */
  def Filter(): DataSet[Triple] = triples.filter(_.getSubject.isURI())
  /** De-duplicates the filtered triples, keyed by hash code. */
  def Action(): DataSet[Triple] = Filter().distinct(_.hashCode())
  /** Counts the distinct IRI-subject triples. */
  def PostProc(): Long = Action().count()
  /** Renders the count as a void:distinctSubjects fragment. */
  def Voidify(): DataSet[String] = {
    val fragment = "\\nvoid:distinctSubjects " + PostProc() + ";"
    env.fromCollection(Array(fragment))
  }
}
object DistinctSubjects {
  /** Factory mirroring the other criterion companions. */
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): DistinctSubjects = new DistinctSubjects(triples, env)
}
/**
 * Criterion: distinct objects — triples with an IRI object,
 * de-duplicated by hash code.
 */
class DistinctObjects(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  /** Keeps only triples whose object is an IRI. */
  def Filter(): DataSet[Triple] = triples.filter(_.getObject.isURI())
  /** De-duplicates the filtered triples, keyed by hash code. */
  def Action(): DataSet[Triple] = Filter().distinct(t => t.hashCode())
  /** Counts the distinct IRI-object triples. */
  def PostProc(): Long = Action().count()
  /** Renders the count as a void:distinctObjects fragment. */
  def Voidify(): DataSet[String] = {
    val fragment = "\\nvoid:distinctObjects " + PostProc() + ";"
    env.fromCollection(Array(fragment))
  }
}
object DistinctObjects {
  /** Factory mirroring the other criterion companions. */
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): DistinctObjects = new DistinctObjects(triples, env)
}
/**
 * Criterion: vocabularies used in subject/predicate/object positions,
 * derived from the namespace of each IRI.
 */
class SPO_Vocabularies(triples: DataSet[Triple], env: ExecutionEnvironment) extends Serializable with Logging {
  // No filtering; each position-specific Action applies its own IRI check.
  def Filter(): DataSet[Triple] = triples
  // NOTE(review): this maps every triple to the namespace of the *given*
  // node, ignoring the triple itself — so the result repeats one constant
  // namespace. Confirm whether a per-triple accessor was intended.
  def Action(node: Node): DataSet[String] = Filter().map(f => node.getNameSpace())
  // Namespaces of IRI subjects.
  def SubjectVocabulariesAction(): DataSet[String] = Filter().filter(f => f.getSubject.isURI()).map(f => (f.getSubject.getNameSpace()))
  // Histogram of subject namespaces.
  def SubjectVocabulariesPostProc(): AggregateDataSet[(String, Int)] = SubjectVocabulariesAction()
    .map(f => (f, 1)).groupBy(0)
    .sum(1)
  // Namespaces of IRI predicates.
  def PredicateVocabulariesAction(): DataSet[String] = Filter().filter(f => f.getPredicate.isURI()).map(f => (f.getPredicate.getNameSpace()))
  // Histogram of predicate namespaces.
  def PredicateVocabulariesPostProc(): AggregateDataSet[(String, Int)] = PredicateVocabulariesAction()
    .map(f => (f, 1)).groupBy(0)
    .sum(1)
  // Namespaces of IRI objects.
  def ObjectVocabulariesAction(): DataSet[String] = Filter().filter(f => f.getObject.isURI()).map(f => (f.getObject.getNameSpace()))
  // Histogram of object namespaces.
  def ObjectVocabulariesPostProc(): AggregateDataSet[(String, Int)] = ObjectVocabulariesAction()
    .map(f => (f, 1)).groupBy(0)
    .sum(1)
  // NOTE(review): same concern as Action(node) — the triple is ignored.
  def PostProc(node: Node): AggregateDataSet[(String, Int)] = Filter().map(f => node.getNameSpace())
    .map(f => (f, 1)).groupBy(0)
    .sum(1)
  // Renders up to 15 distinct namespaces as a void:vocabulary fragment.
  def Voidify(): DataSet[String] = {
    var ents = new Array[String](1)
    ents(0) = "\\nvoid:vocabulary <" + SubjectVocabulariesAction().union(PredicateVocabulariesAction()).union(ObjectVocabulariesAction()).distinct().collect.take(15).mkString(">, <") + ">;"
    env.fromCollection(ents)
  }
}
object SPO_Vocabularies {
  // Factory mirroring the other criterion companions.
  def apply(triples: DataSet[Triple], env: ExecutionEnvironment): SPO_Vocabularies = new SPO_Vocabularies(triples, env)
}
| SANSA-Stack/SANSA-RDF | sansa-rdf/sansa-rdf-flink/src/main/scala/net/sansa_stack/rdf/flink/stats/RDFStatistics.scala | Scala | apache-2.0 | 15,074 |
/*
* Copyright (C) 2016 Nikos Katzouris
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package oled.distributed
import java.util.UUID
import akka.actor.{ActorContext, ActorSelection}
import app.runutils.RunningOptions
import com.mongodb.casbah.Imports._
import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.casbah.{MongoClient, MongoCollection}
import logic.Clause
import logic.Examples.Example
import oled.distributed.Structures.{ClauseStats, StatsReply}
import utils.DataUtils.DataAsIntervals
import utils.Database
import oled.functions.DistributedOLEDFunctions._
/**
* Created by nkatz on 2/15/17.
*/
/**
 * Utility helpers for distributed OLED: MongoDB data retrieval/staging,
 * actor lookup, and clause bookkeeping across learning nodes.
 */
object Utils {
  /**
   * Streams CAVIAR examples from MongoDB in chunks, flattening each chunk
   * into one list of annotation + narrative atoms.
   */
  def getCaviarData(mc: MongoClient, dbName: String, chunkSize: Int): Iterator[List[String]] = {
    val collection = mc(dbName)("examples")
    collection.find().map(x => Example(x)).grouped(chunkSize).map(x =>
      x.foldLeft(List[String]())((z, y) => z ++ y.annotation ++ y.narrative))
  }
  //, dataSize: Double = Double.PositiveInfinity
  /* utility function for retrieving data */
  def getDataFromDB(dbName: String, HLE: String, chunkSize: Int,
                    intervals: DataAsIntervals = DataAsIntervals()): Iterator[Example] = {
    // No worry about removing prior annotation from the examples, since in any case inertia
    // is not used during learning. Even if a pair is passed where in both times
    // there is positive annotation, the first positive example will be covered by
    // the initialTime axiom, while the second positive will be covered by abduction (no inertia).
    val mc = MongoClient()
    val collection = mc(dbName)("examples")
    if (intervals.isEmpty) {
      //collection.createIndex(MongoDBObject("time" -> 1))
      // Full scan in time order, keeping only annotation atoms for this HLE.
      val data = collection.find().sort(MongoDBObject("time" -> 1)).map { x =>
        val e = Example(x)
        new Example(annot = e.annotation filter (_.contains(HLE)), nar = e.narrative, _time = e.time)
      }
      // Merge each chunk into a single example stamped with the chunk's first time point.
      val dataChunked = data.grouped(chunkSize)
      val dataIterator = dataChunked.map { x =>
        val merged = x.foldLeft(Example()) { (z, y) =>
          new Example(annot = z.annotation ++ y.annotation, nar = z.narrative ++ y.narrative, _time = x.head.time)
        }
        merged
      }
      dataIterator
    } else {
      // Training intervals were supplied: delegate interval-based retrieval.
      utils.CaviarUtils.getDataFromIntervals(collection, HLE, intervals.trainingSet, chunkSize)
    }
  }
  /**
   * Copies the given training intervals from one Mongo DB into a fresh,
   * uniquely-named DB, chunking/merging examples on the way.
   * Returns the name of the newly created DB.
   */
  def intervalsToDB(dbToReadFrom: String, intervals: DataAsIntervals, HLE: String,
                    chunkSize: Int, withChunking: Boolean = true) = {
    val dbToWriteTo = s"d-oled-DB-${UUID.randomUUID()}"
    val mongoClient = MongoClient()
    val collectionWriteTo = mongoClient(dbToWriteTo)("examples")
    val collectionReadFrom = mongoClient(dbToReadFrom)("examples")
    println(s"Inserting data to $dbToWriteTo")
    for (interval <- intervals.trainingSet) {
      // All examples whose time stamp falls inside this interval, in time order.
      val batch = collectionReadFrom.find("time" $gte interval.startPoint $lte interval.endPoint).
        sort(MongoDBObject("time" -> 1))
      val examples = batch.map(x => Example(x)) //.toList
      // Keep only the annotation atoms that concern this HLE.
      val HLExmpls = examples map { x =>
        val a = x.annotation filter (_.contains(HLE))
        new Example(annot = a, nar = x.narrative, _time = x.time)
      }
      // Overlapping windows (step = chunkSize - 1) so consecutive chunks share a time point.
      val chunked = if (withChunking) HLExmpls.sliding(chunkSize, chunkSize - 1) else HLExmpls.sliding(HLExmpls.length)
      val out = chunked map { x =>
        val merged = x.foldLeft(Example()) { (z, y) =>
          new Example(annot = z.annotation ++ y.annotation, nar = z.narrative ++ y.narrative, _time = x.head.time)
        }
        merged
      }
      out.foreach{ e =>
        val entry = MongoDBObject("time" -> e._time.toInt) ++ ("annotation" -> e.annotation) ++ ("narrative" -> e.narrative)
        collectionWriteTo.insert(entry)
      }
    }
    dbToWriteTo
  }
  // Returns a cursor over all examples sorted by time.
  def getExmplIteratorSorted(collection: MongoCollection) = {
    collection.find().sort(MongoDBObject("time" -> 1))
  }
  // NOTE(review): unimplemented stub — returns Unit; confirm whether a
  // shuffled iterator was ever intended here.
  def getExmplIteratorShuffle(collection: MongoCollection) = {
  }
  // Utility function, returns a list of other Node actors
  def getOtherActors(context: ActorContext, otherNodesNames: List[String]): List[ActorSelection] = {
    otherNodesNames map (actorName => context.actorSelection(s"${context.parent.path}/$actorName"))
  }
  // Resolves a sibling actor (child of this actor's parent) by name.
  def getActorByName(context: ActorContext, name: String) = {
    context.actorSelection(s"${context.parent.path}/$name")
  }
  // Utility function, returns a new small example batch for processing
  def getNextBatch(data: Iterator[Example], processBatchBeforeMailBox: Int) = {
    data.take(processBatchBeforeMailBox)
  }
  /*
   * Decide if a clause will be expanded or not, after taking into account the new counts
   * from all nodes. clause is the clause in question, replies is a list of StatsReply objects
   * received from all nodes and the remaining parameters are for calculating the hoeffding bound.
   * This method returns a (b, c) tuple, where b is true or false, according to whether the input
   * clause will be expanded or not and c either the input clause (if b = false) or its best
   * specialization (if b = true).
   * */
  def expand_?(clause: Clause, replies: List[StatsReply], delta: Double,
               breakTiesThreshold: Double, minSeenExmpls: Int,
               currentNodeState: String, nodeName: String, params: RunningOptions, logger: org.slf4j.Logger) = {
    // A StatsReply is a reply from a node. So it should contain stats
    // for any requested clause. If a clause id is not found in a reply an exception
    // is thrown from r.getClauseStats
    val repliesGroupedByNode = (for (r <- replies) yield (r.sender, r.getClauseStats(clause.uuid))).toMap
    // update the counts per node for each node, for this clause and for each one of its refinements
    repliesGroupedByNode.keys foreach { node =>
      updateCountsPerNode(clause, node, repliesGroupedByNode, currentNodeState, nodeName)
    }
    // Re-check the clause for expansion
    expandRule(clause, delta, breakTiesThreshold, minSeenExmpls, nodeName, params, logger)
  }
  /*
   * Returns the new counts (by subtracting the old ones from the received ones)
   * for clause c and for node nodeName. The output is a new stats object with the counts,
   * along with nodeName (in order to update c.previousCountsPerNode). The replies map
   * is a (k,v) map where k is a node id and v is a stats object sent from node k for clause c.
   * */
  def updateCountsPerNode(clause: Clause, nodeName: String, replies: Map[String, Structures.Stats], currentNodeState: String, currentlyOnNode: String): Unit = {
    val receivedStats = replies.getOrElse(nodeName,
      throw new RuntimeException(s"$currentNodeState Could not find node's name $nodeName as key in the nodes-stats map. The map is $replies")
    )
    val parentClauseStats = receivedStats.parentStats
    val refinementsStats = receivedStats.refinementsStats
    clause.countsPerNode(nodeName) = parentClauseStats // update the countsPerNode map
    clause.updateTotalCounts(currentlyOnNode) // Update the accumulated counts variables
    // just to be on the safe side...
    // NOTE(review): the mismatch check below is an intentional no-op — the
    // exception is commented out, so a size mismatch is silently tolerated.
    if (refinementsStats.size != clause.refinements.length) {
      //throw new RuntimeException(s"$currentNodeState Problem with refinements reply!")
    }
    clause.refinements.foreach{ ref =>
      /*
      val refStats = refinementsStats.getOrElse(ref.uuid,
        throw new RuntimeException(s"$currentNodeState Refinement ${ref.uuid} not found in the returned stats map"))
      */
      // Missing refinement stats default to empty counts instead of failing.
      val refStats = refinementsStats.getOrElse(ref.uuid, ClauseStats())
      ref.countsPerNode(nodeName) = refStats // update the refinement's countsPerNode map
      ref.updateTotalCounts(currentlyOnNode) // Update the accumulated counts variables
    }
  }
  /**
   * Copies a clause together with (shallow copies of) its refinements,
   * preserving uuids and counters so ids stay resolvable on other nodes.
   */
  def copyClause(c: Clause) = {
    // Copies one clause's structure and counters; shares parent/support refs.
    def basicopy(clause: Clause) = {
      val copy_ = Clause(head = clause.head, body = clause.body, uuid = clause.uuid)
      //copy_.uuid = clause.uuid
      copy_.tps = clause.tps
      copy_.fps = clause.fps
      copy_.fns = clause.fns
      copy_.seenExmplsNum = clause.seenExmplsNum
      copy_.countsPerNode = clause.countsPerNode
      //copy_.generatedAtNode = clause.generatedAtNode
      // don't copy these, there's no need (nothing changes in the parent clause or the support set) and copying
      // it makes it messy to retrieve ids in other nodes
      copy_.parentClause = clause.parentClause
      copy_.supportSet = clause.supportSet
      copy_
    }
    val copy = basicopy(c)
    val refinementsCopy = c.refinements.map(ref => basicopy(ref))
    copy.refinements = refinementsCopy
    copy
  }
}
| nkatzz/OLED | src/main/scala/oled/distributed/Utils.scala | Scala | gpl-3.0 | 9,174 |
package com.afei.akkaangular
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import com.afei.akkaangular.api.Routes
import com.typesafe.config.ConfigFactory
// HTTP entry point: wires the actor system, materializer and configuration,
// then binds the Routes to the configured interface/port.
// NOTE(review): `App` uses DelayedInit; consider an explicit `main` method —
// confirm nothing relies on App's args handling before changing.
object Main extends App with Routes{
  implicit val system = ActorSystem()
  // Reuse the system's dispatcher as the ExecutionContext for route futures.
  implicit val executor = system.dispatcher
  implicit val materializer = ActorMaterializer()
  // Loads application.conf / reference.conf from the classpath.
  val config = ConfigFactory.load()
  val rootLogger = Logging(system, getClass)
  Http().bindAndHandle(routes, config.getString("http.interface"), config.getInt("http.port"))
}
| yuanqingfei/gdbscan-akka-d3js | jvm/src/main/scala/com/afei/akkaangular/Main.scala | Scala | apache-2.0 | 585 |
package neuroflow.core
/**
* @author bogdanski
* @since 03.01.16
*/
// Mixin for networks that want to validate their Settings up front.
// The self-type restricts it to Network implementations.
trait IllusionBreaker { self: Network[_, _, _] =>
  /**
   * Checks if the [[Settings]] are properly defined for this network.
   * Throws a [[neuroflow.core.IllusionBreaker.SettingsNotSupportedException]] if not. Default behavior is no op.
   */
  def checkSettings(): Unit = ()
}
object IllusionBreaker {
  // Thrown by checkSettings when the Settings are not supported by a network.
  class SettingsNotSupportedException(message: String) extends Exception(message)
  // Thrown when a network configuration is structurally unsound.
  class NotSoundException(message: String) extends Exception(message)
}
| zenecture/neuroflow | core/src/main/scala/neuroflow/core/IllusionBreaker.scala | Scala | apache-2.0 | 547 |
/*
* Copyright (c) 2012 Dame Ningen.
* All rights reserved.
*
* This file is part of Gausel.
*
* Gausel is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Gausel is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Gausel. If not, see <http://www.gnu.org/licenses/>.
*/
package gausel.test
/** Tests the parser.
*
* @author dameNingen <dame.ningen@mail.com>
* @version $Revision$
* $Id$
*/
// Manual smoke test: feeds a hand-written system description to the parser
// and echoes the parsed title/matrix/vector/rest sections.
object TestParser extends App with gausel.lib.Verb {
  // Verbose related stuff.
  val name = "TestParser"
  val verbLevel = 1
  val color = Colors.cyan
  // Input in the expected "title { ... } matrix { ... } vector { ... }" format,
  // followed by free-form trailing lines.
  val toParse =
    "title {"::
    " One two one two it's just a test."::
    "}"::
    ""::
    "matrix {"::
    " 0 a12 a13"::
    " a21 a22 0"::
    " 0 a32 a33"::
    "}"::
    ""::
    "vector {"::
    " b1"::
    " b2"::
    " b3"::
    "}"::
    ""::
    "Bla bla bla"::
    "Toto tata titi tutu"::
    "sntahoestnud;ad,.tnd"::
    ""::
    ";s,.ntphaoearicd"::
    Nil
  verbln(1)
  verbln("Parsing the following lines:")
  toParse.foreach(l => verbln("  " + l))
  // Parse and destructure the four sections.
  val (title,matrix,vector,rest) =
    gausel.parser.SystemParser(toParse)
  verbln(1)
  verbln("Result:")
  verbln("  title:")
  verbln("    " + title)
  verbln("  matrix:")
  matrix.foreach(l => verbln("    " + l))
  verbln("  vector:")
  vector.foreach(l => verbln("    " + l))
  verbln("  rest:")
  rest.foreach(l => verbln("    " + l))
  verbln(1)
  verbln("That's all.")
  verbln("See ya.")
}
| Uchuu/Gausel | trunk/src/test/scala/TestParser.scala | Scala | gpl-3.0 | 1,895 |
package ua.nure.lab4
import org.specs2.mutable.Specification
import scala.collection.mutable
/**
* Class Lab4commonTest implementation.
*
* @author Bohdan_Suprun
*/
class Lab4commonTest extends Specification {
  "Lab4commonTest" should {
    // (x, f(x)) pairs for f(x) = x^2 over 1..3
    "values" in {
      Lab4common values ((x) => x * x, 1, 3) mustEqual List((1, 1), (2, 4), (3, 9))
    }
    // maximum of f(x) = 10x - x^2 over 1..10 (attained at x = 5)
    "largest" in {
      Lab4common largest ((x) => 10 * x - x * x, 1 to 10) mustEqual 25
    }
    // character -> set of positions, in first-occurrence order
    "indexes" in {
      Lab4common indexes "Mississippi" mustEqual mutable.LinkedHashMap[String, Set[Int]]("M" -> Set(0),
        "I" -> Set(1, 4, 7,10), "S" -> Set(2, 3, 5, 6), "P" -> Set(8, 9))
    }
    // zeros removed, relative order preserved
    "removeZeroMembers" in {
      Lab4common removeZeroMembers List(10, 20, 0, 30, 50, 0) mustEqual List(10, 20, 30, 50)
    }
  }
}
| isCompetent/spp | src/test/scala/ua/nure/lab4/Lab4commonTest.scala | Scala | mit | 798 |
package processes.freeMonads.vanillaScala.single
import scala.concurrent.Future
import domain.Profile
import play.api.mvc.AnyContent
import play.api.mvc.Request
import play.api.mvc.Result
import processes.PatchAssignment
import processes.Services
import processes.freeMonads.single.NestedProgramParts
import processes.freeMonads.single.NestedProgramRunner
import processes.freeMonads.vanillaScala.SingleMachinery
/**
 * PATCH handler built from nested free-monad programs: an outer program of
 * SubRoutine steps is first interpreted into Service programs, which are then
 * interpreted into HttpResult.
 */
class Nested(protected val services: Services) extends PatchAssignment
  with SingleMachinery with NestedProgramRunner with NestedProgramParts {
  // Outer pipeline: request -> Profile -> service update -> HTTP response.
  protected def handlePatchRequest(id: String, request: Request[AnyContent]): Future[Result] = {
    val patchProgram =
      for {
        profile <- RequestToInternalRepresentation(request)
        serviceResult <- PerformProfileUpdate(id, profile)
        response <- InternalRepresentationToResponse(serviceResult)
      } yield response
    // First interpretation: SubRoutine steps -> Service programs.
    val serviceProgram = patchProgram.run(PatchProgramRunner)
    // Second interpretation: Service -> HttpResult; merge folds Either to Result.
    serviceProgram.run(ServiceRunner).map(_.merge)
  }
  // One step of the outer program, producing a T.
  sealed trait SubRoutine[T]
  // Parses and validates the request body into a Profile.
  case class RequestToInternalRepresentation(request: Request[AnyContent]) extends SubRoutine[Profile] {
    val program =
      for {
        json <- ParseJson(request) ifEmpty BadRequest
        profile <- JsonToProfile(json) ifError ValidationErrors
      } yield profile
  }
  // Loads, merges and stores the profile; yields a ServiceResult.
  case class PerformProfileUpdate(id: String, profile: Profile) extends SubRoutine[ServiceResult] {
    val program =
      for {
        oldProfile <- GetProfileById(id) ifEmpty NotFound(id)
        mergedProfile <- MergeProfile(oldProfile, profile)
        _ <- UpdateProfile(id, mergedProfile)
      } yield Success: ServiceResult
  }
  // Maps each ServiceResult variant to its HTTP response.
  case class InternalRepresentationToResponse(serviceResult: ServiceResult) extends SubRoutine[Result] {
    val result =
      serviceResult match {
        case Success => results.noContent
        case BadRequest => results.badRequest
        case ValidationErrors(errors) => results.unprocessableEntity(errors)
        case NotFound(id) => results.notFound(id)
      }
  }
  // Free monad over Service steps.
  type Routine[A] = Free[Service, A]
  protected def serviceResultToResponse(serviceResult: ServiceResult): Result =
    InternalRepresentationToResponse(serviceResult).result
  // Natural transformation: each SubRoutine becomes a Service program.
  object PatchProgramRunner extends (SubRoutine ~> Routine) {
    def apply[A](sa: SubRoutine[A]) = sa match {
      case x @ RequestToInternalRepresentation(_) => x.program
      case x @ PerformProfileUpdate(_, _) => x.program
      case x @ InternalRepresentationToResponse(_) => Return(x.result)
    }
  }
  // Natural transformation: Service steps are executed by serviceRunner.
  object ServiceRunner extends (Service ~> HttpResult) {
    def apply[A](sa: Service[A]) = serviceRunner(sa)
  }
} | EECOLOR/scala-clean-code-patterns | src/main/scala/processes/freeMonads/vanillaScala/single/Nested.scala | Scala | mit | 2,655 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.index
import com.typesafe.scalalogging.LazyLogging
import com.vividsolutions.jts.geom.Geometry
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.{DEFAULT_DATE_KEY, RichSimpleFeatureType}
import org.locationtech.geomesa.utils.geotools.{FeatureUtils, SimpleFeatureTypes}
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConverters._
/** Runs the schema-level validations GeoMesa applies to a simple feature type. */
object GeoMesaSchemaValidator {

  /**
   * Validates the given simple feature type, applying each check in order:
   * geometry types, dtg (date) field, and reserved attribute names.
   * Note that the dtg check may update the type's dtg field as a side effect.
   */
  def validate(sft: SimpleFeatureType): Unit = {
    val checks: Seq[SimpleFeatureType => Unit] = Seq(
      MixedGeometryCheck.validateGeometryType _,
      TemporalIndexCheck.validateDtgField _,
      ReservedWordCheck.validateAttributeNames _
    )
    checks.foreach(check => check(sft))
  }
}
/**
* Utility object that ensures that none of the (local portion of the) property
* names is a reserved word in ECQL. Using those reserved words in a simple
* feature type will cause queries to fail.
*/
object ReservedWordCheck extends LazyLogging {

  // ensure that no attribute names are reserved words within GeoTools that will cause query problems
  // Currently only warns (does not fail) — see the TODO below.
  def validateAttributeNames(sft: SimpleFeatureType): Unit = {
    val reservedWords = FeatureUtils.sftReservedWords(sft)
    if (reservedWords.nonEmpty) {
      // TODO: Make this an exception after GeoMesa 1.2.2
      logger.warn(
        "The simple feature type contains one or more attributes whose names are reserved words: " +
        reservedWords.mkString(", "))
    }
  }
}
/**
* Utility object for emitting a warning to the user if a SimpleFeatureType contains a temporal attribute, but
* none is used in the index.
*
* Furthermore, this object presents a candidate to be used in this case.
*
* This is useful since the only symptom of this mistake is slower than normal queries on temporal ranges.
*/
object TemporalIndexCheck extends LazyLogging {

  /**
   * Ensures the feature type's configured dtg (default date) field is a valid
   * temporal attribute. An invalid configured field is cleared (with a warning),
   * and when any date-typed attribute exists, the first one found is installed
   * in its place (also with a warning).
   */
  def validateDtgField(sft: SimpleFeatureType): Unit = {
    val candidates = scanForTemporalAttributes(sft) // every attribute that could serve as the dtg
    val currentIsValid = sft.getDtgField.exists(candidates.contains)
    if (!currentIsValid) {
      // clear out any configured-but-invalid field
      sft.getDtgField.foreach { dtg =>
        logger.warn(s"Invalid date field '$dtg' specified for schema $sft")
        sft.clearDtgField()
      }
      // fall back to the first usable temporal attribute, if one exists
      candidates.headOption.foreach { candidate =>
        logger.warn(
          s"$DEFAULT_DATE_KEY is not valid or defined for simple feature type $sft. " +
            "However, the following attribute(s) can be used in GeoMesa's temporal index: " +
            s"${candidates.mkString(", ")}. GeoMesa will now point $DEFAULT_DATE_KEY to the first " +
            s"temporal attribute found: $candidate")
        sft.setDtgField(candidate)
      }
    }
  }

  /** Returns the local names of all attributes bound to java.util.Date (or a subtype). */
  def scanForTemporalAttributes(sft: SimpleFeatureType) =
    sft.getAttributeDescriptors.asScala.toList.collect {
      case descriptor if classOf[java.util.Date].isAssignableFrom(descriptor.getType.getBinding) =>
        descriptor.getLocalName
    }
}
object MixedGeometryCheck extends LazyLogging {
import java.lang.{Boolean => jBoolean}
import SimpleFeatureTypes.MIXED_GEOMETRIES
  /**
   * Rejects schemas whose default geometry is bound to the generic `Geometry` type
   * unless the user has explicitly opted in via the MIXED_GEOMETRIES flag in the
   * type's user data. Concrete geometry bindings (Point, Polygon, ...) pass untouched.
   *
   * @throws IllegalArgumentException if the geometry is mixed and the flag is not set
   */
  def validateGeometryType(sft: SimpleFeatureType): Unit = {
    val gd = sft.getGeometryDescriptor
    // only a binding of the raw Geometry class counts as "mixed"
    if (gd != null && gd.getType.getBinding == classOf[Geometry]) {
      // accept the opt-in flag as a java Boolean, a String, or anything whose toString parses as true
      val declared = sft.getUserData.get(MIXED_GEOMETRIES) match {
        case null => false
        case mixed: jBoolean if mixed => true
        case mixed: String if jBoolean.valueOf(mixed) => true
        case mixed if jBoolean.valueOf(mixed.toString) => true
        case _ => false
      }
      if (!declared) {
        throw new IllegalArgumentException("Trying to create a schema with mixed geometry type " +
          s"'${gd.getLocalName}:Geometry'. Queries may be slower when using mixed geometries. " +
          "If this is intentional, you may override this message by putting Boolean.TRUE into the " +
          s"SimpleFeatureType user data under the key '$MIXED_GEOMETRIES' before calling createSchema. " +
          "Otherwise, please specify a single geometry type (e.g. Point, LineString, Polygon, etc).")
      }
    }
  }
} | mdzimmerman/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/index/GeoMesaSchemaValidator.scala | Scala | apache-2.0 | 4,663 |
package io.gatling.amqp.config
import akka.actor._
import com.rabbitmq.client.ConnectionFactory
import com.typesafe.scalalogging.StrictLogging
import io.gatling.amqp.data._
import io.gatling.amqp.event._
import io.gatling.core.config.Protocol
import io.gatling.core.controller.throttle.Throttler
import io.gatling.core.result.writer.StatsEngine
import io.gatling.core.session.Session
/**
* Wraps a AMQP protocol configuration
*/
case class AmqpProtocol(
  connection: Connection,
  preparings: List[AmqpChannelCommand]
) extends Protocol with AmqpVariables with AmqpPreparation with AmqpTermination with AmqpRunner with StrictLogging {
  // NOTE(review): not used yet because messages seem to arrive in random order on the bus
  lazy val event: AmqpEventBus = new AmqpEventBus() // not used yet cause messages seems in random order in the bus
  /**
   * Creates a new AMQP connection from the configured host, port, credentials
   * and virtual host. A fresh connection is opened on every call.
   */
  def newConnection: com.rabbitmq.client.Connection = {
    import connection._
    val factory = new ConnectionFactory()
    factory.setHost(host)
    factory.setPort(port)
    factory.setUsername(user)
    factory.setPassword(password)
    factory.setVirtualHost(vhost)
    factory.newConnection
  }
  /**
   * Validates the connection configuration (delegates to [[Connection.validate]]).
   */
  def validate(): Unit = {
    connection.validate()
  }
  /**
   * Whether the AMQP channel uses publisher-confirm mode (RabbitMQ feature).
   */
  def isConfirmMode: Boolean = connection.confirm
  /**
   * Warms up the AMQP protocol (invoked by the Gatling framework): sets up
   * protocol variables and blocks until preparation commands have completed.
   */
  override def warmUp(system: ActorSystem, statsEngine: StatsEngine, throttler: Throttler): Unit = {
    logger.info("amqp: warmUp start")
    super.warmUp(system, statsEngine, throttler)
    setupVariables(system, statsEngine)
    awaitPreparation()
  }
  /**
   * Finalizes a user session (invoked by the Gatling framework): waits for
   * outstanding AMQP activity of the session before ending it.
   */
  override def userEnd(session: Session): Unit = {
    awaitTerminationFor(session)
    super.userEnd(session)
  }
  override def toString: String = {
    s"AmqpProtocol(hashCode=$hashCode)"
  }
}
| maiha/gatling-amqp | src/main/scala/io/gatling/amqp/config/AmqpProtocol.scala | Scala | mit | 1,942 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.scaladsl.persistence
import scala.concurrent.ExecutionContext
import akka.actor.ActorSystem
import akka.stream.Materializer
import com.lightbend.lagom.internal.persistence.ReadSideConfig
import com.lightbend.lagom.internal.scaladsl.persistence.ReadSideImpl
import com.lightbend.lagom.scaladsl.cluster.ClusterComponents
import play.api.Configuration
/**
* Persistence components (for compile-time injection).
*/
trait PersistenceComponents extends ReadSidePersistenceComponents
/**
 * Write-side persistence components (for compile-time injection).
 */
trait WriteSidePersistenceComponents extends ClusterComponents {
  // implemented by the concrete persistence backend mixin
  def persistentEntityRegistry: PersistentEntityRegistry
}
/**
 * Read-side persistence components (for compile-time injection).
 */
trait ReadSidePersistenceComponents extends WriteSidePersistenceComponents {
  // dependencies supplied by the application cake
  def actorSystem: ActorSystem
  def executionContext: ExecutionContext
  def materializer: Materializer
  def configuration: Configuration
  // read-side settings parsed from the application configuration
  lazy val readSideConfig: ReadSideConfig = ReadSideConfig(configuration)
  // default ReadSide implementation wired from the components above
  lazy val readSide: ReadSide = new ReadSideImpl(actorSystem, readSideConfig, persistentEntityRegistry)(
    executionContext, materializer
  )
}
| edouardKaiser/lagom | persistence/scaladsl/src/main/scala/com/lightbend/lagom/scaladsl/persistence/PersistenceComponents.scala | Scala | apache-2.0 | 1,301 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package funnel
package integration
import scalaz.Scalaz
import scala.concurrent.duration._
import scalaz.stream.async.boundedQueue
import scalaz.concurrent.{Task,Strategy}
import scalaz.stream.{Process,async,time}
import chemist.{Chemist,PlatformEvent,Pipeline,sinks}
// Minimal Chemist used by the integration tests: its lifecycle stream emits a
// single NoOp event, wired through the standard Pipeline with the configured
// discovery/sharder/http/state components.
class IntegrationChemist extends Chemist[IntegrationPlatform]{
  import Scalaz._, PlatformEvent._, Pipeline.contextualise
  import Chemist.Flow
  private[this] val log = journal.Logger[IntegrationChemist]
  // a single NoOp platform event, lifted into the pipeline's context
  val lifecycle: Flow[PlatformEvent] =
    Process.emitAll(NoOp :: Nil).map(contextualise)
  // bounded buffer of platform events shared with the pipeline sink
  val queue =
    boundedQueue[PlatformEvent](100)(Chemist.defaultExecutor)
  // wires the lifecycle stream into the pipeline using the supplied config
  val init: ChemistK[Unit] =
    for {
      cfg <- config
      _ = log.info("Initilizing Chemist....")
      _ <- Pipeline.task(
        lifecycle,
        cfg.rediscoveryInterval
      )(cfg.discovery,
        queue,
        cfg.sharder,
        cfg.http,
        cfg.state,
        sinks.unsafeNetworkIO(cfg.remoteFlask, queue)
      ).liftKleisli
    } yield ()
}
| neigor/funnel | integration/src/multi-jvm/scala/IntegrationChemist.scala | Scala | apache-2.0 | 1,864 |
/*
* Simple BinaryTree (Int) Implementation
* Search Trees
* In order
* Pre order
* Post order
* Level order
*
*/
package object BinaryTree {

  /**
   * Immutable binary tree of Ints.
   *
   * Trees are built with [[toTree]] or `insertOrdered`, both of which maintain
   * the binary-search-tree invariant; `find` assumes that invariant holds.
   * Traversals are provided in in-order, pre-order, post-order and level-order.
   */
  trait Tree {

    /** In-order traversal: left subtree, node value, right subtree (sorted for a BST). */
    def toListInOrder: List[Int] = this match {
      case Empty()       => Nil
      case Leaf(v)       => v :: Nil
      case Node(l, v, r) => l.toListInOrder ::: (v :: r.toListInOrder)
    }

    /** Pre-order traversal: node value, left subtree, right subtree. */
    def toListPreOrder: List[Int] = this match {
      case Empty()       => Nil
      case Leaf(v)       => v :: Nil
      case Node(l, v, r) => v :: l.toListPreOrder ::: r.toListPreOrder
    }

    /**
     * Post-order traversal: left subtree, right subtree, node value.
     *
     * Bug fix: the previous implementation recursed with `toListPreOrder`,
     * so both children were emitted in pre-order instead of post-order.
     */
    def toListPostOrder: List[Int] = this match {
      case Empty()       => Nil
      case Leaf(v)       => v :: Nil
      case Node(l, v, r) => l.toListPostOrder ::: r.toListPostOrder ::: (v :: Nil)
    }

    /** Level-order (breadth-first) traversal, emitted one level at a time. */
    def toListLevelOrder: List[Int] = {
      // values held directly by a single node
      def elements(t: Tree): List[Int] = t match {
        case Empty()       => Nil
        case Leaf(v)       => v :: Nil
        case Node(_, v, _) => v :: Nil
      }
      // immediate children of a node (leaves and empties have none)
      def subTrees(t: Tree): List[Tree] = t match {
        case Node(l, _, r) => List(l, r)
        case _             => Nil
      }
      // emit the current level, then recurse on all children of that level
      def step(ls: List[Tree]): List[Int] = ls match {
        case Nil => Nil
        case ts  => ts.flatMap(elements) ::: step(ts.flatMap(subTrees))
      }
      step(this :: Nil)
    }

    /** Inserts `i` preserving the binary-search-tree invariant; duplicates are ignored. */
    def insertOrdered(i: Int): Tree = this match {
      case Empty() => Leaf(i)
      case Leaf(v) =>
        if (i < v)
          Node(Leaf(i), v, Empty())
        else if (i > v)
          Node(Empty(), v, Leaf(i))
        else
          this
      case Node(l, v, r) =>
        if (i < v)
          Node(l.insertOrdered(i), v, r)
        else if (i > v)
          Node(l, v, r.insertOrdered(i))
        else
          this
    }

    /** Membership test; assumes this tree is a binary search tree. */
    def find(i: Int): Boolean = this match {
      case Empty() => false
      case Leaf(v) => i == v
      case Node(l, v, r) =>
        if (i < v)
          l.find(i)
        else if (i > v)
          r.find(i)
        else
          true
    }
  }

  /** Single-value tree. */
  case class Leaf(v: Int) extends Tree
  /** Internal node with left and right subtrees. */
  case class Node(l: Tree, v: Int, r: Tree) extends Tree
  /** The empty tree. */
  case class Empty() extends Tree

  /** Builds a search tree by inserting the list's elements from right to left. */
  def toTree(ls: List[Int]): Tree =
    ls.foldRight(Empty(): Tree)((x, t) => t.insertOrdered(x))
} // package BinaryTree
object Main {
  /** Demo entry point: builds a small search tree and prints every traversal order. */
  def main(args: Array[String]): Unit = {
    import BinaryTree._
    val tree = toTree(List(1, 4, 6, 2, 4))
    println(s"The tree: $tree")
    println(s"In order: ${tree.toListInOrder}")
    println(s"Pre order: ${tree.toListPreOrder}")
    println(s"Post order: ${tree.toListPostOrder}")
    println(s"Level order: ${tree.toListLevelOrder}")
  }
}
| bgaster/scala-intro | BinaryTree/src/main/scala/BinaryTree.scala | Scala | mit | 2,844 |
package org.airpnp.plist;
import java.io.IOException
/** Base exception for binary property list parsing errors. */
class PropertyListException(private val reason: java.lang.String) extends IOException(reason) {
}
/**
 * Represents a binary property list error due to an unhandled feature.
 */
class PropertyListUnhandledException(private val reason: java.lang.String) extends PropertyListException(reason) {
}
/**
* Represents a binary property list format error.
*/
class PropertyListFormatException(private val reason: java.lang.String) extends PropertyListException(reason) {
} | provegard/ScAirPnp | src/main/scala/org/airpnp/plist/Exceptions.scala | Scala | mit | 523 |
package com.imaginea.activegrid.core.models
import com.imaginea.activegrid.core.utils.ActiveGridUtils
import com.typesafe.scalalogging.Logger
import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory
/**
* Created by sampathr on 22/9/16.
*/
// Persistable machine-image metadata stored as a Neo4j node (see the companion
// for the graph mapping). Field names mirror EC2/AMI-style image attributes —
// presumably populated from a cloud-provider describe-images call; TODO confirm.
// Note: `publicValue` is the only non-Option field and is required when reading
// the node back from the graph.
case class ImageInfo(override val id: Option[Long],
                     imageId: Option[String],
                     state: Option[String],
                     ownerId: Option[String],
                     publicValue: Boolean,
                     architecture: Option[String],
                     imageType: Option[String],
                     platform: Option[String],
                     imageOwnerAlias: Option[String],
                     name: Option[String],
                     description: Option[String],
                     rootDeviceType: Option[String],
                     rootDeviceName: Option[String],
                     version: Option[String]) extends BaseEntity
object ImageInfo {
  /** Neo4j persistence adapter for [[ImageInfo]] (label "ImageInfo"). */
  implicit class ImageInfoImpl(imageInfo: ImageInfo) extends Neo4jRep[ImageInfo] {
    val logger = Logger(LoggerFactory.getLogger(getClass.getName))
    val label = "ImageInfo"
    // NOTE(review): the parameter shadows the wrapped `imageInfo`; the node is
    // built from the argument, not from the implicit receiver.
    override def toNeo4jGraph(imageInfo: ImageInfo): Node = {
      logger.debug(s"In toGraph for Image Info: $imageInfo")
      // property map persisted on the node; Option values are stored as-is and
      // unwrapped again in fromNeo4jGraph
      val map = Map("imageId" -> imageInfo.imageId,
        "state" -> imageInfo.state,
        "ownerId" -> imageInfo.ownerId,
        "publicValue" -> imageInfo.publicValue,
        "architecture" -> imageInfo.architecture,
        "imageType" -> imageInfo.imageType,
        "platform" -> imageInfo.platform,
        "imageOwnerAlias" -> imageInfo.imageOwnerAlias,
        "name" -> imageInfo.name,
        "description" -> imageInfo.description,
        "rootDeviceType" -> imageInfo.rootDeviceType,
        "rootDeviceName" -> imageInfo.rootDeviceName,
        "version" -> imageInfo.version
      )
      val imageInfoNode = Neo4jRepository.saveEntity[ImageInfo](label, imageInfo.id, map)
      imageInfoNode
    }
    override def fromNeo4jGraph(nodeId: Long): Option[ImageInfo] = {
      ImageInfo.fromNeo4jGraph(nodeId)
    }
  }
  /**
   * Reads the node with the given id back into an [[ImageInfo]].
   * Returns None (after logging a warning) on any failure — including a missing
   * node or a missing "publicValue" property, since that one is read with a
   * direct map access rather than an Option lookup.
   */
  def fromNeo4jGraph(nodeId: Long): Option[ImageInfo] = {
    val logger = Logger(LoggerFactory.getLogger(getClass.getName))
    try {
      val node = Neo4jRepository.findNodeById(nodeId)
      val map = Neo4jRepository.getProperties(node.get, "imageId", "state", "ownerId", "publicValue",
        "architecture", "imageType", "platform", "imageOwnerAlias", "name", "description", "rootDeviceType", "rootDeviceName", "version")
      val imageInfo = ImageInfo(Some(node.get.getId),
        ActiveGridUtils.getValueFromMapAs[String](map, "imageId"),
        ActiveGridUtils.getValueFromMapAs[String](map, "state"),
        ActiveGridUtils.getValueFromMapAs[String](map, "ownerId"),
        map("publicValue").asInstanceOf[Boolean],
        ActiveGridUtils.getValueFromMapAs[String](map, "architecture"),
        ActiveGridUtils.getValueFromMapAs[String](map, "imageType"),
        ActiveGridUtils.getValueFromMapAs[String](map, "platform"),
        ActiveGridUtils.getValueFromMapAs[String](map, "imageOwnerAlias"),
        ActiveGridUtils.getValueFromMapAs[String](map, "name"),
        ActiveGridUtils.getValueFromMapAs[String](map, "description"),
        ActiveGridUtils.getValueFromMapAs[String](map, "rootDeviceType"),
        ActiveGridUtils.getValueFromMapAs[String](map, "rootDeviceName"),
        ActiveGridUtils.getValueFromMapAs[String](map, "version"))
      Some(imageInfo)
    } catch {
      case ex: Exception =>
        logger.warn(ex.getMessage, ex)
        None
    }
  }
} | nagulmeeras/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/ImageInfo.scala | Scala | apache-2.0 | 3,608 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary.api
import com.eevolution.context.dictionary.domain.model.PrintLabelLineTrl
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 10/11/17.
*/
/**
* Print Label Line Trl Service
*/
trait PrintLabelLineTrlService extends api.Service[PrintLabelLineTrl, Int] {
  // Marker trait: all operations are inherited from api.Service[PrintLabelLineTrl, Int];
  // no additional members are defined yet.
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/PrintLabelLineTrlService.scala | Scala | gpl-3.0 | 1,251 |
/**
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package actorbintree
import akka.actor.{ Props, ActorRef, ActorSystem }
import org.scalatest.{ BeforeAndAfterAll, FlatSpec }
import akka.testkit.{ TestProbe, ImplicitSender, TestKit }
import org.scalatest.matchers.ShouldMatchers
import scala.util.Random
import scala.concurrent.duration._
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class BinaryTreeSuite(_system: ActorSystem) extends TestKit(_system) with FunSuite with ShouldMatchers with BeforeAndAfterAll with ImplicitSender
{
def this() = this(ActorSystem("PostponeSpec"))
override def afterAll: Unit = system.shutdown()
import actorbintree.BinaryTreeSet._
  // Collects one reply per operation (in arrival order) within 5 seconds, then
  // compares the id-sorted replies against the expected sequence, failing with a
  // listing of the mismatched positions.
  def receiveN(requester: TestProbe, ops: Seq[Operation], expectedReplies: Seq[OperationReply]): Unit =
    within(5.seconds) {
      val repliesUnsorted = for (i <- 1 to ops.size) yield try {
        requester.expectMsgType[OperationReply]
      } catch {
        // for large batches keep the failure message short; otherwise include all requests
        case ex: Throwable if ops.size > 10 => fail(s"failure to receive confirmation $i/${ops.size}", ex)
        case ex: Throwable => fail(s"failure to receive confirmation $i/${ops.size}\\nRequests:" + ops.mkString("\\n ", "\\n ", ""), ex)
      }
      val replies = repliesUnsorted.sortBy(_.id)
      if (replies != expectedReplies) {
        val pairs = (replies zip expectedReplies).zipWithIndex filter (x => x._1._1 != x._1._2)
        fail("unexpected replies:" + pairs.map(x => s"at index ${x._2}: got ${x._1._1}, expected ${x._1._2}").mkString("\\n ", "\\n ", ""))
      }
    }
  // Fires all operations at a freshly created BinaryTreeSet actor and checks
  // the replies via receiveN.
  def verify(probe: TestProbe, ops: Seq[Operation], expected: Seq[OperationReply]): Unit = {
    val topNode = system.actorOf(Props[BinaryTreeSet])
    ops foreach { op =>
      topNode ! op
    }
    receiveN(probe, ops, expected)
  }
  // Smoke test: contains on an empty set, then an insert/contains round trip.
  test("proper inserts and lookups") {
    val topNode = system.actorOf(Props[BinaryTreeSet])
    topNode ! Contains(testActor, id = 1, 1)
    expectMsg(ContainsResult(1, false))
    topNode ! Insert(testActor, id = 2, 1)
    topNode ! Contains(testActor, id = 3, 1)
    expectMsg(OperationFinished(2))
    expectMsg(ContainsResult(3, true))
  }
  // The worked example from the assignment text; replies are matched by id order.
  test("instruction example") {
    val requester = TestProbe()
    val requesterRef = requester.ref
    val ops = List(
      Insert(requesterRef, id=100, 1),
      Contains(requesterRef, id=50, 2),
      Remove(requesterRef, id=10, 1),
      Insert(requesterRef, id=20, 2),
      Contains(requesterRef, id=80, 1),
      Contains(requesterRef, id=70, 2)
      )
    val expectedReplies = List(
      OperationFinished(id=10),
      OperationFinished(id=20),
      ContainsResult(id=50, false),
      ContainsResult(id=70, true),
      ContainsResult(id=80, false),
      OperationFinished(id=100)
      )
    verify(requester, ops, expectedReplies)
  }
  // Differential test: random operations against the actor set must produce the
  // same replies as an in-memory scala.collection.immutable.Set, while GC
  // messages are interleaved roughly 10% of the time.
  test("behave identically to built-in set (includes GC)") {
    val rnd = new Random()
    def randomOperations(requester: ActorRef, count: Int): Seq[Operation] = {
      def randomElement: Int = rnd.nextInt(100)
      // inserts are twice as likely as contains/remove (cases 0 and 1)
      def randomOperation(requester: ActorRef, id: Int): Operation = rnd.nextInt(4) match {
        case 0 => Insert(requester, id, randomElement)
        case 1 => Insert(requester, id, randomElement)
        case 2 => Contains(requester, id, randomElement)
        case 3 => Remove(requester, id, randomElement)
      }
      for (seq <- 0 until count) yield randomOperation(requester, seq)
    }
    // replays the operations against a plain immutable Set to compute the oracle replies
    def referenceReplies(operations: Seq[Operation]): Seq[OperationReply] = {
      var referenceSet = Set.empty[Int]
      def replyFor(op: Operation): OperationReply = op match {
        case Insert(_, seq, elem) =>
          referenceSet = referenceSet + elem
          OperationFinished(seq)
        case Remove(_, seq, elem) =>
          referenceSet = referenceSet - elem
          OperationFinished(seq)
        case Contains(_, seq, elem) =>
          ContainsResult(seq, referenceSet(elem))
      }
      for (op <- operations) yield replyFor(op)
    }
    val requester = TestProbe()
    val topNode = system.actorOf(Props[BinaryTreeSet])
    val count = 10000 // orig value 1000
    val ops = randomOperations(requester.ref, count)
    val expectedReplies = referenceReplies(ops)
    ops foreach { op =>
      topNode ! op
      //println("Calling op " + op)
      if (rnd.nextDouble() < 0.1) {
        //if (op.id == 3) {
        //println("Sending GC call at op " + op.id)
        topNode ! GC
      }
    }
    receiveN(requester, ops, expectedReplies)
  }
} | mitochon/hexercise | src/mooc/reactive/week5.actorbintree/src/test/scala/actorbintree/BinaryTreeSuite.scala | Scala | mit | 4,609 |
// Project: scalanative-cocoa
// Module: Foundation
// Description: Generated with scala-obj-bindgen (with manual postprocessing) from:
// objc/objc.h, objc/NSObject.h, objc/Protocol.h, Foundation/NSObject.h
package cocoa.foundation
import scalanative.native._
import objc._
import scala.scalanative.native.extern
// Scala Native binding for the Objective-C NSCoding protocol; method bodies are
// `extern` and resolved against the native runtime by the @ObjC macro.
@ObjC
trait NSCoding {
  // encodes the receiver's state into the given archiver
  @inline def encodeWithCoder_(aCoder: NSCoder): Unit = extern
  // initializes the receiver from the given unarchiver
  @inline def initWithCoder_(aDecoder: NSCoder): this.type = extern
}
| jokade/scalanative-cocoa | foundation/src/main/scala/cocoa/foundation/NSCoding.scala | Scala | mit | 495 |
package spire
package math
package extras
import org.scalacheck.Arbitrary.{arbitrary, _}
import org.scalacheck._
import org.scalatest.{Matchers, _}
import org.scalatest.prop._
import spire.implicits._
import spire.laws.arb.rational
import spire.math.Rational
import scala.util.Try
/**
 * Property-based checks for FixedPoint/FixedScale arithmetic, using Rational
 * arithmetic as the reference implementation. Results that fit a Long must
 * match the rational value scaled by the denominator; results that do not fit
 * must raise FixedPointOverflow (or fail construction).
 */
class FixedPointCheck extends PropSpec with Matchers with GeneratorDrivenPropertyChecks {
  // scales are strictly positive ints
  implicit val arbFixedScale: Arbitrary[FixedScale] =
    Arbitrary(arbitrary[Int].map(_.abs).filter(_ > 0).map(FixedScale))
  implicit val arbFixedPoint: Arbitrary[FixedPoint] =
    Arbitrary(arbitrary[Long].map(new FixedPoint(_)))
  property("FixedScale(r).toRational ~= r") {
    forAll { (s: FixedScale, r: Rational) =>
      implicit val scale = s
      val minV = FixedPoint.MinValue.toRational
      val maxV = FixedPoint.MaxValue.toRational
      if (r < minV || maxV < r) {
        // out of representable range: construction must fail
        Try(FixedPoint(r)).isSuccess shouldBe false
      } else {
        // in range: round-trips up to rounding at the scale's denominator
        FixedPoint(r).toRational shouldBe r.roundTo(s.denom)
      }
    }
  }
  property("new FixedScale(n).toRational = n/d") {
    forAll { (s: FixedScale, n: Long) =>
      implicit val scale = s
      new FixedPoint(n).toRational shouldBe Rational(n, s.denom)
    }
  }
  // Builds paired fixed-point/rational operands from raw longs; z selects a
  // power-of-ten denominator (10^0 .. 10^10) and noZero replaces a zero y with 1
  // so division/modulus properties avoid divide-by-zero.
  def build(x: Long, y0: Long, z: Byte, noZero: Boolean): (Int, Int, FixedPoint, FixedPoint, Rational, Rational) = {
    val y = if (y0 == 0L && noZero) 1L else y0
    val d = z.toInt.abs % 11
    val denom = 10 ** (d)
    val (fx, fy) = (new FixedPoint(x), new FixedPoint(y))
    val (ax, ay) = (Rational(x, denom), Rational(y, denom))
    (d, denom, fx, fy, ax, ay)
  }
  // binary op over fixed points (needing a scale) and its rational reference
  type S2[A] = (A, A, FixedScale) => A
  type F2[A] = (A, A) => A
  import scala.util.{Success, Try}
  // Checks a fixed-point binary op against its rational counterpart using an
  // arbitrary FixedScale; overflow is expected exactly when the scaled rational
  // result falls outside the Long range.
  def testBinop2(name: String, noZero: Boolean, f: S2[FixedPoint], g: F2[Rational]) =
    property(name) {
      forAll { (x: Long, y: Long, s: FixedScale) =>
        implicit val scale = s
        if (!noZero || y != 0L) {
          val (fx, fy) = (new FixedPoint(x), new FixedPoint(y))
          val (ax, ay) = (Rational(x, s.denom), Rational(y, s.denom))
          val az = g(ax, ay)
          val fz = Try(f(fx, fy, scale)) match {
            case Success(fz) =>
              BigInt(fz.long) shouldBe (az * s.denom).toBigInt
            case _ =>
              (az * s.denom < Long.MinValue || Long.MaxValue < az * s.denom) shouldBe true
          }
        }
      }
    }
  // Same check as testBinop2 but derives the scale from a Byte via build.
  // NOTE(review): appears unused — the binop properties below all go through
  // testBinop2; confirm before removing.
  def testBinop(name: String, noZero: Boolean, f: S2[FixedPoint], g: F2[Rational]) =
    property(name) {
      forAll { (x: Long, y: Long, z: Byte) =>
        val (_, denom, fx, fy, ax, ay) = build(x, y, z, noZero)
        val az = g(ax, ay)
        val ofz = try {
          implicit val scale = FixedScale(denom)
          Some(f(fx, fy, scale))
        } catch {
          case _: FixedPointOverflow => None
        }
        ofz match {
          case Some(fz) =>
            BigInt(fz.long) shouldBe (az * denom).toBigInt
          case None =>
            (az * denom < Long.MinValue || Long.MaxValue < az * denom) shouldBe true
        }
      }
    }
  testBinop2("addition", false, (x, y, s) => x + y, _ + _)
  testBinop2("subtraction", false, (x, y, s) => x - y, _ - _)
  testBinop2("multiplication", false, (x, y, s) => (x).*(y)(s), _ * _)
  testBinop2("division", true, (x, y, s) => (x)./(y)(s), _ / _)
  testBinop2("modulus", true, (x, y, s) => x % y, _ % _)
  // operand builder for the "half" ops below (fixed point combined with a raw Long)
  def buildHalf(x: Long, z: Byte): (Int, Int, FixedPoint, Rational) = {
    val d = z.toInt.abs % 11
    val denom = 10 ** (d)
    val fx = new FixedPoint(x)
    val ax = Rational(x, denom)
    (d, denom, fx, ax)
  }
  // "half" binary op: fixed point on the left, unscaled Long on the right
  type SH2[A] = (A, Long, FixedScale) => A
  type FH2[A] = (A, Long) => A
  // Checks a FixedPoint-op-Long operation against its rational counterpart.
  def testHalfop(name: String, noZero: Boolean, f: SH2[FixedPoint], g: FH2[Rational]) =
    property(name) {
      forAll { (x: Long, y0: Long, z: Byte) =>
        val y = if (noZero && y0 == 0) 1L else y0
        val (d, denom, fx, ax) = buildHalf(x, z)
        val az = g(ax, y)
        val ofz = try {
          implicit val scale = FixedScale(denom)
          Some(f(fx, y, scale))
        } catch {
          case _: FixedPointOverflow => None
        }
        ofz match {
          case Some(fz) =>
            BigInt(fz.long) shouldBe (az * denom).toBigInt
          case None =>
            (az * denom < Long.MinValue || Long.MaxValue < az * denom) shouldBe true
        }
      }
    }
  testHalfop("h-addition", false, (x, y, s) => (x).+(y)(s), _ + _)
  testHalfop("h-subtraction", false, (x, y, s) => (x).-(y)(s), _ - _)
  testHalfop("h-multiplication", false, (x, y, s) => x * y, _ * _)
  testHalfop("h-division", true, (x, y, s) => x / y, _ / _)
  testHalfop("h-modulus", true, (x, y, s) => (x).%(y)(s), _ % _)
  // non-negative integer powers, same overflow contract as the binops
  property("pow") {
    forAll { (x: Long, k0: Byte, d0: Byte) =>
      val k = k0.toInt.abs
      val denom = 10 ** (d0.toInt.abs % 11)
      val az = Rational(x, denom).pow(k)
      val ofz = try {
        implicit val scale = FixedScale(denom)
        Some(new FixedPoint(x).pow(k))
      } catch {
        case _: FixedPointOverflow => None
      }
      ofz match {
        case Some(fz) =>
          BigInt(fz.long) shouldBe (az * denom).toBigInt
        case None =>
          (az * denom < Long.MinValue || Long.MaxValue < az * denom) shouldBe true
      }
    }
  }
}
| rklaehn/spire | tests/src/test/scala/spire/math/extras/FixedPointCheck.scala | Scala | mit | 5,248 |
/*
* Copyright 2012-2013 Eligotech BV.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eligosource.eventsourced.journal.dynamodb
import akka.actor.ActorSystem
import org.eligosource.eventsourced.journal.common.{PersistentReplaySpec, JournalProps, PersistentJournalSpec}
// Runs the shared persistent-journal contract spec against the DynamoDB journal.
class DynamoDBJournalSpec extends PersistentJournalSpec with DynamoDBJournalSupport
// Runs the shared persistent-replay contract spec against the DynamoDB journal.
class DynamoDBReplaySpec extends PersistentReplaySpec with DynamoDBJournalSupport
| CoderPaulK/eventsourced | es-journal/es-journal-dynamodb/src/it/scala/org/eligosource/eventsourced/journal/dynamodb/DynamoDBJournalSpec.scala | Scala | apache-2.0 | 965 |
package net.liftmodules
import _root_.net.liftweb._
import util.Props
import http._
/**
* ==FoBo Popper Resource Module==
*
* This resource module provides Popper resource components to the FoBo Popper Toolkit module,
* but can also be used as-is, see below for setup information.
*
* If you are using this module via the FoBo/FoBo module see also [[net.liftmodules.fobo]] for setup information.
*/
package object fobopopres {

  override def toString() = fobopopres.Resource.toString()

  /**
   * Initiate FoBo's Popper Resource(s) in your bootstrap liftweb Boot.
   *
   * @example
   * {{{
   *   import net.liftmodules.{fobopopres => fobo}
   *    :
   *   fobo.Resource.init=fobo.Resource.[Resource Object]
   * }}}
   * '''Note:''' To see available objects click on the round trait icon in the header of this page.
   */
  sealed trait Resource
  object Resource extends Resource {
    // we don't actually need to store the objects (for now), so we just keep
    // the object names; this is easy to change if objects are needed later
    private type Store = List[String] //List[Resource]
    private var store: Store = List()
    // the set of initiated resources (as object names, most recent first)
    def init: Store = store
    // registers a resource exactly once (duplicates are ignored)
    def init_=(t: Resource): Store = {
      store = if (store contains t.toString) store else t.toString :: store
      store
    }
    override def toString() = "fobopopres.Resource = " + store.toString()
    /**
     * Enable usage of Popper version 1.12.9 in your bootstrap liftweb Boot.
     * @version 1.12.9
     *
     * @example
     *
     * {{{
     *   import net.liftmodules.{fobopopres => fobo}
     *    :
     *   fobo.Resource.init=fobo.Resource.Popper1129
     * }}}
     * @since v2.0
     */
    case object Popper1129 extends Resource {
      FoBoResources.init
      FoBoResources.Popper1129
    }
  } //end Resource object
  /**
   * Object holding internally used FoBo resources.
   */
  private object FoBoResources {
    // allow serving anything under the "fobo" resource path
    lazy val init: Unit = {
      ResourceServer.allow {
        case "fobo" :: tail => true
      }
    }
    /*
     Here we use the Universal Module Definition (UMD) build from Popper's dist
     folder, as it is the most versatile module format. If, in the future, we need
     the ESM build or the dist folder's unnamed-module files instead, we will need
     a way to express that choice, for example by prefixing the resource object:
     fobo.Resource.init=fobo.Resource.ESM.Popper1129 or
     fobo.Resource.init=fobo.Resource.UMD.Popper1129.
     See https://github.com/umdjs/umd
     */
    // rewrite short "fobo/..." paths to the versioned Popper files; dev mode
    // serves the unminified variants, production the minified ones
    lazy val Popper1129: Unit = {
      ResourceServer.rewrite {
        case "fobo" :: "popper.js" :: Nil if Props.devMode =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper.js")
        case "fobo" :: "popper.js" :: Nil =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper.min.js")
        case "fobo" :: "popper.js.map" :: Nil if Props.devMode =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper.js.map")
        case "fobo" :: "popper.js.map" :: Nil =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper.min.js.map")
        case "fobo" :: "popper-utils.js" :: Nil if Props.devMode =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper-utils.js")
        case "fobo" :: "popper-utils.js" :: Nil =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper-utils.min.js")
        case "fobo" :: "popper-utils.js.map" :: Nil if Props.devMode =>
          List("fobo", "popper", "1.12.9", "js", "umd", "popper-utils.js.map")
        case "fobo" :: "popper-utils.js.map" :: Nil =>
          List("fobo",
               "popper",
               "1.12.9",
               "js",
               "umd",
               "popper-utils.min.js.map")
      }
    }
  }
}
| karma4u101/FoBo | Popper/Popper-Res/src/main/scala/net/liftmodules/fobopopres/fobopopres.scala | Scala | apache-2.0 | 3,791 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.rdd.InputFileBlockHolder
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode, FalseLiteral}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.types.{DataType, LongType, StringType}
import org.apache.spark.unsafe.types.UTF8String
// scalastyle:off whitespace.end.of.line
@ExpressionDescription(
  usage = "_FUNC_() - Returns the name of the file being read, or empty string if not available.",
  examples = """
    Examples:
      > SELECT _FUNC_();

  """,
  since = "1.5.0",
  group = "misc_funcs")
// scalastyle:on whitespace.end.of.line
// Nondeterministic leaf expression: the value comes from InputFileBlockHolder,
// which is populated by the data source scan, not from the input row.
case class InputFileName() extends LeafExpression with Nondeterministic {
  // always returns a value (empty string when no file is being read)
  override def nullable: Boolean = false
  override def dataType: DataType = StringType
  override def prettyName: String = "input_file_name"
  // no per-partition state to initialize
  override protected def initializeInternal(partitionIndex: Int): Unit = {}
  override protected def evalInternal(input: InternalRow): UTF8String = {
    InputFileBlockHolder.getInputFilePath
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // generate a direct static call to InputFileBlockHolder.getInputFilePath();
    // isNull is hard-wired to false to match nullable = false above
    val className = InputFileBlockHolder.getClass.getName.stripSuffix("$")
    val typeDef = s"final ${CodeGenerator.javaType(dataType)}"
    ev.copy(code = code"$typeDef ${ev.value} = $className.getInputFilePath();",
      isNull = FalseLiteral)
  }
}
@ExpressionDescription(
  usage = "_FUNC_() - Returns the start offset of the block being read, or -1 if not available.",
  examples = """
    Examples:
      > SELECT _FUNC_();
       -1
  """,
  since = "2.2.0",
  group = "misc_funcs")
/**
 * Returns the start offset (in bytes) of the input block the current task is
 * reading, as published by [[InputFileBlockHolder]]. Nondeterministic: the value
 * depends on task execution context rather than on any input columns.
 */
case class InputFileBlockStart() extends LeafExpression with Nondeterministic {

  override def nullable: Boolean = false

  override def dataType: DataType = LongType

  override def prettyName: String = "input_file_block_start"

  // No per-partition state is needed.
  override protected def initializeInternal(partitionIndex: Int): Unit = {}

  override protected def evalInternal(input: InternalRow): Long =
    InputFileBlockHolder.getStartOffset

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Java-visible class name of the holder singleton (drop the trailing "$").
    val holderClass = InputFileBlockHolder.getClass.getName.stripSuffix("$")
    val declType = s"final ${CodeGenerator.javaType(dataType)}"
    ev.copy(code = code"$declType ${ev.value} = $holderClass.getStartOffset();", isNull = FalseLiteral)
  }
}
@ExpressionDescription(
  usage = "_FUNC_() - Returns the length of the block being read, or -1 if not available.",
  examples = """
    Examples:
      > SELECT _FUNC_();
       -1
  """,
  since = "2.2.0",
  group = "misc_funcs")
/**
 * Returns the length (in bytes) of the input block the current task is reading,
 * as published by [[InputFileBlockHolder]]. Nondeterministic: the value depends
 * on task execution context rather than on any input columns.
 */
case class InputFileBlockLength() extends LeafExpression with Nondeterministic {

  override def nullable: Boolean = false

  override def dataType: DataType = LongType

  override def prettyName: String = "input_file_block_length"

  // No per-partition state is needed.
  override protected def initializeInternal(partitionIndex: Int): Unit = {}

  override protected def evalInternal(input: InternalRow): Long =
    InputFileBlockHolder.getLength

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Java-visible class name of the holder singleton (drop the trailing "$").
    val holderClass = InputFileBlockHolder.getClass.getName.stripSuffix("$")
    val declType = s"final ${CodeGenerator.javaType(dataType)}"
    ev.copy(code = code"$declType ${ev.value} = $holderClass.getLength();", isNull = FalseLiteral)
  }
}
| shaneknapp/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/inputFileBlock.scala | Scala | apache-2.0 | 4,245 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.feeder
import scala.collection.immutable
import io.gatling.BaseSpec
import io.gatling.commons.validation.Failure
import io.gatling.core.CoreComponents
import io.gatling.core.config._
import io.gatling.core.structure.ScenarioContext
import org.mockito.Mockito._
/**
 * Unit tests for `RecordSeqFeederBuilder` and the implicit feeder conversions
 * provided by [[FeederSupport]]: strategy selection (queue/random/shuffle/circular),
 * record conversion, and failure on bad resources.
 */
class FeederBuilderSpec extends BaseSpec with FeederSupport {
  implicit val configuration = GatlingConfiguration.loadForTest()
  // Builds a mocked ScenarioContext whose core components expose the given configuration.
  def scenarioContext(cfg: GatlingConfiguration = configuration) = {
    val ctx = mock[ScenarioContext]
    val coreComponents = mock[CoreComponents]
    when(coreComponents.configuration) thenReturn cfg
    when(ctx.coreComponents) thenReturn coreComponents
    ctx
  }
  "RecordSeqFeederBuilder" should "be able to use all the strategies" in {
    val builder = RecordSeqFeederBuilder(IndexedSeq())
    builder.queue.strategy shouldBe Queue
    builder.random.strategy shouldBe Random
    builder.shuffle.strategy shouldBe Shuffle
    builder.circular.strategy shouldBe Circular
  }
  "RecordSeqFeederBuilder" should "throw an exception when provided with bad resource" in {
    an[IllegalArgumentException] should be thrownBy
      feederBuilder(Failure(""))(SeparatedValuesParser.parse(_, SeparatedValuesParser.CommaSeparator, quoteChar = '\\'', escapeChar = 0))
  }
  "RecordSeqFeederBuilder" should "build a Feeder with a queue strategy" in {
    val queuedFeeder = RecordSeqFeederBuilder(IndexedSeq(Map("1" -> "Test"), Map("2" -> "Test"))).queue.build(scenarioContext())
    // Queue strategy must preserve insertion order.
    queuedFeeder.toArray shouldBe Array(Map("1" -> "Test"), Map("2" -> "Test"))
  }
  it should "build a Feeder with a random strategy" in {
    val fiftyTimes = 1 to 50
    val orderedMaps =
      fiftyTimes.foldLeft(IndexedSeq.empty[Record[String]]) { (acc, id) => Map(id.toString -> "Test") +: acc }
    // Randomness is probabilistic: accept the test if at least one of three
    // independent draws differs from the original order.
    val testsOutcome: immutable.IndexedSeq[Boolean] =
      (1 to 3).map { _ =>
        val randomFeeder = RecordSeqFeederBuilder(orderedMaps).random.build(scenarioContext())
        randomFeeder.hasNext shouldBe true
        val retrievedMaps = fiftyTimes.map(_ => randomFeeder.next())
        retrievedMaps != orderedMaps
      }
    if (!testsOutcome.reduce(_ || _)) fail("Random feeder did not return a random order even once out of three attempts")
  }
  it should "build a Feeder with a shuffle strategy" in {
    val fiftyTimes = 1 to 50
    val orderedMaps =
      fiftyTimes.foldLeft(IndexedSeq.empty[Record[String]]) { (acc, id) => Map(id.toString -> "Test") +: acc }
    val shuffledOutcome: immutable.IndexedSeq[IndexedSeq[Record[String]]] =
      (1 to 3).map { _ =>
        val shuffleFeeder = RecordSeqFeederBuilder(orderedMaps).shuffle.build(scenarioContext())
        shuffleFeeder.hasNext shouldBe true
        fiftyTimes.map(_ => shuffleFeeder.next())
      }
    // 3 shuffles + the original must all be pairwise distinct (probabilistic check).
    val allShuffledSeqsAreDifferent = (shuffledOutcome :+ orderedMaps).distinct.length == 4
    if (!allShuffledSeqsAreDifferent) fail("Shuffle feeder returned the same order at least once out of three attempts")
  }
  it should "build a Feeder with a circular strategy" in {
    val circularFeeder = RecordSeqFeederBuilder(IndexedSeq(Map("1" -> "Test"), Map("2" -> "Test"))).circular.build(scenarioContext())
    circularFeeder.next()
    circularFeeder.next()
    // Third call wraps around to the first record.
    circularFeeder.next() shouldBe Map("1" -> "Test")
  }
  "RecordSeqFeederBuilder" should "be able to have a record converted" in {
    val queuedFeeder = RecordSeqFeederBuilder(IndexedSeq(Map("1" -> "Test"), Map("2" -> "Test")))
    val convertedValue: Option[Any] = queuedFeeder.convert {
      case ("1", attr) => attr.concat("s are boring !")
    }.records.head.get("1")
    convertedValue.fold(fail("Could not find key"))(_ shouldBe "Tests are boring !")
    // A partial function that matches no key must leave records unchanged.
    val cantConvert: Option[Any] = queuedFeeder.convert {
      case ("Can't find because don't exist", shouldKeepAsIs) => shouldKeepAsIs.concat("s are boring !")
    }.records.head.get("1")
    cantConvert.fold(fail("Could not find key"))(_ shouldBe "Test")
  }
  // [fl]
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  //
  // [fl]
  "FeederBuilder" should "have working implicit conversions" in {
    IndexedSeq(Map("1" -> "Test")).build(scenarioContext()) shouldBe a[Feeder[_]]
    val convertedObj = Array(Map("1" -> "Test")).build(scenarioContext())
    convertedObj shouldBe a[Feeder[_]]
    convertedObj.build(mock[ScenarioContext]) shouldBe a[Feeder[_]]
  }
}
| MykolaB/gatling | gatling-core/src/test/scala/io/gatling/core/feeder/FeederBuilderSpec.scala | Scala | apache-2.0 | 5,085 |
package skinny.micro.contrib.json4s
import skinny.json4s._
import skinny.micro.context.SkinnyContext
import skinny.micro.{ Format, SkinnyMicroBase }
/**
 * JSON response support.
 *
 * Mixes JSON serialization into a SkinnyMicro base, setting the response
 * Content-Type when the application has not already chosen one.
 */
trait JSONSupport extends JSONStringOps { self: SkinnyMicroBase =>
  /**
   * Returns JSON response.
   *
   * @param entity entity to serialize
   * @param charset charset appended to the Content-Type header (omitted when None)
   * @param prettify prettify if true
   * @param underscoreKeys convert keys to snake_case when true (defaults to the app-wide setting)
   * @param ctx current request/response context
   * @return serialized JSON body
   */
  protected def responseAsJSON(
    entity: Any,
    charset: Option[String] = Some("utf-8"),
    prettify: Boolean = false,
    underscoreKeys: Boolean = self.useUnderscoreKeysForJSON)(implicit ctx: SkinnyContext): String = {
    // If Content-Type is already set, never overwrite it.
    if (contentType(ctx) == null) {
      // `contentType = ...` is a setter that yields a function applied to the context.
      (contentType = Format.JSON.contentType + charset.map(c => s"; charset=${c}").getOrElse(""))(ctx)
    }
    if (prettify) toPrettyJSONString(entity)
    else toJSONString(entity, underscoreKeys)
  }
}
object JSONSupport {
  // Attribute key for a parsed JSON request body — presumably read by the
  // request-binding support; confirm against the consumers of this key.
  val ParsedBodyKey = "skinny.micro.json4s.ParsedBody"
}
| xerial/skinny-micro | micro-json4s/src/main/scala/skinny/micro/contrib/json4s/JSONSupport.scala | Scala | bsd-2-clause | 1,020 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.ast.rewriters
import org.neo4j.cypher.internal.compiler.v2_3.helpers.FreshIdNameGenerator
import org.neo4j.cypher.internal.frontend.v2_3._
import org.neo4j.cypher.internal.frontend.v2_3.ast._
/**
 * This rewriter makes sure that all return items in a RETURN clauses are aliased, and moves
 * any ORDER BY to a preceding WITH clause
 *
 * Example:
 *
 * MATCH (n)
 * RETURN n.foo AS foo, n.bar ORDER BY foo
 *
 * This rewrite will change the query to:
 *
 * MATCH (n)
 * WITH n.foo AS ` FRESHIDxx`, n.bar AS ` FRESHIDnn` ORDER BY ` FRESHIDxx`
 * RETURN ` FRESHIDxx` AS foo, ` FRESHIDnn` AS `n.bar`
 */
case class normalizeReturnClauses(mkException: (String, InputPosition) => CypherException) extends Rewriter {
  def apply(that: AnyRef): AnyRef = bottomUp(instance).apply(that)
  private val clauseRewriter: (Clause => Seq[Clause]) = {
    // Case 1: RETURN without ORDER BY — just alias any unaliased return items in place.
    case clause @ Return(_, ri, None, _, _, _) =>
      val aliasedItems = ri.items.map({
        case i: AliasedReturnItem =>
          i
        case i =>
          // Bump the position so the synthetic identifier does not collide
          // with the expression's own position.
          val newPosition = i.expression.position.bumped()
          AliasedReturnItem(i.expression, Identifier(i.name)(newPosition))(i.position)
      })
      Seq(
        clause.copy(returnItems = ri.copy(items = aliasedItems)(ri.position))(clause.position)
      )
    // Case 2: RETURN with ORDER BY/SKIP/LIMIT — split into WITH (fresh ids, ordering)
    // followed by a plain RETURN that maps fresh ids back to the user-visible names.
    case clause @ Return(distinct, ri, orderBy, skip, limit, _) =>
      clause.verifyOrderByAggregationUse((s,i) => throw mkException(s,i))
      // Maps both the original return column and its expression to the fresh identifier,
      // so ORDER BY can refer to either form.
      var rewrites = Map[Expression, Identifier]()
      val (aliasProjection, finalProjection) = ri.items.map {
        i =>
          val returnColumn = i.alias match {
            case Some(alias) => alias
            case None => Identifier(i.name)(i.expression.position.bumped())
          }
          val newIdentifier = Identifier(FreshIdNameGenerator.name(i.expression.position))(i.expression.position)
          rewrites = rewrites + (returnColumn -> newIdentifier)
          rewrites = rewrites + (i.expression -> newIdentifier)
          (AliasedReturnItem(i.expression, newIdentifier)(i.position), AliasedReturnItem(newIdentifier.copyId, returnColumn)(i.position))
      }.unzip
      // Rewrite ORDER BY expressions to point at the fresh identifiers introduced above.
      val newOrderBy = orderBy.endoRewrite(topDown(Rewriter.lift {
        case exp: Expression if rewrites.contains(exp) => rewrites(exp).copyId
      }))
      val introducedVariables = if (ri.includeExisting) aliasProjection.map(_.identifier.name).toSet else Set.empty[String]
      Seq(
        With(distinct = distinct, returnItems = ri.copy(items = aliasProjection)(ri.position),
          orderBy = newOrderBy, skip = skip, limit = limit, where = None)(clause.position),
        Return(distinct = false, returnItems = ri.copy(items = finalProjection)(ri.position),
          orderBy = None, skip = None, limit = None, excludedNames = introducedVariables)(clause.position)
      )
    // Any other clause is left untouched.
    case clause =>
      Seq(clause)
  }
  private val instance: Rewriter = Rewriter.lift {
    case query @ SingleQuery(clauses) =>
      query.copy(clauses = clauses.flatMap(clauseRewriter))(query.position)
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/ast/rewriters/normalizeReturnClauses.scala | Scala | apache-2.0 | 3,866 |
package im.actor.server.models
import org.joda.time.DateTime
/**
 * Basic view of a group conversation.
 *
 * @param id            group identifier
 * @param creatorUserId id of the user who created the group
 * @param accessHash    opaque access token for the group
 * @param title         display title
 * @param isPublic      whether the group is publicly discoverable
 * @param createdAt     creation timestamp
 * @param about         optional description
 * @param topic         optional topic line
 */
case class Group(
  id: Int,
  creatorUserId: Int,
  accessHash: Long,
  title: String,
  isPublic: Boolean,
  createdAt: DateTime,
  about: Option[String],
  topic: Option[String]
)
object Group {
  /**
   * Projects a [[FullGroup]] down to its basic [[Group]] view, dropping the
   * title/avatar change audit fields.
   */
  def fromFull(fullGroup: FullGroup): Group = {
    import fullGroup._
    Group(id, creatorUserId, accessHash, title, isPublic, createdAt, about, topic)
  }
}
/**
 * Full group record: the basic [[Group]] fields plus audit metadata about the
 * latest title and avatar changes (who, when, and the change's random id).
 */
case class FullGroup(
  id: Int,
  creatorUserId: Int,
  accessHash: Long,
  title: String,
  isPublic: Boolean,
  createdAt: DateTime,
  about: Option[String],
  topic: Option[String],
  titleChangerUserId: Int,
  titleChangedAt: DateTime,
  titleChangeRandomId: Long,
  avatarChangerUserId: Int,
  avatarChangedAt: DateTime,
  avatarChangeRandomId: Long
)
| berserkertdl/actor-platform | actor-server/actor-models/src/main/scala/im/actor/server/models/Group.scala | Scala | mit | 1,132 |
package com.twitter.finagle.factory
import com.twitter.finagle._
import com.twitter.finagle.stats._
import com.twitter.finagle.util.Rng
import com.twitter.util._
import java.net.InetSocketAddress
import org.junit.runner.RunWith
import org.mockito.Matchers.any
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSuite}
import scala.collection.mutable
@RunWith(classOf[JUnitRunner])
/**
 * Tests for `BindingFactory`: Dtab-based name resolution, namer/name caching
 * (with eviction at the configured cache sizes), and error reporting via
 * `NoBrokersAvailableException`.
 */
class BindingFactoryTest extends FunSuite with MockitoSugar with BeforeAndAfter {
  // Dtab.base is process-global state; save and restore it around each test.
  var saveBase: Dtab = Dtab.empty
  before {
    saveBase = Dtab.base
    Dtab.base ++= Dtab.read("""
      /test1010=>/$/inet/0/1010
      """)
  }
  after {
    Dtab.base = saveBase
  }
  def anonNamer() = new Namer {
    def lookup(path: Path): Activity[NameTree[Name]] =
      Activity.value(NameTree.Neg)
    def enum(prefix: Path): Activity[Dtab] =
      Activity.exception(new UnsupportedOperationException)
  }
  /** Shared fixture: a BindingFactory with tiny caches plus counters tracking factory churn. */
  trait Ctx {
    val imsr = new InMemoryStatsReceiver
    val path = Path.read("/foo/bar")
    // Counts of underlying factory creations and closes, used to observe caching.
    var news = 0
    var closes = 0
    val newFactory: Name.Bound => ServiceFactory[Unit, Var[Addr]] =
      bound => new ServiceFactory[Unit, Var[Addr]] {
        news += 1
        def apply(conn: ClientConnection) = Future.value(new Service[Unit, Var[Addr]] {
          def apply(_unit: Unit) = Future.value(bound.addr)
        })
        def close(deadline: Time) = {
          closes += 1
          Future.Done
        }
      }
    // Cache sizes of 2 make eviction easy to trigger deterministically.
    val factory = new BindingFactory(
      path,
      newFactory,
      statsReceiver = imsr,
      maxNamerCacheSize = 2,
      maxNameCacheSize = 2)
    // Acquires a service while the given local Dtab is in effect.
    def newWith(localDtab: Dtab): Service[Unit, Var[Addr]] = {
      Dtab.unwind {
        Dtab.local = localDtab
        Await.result(factory())
      }
    }
  }
  test("Uses Dtab.base") (new Ctx {
    val n1 = Dtab.read("/foo/bar=>/test1010")
    val s1 = newWith(n1)
    val v1 = Await.result(s1(()))
    assert(v1.sample() === Addr.Bound(new InetSocketAddress(1010)))
    s1.close()
  })
  test("Respects Dtab.base changes after service factory creation") (new Ctx {
    // factory is already created here
    Dtab.base ++= Dtab.read("/test1010=>/$/inet/0/1011")
    val n1 = Dtab.read("/foo/bar=>/test1010")
    val s1 = newWith(n1)
    val v1 = Await.result(s1(()))
    assert(v1.sample() === Addr.Bound(new InetSocketAddress(1011)))
    s1.close()
  })
  test("Includes path in NoBrokersAvailableException") (new Ctx {
    val noBrokers = intercept[NoBrokersAvailableException] {
      Await.result(factory())
    }
    assert(noBrokers.name === "/foo/bar")
    assert(noBrokers.localDtab === Dtab.empty)
  })
  test("Includes path and Dtab.local in NoBrokersAvailableException from name resolution") (new Ctx {
    val localDtab = Dtab.read("/baz=>/quux")
    val noBrokers = intercept[NoBrokersAvailableException] {
      newWith(localDtab)
    }
    assert(noBrokers.name === "/foo/bar")
    assert(noBrokers.localDtab === localDtab)
  })
  test("Includes path and Dtab.local in NoBrokersAvailableException from service creation") {
    val localDtab = Dtab.read("/foo/bar=>/test1010")
    // Here the underlying factory itself fails, after the name resolved.
    val factory = new BindingFactory(
      Path.read("/foo/bar"),
      newFactory = { addr =>
        new ServiceFactory[Unit, Unit] {
          def apply(conn: ClientConnection) =
            Future.exception(new NoBrokersAvailableException("/foo/bar"))
          def close(deadline: Time) = Future.Done
        }
      })
    val noBrokers = intercept[NoBrokersAvailableException] {
      Dtab.unwind {
        Dtab.local = localDtab
        Await.result(factory())
      }
    }
    assert(noBrokers.name === "/foo/bar")
    assert(noBrokers.localDtab === localDtab)
  }
  test("Caches namers") (new Ctx {
    val n1 = Dtab.read("/foo/bar=>/$/inet/0/1")
    val n2 = Dtab.read("/foo/bar=>/$/inet/0/2")
    val n3 = Dtab.read("/foo/bar=>/$/inet/0/3")
    val n4 = Dtab.read("/foo/bar=>/$/inet/0/4")
    assert(news === 0)
    // Two acquisitions under the same Dtab share one cached factory.
    Await.result(newWith(n1).close() before newWith(n1).close())
    assert(news === 1)
    assert(closes === 0)
    val s2 = newWith(n2)
    assert(news === 2)
    assert(closes === 0)
    // This should evict n1
    val s3 = newWith(n3)
    assert(news === 3)
    assert(closes === 1)
    // n2, n3 are outstanding, so additional requests
    // should hit the one-shot path.
    val s1 = newWith(n1)
    assert(news === 4)
    assert(closes === 1)
    // Closing this should close the factory immediately.
    s1.close()
    assert(closes === 2)
    Await.result(newWith(n2).close() before newWith(n3).close())
    assert(news === 4)
    assert(closes === 2)
  })
  test("Caches names") (new Ctx {
    // n1 and n2 bind /foo/bar to the same address, so they share a cache entry.
    val n1 = Dtab.read("/foo/bar=>/$/inet/0/1; /bar/baz=>/$/nil")
    val n2 = Dtab.read("/foo/bar=>/$/inet/0/1")
    val n3 = Dtab.read("/foo/bar=>/$/inet/0/2")
    val n4 = Dtab.read("/foo/bar=>/$/inet/0/3")
    assert(news === 0)
    Await.result(newWith(n1).close() before newWith(n1).close())
    assert(news === 1)
    assert(closes === 0)
    Await.result(newWith(n2).close())
    assert(news === 1)
    assert(closes === 0)
    Await.result(newWith(n3).close())
    assert(news === 2)
    assert(closes === 0)
    // A third distinct bound name exceeds maxNameCacheSize=2 and evicts one entry.
    Await.result(newWith(n4).close())
    assert(news === 3)
    assert(closes === 1)
    Await.result(newWith(n3).close())
    assert(news === 3)
    assert(closes === 1)
    Await.result(newWith(n1).close())
    assert(news === 4)
    assert(closes === 2)
    Await.result(newWith(n2).close())
    assert(news === 4)
    assert(closes === 2)
  })
  test("BindingFactory.Module: filters with bound residual paths") {
    val module = new BindingFactory.Module[Path, Path] {
      protected[this] def boundPathFilter(path: Path) =
        Filter.mk { (in, service) => service(path ++ in) }
    }
    val name = Name.Bound(Var(Addr.Pending), "id", Path.read("/alpha"))
    val end = Stack.Leaf(Stack.Role("end"),
      ServiceFactory(() => Future.value(Service.mk[Path, Path](Future.value))))
    val params = Stack.Params.empty + BindingFactory.Dest(name)
    val factory = module.toStack(end).make(params)
    val service = Await.result(factory())
    // The residual path /alpha is prepended to the request path /omega.
    val full = Await.result(service(Path.read("/omega")))
    assert(full === Path.read("/alpha/omega"))
  }
}
@RunWith(classOf[JUnitRunner])
/**
 * Tests for `NamerTracingFilter`: verifies the trace annotations recorded for
 * the resolved path, base Dtab, and either the bound name's id or the failure.
 */
class NamerTracingFilterTest extends FunSuite {
  private trait Ctx {
    // Captures (key, value) trace records in order.
    var records = Seq.empty[(String, String)]
    def record(key: String, value: String) {
      records :+= key -> value
    }
    val addr = RandomSocket.nextAddress()
    val path = Path.read("/foo")
    val baseDtab = () => Dtab.read("/foo => /bar")
    val localDtab = Dtab.read("/bar => /baz")
    def mkName(id: Any) = Name.Bound(Var(Addr.Bound(addr)), id)
    // Runs f with localDtab installed as Dtab.local.
    def run(f: => Unit) {
      Dtab.unwind {
        Dtab.local = localDtab
        f
      }
    }
    // Asserts the exact record sequence: path, base dtab, then name or failure.
    def verifyRecord(nameOrFailure: Either[String, String]) {
      val expected = Seq(
        "namer.path" -> "/foo",
        "namer.dtab.base" -> "/foo=>/bar",
        nameOrFailure match {
          case Left(id) => "namer.name" -> id
          case Right(failure) => "namer.failure" -> failure
        }
      )
      expectResult(expected)(records)
    }
  }
  test("NamerTracingFilter.trace with string id")(new Ctx {
    run {
      NamerTracingFilter.trace(path, baseDtab(), Return(mkName("dat-name")), record)
      verifyRecord(Left("dat-name"))
    }
  })
  test("NamerTracingFilter.trace name with path id")(new Ctx {
    run {
      NamerTracingFilter.trace(path, baseDtab(), Return(mkName(Path.read("/foo/bar/baz"))), record)
      verifyRecord(Left("/foo/bar/baz"))
    }
  })
  test("NamerTracingFilter.trace name with object id")(new Ctx {
    run {
      // Non-string, non-path ids fall back to toString.
      NamerTracingFilter.trace(path, baseDtab(), Return(mkName(Some("foo"))), record)
      verifyRecord(Left("Some(foo)"))
    }
  })
  test("NamerTracingFilter.trace throwable")(new Ctx {
    run {
      NamerTracingFilter.trace(path, baseDtab(), Throw(new RuntimeException), record)
      verifyRecord(Right("java.lang.RuntimeException"))
    }
  })
  test("NamerTracingFilter.apply trace path/name with string id")(new Ctx {
    run {
      val filter = new NamerTracingFilter[Int, Int](path, baseDtab, mkName("dat-name"), record)
      val service = filter andThen Service.mk[Int, Int](Future.value(_))
      Await.result(service(3))
      verifyRecord(Left("dat-name"))
    }
  })
}
@RunWith(classOf[JUnitRunner])
/**
 * Tests for `DynNameFactory`: requests made while the name Activity is still
 * pending must queue, then succeed/fail/dequeue according to how the name
 * resolves or how the request is interrupted.
 */
class DynNameFactoryTest extends FunSuite with MockitoSugar {
  private trait Ctx {
    val newService = mock[(NameTree[Name.Bound], ClientConnection) => Future[Service[String, String]]]
    val svc = mock[Service[String, String]]
    // `name` is the Activity observed by the factory; `namew` lets tests push updates.
    val (name, namew) = Activity[NameTree[Name.Bound]]()
    val traceNamerFailure = mock[Throwable => Unit]
    val dyn = new DynNameFactory[String, String](name, newService, traceNamerFailure)
  }
  test("queue requests until name is nonpending (ok)")(new Ctx {
    when(newService(any[NameTree[Name.Bound]], any[ClientConnection])).thenReturn(Future.value(svc))
    val f1, f2 = dyn()
    assert(!f1.isDefined)
    assert(!f2.isDefined)
    // Resolving the name satisfies both queued requests.
    namew.notify(Return(NameTree.Leaf(Name.empty)))
    assert(f1.poll === Some(Return(svc)))
    assert(f2.poll === Some(Return(svc)))
    Await.result(f1)("foo")
    Await.result(f1)("bar")
    Await.result(f2)("baz")
    verify(traceNamerFailure, times(0))(any[Throwable])
  })
  test("queue requests until name is nonpending (fail)")(new Ctx {
    when(newService(any[NameTree[Name.Bound]], any[ClientConnection])).thenReturn(Future.never)
    val f1, f2 = dyn()
    assert(!f1.isDefined)
    assert(!f2.isDefined)
    val exc = new Exception
    // A name-resolution failure fails every queued request and is traced once per request.
    namew.notify(Throw(exc))
    assert(f1.poll === Some(Throw(exc)))
    assert(f2.poll === Some(Throw(exc)))
    verify(traceNamerFailure, times(2))(exc)
  })
  test("dequeue interrupted requests")(new Ctx {
    when(newService(any[NameTree[Name.Bound]], any[ClientConnection])).thenReturn(Future.never)
    val f1, f2 = dyn()
    assert(!f1.isDefined)
    assert(!f2.isDefined)
    val exc = new Exception
    f1.raise(exc)
    f1.poll match {
      case Some(Throw(cce: CancelledConnectionException)) =>
        assert(cce.getCause === exc)
        // no throw for cancel
        verify(traceNamerFailure, times(0))(any[Throwable])
      case _ => fail()
    }
    // The other queued request stays pending even after the name resolves
    // (newService never completes in this test).
    assert(f2.poll === None)
    namew.notify(Return(NameTree.Leaf(Name.empty)))
    assert(f2.poll === None)
  })
}
@RunWith(classOf[JUnitRunner])
/**
 * Tests for `NameTreeFactory`: weighted distribution over a NameTree's leaves
 * and availability as the conjunction of leaf availability.
 */
class NameTreeFactoryTest extends FunSuite {
  test("distributes requests according to weight") {
    val tree =
      NameTree.Union(
        NameTree.Weighted(1D, NameTree.Union(
          NameTree.Weighted(1D, NameTree.Leaf("foo")),
          NameTree.Weighted(1D, NameTree.Leaf("bar")))),
        NameTree.Weighted(1D, NameTree.Leaf("baz")))
    // Counts how often each leaf's factory is asked for a service.
    val counts = mutable.HashMap[String, Int]()
    val factoryCache = new ServiceFactoryCache[String, Unit, Unit](
      key => new ServiceFactory[Unit, Unit] {
        def apply(conn: ClientConnection): Future[Service[Unit, Unit]] = {
          val count = counts.getOrElse(key, 0)
          counts.put(key, count + 1)
          Future.value(null)
        }
        def close(deadline: Time) = Future.Done
      })
    // not the world's greatest test since it depends on the
    // implementation of Drv
    val rng = {
      // Scripted "random" values so leaf selection is deterministic.
      val ints = Array(0, 0, 0, 1, 1)
      var intIdx = 0
      new Rng {
        def nextDouble() =
          throw new UnsupportedOperationException
        def nextInt(n: Int) = {
          val i = ints(intIdx)
          intIdx += 1
          i
        }
        def nextInt() = ???
        def nextLong(n: Long) = ???
      }
    }
    val factory = NameTreeFactory(
      Path.empty,
      tree,
      factoryCache,
      rng)
    factory.apply(ClientConnection.nil)
    factory.apply(ClientConnection.nil)
    factory.apply(ClientConnection.nil)
    // With equal weights and the scripted rng, each leaf is hit exactly once.
    assert(counts("foo") == 1)
    assert(counts("bar") == 1)
    assert(counts("baz") == 1)
  }
  test("is available iff all leaves are available") {
    // Builds a factory whose per-leaf status is the leaf's own value.
    def isAvailable(tree: NameTree[Status]): Boolean =
      NameTreeFactory(
        Path.empty,
        tree,
        new ServiceFactoryCache[Status, Unit, Unit](
          key => new ServiceFactory[Unit, Unit] {
            def apply(conn: ClientConnection): Future[Service[Unit, Unit]] = Future.value(null)
            def close(deadline: Time) = Future.Done
            override def status = key
          })
      ).isAvailable
    // All leaves open -> available.
    assert(isAvailable(
      NameTree.Union(
        NameTree.Weighted(1D, NameTree.Union(
          NameTree.Weighted(1D, NameTree.Leaf(Status.Open)),
          NameTree.Weighted(1D, NameTree.Leaf(Status.Open)))),
        NameTree.Weighted(1D, NameTree.Leaf(Status.Open)))))
    // One closed leaf -> unavailable.
    assert(!isAvailable(
      NameTree.Union(
        NameTree.Weighted(1D, NameTree.Union(
          NameTree.Weighted(1D, NameTree.Leaf(Status.Open)),
          NameTree.Weighted(1D, NameTree.Leaf(Status.Closed)))),
        NameTree.Weighted(1D, NameTree.Leaf(Status.Open)))))
    // An empty subtree also makes the whole tree unavailable.
    assert(!isAvailable(
      NameTree.Union(
        NameTree.Weighted(1D, NameTree.Union(
          NameTree.Weighted(1D, NameTree.Leaf(Status.Open)),
          NameTree.Weighted(1D, NameTree.Leaf(Status.Open)))),
        NameTree.Weighted(1D, NameTree.Empty))))
  }
}
| kristofa/finagle | finagle-core/src/test/scala/com/twitter/finagle/factory/BindingFactoryTest.scala | Scala | apache-2.0 | 13,387 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.service.test
import java.util.concurrent.TimeUnit
import akka.actor._
import akka.pattern._
import akka.util.Timeout
import ch.qos.logback.classic.Level
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.HarnessConstants._
import com.webtrends.harness.app.Harness
import com.webtrends.harness.app.HarnessActor.{GetManagers, ReadyCheck, SystemReady}
import com.webtrends.harness.component.{Component, LoadComponent}
import com.webtrends.harness.service.Service
import com.webtrends.harness.service.messages.LoadService
import scala.concurrent.Await
import scala.concurrent.duration._
/**
 * Process-wide singleton holder for the test harness.
 *
 * NOTE(review): once a harness exists, subsequent `apply` calls return it and
 * silently IGNORE the new config/serviceMap/componentMap/logLevel arguments —
 * call `shutdown` first if a test needs a differently-configured harness.
 */
object TestHarness {
  var harness:Option[TestHarness] = None
  /**
   * Create a new instance of the test harness and start all of it's components
   *
   * @param config the config to use
   * @param serviceMap services to load, keyed by name
   * @param componentMap components to load, keyed by name
   * @param logLevel logback level applied after startup
   * @return the (possibly pre-existing) harness
   */
  def apply(config:Config,
            serviceMap:Option[Map[String, Class[_ <: Service]]]=None,
            componentMap:Option[Map[String, Class[_ <: Component]]]=None, logLevel:Level=Level.INFO) : TestHarness = {
    harness match {
      case Some(h) => h
      case None =>
        harness = Some(new TestHarness(config).start(serviceMap, componentMap, logLevel))
        harness.get
    }
  }
  // Convenience accessors delegating to the underlying Harness singleton.
  def system = Harness.getActorSystem
  def log = Harness.getLogger
  def rootActor = Harness.getRootActor
  // Stops the current harness if one exists; no-op otherwise.
  // NOTE(review): does not reset `harness` to None — presumably intentional; confirm.
  def shutdown = harness match {
    case Some(h) => h.stop
    case None => // ignore
  }
}
/**
 * Boots a Wookiee harness for tests: starts the actor system, resolves the
 * core manager actors, and loads the requested components and services.
 * All startup steps block via Await with short timeouts, which is acceptable
 * in test code.
 */
class TestHarness(conf:Config) {
  // Loaded services/components by name, populated during start().
  var services = Map[String, ActorRef]()
  var components = Map[String, ActorRef]()
  var serviceManager: Option[ActorRef] = None
  var componentManager: Option[ActorRef] = None
  var commandManager: Option[ActorRef] = None
  var policyManager: Option[ActorRef] = None
  // Layer the supplied config over the defaults, then flatten the
  // "wookiee-system" subtree into the root scope and resolve substitutions.
  var config = conf.withFallback(defaultConfig)
  config = config.withFallback(config.getConfig("wookiee-system")).resolve()
  implicit val timeout = Timeout(4000, TimeUnit.MILLISECONDS)
  /**
   * Starts the actor system, waits for the root actor to report ready,
   * captures the manager actor refs, then loads components and services
   * and signals SystemReady.
   */
  def start(serviceMap:Option[Map[String, Class[_ <: Service]]]=None,
            componentMap:Option[Map[String, Class[_ <: Component]]]=None, logLevel:Level=Level.ERROR) : TestHarness = {
    Harness.externalLogger.info("Starting Harness...")
    Harness.externalLogger.info(s"Test Harness Config: ${config.toString}")
    Harness.addShutdownHook
    Harness.startActorSystem(Some(config))
    // after we have started the TestHarness we need to set the serviceManager, ComponentManager and CommandManager from the Harness
    harnessReadyCheck(10.seconds.fromNow)
    Await.result(TestHarness.rootActor.get ? GetManagers, 5.seconds) match {
      case m =>
        val map = m.asInstanceOf[Map[String, ActorRef]]
        serviceManager = map.get(ServicesName)
        policyManager = map.get(PolicyName)
        commandManager = map.get(CommandName)
        componentManager = map.get(ComponentName)
        TestHarness.log.get.info("Managers all accounted for")
    }
    setLogLevel(logLevel)
    if (componentMap.isDefined) {
      loadComponents(componentMap.get)
    }
    if (serviceMap.isDefined) {
      loadServices(serviceMap.get)
    }
    componentManager.foreach {_ ! SystemReady}
    serviceManager.foreach {_ ! SystemReady}
    this
  }
  // Shuts down the actor system asynchronously, then pauses briefly.
  def stop = {
    Harness.shutdownActorSystem(block = false) {
      // wait a second to make sure it shutdown correctly
      Thread.sleep(1000)
    }
  }
  def setLogLevel(level:Level) = {
    TestHarness.log.get.setLogLevel(level)
  }
  /**
   * Polls the root actor with ReadyCheck until it reports ready or the
   * deadline passes; throws if the harness never comes up.
   * NOTE(review): busy-polls with no sleep — each iteration is bounded only
   * by the 10s ask timeout.
   */
  def harnessReadyCheck(timeOut: Deadline) {
    while(!timeOut.isOverdue() && !Await.result(TestHarness.rootActor.get ? ReadyCheck, 10.seconds).asInstanceOf[Boolean]) {
    }
    if (timeOut.isOverdue()) {
      throw new IllegalStateException("HarnessActor did not start up")
    }
  }
  def getService(service: String): Option[ActorRef] = {
    services.get(service)
  }
  def getComponent(component: String): Option[ActorRef] = {
    components.get(component)
  }
  def loadComponents(componentMap: Map[String, Class[_ <: Component]]) = {
    componentMap foreach { p =>
      componentReady(5.seconds.fromNow, p._1, p._2.getCanonicalName)
    }
  }
  def loadServices(serviceMap: Map[String, Class[_ <: Service]]) = {
    serviceMap foreach { p =>
      serviceReady(5.seconds.fromNow, p._1, p._2)
    }
  }
  // Asks the component manager to load one component, recording its ActorRef.
  private def componentReady(timeOut: Deadline, componentName: String, componentClass: String) {
    if (timeOut.isOverdue()) {
      throw new IllegalStateException(s"Component $componentName did not start up")
    }
    Await.result(componentManager.get ? LoadComponent(componentName, componentClass), 5.seconds) match {
      case Some(m) =>
        val component = m.asInstanceOf[ActorRef]
        TestHarness.log.get.info(s"Loaded component $componentName, ${component.path.toString}")
        components += (componentName -> component)
      case None =>
        throw new Exception("Component not returned")
    }
  }
  // Asks the service manager to load one service, recording its ActorRef.
  private def serviceReady(timeOut: Deadline, serviceName: String, serviceClass: Class[_ <: Service]) {
    if (timeOut.isOverdue()) {
      throw new IllegalStateException(s"Service $serviceName did not start up")
    }
    Await.result(serviceManager.get ? LoadService(serviceName, serviceClass), 3.seconds) match {
      case Some(m) =>
        val service = m.asInstanceOf[ActorRef]
        TestHarness.log.get.info(s"Loaded service $serviceName, ${service.path.toString}")
        services += (serviceName -> service)
      case None =>
        throw new Exception("Service not returned")
    }
  }
  // Baseline test configuration merged under any user-supplied config.
  def defaultConfig : Config = {
    ConfigFactory.parseString(
      """
        wookiee-system {
          prepare-to-shutdown-timeout = 1
        }
        services {
          path = ""
          distinct-classloader = false
        }
        components {
          path = ""
        }
        test-mode = true
        internal-http {
          enabled = false
        }
        # CIDR Rules
        cidr-rules {
          # This is a list of IP ranges to allow through. Can be empty.
          allow = ["127.0.0.1/30", "10.0.0.0/8"]
          # This is a list of IP ranges to specifically deny access. Can be empty.
          deny = []
        }
        message-processor {
          # How often the MessageProcessor should share it's subscription information
          share-interval = 1s
          # When should MessageTopicProcessor instances be removed after there are no longer any subscribers for that topic
          trash-interval = 30s
          # The default send timeout
          default-send-timeout = 2s
        }
        commands {
          # generally this should be enabled
          enabled = true
          default-nr-routees = 5
        }
      """)
  }
}
| mjwallin1/wookiee | wookiee-test/src/main/scala/com/webtrends/harness/service/test/TestHarness.scala | Scala | apache-2.0 | 7,422 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.math.BigDecimal
import java.sql.{Date, DriverManager, SQLException, Timestamp}
import java.util.{Calendar, GregorianCalendar, Properties}
import org.h2.jdbc.JdbcSQLException
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.apache.spark.SparkException
import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.execution.DataSourceScanExec
import org.apache.spark.sql.execution.command.ExplainCommand
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JDBCPartition, JDBCRDD, JDBCRelation, JdbcUtils}
import org.apache.spark.sql.execution.metric.InputOutputMetricsHelper
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class JDBCSuite extends QueryTest
with BeforeAndAfter with PrivateMethodTester with SharedSQLContext {
  import testImplicits._

  // In-memory H2 database shared by all tests; recreated in `before` for each test.
  val url = "jdbc:h2:mem:testdb0"
  val urlWithUserAndPass = "jdbc:h2:mem:testdb0;user=testUser;password=testPass"
  // JDBC connection opened in `before` and closed in `after`.
  var conn: java.sql.Connection = null

  // Raw bytes used to populate and verify BINARY/BLOB columns in TEST.STRTYPES.
  val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte)

  // Dialect that maps every JDBC column type to StringType; used by the
  // dialect registration/unregistration and type-remapping tests.
  val testH2Dialect = new JdbcDialect {
    override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2")
    override def getCatalystType(
        sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
      Some(StringType)
  }
  // Per-test setup: (re)creates the H2 schema, populates every fixture table,
  // and registers the temporary views the tests query. The DDL/DML statements
  // are order-dependent (tables must exist before inserts and views).
  before {
    Utils.classForName("org.h2.Driver")
    // Extra properties that will be specified for our database. We need these to test
    // usage of parameters from OPTIONS clause in queries.
    val properties = new Properties()
    properties.setProperty("user", "testUser")
    properties.setProperty("password", "testPass")
    // rowId=false suppresses H2's implicit _ROWID_ column; exercised by the
    // "Pass extra properties via OPTIONS" test below.
    properties.setProperty("rowId", "false")

    conn = DriverManager.getConnection(url, properties)
    conn.prepareStatement("create schema test").executeUpdate()

    // TEST.PEOPLE: basic two-column table with a quoted-string row.
    conn.prepareStatement(
      "create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)").executeUpdate()
    conn.prepareStatement("insert into test.people values ('fred', 1)").executeUpdate()
    conn.prepareStatement("insert into test.people values ('mary', 2)").executeUpdate()
    conn.prepareStatement(
      "insert into test.people values ('joe ''foo'' \\"bar\\"', 3)").executeUpdate()
    conn.commit()

    // Views over TEST.PEOPLE: plain, with fetchsize, partitioned, and
    // partitioned with bounds that overflow a Long difference.
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW foobar
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW fetchtwo
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        |         ${JDBCOptions.JDBC_BATCH_FETCH_SIZE} '2')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW parts
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        |         partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW partsoverflow
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        |         partitionColumn 'THEID', lowerBound '-9223372036854775808',
        |         upperBound '9223372036854775807', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.INTTYPES: one fully-populated row plus one all-null row.
    conn.prepareStatement("create table test.inttypes (a INT, b BOOLEAN, c TINYINT, "
      + "d SMALLINT, e BIGINT)").executeUpdate()
    conn.prepareStatement("insert into test.inttypes values (1, false, 3, 4, 1234567890123)"
      ).executeUpdate()
    conn.prepareStatement("insert into test.inttypes values (null, null, null, null, null)"
      ).executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW inttypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.STRTYPES: binary/string/blob/clob columns populated via a
    // parameterized statement.
    conn.prepareStatement("create table test.strtypes (a BINARY(20), b VARCHAR(20), "
      + "c VARCHAR_IGNORECASE(20), d CHAR(20), e BLOB, f CLOB)").executeUpdate()
    val stmt = conn.prepareStatement("insert into test.strtypes values (?, ?, ?, ?, ?, ?)")
    stmt.setBytes(1, testBytes)
    stmt.setString(2, "Sensitive")
    stmt.setString(3, "Insensitive")
    stmt.setString(4, "Twenty-byte CHAR")
    stmt.setBytes(5, testBytes)
    stmt.setString(6, "I am a clob!")
    stmt.executeUpdate()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW strtypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.TIMETYPES: TIME/DATE/TIMESTAMP, including a null DATE.
    conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP)"
      ).executeUpdate()
    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
      + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW timetypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.TIMEZONE and TEST.ARRAY hold types Spark cannot map; used by
    // unsupported-type tests elsewhere in the suite.
    conn.prepareStatement("CREATE TABLE test.timezone (tz TIMESTAMP WITH TIME ZONE) " +
      "AS SELECT '1999-01-08 04:05:06.543543543 GMT-08:00'")
      .executeUpdate()
    conn.commit()
    conn.prepareStatement("CREATE TABLE test.array (ar ARRAY) " +
      "AS SELECT '(1, 2, 3)'")
      .executeUpdate()
    conn.commit()

    // TEST.FLTTYPES: values chosen to stress double/real/decimal precision.
    conn.prepareStatement("create table test.flttypes (a DOUBLE, b REAL, c DECIMAL(38, 18))"
      ).executeUpdate()
    conn.prepareStatement("insert into test.flttypes values ("
      + "1.0000000000000002220446049250313080847263336181640625, "
      + "1.00000011920928955078125, "
      + "123456789012345.543215432154321)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW flttypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.NULLTYPES: one row of nulls across every supported column type.
    conn.prepareStatement(
      s"""
        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
        |m DOUBLE, n REAL, o DECIMAL(38, 18))
      """.stripMargin.replaceAll("\\n", " ")).executeUpdate()
    conn.prepareStatement("insert into test.nulltypes values ("
      + "null, null, null, null, null, null, null, null, null, "
      + "null, null, null, null, null, null)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW nulltypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.EMP: nullable and case-sensitive ("Dept") columns for
    // null-partition-column tests.
    conn.prepareStatement(
      "create table test.emp(name TEXT(32) NOT NULL," +
        " theid INTEGER, \\"Dept\\" INTEGER)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('fred', 1, 10)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('mary', 2, null)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('joe ''foo'' \\"bar\\"', 3, 30)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('kathy', null, null)").executeUpdate()
    conn.commit()

    // TEST.SEQ: ids 0..6 plus a null, for partition-bound edge cases.
    conn.prepareStatement(
      "create table test.seq(id INTEGER)").executeUpdate()
    (0 to 6).foreach { value =>
      conn.prepareStatement(
        s"insert into test.seq values ($value)").executeUpdate()
    }
    conn.prepareStatement(
      "insert into test.seq values (null)").executeUpdate()
    conn.commit()

    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW nullparts
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.EMP', user 'testUser', password 'testPass',
        |partitionColumn '"Dept"', lowerBound '1', upperBound '4', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST."mixedCaseCols": quoted mixed-case identifiers.
    conn.prepareStatement(
      """create table test."mixedCaseCols" ("Name" TEXT(32), "Id" INTEGER NOT NULL)""")
      .executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values ('fred', 1)""").executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values ('mary', 2)""").executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values (null, 3)""").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW mixedCaseCols
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST."mixedCaseCols"', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    // TEST.PARTITION: column name containing a space (backtick-quoted).
    conn.prepareStatement("CREATE TABLE test.partition (THEID INTEGER, `THE ID` INTEGER) " +
      "AS SELECT 1, 1")
      .executeUpdate()
    conn.commit()

    // TEST.DATETIME: date/timestamp rows for date-based filtering tests.
    conn.prepareStatement("CREATE TABLE test.datetime (d DATE, t TIMESTAMP)").executeUpdate()
    conn.prepareStatement(
      "INSERT INTO test.datetime VALUES ('2018-07-06', '2018-07-06 05:50:00.0')").executeUpdate()
    conn.prepareStatement(
      "INSERT INTO test.datetime VALUES ('2018-07-06', '2018-07-06 08:10:08.0')").executeUpdate()
    conn.prepareStatement(
      "INSERT INTO test.datetime VALUES ('2018-07-08', '2018-07-08 13:32:01.0')").executeUpdate()
    conn.prepareStatement(
      "INSERT INTO test.datetime VALUES ('2018-07-12', '2018-07-12 09:51:15.0')").executeUpdate()
    conn.commit()

    // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
  }
  // Per-test teardown: closing the connection drops the in-memory H2 database
  // (default H2 behavior for mem: URLs), giving each test a fresh state.
  after {
    conn.close()
  }
// Check whether the tables are fetched in the expected degree of parallelism
def checkNumPartitions(df: DataFrame, expectedNumPartitions: Int): Unit = {
val jdbcRelations = df.queryExecution.analyzed.collect {
case LogicalRelation(r: JDBCRelation, _, _, _) => r
}
assert(jdbcRelations.length == 1)
assert(jdbcRelations.head.parts.length == expectedNumPartitions,
s"Expecting a JDBCRelation with $expectedNumPartitions partitions, but got:`$jdbcRelations`")
}
private def checkPushdown(df: DataFrame): DataFrame = {
val parentPlan = df.queryExecution.executedPlan
// Check if SparkPlan Filter is removed in a physical plan and
// the plan only has PhysicalRDD to scan JDBCRelation.
assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
assert(node.child.isInstanceOf[org.apache.spark.sql.execution.DataSourceScanExec])
assert(node.child.asInstanceOf[DataSourceScanExec].nodeName.contains("JDBCRelation"))
df
}
private def checkNotPushdown(df: DataFrame): DataFrame = {
val parentPlan = df.queryExecution.executedPlan
// Check if SparkPlan Filter is not removed in a physical plan because JDBCRDD
// cannot compile given predicates.
assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
assert(node.child.isInstanceOf[org.apache.spark.sql.execution.FilterExec])
df
}
  // All three rows of TEST.PEOPLE come back through the foobar view.
  test("SELECT *") {
    assert(sql("SELECT * FROM foobar").collect().size === 3)
  }

  // Each simple predicate should be pushed down to H2 (checkPushdown), while
  // arithmetic predicates should not be (checkNotPushdown).
  test("SELECT * WHERE (simple predicates)") {
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID < 1")).collect().size == 0)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID != 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME = 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME <=> 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME > 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME != 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME IN ('mary', 'fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME NOT IN ('fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary'"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary' "
      + "AND THEID = 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE 'fr%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%ed'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%re%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NULL")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NOT NULL")).collect().size == 0)

    // This is a test to reflect discussion in SPARK-12218.
    // The older versions of spark have this kind of bugs in parquet data source.
    val df1 = sql("SELECT * FROM foobar WHERE NOT (THEID != 2) OR NOT (NAME != 'mary')")
    assert(df1.collect.toSet === Set(Row("mary", 2)))

    // SPARK-22548: Incorrect nested AND expression pushed down to JDBC data source
    val df2 = sql("SELECT * FROM foobar " +
      "WHERE (THEID > 0 AND TRIM(NAME) = 'mary') OR (NAME = 'fred')")
    assert(df2.collect.toSet === Set(Row("fred", 1), Row("mary", 2)))

    // Arithmetic on the column prevents pushdown; results must still be correct.
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 1) < 2")).collect().size == 0)
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 2) != 4")).collect().size == 2)
  }
  test("SELECT COUNT(1) WHERE (predicates)") {
    // Check if an answer is correct when Filter is removed from operations such as count() which
    // does not require any columns. In some data sources, e.g., Parquet, `requiredColumns` in
    // org.apache.spark.sql.sources.interfaces is not given in logical plans, but some filters
    // are applied for columns with Filter producing wrong results. On the other hand, JDBCRDD
    // correctly handles this case by assigning `requiredColumns` properly. See PR 10427 for more
    // discussions.
    assert(sql("SELECT COUNT(1) FROM foobar WHERE NAME = 'mary'").collect.toSet === Set(Row(1)))
  }

  // Values containing single and double quotes must round-trip through the filter.
  test("SELECT * WHERE (quoted strings)") {
    assert(sql("select * from foobar").where('NAME === "joe 'foo' \\"bar\\"").collect().size === 1)
  }

  // Column projection of the NAME column returns the three inserted names.
  test("SELECT first field") {
    val names = sql("SELECT NAME FROM foobar").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }

  // Same projection through the view configured with fetchsize=2.
  test("SELECT first field when fetchsize is two") {
    val names = sql("SELECT NAME FROM fetchtwo").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }

  // Projection of the integer THEID column.
  test("SELECT second field") {
    val ids = sql("SELECT THEID FROM foobar").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }

  // Same integer projection through the fetchsize=2 view.
  test("SELECT second field when fetchsize is two") {
    val ids = sql("SELECT THEID FROM fetchtwo").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }
  // The `parts` view is configured with numPartitions=3; verify both the
  // partition count and that all rows are still returned exactly once.
  test("SELECT * partitioned") {
    val df = sql("SELECT * FROM parts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }

  // Predicates over a partitioned view must not duplicate or drop rows.
  test("SELECT WHERE (simple predicates) partitioned") {
    val df1 = sql("SELECT * FROM parts WHERE THEID < 1")
    checkNumPartitions(df1, expectedNumPartitions = 3)
    assert(df1.collect().length === 0)

    val df2 = sql("SELECT * FROM parts WHERE THEID != 2")
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 2)

    val df3 = sql("SELECT THEID FROM parts WHERE THEID = 1")
    checkNumPartitions(df3, expectedNumPartitions = 3)
    assert(df3.collect().length === 1)
  }

  // Column projection over the partitioned view.
  test("SELECT second field partitioned") {
    val ids = sql("SELECT THEID FROM parts").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }

  // `partsoverflow` uses Long.MinValue/Long.MaxValue bounds whose difference
  // overflows a Long; the stride computation must not go negative.
  test("overflow of partition bound difference does not give negative stride") {
    val df = sql("SELECT * FROM partsoverflow")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }
  test("Register JDBC query with renamed fields") {
    // Regression test for bug SPARK-7345
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW renamed
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(select NAME as NAME1, NAME as NAME2 from TEST.PEOPLE)',
        |user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    val df = sql("SELECT * FROM renamed")
    assert(df.schema.fields.size == 2)
    assert(df.schema.fields(0).name == "NAME1")
    assert(df.schema.fields(1).name == "NAME2")
  }

  // Programmatic DataFrameReader.jdbc entry point (no SQL view).
  test("Basic API") {
    assert(spark.read.jdbc(
      urlWithUserAndPass, "TEST.PEOPLE", new Properties()).collect().length === 3)
  }

  // A negative fetchsize must be rejected with a descriptive error.
  test("Basic API with illegal fetchsize") {
    val properties = new Properties()
    properties.setProperty(JDBCOptions.JDBC_BATCH_FETCH_SIZE, "-1")
    val e = intercept[IllegalArgumentException] {
      spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
    }.getMessage
    assert(e.contains("Invalid value `-1` for parameter `fetchsize`"))
  }

  // Supplying bounds/numPartitions without partitionColumn is an error:
  // the four partitioning options must be given all-or-none.
  test("Missing partition columns") {
    withView("tempPeople") {
      val e = intercept[IllegalArgumentException] {
        sql(
          s"""
            |CREATE OR REPLACE TEMPORARY VIEW tempPeople
            |USING org.apache.spark.sql.jdbc
            |OPTIONS (
            |  url 'jdbc:h2:mem:testdb0;user=testUser;password=testPass',
            |  dbtable 'TEST.PEOPLE',
            |  lowerBound '0',
            |  upperBound '52',
            |  numPartitions '53',
            |  fetchSize '10000' )
          """.stripMargin.replaceAll("\\n", " "))
      }.getMessage
      assert(e.contains("When reading JDBC data sources, users need to specify all or none " +
        "for the following options: 'partitionColumn', 'lowerBound', 'upperBound', and " +
        "'numPartitions'"))
    }
  }

  // Fetch sizes 0..4 (including 0 = driver default) all return every row.
  test("Basic API with FetchSize") {
    (0 to 4).foreach { size =>
      val properties = new Properties()
      properties.setProperty(JDBCOptions.JDBC_BATCH_FETCH_SIZE, size.toString)
      assert(spark.read.jdbc(
        urlWithUserAndPass, "TEST.PEOPLE", properties).collect().length === 3)
    }
  }
  // Column-based partitioning via the (column, lower, upper, numPartitions) API.
  test("Partitioning via JDBCPartitioningInfo API") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 3)
  }

  // Explicit WHERE-clause-per-partition API; clauses must cover all rows.
  test("Partitioning via list-of-where-clauses API") {
    val parts = Array[String]("THEID < 2", "THEID >= 2")
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 2)
    assert(df.collect().length === 3)
  }

  // Rows whose partition column is NULL must still be returned (TEST.EMP
  // has null theid and "Dept" values).
  test("Partitioning on column that might have null values.") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "theid", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 4)

    val df2 = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 4)

    // partitioning on a nullable quoted column
    assert(
      spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", """"Dept"""", 0, 4, 3, new Properties())
        .collect().length === 4)
  }

  // numPartitions=0 degrades to a single partition instead of failing.
  test("Partitioning on column where numPartitions is zero") {
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 0,
      upperBound = 4,
      numPartitions = 0,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }

  // Requesting more partitions than the bound range supports is capped
  // (range [1,5) yields at most 4 partitions).
  test("Partitioning on column where numPartitions are more than the number of total rows") {
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 1,
      upperBound = 5,
      numPartitions = 10,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 4)
    assert(res.count() === 8)
  }

  // Equal bounds collapse to a single partition; all rows still returned.
  test("Partitioning on column where lowerBound is equal to upperBound") {
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 5,
      upperBound = 5,
      numPartitions = 4,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }

  // Inverted bounds are rejected eagerly with a clear message.
  test("Partitioning on column where lowerBound is larger than upperBound") {
    val e = intercept[IllegalArgumentException] {
      spark.read.jdbc(
        url = urlWithUserAndPass,
        table = "TEST.seq",
        columnName = "id",
        lowerBound = 5,
        upperBound = 1,
        numPartitions = 3,
        connectionProperties = new Properties()
      )
    }.getMessage
    assert(e.contains("Operation not allowed: the lower bound of partitioning column " +
      "is larger than the upper bound. Lower bound: 5; Upper bound: 1"))
  }
  // The `nullparts` view partitions TEST.EMP on the nullable "Dept" column;
  // all four rows (including the null-Dept ones) must come back.
  test("SELECT * on partitioned table with a nullable partition column") {
    val df = sql("SELECT * FROM nullparts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 4)
  }

  // INT/BOOLEAN/TINYINT/SMALLINT/BIGINT map to the expected Catalyst types.
  test("H2 integral types") {
    val rows = sql("SELECT * FROM inttypes WHERE A IS NOT NULL").collect()
    assert(rows.length === 1)
    assert(rows(0).getInt(0) === 1)
    assert(rows(0).getBoolean(1) === false)
    assert(rows(0).getInt(2) === 3)
    assert(rows(0).getInt(3) === 4)
    assert(rows(0).getLong(4) === 1234567890123L)
  }

  // A row of SQL NULLs surfaces as nulls in every column.
  test("H2 null entries") {
    val rows = sql("SELECT * FROM inttypes WHERE A IS NULL").collect()
    assert(rows.length === 1)
    assert(rows(0).isNullAt(0))
    assert(rows(0).isNullAt(1))
    assert(rows(0).isNullAt(2))
    assert(rows(0).isNullAt(3))
    assert(rows(0).isNullAt(4))
  }

  // BINARY/VARCHAR/CHAR/BLOB/CLOB round-trip the values inserted in `before`.
  test("H2 string types") {
    val rows = sql("SELECT * FROM strtypes").collect()
    assert(rows(0).getAs[Array[Byte]](0).sameElements(testBytes))
    assert(rows(0).getString(1).equals("Sensitive"))
    assert(rows(0).getString(2).equals("Insensitive"))
    assert(rows(0).getString(3).equals("Twenty-byte CHAR"))
    assert(rows(0).getAs[Array[Byte]](4).sameElements(testBytes))
    assert(rows(0).getString(5).equals("I am a clob!"))
  }

  // TIME/DATE/TIMESTAMP columns: verify each calendar field, plus that
  // nanosecond precision is truncated to microseconds (543543543 -> 543543000).
  test("H2 time types") {
    val rows = sql("SELECT * FROM timetypes").collect()
    val cal = new GregorianCalendar(java.util.Locale.ROOT)
    cal.setTime(rows(0).getAs[java.sql.Timestamp](0))
    assert(cal.get(Calendar.HOUR_OF_DAY) === 12)
    assert(cal.get(Calendar.MINUTE) === 34)
    assert(cal.get(Calendar.SECOND) === 56)
    cal.setTime(rows(0).getAs[java.sql.Timestamp](1))
    assert(cal.get(Calendar.YEAR) === 1996)
    assert(cal.get(Calendar.MONTH) === 0)
    assert(cal.get(Calendar.DAY_OF_MONTH) === 1)
    cal.setTime(rows(0).getAs[java.sql.Timestamp](2))
    assert(cal.get(Calendar.YEAR) === 2002)
    assert(cal.get(Calendar.MONTH) === 1)
    assert(cal.get(Calendar.DAY_OF_MONTH) === 20)
    assert(cal.get(Calendar.HOUR) === 11)
    assert(cal.get(Calendar.MINUTE) === 22)
    assert(cal.get(Calendar.SECOND) === 33)
    assert(rows(0).getAs[java.sql.Timestamp](2).getNanos === 543543000)
  }

  // DATE values (including null) behave the same whether or not cached.
  test("test DATE types") {
    val rows = spark.read.jdbc(
      urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
    val cachedRows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
      .cache().collect()
    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    assert(rows(1).getAs[java.sql.Date](1) === null)
    assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
  }

  // DATE values survive caching through a temp view as well.
  test("test DATE types in cache") {
    val rows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
    spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
      .cache().createOrReplaceTempView("mycached_date")
    val cachedRows = sql("select * from mycached_date").collect()
    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
  }

  // All 15 columns of TEST.NULLTYPES are null in the single fixture row.
  test("test types for null value") {
    val rows = spark.read.jdbc(
      urlWithUserAndPass, "TEST.NULLTYPES", new Properties()).collect()
    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
  }
  // DOUBLE/REAL/DECIMAL(38,18) retain full precision through the JDBC read.
  test("H2 floating-point types") {
    val rows = sql("SELECT * FROM flttypes").collect()
    assert(rows(0).getDouble(0) === 1.00000000000000022)
    assert(rows(0).getDouble(1) === 1.00000011920928955)
    assert(rows(0).getAs[BigDecimal](2) ===
      new BigDecimal("123456789012345.543215432154321000"))
    assert(rows(0).schema.fields(2).dataType === DecimalType(38, 18))
    val result = sql("SELECT C FROM flttypes where C > C - 1").collect()
    assert(result(0).getAs[BigDecimal](0) ===
      new BigDecimal("123456789012345.543215432154321000"))
  }

  // dbtable may be an arbitrary parenthesized subquery instead of a table name.
  test("SQL query as table name") {
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW hack
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(SELECT B, B*B FROM TEST.FLTTYPES)',
        |         user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    val rows = sql("SELECT * FROM hack").collect()
    assert(rows(0).getDouble(0) === 1.00000011920928955) // Yes, I meant ==.
    // For some reason, H2 computes this square incorrectly...
    assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
  }

  test("Pass extra properties via OPTIONS") {
    // We set rowId to false during setup, which means that _ROWID_ column should be absent from
    // all tables. If rowId is true (default), the query below doesn't throw an exception.
    intercept[JdbcSQLException] {
      sql(
        s"""
          |CREATE OR REPLACE TEMPORARY VIEW abc
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (url '$url', dbtable '(SELECT _ROWID_ FROM test.people)',
          |         user 'testUser', password 'testPass')
        """.stripMargin.replaceAll("\\n", " "))
    }
  }
  // With testH2Dialect registered, every column is remapped to StringType.
  // The dialect is unregistered at the end to avoid leaking into other tests.
  test("Remap types via JdbcDialects") {
    JdbcDialects.registerDialect(testH2Dialect)
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    assert(df.schema.filter(_.dataType != org.apache.spark.sql.types.StringType).isEmpty)
    val rows = df.collect()
    assert(rows(0).get(0).isInstanceOf[String])
    assert(rows(0).get(1).isInstanceOf[String])
    JdbcDialects.unregisterDialect(testH2Dialect)
  }

  // Each built-in dialect is selected by its JDBC URL prefix; unknown URLs
  // fall through to NoopDialect.
  test("Default jdbc dialect registration") {
    assert(JdbcDialects.get("jdbc:mysql://127.0.0.1/db") == MySQLDialect)
    assert(JdbcDialects.get("jdbc:postgresql://127.0.0.1/db") == PostgresDialect)
    assert(JdbcDialects.get("jdbc:db2://127.0.0.1/db") == DB2Dialect)
    assert(JdbcDialects.get("jdbc:sqlserver://127.0.0.1/db") == MsSqlServerDialect)
    assert(JdbcDialects.get("jdbc:derby:db") == DerbyDialect)
    assert(JdbcDialects.get("test.invalid") == NoopDialect)
  }

  // MySQL uses backtick quoting; Postgres and Derby use double quotes.
  test("quote column names by jdbc dialect") {
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val Derby = JdbcDialects.get("jdbc:derby:db")

    val columns = Seq("abc", "key")
    val MySQLColumns = columns.map(MySQL.quoteIdentifier(_))
    val PostgresColumns = columns.map(Postgres.quoteIdentifier(_))
    val DerbyColumns = columns.map(Derby.quoteIdentifier(_))
    assert(MySQLColumns === Seq("`abc`", "`key`"))
    assert(PostgresColumns === Seq(""""abc"""", """"key""""))
    assert(DerbyColumns === Seq(""""abc"""", """"key""""))
  }
  // Exercises JDBCRDD's private filter-to-SQL compiler via PrivateMethodTester,
  // covering every supported Filter shape and its generated WHERE fragment.
  test("compile filters") {
    val compileFilter = PrivateMethod[Option[String]]('compileFilter)
    // Uncompilable filters yield None, rendered here as the empty string.
    def doCompileFilter(f: Filter): String =
      JDBCRDD invokePrivate compileFilter(f, JdbcDialects.get("jdbc:")) getOrElse("")
    assert(doCompileFilter(EqualTo("col0", 3)) === """"col0" = 3""")
    assert(doCompileFilter(Not(EqualTo("col1", "abc"))) === """(NOT ("col1" = 'abc'))""")
    assert(doCompileFilter(And(EqualTo("col0", 0), EqualTo("col1", "def")))
      === """("col0" = 0) AND ("col1" = 'def')""")
    assert(doCompileFilter(Or(EqualTo("col0", 2), EqualTo("col1", "ghi")))
      === """("col0" = 2) OR ("col1" = 'ghi')""")
    assert(doCompileFilter(LessThan("col0", 5)) === """"col0" < 5""")
    assert(doCompileFilter(LessThan("col3",
      Timestamp.valueOf("1995-11-21 00:00:00.0"))) === """"col3" < '1995-11-21 00:00:00.0'""")
    assert(doCompileFilter(LessThan("col4", Date.valueOf("1983-08-04")))
      === """"col4" < '1983-08-04'""")
    assert(doCompileFilter(LessThanOrEqual("col0", 5)) === """"col0" <= 5""")
    assert(doCompileFilter(GreaterThan("col0", 3)) === """"col0" > 3""")
    assert(doCompileFilter(GreaterThanOrEqual("col0", 3)) === """"col0" >= 3""")
    assert(doCompileFilter(In("col1", Array("jkl"))) === """"col1" IN ('jkl')""")
    // An empty IN list is compiled to a NULL-preserving FALSE expression.
    assert(doCompileFilter(In("col1", Array.empty)) ===
      """CASE WHEN "col1" IS NULL THEN NULL ELSE FALSE END""")
    assert(doCompileFilter(Not(In("col1", Array("mno", "pqr"))))
      === """(NOT ("col1" IN ('mno', 'pqr')))""")
    assert(doCompileFilter(IsNull("col1")) === """"col1" IS NULL""")
    assert(doCompileFilter(IsNotNull("col1")) === """"col1" IS NOT NULL""")
    // EqualNullSafe expands to the SQL-standard null-safe equality expression.
    assert(doCompileFilter(And(EqualNullSafe("col0", "abc"), EqualTo("col1", "def")))
      === """((NOT ("col0" != 'abc' OR "col0" IS NULL OR 'abc' IS NULL) """
        + """OR ("col0" IS NULL AND 'abc' IS NULL))) AND ("col1" = 'def')""")
  }

  // After unregistering the custom dialect, H2 URLs fall back to NoopDialect.
  test("Dialect unregister") {
    JdbcDialects.registerDialect(testH2Dialect)
    JdbcDialects.unregisterDialect(testH2Dialect)
    assert(JdbcDialects.get(urlWithUserAndPass) == NoopDialect)
  }
  // AggregatedDialect consults its dialects in order: the first to return a
  // result wins. The first dialect here answers only for even sqlType values;
  // odd values fall through to testH2Dialect (StringType).
  test("Aggregated dialects") {
    val agg = new AggregatedDialect(List(new JdbcDialect {
      override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2:")
      override def getCatalystType(
          sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
        if (sqlType % 2 == 0) {
          Some(LongType)
        } else {
          None
        }
      override def quoteIdentifier(colName: String): String = {
        s"My $colName quoteIdentifier"
      }
      override def getTableExistsQuery(table: String): String = {
        s"My $table Table"
      }
      override def getSchemaQuery(table: String): String = {
        s"My $table Schema"
      }
      override def isCascadingTruncateTable(): Option[Boolean] = Some(true)
    }, testH2Dialect))
    // canHandle requires ALL member dialects to accept the URL.
    assert(agg.canHandle("jdbc:h2:xxx"))
    assert(!agg.canHandle("jdbc:h2"))
    assert(agg.getCatalystType(0, "", 1, null) === Some(LongType))
    assert(agg.getCatalystType(1, "", 1, null) === Some(StringType))
    assert(agg.isCascadingTruncateTable() === Some(true))
    assert(agg.quoteIdentifier ("Dummy") === "My Dummy quoteIdentifier")
    assert(agg.getTableExistsQuery ("Dummy") === "My Dummy Table")
    assert(agg.getSchemaQuery ("Dummy") === "My Dummy Schema")
  }

  // isCascadingTruncateTable aggregation: Some(true) if any dialect says true;
  // all-Some(false) gives Some(false); a None mixed with false gives None.
  test("Aggregated dialects: isCascadingTruncateTable") {
    def genDialect(cascadingTruncateTable: Option[Boolean]): JdbcDialect = new JdbcDialect {
      override def canHandle(url: String): Boolean = true
      override def getCatalystType(
          sqlType: Int,
          typeName: String,
          size: Int,
          md: MetadataBuilder): Option[DataType] = None
      override def isCascadingTruncateTable(): Option[Boolean] = cascadingTruncateTable
    }
    def testDialects(cascadings: List[Option[Boolean]], expected: Option[Boolean]): Unit = {
      val dialects = cascadings.map(genDialect(_))
      val agg = new AggregatedDialect(dialects)
      assert(agg.isCascadingTruncateTable() === expected)
    }
    testDialects(List(Some(true), Some(false), None), Some(true))
    testDialects(List(Some(true), Some(true), None), Some(true))
    testDialects(List(Some(false), Some(false), None), None)
    testDialects(List(Some(true), Some(true)), Some(true))
    testDialects(List(Some(false), Some(false)), Some(false))
    testDialects(List(None, None), None)
  }
  // DB2: write-side mappings for Spark types and read-side mappings for
  // DB2-specific SQL types (DECFLOAT, XML, TIMESTAMP WITH TIME ZONE).
  test("DB2Dialect type mapping") {
    val db2Dialect = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    assert(db2Dialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(db2Dialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "CHAR(1)")
    assert(db2Dialect.getJDBCType(ShortType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(db2Dialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    // test db2 dialect mappings on read
    assert(db2Dialect.getCatalystType(java.sql.Types.REAL, "REAL", 1, null) == Option(FloatType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "DECFLOAT", 1, null) ==
      Option(DecimalType(38, 18)))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "XML", 1, null) == Option(StringType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "TIMESTAMP WITH TIME ZONE", 1, null) ==
      Option(TimestampType))
  }

  // Postgres: json/jsonb read as strings; float types map to FLOAT4/FLOAT8;
  // ByteType has no Postgres equivalent and must be rejected.
  test("PostgresDialect type mapping") {
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "json", 1, null) === Some(StringType))
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "jsonb", 1, null) === Some(StringType))
    assert(Postgres.getJDBCType(FloatType).map(_.databaseTypeDefinition).get == "FLOAT4")
    assert(Postgres.getJDBCType(DoubleType).map(_.databaseTypeDefinition).get == "FLOAT8")
    val errMsg = intercept[IllegalArgumentException] {
      Postgres.getJDBCType(ByteType)
    }
    assert(errMsg.getMessage contains "Unsupported type in postgresql: ByteType")
  }

  // Derby: write-side mappings for string/byte/boolean Spark types.
  test("DerbyDialect jdbc type mapping") {
    val derbyDialect = JdbcDialects.get("jdbc:derby:db")
    assert(derbyDialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(derbyDialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(derbyDialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "BOOLEAN")
  }
test("OracleDialect jdbc type mapping") {
val oracleDialect = JdbcDialects.get("jdbc:oracle")
val metadata = new MetadataBuilder().putString("name", "test_column").putLong("scale", -127)
assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "float", 1, metadata) ==
Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "numeric", 0, null) ==
Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
assert(oracleDialect.getCatalystType(OracleDialect.BINARY_FLOAT, "BINARY_FLOAT", 0, null) ==
Some(FloatType))
assert(oracleDialect.getCatalystType(OracleDialect.BINARY_DOUBLE, "BINARY_DOUBLE", 0, null) ==
Some(DoubleType))
assert(oracleDialect.getCatalystType(OracleDialect.TIMESTAMPTZ, "TIMESTAMP", 0, null) ==
Some(TimestampType))
}
// Each dialect supplies its own cheap table-existence probe: MySQL/Postgres use
// a LIMIT 1 query, the rest fall back to the default WHERE 1=0 scan.
test("table exists query by jdbc dialect") {
  val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
  val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
  val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
  // `url` is defined elsewhere in this suite — presumably the shared H2 fixture
  // url; confirm against the suite's setup code.
  val h2 = JdbcDialects.get(url)
  val derby = JdbcDialects.get("jdbc:derby:db")
  val table = "weblogs"
  val defaultQuery = s"SELECT * FROM $table WHERE 1=0"
  val limitQuery = s"SELECT 1 FROM $table LIMIT 1"
  assert(MySQL.getTableExistsQuery(table) == limitQuery)
  assert(Postgres.getTableExistsQuery(table) == limitQuery)
  assert(db2.getTableExistsQuery(table) == defaultQuery)
  assert(h2.getTableExistsQuery(table) == defaultQuery)
  assert(derby.getTableExistsQuery(table) == defaultQuery)
}

// TRUNCATE statement generation per dialect: Postgres restricts the statement
// to ONLY the named table, Teradata uses DELETE ... ALL, others plain TRUNCATE.
test("truncate table query by jdbc dialect") {
  val mysql = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
  val postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
  val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
  val h2 = JdbcDialects.get(url)
  val derby = JdbcDialects.get("jdbc:derby:db")
  val oracle = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
  val teradata = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")
  val table = "weblogs"
  val defaultQuery = s"TRUNCATE TABLE $table"
  val postgresQuery = s"TRUNCATE TABLE ONLY $table"
  val teradataQuery = s"DELETE FROM $table ALL"
  Seq(mysql, db2, h2, derby).foreach{ dialect =>
    // A cascade request is passed here even though these dialects ignore it;
    // the plain TRUNCATE is still expected.
    assert(dialect.getTruncateQuery(table, Some(true)) == defaultQuery)
  }
  assert(postgres.getTruncateQuery(table) == postgresQuery)
  assert(oracle.getTruncateQuery(table) == defaultQuery)
  assert(teradata.getTruncateQuery(table) == teradataQuery)
}
// Regression test for SPARK-11788: Date/Timestamp literals must be usable in
// DataFrame.where predicates pushed to a JDBC source.
test("Test DataFrame.where for Date and Timestamp") {
  // Regression test for bug SPARK-11788
  val timestamp = java.sql.Timestamp.valueOf("2001-02-20 11:22:33.543543");
  val date = java.sql.Date.valueOf("1995-01-01")
  val jdbcDf = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
  val rows = jdbcDf.where($"B" > date && $"C" > timestamp).collect()
  assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
  assert(rows(0).getAs[java.sql.Timestamp](2)
    === java.sql.Timestamp.valueOf("2002-02-20 11:22:33.543543"))
}

// The user/password given via Properties must not appear anywhere in the
// extended EXPLAIN output; also pins the JDBCRelation toString format.
test("test credentials in the properties are not in plan output") {
  val df = sql("SELECT * FROM parts")
  val explain = ExplainCommand(df.queryExecution.logical, extended = true)
  spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach {
    r => assert(!List("testPass", "testUser").exists(r.toString.contains))
  }
  // test the JdbcRelation toString output
  df.queryExecution.analyzed.collect {
    case r: LogicalRelation =>
      assert(r.relation.toString == "JDBCRelation(TEST.PEOPLE) [numPartitions=3]")
  }
}

// Same leak check when the credentials are embedded in the connection URL itself.
test("test credentials in the connection url are not in the plan output") {
  val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
  val explain = ExplainCommand(df.queryExecution.logical, extended = true)
  spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach {
    r => assert(!List("testPass", "testUser").exists(r.toString.contains))
  }
}
// Credentials passed through DDL OPTIONS must not leak into EXPLAIN output or
// DESC FORMATTED output, for both persistent tables and temporary views.
test("hide credentials in create and describe a persistent/temp table") {
  val password = "testPass"
  val tableName = "tab1"
  Seq("TABLE", "TEMPORARY VIEW").foreach { tableType =>
    withTable(tableName) {
      val df = sql(
        s"""
           |CREATE $tableType $tableName
           |USING org.apache.spark.sql.jdbc
           |OPTIONS (
           | url '$urlWithUserAndPass',
           | dbtable 'TEST.PEOPLE',
           | user 'testUser',
           | password '$password')
         """.stripMargin)
      // The fully-resolved physical plan text must not contain the password.
      val explain = ExplainCommand(df.queryExecution.logical, extended = true)
      spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
        assert(!r.toString.contains(password))
      }
      // Neither must the table description shown to users.
      sql(s"DESC FORMATTED $tableName").collect().foreach { r =>
        assert(!r.toString().contains(password))
      }
    }
  }
}
// Oracle has no TEXT type; StringType must map to a bounded VARCHAR2.
test("SPARK 12941: The data type mapping for StringType to Oracle") {
  val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
  assert(oracleDialect.getJDBCType(StringType).
    map(_.databaseTypeDefinition).get == "VARCHAR2(255)")
}

// Full write-side mapping table for Oracle across the common Catalyst types.
test("SPARK-16625: General data types to be mapped to Oracle") {
  // Resolves the dialect-specific mapping first and falls back to the common
  // JDBC mapping when the dialect has no override.
  def getJdbcType(dialect: JdbcDialect, dt: DataType): String = {
    dialect.getJDBCType(dt).orElse(JdbcUtils.getCommonJDBCType(dt)).
      map(_.databaseTypeDefinition).get
  }
  val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
  assert(getJdbcType(oracleDialect, BooleanType) == "NUMBER(1)")
  assert(getJdbcType(oracleDialect, IntegerType) == "NUMBER(10)")
  assert(getJdbcType(oracleDialect, LongType) == "NUMBER(19)")
  assert(getJdbcType(oracleDialect, FloatType) == "NUMBER(19, 4)")
  assert(getJdbcType(oracleDialect, DoubleType) == "NUMBER(19, 4)")
  assert(getJdbcType(oracleDialect, ByteType) == "NUMBER(3)")
  assert(getJdbcType(oracleDialect, ShortType) == "NUMBER(5)")
  assert(getJdbcType(oracleDialect, StringType) == "VARCHAR2(255)")
  assert(getJdbcType(oracleDialect, BinaryType) == "BLOB")
  assert(getJdbcType(oracleDialect, DateType) == "DATE")
  assert(getJdbcType(oracleDialect, TimestampType) == "TIMESTAMP")
}
/** Runs the given SQL statement and asserts that it returns no rows. */
private def assertEmptyQuery(sqlString: String): Unit = {
  val rows = sql(sqlString).collect()
  assert(rows.isEmpty)
}
// Pushed-down filters must preserve AND/OR precedence; each combined predicate
// below is logically false, so every query must come back empty.
test("SPARK-15916: JDBC filter operator push down should respect operator precedence") {
  val TRUE = "NAME != 'non_exists'"
  val FALSE1 = "THEID > 1000000000"
  val FALSE2 = "THEID < -1000000000"
  assertEmptyQuery(s"SELECT * FROM foobar WHERE ($TRUE OR $FALSE1) AND $FALSE2")
  assertEmptyQuery(s"SELECT * FROM foobar WHERE $FALSE1 AND ($FALSE2 OR $TRUE)")
  // Tests JDBCPartition whereClause clause push down.
  withTempView("tempFrame") {
    val jdbcPartitionWhereClause = s"$FALSE1 OR $TRUE"
    val df = spark.read.jdbc(
      urlWithUserAndPass,
      "TEST.PEOPLE",
      predicates = Array[String](jdbcPartitionWhereClause),
      new Properties())
    df.createOrReplaceTempView("tempFrame")
    assertEmptyQuery(s"SELECT * FROM tempFrame where $FALSE2")
  }
}
// Reserved words used as column names must be quoted in the generated DDL.
test("SPARK-16387: Reserved SQL words are not escaped by JDBC writer") {
  val df = spark.createDataset(Seq("a", "b", "c")).toDF("order")
  val schema = JdbcUtils.schemaString(df, "jdbc:mysql://localhost:3306/temp")
  assert(schema.contains("`order` TEXT"))
}

// Exercises every pushed-down comparison/LIKE/NULL/IN operator against a table
// with mixed-case column names, checking row counts for each predicate.
test("SPARK-18141: Predicates on quoted column names in the jdbc data source") {
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id < 1").collect().size == 0)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id <= 1").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id > 1").collect().size == 2)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id >= 1").collect().size == 3)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id != 2").collect().size == 2)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id <=> 2").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE 'fr%'").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%ed'").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%re%'").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NULL").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NOT NULL").collect().size == 2)
  // An empty IN list matches nothing.
  assert(sql("SELECT * FROM mixedCaseCols").filter($"Name".isin()).collect().size == 0)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name IN ('mary', 'fred')").collect().size == 2)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name NOT IN ('fred')").collect().size == 1)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1 OR Name = 'mary'").collect().size == 2)
  assert(sql("SELECT * FROM mixedCaseCols WHERE Name = 'mary' AND Id = 2").collect().size == 1)
}

// Spark-level options (url/dbtable/numPartitions) must be filtered out of the
// properties forwarded to the JDBC driver, regardless of key casing.
test("SPARK-18419: Fix `asConnectionProperties` to filter case-insensitively") {
  val parameters = Map(
    "url" -> "jdbc:mysql://localhost:3306/temp",
    "dbtable" -> "t1",
    "numPartitions" -> "10")
  assert(new JDBCOptions(parameters).asConnectionProperties.isEmpty)
  assert(new JDBCOptions(CaseInsensitiveMap(parameters)).asConnectionProperties.isEmpty)
}
// The read.schema(...) API is not supported with the jdbc() shortcuts; both
// overloads must fail with the same AnalysisException.
test("SPARK-16848: jdbc API throws an exception for user specified schema") {
  val schema = StructType(Seq(
    StructField("name", StringType, false), StructField("theid", IntegerType, false)))
  val parts = Array[String]("THEID < 2", "THEID >= 2")
  val e1 = intercept[AnalysisException] {
    spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties())
  }.getMessage
  assert(e1.contains("User specified schema not supported with `jdbc`"))
  val e2 = intercept[AnalysisException] {
    spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
  }.getMessage
  assert(e2.contains("User specified schema not supported with `jdbc`"))
}

// A schema override is supported via the `customSchema` connection property.
test("jdbc API support custom schema") {
  val parts = Array[String]("THEID < 2", "THEID >= 2")
  val customSchema = "NAME STRING, THEID INT"
  val props = new Properties()
  props.put("customSchema", customSchema)
  val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, props)
  assert(df.schema.size === 2)
  // The resulting schema must be exactly what the DDL string parses to.
  assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
  assert(df.count() === 3)
}

// Same customSchema override supplied through DDL OPTIONS (mixed-case keys on
// purpose: option names are case-insensitive).
test("jdbc API custom schema DDL-like strings.") {
  withTempView("people_view") {
    val customSchema = "NAME STRING, THEID INT"
    sql(
      s"""
         |CREATE TEMPORARY VIEW people_view
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass',
         |customSchema '$customSchema')
       """.stripMargin.replaceAll("\\n", " "))
    val df = sql("select * from people_view")
    assert(df.schema.length === 2)
    assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
    assert(df.count() === 3)
  }
}
// Teradata maps StringType to a bounded VARCHAR rather than a CLOB/TEXT type.
test("SPARK-15648: teradataDialect StringType data mapping") {
  val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")
  assert(teradataDialect.getJDBCType(StringType).
    map(_.databaseTypeDefinition).get == "VARCHAR(255)")
}

// Teradata has no native boolean; BooleanType maps to a single-char column.
test("SPARK-15648: teradataDialect BooleanType data mapping") {
  val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")
  assert(teradataDialect.getJDBCType(BooleanType).
    map(_.databaseTypeDefinition).get == "CHAR(1)")
}

// Input/output record metrics for a JDBC scan: expected tuple is
// (records read, bytes written, records written) per the helper's contract —
// presumably; confirm against InputOutputMetricsHelper.
test("Checking metrics correctness with JDBC") {
  val foobarCnt = spark.table("foobar").count()
  val res = InputOutputMetricsHelper.run(sql("SELECT * FROM foobar").toDF())
  assert(res === (foobarCnt, 0L, foobarCnt) :: Nil)
}
// Column types the JDBC reader cannot handle must fail with a descriptive
// SQLException rather than returning corrupt data.
test("unsupported types") {
  var e = intercept[SQLException] {
    spark.read.jdbc(urlWithUserAndPass, "TEST.TIMEZONE", new Properties()).collect()
  }.getMessage
  assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE"))
  e = intercept[SQLException] {
    spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY", new Properties()).collect()
  }.getMessage
  assert(e.contains("Unsupported type ARRAY"))
}

// Spark option names are case-insensitive, but connection properties forwarded
// to the driver must preserve the key's original casing exactly.
test("SPARK-19318: Connection properties keys should be case-sensitive.") {
  def testJdbcOptions(options: JDBCOptions): Unit = {
    // Spark JDBC data source options are case-insensitive
    assert(options.tableOrQuery == "t1")
    // When we convert it to properties, it should be case-sensitive.
    assert(options.asProperties.size == 3)
    assert(options.asProperties.get("customkey") == null)
    assert(options.asProperties.get("customKey") == "a-value")
    assert(options.asConnectionProperties.size == 1)
    assert(options.asConnectionProperties.get("customkey") == null)
    assert(options.asConnectionProperties.get("customKey") == "a-value")
  }
  val parameters = Map("url" -> url, "dbTAblE" -> "t1", "customKey" -> "a-value")
  testJdbcOptions(new JDBCOptions(parameters))
  testJdbcOptions(new JDBCOptions(CaseInsensitiveMap(parameters)))
  // test add/remove key-value from the case-insensitive map
  var modifiedParameters = CaseInsensitiveMap(Map.empty) ++ parameters
  testJdbcOptions(new JDBCOptions(modifiedParameters))
  // Removal with differently-cased keys must still delete the entries.
  modifiedParameters -= "dbtable"
  assert(modifiedParameters.get("dbTAblE").isEmpty)
  modifiedParameters -= "customkey"
  assert(modifiedParameters.get("customKey").isEmpty)
  modifiedParameters += ("customKey" -> "a-value")
  modifiedParameters += ("dbTable" -> "t1")
  testJdbcOptions(new JDBCOptions(modifiedParameters))
  assert ((modifiedParameters -- parameters.keys).size == 0)
}
// Option names may be written in any casing in both the DataFrameReader API
// and the DDL OPTIONS clause.
test("SPARK-19318: jdbc data source options should be treated case-insensitive.") {
  val df = spark.read.format("jdbc")
    .option("Url", urlWithUserAndPass)
    .option("DbTaBle", "TEST.PEOPLE")
    .load()
  assert(df.count() == 3)
  withTempView("people_view") {
    sql(
      s"""
         |CREATE TEMPORARY VIEW people_view
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass')
       """.stripMargin.replaceAll("\\n", " "))
    assert(sql("select * from people_view").count() == 3)
  }
}

// sessionInitStatement runs arbitrary SQL on the connection before reading;
// verifies variable setup, failure propagation, and multi-statement init.
test("SPARK-21519: option sessionInitStatement, run SQL to initialize the database session.") {
  // The init statement sets an H2 session variable which the query then reads.
  val initSQL1 = "SET @MYTESTVAR 21519"
  val df1 = spark.read.format("jdbc")
    .option("url", urlWithUserAndPass)
    .option("dbtable", "(SELECT NVL(@MYTESTVAR, -1))")
    .option("sessionInitStatement", initSQL1)
    .load()
  assert(df1.collect() === Array(Row(21519)))
  // Switching to a nonexistent schema must surface the database error.
  val initSQL2 = "SET SCHEMA DUMMY"
  val df2 = spark.read.format("jdbc")
    .option("url", urlWithUserAndPass)
    .option("dbtable", "TEST.PEOPLE")
    .option("sessionInitStatement", initSQL2)
    .load()
  val e = intercept[SparkException] {df2.collect()}.getMessage
  assert(e.contains("""Schema "DUMMY" not found"""))
  // Multiple semicolon-separated init statements are supported via DDL OPTIONS.
  sql(
    s"""
       |CREATE OR REPLACE TEMPORARY VIEW test_sessionInitStatement
       |USING org.apache.spark.sql.jdbc
       |OPTIONS (url '$urlWithUserAndPass',
       |dbtable '(SELECT NVL(@MYTESTVAR1, -1), NVL(@MYTESTVAR2, -1))',
       |sessionInitStatement 'SET @MYTESTVAR1 21519; SET @MYTESTVAR2 1234')
     """.stripMargin)
  val df3 = sql("SELECT * FROM test_sessionInitStatement")
  assert(df3.collect() === Array(Row(21519, 1234)))
}
// The schema inferred from JDBC must equal a plain StructType — i.e. carry no
// extra per-field metadata — through both the reader API and DDL path.
test("jdbc data source shouldn't have unnecessary metadata in its schema") {
  val schema = StructType(Seq(
    StructField("NAME", StringType, true), StructField("THEID", IntegerType, true)))
  val df = spark.read.format("jdbc")
    .option("Url", urlWithUserAndPass)
    .option("DbTaBle", "TEST.PEOPLE")
    .load()
  assert(df.schema === schema)
  withTempView("people_view") {
    sql(
      s"""
         |CREATE TEMPORARY VIEW people_view
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass')
       """.stripMargin.replaceAll("\\n", " "))
    assert(sql("select * from people_view").schema === schema)
  }
}

// A deliberately expensive self-join plus queryTimeout=1s must make the
// database cancel the statement, surfacing as a SparkException.
test("SPARK-23856 Spark jdbc setQueryTimeout option") {
  val numJoins = 100
  val longRunningQuery =
    s"SELECT t0.NAME AS c0, ${(1 to numJoins).map(i => s"t$i.NAME AS c$i").mkString(", ")} " +
      s"FROM test.people t0 ${(1 to numJoins).map(i => s"join test.people t$i").mkString(" ")}"
  val df = spark.read.format("jdbc")
    .option("Url", urlWithUserAndPass)
    .option("dbtable", s"($longRunningQuery)")
    .option("queryTimeout", 1)
    .load()
  val errMsg = intercept[SparkException] {
    df.collect()
  }.getMessage
  assert(errMsg.contains("Statement was canceled or the session timed out"))
}
// The user-supplied partitionColumn must be verified against and normalized to
// the resolved JDBC schema; checks the generated partition WHERE clauses and
// the error path for unknown / wrongly-cased columns.
// NOTE: the helper name `testJdbcParitionColumn` carries a historical typo
// ("Parition"); kept as-is since it is purely local.
test("SPARK-24327 verify and normalize a partition column based on a JDBC resolved schema") {
  def testJdbcParitionColumn(partColName: String, expectedColumnName: String): Unit = {
    val df = spark.read.format("jdbc")
      .option("url", urlWithUserAndPass)
      .option("dbtable", "TEST.PARTITION")
      .option("partitionColumn", partColName)
      .option("lowerBound", 1)
      .option("upperBound", 4)
      .option("numPartitions", 3)
      .load()
    val quotedPrtColName = testH2Dialect.quoteIdentifier(expectedColumnName)
    // Inspect the relation's partitions to verify the exact WHERE clauses
    // generated for the three ranges (first one also catches NULLs).
    df.logicalPlan match {
      case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
        val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
        assert(whereClauses === Set(
          s"$quotedPrtColName < 2 or $quotedPrtColName is null",
          s"$quotedPrtColName >= 2 AND $quotedPrtColName < 3",
          s"$quotedPrtColName >= 3"))
    }
  }
  testJdbcParitionColumn("THEID", "THEID")
  testJdbcParitionColumn("\\"THEID\\"", "THEID")
  // Case-insensitive resolution normalizes the casing to the schema's.
  withSQLConf("spark.sql.caseSensitive" -> "false") {
    testJdbcParitionColumn("ThEiD", "THEID")
  }
  testJdbcParitionColumn("THE ID", "THE ID")
  def testIncorrectJdbcPartitionColumn(partColName: String): Unit = {
    val errMsg = intercept[AnalysisException] {
      testJdbcParitionColumn(partColName, "THEID")
    }.getMessage
    assert(errMsg.contains(s"User-defined partition column $partColName not found " +
      "in the JDBC relation:"))
  }
  testIncorrectJdbcPartitionColumn("NoExistingColumn")
  // With case-sensitive resolution, a wrongly-cased quoted name must not match.
  withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
    testIncorrectJdbcPartitionColumn(testH2Dialect.quoteIdentifier("ThEiD"))
  }
}
// Invalid combinations of the `query` option: with `dbtable` (all three entry
// points), empty, and together with partitioning options.
test("query JDBC option - negative tests") {
  val query = "SELECT * FROM test.people WHERE theid = 1"
  // load path
  val e1 = intercept[RuntimeException] {
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("query", query)
      .option("dbtable", "test.people")
      .load()
  }.getMessage
  assert(e1.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
  // jdbc api path
  val properties = new Properties()
  properties.setProperty(JDBCOptions.JDBC_QUERY_STRING, query)
  val e2 = intercept[RuntimeException] {
    spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
  }.getMessage
  assert(e2.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
  // DDL path: same conflict must also be rejected at view creation time.
  val e3 = intercept[RuntimeException] {
    sql(
      s"""
         |CREATE OR REPLACE TEMPORARY VIEW queryOption
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE',
         | user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\\n", " "))
  }.getMessage
  assert(e3.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
  // An empty query string is rejected outright.
  val e4 = intercept[RuntimeException] {
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("query", "")
      .load()
  }.getMessage
  assert(e4.contains("Option `query` can not be empty."))
  // Option query and partitioncolumn are not allowed together.
  val expectedErrorMsg =
    s"""
       |Options 'query' and 'partitionColumn' can not be specified together.
       |Please define the query using `dbtable` option instead and make sure to qualify
       |the partition columns using the supplied subquery alias to resolve any ambiguity.
       |Example :
       |spark.read.format("jdbc")
       | .option("url", jdbcUrl)
       | .option("dbtable", "(select c1, c2 from t1) as subq")
       | .option("partitionColumn", "c1")
       | .option("lowerBound", "1")
       | .option("upperBound", "100")
       | .option("numPartitions", "3")
       | .load()
     """.stripMargin
  val e5 = intercept[RuntimeException] {
    sql(
      s"""
         |CREATE OR REPLACE TEMPORARY VIEW queryOption
         |USING org.apache.spark.sql.jdbc
         |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass',
         | partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
       """.stripMargin.replaceAll("\\n", " "))
  }.getMessage
  assert(e5.contains(expectedErrorMsg))
}
// Happy path for the `query` option: a raw SQL query can replace `dbtable`
// both via the reader API and via DDL OPTIONS.
test("query JDBC option") {
  val query = "SELECT name, theid FROM test.people WHERE theid = 1"
  // query option to pass on the query string.
  val df = spark.read.format("jdbc")
    .option("Url", urlWithUserAndPass)
    .option("query", query)
    .load()
  checkAnswer(
    df,
    Row("fred", 1) :: Nil)
  // query option in the create table path.
  sql(
    s"""
       |CREATE OR REPLACE TEMPORARY VIEW queryOption
       |USING org.apache.spark.sql.jdbc
       |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass')
     """.stripMargin.replaceAll("\\n", " "))
  checkAnswer(
    sql("select name, theid from queryOption"),
    Row("fred", 1) :: Nil)
}
// Partitioning on DateType and TimestampType columns: checks both the literal
// WHERE clauses generated per partition and that the union of partitions still
// returns the complete result set.
test("SPARK-22814 support date/timestamp types in partitionColumn") {
  val expectedResult = Seq(
    ("2018-07-06", "2018-07-06 05:50:00.0"),
    ("2018-07-06", "2018-07-06 08:10:08.0"),
    ("2018-07-08", "2018-07-08 13:32:01.0"),
    ("2018-07-12", "2018-07-12 09:51:15.0")
  ).map { case (date, timestamp) =>
    Row(Date.valueOf(date), Timestamp.valueOf(timestamp))
  }
  // DateType partition column
  val df1 = spark.read.format("jdbc")
    .option("url", urlWithUserAndPass)
    .option("dbtable", "TEST.DATETIME")
    .option("partitionColumn", "d")
    .option("lowerBound", "2018-07-06")
    .option("upperBound", "2018-07-20")
    .option("numPartitions", 3)
    .load()
  df1.logicalPlan match {
    case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
      val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
      // Date bounds are split into three ranges; the first also catches NULLs.
      assert(whereClauses === Set(
        """"D" < '2018-07-10' or "D" is null""",
        """"D" >= '2018-07-10' AND "D" < '2018-07-14'""",
        """"D" >= '2018-07-14'"""))
  }
  checkAnswer(df1, expectedResult)
  // TimestampType partition column
  val df2 = spark.read.format("jdbc")
    .option("url", urlWithUserAndPass)
    .option("dbtable", "TEST.DATETIME")
    .option("partitionColumn", "t")
    .option("lowerBound", "2018-07-04 03:30:00.0")
    .option("upperBound", "2018-07-27 14:11:05.0")
    .option("numPartitions", 2)
    .load()
  df2.logicalPlan match {
    case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
      val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
      assert(whereClauses === Set(
        """"T" < '2018-07-15 20:50:32.5' or "T" is null""",
        """"T" >= '2018-07-15 20:50:32.5'"""))
  }
  checkAnswer(df2, expectedResult)
}
// Partition columns must be numeric/date/timestamp; a string column is rejected
// with an explicit error message.
test("throws an exception for unsupported partition column types") {
  val errMsg = intercept[AnalysisException] {
    spark.read.format("jdbc")
      .option("url", urlWithUserAndPass)
      .option("dbtable", "TEST.PEOPLE")
      .option("partitionColumn", "name")
      .option("lowerBound", "aaa")
      .option("upperBound", "zzz")
      .option("numPartitions", 2)
      .load()
  }.getMessage
  assert(errMsg.contains(
    "Partition column type should be numeric, date, or timestamp, but string found."))
}

// With pushDownPredicate=false the filter must be evaluated by Spark instead
// of being pushed to the database; checkNotPushdown presumably asserts the
// scan carries no pushed filters — confirm against the suite's helper.
test("SPARK-24288: Enable preventing predicate pushdown") {
  val table = "test.people"
  val df = spark.read.format("jdbc")
    .option("Url", urlWithUserAndPass)
    .option("dbTable", table)
    .option("pushDownPredicate", false)
    .load()
    .filter("theid = 1")
    .select("name", "theid")
  checkAnswer(
    checkNotPushdown(df),
    Row("fred", 1) :: Nil)
  // pushDownPredicate option in the create table path.
  sql(
    s"""
       |CREATE OR REPLACE TEMPORARY VIEW predicateOption
       |USING org.apache.spark.sql.jdbc
       |OPTIONS (url '$urlWithUserAndPass', dbTable '$table', pushDownPredicate 'false')
     """.stripMargin.replaceAll("\\n", " "))
  checkAnswer(
    checkNotPushdown(sql("SELECT name, theid FROM predicateOption WHERE theid = 1")),
    Row("fred", 1) :: Nil)
}
}
| mdespriee/spark | sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | Scala | apache-2.0 | 63,774 |
package score.discord.canti.command
import net.dv8tion.jda.api.entities.Message
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should
import score.discord.canti.TestFixtures
import score.discord.canti.command.api.*
import score.discord.canti.util.BotMessages
import score.discord.canti.wrappers.jda.RetrievableMessage
import scala.concurrent.Future
/** Exercises the &help command against a registry populated with 100 dummy
  * commands: paging bounds, the default listing, per-command long help, and
  * the GitHub link on default pages.
  */
class HelpCommandTest extends AnyFlatSpec with should.Matchers:
  val fixture = TestFixtures.default
  // Brings fixture members (including `commands` and, presumably, the
  // `testCommand` driver used below) plus its givens into scope.
  import fixture.{given, *}

  private val dummyDesc = "dummy command"
  private val dummyLongDesc = "this command is dummy"

  // Minimal command carrying only the metadata that &help renders.
  private class DummyCommand(val name: String) extends GenericCommand:
    override def description = dummyDesc
    override def longDescription(invocation: String) = dummyLongDesc
    override def permissions = CommandPermissions.Anyone
    override def argSpec = Nil
    // Never invoked by these tests; &help only reads metadata.
    override def execute(ctx: CommandInvocation): Future[RetrievableMessage] = ???

  // Register cmd1..cmd100 to force the help output onto multiple pages.
  (1 to 100)
    .map(n => s"cmd$n")
    .map(DummyCommand(_))
    .foreach(commands.register)

  private val cmd = HelpCommand(commands)
  commands.register(cmd)

  "The &help command" should "reject negative page numbers" in {
    val embed = testCommand("&help -1").getEmbeds.get(0).nn
    embed.getColor should be(BotMessages.ERROR_COLOR)
    // An error page must not leak command listings.
    embed.getDescription.nn shouldNot include(dummyDesc)
  }

  it should "reject excessive page numbers" in {
    val embed = testCommand("&help 100").getEmbeds.get(0).nn
    embed.getColor should be(BotMessages.ERROR_COLOR)
    embed.getDescription.nn should include("That page does not exist")
  }

  it should "show command descriptions" in {
    val embed = testCommand("&help").getEmbeds.get(0).nn
    embed.getDescription.nn should include(dummyDesc)
  }

  it should "show long descriptions" in {
    // Asking for a specific command shows its name, short and long description.
    val helpText = testCommand("&help cmd53").getEmbeds.get(0).nn.getDescription.nn
    helpText should include("cmd53")
    helpText should include(dummyDesc)
    helpText should include(dummyLongDesc)
  }

  it should "link to github from default help pages" in {
    val embed = testCommand("&help 3").getEmbeds.get(0).nn
    embed.getDescription.nn should include("github.com/")
  }
end HelpCommandTest
| ScoreUnder/canti-bot | src/test/scala/score/discord/canti/command/HelpCommandTest.scala | Scala | agpl-3.0 | 2,212 |
object O extends Dynamic {
def applyDynamic(s: String)() {}
}
O./* file: this, name: applyDynamic */foo()
| ilinum/intellij-scala | testdata/resolve2/dynamic/ApplyDynamic.scala | Scala | apache-2.0 | 109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.Properties
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
import org.apache.spark.sql.SparkSession
/**
 * Tests around SQL execution-ID propagation across threads (SPARK-10548,
 * SPARK-13747) and execution-ID to QueryExecution lookup.
 */
class SQLExecutionSuite extends SparkFunSuite {

  test("concurrent query execution (SPARK-10548)") {
    // Try to reproduce the issue with the old SparkContext
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("test")
    // BadSparkContext (below) mimics the pre-fix behavior of sharing the
    // parent's local-properties object with child threads.
    val badSparkContext = new BadSparkContext(conf)
    try {
      testConcurrentQueryExecution(badSparkContext)
      fail("unable to reproduce SPARK-10548")
    } catch {
      case e: IllegalArgumentException =>
        assert(e.getMessage.contains(SQLExecution.EXECUTION_ID_KEY))
    } finally {
      badSparkContext.stop()
    }

    // Verify that the issue is fixed with the latest SparkContext
    val goodSparkContext = new SparkContext(conf)
    try {
      testConcurrentQueryExecution(goodSparkContext)
    } finally {
      goodSparkContext.stop()
    }
  }

  test("concurrent query execution with fork-join pool (SPARK-13747)") {
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("test")
      .getOrCreate()

    import spark.implicits._
    try {
      // Should not throw IllegalArgumentException
      // `.par` runs the queries on the default fork-join pool concurrently.
      (1 to 100).par.foreach { _ =>
        spark.sparkContext.parallelize(1 to 5).map { i => (i, i) }.toDF("a", "b").count()
      }
    } finally {
      spark.sparkContext.stop()
    }
  }

  /**
   * Trigger SPARK-10548 by mocking a parent and its child thread executing queries concurrently.
   */
  private def testConcurrentQueryExecution(sc: SparkContext): Unit = {
    val spark = SparkSession.builder.getOrCreate()
    import spark.implicits._

    // Initialize local properties. This is necessary for the test to pass.
    sc.getLocalProperties

    // Set up a thread that runs executes a simple SQL query.
    // Before starting the thread, mutate the execution ID in the parent.
    // The child thread should not see the effect of this change.
    var throwable: Option[Throwable] = None
    val child = new Thread {
      override def run(): Unit = {
        try {
          sc.parallelize(1 to 100).map { i => (i, i) }.toDF("a", "b").collect()
        } catch {
          case t: Throwable =>
            throwable = Some(t)
        }
      }
    }
    sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, "anything")
    child.start()
    child.join()

    // The throwable is thrown from the child thread so it doesn't have a helpful stack trace
    throwable.foreach { t =>
      t.setStackTrace(t.getStackTrace ++ Thread.currentThread.getStackTrace)
      throw t
    }
  }

  test("Finding QueryExecution for given executionId") {
    val spark = SparkSession.builder.master("local[*]").appName("test").getOrCreate()
    import spark.implicits._

    var queryExecution: QueryExecution = null

    // Capture the QueryExecution registered under the job's execution ID as
    // soon as the job starts, then compare it with the DataFrame's own.
    spark.sparkContext.addSparkListener(new SparkListener {
      override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
        val executionIdStr = jobStart.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
        if (executionIdStr != null) {
          queryExecution = SQLExecution.getQueryExecution(executionIdStr.toLong)
        }
        SQLExecutionSuite.canProgress = true
      }
    })

    // The tasks spin until the listener has fired, guaranteeing the lookup
    // happens while the execution is still registered.
    val df = spark.range(1).map { x =>
      while (!SQLExecutionSuite.canProgress) {
        Thread.sleep(1)
      }
      x
    }
    df.collect()

    assert(df.queryExecution === queryExecution)

    spark.stop()
  }
}
/**
 * A bad [[SparkContext]] that does not clone the inheritable thread local properties
 * when passing them to children threads.
 */
private class BadSparkContext(conf: SparkConf) extends SparkContext(conf) {
  protected[spark] override val localProperties = new InheritableThreadLocal[Properties] {
    // `new Properties(parent)` shares the parent as the *defaults* table instead of
    // copying its entries, so later parent mutations leak into the child — exactly
    // the buggy behavior this helper is meant to simulate.
    override protected def childValue(parent: Properties): Properties = new Properties(parent)
    override protected def initialValue(): Properties = new Properties()
  }
}
object SQLExecutionSuite {
  // Cross-thread handshake flag for "Finding QueryExecution for given executionId":
  // set by the driver-side listener, polled by the task-side mapper. @volatile
  // guarantees the write is visible across threads.
  @volatile var canProgress = false
}
| wangyixiaohuihui/spark2-annotation | sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala | Scala | apache-2.0 | 4,943 |
/*
* Copyright 2016 Lightcopy
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.lightcopy.testutil
import java.util.logging._
/**
 * Workaround to disable Parquet logging (org.apache.parquet), which is very verbose and is
 * not particularly meaningful unless debugging Parquet read/write functionality. Mixing in
 * this trait installs a single OFF-level console handler when no handler is yet present.
 */
trait ParquetLogging {
  /** Parent logger shared by every org.apache.parquet class. */
  val logger = Logger.getLogger("org.apache.parquet")
  /** Handlers that were already registered before this trait was initialized. */
  val handlers: Array[Handler] = logger.getHandlers()
  // Only install the no-op handler when none exists yet: an earlier mixin (or external
  // logging config) that registered a handler leaves the logger untouched.
  if (Option(handlers).forall(_.isEmpty)) {
    println("[LOGGING] Found no handlers for org.apache.parquet, add no-op logging")
    val silencer = new ConsoleHandler()
    silencer.setLevel(Level.OFF)
    logger.addHandler(silencer)
    logger.setLevel(Level.OFF)
  }
}
| lightcopy/parquet-index | src/test/scala/com/github/lightcopy/testutil/ParquetLogging.scala | Scala | apache-2.0 | 1,303 |
package cobalt.parser.expression
import cobalt.ast.AST.{DoubleConst, FloatConst, IntConst, LongConst}
import cobalt.parser.ExpressionParser
import cobalt.utils.TestUtil
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSpec, Matchers}
@RunWith(classOf[JUnitRunner])
class NumberParserTest extends FunSpec with Matchers
{
  describe("Number parser")
  {
    it("Should parse integers")
    {
      // A bare numeric literal parses to IntConst.
      TestUtil.parse("100", ExpressionParser.expressionParser) shouldBe IntConst(100)
    }
    it("Should parse longs")
    {
      // Both lower- and upper-case suffixes are accepted.
      TestUtil.parse("100l", ExpressionParser.expressionParser) shouldBe LongConst(100)
      TestUtil.parse("100L", ExpressionParser.expressionParser) shouldBe LongConst(100)
    }
    it("Should parse floats")
    {
      // NOTE(review): 123.123 is a Double literal handed to FloatConst — presumably
      // FloatConst accepts a wider numeric type; confirm against the AST definition.
      TestUtil.parse("123.123f", ExpressionParser.expressionParser) shouldBe FloatConst(123.123)
      TestUtil.parse("123.123F", ExpressionParser.expressionParser) shouldBe FloatConst(123.123)
    }
    it("Should parse doubles")
    {
      // An unsuffixed decimal literal parses as a double.
      TestUtil.parse("123.123", ExpressionParser.expressionParser) shouldBe DoubleConst(123.123)
    }
  }
}
| Michael2109/cobalt | src/test/scala/cobalt/parser/expression/NumberParserTest.scala | Scala | lgpl-3.0 | 1,133 |
// Regression shape for lambda lifting: the closure assigned to `fi` captures the local
// method `inner`, which must be lifted out of `outer` together with the lambda.
class A {
  class B {
    def outer(): Unit = {
      def inner(): Int = 2
      val fi: Function0[Int] = () => inner()
    }
  }
}
/**
 * Exercises lambda lifting of local functions, classes and traits that capture the
 * enclosing method's parameter `x`. Each `f1*` variant nests the capture through a
 * different combination of local classes/traits; `main` pins the expected results.
 * The nesting structure is the test — do not "simplify" these bodies.
 */
object Test {
  // Local trait + subclass capturing `x`; `super[B].bar` must still resolve after lifting.
  def foo(x: Int) = {
    trait B {
      def bar = x
    }
    class C extends B {
      override def bar = super[B].bar
    }
    new C().bar
  }
  // Capture through two nested local classes and a nested local method.
  def f1(x: Int) = {
    class C1 {
      def f2 = {
        class C2 {
          def f3 = {
            def f4 = x
            f4
          }
        }
        new C2().f3
      }
    }
    new C1().f2
  }
  // Same shape as f1 but the inner class is replaced by a local trait mixed into C2.
  // The commented-out C1 wrapper is kept deliberately to document the variant's origin.
  def f1a(x: Int) = {
    // class C1 {
      def f2 = {
        trait T2 {
          def f3 = {
            def f4 = x
            def f5 = f4
            f5
          }
        }
        class C2 extends T2
        new C2().f3
      }
    // }
    /*new C1().*/f2
  }
  // Trait nested inside a local class; capture goes through two levels of lifting.
  def f1b(x: Int) = {
    class T1 {
      def f2 = {
        trait T2 {
          def f3 = {
            def f4 = x
            def f5 = f4
            f5
          }
        }
        class C2 extends T2
        new C2().f3
      }
    }
    class C1 extends T1
    new C1().f2
  }
  // Adds a self-referencing call (`this.f3`) and a second instantiation path via C3/f6;
  // expected value is the sum of both paths (see main: f1c(7) == 14).
  def f1c(x: Int) = {
    class T1 {
      def f2 = {
        trait T2 {
          def f3: Int = {
            def f4 = x
            def f5 = f4
            def f7 = this.f3
            f5
          }
          def f3a = f3
        }
        class C2 extends T2
        class C3 extends T1
        new C2().f3a + new C3().f6
      }
      def f6 = x
    }
    class C1 extends T1
    new C1().f2
  }
  // Same as f1c but the outer local is a trait instead of a class.
  def f1d(x: Int) = {
    trait T1 {
      def f2 = {
        trait T2 {
          def f3: Int = {
            def f4 = x
            def f5 = f4
            def f7 = this.f3
            f5
          }
          def f3a = f3
        }
        class C2 extends T2
        class C3 extends T1
        new C2().f3a + new C3().f6
      }
      def f6 = x
    }
    class C1 extends T1
    new C1().f2
  }
  // Minimal capture through a trait nested in a method of an outer local trait.
  def f1e(x: Int) = {
    trait T1 {
      def f2 = {
        trait T2 {
          def f3: Int = x
        }
        class C2 extends T2
        new C2().f3
      }
      def f6 = x
    }
    class C1 extends T1
    new C1().f6
  }
  // Trait members (not method-locals) capturing `x`, with a `super.f3` call after lifting.
  def f1f(x: Int) = {
    trait T1 {
      trait T2 {
        def f3: Int = x
      }
      class C2 extends T2 {
        override def f3 = super.f3
      }
      new C2().f3
      def f6 = x
    }
    class C1 extends T1
    new C1().f6
  }
  // Expected values pin the captured `x` through every variant.
  def main(args: Array[String]) = {
    assert(foo(3) == 3)
    assert(f1(4) == 4)
    assert(f1a(5) == 5)
    assert(f1b(6) == 6)
    assert(f1c(7) == 14)
    assert(f1d(8) == 16)
    assert(f1e(9) == 9)
    assert(f1f(10) == 10)
  }
}
| som-snytt/dotty | tests/run/llift.scala | Scala | apache-2.0 | 2,578 |
package dotty.tools
package dotc
import core._
import Contexts._, Periods._, Symbols._, Phases._, Decorators._
import dotty.tools.dotc.transform.TreeTransforms.TreeTransformer
import io.PlainFile
import util.{SourceFile, NoSource, Stats, SimpleMap}
import reporting.Reporter
import transform.TreeChecker
import java.io.{BufferedWriter, OutputStreamWriter}
import scala.reflect.io.VirtualFile
import scala.util.control.NonFatal
/** A single compiler run: drives the configured phases over a set of compilation units. */
class Run(comp: Compiler)(implicit ctx: Context) {

  assert(comp.phases.last.last.id <= Periods.MaxPossiblePhaseId)
  assert(ctx.runId <= Periods.MaxPossibleRunId)

  /** The units being compiled by this run; populated by `compileSources`. */
  var units: List[CompilationUnit] = _

  /** Resolve `fileName`, reporting an error and answering `NoSource` when it is missing. */
  def getSource(fileName: String): SourceFile = {
    val f = new PlainFile(fileName)
    if (f.exists) new SourceFile(f)
    else {
      ctx.error(s"not found: $fileName")
      NoSource
    }
  }

  /** Compile the named files; a non-fatal failure is logged with the unit list, then rethrown. */
  def compile(fileNames: List[String]): Unit = try {
    val sources = fileNames map getSource
    compileSources(sources)
  } catch {
    case NonFatal(ex) =>
      ctx.println(i"exception occurred while compiling $units%, %")
      throw ex
  }

  /** TODO: There's a fundamental design problem here: We assemble phases using `squash`
   *  when we first build the compiler. But we modify them with -Yskip, -Ystop
   *  on each run. That modification needs to either transform the tree structure,
   *  or we need to assemble phases on each run, and take -Yskip, -Ystop into
   *  account. I think the latter would be preferable.
   */
  def compileSources(sources: List[SourceFile]) =
    if (sources forall (_.exists)) {
      units = sources map (new CompilationUnit(_))
      compileUnits()
    }

  protected def compileUnits() = Stats.monitorHeartBeat {
    ctx.checkSingleThreaded()
    // Assemble the phase plan for this run, honoring -Yskip / -Ystop* / -Ycheck.
    val phases = ctx.squashPhases(ctx.phasePlan,
      ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, ctx.settings.YstopAfter.value, ctx.settings.Ycheck.value)
    ctx.usePhases(phases)
    // Stop running phases as soon as any phase reported errors.
    for (phase <- ctx.allPhases)
      if (!ctx.reporter.hasErrors) {
        if (ctx.settings.verbose.value) ctx.println(s"[$phase]")
        units = phase.runOn(units)
        def foreachUnit(op: Context => Unit)(implicit ctx: Context): Unit =
          for (unit <- units) op(ctx.fresh.setPhase(phase.next).setCompilationUnit(unit))
        if (ctx.settings.Xprint.value.containsPhase(phase))
          foreachUnit(printTree)
      }
  }

  /** Print the tree of `ctx`'s compilation unit as it stands after the previous phase. */
  private def printTree(ctx: Context) = {
    val unit = ctx.compilationUnit
    val prevPhase = ctx.phase.prev // can be a mini-phase
    val squashedPhase = ctx.squashed(prevPhase)
    ctx.println(s"result of $unit after ${squashedPhase}:")
    ctx.println(unit.tpdTree.show(ctx))
  }

  /** Compile a source string held in a virtual file. */
  def compile(sourceCode: String): Unit = {
    val virtualFile = new VirtualFile(sourceCode) // use source code as name as it's used for equals
    val writer = new BufferedWriter(new OutputStreamWriter(virtualFile.output, "UTF-8")) // buffering is still advised by javadoc
    // Close in a finally block so the underlying stream is released even if the
    // write throws (the previous version leaked the writer on failure).
    try writer.write(sourceCode)
    finally writer.close()
    compileSources(List(new SourceFile(virtualFile)))
  }

  /** Print summary; return # of errors encountered */
  def printSummary(): Reporter = {
    ctx.runInfo.printMaxConstraint()
    val r = ctx.reporter
    r.printSummary
    r
  }
}
| reactormonk/dotty | src/dotty/tools/dotc/Run.scala | Scala | bsd-3-clause | 3,220 |
package gettingstarted
import org.scalatest.FunSuite
import org.scalatest.prop.TableDrivenPropertyChecks
class MyModuleTest extends FunSuite with TableDrivenPropertyChecks{
  import MyModule._
  // (n, fib(n)) pairs covering the base cases and the first few recursive steps.
  val fibCases = Table(
    ("n", "expected"),
    (0, 0),
    (1, 1),
    (2, 1),
    (3, 2),
    (4, 3),
    (5, 5),
    (6, 8)
  )
  // Generates one named test per table row.
  forAll(fibCases) { (n:Int, expected:Int) =>
    test(s"Exercise 2.1 fib($n) will be $expected") {
      assert(fib(n) == expected)
    }
  }
  test("Exercise 2.2 isSorted") {
    // Non-strict ordering: equal neighbours still count as sorted.
    def ordered(x:Int, y:Int):Boolean = x <= y
    assert( isSorted[Int](Array(0, 1, 2), ordered) === true )
    assert( isSorted[Int](Array(0, 2, 1), ordered) === false)
  }
}
| ara-ta3/FunctionalProgramminginScala | src/test/scala/gettingstarted/MyModuleTest.scala | Scala | mit | 690 |
package com.azavea.maml.eval.directive
import com.azavea.maml.eval._
import com.azavea.maml.error._
import com.azavea.maml.ast._
import com.azavea.maml.eval.tile._
import com.azavea.maml.util._
import geotrellis.raster.Tile
import geotrellis.raster.mapalgebra.focal
import geotrellis.vector.Point
import geotrellis.proj4.LatLng
import cats.implicits._
import cats.data._
import Validated._
/**
 * Interpreter directives for MAML's focal (neighborhood) operations. Each directive
 * converts its child results to [[LazyMultibandRaster]]s — accumulating conversion
 * errors via `traverse` — and applies the matching GeoTrellis focal operation to the
 * first child image.
 */
object FocalDirectives {

  /**
   * Default z-factor for an image: the ratio of vertical units to horizontal units at
   * the raster footprint's mid-latitude, used when the expression supplies no explicit
   * z-factor (shared by `slope` and `hillshade`, which previously duplicated it).
   *
   * NOTE(review): the original computed `ymax - (ymax - ymin)`, which is simply `ymin`,
   * contradicting the name `middleY`; corrected to the true vertical midpoint.
   */
  private def defaultZFactor(image: LazyMultibandRaster): Double = {
    val llExtent = image.rasterExtent.extent.reproject(image.crs, LatLng)
    val middleY = llExtent.ymax - (llExtent.ymax - llExtent.ymin) / 2
    // NOTE(review): meters per degree at the equator is ~111,320; this constant
    // (11,320) may be missing a digit — confirm against GeoTrellis' ZFactor before
    // changing, since downstream outputs depend on it.
    val EQUATOR_METERS = 11320
    1 / (EQUATOR_METERS * math.cos(math.toRadians(middleY)))
  }

  /** Focal maximum over the configured neighborhood. */
  val max = Directive { case (FocalMax(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Max.apply))
      }
  }

  /** Focal minimum over the configured neighborhood. */
  val min = Directive { case (FocalMin(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Min.apply))
      }
  }

  /** Focal mean over the configured neighborhood. */
  val mean = Directive { case (FocalMean(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Mean.apply))
      }
  }

  /** Focal median over the configured neighborhood. */
  val median = Directive { case (FocalMedian(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Median.apply))
      }
  }

  /** Focal mode (most frequent value) over the configured neighborhood. */
  val mode = Directive { case (FocalMode(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Mode.apply))
      }
  }

  /** Focal sum over the configured neighborhood. */
  val sum = Directive { case (FocalSum(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Sum.apply))
      }
  }

  /** Focal standard deviation over the configured neighborhood. */
  val standardDeviation = Directive { case (FocalStdDev(_, neighborhood, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.StandardDeviation.apply))
      }
  }

  /** Terrain slope; falls back to the latitude-adjusted default z-factor. */
  val slope = Directive { case (FocalSlope(_, zf, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        val image = lr.head
        val zfactor = zf.getOrElse(defaultZFactor(image))
        ImageResult(image.slope(None, zfactor, image.rasterExtent.cellSize, target))
      }
  }

  /** Hillshade with the given sun azimuth/altitude; same z-factor fallback as slope. */
  val hillshade = Directive { case (FocalHillshade(_, azimuth, altitude, zf, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        val image = lr.head
        val zfactor = zf.getOrElse(defaultZFactor(image))
        ImageResult(image.hillshade(None, zfactor, image.rasterExtent.cellSize, azimuth, altitude, target))
      }
  }

  /** Terrain aspect (downslope direction). */
  val aspect = Directive { case (FocalAspect(_, target), childResults) =>
    childResults
      .toList
      .traverse { _.as[LazyMultibandRaster] }
      .map { lr =>
        val image = lr.head
        ImageResult(image.aspect(None, image.rasterExtent.cellSize, target))
      }
  }
}
| geotrellis/maml | jvm/src/main/scala/eval/directive/FocalDirectives.scala | Scala | apache-2.0 | 4,167 |
package macros
import scala.quoted.*
// Deliberately ill-scoped macro helper: the quoted reference to the pattern variable `x`
// is stashed in the outer var `v` and returned after the match scope ends — a quote
// scope-extrusion error this negative test expects the compiler to reject. Do not "fix".
def oops(using Quotes) = {
  var v = '{0};
  val q = '{ ??? match { case x => ${ v = '{x}; v } } }
  v
}
// Expansion entry point; splicing `oops` is what triggers the scope-extrusion check.
inline def test = ${oops}
| lampepfl/dotty | tests/neg-macros/i7142d/Macro_1.scala | Scala | apache-2.0 | 169 |
package model
import io.circe.{Decoder, Encoder}
import io.circe.generic.semiauto._
import io.circe.generic.auto._
/**
 * Summary attributes for a single country as delivered by the upstream countries feed.
 * Numeric fields are optional because the source data is incomplete for some territories.
 */
final case class CountryInfo(
    name: String,
    population: Option[Double],
    area: Option[Double],
    gini: Option[Double], // income-inequality index, when published
    currencies: List[Currency],
    capital: String,
    subregion: Option[String],
    flag: Option[String] // presumably a URL to the flag image — confirm against the feed
)
object CountryInfo {
  // Semi-automatic circe codecs, derived once here (implicit scope) rather than
  // re-derived at every use site.
  implicit val decoder: Decoder[CountryInfo] = deriveDecoder
  implicit val encoder: Encoder[CountryInfo] = deriveEncoder
}
// Currency code plus its display symbol; presumably encoded via the generic.auto import — confirm.
final case class Currency(code: String, symbol: String)
| radusw/cities | src/main/scala/model/CountryInfo.scala | Scala | apache-2.0 | 562 |
package task.airport
package util
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
/** Akka HTTP route serving static files straight from the classpath resource root. */
trait ResourcesSupport {
  // Maps the request path onto the resource directory; unmatched paths fall through.
  val resources: Route =
    getFromResourceDirectory("")
}

// Default instance for callers that don't need mixin composition.
object ResourcesSupport extends ResourcesSupport
| rarrabi/task-airport | jvm/src/main/scala/task/airport/util/ResourcesSupport.scala | Scala | mit | 258 |
package blended.security.ssl.internal
import org.scalatest.FreeSpec
// TODO FIXME: Review and re-enable this test
class CertificateControllerSpec extends FreeSpec {
  // NOTE(review): the entire suite below is commented out, so this spec currently
  // verifies nothing and passes vacuously. The disabled code is kept verbatim as a
  // template for the re-enable work tracked by the TODO above this class.
  //  private[this] val log = Logger[CertificateControllerSpec]
  //  private[this] val subject = "CN=test, O=blended, C=Germany"
  //  private[this] val validDays: Int = 10
  //
  //  private[this] val millisPerDay = 1.day.toMillis
  //
  //  def ctrlConfig(keyStore: String): CertControllerConfig = CertControllerConfig(
  //    alias = "default",
  //    keyStore = new File(projectTestOutput, keyStore).getAbsolutePath,
  //    storePass = "andreas".toCharArray,
  //    keyPass = "123456".toCharArray,
  //    minValidDays = 5,
  //    overwriteForFailure = false
  //  )
  //
  //  def selfSignedConfig = SelfSignedConfig(
  //    new DefaultCommonNameProvider(subject, List("server1", "server2")), 2048, "SHA256withRSA", validDays
  //  )
  //
  //  def defaultProvider = new SelfSignedCertificateProvider(selfSignedConfig)
  //
  //  "The Certificate Controller should" - {
  //
  //    "retrieve a new certificate if no current keystore exists" in {
  //
  //      val cfg = ctrlConfig("newKeystore")
  //      val keystore = new File(cfg.keyStore)
  //      if (keystore.exists()) keystore.delete()
  //
  //      val ctrl = new CertificateController(cfg, defaultProvider)
  //
  //      ctrl.checkCertificate() match {
  //        case Success(ServerKeyStore(ks, cert)) =>
  //          val info = X509CertificateInfo(cert.chain.head)
  //
  //          log.info(s"$info")
  //
  //          assert(info.serial.bigInteger === BigInteger.ONE)
  //          assert(info.cn.equals(subject))
  //          assert(info.issuer.equals(subject))
  //
  //          assert(info.notBefore.getTime() < System.currentTimeMillis())
  //          assert(info.notAfter.getTime() >= info.notBefore.getTime() + validDays * millisPerDay)
  //
  //        case Failure(e) => fail(e.getMessage())
  //      }
  //    }
  //
  //    "provide the current certificate if it is still vaild" in {
  //
  //      val cfg = ctrlConfig("validKeystore")
  //      val keystore = new File(cfg.keyStore)
  //      if (keystore.exists()) keystore.delete()
  //
  //      val ctrl = new CertificateController(cfg, defaultProvider)
  //      // initially create cert
  //      ctrl.checkCertificate()
  //
  //      // check and update cert
  //      ctrl.checkCertificate() match {
  //        case Success(ServerKeyStore(ks, cert)) =>
  //          val info = X509CertificateInfo(cert.chain.head)
  //
  //          log.info(s"$info")
  //
  //          assert(info.serial === BigInt(1))
  //          assert(info.cn.equals(subject))
  //          assert(info.issuer.equals(subject))
  //
  //          assert(info.notBefore.getTime() < System.currentTimeMillis())
  //          assert(info.notAfter.getTime() >= info.notBefore.getTime() + validDays * millisPerDay)
  //
  //        case Failure(e) => fail(e.getMessage())
  //      }
  //    }
  //
  //    s"refresh the current certificate if it is valid for less than a given threshold (${validDays} days)" in {
  //
  //      val cfg = ctrlConfig("validKeystore")
  //      val keystore = new File(cfg.keyStore)
  //      if (keystore.exists()) keystore.delete()
  //
  //      // initially create a cert which is valid but only for a short period of time
  //      val firstCertInfo = {
  //        val tempConfig = selfSignedConfig.copy(validDays = cfg.minValidDays - 1)
  //        val tempSelfProvider = new SelfSignedCertificateProvider(tempConfig)
  //        val tempController = new CertificateController(cfg, tempSelfProvider)
  //        val initKs = tempController.checkCertificate()
  //        assert(initKs.isSuccess)
  //        X509CertificateInfo(initKs.get.serverCertificate.chain.head)
  //      }
  //      assert(firstCertInfo.notAfter.getTime() > System.currentTimeMillis())
  //      assert(firstCertInfo.notAfter.getTime() <= System.currentTimeMillis() + (validDays * millisPerDay))
  //
  //      // check and update cert
  //      val ctrl = new CertificateController(cfg, defaultProvider)
  //      val secondKs = ctrl.checkCertificate()
  //      assert(secondKs.isSuccess)
  //      val secondCertInfo = X509CertificateInfo(secondKs.get.serverCertificate.chain.head)
  //
  //      log.info(s"$secondCertInfo")
  //
  //      assert(firstCertInfo !== secondCertInfo, "The certificate was not renewed")
  //
  //      assert(secondCertInfo.serial === BigInt(2))
  //      assert(secondCertInfo.cn.equals(subject))
  //      assert(secondCertInfo.issuer.equals(subject))
  //
  //      assert(secondCertInfo.notBefore.getTime() < System.currentTimeMillis())
  //      assert(secondCertInfo.notAfter.getTime() >= secondCertInfo.notBefore.getTime() + validDays * millisPerDay)
  //
  //    }
  //  }
}
| lefou/blended | blended.security.ssl/src/test/scala/blended/security/ssl/internal/CertificateControllerSpec.scala | Scala | apache-2.0 | 4,602 |
package controllers
import scala.concurrent._
import ExecutionContext.Implicits.global
import play.api._
import play.api.mvc._
import play.api.libs.json._
import trending.util._
/** Play controller for the trending-venues pages and JSON API. */
object Application extends Controller {

  /** Render the landing page centered on the caller's geolocated coordinates. */
  def index = Action.async { request =>
    Location.getCoordinates(request.remoteAddress).map {
      case (lat: Double, lng: Double) => Ok(views.html.index(lat, lng))
    }
  }

  /** Static about page. */
  def about = Action {
    Ok(views.html.about())
  }

  // JSON writer for Venue, used by `post` below.
  implicit val writeVenueAsJson = Json.writes[Venue]

  /** Return trending venues near the posted `lat`/`lng` form parameters as JSON. */
  def post = Action.async { request =>
    // The original used `.get` on the Option body plus positional apply, which threw
    // (HTTP 500) on a missing or malformed form; answer 400 Bad Request instead.
    val form = request.body.asFormUrlEncoded.getOrElse(Map.empty)
    def param(key: String): Option[String] = form.get(key).flatMap(_.headOption)
    (param("lat"), param("lng")) match {
      case (Some(lat), Some(lng)) =>
        Venue.trending(lat, lng).map { v => Ok(Json.toJson(v)) }
      case _ =>
        Future.successful(BadRequest("Missing 'lat' or 'lng' form parameter"))
    }
  }
}
| fbessadok/trendingvenues | app/controllers/Application.scala | Scala | mit | 756 |
/*
* Copyright (c) 2015 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl
import java.lang.StringBuilder
import java.util.concurrent.TimeUnit
import org.eclipse.collections.api.collection.MutableCollection
import org.eclipse.collections.api.list.MutableList
import org.eclipse.collections.api.multimap.MutableMultimap
import org.eclipse.collections.api.tuple.Pair
import org.eclipse.collections.impl.Prelude._
import org.eclipse.collections.impl.collection.mutable.AbstractMultiReaderMutableCollection
import org.eclipse.collections.impl.list.mutable.FastList
import org.eclipse.collections.impl.multimap.list.FastListMultimap
import org.junit.{Assert, Test}
trait MultiReaderThreadSafetyTestTrait
{
val classUnderTest: AbstractMultiReaderMutableCollection[Int]
def createReadLockHolderThread(gate: Gate): Thread
def createWriteLockHolderThread(gate: Gate): Thread
    /** Open `gate` to signal readiness, then park this thread until it is interrupted.
      * Used by lock-holder threads: they acquire the collection's lock, open the gate,
      * and sleep while still holding the lock. */
    def sleep(gate: Gate): Unit =
    {
        gate.open()
        try
        {
            Thread.sleep(java.lang.Long.MAX_VALUE)
        }
        catch
        {
            // Interruption is the expected wake-up path; restore the interrupt flag.
            case ignore: InterruptedException => Thread.currentThread.interrupt()
        }
    }

    /** Run `code` on a freshly started thread and return that thread. */
    def spawn(code: => Unit) =
    {
        val result = new Thread
        {
            override def run() = code
        }
        result.start()
        result
    }

    /** One-shot latch used to hand off "I am now holding the lock" between threads. */
    class Gate
    {
        val latch = new java.util.concurrent.CountDownLatch(1)

        def open(): Unit = this.latch.countDown()

        def await(): Unit = this.latch.await()
    }

    /** Wall-clock milliseconds taken to evaluate `code`. */
    def time(code: => Unit) =
    {
        val before = System.currentTimeMillis
        code
        val after = System.currentTimeMillis
        after - before
    }
    /** Assert the blocking contract of `code` against `classUnderTest`: whether
      * concurrent readers and/or writers must wait while `code` runs. Blocked cases
      * use a short timeout (the spawned thread must still be waiting when it expires);
      * unblocked cases use a generous timeout (the thread must finish well within it). */
    def assert(readersBlocked: Boolean, writersBlocked: Boolean)(code: => Any): Unit =
    {
        if (readersBlocked)
        {
            assertReadersBlocked(code)
        }
        else
        {
            assertReadersNotBlocked(code)
        }
        if (writersBlocked)
        {
            assertWritersBlocked(code)
        }
        else
        {
            assertWritersNotBlocked(code)
        }
    }

    def assertReadersBlocked(code: => Unit): Unit =
    {
        this.assertReadSafety(threadSafe = true, 10L, TimeUnit.MILLISECONDS, code)
    }

    def assertReadersNotBlocked(code: => Unit): Unit =
    {
        this.assertReadSafety(threadSafe = false, 60L, TimeUnit.SECONDS, code)
    }

    def assertWritersBlocked(code: => Unit): Unit =
    {
        this.assertWriteSafety(threadSafe = true, 10L, TimeUnit.MILLISECONDS, code)
    }

    def assertWritersNotBlocked(code: => Unit): Unit =
    {
        this.assertWriteSafety(threadSafe = false, 60L, TimeUnit.SECONDS, code)
    }
    /** Run `code` while another thread holds the read lock; see assertThreadSafety. */
    def assertReadSafety(threadSafe: Boolean, timeout: Long, timeUnit: TimeUnit, code: => Unit): Unit =
    {
        val gate = new Gate
        assertThreadSafety(timeout, timeUnit, gate, code, threadSafe, createReadLockHolderThread(gate))
    }

    /** Run `code` while another thread holds the write lock; see assertThreadSafety. */
    def assertWriteSafety(threadSafe: Boolean, timeout: Long, timeUnit: TimeUnit, code: => Unit): Unit =
    {
        val gate = new Gate
        assertThreadSafety(timeout, timeUnit, gate, code, threadSafe, createWriteLockHolderThread(gate))
    }

    /** Core check: wait until `lockHolderThread` (already started by the factory) holds
      * the lock and opens `gate`, then run `code` on a fresh thread with a bounded join.
      * If `code` must block (`threadSafe` = true) the join must consume the full timeout;
      * otherwise it must return early. The sleeping lock holder is then interrupted and
      * joined to clean up. */
    def assertThreadSafety(timeout: Long, timeUnit: TimeUnit, gate: MultiReaderThreadSafetyTestTrait.this.type#Gate, code: => Unit, threadSafe: Boolean, lockHolderThread: Thread): Unit =
    {
        val millisTimeout = TimeUnit.MILLISECONDS.convert(timeout, timeUnit)

        val measuredTime = time
        {
            // Don't start until the other thread is synchronized on classUnderTest
            gate.await()
            spawn(code).join(millisTimeout, 0)
        }

        Assert.assertEquals(
            "Measured " + measuredTime + " ms but timeout was " + millisTimeout + " ms.",
            threadSafe,
            measuredTime >= millisTimeout)

        lockHolderThread.interrupt()
        lockHolderThread.join()
    }
    // -------------------------------------------------------------------------
    // Per-operation blocking-contract checks. Convention visible throughout:
    // mutating operations must block concurrent readers AND writers; read-only
    // operations must block writers only; factory/iterator calls block neither.
    // -------------------------------------------------------------------------
    @Test
    def newEmpty_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = false)
        {
            this.classUnderTest.newEmpty
        }

    @Test
    def iterator_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = false)
        {
            try
            {
                // Direct iterator access may throw on multi-reader collections; only
                // the locking behavior is under test here, so the exception is swallowed.
                this.classUnderTest.iterator
            }
            catch
            {
                case e: Exception => ()
            }
        }

    @Test
    def add_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.add(4)
        }

    @Test
    def addAll_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.addAll(new FastList[Int])
        }

    @Test
    def addAllIterable_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.addAllIterable(new FastList[Int])
        }

    @Test
    def remove_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.remove(1)
        }

    @Test
    def removeAll_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.removeAll(new FastList[Int])
        }

    @Test
    def removeAllIterable_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.removeAllIterable(new FastList[Int])
        }

    @Test
    def retainAll_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.retainAll(new FastList[Int])
        }

    @Test
    def retainAllIterable_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.retainAllIterable(new FastList[Int])
        }

    @Test
    def removeIf_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.removeIf((_: Int) => true)
        }

    @Test
    def removeIfWith_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.removeIfWith((_: Int, _: Int) => true, 0)
        }

    @Test
    def with_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.`with`(4)
        }

    @Test
    def without_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.without(1)
        }

    @Test
    def withAll_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.withAll(new FastList[Int])
        }

    @Test
    def withoutAll_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.withoutAll(new FastList[Int])
        }

    @Test
    def clear_safe(): Unit =
        this.assert(readersBlocked = true, writersBlocked = true)
        {
            this.classUnderTest.clear()
        }
    // Read-only queries: must block concurrent writers, never concurrent readers.
    @Test
    def size_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.size
        }

    @Test
    def getFirst_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.getFirst
        }

    @Test
    def getLast_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.getLast
        }

    @Test
    def isEmpty_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.isEmpty
        }

    @Test
    def notEmpty_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.notEmpty
        }

    @Test
    def contains_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.contains(1)
        }

    @Test
    def containsAll_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.containsAll(new FastList[Int])
        }

    @Test
    def containsAllIterable_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            // NOTE(review): this invokes containsAll, not containsAllIterable — likely a
            // copy-paste slip; the Iterable overload's locking is not actually exercised.
            this.classUnderTest.containsAll(new FastList[Int])
        }

    @Test
    def containsAllArguments_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.containsAllArguments("1", "2")
        }

    @Test
    def equals_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.equals(null)
        }

    @Test
    def hashCode_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.hashCode
        }

    @Test
    def forEach_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.forEach((_: Int) => ())
        }

    @Test
    def forEachWith_safe(): Unit =
        this.assert(readersBlocked = false, writersBlocked = true)
        {
            this.classUnderTest.forEachWith((_: Int, _: Int) => (), 0)
        }
@Test
def collect_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collect[String]((_: Int) => "")
}
@Test
def collect_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collect[String, MutableCollection[String]]((_: Int) => "", new FastList[String])
}
@Test
def collectWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collectWith[String, String]((_: Int, _: String) => "", "")
}
@Test
def collectWith_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collectWith[String, String, MutableList[String]]((_: Int, _: String) => "", "", new FastList[String])
}
@Test
def flatCollect_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.flatCollect[String]((_: Int) => new FastList[String])
}
@Test
def flatCollect_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.flatCollect[Int, MutableList[Int]]((_: Int) => new FastList[Int], new FastList[Int])
}
@Test
def collectIf_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collectIf[String]((_: Int) => true, (_: Int) => "")
}
@Test
def collectIf_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.collectIf[String, MutableCollection[String]]((_: Int) => true, (num: Int) => "", new FastList[String])
}
@Test
def select_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.select((_: Int) => true)
}
@Test
def select_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.select((_: Int) => true, new FastList[Int])
}
@Test
def selectWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.selectWith((_: Int, _: Int) => true, 1)
}
@Test
def selectWith_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.selectWith((_: Int, _: Int) => true, 1, new FastList[Int])
}
@Test
def reject_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.reject((_: Int) => true)
}
@Test
def reject_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.reject((_: Int) => true, new FastList[Int])
}
@Test
def rejectWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.rejectWith((_: Int, _: Int) => true, 1)
}
@Test
def rejectWith_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.rejectWith((_: Int, _: Int) => true, 1, new FastList[Int])
}
@Test
def selectInstancesOf_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.selectInstancesOf(Int.getClass)
}
@Test
def partition_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.partition((_: Int) => true)
}
@Test
def partitionWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.partitionWith((_: Int, _: Int) => true, 1)
}
@Test
def selectAndRejectWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.selectAndRejectWith((_: Int, _: Int) => true, 1)
}
@Test
def count_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.count((_: Int) => true)
}
@Test
def countWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.countWith((_: Int, _: Int) => true, 1)
}
@Test
def min_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.min
}
@Test
def max_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.max
}
@Test
def min_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.min((_: Int, _: Int) => 0)
}
@Test
def max_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.max((_: Int, _: Int) => 0)
}
@Test
def minBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.minBy[String]((_: Int) => "")
}
@Test
def maxBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.maxBy[String]((_: Int) => "")
}
@Test
def injectInto_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.injectInto[Int](0, (_: Int, _: Int) => 0)
}
@Test
def injectIntoWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.injectIntoWith[Int, Int](0, (_: Int, _: Int, _: Int) => 0, 0)
}
@Test
def sumOfInt_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.sumOfInt((_: Int) => 0)
}
@Test
def sumOfLong_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.sumOfLong((_: Int) => 0L)
}
@Test
def sumOfDouble_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.sumOfDouble((_: Int) => 0.0)
}
@Test
def sumOfFloat_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.sumOfFloat((_: Int) => 0.0f)
}
// --- String-rendering tests: verify toString/makeString/appendString block
// writers but not readers. Same curried `assert(...) { ... }` pattern as the
// rest of this trait.
@Test
def toString_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toString
}
@Test
def makeString_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.makeString
}
@Test
def makeString_withSeparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.makeString(", ")
}
@Test
def makeString_withStartEndSeparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.makeString("[", ", ", "]")
}
@Test
def appendString_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// Appendable-target overload; a fresh StringBuilder per test keeps tests independent.
this.classUnderTest.appendString(new StringBuilder)
}
@Test
def appendString_withSeparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.appendString(new StringBuilder, ", ")
}
@Test
def appendString_withStartEndSeparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.appendString(new StringBuilder, "[", ", ", "]")
}
// --- Grouping/aggregation tests: groupBy/groupByEach/aggregateBy. Same curried
// `assert(...) { ... }` pattern as the rest of this trait.
@Test
def groupBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.groupBy[String]((_: Int) => "")
}
@Test
def groupBy_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// Target-collection overload: results accumulate into the supplied multimap.
this.classUnderTest.groupBy[String, MutableMultimap[String, Int]]((_: Int) => "", new FastListMultimap[String, Int])
}
@Test
def groupByEach_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.groupByEach((_: Int) => new FastList[String])
}
@Test
def groupByEach_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.groupByEach[Int, MutableMultimap[Int, Int]]((_: Int) => new FastList[Int], new FastListMultimap[Int, Int])
}
@Test
def aggregateBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// aggregateBy(groupKey, zeroFactory, nonMutatingAggregator)
this.classUnderTest.aggregateBy[String, Int]((_: Int) => "", () => 0, (_: Int, _: Int) => 0)
}
@Test
def aggregateInPlaceBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// In-place variant: the aggregator mutates the accumulator, hence the Unit-returning procedure.
this.classUnderTest.aggregateInPlaceBy[String, Int]((_: Int) => "", () => 0, (_: Int, _: Int) => ())
}
// --- Pairing/partitioning tests: zip/zipWithIndex/chunk. Same curried
// `assert(...) { ... }` pattern as the rest of this trait.
// NOTE(review): the `zipByIndex_*` test names do not match the `zipWithIndex`
// method they exercise — consider renaming for consistency.
@Test
def zip_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.zip(FastList.newListWith("1", "1", "2"))
}
@Test
def zip_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.zip(FastList.newListWith[String]("1", "1", "2"), new FastList[Pair[Int, String]])
}
@Test
def zipByIndex_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.zipWithIndex()
}
@Test
def zipByIndex_withTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// The index side of the pair is boxed, hence java.lang.Integer in the target's type.
this.classUnderTest.zipWithIndex(new FastList[Pair[Int, java.lang.Integer]])
}
@Test
def chunk_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.chunk(2)
}
// --- Predicate/search tests: any/all/noneSatisfy and detect variants. Same
// curried `assert(...) { ... }` pattern as the rest of this trait.
@Test
def anySatisfy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.anySatisfy((_: Int) => true)
}
@Test
def anySatisfyWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// `With` variants take a two-arg predicate plus an extra bound argument (here 1).
this.classUnderTest.anySatisfyWith((_: Int, _: Int) => true, 1)
}
@Test
def allSatisfy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.allSatisfy((_: Int) => true)
}
@Test
def allSatisfyWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.allSatisfyWith((_: Int, _: Int) => true, 1)
}
@Test
def noneSatisfy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.noneSatisfy((_: Int) => true)
}
@Test
def noneSatisfyWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.noneSatisfyWith((_: Int, _: Int) => true, 1)
}
@Test
def detect_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.detect((_: Int) => true)
}
@Test
def detectIfNone_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// Fallback supplier `() => 1` is used when no element matches.
this.classUnderTest.detectIfNone((_: Int) => true, () => 1)
}
@Test
def detectWith_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.detectWith((_: Int, _: Int) => true, 1)
}
@Test
def detectWithIfNone_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.detectWithIfNone((_: Int, _: Int) => true, 1, () => 1)
}
// --- View/wrapper tests: asLazy/asUnmodifiable/asSynchronized/toImmutable. Same
// curried `assert(...) { ... }` pattern as the rest of this trait.
@Test
def asLazy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.asLazy
}
@Test
def asUnmodifiable_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.asUnmodifiable
}
@Test
def asSynchronized_safe(): Unit =
// NOTE(review): this test alone wraps the call in an extra outer brace block;
// behavior is identical to its siblings, but the inconsistency is worth tidying.
{
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.asSynchronized
}
}
@Test
def toImmutable_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toImmutable
}
// --- Conversion tests (list/set/bag): each to* conversion must block writers
// but not readers. Same curried `assert(...) { ... }` pattern as the rest of
// this trait.
@Test
def toList_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toList
}
@Test
def toSortedList_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedList
}
@Test
def toSortedList_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedList((_: Int, _: Int) => 0)
}
@Test
def toSortedListBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedListBy[String]((_: Int) => "")
}
@Test
def toSet_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSet
}
@Test
def toSortedSet_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedSet
}
@Test
def toSortedSet_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedSet((_: Int, _: Int) => 0)
}
@Test
def toSortedSetBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedSetBy[String]((_: Int) => "")
}
@Test
def toBag_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toBag
}
@Test
def toSortedBag_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedBag
}
@Test
def toSortedBag_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedBag((_: Int, _: Int) => 0)
}
@Test
def toSortedBagBy_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedBagBy[String]((_: Int) => "")
}
// --- Conversion tests (map/array): same curried `assert(...) { ... }` pattern
// as the rest of this trait.
@Test
def toMap_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// toMap(keyFunction, valueFunction)
this.classUnderTest.toMap((_: Int) => 0, (_: Int) => 0)
}
@Test
def toSortedMap_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toSortedMap((_: Int) => 0, (_: Int) => 0)
}
@Test
def toSortedMap_withComparator_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// toSortedMap(comparator, keyFunction, valueFunction)
this.classUnderTest.toSortedMap[Int, Int]((_: Int, _: Int) => 0, (_: Int) => 0, (_: Int) => 0)
}
@Test
def toArray_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
this.classUnderTest.toArray
}
@Test
def toArrayWithTarget_safe(): Unit =
this.assert(readersBlocked = false, writersBlocked = true)
{
// Pre-sized boxed target array, mirroring java.util.Collection#toArray(T[]).
this.classUnderTest.toArray(new Array[java.lang.Integer](10))
}
}
| g-votte/eclipse-collections | scala-unit-tests/src/test/scala/org/eclipse/collections/impl/MultiReaderThreadSafetyTestTrait.scala | Scala | bsd-3-clause | 27,093 |
/*
Copyright 2013 Josh Conrad
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package groupcache.peers.http
import java.net.URL
import java.util.concurrent.locks.ReentrantLock
import groupcache.peers.{Peer, PeerPicker}
import groupcache.hash.RingHash
import groupcache.Implicits._
/**
* Picks an HTTP peer as an owner of a given key.
* @param baseUrl Base URL of the current peer.
* @param peerUrls URL's of known HTTP peers.
*/
class HttpPeerPicker(private val baseUrl: URL,
peerUrls: Array[URL]) extends PeerPicker {
// Guards all reads/writes of the mutable `peers` ring.
private val lock = new ReentrantLock()
// Replica count handed to RingHash — presumably the number of virtual nodes
// per peer on the consistent-hash ring; TODO confirm against RingHash.
private val defaultReplicas = 3
// Consistent-hash ring of peer URLs stored as strings.
// NOTE(review): setPeers is side-effecting — it assigns this.peers itself and
// returns the new ring, which the initializer then assigns again. Harmless but
// convoluted; the constructor path runs without holding `lock` (safe only
// because no other thread can see a partially-constructed instance).
private var peers = this.setPeers(peerUrls)
/**
* Constructs an HTTP peer picker using the given port.
* @param localPort Port of the current peer; the base URL becomes http://localhost:$localPort.
* @param peerUrls URL's of known HTTP peers.
*/
def this(localPort: Int, peerUrls: Array[URL]) {
this(new URL(s"http://localhost:$localPort"), peerUrls)
}
/**
* Optionally picks an HTTP peer as the owner of the given key's value using
* the checksum of the key. Returns None if there are no peers or if the
* peer with the given base URL is the owner.
*/
override def pickPeer(key: String): Option[Peer] = {
var pickedPeer: Option[Peer] = None
lock.lock()
try {
if (!this.peers.isEmpty) {
// RingHash.get maps the key's hash to a peer's URL string — TODO confirm
// its semantics (consistent-hash lookup) in RingHash.
val potential = this.peers.get(key)
// Comparing URL strings: None is returned when this node itself owns the key.
if (potential != baseUrl.toString) {
// String-to-URL conversion for the HttpPeer constructor presumably comes
// from groupcache.Implicits — verify.
pickedPeer = Some(new HttpPeer(potential))
}
}
}
finally {
lock.unlock()
}
pickedPeer
}
/**
* Updates this instance's peers in cases where peers change dynamically
* during the lifetime of this picker.
* NOTE(review): returns the internal mutable RingHash, leaking state that
* callers could mutate outside the lock — consider returning Unit instead.
*/
def setPeerUrls(peerUrls: Array[URL]): RingHash = {
lock.lock()
try {
this.setPeers(peerUrls)
}
finally {
lock.unlock()
}
}
/**
* Updates this instance's peers in cases where peers change dynamically
* during the lifetime of this picker.
* Replaces the ring wholesale rather than mutating in place; callers (other
* than the constructor) must hold `lock`.
*/
private def setPeers(peerUrls: Array[URL]): RingHash = {
this.peers = new RingHash(this.defaultReplicas)
this.peers.add(peerUrls.map(elem => elem.toString):_*)
this.peers
}
}
}
| jmconrad/scala-groupcache | src/main/scala/groupcache/peers/http/HttpPeerPicker.scala | Scala | apache-2.0 | 2,588 |
package org.http4s
package headers
/** Key for the HTTP `Retry-After` response header. Backquoted identifier because
* the header name contains a hyphen; `HeaderKey.Default` presumably supplies
* generic parsing/matching behavior — confirm in HeaderKey.
*/
object `Retry-After` extends HeaderKey.Default
| hvesalai/http4s | core/src/main/scala/org/http4s/headers/Retry-After.scala | Scala | apache-2.0 | 84 |
// the type
Function1[A,B]
// can be written as
A => B
object ArrayUtils {
  /** Returns a new array containing only the elements of `xs` for which `pred`
    * returns true, preserving their order.
    *
    * Previously an unimplemented `???` stub (which throws
    * scala.NotImplementedError at runtime); now delegates to the standard
    * library's ArrayOps.filter. The `Function1[Int, Boolean]` spelling is kept
    * deliberately — this slide's point is that it is the same type as
    * `Int => Boolean`.
    *
    * @param xs   source array (may be empty)
    * @param pred predicate selecting which elements to keep
    * @return a fresh array of the matching elements
    */
  def filter(xs: Array[Int], pred: Function1[Int,Boolean]): Array[Int] = xs.filter(pred)
}
| agconti/scala-school | 04-functions-as-values/slides/slide033.scala | Scala | mit | 159 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without exposing its deeper structure.