package util

import org.scalajs.dom.raw.Element

import scala.scalajs.js
import scala.scalajs.js.Object
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}

object termJs {

  @JSName("Terminal")
  @js.native
  class Terminal(options: Object) extends Object {
    def open(element: Element): Unit = js.native
    def write(any: Any): Unit = js.native
    def resize(cols: Int, rows: Int): Unit = js.native
    def on(event: String, f: js.Function1[String, _]): Unit = js.native
    def blur(): Unit = js.native
    //def reset(): Unit = js.native
    def destroy(): Unit = js.native
    // use addons fit.js
    def fit(): Unit = js.native
    // use addons attach.js
    def attach(any: Any): Unit = js.native
    def proposeGeometry(): Geometry = js.native
  }

  def DefaultWithStdin = js.Dynamic.literal(cols = 150, rows = ROWS, screenKeys = true, cursorBlink = true)

  def DefaultWithOutStdin = js.Dynamic.literal(cols = 150, rows = ROWS, screenKeys = false, useStyle = false, cursorBlink = false)

  def initTerminal(terminal: Terminal, element: Element) = {
    terminal.open(element)
    terminal.fit()
  }

  val ROWS = 24
}

@ScalaJSDefined
class Geometry(
  val cols: Int,
  val rows: Int
) extends js.Object
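
// Usage sketch for the term.js facade above (illustrative, not part of the
// original file): wiring a Terminal to a DOM element. The element lookup is an
// assumption; only Terminal, initTerminal and the option literals come from
// the facade itself.
//
//   import org.scalajs.dom
//   import util.termJs._
//
//   val element = dom.document.getElementById("terminal").asInstanceOf[dom.raw.Element]
//   val terminal = new Terminal(DefaultWithStdin)
//   initTerminal(terminal, element)                    // open() + fit()
//   terminal.on("data", (input: String) => println(input))
//   terminal.write("connected\r\n")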
| felixgborrego/simple-docker-ui | src/main/scala/util/TermJs.scala | Scala | mit | 1,243 |
package aiouniya.spark

import java.io.File
import java.util.{Date, Properties}

import aiouniya.spark.util.DateUtil
import org.apache.spark.{SparkConf, SparkContext}

import scala.io.Source

/**
 * Created by zstorm on 2017/11/22.
 */
trait AzSparkJob {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    val appName = sparkConf.get("spark.app.name")
    println(s"appName=$appName")

    val propsNamePrefix = s"${appName}_props"
    val propsFile = new File(".").listFiles().
      filter(file => file.getName.startsWith(propsNamePrefix))(0)

    val prop = new Properties()
    Source.fromFile(propsFile, "UTF-8").getLines().filter(!_.startsWith("#")).foreach(line => {
      val index = line.indexOf('=')
      val key = line.substring(0, index)
      val value = line.substring(index + 1).replaceAll("\\\\", "")
      prop.put(key, value)
      println(s"jobProp [$key] = [$value]")
    })

    sparkConf.getAll.map(x => s"sparkConf [${x._1}] = [${x._2}]").foreach(println)

    val strFlowTime = prop.getProperty("azkaban.flow.start.timestamp")
    flowTime = DateUtil.toDate(strFlowTime, "yyyy-MM-dd'T'HH:mm:ss.SSSX")
    println("flowTime=" + DateUtil.format(flowTime))

    setSparkConf(sparkConf, prop)
    val sc = new SparkContext(sparkConf)
    try {
      run(sc, prop)
    } finally {
      sc.stop()
    }
  }

  def run(sc: SparkContext, prop: Properties)

  def setSparkConf(sparkConf: SparkConf, prop: Properties)

  var flowTime: Date = new Date()
}
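
// Usage sketch (illustrative): a concrete job only supplies `run` and
// `setSparkConf`; the trait's `main` handles property loading and the
// SparkContext lifecycle. The job name, config key and logic below are
// assumptions, not part of the original file.
//
//   object WordCountJob extends AzSparkJob {
//     override def setSparkConf(sparkConf: SparkConf, prop: Properties): Unit =
//       sparkConf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
//
//     override def run(sc: SparkContext, prop: Properties): Unit = {
//       val counts = sc.textFile(prop.getProperty("input.path"))
//         .flatMap(_.split("\\s+")).map(_ -> 1L).reduceByKey(_ + _)
//       counts.saveAsTextFile(prop.getProperty("output.path"))
//     }
//   }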
| 7u/spark-learning | aiouniya.spark/src/main/scala/aiouniya/spark/AzSparkJob.scala | Scala | apache-2.0 | 1,551 |
/**
 * Copyright 2011-2016 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.http.check

import io.gatling.core.check.CheckBuilder

package object async {
  type AsyncCheckBuilder = CheckBuilder[AsyncCheck, String, _, _]
}
| thkluge/gatling | gatling-http/src/main/scala/io/gatling/http/check/async/package.scala | Scala | apache-2.0 | 784 |
package colang.ast.parsed

/**
 * A trait that generalizes different applicable entities, providing some common behaviour
 */
trait Applicable {

  def parameters: Seq[Variable]

  /**
   * Returns true if the object can be called with arguments of given types (this is, the arguments are
   * implicitly convertible to parameter types).
   * @param argumentTypes argument types
   * @return true if the object can be called with these arguments
   */
  def canBeAppliedTo(argumentTypes: Seq[Type]): Boolean = {
    if (parameters.isEmpty && argumentTypes.isEmpty) {
      true
    } else if (parameters.size == argumentTypes.size) {
      parameters map {_.type_} zip argumentTypes map { case (p, a) => a.isImplicitlyConvertibleTo(p) } reduce {_ && _}
    } else false
  }
}

object Applicable {

  /**
   * Checks if two applicable entities have the same parameter types.
   * @param f first applicable
   * @param g second applicable
   * @return true if applicables have the same parameter types.
   */
  def sameParameterTypes[A <: Applicable](f: A, g: A): Boolean = {
    if (f.parameters.isEmpty && g.parameters.isEmpty) {
      true
    } else if (f.parameters.size == g.parameters.size) {
      (f.parameters map { _.type_ }) zip (g.parameters map { _.type_ }) map { ts => ts._1 == ts._2 } reduce { _ && _ }
    } else false
  }

  /**
   * Tries to select a single unambiguous overload from a Seq that can be applied to arguments with given types.
   * @param overloads overloads to choose from
   * @param argumentTypes argument types
   * @return OverloadResolutionResult, see below
   */
  def resolveOverload[A <: Applicable](overloads: Seq[A], argumentTypes: Seq[Type]): OverloadResolutionResult[A] = {
    val candidates = overloads filter { _ canBeAppliedTo argumentTypes }

    if (candidates.nonEmpty) {
      val nonAmbiguousResult = candidates find { candidate =>
        val otherCandidates = candidates filterNot { _ eq candidate }
        otherCandidates forall { _ canBeAppliedTo (candidate.parameters map { _.type_ }) }
      }

      nonAmbiguousResult match {
        case Some(overload) => OverloadFound(overload)
        case None => AmbiguousOverloadsFound(candidates)
      }
    } else {
      NoOverloadsFound()
    }
  }

  sealed trait OverloadResolutionResult[A <: Applicable]
  case class OverloadFound[A <: Applicable](overload: A) extends OverloadResolutionResult[A]
  case class NoOverloadsFound[A <: Applicable]() extends OverloadResolutionResult[A]
  case class AmbiguousOverloadsFound[A <: Applicable](overloads: Seq[A]) extends OverloadResolutionResult[A]
}
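
// Usage sketch (illustrative): resolving a call site against two overloads.
// The `Function` applicable and the `f_int` / `f_double` values are assumptions
// standing in for the real Type/Variable implementations elsewhere in the compiler.
//
//   val overloads: Seq[Function] = Seq(f_int /* (Int) */, f_double /* (Double) */)
//   Applicable.resolveOverload(overloads, Seq(intType)) match {
//     case OverloadFound(f)            => // exactly one best match: f_int
//     case AmbiguousOverloadsFound(fs) => // report ambiguity over fs
//     case NoOverloadsFound()          => // report "no matching overload"
//   }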
| psenchanka/colang | src/main/scala/colang/ast/parsed/Applicable.scala | Scala | mit | 2,620 |
// Copyright 2012 by Christopher Brown - MIT Licensed
package timexla

import scala.collection.mutable.{ListBuffer, Map => MutableMap}

/**
 * BIOMap is simply a way of aggregating BIOTags in one object, counting incoming tags
 * and lazily initializing the count array for each key (keys may be of an arbitrary type).
 */
case class BIOMap[A]() {
  val internal = MutableMap[A, Array[Double]]()

  def +=(key: A, tag: BIOTag.Value) {
    // plus one smoothing! (eww, gross.)
    if (!internal.contains(key)) internal(key) = Array.fill(BIOTag.values.size)(0.1d)
    internal(key)(tag.id) += 1d
  }
}
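
// Usage sketch (illustrative, not part of the original file): counts accumulate
// per (key, tag) pair on top of the smoothed 0.1 baseline.
//
//   val byToken = BIOMap[String]()
//   byToken += ("Monday", BIOTag.B)
//   byToken += ("Monday", BIOTag.B)
//   // byToken.internal("Monday")(BIOTag.B.id) == 2.1  (0.1 baseline + two counts)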
/**
 * State is a linked list for backtracking through a Viterbi sequence.
 *
 * It inherits from Ordered so that you can compare two states based on their underlying probabilities
 * toList recursively flattens a State (linked list) into a normal Scala List
 */
case class State(logprob: Double, tag: BIOTag.Value,
    previous: Option[State]) extends Ordered[State] {

  def compare(that: State) = this.logprob.compare(that.logprob)

  def toList: List[State] = {
    previous match {
      case Some(actual) => actual.toList :+ this
      case _ => List(this)
    }
  }
}

object State {
  def Empty = State(Double.NegativeInfinity, BIOTag.B, None)
}
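
// Usage sketch (illustrative): the best Viterbi path is recovered by following
// `previous` links backwards, which `toList` flattens into forward order.
//
//   val s0 = State(math.log(0.5), BIOTag.B, None)
//   val s1 = State(math.log(0.3), BIOTag.I, Some(s0))
//   val s2 = State(math.log(0.2), BIOTag.O, Some(s1))
//   s2.toList.map(_.tag)   // List(B, I, O)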
/**
 * Features intends to be a collection of feature functions, weights for which are learned automatically.
 * Soon, soon.
 */
object Features {
  val gazetteer = List(
    "minute", "hour", "day", "week", "month", "season", "quarter", "year",
    "last", "earlier", "later", "before", "past", "coming", "ago", "next", "previous",
    "yesterday", "tomorrow", "current", "beginning",
    "first", "second", "third", "fourth", "fifth", "sixth",
    "Jan", "Feb", "Mar", "Apr", "May", "June", "July", "Aug", "Sept", "Oct", "Nov", "Dec",
    "mon", "tues", "wednes", "thurs", "fri", "satur", "sun",
    "moment")
  val gazetteer_regex = ("(?i)(" + gazetteer.mkString("|") + ")").r

  def digit_ratio(token: String) = digit_count(token).toDouble / token.length
  def digit_count(token: String) = """\d""".r.findAllIn(token).length
  def in_gazetteer(token: String) = { gazetteer_regex.findFirstIn(token) != None }
}

case class Hmm(documents: Seq[Document], lambda: Double) {
  /* For the most part, our feature lists are (discrete-token-property -> BIO.distribution) pairs.
     When the token property is continuous, we will do K-means clustering after the fact. Or not. */
  val unigram_counts = MutableMap[String, Int]().withDefaultValue(1)
  val emissions = ListBuffer[(BIOTag.Value, String)]()
  val ratios = ListBuffer[(Double, BIOTag.Value)]()
  val digits = BIOMap[Int]()
  val gazette_matches = BIOMap[Boolean]()
  val transitions = ListBuffer[(BIOTag.Value, BIOTag.Value)]()
  val tags = ListBuffer[BIOTag.Value]()
  // e.g.: val tags = List(BIOTag.B, BIOTag.I, BIOTag.O, BIOTag.O, BIOTag.B, BIOTag.I)

  // println("Going through "+documents.length+" documents.")
  documents.foreach { document =>
    document.tokens.zip(document.tags).foreach { case (token, tag) =>
      // unigram_bio_counts += (token, tag)
      emissions += Tuple2(tag, token)
      ratios += Tuple2(Features.digit_ratio(token), tag)
      digits += (Features.digit_count(token), tag)
      gazette_matches += (Features.in_gazetteer(token), tag)
      unigram_counts(token) += 1
    }
    // l.zip(l.tail) == l.sliding(2) except it returns a list of tuples instead of an iterator of lists
    tags ++= document.tags
    transitions ++= document.tags.zip(document.tags.tail)
  }

  // ratios.groupBy(_._2) = List[tag: BIOTag, pair: ListBuffer[(ratio, tag)]]
  val tag_ratio_centers = ratios.groupBy(_._2).map { case (tag, pair) =>
    (tag, QuickMath.avg(pair.map(_._1)))
  }

  val tag_count = tags.length.toDouble
  val tag_probs = tags.groupBy(x => x).mapValues(_.length / tag_count)

  // transition_counts is a Map(tag0)(tag1) = count
  val transition_counts = transitions.groupBy(_._1).mapValues { tag_pair_list =>
    tag_pair_list.groupBy(_._2).mapValues(_.length).withDefaultValue(0)
  }.withDefaultValue(Map[BIOTag.Value, Int]())

  val transition_logprobs = BIOTag.values.map { tag0 =>
    val tag1_counts = transition_counts(tag0)
    // val tag1_counts = tag1_counts_unaltered.updated(BIOTag.B,
    //   tag1_counts_unaltered(BIOTag.B) + tag1_counts_unaltered(BIOTag.I))
    val hapax_count = tag1_counts.count(_._2 == 1)
    val probs = BIOTag.values.map { tag1 =>
      tag1 -> ((hapax_count + lambda) * tag_probs(tag1) + tag1_counts(tag1))
    }.toMap
    val sum = probs.values.sum
    tag0 -> probs.mapValues(prob => math.log(prob / sum))
  }.toMap

  val smoothed_total = 1 + unigram_counts.values.sum.toDouble
  val unigram_probs = unigram_counts.mapValues(_ / smoothed_total).toMap.withDefaultValue(1 / smoothed_total)

  // emissions is List[tag: BIOTag, token: String]
  // emission_counts is Map(tag)(token) = count
  val emission_counts_raw = emissions.groupBy(_._1).mapValues { tag_token_list =>
    tag_token_list.groupBy(_._2).mapValues(_.length).withDefaultValue(0)
  }
  val emission_counts = emission_counts_raw.updated(BIOTag.B,
    emission_counts_raw(BIOTag.B).map { case (token, count) =>
      token -> (count + emission_counts_raw(BIOTag.I)(token))
    }
  )
  val emission_logprobs = emission_counts.map { case (tag, token_counts) =>
    // get the count of words that happened only once
    val hapax_count = token_counts.count(_._2 == 1)
    val smoothed_counts = token_counts.map { case (token, count) =>
      token -> (count + (lambda + hapax_count) * unigram_probs(token))
    }
    val sum = lambda + smoothed_counts.values.sum
    tag -> smoothed_counts.mapValues(count => math.log(count / sum)).withDefaultValue {
      math.log((lambda + hapax_count) * (1 / smoothed_total) / sum)
    }
  }

  def tag(unaligned_tokens: Seq[String]) = {
    val tokens = "###" +: unaligned_tokens :+ "###"
    // not really true, it's more like B: 0.5, I: 0, O: 0.5
    val alpha = Array.fill(tokens.length, BIOTag.values.size)(State.Empty)
    BIOTag.values.foreach { tag => alpha(0)(tag.id) = State(math.log(1d / 3), tag, None) }

    (1 until (tokens.length - 1)).foreach { i =>
      // println("--"+tokens(i)+"--")
      BIOTag.values.foreach { tag =>
        // this will be extended with the other features
        val emission_logprob = emission_logprobs(tag)(tokens(i))
        /* for each of the previous states, take the probability of coming into this node from the previous
           node, times the probability of the previous node (and store the state you came from for each)
           thus, even if the transition is really likely, if the path to get there is unlikely,
           we won't take it */
        val inbound = alpha(i - 1).map { previous_state =>
          val transition = transition_logprobs(previous_state.tag)(tag) // math.log(1d/6)
          (previous_state.logprob + transition, previous_state)
        }
        // println("->"+tag+" "+inbound.map { x =>
        //   val color = if (x == inbound.max) Console.BLUE else ""
        //   "%s%s-> %3.4f%s" format (color, x._2.tag, x._1, Console.RESET)
        // }.mkString(" "))
        // println(" * emission_logprob (%3.4f) = %3.4f (%s->)" format
        //   (emission_logprob, inbound.max._1 + emission_logprob, inbound.max._2.tag))
        alpha(i)(tag.id) = State(inbound.max._1 + emission_logprob, tag, Some(inbound.max._2))
      }
    }

    val alpha_max_sequence = alpha(alpha.length - 2).max.toList.drop(1)
    alpha_max_sequence.map(_.tag)
  }
}
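
// Usage sketch (illustrative): training on labelled documents and decoding a new
// sentence. `Document` is assumed here to expose `tokens: Seq[String]` and
// `tags: Seq[BIOTag.Value]`, matching how the constructor above consumes it.
//
//   val hmm = Hmm(trainingDocuments, lambda = 0.1)
//   val predicted: Seq[BIOTag.Value] = hmm.tag(Seq("next", "Monday", "we", "meet"))
//   // e.g. Seq(B, I, O, O) -- "next Monday" recognised as a timex span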
| scaling/timexla | src/main/scala/timexla/hmm.scala | Scala | mit | 7,482 |
/*
 * Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
 */
package com.example.shoppingcart.impl

import java.time.Instant

import akka.Done
import slick.dbio.DBIO
import slick.jdbc.PostgresProfile.api._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

/**
 * The report repository manages the storage of ShoppingCartReport, which is an API class (view model).
 *
 * It saves data in a ready-for-consumption format for that specific API model.
 * If the API changes, we must regenerate the stored models.
 */
class ShoppingCartReportRepository(database: Database) {

  class ShoppingCartReportTable(tag: Tag) extends Table[ShoppingCartReport](tag, "shopping_cart_report") {
    def cartId = column[String]("cart_id", O.PrimaryKey)
    def created = column[Boolean]("created")
    def checkedOut = column[Boolean]("checked_out")

    def * = (cartId, created, checkedOut) <> ((ShoppingCartReport.apply _).tupled, ShoppingCartReport.unapply)
  }

  val reportTable = TableQuery[ShoppingCartReportTable]

  def createTable() = reportTable.schema.createIfNotExists

  def findById(id: String): Future[Option[ShoppingCartReport]] =
    database.run(findByIdQuery(id))

  def createReport(cartId: String): DBIO[Done] = {
    findByIdQuery(cartId).flatMap {
      case None => reportTable += ShoppingCartReport(cartId, created = true, checkedOut = false)
      case _ => DBIO.successful(Done)
    }.map(_ => Done).transactionally
  }

  def addCheckoutTime(cartId: String): DBIO[Done] = {
    findByIdQuery(cartId).flatMap {
      case Some(cart) => reportTable.insertOrUpdate(cart.copy(checkedOut = true))
      // if that happens we have a corrupted system
      // cart checkout can only happen for an existing cart
      case None => throw new RuntimeException(s"Didn't find cart for checkout. CartID: $cartId")
    }.map(_ => Done).transactionally
  }

  private def findByIdQuery(cartId: String): DBIO[Option[ShoppingCartReport]] =
    reportTable
      .filter(_.cartId === cartId)
      .result.headOption
}

case class ShoppingCartReport(cartId: String, created: Boolean, checkedOut: Boolean)
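
// Usage sketch (illustrative): the DBIO values compose into one transaction and
// are run against the injected Slick Database. Names below are assumptions.
//
//   val repo = new ShoppingCartReportRepository(database)
//   database.run(repo.createTable())                 // one-off schema setup
//   database.run(repo.createReport("cart-42") andThen repo.addCheckoutTime("cart-42"))
//   repo.findById("cart-42")                         // Future[Option[ShoppingCartReport]]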
| ignasi35/lagom | dev/sbt-plugin/src/sbt-test/sbt-plugin/akka-persistence-typed-migration-scala/shopping-cart-lagom-persistence/src/main/scala/com/example/shoppingcart/impl/ShoppingCartReportRepository.scala | Scala | apache-2.0 | 2,158 |
package scala.scalajs.compiler.util

import language.implicitConversions

class ScopedVar[A](init: A) {
  import ScopedVar.Assignment

  private var value = init

  def this()(implicit ev: Null <:< A) = this(ev(null))

  def get: A = value
  def :=(newValue: A): Assignment[A] = new Assignment(this, newValue)
}

object ScopedVar {
  class Assignment[T](scVar: ScopedVar[T], value: T) {
    private[ScopedVar] def push(): AssignmentStackElement[T] = {
      val stack = new AssignmentStackElement(scVar, scVar.value)
      scVar.value = value
      stack
    }
  }

  private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T) {
    private[ScopedVar] def pop(): Unit = {
      scVar.value = oldValue
    }
  }

  implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get

  def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = {
    val stack = ass.map(_.push())
    try body
    finally stack.reverse.foreach(_.pop())
  }
}
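
// Usage sketch (illustrative): assignments made through withScopedVars are
// visible only inside `body` and are restored afterwards, even on exceptions.
//
//   import ScopedVar.withScopedVars
//
//   val currentOwner = new ScopedVar[String]("<root>")
//   withScopedVars(currentOwner := "MyClass") {
//     println(currentOwner.get)   // "MyClass" (or just `currentOwner` via toValue)
//   }
//   println(currentOwner.get)     // back to "<root>"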
| swhgoon/scala-js | compiler/src/main/scala/scala/scalajs/compiler/util/ScopedVar.scala | Scala | bsd-3-clause | 950 |
/*
 * Copyright 2014–2017 SlamData Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package quasar.mimir

import quasar.blueeyes._, json._
import quasar.precog.common._
import quasar.yggdrasil._
import quasar.precog.util.Identifier

import scalaz._

trait TypeInferencerSpecs[M[+_]] extends EvaluatorSpecification[M]
    with LongIdMemoryDatasetConsumer[M] {

  import dag._
  import instructions.{
    Line,
    BuiltInFunction2Op,
    Add, Neg,
    DerefArray, DerefObject,
    ArraySwap, WrapObject, JoinObject
  }
  import quasar.yggdrasil.bytecode._
  import library._

  def flattenType(jtpe: JType): Map[JPath, Set[CType]] = {
    def flattenAux(jtpe: JType): Set[(JPath, Option[CType])] = jtpe match {
      case p: JPrimitiveType => Schema.ctypes(p).map(tpe => (NoJPath, Some(tpe)))

      case JArrayFixedT(elems) =>
        for ((i, jtpe) <- elems.toSet; (path, ctpes) <- flattenAux(jtpe)) yield (JPathIndex(i) \ path, ctpes)

      case JObjectFixedT(fields) =>
        for ((field, jtpe) <- fields.toSet; (path, ctpes) <- flattenAux(jtpe)) yield (JPathField(field) \ path, ctpes)

      case JUnionT(left, right) => flattenAux(left) ++ flattenAux(right)

      case u @ (JArrayUnfixedT | JObjectUnfixedT) => Set((NoJPath, None))

      case x => sys.error("Unexpected: " + x)
    }

    flattenAux(jtpe).groupBy(_._1).mapValues(_.flatMap(_._2))
  }
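
  // Usage sketch (illustrative; the JNumberT / Schema.ctypes behaviour is assumed
  // from the expectations in the specs below): flattenType turns a structured
  // JType into per-path sets of candidate column types.
  //
  //   flattenType(JObjectFixedT(Map("time" -> JNumberT)))
  //   // => Map(JPath("time") -> Set(CLong, CDouble, CNum))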
  def extractLoads(graph: DepGraph): Map[String, Map[JPath, Set[CType]]] = {

    def merge(left: Map[String, Map[JPath, Set[CType]]], right: Map[String, Map[JPath, Set[CType]]]): Map[String, Map[JPath, Set[CType]]] = {
      def mergeAux(left: Map[JPath, Set[CType]], right: Map[JPath, Set[CType]]): Map[JPath, Set[CType]] = {
        left ++ right.map { case (path, ctpes) => path -> (ctpes ++ left.getOrElse(path, Set())) }
      }
      left ++ right.map { case (file, jtpes) => file -> mergeAux(jtpes, left.getOrElse(file, Map())) }
    }

    def extractSpecLoads(spec: BucketSpec): Map[String, Map[JPath, Set[CType]]] = spec match {
      case UnionBucketSpec(left, right) =>
        merge(extractSpecLoads(left), extractSpecLoads(right))

      case IntersectBucketSpec(left, right) =>
        merge(extractSpecLoads(left), extractSpecLoads(right))

      case Group(id, target, child) =>
        merge(extractLoads(target), extractSpecLoads(child))

      case UnfixedSolution(id, target) =>
        extractLoads(target)

      case Extra(target) =>
        extractLoads(target)
    }

    graph match {
      case _: Root => Map()
      case New(parent) => extractLoads(parent)
      case AbsoluteLoad(Const(CString(path)), jtpe) => Map(path -> flattenType(jtpe))
      case Operate(_, parent) => extractLoads(parent)
      case Reduce(_, parent) => extractLoads(parent)
      case Morph1(_, parent) => extractLoads(parent)
      case Morph2(_, left, right) => merge(extractLoads(left), extractLoads(right))
      case Join(_, joinSort, left, right) => merge(extractLoads(left), extractLoads(right))
      case Filter(_, target, boolean) => merge(extractLoads(target), extractLoads(boolean))
      case AddSortKey(parent, _, _, _) => extractLoads(parent)
      case Memoize(parent, _) => extractLoads(parent)
      case Distinct(parent) => extractLoads(parent)
      case Split(spec, child, _) => merge(extractSpecLoads(spec), extractLoads(child))
      case _: SplitGroup | _: SplitParam => Map()
      case x => sys.error("Unexpected: " + x)
    }
  }

  val cLiterals = Set(CBoolean, CLong, CDouble, CNum, CString, CNull, CDate, CPeriod)

  "type inference" should {
    "propagate structure/type information through a trivial Join/DerefObject node" in {
      val line = Line(1, 1, "")

      val input =
        Join(DerefObject, Cross(None),
          AbsoluteLoad(Const(CString("/file"))(line))(line),
          Const(CString("column"))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> cLiterals)
      )

      result must_== expected
    }

    "propagate structure/type information through New nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          New(
            Join(DerefObject, Cross(None),
              AbsoluteLoad(Const(CString("/file"))(line))(line),
              Const(CString("column"))(line))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Operate nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file"))(line))(line),
            Const(CString("column"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Reduce nodes" in {
      val line = Line(1, 1, "")

      val input =
        Reduce(Mean,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file"))(line))(line),
            Const(CString("column"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Morph1 nodes" in {
      val line = Line(1, 1, "")

      val input =
        Morph1(toUpperCase,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file"))(line))(line),
            Const(CString("column"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CString, CDate)))

      result must_== expected
    }

    "propagate structure/type information through Morph2 nodes" in {
      val line = Line(1, 1, "")

      val input =
        Morph2(concat,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column0"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file1"))(line))(line),
            Const(CString("column1"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file0" -> Map(JPath("column0") -> Set(CString, CDate)),
        "/file1" -> Map(JPath("column1") -> Set(CString, CDate)))

      result must_== expected
    }

    "propagate structure/type information through DerefArray Join nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          New(
            Join(DerefArray, Cross(None),
              AbsoluteLoad(Const(CString("/file"))(line))(line),
              Const(CLong(0))(line))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath(0) -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through ArraySwap Join nodes" in {
      val line = Line(1, 1, "")

      val input =
        Join(ArraySwap, Cross(None),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column0"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file1"))(line))(line),
            Const(CString("column1"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file0" -> Map(JPath("column0") -> Set[CType]()),
        "/file1" -> Map(JPath("column1") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through WrapObject Join nodes" in {
      val line = Line(1, 1, "")

      val input =
        Join(WrapObject, Cross(None),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column0"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file1"))(line))(line),
            Const(CString("column1"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file0" -> Map(JPath("column0") -> Set(CString)),
        "/file1" -> Map(JPath("column1") -> cLiterals)
      )

      result must_== expected
    }

    "propagate structure/type information through Op2 Join nodes" in {
      val line = Line(1, 1, "")

      val input =
        Join(BuiltInFunction2Op(minOf), IdentitySort,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column0"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column1"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file0" -> Map(
          JPath("column0") -> Set(CLong, CDouble, CNum),
          JPath("column1") -> Set(CLong, CDouble, CNum)
        )
      )

      result must_== expected
    }

    "propagate structure/type information through Filter nodes" in {
      val line = Line(1, 1, "")

      val input =
        Filter(IdentitySort,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file0"))(line))(line),
            Const(CString("column0"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/file1"))(line))(line),
            Const(CString("column1"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file0" -> Map(JPath("column0") -> cLiterals),
        "/file1" -> Map(JPath("column1") -> Set(CBoolean))
      )

      result must_== expected
    }

    "propagate structure/type information through AddSortKey nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          AddSortKey(
            Join(DerefObject, Cross(None),
              AbsoluteLoad(Const(CString("/file"))(line))(line),
              Const(CString("column"))(line))(line),
            "foo", "bar", 23
          )
        )(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Memoize nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          Memoize(
            Join(DerefObject, Cross(None),
              AbsoluteLoad(Const(CString("/file"))(line))(line),
              Const(CString("column"))(line))(line),
            23
          )
        )(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Distinct nodes" in {
      val line = Line(1, 1, "")

      val input =
        Operate(Neg,
          Distinct(
            Join(DerefObject, Cross(None),
              AbsoluteLoad(Const(CString("/file"))(line))(line),
              Const(CString("column"))(line))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(JPath("column") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "propagate structure/type information through Split nodes (1)" in {
      val line = Line(1, 1, "")
      def clicks = AbsoluteLoad(Const(CString("/file"))(line))(line)
      val id = new Identifier

      val input =
        Split(
          Group(
            1,
            clicks,
            UnfixedSolution(0,
              Join(DerefObject, Cross(None),
                clicks,
                Const(CString("column0"))(line))(line))),
          Join(Add, Cross(None),
            Join(DerefObject, Cross(None),
              SplitParam(0, id)(line),
              Const(CString("column1"))(line))(line),
            Join(DerefObject, Cross(None),
              SplitGroup(1, clicks.identities, id)(line),
              Const(CString("column2"))(line))(line))(line), id)(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/file" -> Map(
          JPath("column0") -> cLiterals,
          JPath("column0.column1") -> Set(CLong, CDouble, CNum),
          JPath("column2") -> Set(CLong, CDouble, CNum)
        )
      )

      result mustEqual expected
    }

    "propagate structure/type information through Split nodes (2)" in {
      val line = Line(1, 1, "")
      def clicks = AbsoluteLoad(Const(CString("/clicks"))(line))(line)
      val id = new Identifier

      // clicks := //clicks forall 'user { user: 'user, num: count(clicks.user where clicks.user = 'user) }
      val input =
        Split(
          Group(0,
            Join(DerefObject, Cross(None), clicks, Const(CString("user"))(line))(line),
            UnfixedSolution(1,
              Join(DerefObject, Cross(None),
                clicks,
                Const(CString("user"))(line))(line))),
          Join(JoinObject, Cross(None),
            Join(WrapObject, Cross(None),
              Const(CString("user"))(line),
              SplitParam(1, id)(line))(line),
            Join(WrapObject, Cross(None),
              Const(CString("num"))(line),
              Reduce(Count,
                SplitGroup(0, clicks.identities, id)(line))(line))(line))(line), id)(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/clicks" -> Map(
          JPath("user") -> cLiterals
        )
      )

      result must_== expected
    }

    "propagate structure/type information through Split nodes (3)" in {
      val line = Line(1, 1, "")
      def clicks = AbsoluteLoad(Const(CString("/clicks"))(line))(line)
      val id = new Identifier

      // clicks := //clicks forall 'user { user: 'user, age: clicks.age, num: count(clicks.user where clicks.user = 'user) }
      val input =
        Split(
          Group(0,
            Join(DerefObject, Cross(None), clicks, Const(CString("user"))(line))(line),
            UnfixedSolution(1,
              Join(DerefObject, Cross(None),
                clicks,
                Const(CString("user"))(line))(line))),
          Join(JoinObject, Cross(None),
            Join(JoinObject, Cross(None),
              Join(WrapObject, Cross(None),
                Const(CString("user"))(line),
                SplitParam(1, id)(line))(line),
              Join(WrapObject, Cross(None),
                Const(CString("num"))(line),
                Reduce(Count,
                  SplitGroup(0, clicks.identities, id)(line))(line))(line))(line),
            Join(WrapObject, Cross(None),
              Const(CString("age"))(line),
              Join(DerefObject, Cross(None),
                clicks,
                Const(CString("age"))(line))(line))(line))(line), id)(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/clicks" -> Map(
          JPath("user") -> cLiterals,
          JPath("age") -> cLiterals
        )
      )

      result must_== expected
    }

    "rewrite loads for a trivial but complete DAG such that they will restrict the columns loaded" in {
      val line = Line(1, 1, "")

      val input =
        Join(Add, IdentitySort,
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/clicks"))(line))(line),
            Const(CString("time"))(line))(line),
          Join(DerefObject, Cross(None),
            AbsoluteLoad(Const(CString("/hom/heightWeight"))(line))(line),
            Const(CString("height"))(line))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/clicks" -> Map(JPath("time") -> Set(CLong, CDouble, CNum)),
        "/hom/heightWeight" -> Map(JPath("height") -> Set(CLong, CDouble, CNum))
      )

      result must_== expected
    }

    "negate type inference from deref by wrap" in {
      val line = Line(1, 1, "")
      val clicks = AbsoluteLoad(Const(CString("/clicks"))(line))(line)

      val input =
        Join(DerefObject, Cross(None),
          Join(WrapObject, Cross(None),
            Const(CString("foo"))(line),
            clicks)(line),
          Const(CString("foo"))(line))(line)

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/clicks" -> Map(NoJPath -> cLiterals))

      result mustEqual expected
    }

    "propagate type information through split->wrap->deref" in {
      val line = Line(1, 1, "")
      val clicks = AbsoluteLoad(Const(CString("/clicks"))(line))(line)
      val id = new Identifier

      val clicksTime =
        Join(DerefObject, Cross(None),
          clicks,
          Const(CString("time"))(line))(line)

      val split =
        Split(
          Group(0, clicks, UnfixedSolution(1, clicksTime)),
          Join(WrapObject, Cross(None),
            Const(CString("foo"))(line),
            SplitGroup(0, Identities.Specs(Vector(LoadIds("/clicks"))), id)(line))(line), id)(line)

      val input =
        Join(DerefObject, Cross(None),
          split,
          Const(CString("foo"))(line))(line)

      /*
         clicks := //clicks

         split := solve 'time
           clicks' := (clicks where clicks.time = 'time)
           { "foo": clicks' }

         split.foo
       */

      val result = extractLoads(inferTypes(JType.JPrimitiveUnfixedT)(input))

      val expected = Map(
        "/clicks" -> Map(
          NoJPath -> cLiterals,
          JPath("time") -> cLiterals))

      result mustEqual expected
    }
  }
}

object TypeInferencerSpecs extends TypeInferencerSpecs[Need]
| drostron/quasar | mimir/src/test/scala/quasar/mimir/TypeInferencerSpecs.scala | Scala | apache-2.0 | 19,516 |
/***********************************************************************
 * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.web.stats

import com.typesafe.scalalogging.LazyLogging
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import org.geotools.filter.text.ecql.ECQL
import org.json4s.{DefaultFormats, Formats}
import org.locationtech.geomesa.index.stats.{GeoMesaStats, HasGeoMesaStats}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.stats.{Histogram, MinMax, Stat}
import org.locationtech.geomesa.web.core.GeoMesaServletCatalog.GeoMesaLayerInfo
import org.locationtech.geomesa.web.core.{GeoMesaScalatraServlet, GeoMesaServletCatalog}
import org.opengis.filter.Filter
import org.scalatra.BadRequest
import org.scalatra.json._
import org.scalatra.swagger._

import scala.reflect.ClassTag

class GeoMesaStatsEndpoint(val swagger: Swagger, rootPath: String = GeoMesaScalatraServlet.DefaultRootPath)
    extends GeoMesaScalatraServlet with LazyLogging with NativeJsonSupport with SwaggerSupport {

  override def root: String = "stats"
  override def defaultFormat: Symbol = 'json
  override protected def applicationDescription: String = "The GeoMesa Stats API"
  override protected implicit def jsonFormats: Formats = DefaultFormats

  // GeoServer's AdvancedDispatcherFilter tries to help us out, but it gets in the way.
  // For our purposes, we want to unwrap those filters.
  // CorsSupport and other Scalatra classes/traits override handle, and will choke on GS's ADF :(
  // To fix this, we need to have this unwrapping happen.
  // We can achieve that in one of two ways:
  //  1. Put the below override in this class.
  //  2. Make a trait which has this override in and make sure it appears last (or merely later than CorsSupport, etc.)
  override def handle(req: HttpServletRequest, res: HttpServletResponse): Unit =
    super.handle(GeoMesaScalatraServlet.wrap(req), res)

  logger.info("*** Starting the stats REST API endpoint!")

  before() {
    contentType = formats(format)
  }

  val getCount = (
    apiOperation[Integer]("getCount")
      summary "Gets an estimated count of simple features"
      notes "Gets an estimated count of simple features from the stats table in Accumulo."
      parameters (
        pathParam[String](GeoMesaStatsEndpoint.WorkspaceParam).description("GeoServer workspace."),
        pathParam[String](GeoMesaStatsEndpoint.LayerParam).description("GeoServer layer."),
        queryParam[Option[String]](GeoMesaStatsEndpoint.CqlFilterParam).description("A CQL filter to compute the count of simple features against. Defaults to Filter.INCLUDE."),
        queryParam[Option[Boolean]](GeoMesaStatsEndpoint.NoCacheParam).description("Calculate stats against the data set instead of using cached statistics (may be slow)."))
    )

  get("/:workspace/:layer/count", operation(getCount)) {
    retrieveLayerInfo("count") match {
      case Some(statInfo) =>
        val layer = params(GeoMesaStatsEndpoint.LayerParam)
        val sft = statInfo.sft
        val stats = statInfo.ds.asInstanceOf[HasGeoMesaStats].stats
        val filter = params.get(GeoMesaStatsEndpoint.CqlFilterParam).map(ECQL.toFilter).getOrElse(Filter.INCLUDE)
        val noCache = params.get(GeoMesaStatsEndpoint.NoCacheParam).exists(_.toBoolean)

        logger.debug(s"Found a GeoMesa Accumulo datastore for $layer")
        logger.debug(s"SFT for $layer is ${SimpleFeatureTypes.encodeType(sft)}")
        logger.debug(s"Running stat with filter: $filter")
        logger.debug(s"Running stat with no cached stats: $noCache")

        val countStat = stats.getCount(sft, filter, noCache)

        countStat match {
          case Some(count) =>
            logger.debug(s"Retrieved count $count for $layer")
            count
          case None =>
            logger.debug(s"No estimated count for ${sft.getTypeName}")
            BadRequest(s"Estimated count for ${sft.getTypeName} is not available")
        }
      case _ => BadRequest(s"No registered layer called ${params(GeoMesaStatsEndpoint.LayerParam)}")
    }
  }

  val getBounds = (
    apiOperation[String]("getBounds")
      summary "Gets the bounds of attributes"
      notes "Gets the bounds of attributes from the stats table in Accumulo."
      parameters (
        pathParam[String](GeoMesaStatsEndpoint.WorkspaceParam).description("GeoServer workspace."),
        pathParam[String](GeoMesaStatsEndpoint.LayerParam).description("GeoServer layer."),
        queryParam[Option[String]](GeoMesaStatsEndpoint.AttributesParam).description("A comma separated list of attribute names to retrieve bounds for. If omitted, all attributes will be used."),
        queryParam[Option[String]](GeoMesaStatsEndpoint.CqlFilterParam).description("A CQL filter to compute the count of simple features against. Defaults to Filter.INCLUDE. Will not be used if the noCache parameter is false."),
        queryParam[Option[Boolean]](GeoMesaStatsEndpoint.NoCacheParam).description("Calculate stats against the data set instead of using cached statistics (may be slow)."))
    )

  get("/:workspace/:layer/bounds", operation(getBounds)) {
    retrieveLayerInfo("bounds") match {
      case Some(statInfo) =>
        val layer = params(GeoMesaStatsEndpoint.LayerParam)
        val sft = statInfo.sft
        val stats = statInfo.ds.asInstanceOf[HasGeoMesaStats].stats
        val filter = params.get(GeoMesaStatsEndpoint.CqlFilterParam).map(ECQL.toFilter).getOrElse(Filter.INCLUDE)
        val noCache = params.get(GeoMesaStatsEndpoint.NoCacheParam).exists(_.toBoolean)

        logger.debug(s"Found a GeoMesa Accumulo datastore for $layer")
        logger.debug(s"SFT for $layer is ${SimpleFeatureTypes.encodeType(sft)}")
        logger.debug(s"Running stat with filter: $filter")
        logger.debug(s"Running stat with no cached stats: $noCache")

        val userAttributes: Seq[String] = params.get(GeoMesaStatsEndpoint.AttributesParam) match {
          case Some(attributesString) => attributesString.split(',')
          case None => Nil
        }
        val attributes = org.locationtech.geomesa.tools.stats.getAttributes(sft, userAttributes)

        val statQuery = attributes.map(Stat.MinMax)
        val boundStatList = stats.getSeqStat[MinMax[Any]](sft, statQuery, filter, noCache)

        val jsonBoundsList = attributes.map { attribute =>
          val out = boundStatList.find(_.property == attribute) match {
            case None => "\"unavailable\""
            case Some(mm) if mm.isEmpty => "\"no matching data\""
            case Some(mm) => mm.toJson
          }
          s""""$attribute": $out"""
        }
        jsonBoundsList.mkString("{", ", ", "}")
      case _ => BadRequest(s"No registered layer called ${params(GeoMesaStatsEndpoint.LayerParam)}")
    }
  }

  val getHistograms = (
    apiOperation[String]("getHistogram")
      summary "Gets histograms of attributes"
      notes "Gets histograms of attributes from the stats table in Accumulo."
      parameters (
        pathParam[String](GeoMesaStatsEndpoint.WorkspaceParam).description("GeoServer workspace."),
        pathParam[String](GeoMesaStatsEndpoint.LayerParam).description("GeoServer layer."),
        queryParam[Option[String]](GeoMesaStatsEndpoint.AttributesParam).description("A comma separated list of attribute names to retrieve bounds for. If omitted, all attributes will be used."),
        queryParam[Option[Integer]](GeoMesaStatsEndpoint.BinsParam).description("The number of bins the histograms will have. Defaults to 1000."),
        queryParam[Option[String]](GeoMesaStatsEndpoint.CqlFilterParam).description("A CQL filter to compute the count of simple features against. Defaults to Filter.INCLUDE. Will not be used if the noCache parameter is false."),
        queryParam[Option[Boolean]](GeoMesaStatsEndpoint.NoCacheParam).description("Calculate stats against the data set instead of using cached statistics (may be slow)."),
        queryParam[Option[Boolean]](GeoMesaStatsEndpoint.CalculateBoundsParam).description("Calculates the bounds of each histogram. Will use the default bounds if false. Will not be used if the noCache parameter is false."))
    )

  get("/:workspace/:layer/histogram", operation(getHistograms)) {
    retrieveLayerInfo("histogram") match {
      case Some(statInfo) =>
        val layer = params(GeoMesaStatsEndpoint.LayerParam)
        val sft = statInfo.sft
        val stats = statInfo.ds.asInstanceOf[HasGeoMesaStats].stats
        val filter = params.get(GeoMesaStatsEndpoint.CqlFilterParam).map(ECQL.toFilter).getOrElse(Filter.INCLUDE)
        val noCache = params.get(GeoMesaStatsEndpoint.NoCacheParam).exists(_.toBoolean)
        val calculateBounds = params.get(GeoMesaStatsEndpoint.CalculateBoundsParam).exists(_.toBoolean)

        logger.debug(s"Found a GeoMesa Accumulo datastore for $layer")
        logger.debug(s"SFT for $layer is ${SimpleFeatureTypes.encodeType(sft)}")
        logger.debug(s"Running stat with filter: $filter")
        logger.debug(s"Running stat with no cached stats: $noCache")

        val userAttributes: Seq[String] = params.get(GeoMesaStatsEndpoint.AttributesParam) match {
          case Some(attributesString) => attributesString.split(',')
          case None => Nil
        }
        val attributes = org.locationtech.geomesa.tools.stats.getAttributes(sft, userAttributes)

        val bins = params.get(GeoMesaStatsEndpoint.BinsParam).map(_.toInt)

        val histograms = if (noCache) {
          val bounds = scala.collection.mutable.Map.empty[String, (Any, Any)]
          attributes.foreach { attribute =>
            stats.getStat[MinMax[Any]](sft, attribute).foreach { b =>
              bounds.put(attribute, if (b.min == b.max) Histogram.buffer(b.min) else b.bounds)
            }
          }

          if (bounds.size != attributes.size) {
            val noBounds = attributes.filterNot(bounds.contains)
            logger.warn(s"Initial bounds are not available for attributes ${noBounds.mkString(", ")}.")

            if (calculateBounds) {
              logger.debug("Calculating bounds...")
              stats.getSeqStat[MinMax[Any]](sft, noBounds.map(Stat.MinMax), filter, exact = true).foreach { mm =>
                bounds.put(mm.property, mm.bounds)
              }
            } else {
              logger.debug("Using default bounds.")
              noBounds.foreach { attribute =>
                val ct = ClassTag[Any](sft.getDescriptor(attribute).getType.getBinding)
                bounds.put(attribute, GeoMesaStats.defaultBounds(ct.runtimeClass))
              }
            }
          }

          logger.debug("Running live histogram stat query...")
          val length = bins.getOrElse(GeoMesaStats.DefaultHistogramSize)
          val queries = attributes.map { attribute =>
            val ct = ClassTag[Any](sft.getDescriptor(attribute).getType.getBinding)
            val (lower, upper) = bounds(attribute)
            Stat.Histogram[Any](attribute, length, lower, upper)(ct)
          }
          stats.getSeqStat[Histogram[Any]](sft, queries, filter, exact = true)
        } else {
          stats.getSeqStat[Histogram[Any]](sft, attributes.map(Stat.Histogram(_, 0, null, null))).map {
            case histogram: Histogram[Any] if bins.forall(_ == histogram.length) => histogram
            case histogram: Histogram[Any] =>
              val descriptor = sft.getDescriptor(histogram.property)
              val ct = ClassTag[Any](descriptor.getType.getBinding)
              val statString = Stat.Histogram[Any](histogram.property, bins.get, histogram.min, histogram.max)(ct)
              val binned = Stat(sft, statString).asInstanceOf[Histogram[Any]]
              binned.addCountsFrom(histogram)
              binned
          }
        }

        val jsonHistogramList = attributes.map { attribute =>
          val out = histograms.find(_.property == attribute) match {
            case None => "\"unavailable\""
            case Some(hist) if hist.isEmpty => "\"no matching data\""
            case Some(hist) => hist.toJson
          }
          s""""$attribute": $out"""
        }
        jsonHistogramList.mkString("{", ", ", "}")
      case _ => BadRequest(s"No registered layer called ${params(GeoMesaStatsEndpoint.LayerParam)}")
    }
  }

  def retrieveLayerInfo(call: String): Option[GeoMesaLayerInfo] = {
    val workspace = params(GeoMesaStatsEndpoint.WorkspaceParam)
    val layer = params(GeoMesaStatsEndpoint.LayerParam)
    logger.debug(s"Received $call request for workspace: $workspace and layer: $layer.")
    GeoMesaServletCatalog.getGeoMesaLayerInfo(workspace, layer)
  }
}

object GeoMesaStatsEndpoint {
  val LayerParam = "layer"
  val WorkspaceParam = "workspace"
  val CqlFilterParam = "cql_filter"
  val AttributesParam = "attributes"
  val BinsParam = "bins"
  val NoCacheParam = "noCache"
  val CalculateBoundsParam = "calculateBounds"
}
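
// Usage sketch (illustrative request shapes; the context path, workspace and
// layer names are assumptions): the routes above are mounted under the "stats"
// root, with query parameters matching the companion object's constants.
//
//   GET /geomesa/stats/myWorkspace/myLayer/count?cql_filter=bbox(geom,-180,-90,180,90)
//   GET /geomesa/stats/myWorkspace/myLayer/bounds?attributes=dtg,geom
//   GET /geomesa/stats/myWorkspace/myLayer/histogram?attributes=dtg&bins=100&noCache=true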
| elahrvivaz/geomesa | geomesa-web/geomesa-web-stats/src/main/scala/org/locationtech/geomesa/web/stats/GeoMesaStatsEndpoint.scala | Scala | apache-2.0 | 13,360 |
package bad.robot.temperature.server

import bad.robot.temperature._
import bad.robot.temperature.rrd.{Host, Seconds}
import bad.robot.temperature.server.Requests._
import bad.robot.temperature.test._
import cats.effect.IO
import fs2.Scheduler
import org.http4s.Method._
import org.http4s.Status.{NoContent, Ok}
import org.http4s.implicits._
import org.http4s.{Request, Uri}
import org.specs2.mutable.Specification

import scala.concurrent.duration.FiniteDuration
import scalaz.{\/, \/-}

object Requests {
  val Put: String => Request[IO] = (body) => Request[IO](PUT, Uri(path = s"temperature")).withBody(body).unsafeRunSync
}

class TemperatureEndpointTest extends Specification {

  sequential

  val scheduler = new Scheduler {
    protected def scheduleOnce(delay: FiniteDuration)(thunk: => Unit) = ???
    protected def scheduleAtFixedRate(period: FiniteDuration)(thunk: => Unit) = ???
  }

  "Put some temperature data" >> {
    val service = TemperatureEndpoint(scheduler, stubReader(\/-(List())), AllTemperatures(), Connections())
    val measurement = """{
                        | "host" : {
                        | "name" : "localhost",
                        | "utcOffset" : null,
                        | "timezone" : null
                        | },
                        | "seconds" : 9000,
                        | "sensors" : [
                        | {
                        | "name" : "28-00000dfg34ca",
                        | "temperature" : {
                        | "celsius" : 31.1
                        | }
                        | }
                        | ]
                        |}""".stripMargin

    val response = service.orNotFound.run(Put(measurement)).unsafeRunSync
    response must haveStatus(NoContent)
  }

  "Bad json when writing sensor data" >> {
    val service = TemperatureEndpoint(scheduler, stubReader(\/-(List())), AllTemperatures(), Connections())
    val request: Request[IO] = Put("bad json")
    val response = service.orNotFound.run(request).unsafeRunSync
    response must haveStatus(org.http4s.Status.BadRequest)
    response.as[String].unsafeRunSync must_== "The request body was malformed."
  }

  "Get multiple sensors temperatures" >> {
    val now = Seconds.now()

    val measurement1 = s"""{
                          | "host" : {
                          | "name" : "lounge",
                          | "utcOffset" : null,
                          | "timezone" : null
                          | },
                          | "seconds" : ${now.value - 10},
                          | "sensors" : [
                          | {
                          | "name" : "28-00000dfg34ca",
                          | "temperature" : {
                          | "celsius" : 31.1
                          | }
                          | },
                          | {
                          | "name" : "28-00000f33fdc3",
                          | "temperature" : {
                          | "celsius" : 32.8
                          | }
                          | }
                          | ]
                          |}""".stripMargin

    val measurement2 = s"""{
                          | "host" : {
                          | "name" : "bedroom",
                          | "utcOffset" : null,
                          | "timezone" : null
                          | },
                          | "seconds" : ${now.value},
                          | "sensors" : [
                          | {
                          | "name" : "28-00000f3554ds",
                          | "temperature" : {
                          | "celsius" : 21.1
                          | }
                          | },
                          | {
                          | "name" : "28-000003dd3433",
                          | "temperature" : {
                          | "celsius" : 22.8
                          | }
                          | }
                          | ]
                          |}""".stripMargin

    val service = TemperatureEndpoint(scheduler, stubReader(\/-(List())), AllTemperatures(), Connections())
    service.orNotFound.run(Request[IO](DELETE, Uri.uri("/temperatures"))).unsafeRunSync
    service.orNotFound.run(Put(measurement1)).unsafeRunSync
    service.orNotFound.run(Put(measurement2)).unsafeRunSync

    val request = Request[IO](GET, Uri.uri("/temperatures"))
    val response = service.orNotFound.run(request).unsafeRunSync
    response.status must_== Ok

    val expected = s"""{
                      | "measurements" : [
                      | {
                      | "host" : {
                      | "name" : "lounge",
                      | "utcOffset" : null,
                      | "timezone" : null
                      | },
                      | "seconds" : ${now.value - 10},
                      | "sensors" : [
                      | {
                      | "name" : "28-00000dfg34ca",
                      | "temperature" : {
                      | "celsius" : 31.1
                      | }
                      | },
                      | {
                      | "name" : "28-00000f33fdc3",
                      | "temperature" : {
                      | "celsius" : 32.8
                      | }
                      | }
                      | ]
                      | },
                      | {
                      | "host" : {
                      | "name" : "bedroom",
                      | "utcOffset" : null,
                      | "timezone" : null
                      | },
                      | "seconds" : ${now.value},
                      | "sensors" : [
                      | {
                      | "name" : "28-00000f3554ds",
                      | "temperature" : {
                      | "celsius" : 21.1
                      | }
                      | },
                      | {
                      | "name" : "28-000003dd3433",
                      | "temperature" : {
                      | "celsius" : 22.8
                      | }
                      | }
                      | ]
                      | }
                      | ]
                      |}""".stripMargin

    response.as[String].unsafeRunSync must_== expected
  }

  "Get multiple sensors, averaging the temperatures" >> {
    val now = Seconds.now()

    val measurement1 = s"""{
                          | "host" : {
                          | "name" : "lounge",
                          | "utcOffset" : null,
                          | "timezone" : null
                          | },
                          | "seconds" : ${now.value - 10},
                          | "sensors" : [
                          | {
                          | "name" : "28-00000dfg34ca",
                          | "temperature" : {
                          | "celsius" : 31.1
                          | }
                          | },
                          | {
                          | "name" : "28-00000f33fdc3",
                          | "temperature" : {
                          | "celsius" : 32.8
                          | }
                          | }
                          | ]
                          |}""".stripMargin

    val measurement2 = s"""{
                          | "host" : {
                          | "name" : "bedroom",
                          | "utcOffset" : null,
                          | "timezone" : null
                          | },
                          | "seconds" : ${now.value},
                          | "sensors" : [
                          | {
                          | "name" : "28-00000f3554ds",
                          | "temperature" : {
                          | "celsius" : 21.1
                          | }
                          | },
                          | {
                          | "name" : "28-000003dd3433",
                          | "temperature" : {
                          | "celsius" : 22.8
                          | }
                          | }
                          | ]
                          |}""".stripMargin

    val service = TemperatureEndpoint(scheduler, stubReader(\/-(List())), AllTemperatures(), Connections())
    service.orNotFound.run(Request[IO](DELETE, Uri.uri("/temperatures"))).unsafeRunSync
    service.orNotFound.run(Put(measurement1)).unsafeRunSync
    service.orNotFound.run(Put(measurement2)).unsafeRunSync

    val request = Request[IO](GET, Uri.uri("/temperatures/average"))
    val response = service.orNotFound.run(request).unsafeRunSync
    response.status must_== Ok
    response.as[String].unsafeRunSync must_==
      s"""{
         | "measurements" : [
         | {
         | "host" : {
         | "name" : "lounge",
         | "utcOffset" : null,
         | "timezone" : null
         | },
         | "seconds" : ${now.value - 10},
         | "sensors" : [
         | {
         | "name" : "Average",
         | "temperature" : {
         | "celsius" : 31.95
         | }
         | }
         | ]
         | },
         | {
         | "host" : {
         | "name" : "bedroom",
         | "utcOffset" : null,
         | "timezone" : null
         | },
         | "seconds" : ${now.value},
         | "sensors" : [
         | {
         | "name" : "Average",
         | "temperature" : {
         | "celsius" : 21.950000000000003
         | }
         | }
         | ]
         | }
         | ]
         |}""".stripMargin
  }

  def stubReader(result: Error \/ List[SensorReading]) = new TemperatureReader {
    def read: Error \/ Measurement = result.map(x => Measurement(Host("A"), Seconds.now(), x))
  }

  def stubWriter(result: Error \/ Unit) = new TemperatureWriter {
    def write(measurement: Measurement) = result
  }

  def UnexpectedWriter = new TemperatureWriter {
    def write(measurement: Measurement) = ???
  }
}

| tobyweston/temperature-machine | src/test/scala/bad/robot/temperature/server/TemperatureEndpointTest.scala | Scala | apache-2.0 | 11,799 |
/*
 * Copyright 2022 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package iht.controllers.application.assets.money

import iht.config.AppConfig
import iht.connector.{CachingConnector, IhtConnector}
import iht.controllers.application.EstateController
import iht.forms.ApplicationForms._
import iht.metrics.IhtMetrics
import iht.models.application.ApplicationDetails
import iht.models.application.assets.AllAssets
import iht.models.application.basicElements.ShareableBasicEstateElement
import iht.utils.{ApplicationKickOutHelper, CommonHelper}
import iht.views.html.application.asset.money.money_jointly_owned
import javax.inject.Inject
import play.api.mvc.MessagesControllerComponents
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.auth.core.retrieve.v2.Retrievals.{nino => ninoRetrieval}
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController

class MoneyJointlyOwnedControllerImpl @Inject()(val metrics: IhtMetrics,
                                                val ihtConnector: IhtConnector,
                                                val cachingConnector: CachingConnector,
                                                val authConnector: AuthConnector,
                                                val moneyJointlyOwnedView: money_jointly_owned,
                                                implicit val appConfig: AppConfig,
                                                val cc: MessagesControllerComponents) extends FrontendController(cc) with MoneyJointlyOwnedController {
}

trait MoneyJointlyOwnedController extends EstateController {
  override val applicationSection = Some(ApplicationKickOutHelper.ApplicationSectionAssetsMoneyJointlyOwned)

  val moneyJointlyOwnedView: money_jointly_owned

  lazy val submitUrl = CommonHelper.addFragmentIdentifier(
    iht.controllers.application.assets.money.routes.MoneyOverviewController.onPageLoad(), Some(appConfig.AssetsMoneySharedID))

  def onPageLoad = authorisedForIhtWithRetrievals(ninoRetrieval) { userNino =>
    implicit request =>
      estateElementOnPageLoad[ShareableBasicEstateElement](moneyJointlyOwnedForm, moneyJointlyOwnedView.apply, _.allAssets.flatMap(_.money), userNino)
  }

  def onSubmit = authorisedForIhtWithRetrievals(ninoRetrieval) { userNino =>
    implicit request => {
      val updateApplicationDetails: (ApplicationDetails, Option[String], ShareableBasicEstateElement) =>
        (ApplicationDetails, Option[String]) =
        (appDetails, _, money) => {
          val existingValue = appDetails.allAssets.flatMap(_.money.flatMap(_.value))
          val existingIsOwned = appDetails.allAssets.flatMap(_.money.flatMap(_.isOwned))

          val updatedAD = appDetails.copy(allAssets = Some(appDetails.allAssets.fold
            (new AllAssets(action = None, money = Some(money)))
            (
              money.isOwnedShare match {
                case Some(true) => _.copy(money = Some(money.copy(value = existingValue, isOwned = existingIsOwned)))
                case Some(false) => _.copy(money = Some(money.copy(shareValue = None, value = existingValue,
                  isOwned = existingIsOwned)))
                case None => throw new RuntimeException
              }
            )))
          (updatedAD, None)
        }

      estateElementOnSubmit[ShareableBasicEstateElement](
        moneyJointlyOwnedForm,
        moneyJointlyOwnedView.apply,
        updateApplicationDetails,
        submitUrl,
        userNino
      )
    }
  }
}
| hmrc/iht-frontend | app/iht/controllers/application/assets/money/MoneyJointlyOwnedController.scala | Scala | apache-2.0 | 4,011 |
/***********************************************************************
 * Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.locationtech.geomesa.geojson.servlet

import java.net.URLEncoder
import java.nio.file.Files

import org.json4s.{DefaultFormats, Formats}
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.utils.cache.FilePersistence
import org.locationtech.geomesa.utils.io.PathUtils
import org.scalatra.test.specs2.MutableScalatraSpec
import org.specs2.runner.JUnitRunner
import org.specs2.specification.core.Fragments

@RunWith(classOf[JUnitRunner])
class GeoJsonServletTest extends TestWithDataStore with MutableScalatraSpec {

  sequential

  val tmpDir = Files.createTempDirectory("geojsontest")

  val f0 = """{"type":"Feature","geometry":{"type":"Point","coordinates":[30,10]},"properties":{"id":"0","name":"n0"}}"""
  val f1 = """{"type":"Feature","geometry":{"type":"Point","coordinates":[32,10]},"properties":{"id":"1","name":"n1"}}"""
  val f2 = """{"type":"Feature","geometry":{"type":"Point","coordinates":[34,10]},"properties":{"id":"2","name":"n2"}}"""

  val f0Updated = f0.replace("n0", "n0-updated").replace("30,10", "20,10")
  val f1Updated = f1.replace("n1", "n1-updated").replace("32,10", "22,10")

  def urlEncode(s: String): String = URLEncoder.encode(s, "UTF-8")

  // cleanup tmp dir after tests run
  override def map(fragments: => Fragments): Fragments = super.map(fragments) ^ step {
    PathUtils.deleteRecursively(tmpDir)
  }

  addServlet(new GeoJsonServlet(new FilePersistence(tmpDir.toFile, "servlet")), "/*")

  implicit val formats: Formats = DefaultFormats

  "GeoJsonServlet" should {
    "register a datastore" in {
      post("/ds/geojsontest", dsParams) {
        status mustEqual 200
      }
      post("/index/geojsontest/geojsontest", Map("id" -> "properties.id")) {
        status mustEqual 201 // created
      }
    }

    "return empty list from query" in {
      get("/index/geojsontest/geojsontest/features") {
        status mustEqual 200
        body mustEqual """{"type":"FeatureCollection","features":[]}"""
      }
    }

    "add geojson features" in {
      post("/index/geojsontest/geojsontest/features", f0.getBytes("UTF-8")) {
        status mustEqual 200
        body mustEqual """["0"]"""
      }
      get("/index/geojsontest/geojsontest/features") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f0]}"""
      }
    }

    "add geojson feature collections" in {
      post("/index/geojsontest/geojsontest/features",
          s"""{"type":"FeatureCollection","features":[$f1,$f2]}""".getBytes("UTF-8")) {
        status mustEqual 200
        body mustEqual """["1","2"]"""
      }
      get("/index/geojsontest/geojsontest/features") {
        status mustEqual 200
        body must startWith("""{"type":"FeatureCollection","features":[""")
        body must endWith("]}")
        body must haveLength(s"""{"type":"FeatureCollection","features":[$f0,$f1,$f2]}""".length)
        body must contain(f0)
        body must contain(f1)
        body must contain(f2)
      }
    }

    "query geojson features by id" in {
      get(s"/index/geojsontest/geojsontest/features?q=${urlEncode("""{"properties.id":"0"}""")}") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f0]}"""
      }
      get(s"/index/geojsontest/geojsontest/features?q=${urlEncode("""{"properties.id":"1"}""")}") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f1]}"""
      }
      get(s"/index/geojsontest/geojsontest/features/1") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f1]}"""
      }
      get(s"/index/geojsontest/geojsontest/features/1,2") {
        status mustEqual 200
        body must startWith("""{"type":"FeatureCollection","features":[""")
        body must endWith("]}")
        body must haveLength(s"""{"type":"FeatureCollection","features":[$f1,$f2]}""".length)
        body must contain(f1)
        body must contain(f2)
      }
    }

    "query geojson features by geometry" in {
      get(s"/index/geojsontest/geojsontest/features?q=${urlEncode("""{"geometry":{"$bbox":[33,9,35,11]}}""")}") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f2]}"""
      }
    }

    "query geojson features by properties" in {
      get(s"/index/geojsontest/geojsontest/features?q=${urlEncode("""{"properties.name":"n1"}""")}") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f1]}"""
      }
    }

    "update geojson features" in {
      put(s"/index/geojsontest/geojsontest/features", f0Updated.getBytes("UTF-8")) {
        status mustEqual 200
      }
      get(s"/index/geojsontest/geojsontest/features/0") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f0Updated]}"""
      }
      put(s"/index/geojsontest/geojsontest/features/1", f1Updated.getBytes("UTF-8")) {
        status mustEqual 200
      }
      get(s"/index/geojsontest/geojsontest/features/1") {
        status mustEqual 200
        body mustEqual s"""{"type":"FeatureCollection","features":[$f1Updated]}"""
      }
      get("/index/geojsontest/geojsontest/features") {
        status mustEqual 200
        body must startWith("""{"type":"FeatureCollection","features":[""")
        body must endWith("]}")
        body must haveLength(s"""{"type":"FeatureCollection","features":[$f0Updated,$f1Updated,$f2]}""".length)
        body must contain(f0Updated)
        body must contain(f1Updated)
        body must contain(f2)
      }
    }

    "delete geojson features" in {
      delete(s"/index/geojsontest/geojsontest/features/0") {
        status mustEqual 200
      }
      get(s"/index/geojsontest/geojsontest/features/0") {
        status mustEqual 404
      }
      get("/index/geojsontest/geojsontest/features") {
        status mustEqual 200
        body must startWith("""{"type":"FeatureCollection","features":[""")
        body must endWith("]}")
        body must haveLength(s"""{"type":"FeatureCollection","features":[$f1Updated,$f2]}""".length)
        body must contain(f1Updated)
        body must contain(f2)
      }
    }
  }
}
| locationtech/geomesa | geomesa-geojson/geomesa-geojson-rest/src/test/scala/org/locationtech/geomesa/geojson/servlet/GeoJsonServletTest.scala | Scala | apache-2.0 | 6,735 |
package formats.json
import play.api.libs.json.{JsPath, Reads}
import play.api.libs.functional.syntax._
/**
* The client can submit various JSON payloads relating to computer vision ground truth audits. For example,
* a list of panoIds to be audited along with their lat/lng positions or a panoId that has been audited.
*/
object CVGroundTruthSubmissionFormats {
case class CVGroundTruthPanoidListSubmission(panos: Seq[PanoData], numPanos: Int)
case class CVGroundTruthPanoIdSubmission(pano: String, numRemaining: Int, missionId: Int)
case class PanoData(panoId: String, lat: Float, lng: Float)
implicit val panoDataSubmission: Reads[PanoData] = (
(JsPath \ "panoId").read[String] and
(JsPath \ "lat").read[Float] and
(JsPath \ "lng").read[Float]
)(PanoData.apply _)
implicit val groundTruthPanoIdListSubmission: Reads[CVGroundTruthPanoidListSubmission] = (
(JsPath \ "panos").read[Seq[PanoData]] and
(JsPath \ "num_panos").read[Int]
)(CVGroundTruthPanoidListSubmission.apply _)
implicit val groundTruthPanoCompleteSubmission: Reads[CVGroundTruthPanoIdSubmission] = (
(JsPath \ "pano").read[String] and
(JsPath \ "num_remaining").read[Int] and
(JsPath \ "mission_id").read[Int]
)(CVGroundTruthPanoIdSubmission.apply _)
}
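// Usage sketch (illustrative, not part of the production code): validating an
// incoming payload against the Reads defined above.
//
//   import play.api.libs.json.Json
//   import formats.json.CVGroundTruthSubmissionFormats._
//
//   val payload = Json.parse(
//     """{"panos":[{"panoId":"pano-1","lat":38.91,"lng":-77.04}],"num_panos":1}""")
//   payload.validate[CVGroundTruthPanoidListSubmission]
//   // => JsSuccess(CVGroundTruthPanoidListSubmission(List(PanoData("pano-1", 38.91f, -77.04f)), 1))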
| ProjectSidewalk/SidewalkWebpage | app/formats/json/CVGroundTruthSubmissionFormats.scala | Scala | mit | 1,306 |
package sbencoding
/**
* Provides additional BencodingFormats and helpers
*/
trait AdditionalFormats {
implicit object BcValueFormat extends BencodingFormat[BcValue] {
def write(value: BcValue) = value
def read(value: BcValue) = value
}
/**
* Constructs a BencodingFormat from its two parts, BencodingReader and BencodingWriter.
*/
def bencodingFormat[T](reader: BencodingReader[T], writer: BencodingWriter[T]) = new BencodingFormat[T] {
def write(obj: T) = writer.write(obj)
    def read(value: BcValue) = reader.read(value)
}
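  // Usage sketch (illustrative): building a format for a simple wrapper type from
  // its two halves. `convertTo` is the enrichment used elsewhere in this file;
  // `BcString` is an assumed BcValue constructor for bencoded byte strings.
  //
  //   case class UserId(value: String)
  //   implicit val userIdFormat: BencodingFormat[UserId] = bencodingFormat(
  //     new BencodingReader[UserId] { def read(value: BcValue) = UserId(value.convertTo[String]) },
  //     new BencodingWriter[UserId] { def write(obj: UserId) = BcString(obj.value.getBytes("UTF-8")) }
  //   )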
/**
* Turns a BencodingWriter into a BencodingFormat that throws an UnsupportedOperationException for reads.
*/
def lift[T](writer: BencodingWriter[T]) = new BencodingFormat[T] {
def write(obj: T): BcValue = writer.write(obj)
def read(value: BcValue) =
throw new UnsupportedOperationException("BencodingReader implementation missing")
}
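  // Usage sketch: write-only support for a type that is serialized but never read
  // back (`auditWriter` is a hypothetical BencodingWriter[AuditEvent]):
  //
  //   implicit val auditFormat: BencodingFormat[AuditEvent] = lift(auditWriter)
  //   // reading a BcValue as AuditEvent now throws UnsupportedOperationException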
/**
* Turns a BencodingReader into a BencodingFormat that throws an UnsupportedOperationException for writes.
*/
def lift[T <: AnyRef](reader: BencodingReader[T]) = new BencodingFormat[T] {
def write(obj: T): BcValue =
throw new UnsupportedOperationException("No BencodingWriter[" + obj.getClass + "] available")
def read(value: BcValue) = reader.read(value)
}
/**
* Lazy wrapper around serialization. Useful when you want to serialize (mutually) recursive structures.
*/
def lazyFormat[T](format: => BencodingFormat[T]) = new BencodingFormat[T] {
    lazy val delegate = format
    def write(x: T) = delegate.write(x)
    def read(value: BcValue) = delegate.read(value)
}
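  // Usage sketch for recursive structures. `bencodingFormat2` is a hypothetical
  // productN-style helper; the point is that the implicit must be declared with
  // `lazy val` and wrapped in `lazyFormat` so it can refer to itself without
  // hitting an uninitialized (null) reference during construction:
  //
  //   case class Node(label: String, children: List[Node])
  //   implicit lazy val nodeFormat: BencodingFormat[Node] = lazyFormat(bencodingFormat2(Node))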
/**
* Wraps an existing BencodingReader with Exception protection.
*/
def safeReader[A: BencodingReader] = new BencodingReader[Either[Exception, A]] {
    def read(value: BcValue) = {
      try {
        Right(value.convertTo[A])
} catch {
case e: Exception => Left(e)
}
}
}
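  // Usage sketch: turning a parse failure into a value rather than a thrown
  // exception (assumes convertTo accepts an explicit reader, as in spray-json):
  //
  //   val parsed: Either[Exception, Int] = bcValue.convertTo(safeReader[Int])
  //   parsed.fold(err => defaultValue, n => n)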
}
| zhaoyao/sbencoding | src/main/scala/sbencoding/AdditionalFormats.scala | Scala | mit | 1,952 |
package controllers
import booli.{Configuration, Download}
import play.api.mvc._
class Application extends Controller {
def index = Action {
println("Environment")
println(sys.env.mkString("\n"))
Ok(views.html.Application.main())
}
def admin(key: String) = Action {
if (key == Configuration.adminKey) {
Ok(views.html.Application.admin())
} else NotFound
}
def adminDownload(key: String) = Action {
if (key == Configuration.adminKey) {
Download.downloadBooliData()
Ok("Downloading Booli Data")
} else NotFound
}
}
| lastsys/pricemap | server/app/controllers/Application.scala | Scala | bsd-3-clause | 613 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import connectors._
import models.{CurrentProfile, S4LKey}
import play.api.libs.json._
import uk.gov.hmrc.http.cache.client.CacheMap
import uk.gov.hmrc.http.{HeaderCarrier, HttpResponse}
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class S4LService @Inject()(val s4LConnector: S4LConnector)
(implicit executionContext: ExecutionContext) {
def save[T: S4LKey](data: T)(implicit profile: CurrentProfile, hc: HeaderCarrier, writes: Writes[T]): Future[CacheMap] =
s4LConnector.save[T](profile.registrationId, S4LKey[T].key, data)
def fetchAndGet[T: S4LKey](implicit profile: CurrentProfile, hc: HeaderCarrier, reads: Reads[T]): Future[Option[T]] =
s4LConnector.fetchAndGet[T](profile.registrationId, S4LKey[T].key).recover {
case _: JsResultException => None
}
def clear(implicit hc: HeaderCarrier, profile: CurrentProfile): Future[HttpResponse] =
s4LConnector.clear(profile.registrationId)
def clearKey[T: S4LKey](implicit profile: CurrentProfile, hc: HeaderCarrier): Future[CacheMap] =
s4LConnector.save(profile.registrationId, S4LKey[T].key, Json.obj())
}
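// Usage sketch (illustrative; `BankDetails` and its implicit S4LKey, Reads and
// Writes instances are assumptions, not defined in this file):
//
//   for {
//     _      <- s4LService.save(bankDetails)          // cached under S4LKey[BankDetails].key
//     cached <- s4LService.fetchAndGet[BankDetails]   // Some(bankDetails); None if the JSON is invalid
//     _      <- s4LService.clearKey[BankDetails]      // resets the entry to an empty JSON object
//   } yield cached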
| hmrc/vat-registration-frontend | app/services/S4LService.scala | Scala | apache-2.0 | 1,802 |
/* TestMemory: Units tests for tarski.Memory logging */
package tarski
import utility.Utility._
import com.amazonaws.AmazonServiceException
import com.amazonaws.AmazonServiceException.ErrorType
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
import org.testng.annotations.Test
class TestMemory {
// A large random string for unit test purposes. Guaranteed random.
val install = "46240bbb2d11736a1a0fd86ac0d31c37bab17e13d02bde533668d0f8"
val base = Memory.basics(install=install,version="0.1",project="TestMemory",ideaVersion="test")
@Test def remember() = {
val log = Memory.log(base.add("kind","TestMemory.remember").add("input","x = 3"), noLog=false)
Await.result(log,10.second)
}
@Test def writeOnly(): Unit = {
val get = Memory.table map { table => table.getItem("install",install,"time","1421091238.125") }
try {
Await.result(get,Duration.Inf)
impossible // If get succeeds, we have a security problem
} catch {
case e:AmazonServiceException =>
assert(e.getErrorType == ErrorType.Client)
assert(e.getErrorMessage contains "eddy-public is not authorized to perform: dynamodb:GetItem", e.getErrorMessage)
}
}
@Test def error() = {
try throw new AssertionError("an assertion")
catch { case e:Throwable =>
val log = Memory.log(base.add("kind","TestMemory.error").error(e), noLog=false)
Await.result(log,10.second)
}
}
}
| eddysystems/eddy | tests/src/tarski/TestMemory.scala | Scala | bsd-2-clause | 1,499 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecEnv}
import org.apache.flink.table.api.internal.TableEnvironmentImpl
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings}
import org.apache.flink.table.planner.calcite.{FlinkTypeFactory, FlinkTypeSystem}
import org.apache.flink.table.planner.codegen.CodeGeneratorContext
import org.apache.flink.table.planner.dataview.DataViewSpec
import org.apache.flink.table.planner.delegation.PlannerBase
import org.apache.flink.table.planner.functions.aggfunctions.AvgAggFunction.{DoubleAvgAggFunction, LongAvgAggFunction}
import org.apache.flink.table.planner.plan.utils.{AggregateInfo, AggregateInfoList}
import org.apache.flink.table.runtime.context.ExecutionContext
import org.apache.flink.table.types.logical.{BigIntType, DoubleType, LogicalType, RowType, VarCharType}
import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
import org.apache.calcite.rel.core.AggregateCall
import org.apache.calcite.tools.RelBuilder
import org.powermock.api.mockito.PowerMockito.{mock, when}
/**
  * Base class for aggregate code-generation tests that mocks aggregate
  * information (calls, functions, accumulator types) and the runtime context.
  */
abstract class AggTestBase(isBatchMode: Boolean) {
val typeFactory: FlinkTypeFactory = new FlinkTypeFactory(new FlinkTypeSystem())
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment)
private val tEnv = if (isBatchMode) {
val settings = EnvironmentSettings.newInstance().inBatchMode().build()
// use impl class instead of interface class to avoid
// "Static methods in interface require -target:jvm-1.8"
TableEnvironmentImpl.create(settings)
} else {
val settings = EnvironmentSettings.newInstance().inStreamingMode().build()
StreamTableEnvironment.create(env, settings)
}
private val planner = tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
val inputNames = Array("f0", "f1", "f2", "f3", "f4")
val inputTypes: Array[LogicalType] = Array(
new VarCharType(VarCharType.MAX_LENGTH), new BigIntType(), new DoubleType(), new BigIntType(),
new VarCharType(VarCharType.MAX_LENGTH))
val inputType: RowType = RowType.of(inputTypes, inputNames)
val relBuilder: RelBuilder = planner.getRelBuilder.values(
typeFactory.buildRelNodeRowType(inputNames, inputTypes))
val aggInfo1: AggregateInfo = {
val aggInfo = mock(classOf[AggregateInfo])
val call = mock(classOf[AggregateCall])
when(aggInfo, "agg").thenReturn(call)
when(call, "getName").thenReturn("avg1")
when(aggInfo, "function").thenReturn(new LongAvgAggFunction)
when(aggInfo, "externalAccTypes").thenReturn(Array(DataTypes.BIGINT, DataTypes.BIGINT))
when(aggInfo, "argIndexes").thenReturn(Array(1))
when(aggInfo, "aggIndex").thenReturn(0)
when(aggInfo, "externalResultType").thenReturn(DataTypes.BIGINT)
aggInfo
}
val aggInfo2: AggregateInfo = {
val aggInfo = mock(classOf[AggregateInfo])
val call = mock(classOf[AggregateCall])
when(aggInfo, "agg").thenReturn(call)
when(call, "getName").thenReturn("avg2")
when(aggInfo, "function").thenReturn(new DoubleAvgAggFunction)
when(aggInfo, "externalAccTypes").thenReturn(Array(DataTypes.DOUBLE, DataTypes.BIGINT))
when(aggInfo, "argIndexes").thenReturn(Array(2))
when(aggInfo, "aggIndex").thenReturn(1)
when(aggInfo, "externalResultType").thenReturn(DataTypes.DOUBLE)
aggInfo
}
val imperativeAggFunc = new TestLongAvgFunc
val aggInfo3: AggregateInfo = {
val aggInfo = mock(classOf[AggregateInfo])
val call = mock(classOf[AggregateCall])
when(aggInfo, "agg").thenReturn(call)
when(call, "getName").thenReturn("avg3")
when(aggInfo, "function").thenReturn(imperativeAggFunc)
when(aggInfo, "externalAccTypes").thenReturn(
Array(fromLegacyInfoToDataType(imperativeAggFunc.getAccumulatorType)))
when(aggInfo, "externalResultType").thenReturn(DataTypes.BIGINT)
when(aggInfo, "viewSpecs").thenReturn(Array[DataViewSpec]())
when(aggInfo, "argIndexes").thenReturn(Array(3))
when(aggInfo, "aggIndex").thenReturn(2)
aggInfo
}
val aggInfoList = AggregateInfoList(
Array(aggInfo1, aggInfo2, aggInfo3), None, countStarInserted = false, Array())
val ctx = new CodeGeneratorContext(tEnv.getConfig)
val classLoader: ClassLoader = Thread.currentThread().getContextClassLoader
val context: ExecutionContext = mock(classOf[ExecutionContext])
when(context, "getRuntimeContext").thenReturn(mock(classOf[RuntimeContext]))
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/codegen/agg/AggTestBase.scala | Scala | apache-2.0 | 5,562 |
package org.scalatra
package atmosphere
import java.nio.CharBuffer
import javax.servlet.http.{ HttpServletRequest, HttpSession }
import grizzled.slf4j.Logger
import org.atmosphere.cpr.AtmosphereResource.TRANSPORT._
import org.atmosphere.cpr._
import org.atmosphere.handler.AbstractReflectorAtmosphereHandler
import org.scalatra.servlet.ServletApiImplicits._
import org.scalatra.util.RicherString._
object ScalatraAtmosphereHandler {
@deprecated("Use `org.scalatra.atmosphere.AtmosphereClientKey` instead", "2.2.1")
val AtmosphereClientKey = org.scalatra.atmosphere.AtmosphereClientKey
@deprecated("Use `org.scalatra.atmosphere.AtmosphereRouteKey` instead", "2.2.1")
val AtmosphereRouteKey = org.scalatra.atmosphere.AtmosphereRouteKey
private class ScalatraResourceEventListener extends AtmosphereResourceEventListener {
def client(resource: AtmosphereResource) =
Option(resource.session()).flatMap(_.get(org.scalatra.atmosphere.AtmosphereClientKey)).map(_.asInstanceOf[AtmosphereClient])
def onPreSuspend(event: AtmosphereResourceEvent) {}
def onBroadcast(event: AtmosphereResourceEvent) {
val resource = event.getResource
resource.transport match {
case JSONP | AJAX | LONG_POLLING =>
case _ => resource.getResponse.flushBuffer()
}
}
def onDisconnect(event: AtmosphereResourceEvent) {
val disconnector = if (event.isCancelled) ClientDisconnected else ServerDisconnected
client(event.getResource) foreach (_.receive.lift(Disconnected(disconnector, Option(event.throwable))))
      // previously the session was invalidated when the resource had not been
      // resumed; now only the client key is removed in either case
      event.getResource.session.removeAttribute(org.scalatra.atmosphere.AtmosphereClientKey)
}
def onResume(event: AtmosphereResourceEvent) {}
def onSuspend(event: AtmosphereResourceEvent) {}
def onThrowable(event: AtmosphereResourceEvent) {
client(event.getResource) foreach (_.receive.lift(Error(Option(event.throwable()))))
}
def onClose(event: AtmosphereResourceEvent) {}
}
}
class ScalatraAtmosphereException(message: String) extends ScalatraException(message)
class ScalatraAtmosphereHandler(implicit wireFormat: WireFormat) extends AbstractReflectorAtmosphereHandler {
import org.scalatra.atmosphere.ScalatraAtmosphereHandler._
private[this] val internalLogger = Logger(getClass)
def onRequest(resource: AtmosphereResource) {
val req = resource.getRequest
val route = Option(req.getAttribute(org.scalatra.atmosphere.AtmosphereRouteKey)).map(_.asInstanceOf[MatchedRoute])
var session = resource.session()
val isNew = !session.contains(org.scalatra.atmosphere.AtmosphereClientKey)
(req.requestMethod, route.isDefined) match {
case (Post, _) =>
        if (isNew) {
          session = AtmosphereResourceFactory.getDefault.find(resource.uuid).session
        }
        val client = session(org.scalatra.atmosphere.AtmosphereClientKey).asInstanceOf[AtmosphereClient]
        handleIncomingMessage(req, client)
case (_, true) =>
        val client = if (isNew) Option(createClient(route.get, session, resource)) else None
        addEventListener(resource)
        resumeIfNeeded(resource)
        configureBroadcaster(resource)
        client foreach (_.receive.lift(Connected))
resource.suspend
case _ =>
val ex = new ScalatraAtmosphereException("There is no atmosphere route defined for " + req.getRequestURI)
internalLogger.warn(ex.getMessage)
throw ex
}
}
private[this] def createClient(route: MatchedRoute, session: HttpSession, resource: AtmosphereResource) = {
withRouteMultiParams(route, resource.getRequest) {
val client = clientForRoute(route)
session(org.scalatra.atmosphere.AtmosphereClientKey) = client
client.resource = resource
client
}
}
private[this] def createClient(route: MatchedRoute, resource: AtmosphereResource) = {
withRouteMultiParams(route, resource.getRequest) {
val client = clientForRoute(route)
client.resource = resource
client
}
}
private[this] def clientForRoute(route: MatchedRoute): AtmosphereClient = {
liftAction(route.action) getOrElse {
throw new ScalatraException("An atmosphere route should return an atmosphere client")
}
}
private[this] def requestUri(resource: AtmosphereResource) = {
val u = resource.getRequest.getRequestURI.blankOption getOrElse "/"
if (u.endsWith("/")) u + "*" else u + "/*"
}
private[this] def configureBroadcaster(resource: AtmosphereResource) {
val bc = BroadcasterFactory.getDefault.get(requestUri(resource))
resource.setBroadcaster(bc)
}
private[this] def handleIncomingMessage(req: AtmosphereRequest, client: AtmosphereClient) {
val parsed: InboundMessage = wireFormat.parseInMessage(readBody(req))
client.receive.lift(parsed)
}
private[this] def readBody(req: AtmosphereRequest) = {
val buff = CharBuffer.allocate(8192)
val body = new StringBuilder
val rdr = req.getReader
while (rdr.read(buff) >= 0) {
      body.append(buff.flip.toString) // flip the buffer from write mode to read mode
buff.clear()
}
body.toString()
}
private[this] def addEventListener(resource: AtmosphereResource) {
resource.addEventListener(new ScalatraResourceEventListener)
}
/**
* The current multiparams. Multiparams are a result of merging the
* standard request params (query string or post params) with the route
* parameters extracted from the route matchers of the current route.
* The default value for an unknown param is the empty sequence. Invalid
* outside `handle`.
*/
private[this] def multiParams(request: HttpServletRequest): MultiParams = {
val read = request.contains("MultiParamsRead")
val found = request.get(MultiParamsKey) map (
_.asInstanceOf[MultiParams] ++ (if (read) Map.empty else request.multiParameters)
)
val multi = found getOrElse request.multiParameters
request("MultiParamsRead") = new {}
request(MultiParamsKey) = multi
multi.withDefaultValue(Seq.empty)
}
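  // Illustrative example of the merge above: for a route /rooms/:room matched
  // against /rooms/lobby?user=bob, multiParams yields
  // Map("room" -> Seq("lobby"), "user" -> Seq("bob")), and unknown keys default
  // to Seq.empty.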
private[this] def withRouteMultiParams[S](matchedRoute: MatchedRoute, request: HttpServletRequest)(thunk: => S): S = {
val originalParams = multiParams(request)
setMultiparams(matchedRoute, originalParams, request)
try {
thunk
} finally {
request(MultiParamsKey) = originalParams
}
}
def setMultiparams[S](matchedRoute: MatchedRoute, originalParams: MultiParams, request: HttpServletRequest) {
val routeParams = matchedRoute.multiParams map {
case (key, values) =>
key -> values.map(UriDecoder.secondStep(_))
}
request(MultiParamsKey) = originalParams ++ routeParams
}
private[this] def liftAction(action: org.scalatra.Action) = try {
action() match {
case cl: AtmosphereClient => Some(cl)
case _ => None
}
} catch {
    case t: Throwable =>
      internalLogger.warn("failed to evaluate atmosphere route action", t)
      None
}
private[this] def resumeIfNeeded(resource: AtmosphereResource) {
import org.atmosphere.cpr.AtmosphereResource.TRANSPORT._
resource.transport match {
case JSONP | AJAX | LONG_POLLING => resource.resumeOnBroadcast(true)
case _ =>
}
}
}
| lloydmeta/scalatra | atmosphere/src/main/scala/org/scalatra/atmosphere/ScalatraAtmosphereHandler.scala | Scala | bsd-2-clause | 7,353 |
package org.broadinstitute.dsde.workbench.sam.service
import java.net.URI
import java.util.UUID
import akka.http.scaladsl.model.headers.OAuth2BearerToken
import cats.effect.IO
import cats.implicits._
import com.unboundid.ldap.sdk.{LDAPConnection, LDAPConnectionPool}
import org.broadinstitute.dsde.workbench.model._
import org.broadinstitute.dsde.workbench.sam.Generator._
import org.broadinstitute.dsde.workbench.sam.TestSupport
import org.broadinstitute.dsde.workbench.sam.TestSupport._
import org.broadinstitute.dsde.workbench.sam.dataAccess.{AccessPolicyDAO, DirectoryDAO, PostgresAccessPolicyDAO, PostgresDirectoryDAO}
import org.broadinstitute.dsde.workbench.sam.model._
import org.broadinstitute.dsde.workbench.sam.schema.JndiSchemaDAO
import org.scalatest._
import scala.concurrent.ExecutionContext.Implicits.global
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
class PolicyEvaluatorServiceSpec extends AnyFlatSpec with Matchers with TestSupport with BeforeAndAfterEach {
val dirURI = new URI(directoryConfig.directoryUrl)
val connectionPool = new LDAPConnectionPool(
new LDAPConnection(dirURI.getHost, dirURI.getPort, directoryConfig.user, directoryConfig.password),
directoryConfig.connectionPoolSize)
lazy val dirDAO: DirectoryDAO = new PostgresDirectoryDAO(TestSupport.dbRef, TestSupport.dbRef)
lazy val policyDAO: AccessPolicyDAO = new PostgresAccessPolicyDAO(TestSupport.dbRef, TestSupport.dbRef)
val schemaDao = new JndiSchemaDAO(directoryConfig, schemaLockConfig)
override protected def beforeEach(): Unit = {
setup().unsafeRunSync()
super.beforeEach()
}
private[service] val dummyUserInfo =
UserInfo(OAuth2BearerToken("token"), WorkbenchUserId("userid"), WorkbenchEmail("user@company.com"), 0)
private[service] val defaultResourceTypeActions = Set(
ResourceAction("alter_policies"),
ResourceAction("delete"),
ResourceAction("read_policies"),
ResourceAction("view"),
ResourceAction("non_owner_action"))
private[service] val defaultResourceTypeActionPatterns = Set(
SamResourceActionPatterns.alterPolicies,
SamResourceActionPatterns.delete,
SamResourceActionPatterns.readPolicies,
ResourceActionPattern("view", "", false),
ResourceActionPattern("non_owner_action", "", false)
)
private[service] val defaultResourceType = ResourceType(
ResourceTypeName(UUID.randomUUID().toString),
defaultResourceTypeActionPatterns,
Set(
ResourceRole(ResourceRoleName("owner"), defaultResourceTypeActions - ResourceAction("non_owner_action")),
ResourceRole(ResourceRoleName("other"), Set(ResourceAction("view"), ResourceAction("non_owner_action")))
),
ResourceRoleName("owner")
)
private[service] val otherResourceType = ResourceType(
ResourceTypeName(UUID.randomUUID().toString),
defaultResourceTypeActionPatterns,
Set(
ResourceRole(ResourceRoleName("owner"), defaultResourceTypeActions - ResourceAction("non_owner_action")),
ResourceRole(ResourceRoleName("other"), Set(ResourceAction("view"), ResourceAction("non_owner_action")))
),
ResourceRoleName("owner")
)
private val constrainableActionPatterns = Set(
ResourceActionPattern("constrainable_view", "Can be constrained by an auth domain", true),
ResourceActionPattern("unconstrainable_view", "Not constrained by an auth domain", false))
private val constrainableViewAction = ResourceAction("constrainable_view")
private val unconstrainableViewAction = ResourceAction("unconstrainable_view")
private val constrainableResourceTypeActions = Set(constrainableViewAction)
private[service] val constrainableReaderRoleName = ResourceRoleName("constrainable_reader")
private[service] val constrainableResourceType = ResourceType(
genResourceTypeNameExcludeManagedGroup.sample.get,
constrainableActionPatterns,
Set(ResourceRole(constrainableReaderRoleName, constrainableResourceTypeActions)),
constrainableReaderRoleName
)
private[service] val constrainablePolicyMembership =
AccessPolicyMembership(Set(dummyUserInfo.userEmail), Set(constrainableViewAction), Set(constrainableReaderRoleName), None)
private[service] val managedGroupResourceType = configResourceTypes.getOrElse(
ResourceTypeName("managed-group"),
throw new Error("Failed to load managed-group resource type from reference.conf"))
private val emailDomain = "example.com"
private val policyEvaluatorService = PolicyEvaluatorService(
emailDomain,
Map(defaultResourceType.name -> defaultResourceType, otherResourceType.name -> otherResourceType),
policyDAO,
dirDAO)
private[service] val service = new ResourceService(
Map(defaultResourceType.name -> defaultResourceType, otherResourceType.name -> otherResourceType),
policyEvaluatorService,
policyDAO,
dirDAO,
NoExtensions,
emailDomain
)
private val constrainableResourceTypes = Map(
constrainableResourceType.name -> constrainableResourceType,
managedGroupResourceType.name -> managedGroupResourceType)
private val constrainablePolicyEvaluatorService = PolicyEvaluatorService(emailDomain, constrainableResourceTypes, policyDAO, dirDAO)
private[service] val constrainableService = new ResourceService(
constrainableResourceTypes,
constrainablePolicyEvaluatorService,
policyDAO,
dirDAO,
NoExtensions,
emailDomain)
val managedGroupService = new ManagedGroupService(
constrainableService,
constrainablePolicyEvaluatorService,
constrainableResourceTypes,
policyDAO,
dirDAO,
NoExtensions,
emailDomain)
private object SamResourceActionPatterns {
val readPolicies = ResourceActionPattern("read_policies", "", false)
val alterPolicies = ResourceActionPattern("alter_policies", "", false)
val delete = ResourceActionPattern("delete", "", false)
val sharePolicy = ResourceActionPattern("share_policy::.+", "", false)
val readPolicy = ResourceActionPattern("read_policy::.+", "", false)
}
def setup(): IO[Unit] = {
for{
_ <- clearDatabase()
_ <- dirDAO.createUser(WorkbenchUser(dummyUserInfo.userId, TestSupport.genGoogleSubjectId(), dummyUserInfo.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
} yield ()
}
protected def clearDatabase(): IO[Unit] = IO(TestSupport.truncateAll).void
private[service] def savePolicyMembers(policy: AccessPolicy) = {
policy.members.toList.traverse {
case u: WorkbenchUserId => dirDAO.createUser(WorkbenchUser(u, None, WorkbenchEmail(u.value + "@foo.bar"), None), samRequestContext).recoverWith {
case _: WorkbenchException => IO.pure(WorkbenchUser(u, None, WorkbenchEmail(u.value + "@foo.bar"), None))
}
case g: WorkbenchGroupName => managedGroupService.createManagedGroup(ResourceId(g.value), dummyUserInfo, samRequestContext = samRequestContext).recoverWith {
case _: WorkbenchException => IO.pure(Resource(defaultResourceType.name, ResourceId(g.value), Set.empty))
}
case _ => IO.unit
}
}
"hasPermission" should "return true if given action is granted through membership in another policy" in {
val user = genUserInfo.sample.get
val action = ResourceAction("weirdAction")
val resource = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val samplePolicy = genPolicy.sample.get
val policyWithUser = AccessPolicy.members.set(samplePolicy.members + user.userId)(samplePolicy)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val resource2 = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val samplePolicy2 = genPolicy.sample.get
val policy2ExtraAction = AccessPolicy.actions.set(samplePolicy.actions + action)(samplePolicy2)
val policy2WithNestedPolicy = AccessPolicy.members.set(Set(policy.id))(policy2ExtraAction)
val policy2 = SamLenses.resourceIdentityAccessPolicy.set(resource2.fullyQualifiedId)(policy2WithNestedPolicy)
val res = for{
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- resource2.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- savePolicyMembers(policy2)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createResource(resource2, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
_ <- policyDAO.createPolicy(policy2, samRequestContext)
r <- service.policyEvaluatorService.hasPermission(policy2.id.resource, action, user.userId, samRequestContext)
} yield {
r shouldBe true
}
res.unsafeRunSync()
}
it should "return false if given action is not allowed for a user" in {
val user = genUserInfo.sample.get
val samplePolicy = genPolicy.sample.get
val action = ResourceAction("weirdAction")
val resource = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val policyWithUser = AccessPolicy.members.set(samplePolicy.members + user.userId)(samplePolicy)
val policyExcludeAction = AccessPolicy.actions.set(samplePolicy.actions - action)(policyWithUser)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyExcludeAction)
val res = for{
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- service.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
r shouldBe false
}
res.unsafeRunSync()
}
it should "return false if user is not a member of the resource" in {
val user = genUserInfo.sample.get
val samplePolicy = genPolicy.sample.get
val action = genResourceAction.sample.get
val resource = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val policyWithUser = AccessPolicy.members.set(samplePolicy.members - user.userId)(samplePolicy)
val policyExcludeAction = AccessPolicy.actions.set(samplePolicy.actions - action)(policyWithUser)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyExcludeAction)
val res = for{
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- service.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
      r shouldBe false
}
res.unsafeRunSync()
}
it should "return true if given action is allowed for a user and resource is not constrained by auth domains" in {
val user = genUserInfo.sample.get
val samplePolicy = genPolicy.sample.get
val action = genResourceAction.sample.get
val resource = genResource.sample.get.copy(authDomain = Set.empty, resourceTypeName = defaultResourceType.name)
val policyWithUser = AccessPolicy.members.modify(_ + user.userId)(samplePolicy)
val policyWithAction = AccessPolicy.actions.modify(_ + action)(policyWithUser)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithAction)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- service.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
      r shouldBe true
}
res.unsafeRunSync()
}
it should "return true if given action is allowed for a user, action is constrained by auth domains, user is a member of all required auth domains" in {
val user = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = constrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policyWithUser = AccessPolicy.members.modify(_ + user.userId)(samplePolicy)
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), user, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
      r shouldBe true
}
res.unsafeRunSync()
}
it should "return true if given action is allowed for a user, action is constrained by auth domains, resource has no auth domain" in {
val user = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = constrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name, authDomain = Set.empty)
val policyWithUser = AccessPolicy.members.modify(_ + user.userId)(samplePolicy)
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
      r shouldBe true
}
res.unsafeRunSync()
}
it should "return false if given action is NOT allowed for a user, action is constrained by auth domains, user is a member of required auth domains" in {
val user = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = constrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name, authDomain = Set(genWorkbenchGroupName.sample.get))
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(samplePolicy)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), user, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermission(policy.id.resource, action, user.userId, samRequestContext)
} yield {
      r shouldBe false
}
res.unsafeRunSync()
}
it should "return false if given action is allowed for a user, action is constrained by auth domains, user is NOT a member of auth domain" in {
val user = genUserInfo.sample.get
val probeUser = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = constrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policyWithUser = AccessPolicy.members.modify(_ + probeUser.userId)(samplePolicy)
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(probeUser.userId, TestSupport.genGoogleSubjectId(), probeUser.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), user, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermission(policy.id.resource, action, probeUser.userId, samRequestContext)
} yield {
      r shouldBe false
}
res.unsafeRunSync()
}
it should "return true if given action is allowed for a user, action is NOT constrained by auth domains, user is not a member of auth domain" in {
val user = genUserInfo.sample.get
val probeUser = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = unconstrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policyWithUser = AccessPolicy.members.modify(_ + probeUser.userId)(samplePolicy)
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(probeUser.userId, TestSupport.genGoogleSubjectId(), probeUser.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), user, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermission(policy.id.resource, action, probeUser.userId, samRequestContext)
} yield {
      r shouldBe true
}
res.unsafeRunSync()
}
"hasPermissionByUserEmail" should "return true if given action is allowed for a user, action is NOT constrained by auth domains, user is not a member of auth domain" in {
val user = genUserInfo.sample.get
val probeUser = genUserInfo.sample.get
val samplePolicy = SamLenses.resourceTypeNameInAccessPolicy.modify(_ => constrainableResourceType.name)(genPolicy.sample.get)
val action = unconstrainableViewAction
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policyWithUser = AccessPolicy.members.modify(_ + probeUser.userId)(samplePolicy)
val policyWithResource = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithUser)
val policy = AccessPolicy.actions.modify(_ + action)(policyWithResource).copy(roles = Set.empty)
val res = for{
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(probeUser.userId, TestSupport.genGoogleSubjectId(), probeUser.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), user, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(constrainableResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- constrainableService.policyEvaluatorService.hasPermissionByUserEmail(policy.id.resource, action, probeUser.userEmail, samRequestContext)
} yield {
      r shouldBe true
}
res.unsafeRunSync()
}
it should "return false if given action is not allowed for a user" in {
val user = genUserInfo.sample.get
val samplePolicy = genPolicy.sample.get
val action = ResourceAction("weirdAction")
val resource = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val policyWithUser = AccessPolicy.members.set(samplePolicy.members + user.userId)(samplePolicy)
val policyExcludeAction = AccessPolicy.actions.set(samplePolicy.actions - action)(policyWithUser)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyExcludeAction)
val res = for{
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- service.policyEvaluatorService.hasPermissionByUserEmail(policy.id.resource, action, user.userEmail, samRequestContext)
} yield {
r shouldBe false
}
res.unsafeRunSync()
}
it should "return false if user not found" in {
val samplePolicy = genPolicy.sample.get
val action = ResourceAction("weirdAction")
val resource = genResource.sample.get.copy(resourceTypeName = defaultResourceType.name)
val policyWithOutUser = AccessPolicy.members.set(samplePolicy.members)(samplePolicy)
val policyExcludeAction = AccessPolicy.actions.set(samplePolicy.actions - action)(policyWithOutUser)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyExcludeAction)
val res = for{
_ <- policyDAO.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
_ <- policyDAO.createResourceType(defaultResourceType, samRequestContext)
_ <- policyDAO.createResource(resource, samRequestContext)
_ <- policyDAO.createPolicy(policy, samRequestContext)
r <- service.policyEvaluatorService.hasPermissionByUserEmail(policy.id.resource, action, WorkbenchEmail("randomEmail@foo.com"), samRequestContext)
} yield {
r shouldBe false
}
res.unsafeRunSync()
}
"listUserResources" should "list user's resources but not others" in {
val resource1 = FullyQualifiedResourceId(defaultResourceType.name, ResourceId("my-resource1"))
val resource2 = FullyQualifiedResourceId(defaultResourceType.name, ResourceId("my-resource2"))
val resource3 = FullyQualifiedResourceId(otherResourceType.name, ResourceId("my-resource1"))
val resource4 = FullyQualifiedResourceId(otherResourceType.name, ResourceId("my-resource2"))
val test = for {
_ <- service.createResourceType(defaultResourceType, samRequestContext)
_ <- service.createResourceType(otherResourceType, samRequestContext)
_ <- service.createResource(defaultResourceType, resource1.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(defaultResourceType, resource2.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(otherResourceType, resource3.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(otherResourceType, resource4.resourceId, dummyUserInfo, samRequestContext)
_ <- service.overwritePolicy(defaultResourceType, AccessPolicyName("in-it"), resource1, AccessPolicyMembership(Set(dummyUserInfo.userEmail), Set(ResourceAction("alter_policies")), Set.empty), samRequestContext)
_ <- service.overwritePolicy(defaultResourceType, AccessPolicyName("not-in-it"), resource1, AccessPolicyMembership(Set.empty, Set(ResourceAction("non_owner_action")), Set.empty), samRequestContext)
_ <- service.overwritePolicy(otherResourceType, AccessPolicyName("in-it"), resource3, AccessPolicyMembership(Set(dummyUserInfo.userEmail), Set(ResourceAction("alter_policies")), Set.empty), samRequestContext)
_ <- service.overwritePolicy(otherResourceType, AccessPolicyName("not-in-it"), resource3, AccessPolicyMembership(Set.empty, Set(ResourceAction("non_owner_action")), Set.empty), samRequestContext)
r <- service.policyEvaluatorService.listUserResources(defaultResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
r should contain theSameElementsAs Set(
UserResourcesResponse(resource1.resourceId, RolesAndActions(Set(defaultResourceType.ownerRoleName), Set(ResourceAction("alter_policies"))), RolesAndActions.empty, RolesAndActions.empty, Set.empty, Set.empty),
UserResourcesResponse(resource2.resourceId, RolesAndActions.fromRoles(Set(defaultResourceType.ownerRoleName)), RolesAndActions.empty, RolesAndActions.empty, Set.empty, Set.empty)
)
}
test.unsafeRunSync()
}
it should "return no auth domains where there is a resource in a constrainable type but does not have any auth domains" in {
val resource = genResource.sample.get.copy(authDomain = Set.empty)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
val res = for{
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
_ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext) // make sure managed groups in auth domain set are created. dummyUserInfo will be member of the created resourceId
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserResources(constrainableResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
val expected = Set(UserResourcesResponse(resource.resourceId, RolesAndActions.fromPolicyMembership(constrainablePolicyMembership), RolesAndActions.empty, RolesAndActions.empty, Set.empty, Set.empty))
r should contain theSameElementsAs expected
}
res.unsafeRunSync()
}
it should "list required authDomains if constrainable" in {
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
val res = for{
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
_ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext) // make sure managed groups in auth domain set are created. dummyUserInfo will be member of the created resourceId
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserResources(constrainableResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
val expected = Set(UserResourcesResponse(resource.resourceId, RolesAndActions.fromPolicyMembership(constrainablePolicyMembership), RolesAndActions.empty, RolesAndActions.empty, resource.authDomain, Set.empty))
r should contain theSameElementsAs expected
}
res.unsafeRunSync()
}
it should "list required authDomains and authDomains user is not a member of if constrainable" in {
val user = genUserInfo.sample.get
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(genPolicy.sample.get).copy(roles = Set.empty)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
val res = for{
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
_ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- constrainableService.createPolicy(policy.id, policy.members + user.userId, policy.roles, policy.actions, Set.empty, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserResources(constrainableResourceType.name, user.userId, samRequestContext)
} yield {
val expected = Set(UserResourcesResponse(resource.resourceId, RolesAndActions.fromPolicy(policy), RolesAndActions.empty, RolesAndActions.empty, resource.authDomain, resource.authDomain))
r should contain theSameElementsAs expected
}
res.unsafeRunSync()
}
}
@deprecated("this allows testing of deprecated functions, remove as part of CA-1783", "")
class DeprecatedPolicyEvaluatorSpec extends PolicyEvaluatorServiceSpec {
"listUserAccessPolicies" should "list user's access policies but not others" in {
val resource1 = FullyQualifiedResourceId(defaultResourceType.name, ResourceId("my-resource1"))
val resource2 = FullyQualifiedResourceId(defaultResourceType.name, ResourceId("my-resource2"))
val resource3 = FullyQualifiedResourceId(otherResourceType.name, ResourceId("my-resource1"))
val resource4 = FullyQualifiedResourceId(otherResourceType.name, ResourceId("my-resource2"))
val test = for {
_ <- service.createResourceType(defaultResourceType, samRequestContext)
_ <- service.createResourceType(otherResourceType, samRequestContext)
_ <- service.createResource(defaultResourceType, resource1.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(defaultResourceType, resource2.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(otherResourceType, resource3.resourceId, dummyUserInfo, samRequestContext)
_ <- service.createResource(otherResourceType, resource4.resourceId, dummyUserInfo, samRequestContext)
_ <- service.overwritePolicy(defaultResourceType, AccessPolicyName("in-it"), resource1, AccessPolicyMembership(Set(dummyUserInfo.userEmail), Set(ResourceAction("alter_policies")), Set.empty, None), samRequestContext)
_ <- service.overwritePolicy(defaultResourceType, AccessPolicyName("not-in-it"), resource1, AccessPolicyMembership(Set.empty, Set(ResourceAction("alter_policies")), Set.empty, None), samRequestContext)
_ <- service.overwritePolicy(otherResourceType, AccessPolicyName("in-it"), resource3, AccessPolicyMembership(Set(dummyUserInfo.userEmail), Set(ResourceAction("alter_policies")), Set.empty, None), samRequestContext)
_ <- service.overwritePolicy(otherResourceType, AccessPolicyName("not-in-it"), resource3, AccessPolicyMembership(Set.empty, Set(ResourceAction("alter_policies")), Set.empty, None), samRequestContext)
r <- service.policyEvaluatorService.listUserAccessPolicies(defaultResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
r should contain theSameElementsAs Set(
UserPolicyResponse(resource1.resourceId, AccessPolicyName(defaultResourceType.ownerRoleName.value), Set.empty, Set.empty, false),
UserPolicyResponse(resource2.resourceId, AccessPolicyName(defaultResourceType.ownerRoleName.value), Set.empty, Set.empty, false),
UserPolicyResponse(resource1.resourceId, AccessPolicyName("in-it"), Set.empty, Set.empty, false))
}
test.unsafeRunSync()
}
it should "return no auth domains where there is a resource in a constrainable type but does not have any auth domains" in {
val resource = genResource.sample.get.copy(authDomain = Set.empty)
val policyWithConstrainable = SamLenses.resourceTypeNameInAccessPolicy.set(constrainableResourceType.name)(genPolicy.sample.get)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(policyWithConstrainable)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
    val res = for {
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
      _ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext) // make sure managed groups in the auth domain set are created; dummyUserInfo will be a member of the created resourceId
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserAccessPolicies(constrainableResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
val expected = Set(UserPolicyResponse(resource.resourceId, viewPolicyName, Set.empty, Set.empty, false))
r shouldBe(expected)
}
res.unsafeRunSync()
}
it should "list required authDomains if constrainable" in {
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(genPolicy.sample.get)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
    val res = for {
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
      _ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext) // make sure managed groups in the auth domain set are created; dummyUserInfo will be a member of the created resourceId
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserAccessPolicies(constrainableResourceType.name, dummyUserInfo.userId, samRequestContext)
} yield {
val expected = Set(UserPolicyResponse(resource.resourceId, viewPolicyName, resource.authDomain, Set.empty, false))
r shouldBe(expected)
}
res.unsafeRunSync()
}
it should "list required authDomains and authDomains user is not a member of if constrainable" in {
val user = genUserInfo.sample.get
val resource = genResource.sample.get.copy(resourceTypeName = constrainableResourceType.name)
val policy = SamLenses.resourceIdentityAccessPolicy.set(resource.fullyQualifiedId)(genPolicy.sample.get).copy(roles = Set.empty)
val viewPolicyName = AccessPolicyName(constrainableReaderRoleName.value)
    val res = for {
_ <- constrainableService.createResourceType(constrainableResourceType, samRequestContext)
_ <- constrainableService.createResourceType(managedGroupResourceType, samRequestContext)
_ <- resource.authDomain.toList.traverse(a => managedGroupService.createManagedGroup(ResourceId(a.value), dummyUserInfo, samRequestContext = samRequestContext))
_ <- savePolicyMembers(policy)
// create resource that dummyUserInfo is a member of for constrainableResourceType
_ <- constrainableService.createResource(constrainableResourceType, resource.resourceId, Map(viewPolicyName -> constrainablePolicyMembership), resource.authDomain, None, dummyUserInfo.userId, samRequestContext)
_ <- dirDAO.createUser(WorkbenchUser(user.userId, TestSupport.genGoogleSubjectId(), user.userEmail, Some(TestSupport.genAzureB2CId())), samRequestContext)
_ <- constrainableService.createPolicy(policy.id, policy.members + user.userId, policy.roles, policy.actions, Set.empty, samRequestContext)
r <- constrainableService.policyEvaluatorService.listUserAccessPolicies(constrainableResourceType.name, user.userId, samRequestContext)
} yield {
val expected = Set(UserPolicyResponse(resource.resourceId, policy.id.accessPolicyName, resource.authDomain, resource.authDomain, false))
r shouldBe(expected)
}
res.unsafeRunSync()
}
}
| broadinstitute/sam | src/test/scala/org/broadinstitute/dsde/workbench/sam/service/PolicyEvaluatorServiceSpec.scala | Scala | bsd-3-clause | 42,216 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.internal
import com.intel.analytics.bigdl.dllib.nn.abstractnn._
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import scala.reflect.ClassTag
/**
* Spatial 1D version of Dropout.
* This version performs the same function as Dropout, however it drops
* entire 1D feature maps instead of individual elements. If adjacent frames
* within feature maps are strongly correlated (as is normally the case in
* early convolution layers) then regular dropout will not regularize the
* activations and will otherwise just result in an effective learning rate
* decrease. In this case, SpatialDropout1D will help promote independence
* between feature maps and should be used instead.
* The input of this layer should be 3D.
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* @param p Fraction of the input units to drop. Double between 0 and 1.
* @tparam T Numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
class SpatialDropout1D[T: ClassTag](
val p: Double = 0.5,
val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
extends KerasLayer[Tensor[T], Tensor[T], T](KerasLayer.addBatch(inputShape))
with IdentityOutputShape {
override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
val layer = com.intel.analytics.bigdl.dllib.nn.SpatialDropout1D(initP = p)
layer.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
}
}
object SpatialDropout1D {
def apply[@specialized(Float, Double) T: ClassTag](
p: Double = 0.5,
inputShape: Shape = null)(implicit ev: TensorNumeric[T]): SpatialDropout1D[T] = {
new SpatialDropout1D[T](p, inputShape)
}
}
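
// Usage sketch: entire 1D feature maps are dropped together. Shape values
// below are assumptions for illustration.
//
//   import com.intel.analytics.bigdl.dllib.utils.Shape
//
//   val drop = SpatialDropout1D[Float](p = 0.3, inputShape = Shape(10, 32))
//   // Takes 3D input of shape (batch, 10, 32); during training, whole
//   // feature maps along the last dimension are zeroed, not single elements.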
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/internal/SpatialDropout1D.scala | Scala | apache-2.0 | 2,548 |
package fs2.io
import java.net.{InetSocketAddress,NetworkInterface,ProtocolFamily,StandardSocketOptions}
import java.nio.channels.DatagramChannel
import fs2._
package object udp {
/**
* A single packet to send to the specified remote address or received from the specified address.
*
* @param remote remote party to send/receive packet to/from
* @param bytes data to send/receive
*/
case class Packet(remote: InetSocketAddress, bytes: Chunk[Byte])
/**
   * Provides a singleton stream of a UDP Socket that, when run, will bind to the
   * address specified by `bind`.
*
* @param bind address to bind to; defaults to an ephemeral port on all interfaces
* @param reuseAddress whether address has to be reused (@see [[java.net.StandardSocketOptions.SO_REUSEADDR]])
* @param sendBufferSize size of send buffer (@see [[java.net.StandardSocketOptions.SO_SNDBUF]])
* @param receiveBufferSize size of receive buffer (@see [[java.net.StandardSocketOptions.SO_RCVBUF]])
* @param allowBroadcast whether broadcast messages are allowed to be sent; defaults to true
* @param protocolFamily protocol family to use when opening the supporting [[DatagramChannel]]
* @param multicastInterface network interface for sending multicast packets
* @param multicastTTL time to live of sent multicast packets
* @param multicastLoopback whether sent multicast packets should be looped back to this host
*/
def open[F[_]](
bind: InetSocketAddress = new InetSocketAddress(0)
, reuseAddress: Boolean = false
, sendBufferSize: Option[Int] = None
, receiveBufferSize: Option[Int] = None
, allowBroadcast: Boolean = true
, protocolFamily: Option[ProtocolFamily] = None
, multicastInterface: Option[NetworkInterface] = None
, multicastTTL: Option[Int] = None
, multicastLoopback: Boolean = true
)(implicit AG: AsynchronousSocketGroup, F: Async[F], FR: Async.Run[F]): Stream[F,Socket[F]] = {
val mkChannel = F.delay {
val channel = protocolFamily.map { pf => DatagramChannel.open(pf) }.getOrElse(DatagramChannel.open())
channel.setOption[java.lang.Boolean](StandardSocketOptions.SO_REUSEADDR, reuseAddress)
sendBufferSize.foreach { sz => channel.setOption[Integer](StandardSocketOptions.SO_SNDBUF, sz) }
receiveBufferSize.foreach { sz => channel.setOption[Integer](StandardSocketOptions.SO_RCVBUF, sz) }
channel.setOption[java.lang.Boolean](StandardSocketOptions.SO_BROADCAST, allowBroadcast)
multicastInterface.foreach { iface => channel.setOption[NetworkInterface](StandardSocketOptions.IP_MULTICAST_IF, iface) }
multicastTTL.foreach { ttl => channel.setOption[Integer](StandardSocketOptions.IP_MULTICAST_TTL, ttl) }
channel.setOption[java.lang.Boolean](StandardSocketOptions.IP_MULTICAST_LOOP, multicastLoopback)
channel.bind(bind)
channel
}
Stream.bracket(F.bind(mkChannel)(ch => Socket.mkSocket(ch)))(s => Stream.emit(s), _.close)
}
}
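
// Usage sketch: a UDP echo service built on `open`. Hypothetical in the
// details — it assumes an implicit AsynchronousSocketGroup plus Async/Async.Run
// instances for the chosen F, and that `Socket` exposes `reads`/`write`.
//
//   val echo = open[Task](new InetSocketAddress(5555)).flatMap { socket =>
//     socket.reads().flatMap(packet => Stream.eval(socket.write(packet)))
//   }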
| japgolly/scalaz-stream | io/src/main/scala/fs2/io/udp/udp.scala | Scala | mit | 3,036 |
package carldata.sf.compiler
import carldata.sf.compiler.AST._
import carldata.sf.compiler.gen.{FlowScriptLexer, FlowScriptParser}
import carldata.sf.compiler.gen.FlowScriptParser._
import org.antlr.v4.runtime._
import scala.collection.JavaConverters._
/**
* Script parser.
* This parser uses ANTLR4 generated Parser and Lexer.
* What this object adds is conversion of ANTLR AST into FlowScript AST.
*/
object Parser {
def parse(code: String): Either[String, Module] = {
val errorListener = new SyntaxErrorListener()
val input = CharStreams.fromString(code)
val lexer = new FlowScriptLexer(input)
lexer.removeErrorListeners()
lexer.addErrorListener(errorListener)
val tokens = new CommonTokenStream(lexer)
val parser = new FlowScriptParser(tokens)
    // Don't show parser errors on the console
parser.removeErrorListeners()
parser.addErrorListener(errorListener)
// Now we are ready to run ANTLR parser
val compilationUnit = parser.compilationUnit()
// Parser succeeded only if error list is empty
if(errorListener.getErrors.size() > 0)
Left(errorListener.getErrors.firstElement())
else
Right(convertCompilationUnit(compilationUnit))
}
/** Convert ANTLR Context into AST Module node */
def convertCompilationUnit(ctx: CompilationUnitContext): Module = {
val xs = ctx.externalFunDef().asScala.map(convertExternFun).toList
val funDecl = ctx.functionDefinition().asScala.map(x => convertFunDef(x)).toList
Module(xs, funDecl)
}
/** Convert ANTLR Context into Function Definition node */
def convertExternFun(ctx: ExternalFunDefContext): ExternalFun = {
// Function name
val funName = ctx.Identifier().getText
// Function params
val params = if(ctx.paramList() == null) {
Seq()
} else {
ctx.paramList().param().asScala.map{pctx =>
FunParam(pctx.Identifier().getText, convertTypeDecl(pctx.typeDefinition()))
}.toList
}
// Function type
val typeDefCtx = ctx.typeDefinition()
val typeName = convertTypeDecl(typeDefCtx)
ExternalFun(funName, params, typeName)
}
/** Convert ANTLR Context into Function Definition node */
def convertFunDef(ctx: FunctionDefinitionContext): FunctionDef = {
// Function name
val funName = ctx.Identifier().getText
// Function params
val params = if(ctx.paramList() == null) {
Seq()
} else {
ctx.paramList().param().asScala.map{pctx =>
FunParam(pctx.Identifier().getText, convertTypeDecl(pctx.typeDefinition))
}.toList
}
// Function type
val typeDefCtx = ctx.typeDefinition()
val typeName = convertTypeDecl(typeDefCtx)
// function body
val body = convertBody(ctx.functionBody())
FunctionDef(funName, params, typeName, body)
}
def convertTypeDecl(context: TypeDefinitionContext): TypeDecl = {
def toType(name: String): TypeDecl = name match {
case "Number" => NumberType
case "String" => StringType
case "TimeSeries" => SeriesType
case other => CustomType(other)
}
if(context.typeList() != null) {
val inputTypes = context.typeList().Identifier().asScala.map(n => toType(n.getText)).toList
val outputType = toType(context.Identifier.getText)
FunType(inputTypes, outputType)
} else {
toType(context.Identifier.getText)
}
}
def convertBody(ctx: FunctionBodyContext): FunctionBody = {
val as = ctx.assignment().asScala.map { actx =>
Assignment(actx.Identifier().getText, convertExpr(actx.expression()))
}.toList
val e = convertExpr(ctx.expression())
FunctionBody(as, e)
}
/** Convert ANTLR Context into Term node */
def convertExpr(ctx: ExpressionContext): Expression = {
if(ctx.MultiplyOp() != null) {
val e1 = convertExpr(ctx.expression(0))
val e2 = convertExpr(ctx.expression(1))
val op = ctx.MultiplyOp().getText
BinaryOpExpr(e1, op, e2)
} else if(ctx.minusOp != null) {
val e1 = convertExpr(ctx.expression(0))
MinusOpExpr(e1)
} else if(ctx.negOp != null) {
val e1 = convertExpr(ctx.expression(0))
NegOpExpr(e1)
} else if(ctx.addOp != null) {
val e1 = convertExpr(ctx.expression(0))
val e2 = convertExpr(ctx.expression(1))
val op = ctx.addOp.getText
BinaryOpExpr(e1, op, e2)
} else if(ctx.boolOp != null) {
val e1 = convertExpr(ctx.expression(0))
val e2 = convertExpr(ctx.expression(1))
val op = ctx.boolOp.getText
BoolOpExpr(e1, op, e2)
} else if(ctx.RelationOp() != null) {
val e1 = convertExpr(ctx.expression(0))
val e2 = convertExpr(ctx.expression(1))
val op = ctx.RelationOp().getText
RelationExpr(e1, op, e2)
} else if(ctx.ifExpr != null) {
val e1 = convertExpr(ctx.expression(0))
val e2 = convertExpr(ctx.expression(1))
val e3 = convertExpr(ctx.expression(2))
IfExpr(e1, e2, e3)
} else if(ctx.funApp() != null) {
val name = ctx.funApp().Identifier().getText
val params = if(ctx.funApp().expressionList() == null) {
Seq()
} else {
ctx.funApp().expressionList().expression().asScala.map(convertExpr).toList
}
AppExpr(name, params)
} else if(ctx.variableExpr() != null) {
VariableExpr(ctx.variableExpr().Identifier().getText)
} else if(ctx.stringLiteral() != null){
val str = ctx.stringLiteral().QuotedString.getText
StringLiteral(str.substring(1,str.length-1))
} else if(ctx.numberLiteral() != null){
val v1 = ctx.numberLiteral().Integer(0).getText
val v2 = if(ctx.numberLiteral().Integer().size() > 1){
v1 + "." + ctx.numberLiteral().Integer(1).getText
} else {
v1
}
NumberLiteral(v2.toDouble)
} else if(ctx.nullLiteral() != null){
NumberLiteral(Double.NaN)
} else {
// Convert: '(' expression ')'
convertExpr(ctx.expression(0))
}
}
}
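
// Usage sketch: parsing a FlowScript snippet. The snippet's surface syntax is
// illustrative; the actual syntax is defined by the ANTLR grammar.
//
//   Parser.parse("def inc(x: Number): Number = x + 1") match {
//     case Right(module) => println(module)              // AST.Module on success
//     case Left(error)   => println(s"Parse failed: $error")
//   }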
| carldata/flow-script | src/main/scala/carldata/sf/compiler/Parser.scala | Scala | apache-2.0 | 5,961 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.utils
import org.apache.calcite.tools.RuleSet
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.{QueryConfig, Table, TableConfig, TableEnvironment}
import org.apache.flink.table.descriptors.{ConnectorDescriptor, TableDescriptor}
import org.apache.flink.table.sinks.TableSink
import org.apache.flink.table.sources.TableSource
class MockTableEnvironment extends TableEnvironment(new TableConfig) {
override private[flink] def writeToSink[T](
table: Table,
sink: TableSink[T],
queryConfig: QueryConfig): Unit = ???
override protected def checkValidTableName(name: String): Unit = ???
override def sql(query: String): Table = ???
override protected def getBuiltInNormRuleSet: RuleSet = ???
override protected def getBuiltInPhysicalOptRuleSet: RuleSet = ???
override def registerTableSink(
name: String,
fieldNames: Array[String],
fieldTypes: Array[TypeInformation[_]],
tableSink: TableSink[_]): Unit = ???
override def registerTableSink(name: String, tableSink: TableSink[_]): Unit = ???
override protected def createUniqueTableName(): String = ???
override protected def registerTableSourceInternal(name: String, tableSource: TableSource[_])
: Unit = ???
override def explain(table: Table): String = ???
override def connect(connectorDescriptor: ConnectorDescriptor): TableDescriptor = ???
}
| zhangminglei/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/utils/MockTableEnvironment.scala | Scala | apache-2.0 | 2,252 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2019 Helge Holzmann (Internet Archive) <helge@archive.org>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.archive.archivespark.dataspecs.access
import java.io.InputStream
import java.util.zip.GZIPInputStream
import org.archive.archivespark.sparkling.io.ByteArray
class ByteArrayAccessor(bytes: ByteArray, gz: Boolean = false) extends CloseableDataAccessor[InputStream] {
def this(bytes: Array[Byte], gz: Boolean) = this({
val array = new ByteArray()
array.append(bytes)
array
}, gz)
def this(bytes: Array[Byte]) = this(bytes, false)
override def get: Option[InputStream] = {
var stream: InputStream = null
try {
stream = bytes.toInputStream
stream = if (gz) new GZIPInputStream(stream) else stream
Some(stream)
} catch {
case e: Exception =>
e.printStackTrace()
if (stream != null) stream.close()
None
}
}
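
  // Usage sketch: reading possibly gzip-compressed bytes through the accessor.
  // `rawBytes` is an assumed Array[Byte] holding gzipped data.
  //
  //   val accessor = new ByteArrayAccessor(rawBytes, gz = true)
  //   accessor.get.foreach { in =>
  //     try { /* consume the decompressed stream */ } finally in.close()
  //   }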
}
| helgeho/ArchiveSpark | src/main/scala/org/archive/archivespark/dataspecs/access/ByteArrayAccessor.scala | Scala | mit | 1,996 |
import edu.uta.diql._
import org.apache.spark._
import org.apache.spark.rdd._
import org.apache.log4j._
import scala.util.Random
object Add {
def main ( args: Array[String] ) {
val repeats = args(0).toInt
val n = args(1).toInt
val m = args(2).toInt
val mm = m
val conf = new SparkConf().setAppName("Add")
val sc = new SparkContext(conf)
conf.set("spark.logConf","false")
conf.set("spark.eventLog.enabled","false")
LogManager.getRootLogger().setLevel(Level.WARN)
def randomMatrix ( n: Int, m: Int ) = {
val max = 10
val l = Random.shuffle((0 until n).toList)
val r = Random.shuffle((0 until m).toList)
sc.parallelize(l)
.flatMap{ i => val rand = new Random()
r.map{ j => ((i.toLong,j.toLong),rand.nextDouble()*max) } }
.cache()
}
val M = randomMatrix(n,m)
val N = randomMatrix(n,m)
val size = sizeof(((1L,1L),1.0D))
println("*** %d %d %.2f GB".format(n,m,(n.toDouble*m)*size/(1024.0*1024.0*1024.0)))
def test () {
var t: Long = System.currentTimeMillis()
try {
val R = M.join(N).mapValues{ case (m,n) => n + m }
println(R.count)
println("**** AddSpark run time: "+(System.currentTimeMillis()-t)/1000.0+" secs")
} catch { case x: Throwable => println(x) }
t = System.currentTimeMillis()
try {
v(sc,"""
var R: matrix[Double] = matrix();
for i = 0, n-1 do
for j = 0, mm-1 do
R[i,j] := M[i,j]+N[i,j];
println(R.count());
""")
println("**** AddDiablo run time: "+(System.currentTimeMillis()-t)/1000.0+" secs")
} catch { case x: Throwable => println(x) }
}
for ( i <- 1 to repeats )
test()
sc.stop()
}
}
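
// Invocation sketch (args: repeats, n, m); the jar name is hypothetical:
//
//   spark-submit --class Add diql-benchmarks.jar 3 1000 1000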
| fegaras/DIQL | benchmarks/diablo/add.scala | Scala | apache-2.0 | 1,797 |
package org.scalajs.openui5.sap.m
import org.scalajs.openui5.util.{Settings, SettingsMap, noSettings}
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
@ScalaJSDefined
trait TableSettings extends ListBaseSettings
object TableSettings extends TableSettingsBuilder(noSettings)
class TableSettingsBuilder(val dict: SettingsMap)
extends Settings[TableSettings, TableSettingsBuilder](new TableSettingsBuilder(_))
with TableSetters[TableSettings, TableSettingsBuilder]
trait TableSetters[T <: js.Object, B <: Settings[T, _]] extends ListBaseSetters[T, B] {
def backgroundDesign(v: BackgroundDesign) = setting("backgroundDesign", v)
def fixedLayout(v: Boolean) = setting("fixedLayout", v)
def showOverlay(v: Boolean) = setting("showOverlay", v)
def columns(v: js.Array[Column]) = setting("columns", v)
}
/** sap.m.Table control provides a set of sophisticated and convenience
* functions for responsive table design. For mobile devices, the
 * recommended limit of table rows is 100 (based on 4 columns) to ensure
* proper performance. To improve initial rendering on large tables, use the
* growing feature.
*
* @since 1.16
*/
@JSName("sap.m.Table")
@js.native
class Table(id: js.UndefOr[String] = js.native,
settings: js.UndefOr[TableSettings] = js.native) extends ListBase {
def this(id: String) = this(id, js.undefined)
def this(settings: TableSettings) = this(js.undefined, settings)
def addColumn(column: Column): this.type = js.native
}
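
// Usage sketch (hypothetical: assumes the settings builder materializes to a
// TableSettings, following the builder pattern used throughout this binding):
//
//   val table = new Table(TableSettings.fixedLayout(true).showOverlay(false))
//   // columns are then attached via table.addColumn(...)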
| lastsys/scalajs-openui5 | src/main/scala/org/scalajs/openui5/sap/m/Table.scala | Scala | mit | 1,525 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import org.scalatest.{FlatSpec, Matchers}
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl._
import scala.math._
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.optim.{L1Regularizer, L2Regularizer, SGD}
import com.intel.analytics.bigdl.utils.{RandomGenerator, T}
@com.intel.analytics.bigdl.tags.Parallel
class LinearSpec extends FlatSpec with Matchers {
"Linear L2 regularizer" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericDouble
val state1 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.1, "momentum" -> 0.002)
val state2 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.0, "momentum" -> 0.002)
val inputN = 5
val outputN = 2
val batchSize = 5
val criterion = new MSECriterion[Double]
val input = Tensor[Double](batchSize, inputN).rand()
val labels = Tensor[Double](batchSize, outputN).rand()
val model1 = Sequential()
.add(Linear(inputN, outputN))
.add(Sigmoid())
val (weights1, grad1) = model1.getParameters()
val model2 = Sequential()
.add(Linear(inputN, outputN,
wRegularizer = L2Regularizer(0.1), bRegularizer = L2Regularizer(0.1)))
.add(Sigmoid())
val (weights2, grad2) = model2.getParameters()
weights2.copy(weights1.clone())
grad2.copy(grad1.clone())
val sgd = new SGD[Double]
def feval1(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model1.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model1.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model1.backward(input, gradInput)
(_loss, grad1)
}
def feval2(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model2.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model2.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model2.backward(input, gradInput)
(_loss, grad2)
}
var loss1: Array[Double] = null
for (i <- 1 to 100) {
loss1 = sgd.optimize(feval1, weights1, state1)._2
println(s"${i}-th loss = ${loss1(0)}")
}
var loss2: Array[Double] = null
for (i <- 1 to 100) {
loss2 = sgd.optimize(feval2, weights2, state2)._2
println(s"${i}-th loss = ${loss2(0)}")
}
weights1 should be(weights2)
loss1 should be(loss2)
}
"Linear without bias L2 regularizer" should "works correctly" in {
import com.intel.analytics.bigdl.numeric.NumericDouble
val state1 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.1, "momentum" -> 0.002)
val state2 = T("learningRate" -> 0.1, "learningRateDecay" -> 5e-7,
"weightDecay" -> 0.0, "momentum" -> 0.002)
val inputN = 5
val outputN = 2
val batchSize = 5
val criterion = new MSECriterion[Double]
val input = Tensor[Double](batchSize, inputN).rand()
val labels = Tensor[Double](batchSize, outputN).rand()
val model1 = Sequential()
.add(Linear(inputN, outputN, withBias = false))
.add(Sigmoid())
val (weights1, grad1) = model1.getParameters()
val model2 = Sequential()
.add(Linear(inputN, outputN, withBias = false,
wRegularizer = L2Regularizer(0.1), bRegularizer = L2Regularizer(0.1)))
.add(Sigmoid())
val (weights2, grad2) = model2.getParameters()
weights2.copy(weights1.clone())
grad2.copy(grad1.clone())
val sgd = new SGD[Double]
def feval1(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model1.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model1.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model1.backward(input, gradInput)
(_loss, grad1)
}
def feval2(x: Tensor[Double]): (Double, Tensor[Double]) = {
val output = model2.forward(input).toTensor[Double]
val _loss = criterion.forward(output, labels)
model2.zeroGradParameters()
val gradInput = criterion.backward(output, labels)
model2.backward(input, gradInput)
(_loss, grad2)
}
var loss1: Array[Double] = null
for (i <- 1 to 100) {
loss1 = sgd.optimize(feval1, weights1, state1)._2
println(s"${i}-th loss = ${loss1(0)}")
}
var loss2: Array[Double] = null
for (i <- 1 to 100) {
loss2 = sgd.optimize(feval2, weights2, state2)._2
println(s"${i}-th loss = ${loss2(0)}")
}
weights1 should be(weights2)
loss1 should be(loss2)
}
"Linear module" should "converge to correct weight and bias" in {
val inputN = 5
val outputN = 2
val linear = new Linear[Double](inputN, outputN)
val mse = new MSECriterion[Double]
val input = Tensor[Double](inputN)
val res = Tensor[Double](outputN)
var err = 0.0
for (i <- 1 to 10000) {
input.rand()
for (y <- 1 to outputN) {
res(Array(y)) = 1.0 * y
for (x <- 1 to inputN) {
res(Array(y)) += 0.1 * y * x * input(Array(x))
}
}
val output = linear.forward(input)
err = mse.forward(output, res)
val grad = mse.backward(output, res)
linear.zeroGradParameters()
linear.backward(input, grad)
linear.updateParameters(0.5 / log(i + 3))
}
val params = linear.parameters()
val weight = params._1(0)
val bias = params._1(1)
val expectedWeight = Tensor[Double](outputN, inputN)
val expectedBias = Tensor[Double](outputN)
for (y <- 1 to outputN) {
expectedBias(Array(y)) = 1.0 * y
for (x <- 1 to inputN) {
expectedWeight(Array(y, x)) = 0.1 * y * x
}
}
    expectedBias.map(bias, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
    expectedWeight.map(weight, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
assert(err < 1e-6)
}
"Linear module in batch mode" should "converge to correct weight and bias" in {
val inputN = 5
val outputN = 2
val batchN = 3
val linear = new Linear[Double](inputN, outputN)
val mse = new MSECriterion[Double]
val input = Tensor[Double](batchN, inputN)
val res = Tensor[Double](batchN, outputN)
var err = 0.0
for (i <- 1 to 10000) {
input.rand()
for (k <- 1 to batchN) {
for (y <- 1 to outputN) {
res(Array(k, y)) = 1.0 * y
for (x <- 1 to inputN) {
res(Array(k, y)) += 0.1 * y * x * input(Array(k, x))
}
}
}
val output = linear.forward(input)
err = mse.forward(output, res)
val grad = mse.backward(output, res)
linear.zeroGradParameters()
linear.backward(input, grad)
linear.updateParameters(0.5 / log(i + 3))
}
val params = linear.parameters()
val weight = params._1(0)
val bias = params._1(1)
val expectedWeight = Tensor[Double](outputN, inputN)
val expectedBias = Tensor[Double](outputN)
for (y <- 1 to outputN) {
expectedBias(Array(y)) = 1.0 * y
for (x <- 1 to inputN) {
expectedWeight(Array(y, x)) = 0.1 * y * x
}
}
    expectedBias.map(bias, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
    expectedWeight.map(weight, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
assert(err < 1e-6)
}
"Linear module in batch mode without bias" should "converate to correct weight and bias" in {
val inputN = 5
val outputN = 2
val batchN = 3
val linear = new Linear[Double](inputN, outputN, withBias = false)
val mse = new MSECriterion[Double]
val input = Tensor[Double](batchN, inputN)
val res = Tensor[Double](batchN, outputN)
var err = 0.0
for (i <- 1 to 10000) {
input.rand()
for (k <- 1 to batchN) {
for (y <- 1 to outputN) {
res(Array(k, y)) = 0
for (x <- 1 to inputN) {
res(Array(k, y)) += 0.1 * y * x * input(Array(k, x))
}
}
}
val output = linear.forward(input)
err = mse.forward(output, res)
val grad = mse.backward(output, res)
linear.zeroGradParameters()
linear.backward(input, grad)
linear.updateParameters(0.5 / log(i + 3))
}
val params = linear.parameters()
val weight = params._1(0)
val expectedWeight = Tensor[Double](outputN, inputN)
for (y <- 1 to outputN) {
for (x <- 1 to inputN) {
expectedWeight(Array(y, x)) = 0.1 * y * x
}
}
    expectedWeight.map(weight, (v1, v2) => {
      assert(abs(v1 - v2) < 1e-6)
      v1
    })
assert(err < 1e-6)
}
"Linear module in batch mode" should "be good in gradient check" in {
val linear = new Linear[Double](5, 2)
linear.reset()
val input = Tensor[Double](3, 5).rand()
val checker = new GradientChecker(1e-4, 1e-2)
checker.checkLayer[Double](linear, input) should be(true)
}
"Linear forward" should "be correct" in {
val linear = new Linear[Double](3, 2)
linear.weight.setValue(1, 1, 1.0)
linear.weight.setValue(1, 2, 2.0)
linear.weight.setValue(1, 3, 3.0)
linear.weight.setValue(2, 1, 4.0)
linear.weight.setValue(2, 2, 5.0)
linear.weight.setValue(2, 3, 6.0)
linear.bias.setValue(1, 7.0)
linear.bias.setValue(2, 8.0)
val input = Tensor[Double](T(0.1, 0.2, 0.3))
linear.forward(input) should be(Tensor[Double](T(8.4, 11.2)))
}
"Linear forward" should "be correct with given weight" in {
val weight = Tensor[Double](T(
T(1.0, 2.0, 3.0),
T(4.0, 5.0, 6.0)
))
val bias = Tensor[Double](T(
T(7.0, 8.0)
))
val linear = new Linear[Double](inputSize = 3, outputSize = 2,
initWeight = weight, initBias = bias)
val input = Tensor[Double](T(0.1, 0.2, 0.3))
linear.forward(input) should be(Tensor[Double](T(8.4, 11.2)))
}
"Linear forward" should "be correct in batch mode" in {
val linear = new Linear[Double](3, 2)
linear.weight.setValue(1, 1, 1.0)
linear.weight.setValue(1, 2, 2.0)
linear.weight.setValue(1, 3, 3.0)
linear.weight.setValue(2, 1, 4.0)
linear.weight.setValue(2, 2, 5.0)
linear.weight.setValue(2, 3, 6.0)
linear.bias.setValue(1, 7.0)
linear.bias.setValue(2, 8.0)
val input = Tensor[Double](T(T(0.1, 0.2, 0.3), T(0.2, 0.4, 0.6)))
linear.forward(input) should be(Tensor[Double](T(T(8.4, 11.2), T(9.8, 14.4))))
}
"Linear with scaleW and scaleB" should "be correct with given weight" in {
val weight = Tensor[Double](T(
T(1.0, 2.0, 3.0),
T(4.0, 5.0, 6.0)
))
val bias = Tensor[Double](T(
T(7.0, 8.0)
))
val linear = new Linear[Double](inputSize = 3, outputSize = 2,
initWeight = weight, initBias = bias)
val linear2 = linear.cloneModule().asInstanceOf[Linear[Double]].setScaleB(2.0).setScaleW(0.5)
val input = Tensor[Double](T(0.1, 0.2, 0.3))
val output1 = linear.forward(input)
val output2 = linear2.forward(input)
output1 should be(output2)
val gradOutput = Tensor(output1)
val gradInput1 = linear.backward(input, gradOutput)
val gradInput2 = linear2.backward(input, gradOutput)
gradInput1 should be(gradInput2)
linear2.gradWeight should be(linear.gradWeight.mul(0.5))
linear2.gradBias should be(linear.gradBias.mul(2))
}
"Xavier" should "init right in SpatialConvolution" in {
RandomGenerator.RNG.setSeed(1)
val linear = Linear[Float](3, 5)
.setInitMethod(Xavier, Zeros)
    val expectedWeight = Tensor[Float](Storage(Array(
-0.1399592, -0.32341975, 0.32080957,
0.042518664, -0.5119037, -0.097942464,
0.6549186, -0.468386, -0.8185887,
0.059606634, 0.29525837, 0.7170032,
-0.14323229, -0.07412344, 0.10165376
).map(_.toFloat))).resize(5, 3)
    val expectedBias = Tensor[Float](T(0f, 0f, 0f, 0f, 0f))
    linear.weight should be (expectedWeight)
    linear.bias should be (expectedBias)
}
}
| JerryYanWan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/nn/LinearSpec.scala | Scala | apache-2.0 | 12,701 |
/*
* ClassDependenceAnalyser - A tool for java classes dependence analysis
* Copyright (C) 2016 <chentaov5@gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\\\#####^^^--_
* _-^##########// ( ) \\\\##########^-_
* -############// |\\^^/| \\\\############-
* _/############// (@::@) \\\\############\\_
* /#############(( \\\\// ))#############\\
* -###############\\\\ (oo) //###############-
* -#################\\\\ / VV \\ //#################-
* -###################\\\\/ \\//###################-
* _#/|##########/\\######( /\\ )######/\\##########|\\#_
* |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\|
* ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| '
* ` ` ` ` / | | | | \\ ' ' ' '
* ( | | | | )
* __\\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.jllk.analyser
import java.io._
/**
* @author chentaov5@gmail.com
*
*/
object ProcessUtils {
def exec(cmd: String): String = {
try {
val fos = new ByteArrayOutputStream
val rt = Runtime.getRuntime
val proc = rt.exec(cmd)
val errorGobbler = new StreamGobbler(proc.getErrorStream(), "ERROR")
val outputGobbler = new StreamGobbler(proc.getInputStream(), "OUTPUT", fos)
errorGobbler.start()
outputGobbler.start()
val exitVal = proc.waitFor
System.out.println(s"ExitValue: $exitVal")
fos.flush()
fos.close()
new String(fos.toByteArray)
}
catch {
case t: Throwable => {
t.printStackTrace()
""
}
}
}
}
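
// Usage sketch:
//
//   val listing = ProcessUtils.exec("ls -la")   // captured stdout, or "" on error
//   println(listing)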
class StreamGobbler(private val is: InputStream, private val ty: String, private val os: OutputStream) extends Thread {
def this(is: InputStream, ty: String) = this(is, ty, null)
override def run() {
try {
var pw: PrintWriter = null
if (os != null) {
pw = new PrintWriter(os)
}
val isr = new InputStreamReader(is)
val br = new BufferedReader(isr)
var line: String = null
while ( {
line = br.readLine
line != null
}) {
if (pw != null) {
pw.println(line)
}
}
if (pw != null) pw.flush()
}
catch {
case ioe: IOException => {
ioe.printStackTrace()
}
}
}
}
| JLLK/ClassDependenceAnalyser | src/main/scala/com/github/jllk/analyser/ProcessUtils.scala | Scala | gpl-2.0 | 3,252 |
import models.{Transformer, User}
import play.api.Logger
/**
* Author: matthijs
* Created on: 24 May 2014.
*
* Initial set of data to be loaded
*/
object InitialData {
def insert() {
// Create a default user
val defaultEmail : String = "sendr@localhost"
val defaultPassword : String = "klJJS13j#k"
User.findByEmail(defaultEmail) match {
case Some(user) =>
if (user.password.equals(defaultPassword)) {
Logger.debug("Password is unhashed, removing and recreating account")
User.delete(defaultEmail)
User.create(defaultEmail,defaultPassword)
}
      // User already exists with a hashed password: nothing to do
case None => User.create(defaultEmail,defaultPassword)
}
// Create an example transformer
if (Transformer.all.isEmpty) {
val soapMessage : String =
"""
| <soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:ced="http://www.ced-europe.com/schemas/wsdl/replicatie/CEDWEPF">
| <soapenv:Header>
| <ced:authentication>
| <username>{user}</username>
| <password>{password}</password>
| </ced:authentication>
| </soapenv:Header>
| <soapenv:Body>
| <ced:CEDWEPF>
| <ActionForCEDWEPF>
| <Mode>Insert</Mode>
| <TimeStampESB>{timestamp}</TimeStampESB>
| <TimeStampEIS>{timestamp}</TimeStampEIS>
| <SPLCD>{0}</SPLCD>
| <WKZOMS>{1}</WKZOMS>
| </ActionForCEDWEPF>
| </ced:CEDWEPF>
| </soapenv:Body>
| </soapenv:Envelope>
""".stripMargin
Transformer
.create(new Transformer(1,"CEDWEPPF",null,"/home/sendr/import/cedwepf", ".csv","cp1252","UTF-8",
"http://localhost:9000/WS/cedwepf","username","secret",10000,soapMessage,"2014-05-24T13:59:00",1))
}
}
}
| plamola/sendR | app/InitialData.scala | Scala | gpl-2.0 | 2,019 |
package im.actor.server.presences
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import akka.testkit.TestProbe
import akka.util.Timeout
import org.scalatest.time.{ Seconds, Span }
import im.actor.server.{ ActorSuite, KafkaSpec, SqlSpecHelpers }
class PresenceManagerSpec extends ActorSuite with SqlSpecHelpers {
behavior of "PresenceManager"
it should "subscribe to presences" in e1
it should "send presence on subscription" in e2
it should "deliver presence changes" in e3
it should "change presence to Offline after timeout" in e4
import PresenceManager._
import Presences._
implicit val ec: ExecutionContext = system.dispatcher
implicit lazy val (ds, db) = migrateAndInitDb()
override implicit val patienceConfig = PatienceConfig(timeout = Span(5, Seconds))
implicit val timeout: Timeout = Timeout(5.seconds)
implicit val region = PresenceManager.startRegion()
val probe = TestProbe()
val userId = 1
def e1() = {
    whenReady(subscribe(userId, probe.ref)) { _ ⇒ }
}
def e2() = {
probe.expectMsg(PresenceState(userId, Offline, None))
}
def e3() = {
presenceSetOnline(userId, 500)
val lastSeenAt = probe.expectMsgPF() {
      case PresenceState(1, Online, Some(ls)) ⇒
ls
}
presenceSetOffline(userId, 100)
probe.expectMsgPF() {
      case PresenceState(1, Offline, Some(ls)) ⇒
ls should ===(lastSeenAt)
}
}
def e4() = {
presenceSetOnline(userId, 100)
val lastSeenAt = probe.expectMsgPF() {
      case PresenceState(1, Online, Some(ls)) ⇒
ls
}
Thread.sleep(200)
probe.expectMsgPF() {
      case PresenceState(1, Offline, Some(ls)) ⇒
ls should ===(lastSeenAt)
}
}
override def afterAll: Unit = {
super.afterAll()
system.awaitTermination()
ds.close()
}
}
| boneyao/actor-platform | actor-server/actor-tests/src/test/scala/im/actor/server/presences/PresenceManagerSpec.scala | Scala | mit | 1,854 |
import scala.io.Source
var count: Int = 0
def isTr(a: Int, b: Int, c: Int): Boolean = (a + b) > c && (a + c) > b && (b + c) > a
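// e.g. isTr(3, 4, 5) == true (valid triangle); isTr(1, 2, 10) == false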
for (l <- Source.fromFile("day03.input").getLines()) {
val t = l.split(" ")
.toList
.filter(_ != "")
.map(_.toInt)
if (isTr(t(0), t(1), t(2))) count += 1
}
println(count)
//part 2
def isSame(a: Int, b: Int, c: Int): Boolean = (a % 100 == b % 100) && (a % 100 == c % 100)
count = 0
for (l <- Source.fromFile("day03.input").getLines()) {
val t = l.split(" ")
.toList
.filter(_ != "")
.map(_.toInt)
if (isTr(t(0), t(1), t(2)) && isSame(t(0), t(1), t(2))) {
println(t)
count += 1
}
}
println(count)
| mozartvn/adventofcode2016 | day03.scala | Scala | mit | 674 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver.ui
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import org.apache.spark.internal.Logging
import org.apache.spark.ui._
import org.apache.spark.ui.UIUtils._
import org.apache.spark.util.Utils
/** Page for Spark Web UI that shows statistics of jobs running in the thrift server */
private[ui] class ThriftServerSessionPage(parent: ThriftServerTab)
extends WebUIPage("session") with Logging {
val store = parent.store
private val startTime = parent.startTime
/** Render the page */
def render(request: HttpServletRequest): Seq[Node] = {
val parameterId = request.getParameter("id")
require(parameterId != null && parameterId.nonEmpty, "Missing id parameter")
val content = store.synchronized { // make sure all parts in this page are consistent
      val sessionStat = store.getSession(parameterId).orNull
require(sessionStat != null, "Invalid sessionID[" + parameterId + "]")
generateBasicStats() ++
<br/> ++
<h4>
User {sessionStat.userName},
IP {sessionStat.ip},
Session created at {formatDate(sessionStat.startTimestamp)},
Total run {sessionStat.totalExecution} SQL
</h4> ++
generateSQLStatsTable(request, sessionStat.sessionId)
}
UIUtils.headerSparkPage(request, "JDBC/ODBC Session", content, parent)
}
/** Generate basic stats of the thrift server program */
private def generateBasicStats(): Seq[Node] = {
val timeSinceStart = System.currentTimeMillis() - startTime.getTime
<ul class ="list-unstyled">
<li>
<strong>Started at: </strong> {formatDate(startTime)}
</li>
<li>
<strong>Time since start: </strong>{formatDurationVerbose(timeSinceStart)}
</li>
</ul>
}
/** Generate stats of batch statements of the thrift server program */
private def generateSQLStatsTable(request: HttpServletRequest, sessionID: String): Seq[Node] = {
val executionList = store.getExecutionList
.filter(_.sessionId == sessionID)
val numStatement = executionList.size
val table = if (numStatement > 0) {
val sqlTableTag = "sqlsessionstat"
val sqlTablePage =
Option(request.getParameter(s"$sqlTableTag.page")).map(_.toInt).getOrElse(1)
try {
Some(new SqlStatsPagedTable(
request,
parent,
executionList,
"sqlserver/session",
UIUtils.prependBaseUri(request, parent.basePath),
sqlTableTag
).table(sqlTablePage))
} catch {
case e@(_: IllegalArgumentException | _: IndexOutOfBoundsException) =>
Some(<div class="alert alert-error">
<p>Error while rendering job table:</p>
<pre>
{Utils.exceptionString(e)}
</pre>
</div>)
}
} else {
None
}
val content =
<span id="sqlsessionstat" class="collapse-aggregated-sqlsessionstat collapse-table"
onClick="collapseTable('collapse-aggregated-sqlsessionstat',
'aggregated-sqlsessionstat')">
<h4>
<span class="collapse-table-arrow arrow-open"></span>
<a>SQL Statistics</a>
</h4>
</span> ++
<div class="aggregated-sqlsessionstat collapsible-table">
{table.getOrElse("No statistics have been generated yet.")}
</div>
content
}
}
| maropu/spark | sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/ui/ThriftServerSessionPage.scala | Scala | apache-2.0 | 4,242 |
package edu.osu.cse.groenkeb.logic.proof.engine
import scala.collection.immutable._
import edu.osu.cse.groenkeb.logic.proof.ProofContext
import edu.osu.cse.groenkeb.logic.proof.CompleteProof
sealed abstract class ProofResult extends Product with Serializable {
def context: ProofContext
}
final case class Failure(context : ProofContext, hint: SearchHint = Cut()) extends ProofResult
final case class Success(proof: CompleteProof, context: ProofContext, hint: SearchHint = Cut()) extends ProofResult
final case class Pending(context: ProofContext,
steps: Seq[ProofStep],
aggregator: (ProofContext, Seq[Stream[ProofResult]]) => ProofResult) extends ProofResult
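
// Usage sketch: consuming a ProofResult.
//
//   def describe(result: ProofResult): String = result match {
//     case Success(proof, _, _) => s"proved: $proof"
//     case Failure(context, _)  => s"no proof found in $context"
//     case Pending(_, steps, _) => s"pending (${steps.size} steps queued)"
//   }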
| bgroenks96/PropLogic | proof-engine/src/main/scala/edu/osu/cse/groenkeb/logic/proof/engine/proofResult.scala | Scala | mit | 714 |
/*
* Copyright 2011 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.resource
import java.net.URL
import java.io.{BufferedReader, Reader, InputStream, File => JFile}
import com.ckkloverdos.maybe._
/**
* A `StreamResource` is something for which we can potentially obtain an `InputStream`.
*
* Each stream resource is identifiable by a URL.
*
* The character- and string-oriented methods assume UTF-8 as the encoding.
*
* @author Christos KK Loverdos <loverdos@gmail.com>.
*/
trait StreamResource {
/**
* Returns the [[com.ckkloverdos.resource.StreamResourceContext]] that resolved this resource.
*/
def resolver: StreamResourceContext
def exists: Boolean
def url: URL
def name: String
def path: String
def canonicalPath: String
def metadata: Map[String, String]
def mapStream[A](f: InputStream => A): Maybe[A]
def flatMapStream[A](f: InputStream => Maybe[A]): Maybe[A]
def mapReader[A](f: Reader => A): Maybe[A]
def mapBufferedReader[A](f: BufferedReader => A): Maybe[A]
def mapBytes[A](f: Array[Byte] => A): Maybe[A]
def mapString[A](f: String => A): Maybe[A]
def stringContent: Maybe[String]
def byteContent: Maybe[Array[Byte]]
}
object StreamResource {
// TODO move elsewhere
def readBytes(is: InputStream, close: Boolean = false, bufferSize: Int = 4096): Maybe[Array[Byte]] = Maybe {
var result = new Array[Byte](0)
var buffer = new Array[Byte](bufferSize)
var count = is.read(buffer)
while(count > -1) {
var newresult = new Array[Byte](result.length + count)
System.arraycopy(result, 0, newresult, 0, result.length)
System.arraycopy(buffer, 0, newresult, result.length, count)
result = newresult
count = is.read(buffer)
}
if(close) {
is.close()
}
result
}
// TODO move elsewhere
def readStringFromStream(
is: InputStream,
encoding: String = "UTF-8",
close: Boolean = false,
bufferSize: Int = 4096
): Maybe[String] =
    readBytes(is, close, bufferSize).map(x ⇒ new String(x, encoding))
def apply(file: JFile): Maybe[StreamResource] =
FileSystemRootResourceContext.getResource(file.getAbsolutePath)
}
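
// Usage sketch (Maybe-based composition, as elsewhere in this library; the
// file path is an assumption):
//
//   for {
//     resource <- StreamResource(new java.io.File("conf/app.properties"))
//     content  <- resource.stringContent
//   } yield println(content)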
| loverdos/streamresource | src/main/scala/com/ckkloverdos/resource/StreamResource.scala | Scala | apache-2.0 | 2,743 |
package com.github.vooolll.client.feed
import com.github.vooolll.base.FacebookClientSupport
import cats.implicits._
import com.github.vooolll.domain.FacebookPaging
import com.github.vooolll.domain.likes.{FacebookLike, FacebookLikes, FacebookLikesSummary}
import com.github.vooolll.domain.profile.FacebookUserId
class LikeSpec extends FacebookClientSupport {
import com.github.vooolll.base.TestConfiguration._
val like = FacebookLike(FacebookUserId("117656352360395"), "Bob Willins".some)
val likePaging =
FacebookPaging("MTE3NjU2MzUyMzYwMzk1".some, "MTE3NjU2MzUyMzYwMzk1".some)
val likes = FacebookLikes(List(like), paging = None)
val likesSummary = FacebookLikesSummary(
totalCount = 1,
canLike = true.some,
hasLikes = true.some
)
val likesWithSummary = likes.copy(summary = likesSummary.some)
"Facebook Graph Api" should {
"return likes of post" in { c =>
c.likes(postId).withoutPaging map (_ shouldBe likes)
}
"return likes of post result" in { c =>
c.likesResult(postId).withoutPaging map (_ shouldBe likes.asRight)
}
"return likes of post with summary" in { c =>
c.likes(postId, summary = true).withoutPaging map (_ shouldBe likesWithSummary)
}
"return likes of post result with summary" in { c =>
c.likesResult(postId, summary = true).withoutPaging map (_ shouldBe likesWithSummary.asRight)
}
}
}
| vooolll/facebook4s | src/test/scala/com/github/vooolll/client/feed/LikeSpec.scala | Scala | apache-2.0 | 1,407 |
package scalajs.antdesign
sealed abstract class SortOrder(val id: String)
object SortOrder {
case object Ascend extends SortOrder("ascend")
case object Descend extends SortOrder("descend")
}
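
// Usage sketch: mapping the enum onto the string id antd expects.
//
//   def toProp(order: SortOrder): String = order.id   // "ascend" | "descend"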
| mdedetrich/scalajs-antdesign | src/main/scala/scalajs/antdesign/SortOrder.scala | Scala | bsd-3-clause | 198 |
package scala.xml
import scala.collection.Seq
import org.junit.Assert.assertEquals
import org.junit.Test
class SerializationTest {
@Test
def xmlLiteral: Unit = {
val n = <node/>
assertEquals(n, JavaByteSerialization.roundTrip(n))
}
@Test
def empty: Unit = {
assertEquals(NodeSeq.Empty, JavaByteSerialization.roundTrip(NodeSeq.Empty))
}
@Test
def unmatched: Unit = {
assertEquals(NodeSeq.Empty, JavaByteSerialization.roundTrip(<xml/> \\ "HTML"))
}
@Test
def implicitConversion: Unit = {
val parent = <parent><child></child><child/></parent>
val children: Seq[Node] = parent.child
val asNodeSeq: NodeSeq = children
assertEquals(asNodeSeq, JavaByteSerialization.roundTrip(asNodeSeq))
}
}
| scala/scala-xml | jvm/src/test/scala/scala/xml/SerializationTest.scala | Scala | apache-2.0 | 745 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.models.python
import java.util.{List => JList, Map => JMap}
import com.intel.analytics.bigdl.Criterion
import com.intel.analytics.bigdl.dataset.PaddingParam
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.nn.keras.KerasLayer
import com.intel.analytics.bigdl.optim.{OptimMethod, ValidationMethod, ValidationResult}
import com.intel.analytics.bigdl.python.api.{EvaluatedResult, JTensor, Sample}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.transform.vision.image.ImageFeature
import com.intel.analytics.bigdl.utils.{Shape, Table}
import com.intel.analytics.zoo.common.PythonZoo
import com.intel.analytics.zoo.feature.common.Preprocessing
import com.intel.analytics.zoo.feature.image._
import com.intel.analytics.zoo.feature.text.TextSet
import com.intel.analytics.zoo.models.anomalydetection.{AnomalyDetector, FeatureLabelIndex}
import com.intel.analytics.zoo.models.common.{KerasZooModel, Ranker, ZooModel}
import com.intel.analytics.zoo.models.image.common.{ImageConfigure, ImageModel}
import com.intel.analytics.zoo.models.image.objectdetection._
import com.intel.analytics.zoo.models.image.imageclassification.{ImageClassifier, LabelReader => IMCLabelReader}
import com.intel.analytics.zoo.models.recommendation.{NeuralCF, Recommender, UserItemFeature, UserItemPrediction}
import com.intel.analytics.zoo.models.recommendation._
import com.intel.analytics.zoo.models.seq2seq.{RNNDecoder, RNNEncoder, Seq2seq}
import com.intel.analytics.zoo.models.textclassification.TextClassifier
import com.intel.analytics.zoo.models.textmatching.KNRM
import com.intel.analytics.zoo.pipeline.api.keras.layers.{Embedding, WordEmbedding}
import com.intel.analytics.zoo.pipeline.api.keras.models.KerasNet
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
import scala.reflect.ClassTag
import scala.collection.JavaConverters._
object PythonZooModel {
def ofFloat(): PythonZooModel[Float] = new PythonZooModel[Float]()
def ofDouble(): PythonZooModel[Double] = new PythonZooModel[Double]()
}
class PythonZooModel[T: ClassTag](implicit ev: TensorNumeric[T]) extends PythonZoo[T] {
def saveZooModel(
model: ZooModel[Activity, Activity, T],
path: String,
weightPath: String = null,
overWrite: Boolean = false): ZooModel[Activity, Activity, T] = {
model.saveModel(path, weightPath, overWrite)
}
def createZooTextClassifier(
classNum: Int,
embedding: Embedding[T],
sequenceLength: Int = 500,
encoder: String = "cnn",
encoderOutputDim: Int = 256,
model: AbstractModule[Activity, Activity, T]): TextClassifier[T] = {
TextClassifier[T](classNum, embedding, sequenceLength, encoder, encoderOutputDim, model)
}
def loadTextClassifier(
path: String,
weightPath: String = null): TextClassifier[T] = {
TextClassifier.loadModel(path, weightPath)
}
def textClassifierCompile(
model: TextClassifier[T],
optimizer: OptimMethod[T],
loss: Criterion[T],
metrics: JList[ValidationMethod[T]] = null): Unit = {
model.compile(optimizer, loss,
if (metrics == null) null else metrics.asScala.toList)
}
def textClassifierFit(
model: TextClassifier[T],
x: TextSet,
batchSize: Int,
nbEpoch: Int,
validationData: TextSet): Unit = {
model.fit(x, batchSize, nbEpoch, validationData)
}
def textClassifierPredict(
model: TextClassifier[T],
x: TextSet,
batchPerThread: Int): TextSet = {
model.predict(x, batchPerThread)
}
def textClassifierEvaluate(
model: TextClassifier[T],
x: TextSet,
batchSize: Int): JList[EvaluatedResult] = {
val resultArray = model.evaluate(x, batchSize)
processEvaluateResult(resultArray)
}
private def processEvaluateResult(
resultArray: Array[(ValidationResult, ValidationMethod[T])]): JList[EvaluatedResult] = {
resultArray.map { result =>
EvaluatedResult(result._1.result()._1, result._1.result()._2,
result._2.toString())
}.toList.asJava
}
def textClassifierSetCheckpoint(
model: TextClassifier[T],
path: String,
overWrite: Boolean = true): Unit = {
model.setCheckpoint(path, overWrite)
}
def textClassifierSetTensorBoard(
model: TextClassifier[T],
logDir: String,
appName: String): Unit = {
model.setTensorBoard(logDir, appName)
}
def createZooAnomalyDetector(
featureShape: JList[Int],
hiddenLayers: JList[Int],
dropouts: JList[Double],
model: AbstractModule[Activity, Activity, T]): AnomalyDetector[T] = {
new AnomalyDetector[T](Shape(featureShape.asScala.toArray),
hiddenLayers.asScala.toArray, dropouts.asScala.toArray)
.addModel(model.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]])
}
def loadAnomalyDetector(
path: String,
weightPath: String = null): AnomalyDetector[T] = {
AnomalyDetector.loadModel(path, weightPath)
}
def standardScaleDF(df: DataFrame): DataFrame = {
val fields = df.columns
com.intel.analytics.zoo.models.anomalydetection.Utils.standardScale(df, fields)
}
def unroll(dataRdd: JavaRDD[JList[Double]],
unrollLength: Int,
predictStep: Int = 1): JavaRDD[JList[String]] = {
val rdd: RDD[Array[Float]] = dataRdd.rdd.map(x => x.asScala.toArray.map(_.toFloat))
val unrolled = AnomalyDetector.unroll[Float](rdd, unrollLength, predictStep)
toUnrolledJavaRdd(unrolled)
}
private def toUnrolledJavaRdd(features: RDD[FeatureLabelIndex[Float]]): JavaRDD[JList[String]] = {
features.map(x =>
List(x.feature.map(x => x.mkString("|")).mkString(","), x.label.toString,
x.index.toString).asJava).toJavaRDD()
}
private def toAnomaliesJavaRdd(anomaliesRdd: RDD[(Double, Double, Any)]): JavaRDD[JList[Any]] = {
anomaliesRdd.map(x =>
List(x._1, x._2, x._3.asInstanceOf[Any])
.asJava).toJavaRDD()
}
def detectAnomalies(
yTruth: JavaRDD[Object],
yPredict: JavaRDD[Object],
anomalySize: Int = 5): JavaRDD[JList[Any]] = {
val out: RDD[(Double, Double, Any)] = AnomalyDetector.detectAnomalies[Double](
yTruth.rdd.map(_.asInstanceOf[Double]), yPredict.rdd.map(_.asInstanceOf[Double]), anomalySize)
toAnomaliesJavaRdd(out)
}
def zooModelSetEvaluateStatus(
model: ZooModel[Activity, Activity, T]): ZooModel[Activity, Activity, T] = {
model.setEvaluateStatus()
}
def loadObjectDetector(path: String, weightPath: String = null): ObjectDetector[T] = {
ObjectDetector.loadModel(path, weightPath)
}
def loadImageClassifier(path: String, weightPath: String = null): ImageClassifier[T] = {
ImageClassifier.loadModel(path, weightPath)
}
def readPascalLabelMap(): JMap[Int, String] = {
LabelReader.readPascalLabelMap().asJava
}
def readCocoLabelMap(): JMap[Int, String] = {
LabelReader.readCocoLabelMap().asJava
}
def readImagenetLabelMap(): JMap[Int, String] = {
IMCLabelReader.readImagenetlLabelMap().asJava
}
def imageModelPredict(model: ImageModel[T],
image: ImageSet,
config: ImageConfigure[T] = null): ImageSet = {
model.predictImageSet(image, config)
}
def getImageConfig(model: ImageModel[T]): ImageConfigure[T] = {
model.getConfig
}
def createImageConfigure(
preProcessor: Preprocessing[ImageFeature, ImageFeature],
postProcessor: Preprocessing[ImageFeature, ImageFeature],
batchPerPartition: Int,
labelMap: JMap[Int, String],
paddingParam: PaddingParam[T]): ImageConfigure[T] = {
val map = if (labelMap == null) null else labelMap.asScala.toMap
ImageConfigure(preProcessor, postProcessor, batchPerPartition, map, Option(paddingParam))
}
def createVisualizer(labelMap: JMap[Int, String], thresh: Float = 0.3f,
encoding: String): Preprocessing[ImageFeature, ImageFeature] = {
Visualizer(labelMap.asScala.toMap, thresh, encoding, Visualizer.visualized) ->
ImageBytesToMat(Visualizer.visualized) -> ImageMatToFloats(shareBuffer = false)
}
def getLabelMap(imageConfigure: ImageConfigure[T]): JMap[Int, String] = {
if (imageConfigure.labelMap == null) null else imageConfigure.labelMap.asJava
}
def createImInfo(): ImInfo = {
ImInfo()
}
def createDecodeOutput(): DecodeOutput = {
DecodeOutput()
}
def createScaleDetection(): ScaleDetection = {
ScaleDetection()
}
def createPaddingParam(): PaddingParam[T] = {
PaddingParam()
}
def createZooNeuralCF(
userCount: Int,
itemCount: Int,
numClasses: Int,
userEmbed: Int = 20,
itemEmbed: Int = 20,
hiddenLayers: JList[Int],
includeMF: Boolean = true,
mfEmbed: Int = 20,
model: AbstractModule[Activity, Activity, T]): NeuralCF[T] = {
new NeuralCF[T](userCount, itemCount, numClasses, userEmbed, itemEmbed,
hiddenLayers.asScala.toArray, includeMF, mfEmbed)
.addModel(model.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]])
}
def loadNeuralCF(
path: String,
weightPath: String = null): NeuralCF[T] = {
NeuralCF.loadModel(path, weightPath)
}
def createZooWideAndDeep(
modelType: String = "wide_n_deep",
numClasses: Int,
hiddenLayers: JList[Int],
wideBaseDims: JList[Int],
wideCrossDims: JList[Int],
indicatorDims: JList[Int],
embedInDims: JList[Int],
embedOutDims: JList[Int],
continuousCols: JList[String],
model: AbstractModule[Activity, Activity, T]): WideAndDeep[T] = {
new WideAndDeep[T](modelType,
numClasses,
wideBaseDims.asScala.toArray,
wideCrossDims.asScala.toArray,
indicatorDims.asScala.toArray,
embedInDims.asScala.toArray,
embedOutDims.asScala.toArray,
continuousCols.asScala.toArray,
hiddenLayers.asScala.toArray)
.addModel(model.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]])
}
def loadWideAndDeep(
path: String,
weightPath: String = null): WideAndDeep[T] = {
WideAndDeep.loadModel(path, weightPath)
}
def createZooSessionRecommender(
itemCount: Int,
itemEmbed: Int,
rnnHiddenLayers: JList[Int],
sessionLength: Int,
includeHistory: Boolean,
mlpHiddenLayers: JList[Int],
historyLength: Int,
model: AbstractModule[Activity, Activity, T]): SessionRecommender[T] = {
new SessionRecommender[T](itemCount, itemEmbed, rnnHiddenLayers.asScala.toArray, sessionLength,
includeHistory, mlpHiddenLayers.asScala.toArray, historyLength)
.addModel(model.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]])
}
def loadSessionRecommender(
path: String,
weightPath: String = null): SessionRecommender[T] = {
SessionRecommender.loadModel(path, weightPath)
}
def toUserItemFeatureRdd(featureRdd: JavaRDD[Array[Object]]): RDD[UserItemFeature[T]] = {
featureRdd.rdd.foreach(x =>
require(x.length == 3, "UserItemFeature should consist of userId, itemId and sample"))
featureRdd.rdd.map(x =>
UserItemFeature(x(0).asInstanceOf[Int], x(1).asInstanceOf[Int],
toJSample(x(2).asInstanceOf[Sample])))
}
def toPredictionJavaRdd(predictionRdd: RDD[UserItemPrediction]): JavaRDD[JList[Double]] = {
predictionRdd.map(x =>
List(x.userId.toDouble, x.itemId.toDouble, x.prediction.toDouble, x.probability)
.asJava).toJavaRDD()
}
def predictUserItemPair(
model: Recommender[T],
featureRdd: JavaRDD[Array[Object]]): JavaRDD[JList[Double]] = {
val predictionRdd = model.predictUserItemPair(toUserItemFeatureRdd(featureRdd))
toPredictionJavaRdd(predictionRdd)
}
def recommendForUser(
model: Recommender[T],
featureRdd: JavaRDD[Array[Object]],
maxItems: Int): JavaRDD[JList[Double]] = {
val predictionRdd = model.recommendForUser(toUserItemFeatureRdd(featureRdd), maxItems)
toPredictionJavaRdd(predictionRdd)
}
def recommendForItem(
model: Recommender[T],
featureRdd: JavaRDD[Array[Object]],
maxUsers: Int): JavaRDD[JList[Double]] = {
val predictionRdd = model.recommendForItem(toUserItemFeatureRdd(featureRdd), maxUsers)
toPredictionJavaRdd(predictionRdd)
}
def recommendForSession(
model: SessionRecommender[T],
featureRdd: JavaRDD[Sample],
maxItems: Int,
zeroBasedLabel: Boolean): JavaRDD[JList[JList[Float]]] = {
val predictionRdd: RDD[Array[(Int, Float)]] = model
.recommendForSession(toJSample(featureRdd), maxItems, zeroBasedLabel)
predictionRdd.map(x => x.toList.map(y => List(y._1.toFloat, y._2).asJava).asJava).toJavaRDD()
}
def getNegativeSamples(indexed: DataFrame): DataFrame = {
Utils.getNegativeSamples(indexed)
}
def zooModelSummary(model: ZooModel[Activity, Activity, T]): Unit = {
model.summary()
}
def zooModelPredictClasses(
module: ZooModel[Activity, Activity, T],
x: JavaRDD[Sample],
batchSize: Int = 32,
zeroBasedLabel: Boolean = true): JavaRDD[Int] = {
module.predictClasses(toJSample(x), batchSize, zeroBasedLabel).toJavaRDD()
}
def createZooKNRM(
text1Length: Int,
text2Length: Int,
vocabSize: Int,
embedSize: Int,
embedWeights: JTensor = null,
trainEmbed: Boolean = true,
kernelNum: Int = 21,
sigma: Double = 0.1,
exactSigma: Double = 0.001,
targetMode: String = "ranking",
model: AbstractModule[Activity, Activity, T]): KNRM[T] = {
KNRM[T](text1Length, text2Length, vocabSize, embedSize, toTensor(embedWeights),
trainEmbed, kernelNum, sigma, exactSigma, targetMode, model)
}
def loadKNRM(
path: String,
weightPath: String = null): KNRM[T] = {
KNRM.loadModel(path, weightPath)
}
def prepareEmbedding(
embeddingFile: String,
wordIndex: JMap[String, Int] = null,
randomizeUnknown: Boolean = false,
normalize: Boolean = false): JTensor = {
val (_, _, embedWeights) = WordEmbedding.prepareEmbedding[T](
embeddingFile, if (wordIndex != null) wordIndex.asScala.toMap else null,
randomizeUnknown, normalize)
toJTensor(embedWeights)
}
def createZooSeq2seq(encoder: RNNEncoder[T],
decoder: RNNDecoder[T],
inputShape: JList[Int],
outputShape: JList[Int],
bridge: KerasLayer[Activity, Activity, T] = null,
generator: KerasLayer[Activity, Activity, T] = null,
model: AbstractModule[Table, Tensor[T], T]): Seq2seq[T] = {
Seq2seq(encoder, decoder, toScalaShape(inputShape),
toScalaShape(outputShape), bridge, generator, model)
}
def evaluateNDCG(
ranker: Ranker[T],
x: TextSet,
k: Int,
threshold: Double): Double = {
ranker.evaluateNDCG(x, k, threshold)
}
def evaluateMAP(
ranker: Ranker[T],
x: TextSet,
threshold: Double): Double = {
ranker.evaluateMAP(x, threshold)
}
def seq2seqSetCheckpoint(model: Seq2seq[T],
path: String,
overWrite: Boolean = true): Unit = {
model.setCheckpoint(path, overWrite)
}
def loadSeq2seq(path: String,
weightPath: String = null): Seq2seq[T] = {
Seq2seq.loadModel(path, weightPath)
}
def seq2seqCompile(
model: Seq2seq[T],
optimizer: OptimMethod[T],
loss: Criterion[T],
metrics: JList[ValidationMethod[T]] = null): Unit = {
model.compile(optimizer, loss,
if (metrics == null) null else metrics.asScala.toList)
}
def seq2seqFit(model: Seq2seq[T],
x: JavaRDD[Sample],
batchSize: Int,
nbEpoch: Int,
validationData: JavaRDD[Sample] = null): Unit = {
model.fit(toJSample(x), batchSize, nbEpoch, toJSample(validationData))
}
def seq2seqInfer(model: Seq2seq[T],
input: JTensor,
startSign: JTensor,
maxSeqLen: Int = 30,
stopSign: JTensor = null,
buildOutput: KerasLayer[Tensor[T], Tensor[T], T]): JTensor = {
val result =
model.infer(toTensor(input), toTensor(startSign), maxSeqLen,
toTensor(stopSign), buildOutput)
toJTensor(result)
}
def getModule(model: KerasZooModel[Activity, Activity, T]): KerasNet[T] = {
model.model.asInstanceOf[KerasNet[T]]
}
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/models/python/PythonZooModel.scala | Scala | apache-2.0 | 16,969 |
package systems.opalia.commons.json
import play.api.libs.json._
import scala.collection.TraversableLike
import scala.util.matching.Regex
object ConstraintReadsExtension {
def max[T](maximum: T)(implicit reads: Reads[T], ord: Ordering[T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.maximum", maximum))(x => ord.gt(x, maximum))
def max[T](maximum: Option[T])(implicit reads: Reads[T], ord: Ordering[T]): Reads[T] =
maximum match {
case Some(x) => max[T](x)
case None => Reads[T](reads.reads)
}
def min[T](minimum: T)(implicit reads: Reads[T], ord: Ordering[T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.minimum", minimum))(x => ord.lt(x, minimum))
def min[T](minimum: Option[T])(implicit reads: Reads[T], ord: Ordering[T]): Reads[T] =
minimum match {
case Some(x) => min[T](x)
case None => Reads[T](reads.reads)
}
def distinct[T]()(implicit reads: Reads[T], f: T => TraversableLike[_, T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.distinct"))((x) => x.size != x.toSeq.distinct.size)
def exactLength[T](length: Int)(implicit reads: Reads[T], f: T => TraversableLike[_, T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.exact_length", length))(_.size != length)
def maxLength[T](length: Int)(implicit reads: Reads[T], f: T => TraversableLike[_, T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.maximum_length", length))(_.size > length)
def minLength[T](length: Int)(implicit reads: Reads[T], f: T => TraversableLike[_, T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.minimum_length", length))(_.size < length)
def maxStringLength[T](length: Int)(implicit reads: Reads[T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.maximum_length_as_string", length))(_.toString.length > length)
def minStringLength[T](length: Int)(implicit reads: Reads[T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.minimum_length_as_string", length))(_.toString.length < length)
def choice[T](seq: T*)(implicit reads: Reads[T]): Reads[T] =
Reads.filterNot[T](JsonValidationError("error.choice", seq: _*))(x => !seq.contains(x))
def singleLine()(implicit reads: Reads[String]): Reads[String] =
Reads.filterNot[String](JsonValidationError("error.single_line"))(_.lines.length > 1)
def trimmed()(implicit reads: Reads[String]): Reads[String] =
Reads.filterNot[String](JsonValidationError("error.trimmed"))(x => x != x.trim)
def pattern(regex: Regex)(implicit reads: Reads[String]): Reads[String] =
Reads.pattern(regex)
def pattern(regex: Option[Regex])(implicit reads: Reads[String]): Reads[String] =
regex match {
case Some(x) => Reads.pattern(x)
case None => Reads[String](reads.reads)
}
}
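// Usage sketch (illustrative, relying on the implicit play-json Reads):
// import ConstraintReadsExtension._
// JsString("abc").validate[String](maxLength[String](5)) // JsSuccess("abc")
// JsString("abcdef").validate[String](maxLength[String](5)) // JsError("error.maximum_length")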
| OpaliaSystems/commons | src/main/scala/systems/opalia/commons/json/ConstraintReadsExtension.scala | Scala | apache-2.0 | 2,827 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.calcite.rex._
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.api.common.typeinfo.{LocalTimeTypeInfo, SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.table.planner.calcite.FlinkRelBuilder
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils
import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils.isTimeInterval
import org.apache.flink.table.planner.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
import org.apache.flink.table.runtime.typeutils.LegacyLocalDateTimeTypeInfo
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
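/**
* Extracts a single date/time field from a temporal value as a LONG, e.g.
* extracting MONTH from the date 2020-03-01 yields 3 (illustrative).
*/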
case class Extract(timeIntervalUnit: PlannerExpression, temporal: PlannerExpression)
extends PlannerExpression {
override private[flink] def children: Seq[PlannerExpression] = timeIntervalUnit :: temporal :: Nil
override private[flink] def resultType: TypeInformation[_] = LONG_TYPE_INFO
override private[flink] def validateInput(): ValidationResult = {
if (!TypeInfoCheckUtils.isTemporal(temporal.resultType)) {
return ValidationFailure(s"Extract operator requires Temporal input, " +
s"but $temporal is of type ${temporal.resultType}")
}
timeIntervalUnit match {
case SymbolPlannerExpression(PlannerTimeIntervalUnit.YEAR)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.QUARTER)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.MONTH)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.WEEK)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.DAY)
if temporal.resultType == SqlTimeTypeInfo.DATE
|| temporal.resultType == SqlTimeTypeInfo.TIMESTAMP
|| temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE
|| temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
|| temporal.resultType.isInstanceOf[LegacyLocalDateTimeTypeInfo]
|| temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MILLIS
|| temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MONTHS =>
ValidationSuccess
case SymbolPlannerExpression(PlannerTimeIntervalUnit.HOUR)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.MINUTE)
| SymbolPlannerExpression(PlannerTimeIntervalUnit.SECOND)
if temporal.resultType == SqlTimeTypeInfo.TIME
|| temporal.resultType == SqlTimeTypeInfo.TIMESTAMP
|| temporal.resultType == LocalTimeTypeInfo.LOCAL_TIME
|| temporal.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
|| temporal.resultType.isInstanceOf[LegacyLocalDateTimeTypeInfo]
|| temporal.resultType == TimeIntervalTypeInfo.INTERVAL_MILLIS =>
ValidationSuccess
case _ =>
ValidationFailure(s"Extract operator does not support unit '$timeIntervalUnit' for input" +
s" of type '${temporal.resultType}'.")
}
}
override def toString: String = s"($temporal).extract($timeIntervalUnit)"
}
abstract class CurrentTimePoint(
targetType: TypeInformation[_],
local: Boolean)
extends LeafExpression {
override private[flink] def resultType: TypeInformation[_] = targetType
override private[flink] def validateInput(): ValidationResult = {
if (!TypeInfoCheckUtils.isTimePoint(targetType)) {
ValidationFailure(s"CurrentTimePoint operator requires Time Point target type, " +
s"but get $targetType.")
} else if (local && targetType == SqlTimeTypeInfo.DATE) {
ValidationFailure(s"Localized CurrentTimePoint operator requires Time or Timestamp target " +
s"type, but get $targetType.")
} else {
ValidationSuccess
}
}
override def toString: String = if (local) {
s"local$targetType()"
} else {
s"current$targetType()"
}
}
case class CurrentDate() extends CurrentTimePoint(SqlTimeTypeInfo.DATE, local = false)
case class CurrentTime() extends CurrentTimePoint(SqlTimeTypeInfo.TIME, local = false)
case class CurrentTimestamp() extends CurrentTimePoint(SqlTimeTypeInfo.TIMESTAMP, local = false)
case class LocalTime() extends CurrentTimePoint(SqlTimeTypeInfo.TIME, local = true)
case class LocalTimestamp() extends CurrentTimePoint(SqlTimeTypeInfo.TIMESTAMP, local = true)
/**
* Determines whether two anchored time intervals overlap.
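* Follows SQL OVERLAPS semantics: each interval is an anchor time point plus
* either an end point or a duration; endpoints are normalised so that s <= e,
* and the intervals overlap iff (e0 >= s1) AND (e1 >= s0).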
*/
case class TemporalOverlaps(
leftTimePoint: PlannerExpression,
leftTemporal: PlannerExpression,
rightTimePoint: PlannerExpression,
rightTemporal: PlannerExpression)
extends PlannerExpression {
override private[flink] def children: Seq[PlannerExpression] =
Seq(leftTimePoint, leftTemporal, rightTimePoint, rightTemporal)
override private[flink] def resultType: TypeInformation[_] = BOOLEAN_TYPE_INFO
override private[flink] def validateInput(): ValidationResult = {
if (!TypeInfoCheckUtils.isTimePoint(leftTimePoint.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTimePoint to be of type " +
s"Time Point, but get ${leftTimePoint.resultType}.")
}
if (!TypeInfoCheckUtils.isTimePoint(rightTimePoint.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTimePoint to be of " +
s"type Time Point, but get ${rightTimePoint.resultType}.")
}
if (leftTimePoint.resultType != rightTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTimePoint and " +
s"rightTimePoint to be of same type.")
}
// if leftTemporal is a point, it must be comparable with leftTimePoint
if (TypeInfoCheckUtils.isTimePoint(leftTemporal.resultType)) {
if (leftTemporal.resultType != leftTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTemporal and " +
s"leftTimePoint to be of same type if leftTemporal is of type Time Point.")
}
} else if (!isTimeInterval(leftTemporal.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires leftTemporal to be of " +
s"type Time Point or Time Interval.")
}
// if rightTemporal is a point, it must be comparable with rightTimePoint
if (TypeInfoCheckUtils.isTimePoint(rightTemporal.resultType)) {
if (rightTemporal.resultType != rightTimePoint.resultType) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTemporal and " +
s"rightTimePoint to be of same type if rightTemporal is of type Time Point.")
}
} else if (!isTimeInterval(rightTemporal.resultType)) {
return ValidationFailure(s"TemporalOverlaps operator requires rightTemporal to be of " +
s"type Time Point or Time Interval.")
}
ValidationSuccess
}
override def toString: String = s"temporalOverlaps(${children.mkString(", ")})"
/**
* Standard conversion of the OVERLAPS operator.
* Source: [[org.apache.calcite.sql2rel.StandardConvertletTable#convertOverlaps()]]
*/
private def convertOverlaps(
leftP: RexNode,
leftT: RexNode,
rightP: RexNode,
rightT: RexNode,
relBuilder: FlinkRelBuilder)
: RexNode = {
val convLeftT = convertOverlapsEnd(relBuilder, leftP, leftT, leftTemporal.resultType)
val convRightT = convertOverlapsEnd(relBuilder, rightP, rightT, rightTemporal.resultType)
// sort end points into start and end, such that (s0 <= e0) and (s1 <= e1).
val (s0, e0) = buildSwap(relBuilder, leftP, convLeftT)
val (s1, e1) = buildSwap(relBuilder, rightP, convRightT)
// (e0 >= s1) AND (e1 >= s0)
val leftPred = relBuilder.call(FlinkSqlOperatorTable.GREATER_THAN_OR_EQUAL, e0, s1)
val rightPred = relBuilder.call(FlinkSqlOperatorTable.GREATER_THAN_OR_EQUAL, e1, s0)
relBuilder.call(FlinkSqlOperatorTable.AND, leftPred, rightPred)
}
private def convertOverlapsEnd(
relBuilder: FlinkRelBuilder,
start: RexNode, end: RexNode,
endType: TypeInformation[_]) = {
if (isTimeInterval(endType)) {
relBuilder.call(FlinkSqlOperatorTable.DATETIME_PLUS, start, end)
} else {
end
}
}
private def buildSwap(relBuilder: FlinkRelBuilder, start: RexNode, end: RexNode) = {
val le = relBuilder.call(FlinkSqlOperatorTable.LESS_THAN_OR_EQUAL, start, end)
val l = relBuilder.call(FlinkSqlOperatorTable.CASE, le, start, end)
val r = relBuilder.call(FlinkSqlOperatorTable.CASE, le, end, start)
(l, r)
}
}
case class DateFormat(timestamp: PlannerExpression, format: PlannerExpression)
extends PlannerExpression {
override private[flink] def children = timestamp :: format :: Nil
override def toString: String = s"$timestamp.dateFormat($format)"
override private[flink] def resultType = STRING_TYPE_INFO
}
case class TimestampDiff(
timePointUnit: PlannerExpression,
timePoint1: PlannerExpression,
timePoint2: PlannerExpression)
extends PlannerExpression {
override private[flink] def children: Seq[PlannerExpression] =
timePointUnit :: timePoint1 :: timePoint2 :: Nil
override private[flink] def validateInput(): ValidationResult = {
if (!TypeInfoCheckUtils.isTimePoint(timePoint1.resultType)) {
return ValidationFailure(
s"$this requires an input time point type, " +
s"but timePoint1 is of type '${timePoint1.resultType}'.")
}
if (!TypeInfoCheckUtils.isTimePoint(timePoint2.resultType)) {
return ValidationFailure(
s"$this requires an input time point type, " +
s"but timePoint2 is of type '${timePoint2.resultType}'.")
}
timePointUnit match {
case SymbolPlannerExpression(PlannerTimePointUnit.YEAR)
| SymbolPlannerExpression(PlannerTimePointUnit.QUARTER)
| SymbolPlannerExpression(PlannerTimePointUnit.MONTH)
| SymbolPlannerExpression(PlannerTimePointUnit.WEEK)
| SymbolPlannerExpression(PlannerTimePointUnit.DAY)
| SymbolPlannerExpression(PlannerTimePointUnit.HOUR)
| SymbolPlannerExpression(PlannerTimePointUnit.MINUTE)
| SymbolPlannerExpression(PlannerTimePointUnit.SECOND)
if timePoint1.resultType == SqlTimeTypeInfo.DATE
|| timePoint1.resultType == SqlTimeTypeInfo.TIMESTAMP
|| timePoint2.resultType == SqlTimeTypeInfo.DATE
|| timePoint2.resultType == SqlTimeTypeInfo.TIMESTAMP
|| timePoint1.resultType == LocalTimeTypeInfo.LOCAL_DATE
|| timePoint1.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME
|| timePoint2.resultType == LocalTimeTypeInfo.LOCAL_DATE
|| timePoint2.resultType == LocalTimeTypeInfo.LOCAL_DATE_TIME =>
ValidationSuccess
case _ =>
ValidationFailure(s"$this operator does not support unit '$timePointUnit'" +
s" for input of type ('${timePoint1.resultType}', '${timePoint2.resultType}').")
}
}
override def toString: String = s"timestampDiff(${children.mkString(", ")})"
override private[flink] def resultType = INT_TYPE_INFO
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/time.scala | Scala | apache-2.0 | 11,895 |
package fyrie
import net.fyrie.redis._
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.bita.{ RandomScheduleHelper, Scheduler }
import akka.bita.pattern.Patterns._
import akka.util.duration._
import akka.util.Timeout
import akka.dispatch.Await
import bita.util.{ FileHelper, TestHelper }
import bita.criteria._
import bita.ScheduleOptimization._
import org.scalatest._
import akka.testkit._
import akka.testkit.TestProbe
import com.typesafe.config.ConfigFactory
/**
* Ported from net.fyrie.redis.KeysSpec
* Test: keys should fetch keys
*/
class SyncInsertSpec extends BitaTests {
override def name = "Fyrie-insert-sync"
def run {
system = ActorSystem("ActorSystem", ConfigFactory.parseString("""
akka {
event-handlers = ["akka.testkit.TestEventListener"]
loglevel = "WARNING"
actor {
default-dispatcher {
core-pool-size-min = 4
core-pool-size-factor = 2.0
throughput = 10
}
}
}
"""))
if (random) {
RandomScheduleHelper.setMaxDelay(250) // Increase the delay between messages to 250 ms
RandomScheduleHelper.setSystem(system)
}
val probe = new TestProbe(system) // Use a test probe to represent the tests.
val r = new RedisClient("localhost", 6379, RedisClientConfig(connections = 1))(system)
r.set("anshin-1", "debasish")
r.set("anshin-2", "maulindu")
val result = r.sync.keys("anshin*").size
if (result == 2) {
bugDetected = false
println(Console.GREEN + Console.BOLD+"***SUCCESS***"+Console.RESET)
} else {
bugDetected = true
println(Console.RED + Console.BOLD+"***FAILURE***"+Console.RESET)
}
}
} | Tjoene/thesis | benchmark/src/test/scala/fyrie/SyncInsertSpec.scala | Scala | gpl-2.0 | 1,865 |
object Test1 {
val macro = ???
}
object Test2 {
var macro = ???
}
object Test3 {
type macro = Int
}
package test4 {
class macro
}
object Test5 {
class macro
}
package test6 {
object macro
}
object Test7 {
object macro
}
package test8 {
trait macro
}
object Test9 {
trait macro
}
package macro {
package macro.bar {
}
}
package foo {
package macro.foo {
}
}
object Test12 {
val Some(macro) = Some(42)
macro match {
case macro => println(macro)
}
}
object Test13 {
def macro = 2
} | felixmulder/scala | test/files/neg/macro-deprecate-idents.scala | Scala | bsd-3-clause | 528 |
package org.killingbilling.junction
import java.io.{Reader, File}
import java.util.function.{Function => JFunction}
import java.util.{List => JList, ArrayList => JArrayList, Map => JMap, HashMap => JHashMap}
import javax.script.ScriptContext._
import javax.script._
import org.killingbilling.junction.utils._
import scala.beans.BeanInfo
import scala.io.{BufferedSource, Source}
import scala.util.Try
import scala.util.parsing.json.JSON
object Module {
case class WithObj[T](module: Module, obj: Option[T])
case class Context(module: Module) {
val locals = module.engine.createBindings()
val globals = createGlobals()
globals.put("global", if (module eq module.root) globals else module.root.context.globals)
private def createGlobals(): Bindings = {
val g = module.engine.createBindings()
g.put("process", Process) // global
g.put("console", module._require("console")) // global
g.put("Buffer", Buffer) // global
g.put("require", module._require)
g.put("__filename", module.filename)
g.put("__dirname", module._dir.getPath)
g.put("module", module)
g
}
private def swapGlobals(newGlobals: Bindings): Bindings = {
val oldGlobals = module.engine.getBindings(GLOBAL_SCOPE)
module.engine.setBindings(newGlobals, GLOBAL_SCOPE)
oldGlobals
}
def eval[T](source: Reader, tOpt: Option[Class[T]]): Option[T] = {
val old = swapGlobals(globals)
try {
module.singleton.initExports.eval(locals)
val exports = module.singleton.getExports.eval(locals)
globals.put("exports", exports)
module.engine.eval(source, locals) // run!
val obj = tOpt map {t =>
val exports = module.singleton.getExports.eval(locals)
module.engine.asInstanceOf[Invocable].getInterface(exports, t)
} flatMap {v => Option(v)}
obj
} finally {
swapGlobals(old)
}
}
}
case class Singleton(root: Module, engine: ScriptEngine) {
private val comp = engine.asInstanceOf[Compilable]
val initExports: CompiledScript = comp.compile("'use strict'; module.exports = {};")
val getExports: CompiledScript = comp.compile("'use strict'; module.exports")
}
}
@BeanInfo
class Module(val id: String = "[root]", parent: Option[Module] = None)
(implicit createEngine: () => ScriptEngine) {self =>
import Module._
private val singleton: Singleton = parent map {_.singleton} getOrElse Singleton(self, createEngine())
private val root = singleton.root
private val engine = singleton.engine
private lazy val context = Context(self)
private var _exports: AnyRef = _
def getExports: AnyRef = _exports
def setExports(o: AnyRef) {_exports = o}
private object _require extends JFunction[String, AnyRef] with Require {
protected def moduleWithObj[T](path: String, t: Option[Class[T]]): WithObj[T] = _resolve(path)(_dir) map {
case (true, resolved) => Option(_core.get(resolved)) map {WithObj[T](_, None)} getOrElse
_coreModule(resolved, t)
case (false, resolved) => Option(_cache.get(resolved)) map {WithObj[T](_, None)} getOrElse
_loadModule(resolved, t)
} getOrElse {
throw new RuntimeException(s"Error: Cannot find module '$path'")
}
def apply(path: String): AnyRef = moduleWithObj(path, None).module.getExports
def impl[T <: Any](path: String, t: Class[T]): T = _impl(resolve(path), t).asInstanceOf[T]
private val _impl = Memo2[String, Class[_], Any](impl0)
private def impl0(path: String, t: Class[_]): Any = moduleWithObj(path, t).obj getOrElse
{throw new RuntimeException(s"Error: Cannot implement type ${t.getName} with $path")}
def resolve(path: String): String = _resolve(path)(_dir) map {_._2} getOrElse {
throw new RuntimeException(s"Error: Cannot find module '$path'")
}
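// Resolution broadly mirrors Node.js (illustrative summary): relative and
// absolute paths are tried as files with the extensions "", ".js" and ".json",
// then as directories (package.json "main", falling back to ./index.js); bare
// names are checked against the core modules, then against node_modules
// directories walking up from the current one, then against the classpath.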
private def _resolve(path: String)(dir: File): Option[(Boolean, String)] =
if (path.startsWith(".") || path.startsWith("/")) {
val file = new File(path)
val absPath = (if (file.isAbsolute) file else new File(dir, path)).getCanonicalPath
(List("", ".js", ".json") map {ext => new File(absPath + ext)} collectFirst {
case f if f.isFile => Some(false -> f.getPath)
case f if f.isDirectory => resolveDir(f)
}).flatten
} else if (isCore(path)) Some(true -> path) else inNodeModules(path)(dir) orElse inClasspath(path)
private def resolveDir(dir: File): Option[(Boolean, String)] = {
val main = Try {
val opt = JSON.parseFull(Source.fromFile(new File(dir, "package.json")).mkString)
opt.get.asInstanceOf[Map[String, String]]("main")
}.toOption map {"./" + _} getOrElse "./index.js"
_resolve(main)(dir)
}
private def inClasspath(path: String): Option[(Boolean, String)] =
List("", ".js", ".json") map {path + _} collectFirst {
case p if getClass.getClassLoader.getResource(p) != null => false -> p
}
private def inNodeModules(path: String)(dir: File): Option[(Boolean, String)] = {
if (dir == null) None
else _resolve(new File(new File(dir, "node_modules"), path).getPath)(dir) match {
case s@Some(_) => s
case None => inNodeModules(path)(dir.getParentFile)
}
}
def getCache: JMap[String, Module] = _cache // global, map: id -> module
private def source(resolved: String): BufferedSource =
if (resolved.startsWith(".") || resolved.startsWith("/")) Source.fromFile(resolved)
else Source.fromInputStream(getClass.getClassLoader.getResourceAsStream(resolved))
private def _loadModule[T](resolved: String, t: Option[Class[T]]): WithObj[T] = {
val module = new Module(resolved, self)
_cache.put(resolved, module)
val Ext = """.*(\.\w+)$""".r
val obj: Option[T] = resolved match {
case Ext(".json") =>
module._exports = (Try {
import scala.collection.JavaConversions._
JSON.parseFull(source(resolved).mkString).get match {
case a: Map[String, AnyRef] => a: JMap[String, AnyRef]
case a: List[AnyRef] => a: JList[AnyRef]
}
} recover {
case e => throw new RuntimeException(s"JSON parse error: $resolved", e)
}).get
None
case Ext(".js") | _ =>
module.context.eval(source(resolved).bufferedReader(), t)
}
self.children.add(module)
module._loaded = true
WithObj[T](module, obj)
}
private def isCore(path: String): Boolean =
List(
"_linklist", "assert", "console", "freelist", "path", "punycode", "querystring", "sys", "url", "util"
).contains(path)
private def _coreModule[T](resolved: String, t: Option[Class[T]]): WithObj[T] = {
val module = new Module(resolved, root)
_core.put(resolved, module)
val inputStream = getClass.getClassLoader.getResourceAsStream(s"lib/$resolved.js")
val obj: Option[T] = module.context.eval(Source.fromInputStream(inputStream).bufferedReader(), t)
root.children.add(module)
module._loaded = true
WithObj[T](module, obj)
}
}
private val _cache: JMap[String, Module] = parent map {_._cache} getOrElse new JHashMap()
private val _core: JMap[String, Module] = parent map {_._core} getOrElse new JHashMap()
def getRequire: JFunction[String, AnyRef] with Require = _require
val filename: String = new File(id).getCanonicalPath
private val _dir: File = new File(filename).getParentFile
private var _loaded = false
def isLoaded = _loaded
def getParent: Module = parent getOrElse null
val children: JList[Module] = new JArrayList()
}
| KillingBilling/junction | src/main/scala/org/killingbilling/junction/Module.scala | Scala | mit | 7,734 |
package org.jetbrains.plugins.scala.debugger.evaluation
import com.intellij.openapi.command.undo.{BasicUndoableAction, UndoManager}
import com.intellij.openapi.editor.Document
import com.intellij.openapi.project.Project
import com.intellij.psi.IntentionFilterOwner.IntentionActionsFilter
import com.intellij.psi.JavaCodeFragment.{ExceptionHandler, VisibilityChecker}
import com.intellij.psi._
import com.intellij.psi.impl.source.PsiFileImpl
import com.intellij.psi.impl.source.tree.FileElement
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.testFramework.LightVirtualFile
import org.jetbrains.plugins.scala.ScalaFileType
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScReferenceExpression
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaFileImpl, ScalaPsiElementFactory}
import scala.collection.mutable.HashSet
/**
* @author Alexander Podkhalyuzin
*/
class ScalaCodeFragment(project: Project, text: String) extends {
private var vFile = new LightVirtualFile("Dummy.scala",
ScalaFileType.SCALA_FILE_TYPE, text)
private var provider = new SingleRootFileViewProvider(
PsiManager.getInstance(project), vFile, true)
} with ScalaFileImpl(provider) with JavaCodeFragment {
getViewProvider.forceCachedPsi(this)
override def getViewProvider = provider
private var thisType: PsiType = null
private var superType: PsiType = null
private var exceptionHandler: ExceptionHandler = null
private var resolveScope: GlobalSearchScope = null
private var filter: IntentionActionsFilter = null
private var imports: HashSet[String] = new HashSet
def getThisType: PsiType = thisType
def setThisType(psiType: PsiType) {thisType = psiType}
def getSuperType: PsiType = superType
def setSuperType(superType: PsiType) {this.superType = superType}
def importsToString(): String = {
imports.mkString(",")
}
def addImportsFromString(imports: String) {
this.imports ++= imports.split(',').filter(_.nonEmpty)
}
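// The two methods above round-trip the import set as a comma-separated string,
// e.g. "scala.util.Try,java.io.File" (illustrative).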
def setVisibilityChecker(checker: VisibilityChecker) {}
def getVisibilityChecker: VisibilityChecker = VisibilityChecker.EVERYTHING_VISIBLE
def setExceptionHandler(checker: ExceptionHandler) {exceptionHandler = checker}
def getExceptionHandler: ExceptionHandler = exceptionHandler
def forceResolveScope(scope: GlobalSearchScope) {resolveScope = scope}
def getForcedResolveScope: GlobalSearchScope = resolveScope
def setIntentionActionsFilter(filter: IntentionActionsFilter) {this.filter = filter}
def getIntentionActionsFilter: IntentionActionsFilter = filter
override def isScriptFile: Boolean = false
override def isScriptFile(withCashing: Boolean): Boolean = false
override def addImportForPath(path: String, ref: PsiElement, explicitly: Boolean) {
imports += path
myManager.beforeChange(false)
val project: Project = myManager.getProject
val psiDocumentManager = PsiDocumentManager.getInstance(project)
val document: Document = psiDocumentManager.getDocument(this)
UndoManager.getInstance(project).undoableActionPerformed(
new ScalaCodeFragment.ImportClassUndoableAction(path, document, imports)
)
val newRef = ref match {
case st: ScStableCodeReferenceElement if st.resolve() == null =>
Some(ScalaPsiElementFactory.createReferenceFromText(st.getText, st.getParent, st))
case expr: ScReferenceExpression if expr.resolve() == null =>
Some(ScalaPsiElementFactory.createExpressionFromText(expr.getText, expr).asInstanceOf[ScReferenceExpression])
case _ => None
}
newRef match {
case Some(r) if r.resolve() != null => ref.replace(r)
case _ =>
}
}
override def processDeclarations(processor: PsiScopeProcessor, state: ResolveState,
lastParent: PsiElement, place: PsiElement): Boolean = {
for (qName <- imports) {
val imp = ScalaPsiElementFactory.createImportFromTextWithContext("import _root_." + qName, this, this)
if (!imp.processDeclarations(processor, state, lastParent, place)) return false
}
if (!super.processDeclarations(processor, state, lastParent, place)) return false
true
}
override def clone(): PsiFileImpl = {
val clone = cloneImpl(calcTreeElement.clone.asInstanceOf[FileElement]).asInstanceOf[ScalaCodeFragment]
clone.imports = this.imports
clone.vFile = new LightVirtualFile("Dummy.scala",
ScalaFileType.SCALA_FILE_TYPE, getText)
clone.provider = new SingleRootFileViewProvider(
PsiManager.getInstance(project), clone.vFile, true)
clone.provider.forceCachedPsi(clone)
clone
}
}
object ScalaCodeFragment {
private class ImportClassUndoableAction(path: String, document: Document,
imports: HashSet[String]) extends BasicUndoableAction {
def undo() {
imports -= path
}
def redo() {
imports += path
}
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaCodeFragment.scala | Scala | apache-2.0 | 5,041 |
package name.denyago.yasc.chat
import akka.actor.{Actor, ActorRef, Terminated}
import name.denyago.yasc.chat.events.{MessagePosted, UserJoined}
/**
* Actor representing a chat room where users may join, leave and
* exchange messages.
*/
class ChatRoom extends Actor {
override def receive = withUsers(Map.empty)
def withUsers(users: Map[ActorRef, String]): Receive = {
case UserJoined(name, actorRef) =>
context.watch(sender())
context.become(withUsers(users.updated(actorRef, name)))
case Terminated(actorRef) =>
context.unwatch(actorRef)
context.become(withUsers(users.filterKeys(_ != actorRef)))
case MessagePosted(text) =>
users.keys.foreach(_ ! MessagePosted(text))
}
}
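// Minimal usage sketch (actor names are illustrative):
// val room = system.actorOf(Props[ChatRoom], "room")
// room ! UserJoined("alice", aliceRef)
// room ! MessagePosted("hi") // delivered to every joined user's ActorRef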
| denyago/yet-another-simple-chat | src/main/scala/name/denyago/yasc/chat/ChatRoom.scala | Scala | mit | 734 |
object query {
def run(fn: String) = {
val s = new Snippet()
s(fn)
}
def main(args: Array[String]): Unit = {
if (args.length != 1) {
println("usage: query <filename>")
} else {
run(args(0))
}
}
}
| RomanTsegelskyi/lms-truffle | src/out/query.scala | Scala | gpl-2.0 | 236 |
package dotty.tools.dotc
package transform
import TreeTransforms._
import core.Denotations._
import core.SymDenotations._
import core.Contexts._
import core.Types._
import ast.Trees._
import ast.tpd.{Apply, Tree, cpy}
import dotty.tools.dotc.ast.tpd
import scala.collection.mutable
import dotty.tools.dotc._
import core._
import Contexts._
import Symbols._
import Decorators._
import NameOps._
import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, TreeTransformer, TreeTransform}
import dotty.tools.dotc.ast.Trees._
import dotty.tools.dotc.ast.{untpd, tpd}
import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Types.MethodType
import dotty.tools.dotc.core.Names.Name
import dotty.runtime.LazyVals
import scala.collection.mutable.ListBuffer
import dotty.tools.dotc.core.Denotations.SingleDenotation
import dotty.tools.dotc.core.SymDenotations.SymDenotation
import StdNames._
import Phases.Phase
/** Replace member references as follows:
*
* - `x != y` for != in class Any becomes `!(x == y)` with == in class Any.
* - `x.##` for ## in NullClass becomes `0`
* - `x.##` for ## in Any becomes calls to ScalaRunTime.hash,
* using the most precise overload available
* - `x.getClass` for getClass in primitives becomes `x.getClass` with getClass in class Object.
*/
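// Illustrative effect of the rewrites above (a sketch, not part of the phase):
// x != y ==> !(x == y)
// (x: Null).## ==> 0
// (x: Int).## ==> ScalaRunTime.hash(x) (most precise overload)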
class InterceptedMethods extends MiniPhaseTransform {
thisTransform =>
import tpd._
override def phaseName: String = "intercepted"
private var primitiveGetClassMethods: Set[Symbol] = _
var Any_## : Symbol = _ // cached for performance reasons
/** perform context-dependant initialization */
override def prepareForUnit(tree: Tree)(implicit ctx: Context) = {
this.Any_## = defn.Any_##
primitiveGetClassMethods = Set[Symbol]() ++ defn.ScalaValueClasses.map(x => x.requiredMethod(nme.getClass_))
this
}
// this should be removed once we can guarantee that ## will get an Apply node
override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (tree.symbol.isTerm && (Any_## eq tree.symbol.asTerm)) {
val rewrite = poundPoundValue(tree.qualifier)
ctx.log(s"$phaseName rewrote $tree to $rewrite")
rewrite
}
else tree
}
private def poundPoundValue(tree: Tree)(implicit ctx: Context) = {
val s = tree.tpe.widen.typeSymbol
if (s == defn.NullClass) Literal(Constant(0))
else {
// Since we are past typer, we need to avoid creating trees carrying
// overloaded types. This logic is custom (and technically incomplete,
// although serviceable) for def hash. What is really needed is for
// the overloading logic presently hidden away in a few different
// places to be properly exposed so we can just call "resolveOverload"
// after typer. Until then:
def alts = defn.ScalaRuntimeModule.info.member(nme.hash_)
// if tpe is a primitive value type, alt1 will match on the exact value,
// taking into account that null.asInstanceOf[Int] == 0
def alt1 = alts.suchThat(_.info.firstParamTypes.head =:= tree.tpe.widen)
// otherwise alt2 will match. alt2 also knows how to handle 'null' runtime value
def alt2 = defn.ScalaRuntimeModule.info.member(nme.hash_)
.suchThat(_.info.firstParamTypes.head.typeSymbol == defn.AnyClass)
Ident((if (defn.ScalaNumericValueClasses contains s) alt1 else alt2).termRef)
.appliedTo(tree)
}
}
override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
def unknown = {
assert(false, s"The symbol '${tree.fun.symbol.showLocated}' was intercepted but didn't match any cases, " +
s"that means the intercepted methods set doesn't match the code")
tree
}
lazy val Select(qual, _) = tree.fun
val Any_## = this.Any_##
val Any_!= = defn.Any_!=
val rewrite: Tree = tree.fun.symbol match {
case Any_## =>
poundPoundValue(qual)
case Any_!= =>
qual.select(defn.Any_==).appliedToArgs(tree.args).select(defn.Boolean_!)
/*
/* else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// todo: this is needed to support value classes
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass,
List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
}*/
*/
case t if primitiveGetClassMethods.contains(t) =>
// if we got here then we're trying to send a primitive getClass method to either
// a) an Any, in which case Object_getClass works because Any erases to object. Or
//
// b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
// of the refinement is a primitive and another is AnyRef. In that case
// we get a primitive form of _getClass trying to target a boxed value
// so we need to replace that method name with Object_getClass to get correct behavior.
// See SI-5568.
qual.selectWithSig(defn.Any_getClass).appliedToNone
case _ =>
tree
}
ctx.log(s"$phaseName rewrote $tree to $rewrite")
rewrite
}
}
| yusuke2255/dotty | src/dotty/tools/dotc/transform/InterceptedMethods.scala | Scala | bsd-3-clause | 5,275 |
package dotty.tools.dotc.decompiler
import java.nio.file.Files
import dotty.tools.dotc
import dotty.tools.dotc.core.Contexts._
/** Main class of the `dotc -decompiler` decompiler.
*
* @author Nicolas Stucki
*/
object Main extends dotc.Driver {
override protected def newCompiler(implicit ctx: Context): dotc.Compiler = {
assert(ctx.settings.fromTasty.value)
if (!ctx.settings.outputDir.isDefault)
Files.deleteIfExists(ctx.settings.outputDir.value.fileNamed("decompiled.scala").jpath)
new TASTYDecompiler
}
override def setup(args0: Array[String], rootCtx: Context): (List[String], Context) = {
var args = args0.filter(a => a != "-decompile")
if (!args.contains("-from-tasty")) args = "-from-tasty" +: args
if (args.contains("-d")) args = "-color:never" +: args
super.setup(args, rootCtx)
}
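// e.g. (illustrative) the arguments `-decompile -d out Foo.tasty` become
// `-color:never -from-tasty -d out Foo.tasty` before the normal setup runs.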
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/decompiler/Main.scala | Scala | apache-2.0 | 841 |
package org.photon.common.persist
import com.twitter.util.Future
import scala.collection.mutable
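// Write-through, in-memory cache stacked on a Cachable[T]: find() consults the
// map before the underlying store, persist() of a brand-new model and remove()
// keep the map in sync, and hydrate() pre-loads every row up front.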
trait Caching[T <: Model] extends Cachable[T] {
val cache = mutable.Map.empty[T#PrimaryKey, T]
def hydrate(): Future[Unit] = all map { res =>
for (o <- res) cache(o.id) = o
}
def find(predicate: T => Boolean): Option[T] = cache.values.find(predicate)
def filter(predicate: T => Boolean): Iterable[T] = cache.values.filter(predicate)
abstract override def find(id: T#PrimaryKey): Future[T] =
cache.get(id) match {
case Some(o) => Future(o)
case None => super.find(id) map { o =>
cache(id) = o
o
}
}
abstract override def persist(o: T): Future[T] = {
val fut = super.persist(o)
if (o.state == ModelState.None)
fut map { o =>
cache(o.id) = o
o
}
else
fut
}
abstract override def remove(o: T): Future[T] = super.remove(o) map { o =>
cache -= o.id
o
}
}
| Blackrush/photon-persist | src/org/photon/common/persist/Caching.scala | Scala | mit | 974 |
package com.socrata.balboa.impl
import com.socrata.balboa.metrics.Metric.RecordType
import com.socrata.balboa.metrics.impl.JsonMessage
import com.socrata.balboa.metrics.measurements.combining._
import com.socrata.balboa.metrics.{Metric, Metrics}
import com.socrata.metrics.MetricQueue
import com.socrata.metrics.components.{BufferComponent, BufferItem, MessageQueueComponent}
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.collection.JavaConverters._
// Not Thread Safe; access must be synchronized by caller (MetricDequeuer)
trait HashMapBufferComponent extends BufferComponent {
self: MessageQueueComponent =>
private val log = LoggerFactory.getLogger(this.getClass)
class Buffer extends BufferLike {
val bufferMap = mutable.HashMap.empty[String, BufferItem]
val messageQueue = self.MessageQueue()
def add(item:BufferItem): Unit = {
val timeslice = timeBoundary(item.timestamp)
val bufferKey = item.entityId + ":" + timeslice
val buffered = bufferMap.get(bufferKey)
val consolidatedBufferItem = buffered match {
case None => BufferItem(item.entityId, item.metrics, timeslice)
case Some(bi) => BufferItem(item.entityId, consolidate(bi.metrics, item.metrics), timeslice)
}
bufferMap += (bufferKey -> consolidatedBufferItem)
}
def consolidate(metrics1:Metrics, metrics2:Metrics): Metrics = {
val unionKeys = metrics1.keySet().asScala union metrics2.keySet().asScala
val metricsComb = new Metrics()
for {
key <- unionKeys
} yield {
val metric1 = Option(metrics1.get(key))
val metric2 = Option(metrics2.get(key))
(metric1, metric2) match {
case (None, None) =>
case (None, Some(m2)) => metricsComb.put(key, m2)
case (Some(m1), None) => metricsComb.put(key, m1)
case (Some(m1), Some(m2)) =>
if (m1.getType != m2.getType) {
throw new IllegalArgumentException("Cannot combine differently typed metrics")
} else {
m1.getType match {
case RecordType.ABSOLUTE =>
metricsComb.put(key, new Metric(
RecordType.ABSOLUTE,
(new Absolution).combine(m1.getValue, m2.getValue)))
case RecordType.AGGREGATE =>
metricsComb.put(key, new Metric(
RecordType.AGGREGATE,
(new Summation).combine(m1.getValue, m2.getValue)))
}
}
}
}
metricsComb
}
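// e.g. consolidating AGGREGATE(views = 3) with AGGREGATE(views = 4) yields
// views = 7 via Summation, while ABSOLUTE metrics are merged with the
// Absolution combiner instead of being summed (illustrative).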
def flush(): Int = {
val size = bufferMap.size
for {
item <- bufferMap.values
} yield {
val msg = asMessage(item)
messageQueue.send(msg)
}
bufferMap.clear
size
}
private def asMessage(item:BufferItem) = {
val msg = new JsonMessage()
msg.setEntityId(item.entityId)
msg.setMetrics(item.metrics)
msg.setTimestamp(item.timestamp)
msg
}
def timeBoundary(timestamp:Long): Long = timestamp - (timestamp % MetricQueue.AGGREGATE_GRANULARITY)
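// e.g. with a 15-minute AGGREGATE_GRANULARITY (illustrative value), every
// timestamp in [12:00, 12:15) is floored to 12:00, so items from the same
// window consolidate under one buffer key.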
def start(): Unit = {
messageQueue.start()
}
def stop(): Unit = {
if (bufferMap.nonEmpty) {
log.info("Flushing " + bufferMap.size + " metrics from BalboaClient buffer before shutting down ...")
flush()
}
messageQueue.stop()
}
def size(): Int = bufferMap.size
}
// scalastyle:off method.name
def Buffer(): Buffer = new Buffer()
}
| socrata-platform/balboa | balboa-client/src/main/scala/com/socrata/balboa/impl/HashMapBuffer.scala | Scala | apache-2.0 | 3,526 |
package app
import util.{LockUtil, CollaboratorsAuthenticator, JGitUtil, ReferrerAuthenticator, Notifier, Keys}
import util.Directory._
import util.Implicits._
import util.ControlUtil._
import service._
import org.eclipse.jgit.api.Git
import jp.sf.amateras.scalatra.forms._
import org.eclipse.jgit.transport.RefSpec
import scala.collection.JavaConverters._
import org.eclipse.jgit.lib.{ObjectId, CommitBuilder, PersonIdent}
import service.IssuesService._
import service.PullRequestService._
import util.JGitUtil.DiffInfo
import service.RepositoryService.RepositoryTreeNode
import util.JGitUtil.CommitInfo
import org.slf4j.LoggerFactory
import org.eclipse.jgit.merge.MergeStrategy
import org.eclipse.jgit.errors.NoMergeBaseException
import service.WebHookService.WebHookPayload
class PullRequestsController extends PullRequestsControllerBase
with RepositoryService with AccountService with IssuesService with PullRequestService with MilestonesService with LabelsService
with ActivityService with WebHookService with ReferrerAuthenticator with CollaboratorsAuthenticator
trait PullRequestsControllerBase extends ControllerBase {
self: RepositoryService with AccountService with IssuesService with MilestonesService with LabelsService
with ActivityService with PullRequestService with WebHookService with ReferrerAuthenticator with CollaboratorsAuthenticator =>
private val logger = LoggerFactory.getLogger(classOf[PullRequestsControllerBase])
val pullRequestForm = mapping(
"title" -> trim(label("Title" , text(required, maxlength(100)))),
"content" -> trim(label("Content", optional(text()))),
"targetUserName" -> trim(text(required, maxlength(100))),
"targetBranch" -> trim(text(required, maxlength(100))),
"requestUserName" -> trim(text(required, maxlength(100))),
"requestRepositoryName" -> trim(text(required, maxlength(100))),
"requestBranch" -> trim(text(required, maxlength(100))),
"commitIdFrom" -> trim(text(required, maxlength(40))),
"commitIdTo" -> trim(text(required, maxlength(40)))
)(PullRequestForm.apply)
val mergeForm = mapping(
"message" -> trim(label("Message", text(required)))
)(MergeForm.apply)
case class PullRequestForm(
title: String,
content: Option[String],
targetUserName: String,
targetBranch: String,
requestUserName: String,
requestRepositoryName: String,
requestBranch: String,
commitIdFrom: String,
commitIdTo: String)
case class MergeForm(message: String)
get("/:owner/:repository/pulls")(referrersOnly { repository =>
searchPullRequests(None, repository)
})
get("/:owner/:repository/pulls/:userName")(referrersOnly { repository =>
searchPullRequests(Some(params("userName")), repository)
})
get("/:owner/:repository/pull/:id")(referrersOnly { repository =>
params("id").toIntOpt.flatMap{ issueId =>
val owner = repository.owner
val name = repository.name
getPullRequest(owner, name, issueId) map { case(issue, pullreq) =>
using(Git.open(getRepositoryDir(owner, name))){ git =>
val (commits, diffs) =
getRequestCompareInfo(owner, name, pullreq.commitIdFrom, owner, name, pullreq.commitIdTo)
pulls.html.pullreq(
issue, pullreq,
getComments(owner, name, issueId),
getIssueLabels(owner, name, issueId),
(getCollaborators(owner, name) ::: (if(getAccountByUserName(owner).get.isGroupAccount) Nil else List(owner))).sorted,
getMilestonesWithIssueCount(owner, name),
getLabels(owner, name),
commits,
diffs,
hasWritePermission(owner, name, context.loginAccount),
repository)
}
}
} getOrElse NotFound
})
ajaxGet("/:owner/:repository/pull/:id/mergeguide")(collaboratorsOnly { repository =>
params("id").toIntOpt.flatMap{ issueId =>
val owner = repository.owner
val name = repository.name
getPullRequest(owner, name, issueId) map { case(issue, pullreq) =>
pulls.html.mergeguide(
checkConflictInPullRequest(owner, name, pullreq.branch, pullreq.requestUserName, name, pullreq.requestBranch, issueId),
pullreq,
s"${context.baseUrl}/git/${pullreq.requestUserName}/${pullreq.requestRepositoryName}.git")
}
} getOrElse NotFound
})
get("/:owner/:repository/pull/:id/delete/*")(collaboratorsOnly { repository =>
params("id").toIntOpt.map { issueId =>
val branchName = multiParams("splat").head
val userName = context.loginAccount.get.userName
if(repository.repository.defaultBranch != branchName){
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
git.branchDelete().setForce(true).setBranchNames(branchName).call()
recordDeleteBranchActivity(repository.owner, repository.name, userName, branchName)
}
}
createComment(repository.owner, repository.name, userName, issueId, branchName, "delete_branch")
redirect(s"/${repository.owner}/${repository.name}/pull/${issueId}")
} getOrElse NotFound
})
post("/:owner/:repository/pull/:id/merge", mergeForm)(collaboratorsOnly { (form, repository) =>
params("id").toIntOpt.flatMap { issueId =>
val owner = repository.owner
val name = repository.name
LockUtil.lock(s"${owner}/${name}/merge"){
getPullRequest(owner, name, issueId).map { case (issue, pullreq) =>
using(Git.open(getRepositoryDir(owner, name))) { git =>
// mark issue as merged and close.
val loginAccount = context.loginAccount.get
createComment(owner, name, loginAccount.userName, issueId, form.message, "merge")
createComment(owner, name, loginAccount.userName, issueId, "Close", "close")
updateClosed(owner, name, issueId, true)
// record activity
recordMergeActivity(owner, name, loginAccount.userName, issueId, form.message)
// merge
val mergeBaseRefName = s"refs/heads/${pullreq.branch}"
val merger = MergeStrategy.RECURSIVE.newMerger(git.getRepository, true)
val mergeBaseTip = git.getRepository.resolve(mergeBaseRefName)
val mergeTip = git.getRepository.resolve(s"refs/pull/${issueId}/head")
val conflicted = try {
!merger.merge(mergeBaseTip, mergeTip)
} catch {
case e: NoMergeBaseException => true
}
if (conflicted) {
throw new RuntimeException("This pull request can't merge automatically.")
}
// creates merge commit
val mergeCommit = new CommitBuilder()
mergeCommit.setTreeId(merger.getResultTreeId)
mergeCommit.setParentIds(Array[ObjectId](mergeBaseTip, mergeTip): _*)
val personIdent = new PersonIdent(loginAccount.fullName, loginAccount.mailAddress)
mergeCommit.setAuthor(personIdent)
mergeCommit.setCommitter(personIdent)
mergeCommit.setMessage(s"Merge pull request #${issueId} from ${pullreq.requestUserName}/${pullreq.requestRepositoryName}\n\n" +
form.message)
// insertObject and got mergeCommit Object Id
val inserter = git.getRepository.newObjectInserter
val mergeCommitId = inserter.insert(mergeCommit)
inserter.flush()
inserter.release()
// update refs
val refUpdate = git.getRepository.updateRef(mergeBaseRefName)
refUpdate.setNewObjectId(mergeCommitId)
refUpdate.setForceUpdate(false)
refUpdate.setRefLogIdent(personIdent)
refUpdate.setRefLogMessage("merged", true)
refUpdate.update()
val (commits, _) = getRequestCompareInfo(owner, name, pullreq.commitIdFrom,
pullreq.requestUserName, pullreq.requestRepositoryName, pullreq.commitIdTo)
// close issue by content of pull request
val defaultBranch = getRepository(owner, name, context.baseUrl).get.repository.defaultBranch
if(pullreq.branch == defaultBranch){
commits.flatten.foreach { commit =>
closeIssuesFromMessage(commit.fullMessage, loginAccount.userName, owner, name)
}
issue.content match {
case Some(content) => closeIssuesFromMessage(content, loginAccount.userName, owner, name)
case _ =>
}
closeIssuesFromMessage(form.message, loginAccount.userName, owner, name)
}
// call web hook
getWebHookURLs(owner, name) match {
case webHookURLs if(webHookURLs.nonEmpty) =>
for(ownerAccount <- getAccountByUserName(owner)){
callWebHook(owner, name, webHookURLs,
WebHookPayload(git, loginAccount, mergeBaseRefName, repository, commits.flatten.toList, ownerAccount))
}
case _ =>
}
// notifications
Notifier().toNotify(repository, issueId, "merge"){
Notifier.msgStatus(s"${context.baseUrl}/${owner}/${name}/pull/${issueId}")
}
redirect(s"/${owner}/${name}/pull/${issueId}")
}
}
}
} getOrElse NotFound
})
get("/:owner/:repository/compare")(referrersOnly { forkedRepository =>
(forkedRepository.repository.originUserName, forkedRepository.repository.originRepositoryName) match {
case (Some(originUserName), Some(originRepositoryName)) => {
getRepository(originUserName, originRepositoryName, context.baseUrl).map { originRepository =>
using(
Git.open(getRepositoryDir(originUserName, originRepositoryName)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ (oldGit, newGit) =>
val oldBranch = JGitUtil.getDefaultBranch(oldGit, originRepository).get._2
val newBranch = JGitUtil.getDefaultBranch(newGit, forkedRepository).get._2
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}/compare/${originUserName}:${oldBranch}...${newBranch}")
}
} getOrElse NotFound
}
case _ => {
using(Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))){ git =>
JGitUtil.getDefaultBranch(git, forkedRepository).map { case (_, defaultBranch) =>
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}/compare/${defaultBranch}...${defaultBranch}")
} getOrElse {
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}")
}
}
}
}
})
get("/:owner/:repository/compare/*...*")(referrersOnly { forkedRepository =>
val Seq(origin, forked) = multiParams("splat")
      val (originOwner, tmpOriginBranch) = parseCompareIdentifier(origin, forkedRepository.owner)
      val (forkedOwner, tmpForkedBranch) = parseCompareIdentifier(forked, forkedRepository.owner)
(for(
originRepositoryName <- if(originOwner == forkedOwner){
Some(forkedRepository.name)
} else {
forkedRepository.repository.originRepositoryName.orElse {
getForkedRepositories(forkedRepository.owner, forkedRepository.name).find(_._1 == originOwner).map(_._2)
}
};
originRepository <- getRepository(originOwner, originRepositoryName, context.baseUrl)
) yield {
using(
Git.open(getRepositoryDir(originRepository.owner, originRepository.name)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ case (oldGit, newGit) =>
val originBranch = JGitUtil.getDefaultBranch(oldGit, originRepository, tmpOriginBranch).get._2
val forkedBranch = JGitUtil.getDefaultBranch(newGit, forkedRepository, tmpForkedBranch).get._2
val forkedId = JGitUtil.getForkedCommitId(oldGit, newGit,
originRepository.owner, originRepository.name, originBranch,
forkedRepository.owner, forkedRepository.name, forkedBranch)
val oldId = oldGit.getRepository.resolve(forkedId)
val newId = newGit.getRepository.resolve(forkedBranch)
val (commits, diffs) = getRequestCompareInfo(
originRepository.owner, originRepository.name, oldId.getName,
forkedRepository.owner, forkedRepository.name, newId.getName)
pulls.html.compare(
commits,
diffs,
(forkedRepository.repository.originUserName, forkedRepository.repository.originRepositoryName) match {
case (Some(userName), Some(repositoryName)) => (userName, repositoryName) :: getForkedRepositories(userName, repositoryName)
case _ => (forkedRepository.owner, forkedRepository.name) :: getForkedRepositories(forkedRepository.owner, forkedRepository.name)
},
originBranch,
forkedBranch,
oldId.getName,
newId.getName,
forkedRepository,
originRepository,
forkedRepository,
hasWritePermission(forkedRepository.owner, forkedRepository.name, context.loginAccount))
}
}) getOrElse NotFound
})
ajaxGet("/:owner/:repository/compare/*...*/mergecheck")(collaboratorsOnly { forkedRepository =>
val Seq(origin, forked) = multiParams("splat")
      val (originOwner, tmpOriginBranch) = parseCompareIdentifier(origin, forkedRepository.owner)
      val (forkedOwner, tmpForkedBranch) = parseCompareIdentifier(forked, forkedRepository.owner)
(for(
originRepositoryName <- if(originOwner == forkedOwner){
Some(forkedRepository.name)
} else {
forkedRepository.repository.originRepositoryName.orElse {
getForkedRepositories(forkedRepository.owner, forkedRepository.name).find(_._1 == originOwner).map(_._2)
}
};
originRepository <- getRepository(originOwner, originRepositoryName, context.baseUrl)
) yield {
using(
Git.open(getRepositoryDir(originRepository.owner, originRepository.name)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ case (oldGit, newGit) =>
val originBranch = JGitUtil.getDefaultBranch(oldGit, originRepository, tmpOriginBranch).get._2
val forkedBranch = JGitUtil.getDefaultBranch(newGit, forkedRepository, tmpForkedBranch).get._2
pulls.html.mergecheck(
checkConflict(originRepository.owner, originRepository.name, originBranch,
forkedRepository.owner, forkedRepository.name, forkedBranch))
}
}) getOrElse NotFound
})
post("/:owner/:repository/pulls/new", pullRequestForm)(referrersOnly { (form, repository) =>
val loginUserName = context.loginAccount.get.userName
val issueId = createIssue(
owner = repository.owner,
repository = repository.name,
loginUser = loginUserName,
title = form.title,
content = form.content,
assignedUserName = None,
milestoneId = None,
isPullRequest = true)
createPullRequest(
originUserName = repository.owner,
originRepositoryName = repository.name,
issueId = issueId,
originBranch = form.targetBranch,
requestUserName = form.requestUserName,
requestRepositoryName = form.requestRepositoryName,
requestBranch = form.requestBranch,
commitIdFrom = form.commitIdFrom,
commitIdTo = form.commitIdTo)
// fetch requested branch
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
git.fetch
.setRemote(getRepositoryDir(form.requestUserName, form.requestRepositoryName).toURI.toString)
.setRefSpecs(new RefSpec(s"refs/heads/${form.requestBranch}:refs/pull/${issueId}/head"))
.call
}
// record activity
recordPullRequestActivity(repository.owner, repository.name, loginUserName, issueId, form.title)
// notifications
Notifier().toNotify(repository, issueId, form.content.getOrElse("")){
Notifier.msgPullRequest(s"${context.baseUrl}/${repository.owner}/${repository.name}/pull/${issueId}")
}
redirect(s"/${repository.owner}/${repository.name}/pull/${issueId}")
})
/**
   * Checks whether merging the given branches would cause a conflict. Returns true if it would.
*/
private def checkConflict(userName: String, repositoryName: String, branch: String,
requestUserName: String, requestRepositoryName: String, requestBranch: String): Boolean = {
LockUtil.lock(s"${userName}/${repositoryName}/merge-check"){
using(Git.open(getRepositoryDir(requestUserName, requestRepositoryName))) { git =>
val remoteRefName = s"refs/heads/${branch}"
val tmpRefName = s"refs/merge-check/${userName}/${branch}"
val refSpec = new RefSpec(s"${remoteRefName}:${tmpRefName}").setForceUpdate(true)
try {
// fetch objects from origin repository branch
git.fetch
.setRemote(getRepositoryDir(userName, repositoryName).toURI.toString)
.setRefSpecs(refSpec)
.call
// merge conflict check
val merger = MergeStrategy.RECURSIVE.newMerger(git.getRepository, true)
val mergeBaseTip = git.getRepository.resolve(s"refs/heads/${requestBranch}")
val mergeTip = git.getRepository.resolve(tmpRefName)
try {
!merger.merge(mergeBaseTip, mergeTip)
} catch {
case e: NoMergeBaseException => true
}
} finally {
val refUpdate = git.getRepository.updateRef(refSpec.getDestination)
refUpdate.setForceUpdate(true)
refUpdate.delete()
}
}
}
}
/**
   * Checks whether merging within the pull request would cause a conflict. Returns true if it would.
*/
private def checkConflictInPullRequest(userName: String, repositoryName: String, branch: String,
requestUserName: String, requestRepositoryName: String, requestBranch: String,
issueId: Int): Boolean = {
LockUtil.lock(s"${userName}/${repositoryName}/merge") {
using(Git.open(getRepositoryDir(userName, repositoryName))) { git =>
// merge
val merger = MergeStrategy.RECURSIVE.newMerger(git.getRepository, true)
val mergeBaseTip = git.getRepository.resolve(s"refs/heads/${branch}")
val mergeTip = git.getRepository.resolve(s"refs/pull/${issueId}/head")
try {
!merger.merge(mergeBaseTip, mergeTip)
} catch {
case e: NoMergeBaseException => true
}
}
}
}
/**
* Parses branch identifier and extracts owner and branch name as tuple.
*
* - "owner:branch" to ("owner", "branch")
* - "branch" to ("defaultOwner", "branch")
*/
  private def parseCompareIdentifier(value: String, defaultOwner: String): (String, String) =
if(value.contains(':')){
val array = value.split(":")
(array(0), array(1))
} else {
(defaultOwner, value)
}
private def getRequestCompareInfo(userName: String, repositoryName: String, branch: String,
requestUserName: String, requestRepositoryName: String, requestCommitId: String): (Seq[Seq[CommitInfo]], Seq[DiffInfo]) =
using(
Git.open(getRepositoryDir(userName, repositoryName)),
Git.open(getRepositoryDir(requestUserName, requestRepositoryName))
){ (oldGit, newGit) =>
val oldId = oldGit.getRepository.resolve(branch)
val newId = newGit.getRepository.resolve(requestCommitId)
val commits = newGit.log.addRange(oldId, newId).call.iterator.asScala.map { revCommit =>
new CommitInfo(revCommit)
}.toList.splitWith { (commit1, commit2) =>
view.helpers.date(commit1.time) == view.helpers.date(commit2.time)
}
val diffs = JGitUtil.getDiffs(newGit, oldId.getName, newId.getName, true)
(commits, diffs)
}
private def searchPullRequests(userName: Option[String], repository: RepositoryService.RepositoryInfo) =
defining(repository.owner, repository.name){ case (owner, repoName) =>
val filterUser = userName.map { x => Map("created_by" -> x) } getOrElse Map("all" -> "")
val page = IssueSearchCondition.page(request)
val sessionKey = Keys.Session.Pulls(owner, repoName)
// retrieve search condition
val condition = session.putAndGet(sessionKey,
if(request.hasQueryString) IssueSearchCondition(request)
else session.getAs[IssueSearchCondition](sessionKey).getOrElse(IssueSearchCondition())
)
pulls.html.list(
searchIssue(condition, filterUser, true, (page - 1) * PullRequestLimit, PullRequestLimit, owner -> repoName),
getPullRequestCountGroupByUser(condition.state == "closed", owner, Some(repoName)),
userName,
page,
countIssue(condition.copy(state = "open" ), filterUser, true, owner -> repoName),
countIssue(condition.copy(state = "closed"), filterUser, true, owner -> repoName),
countIssue(condition, Map.empty, true, owner -> repoName),
condition,
repository,
hasWritePermission(owner, repoName, context.loginAccount))
}
}
| Muscipular/gitbucket | src/main/scala/app/PullRequestsController.scala | Scala | apache-2.0 | 21,563 |
package org.hotsextra.matchmaking
package webinterface
package servestrings
import org.http4s.headers.`Content-Type`
import org.http4s.MediaType
import org.http4s.EntityEncoder._
object Css {
val site = """
|body {
| color: #666;
| font-family: "Open Sans", sans-serif;
| font-size: 14px;
| line-height: 1.75em;
|}
|
|table {
| text-align: right;
|}
|
|th {
| text-align: center;
|}
|
|#found {
| float:left;
| width:40%;
|}
|
|#found ul {
| list-style-type: none;
|}
|
|#waiting {
| float:right;
| width:40%;
|}""".stripMargin
def cssEncoder = stringEncoder.withContentType(`Content-Type`(MediaType.`text/css`))
} | martijnhoekstra/hotsextra-matchmaker | src/main/scala/org/hotsextra/matchmaking/webinterface/servestrings/Css.scala | Scala | agpl-3.0 | 691 |
package io.gatling.tcp
import java.nio.charset.Charset
case object TcpProtocolBuilderAddressStep {
def address(address: String) = TcpProtocolBuilderPortStep(address)
}
case class TcpProtocolBuilderPortStep(address: String) {
def port(port: Int) = TcpProtocolBuilder(address, port, None)
}
case class TcpProtocolBuilder(address: String, port: Int, framer: Option[TcpFramer]) {
def lengthBased(offset: Int, length: Int, adjust: Int, strip: Int) = {
TcpProtocolBuilder(address, port, Some(LengthBasedTcpFramer(offset, length, adjust, strip)))
}
def lengthBased(length: Int): TcpProtocolBuilder = lengthBased(0, length, 0, length)
def delimiterBased(delimiters: String, strip: Boolean, charset: String = "UTF-8") = {
TcpProtocolBuilder(address, port, Some(DelimiterBasedTcpFramer(delimiters.getBytes(Charset.forName(charset)), strip)))
}
def protobufVarint = TcpProtocolBuilder(address, port, Some(ProtobufVarint32TcpFramer))
def build() = new TcpProtocol(address = address, port = port, framer = framer getOrElse(LengthBasedTcpFramer(0, 4, 0, 4)))
}
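// Usage sketch (illustrative; the host, port and delimiter are assumed values):
//   val protocol = TcpProtocolBuilderAddressStep
//     .address("localhost")
//     .port(9090)
//     .delimiterBased("\n", strip = true)
//     .build()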
| snripa/gatling-tcp-extensions | src/main/scala/io/gatling/tcp/TcpProtocolBuilder.scala | Scala | mit | 1,076 |
/*
* Copyright Β© 2014 TU Berlin (emma@dima.tu-berlin.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package lib.ml.optimization.loss
import lib.linalg.DVector
import lib.ml.LDPoint
// FIXME: does not compile with `@emma.lib`
object hinge extends Loss {
override def apply[ID](w: DVector, x: LDPoint[ID, Double]) = ???
override def gradient[ID](w: DVector, x: LDPoint[ID, Double]) = ???
}
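// Reference sketch of the hinge loss on plain arrays (illustrative only: it avoids
// emma's DVector/LDPoint accessors, which are not shown here, and assumes labels in {-1, +1}).
// loss(w, x, y) = max(0, 1 - y * (w . x)); subgradient = -y * x on a violated margin, else 0.
private[loss] object hingeSketch {
  private def dot(a: Array[Double], b: Array[Double]): Double =
    a.zip(b).map { case (u, v) => u * v }.sum

  def loss(w: Array[Double], x: Array[Double], y: Double): Double =
    math.max(0.0, 1.0 - y * dot(w, x))

  def gradient(w: Array[Double], x: Array[Double], y: Double): Array[Double] =
    if (y * dot(w, x) < 1.0) x.map(-y * _) else Array.fill(x.length)(0.0)
}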
| emmalanguage/emma | emma-lib/src/main/scala/org/emmalanguage/lib/ml/optimization/loss/hinge.scala | Scala | apache-2.0 | 942 |
/*
* The MIT License
*
* Copyright 2016 @claydonkey (Anthony Campbell).
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package breeze.linalg.functions
import breeze.linalg.{ SparseVector, DenseVector }
import org.scalatest.FunSuite
import breeze.linalg._
import breeze.math._
import java.text._
import DenseMatrix.canMapValues
class schurTest extends FunSuite {
test("Schur Test Random Double Matrix") {
val RndM = DenseMatrix.rand(4, 4)
val (u1, q1) = schur(RndM)
val ans = (q1 * u1 * q1.t).mapValues((i) => ((i.real * 100000.0).round) / 100000.0)
assert(ans == RndM.mapValues((i) => ((i * 100000.0).round) / 100000.0))
}
test("Schur Test Random Complex Matrix") {
val RndR = DenseMatrix.rand(4, 4)
val RndI = DenseMatrix.rand(4, 4)
val C = DenseMatrix.tabulate[Complex](RndI.cols, RndI.rows)((i, j) => Complex(RndI(i, j), RndI(i, j)))
val (u1, q1) = schur(C)
val ans = (q1 * u1 * q1.t).mapValues((i) => (Complex((((i.real * 100000.0).round) / 100000.0), (((i.imag * 100000.0).round) / 100000.0))))
assert(ans == C.mapValues((i) => (Complex((((i.real * 100000.0).round) / 100000.0), (((i.imag * 100000.0).round) / 100000.0)))))
}
} | claydonkey/breeze | math/src/test/scala/breeze/linalg/functions/schurTest.scala | Scala | apache-2.0 | 2,225 |
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.Play
import play.api.test._
import play.api.test.Helpers._
object authUtility {
private def makeDigest: String = {
val login = Play.current.configuration.getString("auth.username").getOrElse("admin")
val password = Play.current.configuration.getString("auth.password").getOrElse("admin")
new sun.misc.BASE64Encoder().encode((login + ":" + password).mkString.getBytes)
}
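  // Example: with the default credentials "admin"/"admin" this yields "YWRtaW46YWRtaW4=",
  // so makeHeader below produces ("Authorization", "Basic YWRtaW46YWRtaW4=").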
def makeHeader: (String, String) = ("Authorization", s"Basic $makeDigest")
}
@RunWith(classOf[JUnitRunner])
class ApplicationSpec extends Specification {
"Application" should {
"send 404 on a bad request" in new WithApplication {
val notFound = route(FakeRequest(GET, "/boum")).get
status(notFound) must equalTo(NOT_FOUND)
contentType(notFound) must beSome.which(_ == "text/html")
contentAsString(notFound) must contain ("404")
}
"redirect to list of urls" in new WithApplication {
val home = route(FakeRequest(GET, "/")).get
status(home) must equalTo(SEE_OTHER)
}
"admin dashboard should be protected by auth" in new WithApplication {
val dashboard = route(FakeRequest(GET, "/admin")).get
status(dashboard) must equalTo(UNAUTHORIZED)
headers(dashboard).get("WWW-Authenticate").get must contain ("whosmad")
}
"admin dashboard should unlock with good login and password" in new WithApplication {
val dashboard = route(FakeRequest(GET, "/admin").withHeaders(authUtility.makeHeader)).get
status(dashboard) must equalTo(OK)
contentType(dashboard) must beSome.which(_ == "text/html")
contentAsString(dashboard) must contain ("Shitty URL shorter")
}
}
}
| Rydgel/Shorty | test/ApplicationSpec.scala | Scala | mit | 1,758 |
package repositories.general
import models.Club
trait BaseClubRepository extends BaseRepository[Club] {
}
| Tannheuser/DeutschAktiv | app/repositories/general/BaseClubRepository.scala | Scala | mit | 109 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.log4j.Level
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Ascending, AttributeReference, Literal, SortOrder}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.plans.Inner
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types.IntegerType
class ResolveHintsSuite extends AnalysisTest {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("invalid hints should be ignored") {
checkAnalysis(
UnresolvedHint("some_random_hint_that_does_not_exist", Seq("TaBlE"), table("TaBlE")),
testRelation,
caseSensitive = false)
}
test("case-sensitive or insensitive parameters") {
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("TaBlE"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("TaBlE"), table("TaBlE")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = true)
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table"), table("TaBlE")),
testRelation,
caseSensitive = true)
}
test("multiple broadcast hint aliases") {
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table", "table2"), table("table").join(table("table2"))),
Join(ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
ResolvedHint(testRelation2, HintInfo(strategy = Some(BROADCAST))),
Inner, None, JoinHint.NONE),
caseSensitive = false)
}
test("do not traverse past existing broadcast hints") {
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table"),
ResolvedHint(table("table").where('a > 1), HintInfo(strategy = Some(BROADCAST)))),
ResolvedHint(testRelation.where('a > 1), HintInfo(strategy = Some(BROADCAST))).analyze,
caseSensitive = false)
}
test("should work for subqueries") {
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("tableAlias"), table("table").as("tableAlias")),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("tableAlias"), table("table").subquery('tableAlias)),
ResolvedHint(testRelation, HintInfo(strategy = Some(BROADCAST))),
caseSensitive = false)
// Negative case: if the alias doesn't match, don't match the original table name.
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table"), table("table").as("tableAlias")),
testRelation,
caseSensitive = false)
}
test("do not traverse past subquery alias") {
checkAnalysis(
UnresolvedHint("MAPJOIN", Seq("table"), table("table").where('a > 1).subquery('tableAlias)),
testRelation.where('a > 1).analyze,
caseSensitive = false)
}
test("should work for CTE") {
checkAnalysis(
CatalystSqlParser.parsePlan(
"""
|WITH ctetable AS (SELECT * FROM table WHERE a > 1)
|SELECT /*+ BROADCAST(ctetable) */ * FROM ctetable
""".stripMargin
),
ResolvedHint(testRelation.where('a > 1).select('a), HintInfo(strategy = Some(BROADCAST)))
.select('a).analyze,
caseSensitive = false)
}
test("should not traverse down CTE") {
checkAnalysis(
CatalystSqlParser.parsePlan(
"""
|WITH ctetable AS (SELECT * FROM table WHERE a > 1)
|SELECT /*+ BROADCAST(table) */ * FROM ctetable
""".stripMargin
),
testRelation.where('a > 1).select('a).select('a).analyze,
caseSensitive = false)
}
test("coalesce and repartition hint") {
checkAnalysis(
UnresolvedHint("COALESCE", Seq(Literal(10)), table("TaBlE")),
Repartition(numPartitions = 10, shuffle = false, child = testRelation))
checkAnalysis(
UnresolvedHint("coalesce", Seq(Literal(20)), table("TaBlE")),
Repartition(numPartitions = 20, shuffle = false, child = testRelation))
checkAnalysis(
UnresolvedHint("REPARTITION", Seq(Literal(100)), table("TaBlE")),
Repartition(numPartitions = 100, shuffle = true, child = testRelation))
checkAnalysis(
UnresolvedHint("RePARTITion", Seq(Literal(200)), table("TaBlE")),
Repartition(numPartitions = 200, shuffle = true, child = testRelation))
val errMsg = "COALESCE Hint expects a partition number as a parameter"
assertAnalysisError(
UnresolvedHint("COALESCE", Seq.empty, table("TaBlE")),
Seq(errMsg))
assertAnalysisError(
UnresolvedHint("COALESCE", Seq(Literal(10), Literal(false)), table("TaBlE")),
Seq(errMsg))
assertAnalysisError(
UnresolvedHint("COALESCE", Seq(Literal(1.0)), table("TaBlE")),
Seq(errMsg))
checkAnalysis(
UnresolvedHint("RePartition", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(Seq(AttributeReference("a", IntegerType)()), testRelation, 10))
checkAnalysis(
UnresolvedHint("REPARTITION", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(Seq(AttributeReference("a", IntegerType)()), testRelation, 10))
checkAnalysis(
UnresolvedHint("REPARTITION", Seq(UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(AttributeReference("a", IntegerType)()), testRelation, conf.numShufflePartitions))
val e = intercept[IllegalArgumentException] {
checkAnalysis(
UnresolvedHint("REPARTITION",
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)),
table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)), testRelation, 10)
)
}
    assert(e.getMessage.contains("For range partitioning use REPARTITION_BY_RANGE instead"))
checkAnalysis(
UnresolvedHint(
"REPARTITION_BY_RANGE", Seq(Literal(10), UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)), testRelation, 10))
checkAnalysis(
UnresolvedHint(
"REPARTITION_BY_RANGE", Seq(UnresolvedAttribute("a")), table("TaBlE")),
RepartitionByExpression(
Seq(SortOrder(AttributeReference("a", IntegerType)(), Ascending)),
testRelation, conf.numShufflePartitions))
val errMsg2 = "REPARTITION Hint parameter should include columns, but"
assertAnalysisError(
UnresolvedHint("REPARTITION", Seq(Literal(true)), table("TaBlE")),
Seq(errMsg2))
assertAnalysisError(
UnresolvedHint("REPARTITION",
Seq(Literal(1.0), AttributeReference("a", IntegerType)()),
table("TaBlE")),
Seq(errMsg2))
val errMsg3 = "REPARTITION_BY_RANGE Hint parameter should include columns, but"
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(1.0), AttributeReference("a", IntegerType)()),
table("TaBlE")),
Seq(errMsg3))
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(10), Literal(10)),
table("TaBlE")),
Seq(errMsg3))
assertAnalysisError(
UnresolvedHint("REPARTITION_BY_RANGE",
Seq(Literal(10), Literal(10), UnresolvedAttribute("a")),
table("TaBlE")),
Seq(errMsg3))
}
test("log warnings for invalid hints") {
val logAppender = new LogAppender("invalid hints")
withLogAppender(logAppender) {
checkAnalysis(
UnresolvedHint("unknown_hint", Seq("TaBlE"), table("TaBlE")),
testRelation,
caseSensitive = false)
}
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
e.getRenderedMessage.contains("Unrecognized hint: unknown_hint")))
}
test("SPARK-30003: Do not throw stack overflow exception in non-root unknown hint resolution") {
checkAnalysis(
Project(testRelation.output, UnresolvedHint("unknown_hint", Seq("TaBlE"), table("TaBlE"))),
Project(testRelation.output, testRelation),
caseSensitive = false)
}
}
| goldmedal/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ResolveHintsSuite.scala | Scala | apache-2.0 | 9,279 |
package grid.engine
object Environment extends Environment
trait Environment {
def SGE_ROOT: String = sys.env("SGE_ROOT")
def SGE_CELL: String = sys.env("SGE_CELL")
def SGE_QMASTER_PORT: String = sys.env("SGE_QMASTER_PORT")
}
| idiv-biodiversity/grid-engine-tools | src/main/scala/Environment.scala | Scala | unlicense | 235 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy.sessions
sealed abstract class SessionState(val state: String, val isActive: Boolean) {
override def toString: String = state
}
class FinishedSessionState(
override val state: String,
override val isActive: Boolean,
val time: Long
) extends SessionState(state, isActive)
object SessionState {
def apply(s: String): SessionState = s match {
case "not_started" => NotStarted
case "starting" => Starting
case "recovering" => Recovering
case "idle" => Idle
case "running" => Running
case "busy" => Busy
case "shutting_down" => ShuttingDown
case "error" => Error()
case "dead" => Dead()
case "killed" => Killed()
case "success" => Success()
case _ => throw new IllegalArgumentException(s"Illegal session state: $s")
}
object NotStarted extends SessionState("not_started", true)
object Starting extends SessionState("starting", true)
object Recovering extends SessionState("recovering", true)
object Idle extends SessionState("idle", true)
object Running extends SessionState("running", true)
object Busy extends SessionState("busy", true)
object ShuttingDown extends SessionState("shutting_down", false)
case class Killed(override val time: Long = System.nanoTime()) extends
FinishedSessionState("killed", false, time)
case class Error(override val time: Long = System.nanoTime()) extends
FinishedSessionState("error", true, time)
case class Dead(override val time: Long = System.nanoTime()) extends
FinishedSessionState("dead", false, time)
case class Success(override val time: Long = System.nanoTime()) extends
FinishedSessionState("success", false, time)
}
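// Usage sketch (illustrative):
//   SessionState("idle") == SessionState.Idle  // parsing round-trips to the singleton states
//   SessionState("killed") match {
//     case f: FinishedSessionState => f.time   // termination timestamp in nanoseconds
//     case s                       => s.isActive
//   }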
| ajbozarth/incubator-livy | core/src/main/scala/org/apache/livy/sessions/SessionState.scala | Scala | apache-2.0 | 2,495 |
/**
* Copyright 2013 Alex Jones
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package controllers
import com.typesafe.scalalogging.StrictLogging
import play.api.http.HeaderNames
import play.api.libs.concurrent.Execution.Implicits._
import play.api.mvc.{Action, ActionBuilder, ControllerComponents}
import play.api.mvc.Results.NotModified
import scala.concurrent.Future
/**
 * A trait for controllers that allows ETag headers to be queried and a 304 Not Modified to be returned if
* the resource has not changed.
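 *
 * A minimal usage sketch (illustrative; the controller name and tag value are assumed):
 * {{{
 * class VersionedController(cc: ControllerComponents) extends AbstractController(cc) with Etag {
 *   def resource(version: String) = ETag(version) {
 *     Action { Ok(s"resource at version $version") }
 *   }
 * }
 * }}}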
*/
trait Etag extends StrictLogging {
def ETag[A](calculatedETag: String)(action: Action[A]): Action[A] =
Action.async(action.parser) { implicit request =>
val quotedETag = '"' + calculatedETag + '"'
val modified = request.headers.get(HeaderNames.IF_NONE_MATCH) match {
case None =>
logger.info(s"No ${HeaderNames.IF_NONE_MATCH} header was sent for resource $request.uri")
true
case Some(etag) =>
logger.info(s"Header ${HeaderNames.IF_NONE_MATCH} for $request.uri has value $etag")
etag != quotedETag
}
val response =
if (modified) {
logger.info(s"Request $request.uri has been modified.")
action(request)
} else {
logger.info(s"Request $request.uri has not been modified.")
Future.successful(NotModified)
}
response.map(_.withHeaders(HeaderNames.ETAG -> quotedETag))
}
} | unclealex72/west-ham-calendar | app/controllers/Etag.scala | Scala | apache-2.0 | 2,189 |
package ecommerce.invoicing.app
import akka.cluster.Cluster
import akka.kernel.Bootable
import ecommerce.invoicing.{Department, Invoice}
import pl.newicom.dddd.cluster._
import pl.newicom.dddd.office.OfficeFactory.office
import pl.newicom.dddd.process.CommandReceptorSupport.{CommandReception, receptor}
class InvoicingBackendApp extends Bootable with InvoicingBackendConfiguration {
override def startup(): Unit = {
Cluster(system).registerOnMemberUp {
office[Invoice]
CommandReception(Department)(implicit o => receptor)
}
}
} | pawelkaczor/ddd-leaven-akka-v2 | invoicing/write-back/src/main/scala/ecommerce/invoicing/app/InvoicingBackendApp.scala | Scala | mit | 556 |
package com.hackfmi.askthor.domain
import spray.http.{StatusCodes, StatusCode}
/**
* Service failure description.
*
* @param message error message
* @param errorType error type
*/
case class Failure(message: String, errorType: FailureType.Value) {
/**
   * Returns the HTTP status code corresponding to this failure's type.
*
* @return HTTP status code value
*/
def getStatusCode: StatusCode = {
    this.errorType match {
case FailureType.BadRequest => StatusCodes.BadRequest
case FailureType.NotFound => StatusCodes.NotFound
case FailureType.Duplicate => StatusCodes.Forbidden
case FailureType.DatabaseFailure => StatusCodes.InternalServerError
case _ => StatusCodes.InternalServerError
}
}
}
/**
* Allowed failure types.
*/
object FailureType extends Enumeration {
type Failure = Value
val BadRequest = Value("bad_request")
val NotFound = Value("not_found")
val Duplicate = Value("entity_exists")
val DatabaseFailure = Value("database_error")
val InternalError = Value("internal_error")
}
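// Example (illustrative): mapping a domain failure to its HTTP status.
//   Failure("entity not found", FailureType.NotFound).getStatusCode   // => 404 Not Found
//   Failure("entity exists", FailureType.Duplicate).getStatusCode     // => 403 Forbidden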
| jorshua/ActionRestService | askthor/src/main/scala/com/hackfmi/askthor/domain/Failure.scala | Scala | mit | 1,097 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
package plugins
import Def.Setting
import Keys._
import librarymanagement._
/**
 * An experimental plugin that adds the ability to resolve Giter8 templates.
*/
object Giter8TemplatePlugin extends AutoPlugin {
override def requires = CorePlugin
override def trigger = allRequirements
override lazy val globalSettings: Seq[Setting[_]] =
Seq(
templateResolverInfos +=
TemplateResolverInfo(
ModuleID(
"org.scala-sbt.sbt-giter8-resolver",
"sbt-giter8-resolver",
"0.13.1"
) cross CrossVersion.binary,
"sbtgiter8resolver.Giter8TemplateResolver"
)
)
}
| sbt/sbt | main/src/main/scala/sbt/plugins/Giter8TemplatePlugin.scala | Scala | apache-2.0 | 815 |
package com.nulabinc.backlog.migration.common.service
import javax.inject.Inject
import com.nulabinc.backlog.migration.common.client.BacklogAPIClient
import com.nulabinc.backlog4j.Resolution
import scala.jdk.CollectionConverters._
/**
* @author
* uchida
*/
class ResolutionServiceImpl @Inject() (backlog: BacklogAPIClient) extends ResolutionService {
override def allResolutions(): Seq[Resolution] =
backlog.getResolutions.asScala.toSeq
}
| nulab/backlog-migration-common | core/src/main/scala/com/nulabinc/backlog/migration/common/service/ResolutionServiceImpl.scala | Scala | mit | 456 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.util.concurrent.CountDownLatch
import scala.concurrent.Future
import scala.util.Random
import scala.util.control.NonFatal
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.Eventually._
import org.scalatest.concurrent.PatienceConfiguration.Timeout
import org.scalatest.time.Span
import org.scalatest.time.SpanSugar._
import org.apache.spark.SparkException
import org.apache.spark.sql.{AnalysisException, Dataset}
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.util.BlockingSource
import org.apache.spark.util.Utils
class StreamingQueryManagerSuite extends StreamTest with BeforeAndAfter {
import AwaitTerminationTester._
import testImplicits._
override val streamingTimeout = 20.seconds
before {
assert(spark.streams.active.isEmpty)
spark.streams.resetTerminated()
}
after {
assert(spark.streams.active.isEmpty)
spark.streams.resetTerminated()
}
testQuietly("listing") {
val (m1, ds1) = makeDataset
val (m2, ds2) = makeDataset
val (m3, ds3) = makeDataset
withQueriesOn(ds1, ds2, ds3) { queries =>
require(queries.size === 3)
assert(spark.streams.active.toSet === queries.toSet)
val (q1, q2, q3) = (queries(0), queries(1), queries(2))
assert(spark.streams.get(q1.id).eq(q1))
assert(spark.streams.get(q2.id).eq(q2))
assert(spark.streams.get(q3.id).eq(q3))
assert(spark.streams.get(java.util.UUID.randomUUID()) === null) // non-existent id
q1.stop()
assert(spark.streams.active.toSet === Set(q2, q3))
assert(spark.streams.get(q1.id) === null)
assert(spark.streams.get(q2.id).eq(q2))
m2.addData(0) // q2 should terminate with error
eventually(Timeout(streamingTimeout)) {
require(!q2.isActive)
require(q2.exception.isDefined)
}
assert(spark.streams.get(q2.id) === null)
assert(spark.streams.active.toSet === Set(q3))
}
}
testQuietly("awaitAnyTermination without timeout and resetTerminated") {
val datasets = Seq.fill(5)(makeDataset._2)
withQueriesOn(datasets: _*) { queries =>
require(queries.size === datasets.size)
assert(spark.streams.active.toSet === queries.toSet)
// awaitAnyTermination should be blocking
testAwaitAnyTermination(ExpectBlocked)
// Stop a query asynchronously and see if it is reported through awaitAnyTermination
val q1 = stopRandomQueryAsync(stopAfter = 100 milliseconds, withError = false)
testAwaitAnyTermination(ExpectNotBlocked)
require(!q1.isActive) // should be inactive by the time the prev awaitAnyTerm returned
// All subsequent calls to awaitAnyTermination should be non-blocking
testAwaitAnyTermination(ExpectNotBlocked)
// Resetting termination should make awaitAnyTermination() blocking again
spark.streams.resetTerminated()
testAwaitAnyTermination(ExpectBlocked)
// Terminate a query asynchronously with exception and see awaitAnyTermination throws
// the exception
val q2 = stopRandomQueryAsync(100 milliseconds, withError = true)
testAwaitAnyTermination(ExpectException[SparkException])
require(!q2.isActive) // should be inactive by the time the prev awaitAnyTerm returned
// All subsequent calls to awaitAnyTermination should throw the exception
testAwaitAnyTermination(ExpectException[SparkException])
// Resetting termination should make awaitAnyTermination() blocking again
spark.streams.resetTerminated()
testAwaitAnyTermination(ExpectBlocked)
// Terminate multiple queries, one with failure and see whether awaitAnyTermination throws
// the exception
val q3 = stopRandomQueryAsync(10 milliseconds, withError = false)
testAwaitAnyTermination(ExpectNotBlocked)
require(!q3.isActive)
val q4 = stopRandomQueryAsync(10 milliseconds, withError = true)
eventually(Timeout(streamingTimeout)) { require(!q4.isActive) }
// After q4 terminates with exception, awaitAnyTerm should start throwing exception
testAwaitAnyTermination(ExpectException[SparkException])
}
}
testQuietly("awaitAnyTermination with timeout and resetTerminated") {
val datasets = Seq.fill(6)(makeDataset._2)
withQueriesOn(datasets: _*) { queries =>
require(queries.size === datasets.size)
assert(spark.streams.active.toSet === queries.toSet)
// awaitAnyTermination should be blocking or non-blocking depending on timeout values
testAwaitAnyTermination(
ExpectBlocked,
awaitTimeout = 4 seconds,
expectedReturnedValue = false,
testBehaviorFor = 2 seconds)
testAwaitAnyTermination(
ExpectNotBlocked,
awaitTimeout = 50 milliseconds,
expectedReturnedValue = false,
testBehaviorFor = 1 second)
// Stop a query asynchronously within timeout and awaitAnyTerm should be unblocked
val q1 = stopRandomQueryAsync(stopAfter = 100 milliseconds, withError = false)
testAwaitAnyTermination(
ExpectNotBlocked,
awaitTimeout = 2 seconds,
expectedReturnedValue = true,
testBehaviorFor = 4 seconds)
require(!q1.isActive) // should be inactive by the time the prev awaitAnyTerm returned
// All subsequent calls to awaitAnyTermination should be non-blocking even if timeout is high
testAwaitAnyTermination(
ExpectNotBlocked, awaitTimeout = 4 seconds, expectedReturnedValue = true)
// Resetting termination should make awaitAnyTermination() blocking again
spark.streams.resetTerminated()
testAwaitAnyTermination(
ExpectBlocked,
awaitTimeout = 4 seconds,
expectedReturnedValue = false,
testBehaviorFor = 1 second)
// Terminate a query asynchronously with exception within timeout, awaitAnyTermination should
// throws the exception
val q2 = stopRandomQueryAsync(100 milliseconds, withError = true)
testAwaitAnyTermination(
ExpectException[SparkException],
awaitTimeout = 4 seconds,
testBehaviorFor = 6 seconds)
require(!q2.isActive) // should be inactive by the time the prev awaitAnyTerm returned
// All subsequent calls to awaitAnyTermination should throw the exception
testAwaitAnyTermination(
ExpectException[SparkException],
awaitTimeout = 2 seconds,
testBehaviorFor = 4 seconds)
// Terminate a query asynchronously outside the timeout, awaitAnyTerm should be blocked
spark.streams.resetTerminated()
val q3 = stopRandomQueryAsync(2 seconds, withError = true)
testAwaitAnyTermination(
ExpectNotBlocked,
awaitTimeout = 100 milliseconds,
expectedReturnedValue = false,
testBehaviorFor = 4 seconds)
// After that query is stopped, awaitAnyTerm should throw exception
eventually(Timeout(streamingTimeout)) { require(!q3.isActive) } // wait for query to stop
testAwaitAnyTermination(
ExpectException[SparkException],
awaitTimeout = 100 milliseconds,
testBehaviorFor = 4 seconds)
// Terminate multiple queries, one with failure and see whether awaitAnyTermination throws
// the exception
spark.streams.resetTerminated()
val q4 = stopRandomQueryAsync(10 milliseconds, withError = false)
testAwaitAnyTermination(
ExpectNotBlocked, awaitTimeout = 2 seconds, expectedReturnedValue = true)
require(!q4.isActive)
val q5 = stopRandomQueryAsync(10 milliseconds, withError = true)
eventually(Timeout(streamingTimeout)) { require(!q5.isActive) }
// After q5 terminates with exception, awaitAnyTerm should start throwing exception
testAwaitAnyTermination(ExpectException[SparkException], awaitTimeout = 2 seconds)
}
}
test("SPARK-18811: Source resolution should not block main thread") {
failAfter(streamingTimeout) {
BlockingSource.latch = new CountDownLatch(1)
withTempDir { tempDir =>
// if source resolution was happening on the main thread, it would block the start call,
// now it should only be blocking the stream execution thread
val sq = spark.readStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.load()
.writeStream
.format("org.apache.spark.sql.streaming.util.BlockingSource")
.option("checkpointLocation", tempDir.toString)
.start()
eventually(Timeout(streamingTimeout)) {
assert(sq.status.message.contains("Initializing sources"))
}
BlockingSource.latch.countDown()
sq.stop()
}
}
}
/** Run a body of code by defining a query on each dataset */
private def withQueriesOn(datasets: Dataset[_]*)(body: Seq[StreamingQuery] => Unit): Unit = {
failAfter(streamingTimeout) {
val queries = withClue("Error starting queries") {
datasets.zipWithIndex.map { case (ds, i) =>
var query: StreamingQuery = null
try {
val df = ds.toDF
val metadataRoot =
Utils.createTempDir(namePrefix = "streaming.checkpoint").getCanonicalPath
query =
df.writeStream
.format("memory")
.queryName(s"query$i")
.option("checkpointLocation", metadataRoot)
.outputMode("append")
.start()
} catch {
case NonFatal(e) =>
if (query != null) query.stop()
throw e
}
query
}
}
try {
body(queries)
} finally {
queries.foreach(_.stop())
}
}
}
/** Test the behavior of awaitAnyTermination */
private def testAwaitAnyTermination(
expectedBehavior: ExpectedBehavior,
expectedReturnedValue: Boolean = false,
awaitTimeout: Span = null,
testBehaviorFor: Span = 4 seconds
): Unit = {
def awaitTermFunc(): Unit = {
if (awaitTimeout != null && awaitTimeout.toMillis > 0) {
val returnedValue = spark.streams.awaitAnyTermination(awaitTimeout.toMillis)
assert(returnedValue === expectedReturnedValue, "Returned value does not match expected")
} else {
spark.streams.awaitAnyTermination()
}
}
AwaitTerminationTester.test(expectedBehavior, awaitTermFunc, testBehaviorFor)
}
/** Stop a random active query either with `stop()` or with an error */
private def stopRandomQueryAsync(stopAfter: Span, withError: Boolean): StreamingQuery = {
import scala.concurrent.ExecutionContext.Implicits.global
val activeQueries = spark.streams.active
val queryToStop = activeQueries(Random.nextInt(activeQueries.length))
Future {
Thread.sleep(stopAfter.toMillis)
if (withError) {
logDebug(s"Terminating query ${queryToStop.name} with error")
queryToStop.asInstanceOf[StreamingQueryWrapper].streamingQuery.logicalPlan.collect {
case StreamingExecutionRelation(source, _) =>
source.asInstanceOf[MemoryStream[Int]].addData(0)
}
} else {
logDebug(s"Stopping query ${queryToStop.name}")
queryToStop.stop()
}
}
queryToStop
}
private def makeDataset: (MemoryStream[Int], Dataset[Int]) = {
val inputData = MemoryStream[Int]
val mapped = inputData.toDS.map(6 / _)
(inputData, mapped)
}
}
| bOOm-X/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala | Scala | apache-2.0 | 12,360 |
package models
import slick.session.Database
import slick.driver.ExtendedProfile
import play.api.db.DB
import play.api.Application
import fixtures._
import play.Logger.{ debug, info, warn, error }
/**
 * Uses the Cake pattern to inject the proper database driver.
*/
class DAL(override val profile: ExtendedProfile)
extends ProjectComponent
with SprintComponent
with StoryComponent
with TaskComponent
with UserComponent
with Profile {
import profile.simple._
private def ddl = { Projects.ddl ++ Users.ddl ++ Sprints.ddl ++ Stories.ddl ++ Tasks.ddl }
private def fixtures = Seq(UsersFixture, ProjectsFixture, SprintsFixture, StoriesFixture, TasksFixture)
/**
* Create the database schema
*/
def create(implicit session: Session): Unit = {
info("Creating Database Schema:" + ddl.createStatements.fold()(_ + "\\n" + _))
ddl.create
// Insert test data
fixtures foreach (_.createFixtures)
}
/**
* Delete the database schema
*/
def drop(implicit session: Session): Unit = {
info("Dropping Database Schema:" + ddl.dropStatements.fold()(_ + "\\n" + _))
ddl.drop
}
}
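/**
 * Example (illustrative): the Cake pattern above lets a test bind an in-memory driver, e.g.
 *
 *   val dal = new DAL(scala.slick.driver.H2Driver)
 *   Database.forURL("jdbc:h2:mem:test", driver = "org.h2.Driver") withSession { implicit s =>
 *     dal.create
 *   }
 *
 * `H2Driver` is one ExtendedProfile; any Slick driver can be injected the same way.
 */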
import play.api.Play.current
/**
* Glue that binds the database driver to the DAL
*/
object AppDB extends SlickDriven {
lazy val database = getDb
lazy val dal = getDal
} | soupytwist/knit | app/models/DAL.scala | Scala | gpl-3.0 | 1,299 |
package de.berlin.arzt.ml
import java.io.IOException
import java.nio.file.Paths
import org.apache.spark.{ SparkContext, SparkConf }
import scala.io.Codec
import scala.util.{ Try, Success }
import MovieLens._
/**
* Created by realdocx on 18.11.15.
*/
object Main {
val pre = "http://files.grouplens.org/datasets/movielens/ml-100k"
val uItem = s"$pre/u.item"
val uData = s"$pre/u.data"
val modelPath = Paths.get(s"./model.bin")
def computeModel()(implicit context: SparkContext) = {
implicit val codec = Codec.ISO8859
val lines = getLinesFromUrl(uData)
val ratings = movieLensRatings(lines)
val seen = ratedMovies(ratings)
//the data size can be reduced here
//.filter { rating => rating.product <= 400 && rating.user <= 400 }
val uItemLines = getLinesFromUrl(uItem)
val idxToName = movieNames(uItemLines)
val n = ratings.map(_.user).max + 1
val m = ratings.map(_.product).max + 1
val normalizer = ratings.count().toDouble
val rank = 10
    val λ = 0.01
    val ε = 0.001
val (y, rated, unrated) = createMatrices(n, m, ratings)
println(
"""Enter a number to choose the matrix factorization implementation used for collaborative filtering:
|1: Gradient Descent (own implementation)
|2: Alternating Least Squares (Spark implementation)
|3: Alternating Least Squares (own implementation, slow)
""".stripMargin)
val (row, col) =
readInt match {
case 1 =>
          Factorization.trainModel(y, rated, unrated, rank, ε, λ, normalizer)
case 2 =>
          trainSparkAlsModel(n, m, ratings, rank, λ)
case 3 =>
          Factorization.runAls(y, rated, rank, ε, λ)
case i => throw new IOException(s"Unsupported Option: $i")
}
MovieModel(
userFeatures = row,
movieFeatures = col,
seenMovies = seen.collectAsMap().toMap,
idx2Name = idxToName.collectAsMap().toMap)
}
def main(args: Array[String]) = {
val master = Try(args(0)).toOption
implicit val context = sparkContext(master)
val model =
if (modelPath.toFile.exists()) {
println("Found an existing model. Do you want to reuse it (y/n)?")
readLine() match {
case "y" | "yes" =>
loadModel(modelPath)
case "n" | "no" =>
saveModel(
modelPath,
model = computeModel())
case i =>
throw new IOException(s"Unsupported input: '$i'")
}
} else {
println("No existing model found. Creating new one.")
saveModel(
modelPath,
model = computeModel())
}
/* String representations of original and predicted rating matrices
val r2 = DenseMatrix.zeros[Boolean](rated.rows, rated.cols)
val original = ratingMatToString(y, unrated)
val predicted = ratingMatToString(row*col, r2)
* */
context.stop()
recommendDialog(model)
}
def recommendDialog(model: MovieModel): Unit = {
val maxId = model.userFeatures.cols - 1
println(s"Enter a list of user id numbers β [0, $maxId] to get recommendations (CTRL-C to quit):")
Try {
val a = readLine().split("[^0-9]+").map(_.toInt)
val result = recommendMovies(model, a, limit = 20)
printResults(result)
}
recommendDialog(model)
}
def sparkContext(url: Option[String]) = {
println("Setting up spark context.")
val conf = new SparkConf()
.setAppName("MovieLens Recommendation Example")
url.foreach(conf.setMaster)
if (Try(conf.get("spark.master")).isFailure) {
conf.setMaster("local[2]")
}
for (master <- Try(conf.get("spark.master"))) {
println(s"Master: $master")
}
val context = new SparkContext(conf)
context.setCheckpointDir("/tmp")
context
}
}
| arzt/ml-stuff | src/main/scala/de/berlin/arzt/ml/Main.scala | Scala | gpl-2.0 | 3,825 |
package feh.tec.rubik
import org.scalacheck.Arbitrary
object Arbitraries {
Arbitrary
}
| fehu/int-sis--AStar | rubik/src/test/scala/feh/tec/rubik/Arbitraries.scala | Scala | mit | 95 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.common
package ingest
import util._
import org.specs2.mutable._
import org.specs2.ScalaCheck
import org.scalacheck.{Arbitrary, Gen}
import blueeyes.json._
import blueeyes.json.serialization.{ Extractor, Decomposer }
import blueeyes.json.serialization.DefaultSerialization._
import blueeyes.json.serialization.Extractor._
import scalaz._
import scalaz.std.list._
import scalaz.syntax.traverse._
class ArchiveSpecs extends Specification with ArbitraryEventMessage with ScalaCheck {
implicit val arbArchive = Arbitrary(genRandomArchive)
"serialization of an archive" should {
"read back the data that was written" in check { in: Archive =>
in.serialize.validated[Archive] must beLike {
case Success(out) => in must_== out
}
}
"read legacy archives" in {
val Success(JArray(input)) = JParser.parseFromString("""[
{"path":"/test/test/php/query/T10170960455069fb56d061c690884208/","tokenId":"test1"},
{"path":"/test/test/php/query/T9345418045069fd119e9ed256256425/","tokenId": "test1"},
{"path":"/test/test/php/query/T1373621163506a00891eb60240629876/","tokenId":"test1"},
{"path":"/test/test/php/query/T1564471072506a01ed32be5009280574/","tokenId":"test1"},
{"path":"/test/test/php/query/T1172864121506c4ea9e2308492490793/","tokenId":"test1"},
{"path":"/test/test/ttt/","tokenId":"test2"},
{"path":"/test/nathan/politicalsentiment/twitter/test/1/","tokenId":"test3"},
{"path":"/test/test/","tokenId":"test2"},
{"path":"/test/foo/","tokenId":"test4"}
]""")
val results = input.map(_.validated[Archive]).collect {
case Success(result) => result
}
results.size mustEqual 9
results.map(_.apiKey).toSet mustEqual Set("test1", "test2", "test3", "test4")
}
"read new archives" in {
val Success(JArray(input)) = JParser.parseFromString("""[
{"apiKey":"test1","path":"/foo1/test/js/delete/"},
{"apiKey":"test2","path":"/foo2/blargh/"},
{"apiKey":"test2","path":"/foo2/blargh/"},
{"apiKey":"test2","path":"/foo2/testing/"},
{"apiKey":"test2","path":"/foo2/testing/"}
]""")
val results = input.map(_.validated[Archive]).collect {
case Success(result) => result
}
results.size mustEqual 5
results.map(_.apiKey).toSet mustEqual Set("test1", "test2")
}
"read archives with reversed fields" in {
val Success(JArray(input)) = JParser.parseFromString("""[
{"path":"test1","apiKey":"/foo1/test/js/delete/"},
{"path":"test2","apiKey":"/foo2/blargh/"},
{"path":"test2","apiKey":"/foo2/blargh/"},
{"path":"test2","apiKey":"/foo2/testing/"},
{"path":"test2","apiKey":"/foo2/testing/"}
]""")
val results = input.map(_.validated[Archive]).collect {
case Success(result) => result
}
results.size mustEqual 5
results.map(_.apiKey).toSet mustEqual Set("test1", "test2")
}
}
}
// vim: set ts=4 sw=4 et:
| precog/platform | common/src/test/scala/com/precog/common/ingest/ArchiveSpecs.scala | Scala | agpl-3.0 | 3,961 |
package org.jetbrains.plugins.scala.lang.formatting.settings
import java.awt.BorderLayout
import java.awt.event.ActionEvent
import com.intellij.application.options.codeStyle.OptionTreeWithPreviewPanel
import com.intellij.openapi.fileTypes.FileType
import com.intellij.openapi.ui.OnePixelDivider
import com.intellij.psi.codeStyle.{CodeStyleSettings, LanguageCodeStyleSettingsProvider}
import com.intellij.ui.border.CustomLineBorder
import javax.swing.{JCheckBox, JComponent, JPanel}
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaDocFormattingPanel.ScalaDocCodeSample
import org.jetbrains.plugins.scala.{ScalaBundle, ScalaFileType, ScalaLanguage}
class ScalaDocFormattingPanel(val settings: CodeStyleSettings) extends OptionTreeWithPreviewPanel(settings) {
private var myEnableCheckBox: JCheckBox = _
private var myAsteriskStyleCheckBox: JCheckBox = _
private val myScaladocPanel: JPanel = new JPanel(new BorderLayout)
init()
override def getSettingsType = LanguageCodeStyleSettingsProvider.SettingsType.LANGUAGE_SPECIFIC
override def getPanel: JPanel = myScaladocPanel
override protected def getTabTitle: String = ScalaBundle.message("scaladoc.panel.title")
override protected def getFileType: FileType = ScalaFileType.INSTANCE
override protected def getRightMargin: Int = 47
override protected def getPreviewText: String = ScalaDocCodeSample
override def init(): Unit = {
super.init()
val topPanel = new JPanel(new BorderLayout)
myEnableCheckBox = new JCheckBox(ScalaBundle.message("scaladoc.panel.enable.scaladoc.formatting"))
myEnableCheckBox.addActionListener((_: ActionEvent) => update())
myAsteriskStyleCheckBox = new JCheckBox(ScalaBundle.message("scaladoc.panel.add.additional.space.for.leading.asterisk"))
topPanel.add(myEnableCheckBox, BorderLayout.NORTH)
topPanel.add(myAsteriskStyleCheckBox, BorderLayout.SOUTH)
myPanel.setBorder(new CustomLineBorder(OnePixelDivider.BACKGROUND, 1, 0, 0, 0))
myScaladocPanel.add(BorderLayout.CENTER, myPanel)
myScaladocPanel.add(topPanel, BorderLayout.NORTH)
}
override protected def initTables(): Unit = {
initCustomOptions(ScalaDocFormattingPanel.ALIGNMENT_GROUP)
initCustomOptions(ScalaDocFormattingPanel.BLANK_LINES_GROUP)
initCustomOptions(ScalaDocFormattingPanel.OTHER_GROUP)
}
private def update(): Unit = {
setEnabled(getPanel, myEnableCheckBox.isSelected)
myEnableCheckBox.setEnabled(true)
myAsteriskStyleCheckBox.setEnabled(true)
}
private def setEnabled(c: JComponent, enabled: Boolean): Unit = {
c.setEnabled(enabled)
val children = c.getComponents
for (child <- children) {
child match {
case c1: JComponent =>
setEnabled(c1, enabled)
case _ =>
}
}
}
override def apply(settings: CodeStyleSettings): Unit = {
super.apply(settings)
val scalaSettings = settings.getCustomSettings(classOf[ScalaCodeStyleSettings])
scalaSettings.ENABLE_SCALADOC_FORMATTING = myEnableCheckBox.isSelected
scalaSettings.USE_SCALADOC2_FORMATTING = myAsteriskStyleCheckBox.isSelected
}
override protected def resetImpl(settings: CodeStyleSettings): Unit = {
super.resetImpl(settings)
val scalaSettings = settings.getCustomSettings(classOf[ScalaCodeStyleSettings])
myEnableCheckBox.setSelected(scalaSettings.ENABLE_SCALADOC_FORMATTING)
myAsteriskStyleCheckBox.setSelected(scalaSettings.USE_SCALADOC2_FORMATTING)
update()
}
override def isModified(settings: CodeStyleSettings): Boolean = {
val scalaSettings = settings.getCustomSettings(classOf[ScalaCodeStyleSettings])
super.isModified(settings) || myEnableCheckBox.isSelected != scalaSettings.ENABLE_SCALADOC_FORMATTING ||
myAsteriskStyleCheckBox.isSelected != scalaSettings.USE_SCALADOC2_FORMATTING
}
protected override def customizeSettings(): Unit = {
val provider: LanguageCodeStyleSettingsProvider = LanguageCodeStyleSettingsProvider.forLanguage(ScalaLanguage.INSTANCE)
if (provider != null) {
provider.customizeSettings(this, getSettingsType)
}
}
}
object ScalaDocFormattingPanel {
val BLANK_LINES_GROUP: String = ScalaBundle.message("scaladoc.panel.groups.blank.lines")
val ALIGNMENT_GROUP : String = ScalaBundle.message("scaladoc.panel.groups.alignment")
val OTHER_GROUP : String = ScalaBundle.message("scaladoc.panel.groups.other")
private val ScalaDocCodeSample =
"""
|/**
| * Some description
| *
| * Lists:
| * 1. item 1 line 1
| * item 1 line 2
| * item 1 line 3
| * I. item inner 1 line 1
| * item inner 1 line 2
| * I. item inner 2 line 1
| * 1. item 2
| * - item inner 3
| * - item inner 4 line 1
| * item inner 4 line 2
| *
| *Some other description
| * @note Note that this tag is here just to show
    | *       how exactly alignment works for tags other than parameter and return tags
|
|
| * @forExample Even if the tag is not valid, formatting will still be fine
    | *     also, if you choose to preserve spaces in tags, no spaces will be removed after tag value
| * @param x Some parameter named x that has
| * a multiline description
| * @param yy Another parameter named yy
| * @param longParamName Another parameter with a long name
| * @tparam A description of parameter A
| * with very long body
| * @tparam B description of parameter B
| * also with very
| * long body
| * @return Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
| * eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
| * @throws RuntimeException whenever it feels like it
|
|
| * @throws IndexOutOfBoundsException when index is out of bound
| */
|def foo[A, B](x: Int, yy: Int, longParamName: Int): Int
|""".stripMargin.replace("\r", "")
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/formatting/settings/ScalaDocFormattingPanel.scala | Scala | apache-2.0 | 6,056 |
package net.sansa_stack.query.spark.semantic.utils
import java.util.concurrent.TimeUnit
import scala.collection.mutable.ArrayBuffer
/*
 * Helpers
*/
object Helpers {
/*
   * fetchTripleSPO - fetch SUBJECT, PREDICATE and OBJECT
*/
def fetchTripleSPO(triple: String, symbol: Map[String, String]): ArrayBuffer[String] = {
// return list
val tripleData: ArrayBuffer[String] = ArrayBuffer()
// fetch indices
val locationPoint1 = triple.indexOf(symbol("blank"))
val locationPoint2 = triple.lastIndexOf(symbol("blank"))
// WHERE clause: SUBJECT, PREDICATE and OBJECT
val tripleSubject = triple.substring(0, locationPoint1).trim()
val triplePredicate = triple.substring(locationPoint1, locationPoint2).trim()
val tripleObject = triple.substring(locationPoint2, triple.length()).trim()
// append data
tripleData.append(tripleSubject)
tripleData.append(triplePredicate)
tripleData.append(tripleObject)
tripleData
}
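  // Hedged usage sketch (illustrative only; assumes symbol("blank") maps to a
  // single space, which this file does not guarantee):
  //   fetchTripleSPO("?s <http://p> ?o", Map("blank" -> " "))
  //   // => ArrayBuffer("?s", "<http://p>", "?o")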
// fetch FILTER function data
def fetchFilterFunctionData(fName: String, filterFunction: String, processLine: String, symbol: Map[String, String]): ArrayBuffer[String] = {
val data: ArrayBuffer[String] = ArrayBuffer()
// split
val splitData = filterFunction.split(fName)
var locationPoint = splitData(1).indexOf(symbol("round-bracket-right"))
// variable (?X)
val variable = splitData(1).substring(1, locationPoint)
// value (<...>)
locationPoint = processLine.indexOf(variable)
var value = processLine.substring(locationPoint + variable.length + 1)
locationPoint = value.indexOf(symbol("blank"))
    if (locationPoint != -1) value = value.substring(0, locationPoint)
// append data
data.append(variable)
data.append(value)
data
}
// total query process time
def queryTime(processedTime: Long, symbol: Map[String, String]): Long = {
val milliseconds = TimeUnit.MILLISECONDS.convert(processedTime, TimeUnit.NANOSECONDS)
val seconds = Math.floor(milliseconds / 1000d + .5d).toInt
val minutes = TimeUnit.MINUTES.convert(processedTime, TimeUnit.NANOSECONDS)
if (milliseconds >= 0) {
println(s"Processed Time (MILLISECONDS): $milliseconds")
if (seconds > 0) {
println(s"Processed Time (SECONDS): $seconds approx.")
if (minutes > 0) {
println(s"Processed Time (MINUTES): $minutes")
}
}
}
println(symbol("newline"))
    // return query time in milliseconds
milliseconds
}
// overall queries process time
def overallQueriesTime(_queriesProcessTime: ArrayBuffer[Long]): Unit = {
val milliseconds: Long = _queriesProcessTime.sum
val seconds = Math.floor(milliseconds / 1000d + .5d).toInt
if (milliseconds >= 1000) {
println(s"--> Overall Process Time: ${milliseconds}ms (${seconds}secs approx.)")
} else {
println(s"--> Overall Process Time: ${milliseconds}ms")
}
}
}
| SANSA-Stack/SANSA-RDF | sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/semantic/utils/Helpers.scala | Scala | apache-2.0 | 2,949 |
package com.geteit.rcouch.actors
import org.scalatest._
import akka.actor.{ActorRef, ActorSystem}
import akka.testkit.{ImplicitSender, TestKit}
import com.geteit.rcouch.Settings.ClusterSettings
import com.geteit.rcouch.memcached.Memcached
import akka.util.{Timeout, ByteString}
import com.geteit.rcouch.memcached.Memcached.{GetResponse, StoreResponse}
import scala.concurrent.Await
import scala.concurrent.duration._
import akka.pattern._
import com.geteit.rcouch.actors.ClusterActor.GetBucketActor
import com.geteit.rcouch.BucketSpec
import com.geteit.rcouch.couchbase.Couchbase.Bucket
/**
*/
class ClusterActorSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender with fixture.FeatureSpecLike with Matchers with BucketSpec {
def this() = this(ActorSystem("ClusterSpec"))
implicit val timeout = 5.seconds : Timeout
override protected def beforeAll() {
super.beforeAll()
}
override protected def afterAll(): Unit = {
super.afterAll()
TestKit.shutdownActorSystem(system)
}
feature("Connect to couchbase server") {
scenario("Get BucketActor and send memcached commands") { b: Bucket =>
val cluster = system.actorOf(ClusterActor.props(ClusterSettings()))
val bucket = Await.result((cluster ? GetBucketActor(b.name)).mapTo[ActorRef], 5.seconds)
bucket ! Memcached.Set("key", ByteString("value"), 0, 3600)
val res = expectMsgClass(classOf[StoreResponse])
res.status should be(0)
bucket ! Memcached.Get("key")
val gr = expectMsgClass(classOf[GetResponse])
gr.value should be(ByteString("value"))
bucket ! Memcached.GetK("key")
val gr1 = expectMsgClass(classOf[GetResponse])
gr1.key should be(Some("key"))
gr1.value should be(ByteString("value"))
Await.result(gracefulStop(cluster, 5.seconds), 6.seconds)
}
}
}
| zbsz/reactive-couch | src/it/scala/com/geteit/rcouch/actors/ClusterActorSpec.scala | Scala | apache-2.0 | 1,849 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.datamap
import java.util
import scala.collection.JavaConverters._
import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.execution.command.{Checker, DataCommand}
import org.apache.spark.sql.types.StringType
import org.apache.carbondata.core.datamap.DataMapStoreManager
import org.apache.carbondata.core.metadata.schema.datamap.{DataMapClassProvider, DataMapProperty}
import org.apache.carbondata.core.metadata.schema.table.DataMapSchema
/**
* Show the datamaps on the table
*
 * @param tableIdentifier table to list datamaps for; when None, all datamaps are listed
*/
case class CarbonDataMapShowCommand(tableIdentifier: Option[TableIdentifier])
extends DataCommand {
override def output: Seq[Attribute] = {
Seq(AttributeReference("DataMapName", StringType, nullable = false)(),
AttributeReference("ClassName", StringType, nullable = false)(),
AttributeReference("Associated Table", StringType, nullable = false)(),
AttributeReference("DataMap Properties", StringType, nullable = false)())
}
override def processData(sparkSession: SparkSession): Seq[Row] = {
convertToRow(getAllDataMaps(sparkSession))
}
/**
* get all datamaps for this table, including preagg, index datamaps and mv
*/
def getAllDataMaps(sparkSession: SparkSession): util.List[DataMapSchema] = {
val dataMapSchemaList: util.List[DataMapSchema] = new util.ArrayList[DataMapSchema]()
tableIdentifier match {
case Some(table) =>
val carbonTable = CarbonEnv.getCarbonTable(table)(sparkSession)
Checker.validateTableExists(table.database, table.table, sparkSession)
if (carbonTable.hasDataMapSchema) {
dataMapSchemaList.addAll(carbonTable.getTableInfo.getDataMapSchemaList)
}
val indexSchemas = DataMapStoreManager.getInstance().getDataMapSchemasOfTable(carbonTable)
if (!indexSchemas.isEmpty) {
dataMapSchemaList.addAll(indexSchemas)
}
case _ =>
dataMapSchemaList.addAll(DataMapStoreManager.getInstance().getAllDataMapSchemas)
}
dataMapSchemaList
}
private def convertToRow(schemaList: util.List[DataMapSchema]) = {
if (schemaList != null && schemaList.size() > 0) {
schemaList.asScala
.map { s =>
val relationIdentifier = s.getRelationIdentifier
val table = relationIdentifier.getDatabaseName + "." + relationIdentifier.getTableName
        // preaggregate datamap does not support user-specified properties, therefore we return an empty string
val dmPropertieStr = if (s.getProviderName.equalsIgnoreCase(
DataMapClassProvider.PREAGGREGATE.getShortName)) {
""
} else {
s.getProperties.asScala
// ignore internal used property
.filter(p => !p._1.equalsIgnoreCase(DataMapProperty.DEFERRED_REBUILD) &&
!p._1.equalsIgnoreCase(DataMapProperty.CHILD_SELECT_QUERY) &&
!p._1.equalsIgnoreCase(DataMapProperty.QUERY_TYPE))
.map(p => s"'${ p._1 }'='${ p._2 }'").toSeq
.sorted.mkString(", ")
}
Row(s.getDataMapName, s.getProviderName, table, dmPropertieStr)
}
} else {
Seq.empty
}
}
}
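// Hedged usage sketch (illustrative only): this command backs a SQL statement
// along the lines of
//   SHOW DATAMAP ON TABLE db.tbl
// whose result rows carry (DataMapName, ClassName, Associated Table,
// DataMap Properties), as declared in `output` above.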
| sgururajshetty/carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/datamap/CarbonDataMapShowCommand.scala | Scala | apache-2.0 | 4,198 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2.validation
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.ct600.v2.{RSQ7, RSQ8}
import uk.gov.hmrc.ct.ct600.v2.retriever.ReturnStatementsBoxRetriever
class RSQ7MutuallyExclusiveWithRSQ8Spec extends WordSpec with Matchers with MockitoSugar with RSQ7MutuallyExclusiveWithRSQ8 {
"validateMutualExclusivity" should {
"return no errors if both RSQ7 & RSQ8 are empty" in {
val retriever = mock[ReturnStatementsBoxRetriever]
when(retriever.rsq7()).thenReturn(RSQ7(None))
when(retriever.rsq8()).thenReturn(RSQ8(None))
validateMutualExclusivity(retriever) shouldBe empty
}
"return an error if both RSQ7 & RSQ8 are true" in {
val retriever = mock[ReturnStatementsBoxRetriever]
when(retriever.rsq7()).thenReturn(RSQ7(Some(true)))
when(retriever.rsq8()).thenReturn(RSQ8(Some(true)))
val expectedErrors = Set(CtValidation(Some("RSQ7"), "error.RSQ7.mutuallyExclusive"), CtValidation(Some("RSQ8"), "error.RSQ8.mutuallyExclusive"))
validateMutualExclusivity(retriever) shouldBe expectedErrors
}
"return no errors if both RSQ7 is false & RSQ8 is true" in {
val retriever = mock[ReturnStatementsBoxRetriever]
when(retriever.rsq7()).thenReturn(RSQ7(Some(false)))
when(retriever.rsq8()).thenReturn(RSQ8(Some(true)))
validateMutualExclusivity(retriever) shouldBe Set.empty
}
"return no errors if both RSQ7 is true & RSQ8 is empty" in {
val retriever = mock[ReturnStatementsBoxRetriever]
when(retriever.rsq7()).thenReturn(RSQ7(Some(true)))
when(retriever.rsq8()).thenReturn(RSQ8(None))
validateMutualExclusivity(retriever) shouldBe Set.empty
}
}
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600/v2/validation/RSQ7MutuallyExclusiveWithRSQ8Spec.scala | Scala | apache-2.0 | 2,446 |
import scala.compiletime.{erasedValue, summonFrom}
import scala.deriving._
import scala.quoted._
trait Lft[T]:
def toExpr(x: T)(using Type[T], Quotes): Expr[T] // TODO remove `Type[T]`
object Lft {
given Lft[Int] with
def toExpr(x: Int)(using Type[Int], Quotes) = Expr(x)
inline given derived[T](using inline m: Mirror.Of[T]): Lft[T] = ${ derivedExpr('m) }
private def derivedExpr[T](mirrorExpr: Expr[Mirror.Of[T]])(using qctx: Quotes, tpe: Type[T]): Expr[Lft[T]] = {
mirrorExpr match {
case '{ $mirrorExpr : Mirror.Sum { type MirroredElemTypes = mirroredElemTypes } } =>
val liftables = Expr.ofSeq(elemTypesLfts[mirroredElemTypes])
'{ new LiftableSum[T, mirroredElemTypes]($mirrorExpr, $liftables) }
case '{ $mirrorExpr : Mirror.Product { type MirroredElemTypes = mirroredElemTypes } } =>
val liftableExprs = Expr.ofSeq(elemTypesLfts[mirroredElemTypes])
'{ new LiftableProduct[T, mirroredElemTypes]($mirrorExpr, $liftableExprs) }
}
}
class LiftableSum[T, MElemTypes](
mirror: Mirror.Sum { type MirroredElemTypes = MElemTypes; type MirroredMonoType = T },
liftables: Seq[Lft[_]] // TODO make Lft creation lazy
) extends Lft[T]:
def toExpr(x: T)(using Type[T], Quotes): Expr[T] =
val ordinal = mirror.ordinal(x)
val tp = Expr.summon[Mirror.SumOf[T]].get match
case '{ $mirrorExpr : Mirror.Sum { type MirroredElemTypes = mirroredElemTypes } } =>
elemType[mirroredElemTypes](ordinal)
val liftable = liftables.apply(ordinal).asInstanceOf[Lft[T]]
liftable.toExpr(x)(using tp.asInstanceOf[Type[T]], summon[Quotes])
end LiftableSum
class LiftableProduct[T, MElemTypes](
mirror: Mirror.Product { type MirroredElemTypes = MElemTypes; type MirroredMonoType = T },
liftables: Seq[Lft[_]]
) extends Lft[T]:
def toExpr(x: T)(using Type[T], Quotes): Expr[T] =
val mirrorExpr = Expr.summon[Mirror.ProductOf[T]].get
val elemExprs =
x.asInstanceOf[Product].productIterator.zip(liftables.iterator).map { (elem, lift) =>
lift.asInstanceOf[Lft[Any]].toExpr(elem)
}.toSeq
val elemsTupleExpr = Expr.ofTupleFromSeq(elemExprs)
'{ $mirrorExpr.fromProduct($elemsTupleExpr) }
end LiftableProduct
private def elemTypesLfts[X: Type](using Quotes): List[Expr[Lft[_]]] =
Type.of[X] match
case '[ head *: tail ] =>
Expr.summon[Lft[head]].getOrElse(quotes.reflect.report.errorAndAbort(s"Could not find given Lft[${Type.show[head]}]")) :: elemTypesLfts[tail]
case '[ EmptyTuple ] => Nil
private def elemType[X: Type](ordinal: Int)(using Quotes): Type[_] =
Type.of[X] match
case '[ head *: tail ] =>
if ordinal == 0 then Type.of[head]
else elemType[tail](ordinal - 1)
}
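// Hedged usage sketch (hypothetical ADT, illustrative only): with the derivation
// above, an instance can be derived and applied inside a macro implementation:
//
//   case class Point(x: Int, y: Int) derives Lft
//
//   def liftPoint(p: Point)(using Quotes): Expr[Point] =
//     summon[Lft[Point]].toExpr(p)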
| dotty-staging/dotty | tests/run-macros/quoted-liftable-derivation-macro/Derivation_1.scala | Scala | apache-2.0 | 2,821 |
package com.nekogata.backlogger.js_exports.events
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
@JSExportTopLevel("SettingEvents")
object SettingEvents {
@JSExport val saved = new Event {
@JSExport override def subscribe(f: js.Function): Subscription = super.subscribe(f)
}
@JSExport val apiKeyVerificationFailed = new Event {
@JSExport override def subscribe(f: js.Function): Subscription = super.subscribe(f)
}
}
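// Hedged usage sketch from the JavaScript side (illustrative only; relies on
// the @JSExport members above):
//   SettingEvents.saved.subscribe(function () { console.log("settings saved"); });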
| Shinpeim/BackLogger | scala/src/main/scala/com/nekogata/backlogger/js_exports/events/SettingEvents.scala | Scala | mit | 476 |
object classtut{
def main(args: Array[String]){
var ram=new Student
ram.setName("Ram")
ram.setStandard("XI")
println(ram.toString())
} //end of main
class Student(var name: String,var standard: String){
this.setName(name)
val id=Student.newIdNum
def getName(): String=name
def getStandard(): String=standard
def setName(name: String){
if(!(name.matches(".*\\d+.*")))
this.name=name
}
def setStandard(standard: String){
this.standard=standard
}
def this(name: String){
this("No Name","No Standard")
this.setName(name)
}
def this(){
this("No Name","No Standard")
}
override def toString():String={
return "%s with id %d studies in %s standard".format(this.name, this.id, this.standard)
}
}
object Student{
private var idNumber=0
private def newIdNum={ idNumber+=1; idNumber}
}
// TODO: implement inheritance just for practice
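  // Hedged sketch for the TODO above (illustrative only, not used by main):
  // a minimal subclass to practice inheritance with Student.
  class GraduateStudent(name: String, standard: String, val thesis: String)
      extends Student(name, standard) {
    override def toString(): String =
      super.toString() + " and writes a thesis on " + thesis
  }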
}
| Jargon4072/DS-ALGO_implementations | scala/classtut.scala | Scala | gpl-3.0 | 967 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import akka.actor._
import akka.http.scaladsl.model.ws._
import akka.pattern.pipe
import akka.stream._
import akka.stream.scaladsl._
import akka.testkit._
import akka.util.ByteString
import org.ensime.fixture.SharedTestKitFixture
import org.ensime.util.EnsimeSpec
import spray.json._
import scala.concurrent.duration._
class WebSocketBoilerplateSpec extends EnsimeSpec with SharedTestKitFixture {
import WebSocketBoilerplate._
"WebSocketBoilerplate" should "produce Flow[In, Out]" in withTestKit { tk =>
import tk.system
import tk.system.dispatcher
implicit val mat: ActorMaterializer = ActorMaterializer()
val service = TestProbe()
var target: ActorRef = null
val flow = actorRefAsFlow[String, Long] { t =>
target = t
service.ref
}
val client = TestProbe()
Source.single("hello").via(flow).runWith(Sink.head).pipeTo(client.ref)
service.expectMsg("hello")
service.send(target, 13L)
service.expectNoMsg(3 seconds)
client.expectMsg(13L)
// it would be good to check that errors / closing will stop the
// actor but that's perhaps testing the framework.
}
case class Foo(a: String)
case class Bar(b: Long)
import DefaultJsonProtocol._
implicit def FooFormat: RootJsonFormat[Foo] = jsonFormat1(Foo)
implicit def BarFormat: RootJsonFormat[Bar] = jsonFormat1(Bar)
val foo = Foo("hello")
val bar = Bar(13L)
it should "produce a marshalled Flow that accepts valid messages" in withTestKit { tk =>
import tk.system
import tk.system.dispatcher
implicit val mat: ActorMaterializer = ActorMaterializer()
// This is quite horrible and really highlights why a BidiFlow
// model would be better. WebSockets are *not* request / response
// (like this).
val user = Flow[Foo].map { f =>
f shouldBe foo
bar
}
val endpoints = jsonMarshalledMessageFlow(user)
val input = TextMessage(foo.toJson.compactPrint)
val client = TestProbe()
Source.single(input).via(endpoints).runWith(Sink.head).pipeTo(client.ref)
client.expectMsg(TextMessage(bar.toJson.prettyPrint))
}
it should "produce a marshalled Flow that errors on bad message" in withTestKit { tk =>
import tk.system
import tk.system.dispatcher
implicit val mat: ActorMaterializer = ActorMaterializer()
val user = Flow[Foo].map { f =>
f shouldBe foo
bar
}
val endpoints = jsonMarshalledMessageFlow(user)
val input = BinaryMessage(ByteString(0, 1, 2))
val client = TestProbe()
Source.single(input).via(endpoints).runWith(Sink.head).pipeTo(client.ref)
client.expectMsgPF() {
case Status.Failure(_) =>
}
}
it should "produce a marshalled Flow that errors on bad inbound JSON" in withTestKit { tk =>
import tk.system
import tk.system.dispatcher
implicit val mat: ActorMaterializer = ActorMaterializer()
val user = Flow[Foo].map { _ => bar }
val endpoints = jsonMarshalledMessageFlow(user)
val input = TextMessage.Strict("""{}""")
val client = TestProbe()
Source.single(input).via(endpoints).runWith(Sink.head).pipeTo(client.ref)
client.expectMsgPF() {
case Status.Failure(e: DeserializationException) =>
}
}
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/server/src/test/scala/org/ensime/server/WebSocketBoilerplateSpec.scala | Scala | apache-2.0 | 3,393 |
class C {
val sa = s"""\\"""
val sb = s"""\\\\"""
val sc = s"""\\ """
val ra = raw"""\\"""
val rb = raw"""\\\\"""
val rc = raw"""\\ """
}
| lrytz/scala | test/files/neg/t6476b.scala | Scala | apache-2.0 | 142 |
package au.com.intelix.rs.core.actors
import akka.actor.{Actor, ActorRef, Terminated}
import au.com.intelix.config.RootConfig
import au.com.intelix.evt.EvtContext
import au.com.intelix.rs.core.config.WithActorSystemConfig
trait BaseActor extends WithActorSystemConfig with ActorUtils with EvtContext {
import CommonActorEvt._
private val pathAsString = self.path.toStringWithoutAddress
protected[actors] var terminatedFuncChain: Seq[ActorRef => Unit] = Seq.empty
private var preStartChain: Seq[() => Unit] = Seq.empty
private var preRestartChain: Seq[PartialFunction[(Throwable, Option[Any]), Unit]] = Seq.empty
private var postRestartChain: Seq[PartialFunction[Throwable, Unit]] = Seq.empty
private var postStopChain: Seq[() => Unit] = Seq.empty
override implicit lazy val nodeCfg: RootConfig = RootConfig(config)
def onActorTerminated(f: ActorRef => Unit) = terminatedFuncChain = terminatedFuncChain :+ f
def onPreStart(thunk: => Unit) = preStartChain = preStartChain :+ (() => thunk)
def onPostStop(thunk: => Unit) = postStopChain = postStopChain :+ (() => thunk)
def onPreRestart(f: PartialFunction[(Throwable, Option[Any]), Unit]) = preRestartChain = preRestartChain :+ f
def onPostRestart(f: PartialFunction[Throwable, Unit]) = postRestartChain = postRestartChain :+ f
commonEvtFields('path -> pathAsString, 'nodeid -> nodeId)
@throws[Exception](classOf[Exception])
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
raise(Evt.PreRestart, 'reason -> reason.getMessage, 'msg -> message, 'path -> pathAsString)
preRestartChain.foreach(_.applyOrElse((reason, message), (_: (Throwable, Option[Any])) => () ))
super.preRestart(reason, message)
}
@throws[Exception](classOf[Exception])
override def postRestart(reason: Throwable): Unit = {
postRestartChain.foreach(_.applyOrElse(reason, (_: Throwable) => ()))
super.postRestart(reason)
raise(Evt.PostRestart, 'reason -> reason.getMessage, 'path -> pathAsString)
}
@throws[Exception](classOf[Exception])
override def preStart(): Unit = {
raise(Evt.PreStart, 'path -> pathAsString)
preStartChain.foreach(_.apply())
super.preStart()
}
@throws[Exception](classOf[Exception])
override def postStop(): Unit = {
postStopChain.foreach(_.apply())
super.postStop()
raise(Evt.PostStop, 'path -> pathAsString)
}
def onMessage(f: Receive)
}
trait JBaseActor extends BaseActor {
private var chainedFunc: Receive = {
case Terminated(ref) => terminatedFuncChain.foreach(_ (ref))
}
override final val receive: Actor.Receive = {
case x if chainedFunc.isDefinedAt(x) => chainedFunc(x)
case x => unhandled(x)
}
override final def onMessage(f: Receive): Unit = chainedFunc = f orElse chainedFunc
}
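// Hedged usage sketch (illustrative only; assumes the surrounding runtime
// provides the config wiring BaseActor expects):
//   class PingActor extends JBaseActor {
//     onPreStart { /* acquire resources */ }
//     onMessage { case "ping" => sender() ! "pong" }
//   }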
| intelix/reactiveservices | platform/core/src/main/scala/au/com/intelix/rs/core/actors/BaseActor.scala | Scala | apache-2.0 | 2,796 |
package controllers
import play.api.mvc._
import play.api.libs.streams._
import javax.inject.Inject
import akka.actor._
import akka.stream.Materializer
import play.api.libs.json.JsValue
import models._
import models.BucketJsonWriterNoDb
import play.api.libs.json.Json
import play.api.libs.json.Writes
import play.api.Configuration
import akka.util.Timeout
import javax.inject.Named
import play.api.libs.concurrent.InjectedActorSupport
import akka.routing.BroadcastRoutingLogic
import akka.routing.BroadcastGroup
import play.api.libs.iteratee.Concurrent
import play.api.libs.iteratee.Enumerator
import play.api.libs.iteratee.Concurrent.Channel
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator
case class UpdateBucket(bucket: Bucket, dumps: Seq[Dump], jsonWriter: Writes[(Bucket, Seq[Dump])])
case class UpdateDump(dump: Dump, jsonWriter: Writes[Dump])
class WebSocket @Inject() (
implicit system: ActorSystem,
materializer: Materializer) {
def socket = WebSocket.accept[JsValue, JsValue] { request =>
ActorFlow.actorRef(out => WebSocketWorker.props(out))
}
}
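// Hedged wiring sketch (assumed route entry, illustrative only): the endpoint
// above is typically exposed in conf/routes as
//   GET  /ws  controllers.WebSocket.socket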
object WebSocketWorker {
def props(out: ActorRef) = Props(new WebSocketWorker(out))
}
class WebSocketWorker(out: ActorRef) extends Actor {
import DistributedPubSubMediator. { Subscribe, SubscribeAck }
val mediator = DistributedPubSub(context.system).mediator
mediator ! Subscribe("updateBucket", self)
mediator ! Subscribe("updateDump", self)
def receive = {
case UpdateBucket(bucket, dumps, jsonWriter) => {
implicit val bucketWriter = jsonWriter
out ! Json.obj(
"type" -> "updateBucket",
"bucket" -> Json.toJson((bucket, dumps))
)
}
case UpdateDump(dump, jsonWriter) => {
implicit val dumpWriter = jsonWriter
out ! Json.obj(
"type" -> "updateDump",
"dump" -> Json.toJson(dump)
)
}
}
}
class WebSocketMaster() extends Actor {
import akka.cluster.pubsub.DistributedPubSubMediator.Publish
val mediator = DistributedPubSub(context.system).mediator
def receive = {
case updateBucket: UpdateBucket => mediator ! Publish("updateBucket", updateBucket)
case updateDump: UpdateDump => mediator ! Publish("updateDump", updateDump)
}
}
| alexanderfloh/dmpster | app/controllers/WebSocket.scala | Scala | mit | 2,296 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js API **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package scala.scalajs.niocharset
import java.nio.charset._
/** Standard charsets.
* This is basically the same as [[java.nio.charset.StandardCharsets]], but
* it is also available when compiling with a JDK 6.
*/
object StandardCharsets {
import scala.scalajs.niocharset
/** ISO-8859-1, aka latin1. */
def ISO_8859_1: Charset = niocharset.ISO_8859_1
/** US-ASCII. */
def US_ASCII: Charset = niocharset.US_ASCII
/** UTF-8. */
def UTF_8: Charset = niocharset.UTF_8
/** UTF-16 Big Endian without BOM. */
def UTF_16BE: Charset = niocharset.UTF_16BE
/** UTF-16 Little Endian without BOM. */
def UTF_16LE: Charset = niocharset.UTF_16LE
/** UTF-16 with an optional BOM.
* When encoding, Big Endian is always used.
* When decoding, the BOM specifies what endianness to use. If no BOM is
* found, it defaults to Big Endian.
*/
def UTF_16: Charset = niocharset.UTF_16
}
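// Hedged usage sketch (illustrative only):
//   val bytes = "héllo".getBytes(StandardCharsets.UTF_8)
//   val text  = new String(bytes, StandardCharsets.UTF_8)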
| jmnarloch/scala-js | library/src/main/scala/scala/scalajs/niocharset/StandardCharsets.scala | Scala | bsd-3-clause | 1,440 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.typedarray
import org.scalajs.testsuite.utils.Requires
import scala.language.implicitConversions
import java.io.InputStream
import scala.scalajs.js
import scala.scalajs.js.typedarray._
import org.junit.Assert._
import org.junit.Test
import org.scalajs.testsuite.utils.AssertThrows._
/** Tests for our implementation of java.io._ stream classes */
trait ArrayBufferInputStreamTest {
def byteArray(a: TraversableOnce[Int]): Array[Byte] = {
a.toArray.map(_.toByte)
}
def mkStream(seq: Seq[Int]): InputStream
private val length = 50
private def newStream: InputStream = mkStream(1 to length)
@Test def read(): Unit = {
val stream = newStream
for (i <- 1 to length)
assertEquals(i, stream.read())
for (_ <- 1 to 5)
assertEquals(-1, stream.read())
}
@Test def read_buf(): Unit = {
val stream = newStream
val buf = new Array[Byte](10)
assertEquals(10, stream.read(buf))
assertArrayEquals(byteArray(1 to 10), buf)
assertEquals(35L, stream.skip(35))
assertEquals(5, stream.read(buf))
assertArrayEquals(byteArray((46 to 50) ++ (6 to 10)), buf)
assertEquals(-1, stream.read(buf))
assertEquals(-1, stream.read())
}
@Test def read_full_argument(): Unit = {
val stream = newStream
val buf = new Array[Byte](20)
assertEquals(5, stream.read(buf, 10, 5))
assertArrayEquals(byteArray(Seq.fill(10)(0) ++ (1 to 5) ++ Seq.fill(5)(0)), buf)
assertEquals(20, stream.read(buf, 0, 20))
assertArrayEquals(byteArray(6 to 25), buf)
assertEquals(0, stream.read(buf, 10, 0))
assertArrayEquals(byteArray(6 to 25), buf)
expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, -1, 0))
expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 0, -1))
expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 100, 0))
expectThrows(classOf[IndexOutOfBoundsException], stream.read(buf, 10, 100))
assertArrayEquals(byteArray(6 to 25), buf)
assertEquals(20L, stream.skip(20))
assertEquals(5, stream.read(buf, 0, 10))
assertArrayEquals(byteArray((46 to 50) ++ (11 to 25)), buf)
assertEquals(-1, stream.read(buf, 0, 10))
assertEquals(0, stream.read(buf, 0, 0))
assertArrayEquals(byteArray((46 to 50) ++ (11 to 25)), buf)
}
@Test def available(): Unit = {
val stream = newStream
def mySkip(n: Int) = for (_ <- 1 to n) assertNotEquals(-1, stream.read())
def check(n: Int) = assertEquals(n, stream.available)
check(50)
mySkip(5)
check(45)
assertEquals(10L, stream.skip(10))
check(35)
mySkip(30)
check(5)
assertEquals(5L, stream.skip(20))
check(0)
}
@Test def skip(): Unit = {
val stream = newStream
assertEquals(7L, stream.skip(7))
for (i <- 8 to 32)
assertEquals(i, stream.read())
assertEquals(0L, stream.skip(0))
assertEquals(33, stream.read())
assertEquals(0L, stream.skip(-4))
assertEquals(34, stream.read())
assertEquals(16L, stream.skip(30))
assertEquals(0L, stream.skip(30))
}
@Test def markSupported(): Unit = {
assertTrue(newStream.markSupported)
}
@Test def close(): Unit = {
val stream = newStream
for (i <- 1 to length) {
stream.close()
assertEquals(i, stream.read())
}
}
@Test def mark_reset(): Unit = {
val stream = newStream
def read(range: Range) = for (i <- range) assertEquals(i, stream.read())
read(1 to 10)
stream.reset() // mark must be 0 at creation
read(1 to 5)
stream.mark(length)
read(6 to 22)
stream.reset()
read(6 to 20)
stream.reset()
read(6 to 25)
stream.reset()
assertEquals(40L, stream.skip(40))
stream.mark(length)
read(46 to 50)
stream.reset()
read(46 to 50)
stream.mark(length)
assertEquals(-1, stream.read())
stream.reset()
assertEquals(-1, stream.read())
}
@Test def should_return_positive_integers_when_calling_read(): Unit = {
val stream = mkStream(Seq(-1, -2, -3))
assertEquals(255, stream.read())
assertEquals(254, stream.read())
assertEquals(253, stream.read())
assertEquals(-1, stream.read())
}
}
object ArrayBufferInputStreamWithoutOffsetTest extends Requires.TypedArray
class ArrayBufferInputStreamWithoutOffsetTest extends ArrayBufferInputStreamTest {
def mkStream(seq: Seq[Int]): InputStream = {
import js.JSConverters._
new ArrayBufferInputStream(new Int8Array(seq.toJSArray).buffer)
}
}
object ArrayBufferInputStreamWithOffsetTest extends Requires.TypedArray
class ArrayBufferInputStreamWithOffsetTest extends ArrayBufferInputStreamTest {
def mkStream(seq: Seq[Int]): InputStream = {
import js.JSConverters._
val off = 100
val data = new Int8Array(seq.size + off)
data.set(seq.toJSArray, off)
new ArrayBufferInputStream(data.buffer, off, seq.size)
}
}
| SebsLittleHelpers/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/typedarray/ArrayBufferInputStreamTest.scala | Scala | apache-2.0 | 5,181 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import async.{AsyncKafkaProducer, ProducerConfig, QueueClosedException, QueueFullException}
import kafka.serializer.Serializer
import kafka.message.{ByteBufferMessageSet, Message}
import junit.framework.{Assert, TestCase}
import java.util.Properties
import org.easymock.EasyMock
import kafka.api.ProducerRequest
import org.apache.log4j.Level
class AsyncProducerTest extends TestCase {
private val messageContent1 = "test"
private val topic1 = "test-topic"
private val message1: Message = new Message(messageContent1.getBytes)
private val messageContent2 = "test1"
private val topic2 = "test1-topic"
private val message2: Message = new Message(messageContent2.getBytes)
def testProducerQueueSize() {
val basicProducer = EasyMock.createMock(classOf[SimpleProducer])
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 10)))))
EasyMock.expectLastCall
basicProducer.close
EasyMock.expectLastCall
EasyMock.replay(basicProducer)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "10")
props.put("serializer.class", "kafka.producer.StringSerializer")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
//temporarily set log4j to a higher level to avoid error in the output
producer.setLoggerLevel(Level.FATAL)
try {
for(i <- 0 until 11) {
producer.send(messageContent1)
}
Assert.fail("Queue should be full")
}
catch {
case e: QueueFullException =>
}
producer.close
EasyMock.verify(basicProducer)
producer.setLoggerLevel(Level.ERROR)
}
def testAddAfterQueueClosed() {
val basicProducer = EasyMock.createMock(classOf[SimpleProducer])
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 10)))))
EasyMock.expectLastCall
basicProducer.close
EasyMock.expectLastCall
EasyMock.replay(basicProducer)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "10")
props.put("serializer.class", "kafka.producer.StringSerializer")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
for(i <- 0 until 10) {
producer.send(messageContent1)
}
producer.close
try {
producer.send(messageContent1)
Assert.fail("Queue should be closed")
} catch {
case e: QueueClosedException =>
}
EasyMock.verify(basicProducer)
}
def testBatchSize() {
val basicProducer = EasyMock.createStrictMock(classOf[SimpleProducer])
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 5)))))
EasyMock.expectLastCall.times(2)
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 1)))))
EasyMock.expectLastCall
basicProducer.close
EasyMock.expectLastCall
EasyMock.replay(basicProducer)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "10")
props.put("serializer.class", "kafka.producer.StringSerializer")
props.put("batch.size", "5")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
for(i <- 0 until 10) {
producer.send(messageContent1)
}
Thread.sleep(100)
try {
producer.send(messageContent1)
} catch {
case e: QueueFullException =>
Assert.fail("Queue should not be full")
}
producer.close
EasyMock.verify(basicProducer)
}
def testQueueTimeExpired() {
val basicProducer = EasyMock.createMock(classOf[SimpleProducer])
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 3)))))
EasyMock.expectLastCall
basicProducer.close
EasyMock.expectLastCall
EasyMock.replay(basicProducer)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "10")
props.put("serializer.class", "kafka.producer.StringSerializer")
props.put("queue.time", "200")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
for(i <- 0 until 3) {
producer.send(messageContent1)
}
Thread.sleep(500)
producer.close
EasyMock.verify(basicProducer)
}
def testSenderThreadShutdown() {
val basicProducer = new MockProducer("localhost", 9092, 1000, 1000, 1000)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "10")
props.put("serializer.class", "kafka.producer.StringSerializer")
props.put("queue.time", "100")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
producer.send(messageContent1)
producer.close
}
def testCollateEvents() {
val basicProducer = EasyMock.createMock(classOf[SimpleProducer])
basicProducer.multiSend(EasyMock.aryEq(Array(new ProducerRequest(topic2, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message2), 5)),
new ProducerRequest(topic1, ProducerRequest.RandomPartition,
getMessageSetOfSize(List(message1), 5)))))
EasyMock.expectLastCall
basicProducer.close
EasyMock.expectLastCall
EasyMock.replay(basicProducer)
val props = new Properties()
props.put("host", "localhost")
props.put("port", "9092")
props.put("queue.size", "50")
props.put("serializer.class", "kafka.producer.StringSerializer")
props.put("batch.size", "10")
val config = new ProducerConfig(props)
val producer = new AsyncKafkaProducer[String](config, basicProducer, new StringSerializer)
producer.start
for(i <- 0 until 5) {
producer.send(messageContent1)
producer.send(messageContent2)
}
producer.close
EasyMock.verify(basicProducer)
}
private def getMessageSetOfSize(messages: List[Message], counts: Int): ByteBufferMessageSet = {
var messageList = new java.util.ArrayList[Message]()
for(message <- messages) {
for(i <- 0 until counts) {
messageList.add(message)
}
}
new ByteBufferMessageSet(messageList)
}
class StringSerializer extends Serializer[String] {
def toEvent(message: Message):String = message.toString
def toMessage(event: String):Message = new Message(event.getBytes)
def getName(event: String): String = event.concat("-topic")
}
class MockProducer(override val host: String,
override val port: Int,
override val bufferSize: Int,
override val connectTimeoutMs: Int,
override val reconnectInterval: Int) extends
SimpleProducer(host, port, bufferSize, connectTimeoutMs, reconnectInterval) {
override def send(topic: String, messages: ByteBufferMessageSet): Unit = {
Thread.sleep(1000)
}
override def multiSend(produces: Array[ProducerRequest]) {
Thread.sleep(1000)
}
}
}
| jinfei21/kafka | test/unit/kafka/producer/AsyncProducerTest.scala | Scala | apache-2.0 | 8,589 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.features
import org.junit.Test
import org.scalaide.debug.internal.expression.BaseIntegrationTest
import org.scalaide.debug.internal.expression.BaseIntegrationTestCompanion
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.Names.Scala
class ValAccessTest extends BaseIntegrationTest(ValAccessTest) {
@Test
def testInt(): Unit = eval("int", 1, Java.primitives.int)
@Test
def testChar(): Unit = eval("char", 'c', Java.primitives.char)
@Test
def testDouble(): Unit = eval("double", 1.1, Java.primitives.double)
@Test
def testFloat(): Unit = eval("float", 1.1f, Java.primitives.float)
@Test
def testBoolean(): Unit = eval("boolean", false, Java.primitives.boolean)
@Test
def testString(): Unit = eval("string", "Ala", Java.String)
@Test
def testLong(): Unit = eval("long", 1L, Java.primitives.long)
@Test
def testStringMethod(): Unit = eval("string.toLowerCase", "ala", Java.String)
@Test
def testObjectList(): Unit = eval("list", List("1", "2", "3"), Scala.::)
@Test
def testObjectListMethod(): Unit = eval("list.mkString", "123", Java.String)
@Test
def testStrangeMethodsNamesMethod(): Unit = eval("*", 1, Java.primitives.int)
@Test
def testPlusOnVals(): Unit = eval("int + int", 2, Java.primitives.int)
@Test
def testOuterScopedVal(): Unit = eval("outer", "ala", Java.String)
@Test
def testLibClassVal(): Unit = eval("libClass", "LibClass(1)", "debug.LibClass")
@Test
def testObjectAccess(): Unit = eval("objectVal", "Libs - object", "debug.Libs$")
}
object ValAccessTest extends BaseIntegrationTestCompanion
| stephenh/scala-ide | org.scala-ide.sdt.debug.expression.tests/src/org/scalaide/debug/internal/expression/features/ValAccessTest.scala | Scala | bsd-3-clause | 1,749 |
package com.geeksville.mavlink
import java.io._
import com.geeksville.akka.InstrumentedActor
import org.mavlink.messages.MAVLinkMessage
import com.geeksville.util.ThreadTools
import com.geeksville.util.Using._
import org.mavlink._
import com.geeksville.util.DebugInputStream
import com.geeksville.util.ByteOnlyInputStream
import com.geeksville.util.Throttled
import com.geeksville.logback.Logging
import java.net.ConnectException
import scala.concurrent._
import scala.util.Random
import java.net.SocketTimeoutException
import akka.actor.PoisonPill
import akka.actor.Actor
import java.nio.BufferUnderflowException
// with SerialPortEventListener
/**
* Receives mavlink from an input stream
*
* @param sysIdOverride if set, we will replace any received sysIds with this alternative (useful for remapping sysId based on interface)
* @param tlogSpeedup if specified, we expect to see an 8 byte timestamp before each msg. We will play the read data back at the rate we find in the file.
*/
class MavlinkStreamReceiver(
ingen: => InputStream,
val sysIdOverride: Option[Int] = None,
val tlogSpeedup: Option[Double] = None, autoStart: Boolean = true) extends InstrumentedActor with MavlinkReceiver {
log.debug("MavlinkStream starting")
MavlinkStreamReceiver.isIgnoreReceive = false
/**
* We use generators to init these variables, because android doesn't allow network access from the
* 'main' thread
*/
private lazy val instream = ingen // new DebugInputStream(ingen)
/// This skanky hack is to make sure that we only touch the inputstream if it has already been created
private var isInstreamValid = false
/// The id we expect for vehicles on this port (possibly will be overridden)
val expectedSysId = 1
val rxThread = ThreadTools.createDaemon("streamRx")(rxWorker)
/**
* If true we will pretend to drop many packets
*/
var simulateUnreliable = false
private val rand = new Random(System.currentTimeMillis)
private var shuttingDown = false
//rxThread.setPriority(Thread.MAX_PRIORITY)
if (autoStart) {
log.info("Autostarting reads")
self ! MavlinkStreamReceiver.StartMsg
}
// Mission control does this, seems to be necessary to keep device from hanging up on us
//out.write("\\r\\n\\r\\n\\r\\n".map(_.toByte).toArray)
private def shouldDrop = simulateUnreliable && rand.nextInt(10) < 2
override def onReceive = {
case MavlinkStreamReceiver.StartMsg =>
log.info("Received start message")
rxThread.start()
}
override def postStop() {
log.debug("MavlinkStream postStop")
shuttingDown = true
// This should cause the rx thread to bail
if (isInstreamValid)
instream.close()
super.postStop()
}
private def rxWorker() {
println("MavlinkStream thread running")
try {
using(instream) { stream =>
isInstreamValid = true
val dataStream = new DataInputStream(stream)
val reader = new MAVLinkReader(dataStream, IMAVLinkMessage.MAVPROT_PACKET_START_V10)
var lostBytes = 0
var badSeq = 0
val messageThrottle = new Throttled(60 * 1000)
var oldLost = 0L
var oldNumPacket = 0L
var numPacket = 0L
var prevSeq = -1
val overrideId = sysIdOverride.getOrElse(-1)
var startTimestamp = 0L
var startTick = System.currentTimeMillis
try {
while (!shuttingDown) {
try {
//log.debug("Reading next packet")
// Sleep if needed to simulate the time delay
tlogSpeedup.foreach { speedup =>
val nowStamp = (dataStream.readLong / speedup).toLong
if (startTimestamp == 0L) {
startTimestamp = nowStamp
}
val desired = (nowStamp - startTimestamp) + startTick
val delay = desired - System.currentTimeMillis
if (delay > 0) {
//log.debug(s"Sleeping for $delay")
Thread.sleep(delay)
}
}
val msg = Option(reader.getNextMessage())
//println(s"Read packet: $msg")
msg.foreach { s =>
numPacket += 1
// Reassign sysId if requested
if (overrideId != -1 && s.sysId == expectedSysId)
s.sysId = overrideId
//log.debug("RxSer: " + s)
if (reader.getLostBytes > lostBytes) {
// The android version of the library lets an extra two bytes sneak in. FIXME. For now
// ignore silently because it seems okay (I bet the bytes are ftdi header bytes)
// if (reader.getLostBytes != lostBytes + 2)
//log.warn("Serial RX has dropped %d bytes in total...".format(reader.getLostBytes))
lostBytes = reader.getLostBytes
}
if (reader.getBadSequence > badSeq) {
badSeq = reader.getBadSequence
//log.warn("Serial RX has %d bad sequences in total...".format(badSeq))
}
messageThrottle { dt: Long =>
val numSec = dt / 1000.0
val newLost = reader.getLostBytes
val dropPerSec = (newLost - oldLost) / numSec
oldLost = newLost
val mPerSec = (numPacket - oldNumPacket) / numSec
oldNumPacket = numPacket
log.info("msgs per sec %s, bytes dropped per sec=%s".format(mPerSec, dropPerSec))
}
// Dups are normal, the 3dr radio will duplicate packets if it has nothing better to do
if (s.sequence != prevSeq && !MavlinkStreamReceiver.isIgnoreReceive) // for profiling
if (!shouldDrop)
handleIncomingPacket(s)
prevSeq = s.sequence
}
} catch {
case ex: BufferUnderflowException =>
log.error("Ignoring underflow in message parse") // The mavlink code doesn't properly handle this
}
}
// This catch clause is only used _after_ we've successfully opened our socket
} catch {
case ex: EOFException =>
// Kill our actor if our port gets closed
log.info("Exiting stream reader due to EOF")
self ! PoisonPill
case ex: IOException =>
if (!shuttingDown) {
log.error("Killing mavlink stream due to: " + ex)
self ! PoisonPill
}
}
}
// This catch clause covers connection time problems
} catch {
case ex: IOException =>
log.error("Failure to connect: " + ex.getMessage)
self ! PoisonPill
case ex: SocketTimeoutException =>
log.error("Socket timeout: " + ex.getMessage)
self ! PoisonPill
}
log.debug("Exiting mavlink reader: " + this)
}
}
object MavlinkStreamReceiver {
/// Start reading from stream
case object StartMsg
var isIgnoreReceive = false
}
| geeksville/arduleader | common/src/main/scala/com/geeksville/mavlink/MavlinkStreamReceiver.scala | Scala | gpl-3.0 | 7,127 |
package ca.aretex.labs.data.jsonmodel
import org.junit.Test
/**
* Created by Choungmo Fofack on 5/4/17.
*/
class PersonTest {
@Test
def test() = {
val person = Person(9999, false, "John Williams", null, Array(Child(16, "Shirley"), Child(11, "Jessica")))
assert(
"Person(9999, false, John Williams, List(), List(Child(16,Shirley), Child(11,Jessica)))"
.equalsIgnoreCase(person.toString())
)
}
}
| nicaiseeric/lazy-json | src/test/scala/ca/aretex/labs/data/jsonmodel/PersonTest.scala | Scala | apache-2.0 | 429 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.params.wrappers.spark
import org.apache.spark.ml
import io.deepsense.deeplang.params.StringParam
import io.deepsense.deeplang.params.validators.{AcceptAllRegexValidator, Validator}
class StringParamWrapper[P <: ml.param.Params](
override val name: String,
override val description: Option[String],
val sparkParamGetter: P => ml.param.Param[String],
override val validator: Validator[String] = new AcceptAllRegexValidator)
extends StringParam(name, description, validator)
with ForwardSparkParamWrapper[P, String] {
override def replicate(name: String): StringParamWrapper[P] =
new StringParamWrapper[P](name, description, sparkParamGetter, validator)
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/main/scala/io/deepsense/deeplang/params/wrappers/spark/StringParamWrapper.scala | Scala | apache-2.0 | 1,312 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import htsjdk.samtools.ValidationStringency
import org.apache.spark.SparkContext
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.utils.cli._
import org.bdgenomics.utils.misc.Logging
import org.kohsuke.args4j.{ Option => Args4jOption, Argument }
object Vcf2ADAM extends BDGCommandCompanion {
val commandName = "vcf2adam"
val commandDescription = "Convert a VCF file to the corresponding ADAM format"
def apply(cmdLine: Array[String]) = {
new Vcf2ADAM(Args4j[Vcf2ADAMArgs](cmdLine))
}
}
class Vcf2ADAMArgs extends Args4jBase with ParquetSaveArgs {
@Argument(required = true, metaVar = "VCF", usage = "The VCF file to convert", index = 0)
var vcfPath: String = _
@Argument(required = true, metaVar = "ADAM", usage = "Location to write ADAM Variant data", index = 1)
var outputPath: String = null
@Args4jOption(required = false, name = "-coalesce", usage = "Set the number of partitions written to the ADAM output directory")
var coalesce: Int = -1
@Args4jOption(required = false, name = "-force_shuffle_coalesce", usage = "Even if the repartitioned RDD has fewer partitions, force a shuffle.")
var forceShuffle: Boolean = false
@Args4jOption(required = false, name = "-only_variants", usage = "Output Variant objects instead of Genotypes")
var onlyVariants: Boolean = false
@Args4jOption(required = false, name = "-stringency", usage = "Stringency level for various checks; can be SILENT, LENIENT, or STRICT. Defaults to STRICT")
var stringency: String = "STRICT"
}
class Vcf2ADAM(val args: Vcf2ADAMArgs) extends BDGSparkCommand[Vcf2ADAMArgs] with Logging {
val companion = Vcf2ADAM
val stringency = ValidationStringency.valueOf(args.stringency)
def run(sc: SparkContext) {
val variantContextRdd = sc.loadVcf(args.vcfPath, stringency)
val variantContextsToSave = if (args.coalesce > 0) {
variantContextRdd.transform(
_.coalesce(args.coalesce, shuffle = args.coalesce > variantContextRdd.rdd.partitions.length || args.forceShuffle)
)
} else {
variantContextRdd
}
if (args.onlyVariants) {
variantContextsToSave
.toVariantRDD
.saveAsParquet(args)
} else {
variantContextsToSave
.toGenotypeRDD
.saveAsParquet(args)
}
}
}
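// Hedged CLI sketch (illustrative only): through the adam-cli launcher this
// command is invoked roughly as
//   adam-submit vcf2adam -coalesce 10 -only_variants input.vcf output.adam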
| massie/adam | adam-cli/src/main/scala/org/bdgenomics/adam/cli/Vcf2ADAM.scala | Scala | apache-2.0 | 3,115 |
/*
* Copyright (c) 2013, Scodec
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scodec
package codecs
import scodec.bits.*
class ByteAlignedCodecTest extends CodecSuite:
test("roundtrip") {
roundtrip(byteAligned(int32), Int.MaxValue)
roundtrip(byteAligned(uint(6)), 2)
}
test("pad appropriately") {
assertEquals(byteAligned(uint(6)).encode(1).require, bin"00000100")
assertEquals(
byteAligned(listOfN(uint(4), uint8))
.encode(List(1, 2, 3))
.require,
bin"00110000000100000010000000110000"
)
}
test("de-pad appropriately") {
assertEquals(
byteAligned(listOfN(uint(4), uint8))
.decode(bin"001100000001000000100000001100001111")
.require,
DecodeResult(List(1, 2, 3), bin"1111")
)
}
test("compute size bounds appropriately") {
assertEquals(byteAligned(listOfN(uint(4), uint8)).sizeBound, SizeBound.atLeast(8))
}
| scodec/scodec | unitTests/src/test/scala/scodec/codecs/ByteAlignedCodecTest.scala | Scala | bsd-3-clause | 2,422 |
/*
Copyright 2012 Georgia Tech Research Institute
Author: lance.gatlin@gtri.gatech.edu
This file is part of org.gtri.util.iteratee library.
org.gtri.util.iteratee library is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
org.gtri.util.iteratee library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with org.gtri.util.iteratee library. If not, see <http://www.gnu.org/licenses/>.
*/
package org.gtri.util.iteratee.impl
import org.gtri.util.iteratee.api.Enumerator
import org.gtri.util.scala.exelog.noop._
/**
* A class to get an Iterator of Enumerator.State.Result for an Enumerator
 * @param r the initial result from which iteration starts
 * @param step function producing the next result from the current one
 * @tparam O the element type produced by the enumerator
 * @tparam R the concrete result type, a subtype of Enumerator.State.Result[O]
*/
class EnumeratorIterator[O,R <: Enumerator.State.Result[O]](r : R, step: R => R) extends scala.Iterator[R] {
var current = r
  def hasNext = !current.next.statusCode.isDone
def next() = {
current = step(current)
current
}
}
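// Hedged usage sketch (hypothetical `initial` result and `advance` step
// function, illustrative only):
//   val it = new EnumeratorIterator(initial, advance)
//   while (it.hasNext) println(it.next())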
| gtri-iead/org.gtri.util.iteratee | impl/src/main/scala/org/gtri/util/iteratee/impl/EnumeratorIterator.scala | Scala | gpl-3.0 | 1,367 |
/*
* Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.edu.agh.scalamas.random
import org.apache.commons.math3.random.{RandomDataGenerator, Well19937c, RandomGenerator}
import pl.edu.agh.scalamas.app.AgentRuntimeComponent
import net.ceedubs.ficus.Ficus._
/**
* Mixin component for random number generation. The random generators are based on apache commons math library.
*/
trait RandomGeneratorComponent {
this: AgentRuntimeComponent =>
/**
* The global seed used in the application.
* A common value may not guarantee repeatable results in the case of concurrent applications.
* A distinct value guarantees distinct results.
*/
def globalSeed = agentRuntime.config.as[Option[Long]]("mas.seed").getOrElse(System.currentTimeMillis())
/** Factory method for creating a random generator. Override this to choose a different RNG algorithm.
*/
def randomGeneratorFactory(seed: Long): RandomGenerator = new Well19937c(seed)
/**
* Provide the RNG. Shortcut for randomData.getRandomGenerator()
*/
def random: RandomGenerator = randomData.getRandomGenerator
/**
* Provides a RandomDataGenerator for distribution-based operations.
*/
def randomData: RandomDataGenerator
}
| ros3n/IntOb | core/src/main/scala/pl/edu/agh/scalamas/random/RandomGeneratorComponent.scala | Scala | mit | 2,332 |
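A standalone sketch of the Commons Math wiring the trait performs, assuming commons-math3 on the classpath; the AgentRuntimeComponent/config plumbing is elided:

import org.apache.commons.math3.random.{RandomDataGenerator, Well19937c}

val seed = 12345L                                        // stands in for globalSeed
val randomData = new RandomDataGenerator(new Well19937c(seed))

val uniform = randomData.getRandomGenerator.nextDouble() // plain RNG access, as in `random`
val gaussian = randomData.nextGaussian(0.0, 1.0)         // distribution-based operation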
package mypipe.avro.schema
import org.apache.avro.repo.client.RESTRepositoryClient
import scala.collection.mutable
import java.util.logging.Logger
import org.apache.avro.repo.{ Repository, SchemaEntry, Subject }
import com.google.common.collect.{ HashBiMap, BiMap }
trait SchemaRepository[ID, SCHEMA] {
/** @param subject
* @param schemaId
* @return Some(schema) if the subject and schemaId are valid, None otherwise
*/
def getSchema(subject: String, schemaId: ID): Option[SCHEMA]
/** @param subject
* @return Some(schema) if the subject exists, None otherwise
*/
def getLatestSchema(subject: String, flushCache: Boolean = false): Option[SCHEMA]
/** @param subject
* @param schema
* @return Some(schemaId) if the subject and schema are valid, None otherwise
*/
def getSchemaId(subject: String, schema: SCHEMA): Option[ID]
/** @param subject
* @param schema
* @return schemaId, potentially an already existing one, if the schema isn't new.
* @throws Exception if registration is unsuccessful
*/
def registerSchema(subject: String, schema: SCHEMA): ID
}
/** Generic implementation of a caching client for an AVRO-1124-style repo which provides strongly-typed APIs.
*
 * @tparam ID the type used to identify registered schemas
 * @tparam SCHEMA the in-memory representation of a schema
*/
abstract class GenericSchemaRepository[ID, SCHEMA] extends SchemaRepository[ID, SCHEMA] {
// Abstract functions which need to be overridden using traits or custom implementations.
// Functions used to convert back and forth with the AVRO-1124 Schema Repo, which uses Strings for its IDs and Schemas.
protected def idToString(id: ID): String
protected def stringToId(id: String): ID
protected def schemaToString(schema: SCHEMA): String
protected def stringToSchema(schema: String): SCHEMA
// Configuration
protected def getRepositoryURL: String
// Concrete implementation !
// Utilities
protected lazy val client: Repository = new RESTRepositoryClient(getRepositoryURL)
private val logger = Logger.getLogger(classOf[GenericSchemaRepository[ID, SCHEMA]].getName)
// Internal state
private val idToSchemaCache = mutable.Map[String, BiMap[ID, SCHEMA]]()
private val schemaToIdCache = mutable.Map[String, BiMap[SCHEMA, ID]]()
private val latestSchemaCache = new java.util.HashMap[String, SCHEMA]()
/** Utility function to DRY up the code.
*
*
* @param subject to look into to get the repository's corresponding [[org.apache.avro.repo.Subject]]
* @param key to store in the cache, if we are able to retrieve an entity
* @param map to store the key and entity into, if we are able to retrieve the entity
* N.B.: using a java.util.Map for compatibility with Guava's [[com.google.common.collect.BiMap]]
* @param entityRetrievalFunction to use on the [[org.apache.avro.repo.Subject]] to get a [[org.apache.avro.repo.SchemaEntry]]
* @param schemaEntryToStringFunction to use on the [[org.apache.avro.repo.SchemaEntry]] in order to get our (Stringly-typed) entity
* @param stringToValueFunction to convert the (Stringly-typed) entity into the proper type (VALUE).
* @param createMissingSubject to tell the function whether to create the subject in the remote repository, if it doesn't already exist (default = false).
* @param throwException to tell the function whether to throw an exception instead of returning None if there's any problem (default = false).
* @tparam KEY the type of the key in the map we want to update
* @tparam VALUE the type of the value in the map we want to update
* @return Some(schema) if the subject and key are valid, None otherwise
*/
private def retrieveUnknownEntity[KEY, VALUE](subject: String,
key: KEY,
map: java.util.Map[KEY, VALUE],
                                                entityRetrievalFunction: Subject ⇒ SchemaEntry,
                                                schemaEntryToStringFunction: SchemaEntry ⇒ String,
                                                stringToValueFunction: String ⇒ VALUE,
createMissingSubject: Boolean = false,
throwException: Boolean = false): Option[VALUE] = {
val subjectOption: Option[Subject] = client.lookup(subject) match {
      case null ⇒ {
if (createMissingSubject) {
Some(client.register(subject, null))
} else {
None
}
}
      case subject ⇒ Some(subject)
}
subjectOption match {
      case Some(subject) ⇒ {
try {
entityRetrievalFunction(subject) match {
            case null ⇒ {
if (throwException) {
throw new RuntimeException("An unknown problem occurred... the RESTRepositoryClient returned null.")
} else {
None
}
}
            case schemaEntry ⇒ {
val value: VALUE = stringToValueFunction(
schemaEntryToStringFunction(
schemaEntry))
map.put(key, value)
Some(value)
}
}
} catch {
          case e: Exception ⇒ {
            logger.warning("Got an exception while trying to retrieve an entity from the RESTRepositoryClient!\n" +
e.getMessage + ": " + e.getStackTraceString)
if (throwException) {
throw e
} else {
None
}
}
}
}
      case None ⇒ if (throwException) {
throw new RuntimeException("The requested subject does not exist in the remote Schema Repository.")
} else {
None
}
}
}
private def retrieveEntity[KEY, VALUE](subject: String,
key: KEY,
mainCache: mutable.Map[String, BiMap[KEY, VALUE]],
inverseCache: mutable.Map[String, BiMap[VALUE, KEY]],
                                         entityRetrievalFunction: Subject ⇒ SchemaEntry,
                                         schemaEntryToStringFunction: SchemaEntry ⇒ String,
                                         stringToValueFunction: String ⇒ VALUE,
createMissingSubject: Boolean = false,
flushCache: Boolean = false,
throwException: Boolean = false): Option[VALUE] = {
def specificRetrieveFunction(cachedMap: java.util.Map[KEY, VALUE]): Option[VALUE] = {
retrieveUnknownEntity[KEY, VALUE](
subject,
key,
cachedMap,
entityRetrievalFunction,
schemaEntryToStringFunction,
stringToValueFunction,
createMissingSubject,
throwException)
}
mainCache.get(subject) match {
      case Some(existingCachedMap) ⇒ Option(existingCachedMap.get(key)) match {
        case None ⇒ specificRetrieveFunction(existingCachedMap)
        case someSchema if (flushCache) ⇒ specificRetrieveFunction(existingCachedMap)
        case someSchema ⇒ someSchema
}
      case None ⇒ {
val newMapToCache = HashBiMap.create[KEY, VALUE]()
mainCache.put(subject, newMapToCache)
inverseCache.put(subject, newMapToCache.inverse())
specificRetrieveFunction(newMapToCache)
}
}
}
/** @param subject
* @param schemaId
* @return Some(schema) if the subject and schemaId are valid, None otherwise
*/
def getSchema(subject: String, schemaId: ID): Option[SCHEMA] = {
retrieveEntity[ID, SCHEMA](
subject,
schemaId,
idToSchemaCache,
schemaToIdCache,
entityRetrievalFunction = _.lookupById(idToString(schemaId)),
schemaEntryToStringFunction = _.getSchema,
stringToValueFunction = stringToSchema)
}
/** @param subject
* @return Some(schema) if the subject exists, None otherwise
*/
def getLatestSchema(subject: String, flushCache: Boolean = false): Option[SCHEMA] = {
def retrieve = retrieveUnknownEntity[String, SCHEMA](
subject,
subject,
latestSchemaCache,
entityRetrievalFunction = _.latest,
schemaEntryToStringFunction = _.getSchema,
stringToValueFunction = stringToSchema)
if (flushCache) {
retrieve
} else {
Option(latestSchemaCache.get(subject)) match {
        case None ⇒ retrieve
        case someSchema ⇒ someSchema
}
}
}
/** @param subject
* @param schema
* @return Some(schemaId) if the subject and schema are valid, None otherwise
*/
def getSchemaId(subject: String, schema: SCHEMA): Option[ID] = {
retrieveEntity[SCHEMA, ID](
subject,
schema,
schemaToIdCache,
idToSchemaCache,
entityRetrievalFunction = _.lookupBySchema(schemaToString(schema)),
schemaEntryToStringFunction = _.getId,
stringToValueFunction = stringToId)
}
/** @param subject
* @param schema
* @return schemaId, potentially an already existing one, if the schema isn't new.
* @throws Exception if registration is unsuccessful
*/
def registerSchema(subject: String, schema: SCHEMA): ID = {
retrieveEntity[SCHEMA, ID](
subject,
schema,
schemaToIdCache,
idToSchemaCache,
entityRetrievalFunction = _.register(schemaToString(schema)),
schemaEntryToStringFunction = _.getId,
stringToValueFunction = stringToId,
createMissingSubject = true,
throwException = true).get
}
} | Asana/mypipe | mypipe-avro/src/main/scala/mypipe/avro/schema/GenericSchemaRepository.scala | Scala | apache-2.0 | 9,729 |
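A hedged sketch of a concrete binding of the abstract repository above, with ID = Short and SCHEMA = org.apache.avro.Schema; the class name and URL are illustrative assumptions, not part of the source:

import org.apache.avro.Schema

class AvroSchemaRepository(url: String) extends GenericSchemaRepository[Short, Schema] {
  protected def idToString(id: Short): String = id.toString
  protected def stringToId(id: String): Short = id.toShort
  protected def schemaToString(schema: Schema): String = schema.toString
  protected def stringToSchema(schema: String): Schema = new Schema.Parser().parse(schema)
  protected def getRepositoryURL: String = url
}

// val repo = new AvroSchemaRepository("http://localhost:2876/schema-repo") // hypothetical endpoint
// val id   = repo.registerSchema("user", someAvroSchema)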
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
/*
Error code: 9612 169 Location: [CTE]/InformationRequired/Expenditure/UKlandBuildings
Description: [E60] should exceed 0 if [E100] is present
Transactional error (en): Box E60 must be greater than 0 (zero) if Box E100 is completed
Transactional error (cy): Mae’n rhaid i Flwch E60 fod yn fwy na 0 (sero) os yw Blwch E100 wedi ei gwblhau
*/
class E60Spec extends WordSpec with Matchers with MockitoSugar {
val boxRetriever = mock[CT600EBoxRetriever]
"E60" should {
"validate" when {
"true when E60 == 0 and E100 is empty" in {
when(boxRetriever.e60()).thenReturn(E60(Some(0)))
when(boxRetriever.e100()).thenReturn(E100(None))
E60(Some(0)).validate(boxRetriever) shouldBe Set.empty
}
"true when E60 == 0 and E100 == 0" in {
when(boxRetriever.e60()).thenReturn(E60(Some(0)))
when(boxRetriever.e100()).thenReturn(E100(Some(0)))
E60(Some(0)).validate(boxRetriever) shouldBe Set.empty
}
"true when E60 > 0 and E100 > 0" in {
when(boxRetriever.e60()).thenReturn(E60(Some(60)))
when(boxRetriever.e100()).thenReturn(E100(Some(100)))
E60(Some(60)).validate(boxRetriever) shouldBe Set.empty
}
"false when E60 == 0 and E100 > 0" in {
when(boxRetriever.e60()).thenReturn(E60(Some(0)))
when(boxRetriever.e100()).thenReturn(E100(Some(100)))
E60(Some(0)).validate(boxRetriever) shouldBe Set(CtValidation(boxId = Some("E60"), errorMessageKey = "error.E60.must.be.positive.when.E100.positive"))
}
"false when E60 empty and E100 > 0" in {
when(boxRetriever.e60()).thenReturn(E60(None))
when(boxRetriever.e100()).thenReturn(E100(Some(100)))
E60(None).validate(boxRetriever) shouldBe Set(CtValidation(boxId = Some("E60"), errorMessageKey = "error.E60.must.be.positive.when.E100.positive"))
}
}
}
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600e/v3/E60Spec.scala | Scala | apache-2.0 | 2,737 |
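The rule exercised above, restated as a hypothetical pure predicate (the real check lives in the E60 box definition, which is not part of this file):

// Valid unless E100 is positive while E60 is missing or zero.
def e60Valid(e60: Option[Int], e100: Option[Int]): Boolean =
  e100.forall(_ <= 0) || e60.exists(_ > 0)

assert(e60Valid(Some(0), None))        // E100 absent
assert(e60Valid(Some(60), Some(100)))  // both positive
assert(!e60Valid(Some(0), Some(100)))  // E60 must exceed zero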
package com.geishatokyo.diffsql.diff
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import com.geishatokyo.diffsql.ast._
import com.geishatokyo.diffsql.ast.DataType
import com.geishatokyo.diffsql.ast.Table
import com.geishatokyo.diffsql.ast.Column
/**
* Created by takeshita on 14/02/17.
*/
class StandardDifferencerTest extends FlatSpec with Matchers {
"StandardDifferencer" should "diff columns" in {
implicit val eq = DataTypeEquality.OnlyName
val table1 = Table("User",List(Column("id",DataType("BIGINT"),List(ColumnOption.PrimaryKey)),Column("name",DataType("INT"))),Nil)
val table2 = Table("user",List(Column("id",DataType("BigInt")),Column("gender",DataType("INT"))),Nil)
implicit val differencer = new StandardDifferencer()
val diff = table1 -- table2
val col = diff.columns
    col.add shouldBe List(Column("name",DataType("INT")))
    col.remove shouldBe List(Column("gender",DataType("INT")))
    col.alter shouldBe List(Column("id",DataType("BIGINT"),List(ColumnOption.PrimaryKey)))
}
} | geishatokyo/diff-sql-table | parser/src/test/scala/com/geishatokyo/diffsql/diff/StandardDifferencerTest.scala | Scala | mit | 1,038 |
package de.tuberlin.uebb.sl2.modules
import java.io.File
trait Configs {
case class Config(
/**
* where to look for source files
*/
val sourcepath: File,
/**
* which source files at source path to compile
*/
val sources: List[String],
/**
* where to look for compiled versions of imported modules
* (this should usually be the destination directory.
* if its not, manual changes to the requirejs-config might
* be needed.)
*/
val classpath: File,
/**
* the simple name of the main file compiled
*/
val mainName: String,
/**
* the parent directory of the main file compiled
*/
val mainParent: File,
/**
* where to put the compiled files from source.
*/
val destination: File
)
}
| mzuber/simple-language | src/main/scala/modules/Configs.scala | Scala | bsd-3-clause | 806 |
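An illustrative construction inside a component that mixes in Configs; the paths and names are placeholders, not project defaults:

import java.io.File

val config = Config(
  sourcepath  = new File("src/main/sl"),
  sources     = List("Main.sl"),
  classpath   = new File("target/classes"),
  mainName    = "Main",
  mainParent  = new File("src/main/sl"),
  destination = new File("target/classes")
)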
package engine
import engine.ActionResult.{JoinIsWaiting, No, Ok, Yes}
import engine.Task.TaskContext
abstract class TaskDefinition {
def action(implicit context: TaskContext): Option[ActionResult]
def name: String
}
object TaskDefinition {
class ProcessTaskDefinition(func: (TaskContext) => Unit) extends TaskDefinition {
override def action(implicit context: TaskContext): Option[ActionResult] = {
func(context)
Some(Ok)
}
override def name: String = "Process"
}
class SubWorkflowTaskDefinition(wfDef: WorkflowDefinition) extends TaskDefinition {
private val key = "SubflowTaskDefinition_%d".format(this.hashCode())
def subWorkflow(task: Task): Option[Workflow] = task.get[Workflow](key)
override def action(implicit context: TaskContext): Option[ActionResult] =
subWorkflow(context.task) orElse {
val wf: Workflow = context.engine.startWorkflow(wfDef, "SubWorkflow", context.workflow)
context.task.put(key, wf)
None
} flatMap {
wf => if (wf.endExecuted) Some(Ok) else None
}
override def name: String = "SubWorkflow[%s]".format(wfDef.name)
}
class BranchTaskDefinition(func: (TaskContext) => Boolean) extends TaskDefinition {
override def action(implicit context: TaskContext): Option[ActionResult] =
if (func(context))
Some(Yes)
else
Some(No)
override def name: String = "Branch"
}
class WaitFirstTaskDefinition(waitFor: TaskDefinition*) extends TaskDefinition {
private val key = "WaitFirstTaskDefinition_%d".format(this.hashCode())
private val _waitFor = waitFor.toSet
override def action(implicit context: TaskContext): Option[ActionResult] = {
val parentDef: TaskDefinition = context.task.parent.get.value.taskDef
val parents = context.workflow.get[Set[TaskDefinition]](key).map(_ + parentDef).getOrElse(Set(parentDef))
context.workflow.put(key, parents)
if (_waitFor.intersect(parents).nonEmpty)
Some(Ok)
else
Some(JoinIsWaiting)
}
override def name: String = "WaitFirst"
}
class WaitAllTaskDefinition(waitFor: TaskDefinition*) extends TaskDefinition {
private val key = "WaitFirstTaskDefinition_%d".format(this.hashCode())
private val _waitFor = waitFor.toSet
override def action(implicit context: TaskContext): Option[ActionResult] = {
val parentDef: TaskDefinition = context.task.parent.get.value.taskDef
val parents = context.workflow.get[Set[TaskDefinition]](key).map(_ + parentDef).getOrElse(Set(parentDef))
context.workflow.put(key, parents)
if (_waitFor == parents)
Some(Ok)
else
Some(JoinIsWaiting)
}
override def name: String = "WaitAll"
}
object StartTaskDefinition extends TaskDefinition {
override def action(implicit context: TaskContext): Option[ActionResult] = Option(Ok)
override def name: String = "Start"
}
object EndTaskDefinition extends TaskDefinition {
override def action(implicit context: TaskContext): Option[ActionResult] = Option(Ok)
override def name: String = "End"
}
}
| mpod/scala-workflow | backend/src/main/scala/engine/TaskDefinition.scala | Scala | gpl-3.0 | 3,136 |
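A hedged construction sketch for two of the definitions above, assuming the Task key/value API used by SubWorkflowTaskDefinition (put/get); a real TaskContext is supplied by the engine at run time, so only wiring is shown:

val work = new TaskDefinition.ProcessTaskDefinition(ctx =>
  ctx.task.put("visited", true))                     // side effect recorded on the task's state

val gate = new TaskDefinition.BranchTaskDefinition(ctx =>
  ctx.task.get[Boolean]("visited").getOrElse(false)) // routes to the Yes/No downstream edges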
package core
import akka.actor.{ Actor, ActorRefFactory, PoisonPill }
import com.typesafe.scalalogging.LazyLogging
import persistence.entities.{ TestsConfiguration, Project, PullRequestPayload }
import spray.client.pipelining._
import spray.http.{ BasicHttpCredentials, HttpRequest, _ }
import utils._
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }
case class SendComment(proj: Project, testId: Int, prSource: PullRequestPayload)
trait CommentWriter {
def apply(prSource: PullRequestPayload, msg: String, modules: Configuration)(implicit refFactory: ActorRefFactory,
ec: ExecutionContext): Future[HttpResponse]
def actionNew: String
def actionBetter: String
def actionWorse: String
def actionEqual: String
def actionUnknown: String
}
class CommentWriterActor(modules: Configuration with PersistenceModule,
commentWriter: CommentWriter,
testConfig: Option[TestsConfiguration]) extends Actor with LazyLogging {
import context.dispatcher
def receive: Receive = {
case SendComment(proj, testId, prSource) =>
val response = for {
jobs <- modules.jobsDal.getJobsByTestId(testId)
jobComment <- Future.sequence(jobs.map(CommentBuilder.buildComment(_, commentWriter, modules, testConfig.flatMap(_.commentTemplate)))).map(_.mkString("\n\n"))
response <- commentWriter(prSource, jobComment, modules)
} yield response
val name = s"${prSource.repoFullName}#${prSource.pullRequestId}"
response onComplete {
case Success(res) =>
if (res.status.isSuccess)
logger.info(s"Write comment on $name status ${res.status}")
else
logger.error(s"Write comment on $name status ${res.status}")
self ! PoisonPill
case Failure(error) =>
logger.error(s"Failed to send comment for $name", error)
self ! PoisonPill
}
}
}
object BitbucketCommentWriter extends CommentWriter {
def apply(prSource: PullRequestPayload, msg: String, modules: Configuration)(implicit refFactory: ActorRefFactory, ec: ExecutionContext): Future[HttpResponse] = {
val url = s"https://bitbucket.org/api/1.0/repositories/${prSource.repoFullName}/pullrequests/${prSource.pullRequestId}/comments"
val pipeline: HttpRequest => Future[HttpResponse] = (
addCredentials(BasicHttpCredentials(modules.config.getString("bitbucket.user"), modules.config.getString("bitbucket.pass")))
~> sendReceive)
pipeline(Post(url, FormData(Seq("content" -> msg))))
}
val actionNew = ":new:"
val actionBetter = ":green_heart:"
val actionWorse = ":broken_heart:"
val actionEqual = ":blue_heart:"
val actionUnknown = ":grey_question:"
}
| ShiftForward/ridgeback | src/main/scala/core/CommentWriter.scala | Scala | mit | 2,842 |
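A hedged wiring sketch for the actor above; scheduleComment is a hypothetical helper, and modules, the project, and the payload come from the application bootstrap and webhook parsing, which live outside this file:

import akka.actor.{ActorSystem, Props}
import persistence.entities.{Project, PullRequestPayload}
import utils.{Configuration, PersistenceModule}

def scheduleComment(system: ActorSystem, modules: Configuration with PersistenceModule,
                    project: Project, testId: Int, payload: PullRequestPayload): Unit = {
  // Each SendComment spawns a writer; the actor kills itself once the comment is posted.
  val writer = system.actorOf(Props(new CommentWriterActor(modules, BitbucketCommentWriter, None)))
  writer ! SendComment(project, testId, payload)
}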