code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hydrograph.engine.spark.datasource.xml.util
import java.io.CharArrayWriter
import java.nio.charset.Charset
import javax.xml.stream.XMLOutputFactory
import com.sun.xml.internal.txw2.output.IndentingXMLStreamWriter
import hydrograph.engine.spark.datasource.xml.parsers.StaxXmlGenerator
import hydrograph.engine.spark.datasource.xml.{XmlInputFormat, XmlOptions}
import org.apache.hadoop.io.{LongWritable, Text}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame
import scala.collection.Map
/**
* The Object XmlFile.
*
* @author Bitwise
*
*/
private[xml] object XmlFile {
  // Indentation unit prepended to each pretty-printed row of output XML.
  val DEFAULT_INDENT = " "
  // Row separator. NOTE(review): the literal is "\\n" (backslash + 'n', two chars).
  // replaceAll() below interprets it as the regex escape for a newline, but the same
  // value is also concatenated verbatim into the output string — confirm whether a
  // real "\n" newline literal was intended here.
  val DEFAULT_ROW_SEPARATOR = "\\n"

  /**
   * Reads the file(s) at `location` into an RDD of XML fragments, one string per
   * `<rowTag>...</rowTag>` element, decoded with the given `charset`.
   *
   * @param context SparkContext whose shared Hadoop configuration is mutated in place
   *                (NOTE(review): concurrent reads with different rowTags would race)
   * @param location input path accepted by newAPIHadoopFile
   * @param charset  name of the charset the input bytes are encoded with
   * @param rowTag   element name delimiting one logical row
   */
  def withCharset(
      context: SparkContext,
      location: String,
      charset: String,
      rowTag: String): RDD[String] = {
    // XmlInputFormat splits the input stream on these start/end tag keys.
    context.hadoopConfiguration.set(XmlInputFormat.START_TAG_KEY, s"<$rowTag>")
    context.hadoopConfiguration.set(XmlInputFormat.END_TAG_KEY, s"</$rowTag>")
    context.hadoopConfiguration.set(XmlInputFormat.ENCODING_KEY, charset)
    if (Charset.forName(charset) == Charset.forName(XmlOptions.DEFAULT_CHARSET)) {
      // Default charset: decode with the platform String(byte[], int, int) constructor.
      // getBytes/getLength are used (instead of Text.toString) because the Text's
      // backing array may be longer than the valid content.
      context.newAPIHadoopFile(location,
        classOf[XmlInputFormat],
        classOf[LongWritable],
        classOf[Text]).map(pair => new String(pair._2.getBytes, 0, pair._2.getLength))
    } else {
      // Non-default charset: decode explicitly with the requested charset.
      context.newAPIHadoopFile(location,
        classOf[XmlInputFormat],
        classOf[LongWritable],
        classOf[Text]).map(pair => new String(pair._2.getBytes, 0, pair._2.getLength, charset))
    }
  }

  /**
   * Note that writing a XML file from [[DataFrame]] having a field
   * [[org.apache.spark.sql.types.ArrayType]] with its element as nested array would have
   * an additional nested field for the element. For example, the [[DataFrame]] having
   * a field below,
   *
   *   fieldA Array(Array(data1, data2))
   *
   * would produce a XML file below.
   *
   *   <fieldA>
   *     <item>data1</item>
   *   </fieldA>
   *   <fieldA>
   *     <item>data2</item>
   *   </fieldA>
   *
   * Namely, roundtrip in writing and reading can end up in different schema structure.
   */
  def saveAsXmlFile(
      dataFrame: DataFrame,
      path: String,
      parameters: Map[String, String] = Map()): Unit = {
    val options = XmlOptions(parameters.toMap)
    val codecClass = CompressionCodecs.getCodecClass(options.codec)
    // Opening/closing tags of the single root element wrapping all rows.
    val startElement = s"<${options.rootTag}>"
    val endElement = s"</${options.rootTag}>"
    val rowSchema = dataFrame.schema
    val indent = XmlFile.DEFAULT_INDENT
    val rowSeparator = XmlFile.DEFAULT_ROW_SEPARATOR
    val xmlRDD = dataFrame.rdd.mapPartitions { iter =>
      // One StAX writer per partition, reused across rows via writer.reset().
      // NOTE(review): IndentingXMLStreamWriter lives in the JDK-internal
      // com.sun.xml.internal.txw2 package — unsupported API, absent on some JVMs.
      val factory = XMLOutputFactory.newInstance()
      val writer = new CharArrayWriter()
      val xmlWriter = factory.createXMLStreamWriter(writer)
      val indentingXmlWriter = new IndentingXMLStreamWriter(xmlWriter)
      indentingXmlWriter.setIndentStep(indent)
      // State machine that emits: startElement+first row, each following row,
      // then endElement; an initially-empty partition emits a single "" line.
      new Iterator[String] {
        var firstRow: Boolean = true // true until the first element (or "") is emitted
        var lastRow: Boolean = true  // true until the closing element is emitted
        override def hasNext: Boolean = iter.hasNext || firstRow || lastRow
        override def next: String = {
          if (iter.nonEmpty) {
            // Serialize one Row into the shared CharArrayWriter, then drain it.
            val xml = {
              StaxXmlGenerator(
                rowSchema,
                indentingXmlWriter,
                options)(iter.next())
              writer.toString
            }
            writer.reset()
            // Here it needs to add indentations for the start of each line,
            // in order to insert the start element and end element.
            val indentedXml = indent + xml.replaceAll(rowSeparator, rowSeparator + indent)
            if (firstRow) {
              firstRow = false
              startElement + rowSeparator + indentedXml
            } else {
              indentedXml
            }
          } else {
            indentingXmlWriter.close()
            if (!firstRow) {
              lastRow = false
              endElement
            } else {
              // This means the iterator was initially empty.
              firstRow = false
              lastRow = false
              ""
            }
          }
        }
      }
    }
    // One text line per emitted string; optionally compressed with the chosen codec.
    codecClass match {
      case null => xmlRDD.saveAsTextFile(path)
      case codec => xmlRDD.saveAsTextFile(path, codec)
    }
  }
}
| capitalone/Hydrograph | hydrograph.engine/hydrograph.engine.spark/src/main/scala/hydrograph/engine/spark/datasource/xml/util/XmlFile.scala | Scala | apache-2.0 | 4,923 |
package io.buoyant.namerd
package iface.mesh
import com.twitter.finagle.{Addr, Dentry, Dtab, Name, NameTree, Namer, Path}
import com.twitter.finagle.buoyant.h2
import com.twitter.finagle.naming.NameInterpreter
import com.twitter.finagle.stats.StatsReceiver
import com.twitter.io.Buf
import com.twitter.util.{Activity, Closable, Future, Return, Throw, Try, Var}
import io.buoyant.grpc.runtime.{GrpcStatus, Stream, VarEventStream}
import io.buoyant.namer.{ConfiguredDtabNamer, Delegator, DelegateTree, RichActivity}
import io.linkerd.mesh
import io.linkerd.mesh.Converters._
/**
* An Interpreter backed by the io.buoyant.proto.namerd.Interpreter service.
*/
object DelegatorService {

  /** Builds the gRPC `Delegator` server endpoint backed by [[Impl]]. */
  def apply(
    store: DtabStore,
    namers: Map[Path, Namer],
    stats: StatsReceiver
  ): mesh.Delegator.Server =
    new mesh.Delegator.Server(new Impl(store, namers, stats))

  // NOTE(review): `stats` is accepted but not referenced in this class — confirm
  // whether it is intentionally unused or should be wired into the interpreter.
  private[mesh] class Impl(
    store: DtabStore,
    namers: Map[Path, Namer],
    stats: StatsReceiver
  ) extends mesh.Delegator {

    // One-shot dtab lookup. The request's root path must be a single Utf8
    // segment naming the namespace; anything else is rejected.
    override def getDtab(req: mesh.DtabReq): Future[mesh.DtabRsp] = req match {
      case mesh.DtabReq(None) => Future.exception(Errors.NoRoot)
      case mesh.DtabReq(Some(proot)) =>
        fromPath(proot) match {
          case Path.Utf8(ns) => store.observe(ns).toFuture.transform(_transformDtabRsp)
          case root => Future.exception(Errors.InvalidRoot(root))
        }
    }

    // Streaming dtab lookup: every update of the stored dtab is pushed as an event.
    override def streamDtab(req: mesh.DtabReq): Stream[mesh.DtabRsp] = req match {
      case mesh.DtabReq(None) => Stream.exception(Errors.NoRoot)
      case mesh.DtabReq(Some(proot)) =>
        fromPath(proot) match {
          case Path.Utf8(ns) => VarEventStream(store.observe(ns).values.map(toDtabRspEv))
          case root => Stream.exception(Errors.InvalidRoot(root))
        }
    }

    // Delegates a name tree through the namespace's dtab (plus an optional
    // request-supplied dtab) and returns the fully-annotated bound tree.
    override def getDelegateTree(req: mesh.DelegateTreeReq): Future[mesh.DelegateTreeRsp] = req match {
      case mesh.DelegateTreeReq(None, _, _) => Future.exception(Errors.NoRoot)
      case mesh.DelegateTreeReq(_, None, _) => Future.exception(Errors.NoName)
      case mesh.DelegateTreeReq(Some(proot), Some(ptree), dtab0) =>
        fromPath(proot) match {
          case Path.Utf8(ns) =>
            val tree = fromPathNameTree(ptree).map(Name.Path(_))
            // Extra dtab from the request is optional; default to empty.
            val dtab = dtab0 match {
              case None => Dtab.empty
              case Some(d) => fromDtab(d)
            }
            getNs(ns).delegate(dtab, tree).map(toDelegateTreeRsp)
          case root => Future.exception(Errors.InvalidRoot(root))
        }
    }

    // NOTE(review): this "stream" is a single-element stream built from one
    // future — it does not observe subsequent updates; confirm that is intended.
    override def streamDelegateTree(req: mesh.DelegateTreeReq): Stream[mesh.DelegateTreeRsp] =
      Stream.fromFuture(getDelegateTree(req))

    // Interpreter for one namespace: the stored dtab (observed live) plus the
    // configured namers.
    private[this] def getNs(ns: String): NameInterpreter with Delegator = {
      val dtabVar = store.observe(ns).map(_extractDtab)
      ConfiguredDtabNamer(dtabVar, namers.toSeq)
    }
  }

  // A missing stored dtab is treated as the empty dtab.
  private[mesh] val _extractDtab: Option[VersionedDtab] => Dtab = {
    case None => Dtab.empty
    case Some(VersionedDtab(dtab, _)) => dtab
  }

  // Converts a one-shot observation result into a response future; an absent
  // dtab is an error for the unary endpoint (unlike _extractDtab above).
  private[mesh] val _transformDtabRsp: Try[Option[VersionedDtab]] => Future[mesh.DtabRsp] = {
    case Return(None) => Future.exception(Errors.RootNotFound)
    case Return(Some(vdtab)) => Future.value(toDtabRsp(vdtab))
    case Throw(e) => Future.exception(GrpcStatus.Internal(e.getMessage))
  }

  // Wraps a versioned dtab into the protobuf response shape.
  private[mesh] val toDtabRsp: VersionedDtab => mesh.DtabRsp = { vdtab =>
    val v = mesh.VersionedDtab.Version(Some(vdtab.version))
    val d = toDtab(vdtab.dtab)
    mesh.DtabRsp(Some(mesh.VersionedDtab(Some(v), Some(d))))
  }

  // Stream-event variant of the above; a missing dtab ends the stream with an error.
  private[mesh] val toDtabRspEv: Try[Option[VersionedDtab]] => VarEventStream.Ev[mesh.DtabRsp] = {
    case Return(Some(vdtab)) => VarEventStream.Val(toDtabRsp(vdtab))
    case Return(None) => VarEventStream.End(Throw(Errors.RootNotFound)) // TODO empty dtab?
    case Throw(e) => VarEventStream.End(Throw(GrpcStatus.Internal(e.getMessage)))
  }

  // Weighted-union branch conversion: carries the branch weight alongside the subtree.
  private[mesh] val toDelegateWeightedTree: DelegateTree.Weighted[Name.Bound] => mesh.BoundDelegateTree.Union.Weighted =
    wt => mesh.BoundDelegateTree.Union.Weighted(Some(wt.weight), Some(toDelegateTree(wt.tree)))

  // Assembles a protobuf tree node from path, optional dentry, and node payload.
  private[mesh] def mkBoundDelegateTree(
    path: Path,
    dentry: Option[Dentry],
    node: mesh.BoundDelegateTree.OneofNode
  ) = mesh.BoundDelegateTree(
    Some(toPath(path)),
    dentry.map(toDentry),
    Some(node)
  )

  // Convenience overload for the common case of a present dentry.
  private[mesh] def mkBoundDelegateTree(
    path: Path,
    dentry: Dentry,
    node: mesh.BoundDelegateTree.OneofNode
  ): mesh.BoundDelegateTree =
    mkBoundDelegateTree(path, Some(dentry), node)

  // A leaf is only representable when the bound name's id is a Path; other id
  // types yield None (callers fall back to Neg).
  private[mesh] def mkBoundDelegateTreeLeaf(name: Name.Bound): Option[mesh.BoundDelegateTree.Leaf] =
    name.id match {
      case id: Path =>
        val pid = toPath(id)
        val ppath = toPath(name.path)
        Some(mesh.BoundDelegateTree.Leaf(Some(pid), Some(ppath)))
      case _ => None
    }

  // Recursive conversion of an in-memory delegate tree to its protobuf form.
  private[mesh] val toDelegateTree: DelegateTree[Name.Bound] => mesh.BoundDelegateTree = {
    case DelegateTree.Neg(p, d) =>
      mkBoundDelegateTree(p, d, mesh.BoundDelegateTree.OneofNode.Neg(mesh.BoundDelegateTree.Neg()))
    case DelegateTree.Fail(p, d) =>
      mkBoundDelegateTree(p, d, mesh.BoundDelegateTree.OneofNode.Fail(mesh.BoundDelegateTree.Fail()))
    case DelegateTree.Empty(p, d) =>
      mkBoundDelegateTree(p, d, mesh.BoundDelegateTree.OneofNode.Empty(mesh.BoundDelegateTree.Empty()))
    case DelegateTree.Delegate(p, d, t) =>
      mkBoundDelegateTree(p, d, mesh.BoundDelegateTree.OneofNode.Delegate(toDelegateTree(t)))
    case DelegateTree.Exception(p, d, e) =>
      // Guard against exceptions whose message is null before interpolating.
      val msg = if (e.getMessage == null) "No underlying exception message" else e.getMessage
      mkBoundDelegateTree(p, d, mesh.BoundDelegateTree.OneofNode.Exception(s"BoundDelegateTree Exception: $msg"))
    case DelegateTree.Transformation(p, desc, n, t) =>
      val leaf = mkBoundDelegateTreeLeaf(n)
      val ptrans = mesh.BoundDelegateTree.Transformation(Some(desc), leaf, Some(toDelegateTree(t)))
      // Transformations carry no dentry.
      mkBoundDelegateTree(p, None, mesh.BoundDelegateTree.OneofNode.Transformation(ptrans))
    case DelegateTree.Leaf(p, d, name) =>
      // Non-Path bound ids degrade to a negative result.
      val node = mkBoundDelegateTreeLeaf(name) match {
        case None => mesh.BoundDelegateTree.OneofNode.Neg(mesh.BoundDelegateTree.Neg())
        case Some(leaf) => mesh.BoundDelegateTree.OneofNode.Leaf(leaf)
      }
      mkBoundDelegateTree(p, d, node)
    case DelegateTree.Alt(p, d, trees@_*) =>
      val ptrees = trees.map(toDelegateTree)
      val node = mesh.BoundDelegateTree.OneofNode.Alt(mesh.BoundDelegateTree.Alt(ptrees))
      mkBoundDelegateTree(p, d, node)
    case DelegateTree.Union(p, d, trees@_*) =>
      val ptrees = trees.map(toDelegateWeightedTree)
      val node = mesh.BoundDelegateTree.OneofNode.Union(mesh.BoundDelegateTree.Union(ptrees))
      mkBoundDelegateTree(p, d, node)
  }

  // Wraps a converted tree into the unary response shape.
  private[mesh] val toDelegateTreeRsp: DelegateTree[Name.Bound] => mesh.DelegateTreeRsp =
    t => mesh.DelegateTreeRsp(Some(toDelegateTree(t)))

  // Stream-event variant; not referenced within this file but exposed to the package.
  private[mesh] val toDelegateTreeRspEv: Try[DelegateTree[Name.Bound]] => VarEventStream.Ev[mesh.DelegateTreeRsp] = {
    case Return(tree) => VarEventStream.Val(toDelegateTreeRsp(tree))
    case Throw(e) => VarEventStream.End(Throw(e))
  }
}
| linkerd/linkerd | namerd/iface/mesh/src/main/scala/io/buoyant/namerd/iface/mesh/DelegatorService.scala | Scala | apache-2.0 | 7,169 |
package io.hydrosphere.mist.core.logging
/**
 * Log severity level, ordered by increasing `value` (DEBUG=1 .. ERROR=4).
 */
sealed trait Level {
  /** Numeric code used for ordering and external representation. */
  def value: Int
  /** Upper-case display name, e.g. "WARN". */
  def name: String
}

object Level {

  /**
   * Resolves a level from its numeric code.
   *
   * @throws IllegalArgumentException if `i` is not in 1..4
   */
  def fromInt(i: Int): Level = i match {
    case 1 => Debug
    case 2 => Info
    case 3 => Warn
    case 4 => Error
    // Fixed: the previous `case x =>` bound a variable that was never used
    // (the message already interpolates `i`), triggering an unused-binder warning.
    case _ => throw new IllegalArgumentException(s"Unknown level $i")
  }

  object Debug extends Level { val value = 1; val name = "DEBUG" }
  object Info extends Level { val value = 2; val name = "INFO" }
  object Warn extends Level { val value = 3; val name = "WARN" }
  object Error extends Level { val value = 4; val name = "ERROR" }
}
| Hydrospheredata/mist | mist/core/src/main/scala/io/hydrosphere/mist/core/logging/Level.scala | Scala | apache-2.0 | 583 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.staticStringTest
import org.jetbrains.plugins.scala.testingSupport.IntegrationTest
/**
* @author Roman.Shein
* @since 26.06.2015.
*/
trait WordSpecStaticStringTest extends IntegrationTest {

  // Class and file name of the fixture created by addWordSpec().
  val wordSpecClassName = "WordSpecStringTest"
  val wordSpecFileName = wordSpecClassName + ".scala"

  /**
   * Adds the WordSpec fixture every test below runs against. The (line, column)
   * coordinates passed to createTestFromLocation refer to positions inside this
   * string, so its content must not be reformatted.
   *
   * NOTE(review): the `const + System.currentTimeMillis()` line references `const`,
   * which is not defined in the fixture (the val is `constName`) — presumably
   * intentional/irrelevant since that test expects no static name, but confirm
   * against upstream.
   */
  def addWordSpec() = {
    addFileToProject(wordSpecFileName,
      """
|import org.scalatest._
|
|class WordSpecStringTest extends WordSpec {
| val constName = "const"
|
| constName should {
| constName in {
| }
|
| constName + " sum" in {
| }
| }
|
| "sum " + "name" should {
| constName + constName in {
| }
|
| "test" in {}
|
| const + System.currentTimeMillis() in {
| }
| }
|}
|
""".stripMargin.trim())
  }

  // Concatenation of two string literals is a compile-time constant, so the
  // full static test name can be computed.
  def testWordSpecSum() = {
    addWordSpec()
    assert(checkConfigAndSettings(createTestFromLocation(17, 10, wordSpecFileName), wordSpecClassName, "sum name should test"))
  }

  // A `val` initialized with a literal is treated as a static string.
  def testWordSpecVal() = {
    addWordSpec()
    assert(checkConfigAndSettings(createTestFromLocation(6, 10, wordSpecFileName), wordSpecClassName, "const should const"))
  }

  // Sums of static vals and literals are still static.
  def testWordSpecValSum() = {
    addWordSpec()
    assert(checkConfigAndSettings(createTestFromLocation(14, 10, wordSpecFileName), wordSpecClassName, "sum name should constconst"))
    assert(checkConfigAndSettings(createTestFromLocation(9, 10, wordSpecFileName), wordSpecClassName, "const should const sum"))
  }

  // A name containing System.currentTimeMillis() is not static: the created
  // configuration is expected to carry no test name (class-level run only).
  def testWordSpecNonConst() = {
    addWordSpec()
    assert(checkConfigAndSettings(createTestFromLocation(19, 10, wordSpecFileName), wordSpecClassName))
  }
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/testingSupport/scalatest/staticStringTest/WordSpecStaticStringTest.scala | Scala | apache-2.0 | 1,840 |
package lila.perfStat
import org.joda.time.DateTime
import reactivemongo.bson._
import scala.concurrent.duration._
import lila.db.dsl._
import lila.rating.PerfType
final class PerfStatStorage(coll: Coll) {

  // PerfType is stored by its integer id; an unknown id fails fast via `err`.
  implicit val PerfTypeBSONHandler = new BSONHandler[BSONInteger, PerfType] {
    def read(b: BSONInteger) = PerfType.byId get b.value err s"Invalid perf type id ${b.value}"
    def write(p: PerfType) = BSONInteger(p.id)
  }

  // UserId is a plain string wrapper in BSON.
  implicit val UserIdBSONHandler = new BSONHandler[BSONString, UserId] {
    def read(b: BSONString) = UserId(b.value)
    def write(u: UserId) = BSONString(u.value)
  }

  // Macro-derived handlers. Declaration order is load-bearing: each
  // Macros.handler[X] needs the handlers for X's fields already in scope, so
  // the composite PerfStat handler must stay last.
  private implicit val RatingAtBSONHandler = Macros.handler[RatingAt]
  private implicit val ResultBSONHandler = Macros.handler[Result]
  private implicit val ResultsBSONHandler = Macros.handler[Results]
  private implicit val StreakBSONHandler = Macros.handler[Streak]
  private implicit val StreaksBSONHandler = Macros.handler[Streaks]
  private implicit val PlayStreakBSONHandler = Macros.handler[PlayStreak]
  private implicit val ResultStreakBSONHandler = Macros.handler[ResultStreak]
  private implicit val AvgBSONHandler = Macros.handler[Avg]
  private implicit val CountBSONHandler = Macros.handler[Count]
  private implicit val PerfStatBSONHandler = Macros.handler[PerfStat]

  // Fetches the stat document for (user, perf type); None when absent.
  def find(userId: String, perfType: PerfType): Fu[Option[PerfStat]] =
    coll.byId[PerfStat](PerfStat.makeId(userId, perfType))

  // Full-document replacement keyed by the stat's composite id.
  def update(perfStat: PerfStat): Funit =
    coll.update($id(perfStat.id), perfStat).void

  // Plain insert; fails if the id already exists.
  def insert(perfStat: PerfStat): Funit =
    coll.insert(perfStat).void
}
| clarkerubber/lila | modules/perfStat/src/main/PerfStatStorage.scala | Scala | agpl-3.0 | 1,588 |
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.react
import scala.scalajs.js
import scala.scalajs.js._
import org.scalajs.dom.html
import js.{ UndefOr, Any, Function => JFn }
import js.annotation.{ JSBracketAccess, JSName }
import js.{ Any => jAny }
import org.scalajs.dom._
// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/react/react.d.ts
// Scala.js facade over React TestUtils' `Simulate` helpers: each overload fires
// a synthetic event on a DOM element or component instance.
// Fixed: `js.native` facade members must declare an explicit result type —
// previously the result type was inferred from `js.native` itself, which is
// incorrect for a native facade. React's Simulate calls return nothing, so the
// result type is `Unit`.
@js.native
trait EventSimulator extends js.Any {
  /** Fires the event on a DOM element with the given synthetic-event payload. */
  def apply(element: Element, eventData: SyntheticEventData): Unit = js.native
  /** Fires the event on a DOM element with no extra payload. */
  def apply(element: Element): Unit = js.native
  /** Fires the event on a (component, payload) pair. */
  def apply(element: Tuple2[Component[_, _], SyntheticEventData]): Unit = js.native
  /** Fires the event on a component instance with no extra payload. */
  def apply(element: Component[_, _]): Unit = js.native
}
| glipka/Easy-React-With-ScalaJS | src/main/scala/com/glipka/easyReactJS/react/EventSimulator.scala | Scala | apache-2.0 | 1,226 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the negated value of `expr`.",
  examples = """
Examples:
> SELECT _FUNC_(1);
-1
""")
case class UnaryMinus(child: Expression) extends UnaryExpression
  with ExpectsInputTypes with NullIntolerant {

  // Accepts any numeric type or CalendarInterval.
  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection.NumericAndInterval)

  // Negation preserves the operand's type.
  override def dataType: DataType = child.dataType

  override def toString: String = s"-$child"

  // Numeric instance for the result type; lazy because dataType is only known
  // once the child expression is resolved.
  private lazy val numeric = TypeUtils.getNumeric(dataType)

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
    // Fixed: unused pattern binders (`dt`) replaced with `_` in the cases that
    // never reference them, silencing unused-variable lint.
    case _: DecimalType => defineCodeGen(ctx, ev, c => s"$c.unary_$$minus()")
    case dt: NumericType => nullSafeCodeGen(ctx, ev, eval => {
      val originValue = ctx.freshName("origin")
      // codegen would fail to compile if we just write (-($c))
      // for example, we could not write --9223372036854775808L in code
      s"""
${ctx.javaType(dt)} $originValue = (${ctx.javaType(dt)})($eval);
${ev.value} = (${ctx.javaType(dt)})(-($originValue));
"""})
    case _: CalendarIntervalType => defineCodeGen(ctx, ev, c => s"$c.negate()")
  }

  // Interpreted path: intervals negate through CalendarInterval; all other
  // types go through the Numeric instance.
  protected override def nullSafeEval(input: Any): Any = {
    if (dataType.isInstanceOf[CalendarIntervalType]) {
      input.asInstanceOf[CalendarInterval].negate()
    } else {
      numeric.negate(input)
    }
  }

  override def sql: String = s"(- ${child.sql})"
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the value of `expr`.")
case class UnaryPositive(child: Expression)
  extends UnaryExpression with ExpectsInputTypes with NullIntolerant {

  override def prettyName: String = "positive"

  // Same input domain as unary minus: numerics plus CalendarInterval.
  override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection.NumericAndInterval)

  // Unary plus is the identity, so the type passes straight through.
  override def dataType: DataType = child.dataType

  // Generated code simply forwards the child's evaluated value unchanged.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, input => input)

  // Interpreted path is the identity as well.
  protected override def nullSafeEval(input: Any): Any = input

  override def sql: String = s"(+ ${child.sql})"
}
/**
 * A function that gets the absolute value of the numeric value.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the absolute value of the numeric value.",
  examples = """
Examples:
> SELECT _FUNC_(-1);
1
""")
case class Abs(child: Expression)
  extends UnaryExpression with ExpectsInputTypes with NullIntolerant {

  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)

  // abs preserves the operand's type.
  override def dataType: DataType = child.dataType

  // Numeric instance for the result type, resolved lazily after analysis.
  private lazy val numeric = TypeUtils.getNumeric(dataType)

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
    // Fixed: the DecimalType case bound `dt` without using it (unused-binder lint).
    case _: DecimalType =>
      defineCodeGen(ctx, ev, c => s"$c.abs()")
    case dt: NumericType =>
      // Cast back to the operand type: Math.abs on byte/short widens to int.
      defineCodeGen(ctx, ev, c => s"(${ctx.javaType(dt)})(java.lang.Math.abs($c))")
  }

  protected override def nullSafeEval(input: Any): Any = numeric.abs(input)
}
// Common base for two-operand arithmetic expressions; both operands share the
// result type (enforced by BinaryOperator's type checking).
abstract class BinaryArithmetic extends BinaryOperator with NullIntolerant {

  override def dataType: DataType = left.dataType

  override lazy val resolved = childrenResolved && checkInputDataTypes().isSuccess

  /** Name of the function for this expression on a [[Decimal]] type. */
  def decimalMethod: String =
    // Deliberate fail-fast default: subclasses that rely on the generic
    // doGenCode below for decimals must supply the Decimal method name.
    sys.error("BinaryArithmetics must override either decimalMethod or genCode")

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
    case dt: DecimalType =>
      // Decimals dispatch to a named method on Decimal rather than a Java operator.
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.$decimalMethod($eval2)")
    // byte and short are casted into int when add, minus, times or divide
    case ByteType | ShortType =>
      defineCodeGen(ctx, ev,
        (eval1, eval2) => s"(${ctx.javaType(dataType)})($eval1 $symbol $eval2)")
    case _ =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")
  }
}
/** Extractor exposing the two operands of any [[BinaryArithmetic]] expression. */
object BinaryArithmetic {
  def unapply(e: BinaryArithmetic): Option[(Expression, Expression)] =
    Some((e.left, e.right))
}
@ExpressionDescription(
  usage = "expr1 _FUNC_ expr2 - Returns `expr1`+`expr2`.",
  examples = """
Examples:
> SELECT 1 _FUNC_ 2;
3
""")
// Addition over numerics and calendar intervals.
case class Add(left: Expression, right: Expression) extends BinaryArithmetic {

  override def inputType: AbstractDataType = TypeCollection.NumericAndInterval

  override def symbol: String = "+"

  // Numeric instance for the result type, used only on the interpreted path.
  private lazy val numeric = TypeUtils.getNumeric(dataType)

  // Interpreted evaluation; both inputs are non-null here (NullIntolerant).
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    if (dataType.isInstanceOf[CalendarIntervalType]) {
      input1.asInstanceOf[CalendarInterval].add(input2.asInstanceOf[CalendarInterval])
    } else {
      numeric.plus(input1, input2)
    }
  }

  // Overrides the base codegen to add the CalendarInterval case.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
    case dt: DecimalType =>
      // $$plus escapes to "$plus", Decimal's + method under name mangling.
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.$$plus($eval2)")
    case ByteType | ShortType =>
      // Java's + widens byte/short to int, so cast the result back.
      defineCodeGen(ctx, ev,
        (eval1, eval2) => s"(${ctx.javaType(dataType)})($eval1 $symbol $eval2)")
    case CalendarIntervalType =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.add($eval2)")
    case _ =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")
  }
}
@ExpressionDescription(
  usage = "expr1 _FUNC_ expr2 - Returns `expr1`-`expr2`.",
  examples = """
Examples:
> SELECT 2 _FUNC_ 1;
1
""")
// Subtraction over numerics and calendar intervals; mirrors Add.
case class Subtract(left: Expression, right: Expression) extends BinaryArithmetic {

  override def inputType: AbstractDataType = TypeCollection.NumericAndInterval

  override def symbol: String = "-"

  // Numeric instance for the result type, used only on the interpreted path.
  private lazy val numeric = TypeUtils.getNumeric(dataType)

  // Interpreted evaluation; both inputs are non-null here (NullIntolerant).
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    if (dataType.isInstanceOf[CalendarIntervalType]) {
      input1.asInstanceOf[CalendarInterval].subtract(input2.asInstanceOf[CalendarInterval])
    } else {
      numeric.minus(input1, input2)
    }
  }

  // Overrides the base codegen to add the CalendarInterval case.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = dataType match {
    case dt: DecimalType =>
      // $$minus escapes to "$minus", Decimal's - method under name mangling.
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.$$minus($eval2)")
    case ByteType | ShortType =>
      // Java's - widens byte/short to int, so cast the result back.
      defineCodeGen(ctx, ev,
        (eval1, eval2) => s"(${ctx.javaType(dataType)})($eval1 $symbol $eval2)")
    case CalendarIntervalType =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1.subtract($eval2)")
    case _ =>
      defineCodeGen(ctx, ev, (eval1, eval2) => s"$eval1 $symbol $eval2")
  }
}
@ExpressionDescription(
  usage = "expr1 _FUNC_ expr2 - Returns `expr1`*`expr2`.",
  examples = """
Examples:
> SELECT 2 _FUNC_ 3;
6
""")
case class Multiply(left: Expression, right: Expression) extends BinaryArithmetic {

  override def inputType: AbstractDataType = NumericType

  // Symbol drives the generic codegen in BinaryArithmetic; decimalMethod is
  // the mangled name of Decimal's `*`.
  override def symbol: String = "*"
  override def decimalMethod: String = "$times"

  // Numeric instance for the result type, resolved once and lazily.
  private lazy val numericOps = TypeUtils.getNumeric(dataType)

  // Interpreted path; inputs are guaranteed non-null by nullSafeEval's caller.
  protected override def nullSafeEval(input1: Any, input2: Any): Any =
    numericOps.times(input1, input2)
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "expr1 _FUNC_ expr2 - Returns `expr1`/`expr2`. It always performs floating point division.",
  examples = """
Examples:
> SELECT 3 _FUNC_ 2;
1.5
> SELECT 2L _FUNC_ 2L;
1.0
""")
// scalastyle:on line.size.limit
// Division restricted to Double/Decimal (integral inputs are cast upstream).
// Division by zero yields null rather than raising, hence nullable = true and
// the hand-written eval/codegen below instead of the NullIntolerant defaults.
case class Divide(left: Expression, right: Expression) extends BinaryArithmetic {

  override def inputType: AbstractDataType = TypeCollection(DoubleType, DecimalType)

  override def symbol: String = "/"
  override def decimalMethod: String = "$div"

  // Nullable even when children are not: x / 0 => null.
  override def nullable: Boolean = true

  private lazy val div: (Any, Any) => Any = dataType match {
    case ft: FractionalType => ft.fractional.asInstanceOf[Fractional[Any]].div
  }

  // Evaluates the divisor first so the dividend is skipped entirely on /0 or null.
  override def eval(input: InternalRow): Any = {
    val input2 = right.eval(input)
    if (input2 == null || input2 == 0) {
      null
    } else {
      val input1 = left.eval(input)
      if (input1 == null) {
        null
      } else {
        div(input1, input2)
      }
    }
  }

  /**
   * Special case handling due to division by 0 => null.
   */
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval1 = left.genCode(ctx)
    val eval2 = right.genCode(ctx)
    // Decimal zero must be tested via isZero(); primitives compare against 0.
    val isZero = if (dataType.isInstanceOf[DecimalType]) {
      s"${eval2.value}.isZero()"
    } else {
      s"${eval2.value} == 0"
    }
    val javaType = ctx.javaType(dataType)
    val divide = if (dataType.isInstanceOf[DecimalType]) {
      s"${eval1.value}.$decimalMethod(${eval2.value})"
    } else {
      s"($javaType)(${eval1.value} $symbol ${eval2.value})"
    }
    if (!left.nullable && !right.nullable) {
      // Fast path: no null checks on the operands, only the zero check.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
${ev.value} = $divide;
}""")
    } else {
      // General path: null-or-zero divisor, then null dividend, then divide.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
${ev.value} = $divide;
}
}""")
    }
  }
}
@ExpressionDescription(
  usage = "expr1 _FUNC_ expr2 - Returns the remainder after `expr1`/`expr2`.",
  examples = """
Examples:
> SELECT 2 _FUNC_ 1.8;
0.2
> SELECT MOD(2, 1.8);
0.2
""")
// Remainder (sign follows the dividend, like Java's %). x % 0 yields null,
// hence nullable = true and the custom eval/codegen mirroring Divide's.
case class Remainder(left: Expression, right: Expression) extends BinaryArithmetic {

  override def inputType: AbstractDataType = NumericType

  override def symbol: String = "%"
  override def decimalMethod: String = "remainder"

  // Nullable even when children are not: x % 0 => null.
  override def nullable: Boolean = true

  // Integral instance for non-floating types; Double/Float are handled
  // specially in eval because Fractional has no remainder operation.
  private lazy val integral = dataType match {
    case i: IntegralType => i.integral.asInstanceOf[Integral[Any]]
    case i: FractionalType => i.asIntegral.asInstanceOf[Integral[Any]]
  }

  // Evaluates the divisor first so the dividend is skipped on %0 or null.
  override def eval(input: InternalRow): Any = {
    val input2 = right.eval(input)
    if (input2 == null || input2 == 0) {
      null
    } else {
      val input1 = left.eval(input)
      if (input1 == null) {
        null
      } else {
        input1 match {
          // Floating-point remainder uses the JVM's primitive % directly.
          case d: Double => d % input2.asInstanceOf[java.lang.Double]
          case f: Float => f % input2.asInstanceOf[java.lang.Float]
          case _ => integral.rem(input1, input2)
        }
      }
    }
  }

  /**
   * Special case handling for x % 0 ==> null.
   */
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval1 = left.genCode(ctx)
    val eval2 = right.genCode(ctx)
    // Decimal zero must be tested via isZero(); primitives compare against 0.
    val isZero = if (dataType.isInstanceOf[DecimalType]) {
      s"${eval2.value}.isZero()"
    } else {
      s"${eval2.value} == 0"
    }
    val javaType = ctx.javaType(dataType)
    val remainder = if (dataType.isInstanceOf[DecimalType]) {
      s"${eval1.value}.$decimalMethod(${eval2.value})"
    } else {
      s"($javaType)(${eval1.value} $symbol ${eval2.value})"
    }
    if (!left.nullable && !right.nullable) {
      // Fast path: only the zero check is needed.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
${ev.value} = $remainder;
}""")
    } else {
      // General path: null-or-zero divisor, then null dividend, then remainder.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
${ev.value} = $remainder;
}
}""")
    }
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Returns the positive value of `expr1` mod `expr2`.",
  examples = """
Examples:
> SELECT _FUNC_(10, 3);
1
> SELECT _FUNC_(-10, 3);
2
""")
// Positive modulus: like %, but the result is shifted into [0, |n|) when the
// plain remainder is negative. x pmod 0 yields null.
case class Pmod(left: Expression, right: Expression) extends BinaryArithmetic {

  override def toString: String = s"pmod($left, $right)"

  override def symbol: String = "pmod"

  // NOTE(review): not referenced anywhere in this file and not an override —
  // looks like a dead leftover from an older type-check protocol; confirm.
  protected def checkTypesInternal(t: DataType) =
    TypeUtils.checkForNumericExpr(t, "pmod")

  override def inputType: AbstractDataType = NumericType

  // Nullable even when children are not: pmod(x, 0) => null.
  override def nullable: Boolean = true

  // Evaluates the divisor first so the dividend is skipped on 0 or null, then
  // dispatches to the type-specific pmod helper below.
  override def eval(input: InternalRow): Any = {
    val input2 = right.eval(input)
    if (input2 == null || input2 == 0) {
      null
    } else {
      val input1 = left.eval(input)
      if (input1 == null) {
        null
      } else {
        input1 match {
          case i: Integer => pmod(i, input2.asInstanceOf[java.lang.Integer])
          case l: Long => pmod(l, input2.asInstanceOf[java.lang.Long])
          case s: Short => pmod(s, input2.asInstanceOf[java.lang.Short])
          case b: Byte => pmod(b, input2.asInstanceOf[java.lang.Byte])
          case f: Float => pmod(f, input2.asInstanceOf[java.lang.Float])
          case d: Double => pmod(d, input2.asInstanceOf[java.lang.Double])
          case d: Decimal => pmod(d, input2.asInstanceOf[Decimal])
        }
      }
    }
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val eval1 = left.genCode(ctx)
    val eval2 = right.genCode(ctx)
    // Decimal zero must be tested via isZero(); primitives compare against 0.
    val isZero = if (dataType.isInstanceOf[DecimalType]) {
      s"${eval2.value}.isZero()"
    } else {
      s"${eval2.value} == 0"
    }
    val remainder = ctx.freshName("remainder")
    val javaType = ctx.javaType(dataType)
    // Generated "remainder; if negative, add divisor and re-mod" sequence,
    // specialized per type family (Decimal method calls / byte-short casts).
    val result = dataType match {
      case DecimalType.Fixed(_, _) =>
        val decimalAdd = "$plus"
        s"""
${ctx.javaType(dataType)} $remainder = ${eval1.value}.remainder(${eval2.value});
if ($remainder.compare(new org.apache.spark.sql.types.Decimal().set(0)) < 0) {
${ev.value}=($remainder.$decimalAdd(${eval2.value})).remainder(${eval2.value});
} else {
${ev.value}=$remainder;
}
"""
      // byte and short are casted into int when add, minus, times or divide
      case ByteType | ShortType =>
        s"""
${ctx.javaType(dataType)} $remainder =
(${ctx.javaType(dataType)})(${eval1.value} % ${eval2.value});
if ($remainder < 0) {
${ev.value}=(${ctx.javaType(dataType)})(($remainder + ${eval2.value}) % ${eval2.value});
} else {
${ev.value}=$remainder;
}
"""
      case _ =>
        s"""
${ctx.javaType(dataType)} $remainder = ${eval1.value} % ${eval2.value};
if ($remainder < 0) {
${ev.value}=($remainder + ${eval2.value}) % ${eval2.value};
} else {
${ev.value}=$remainder;
}
"""
    }
    if (!left.nullable && !right.nullable) {
      // Fast path: only the zero check is needed.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if ($isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
$result
}""")
    } else {
      // General path: null-or-zero divisor, then null dividend, then pmod.
      ev.copy(code = s"""
${eval2.code}
boolean ${ev.isNull} = false;
$javaType ${ev.value} = ${ctx.defaultValue(javaType)};
if (${eval2.isNull} || $isZero) {
${ev.isNull} = true;
} else {
${eval1.code}
if (${eval1.isNull}) {
${ev.isNull} = true;
} else {
$result
}
}""")
    }
  }

  // Type-specific helpers: plain remainder, shifted into the non-negative
  // range when negative. Byte/short variants cast the widened int back.
  private def pmod(a: Int, n: Int): Int = {
    val r = a % n
    if (r < 0) {(r + n) % n} else r
  }

  private def pmod(a: Long, n: Long): Long = {
    val r = a % n
    if (r < 0) {(r + n) % n} else r
  }

  private def pmod(a: Byte, n: Byte): Byte = {
    val r = a % n
    if (r < 0) {((r + n) % n).toByte} else r.toByte
  }

  private def pmod(a: Double, n: Double): Double = {
    val r = a % n
    if (r < 0) {(r + n) % n} else r
  }

  private def pmod(a: Short, n: Short): Short = {
    val r = a % n
    if (r < 0) {((r + n) % n).toShort} else r.toShort
  }

  private def pmod(a: Float, n: Float): Float = {
    val r = a % n
    if (r < 0) {(r + n) % n} else r
  }

  private def pmod(a: Decimal, n: Decimal): Decimal = {
    val r = a % n
    // Decimal % can return null; guard before comparing.
    if (r != null && r.compare(Decimal.ZERO) < 0) {(r + n) % n} else r
  }

  override def sql: String = s"$prettyName(${left.sql}, ${right.sql})"
}
/**
 * A function that returns the least value of all parameters, skipping null values.
 * It takes at least 2 parameters, and returns null iff all parameters are null.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr, ...) - Returns the least value of all parameters, skipping null values.",
  examples = """
    Examples:
      > SELECT _FUNC_(10, 9, 2, 4, 3);
       2
  """)
case class Least(children: Seq[Expression]) extends Expression {

  // Nullable only if every child is nullable: one non-nullable child
  // guarantees at least one non-null candidate.
  override def nullable: Boolean = children.forall(_.nullable)
  override def foldable: Boolean = children.forall(_.foldable)

  // Ordering for the interpreted path; lazy because dataType is only
  // meaningful once the expression is resolved.
  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)

  override def checkInputDataTypes(): TypeCheckResult = {
    if (children.length <= 1) {
      TypeCheckResult.TypeCheckFailure(
        s"input to function $prettyName requires at least two arguments")
    } else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
      // All non-NullType children must agree on a single type; NullType is
      // excluded so explicit NULL literals can mix with typed arguments.
      TypeCheckResult.TypeCheckFailure(
        s"The expressions should all have the same type," +
          s" got LEAST(${children.map(_.dataType.simpleString).mkString(", ")}).")
    } else {
      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
    }
  }

  override def dataType: DataType = children.head.dataType

  // Interpreted evaluation: fold over the children keeping the smallest
  // non-null value seen so far (null children are skipped).
  override def eval(input: InternalRow): Any = {
    children.foldLeft[Any](null)((r, c) => {
      val evalc = c.eval(input)
      if (evalc != null) {
        if (r == null || ordering.lt(evalc, r)) evalc else r
      } else {
        r
      }
    })
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val evalChildren = children.map(_.genCode(ctx))
    // isNull must live in a mutable state slot because the per-child snippets
    // below may be split into separate generated methods.
    ev.isNull = ctx.addMutableState(ctx.JAVA_BOOLEAN, ev.isNull)
    // Per child: adopt its value when it is non-null and either nothing has
    // been picked yet or the current pick is greater than the child's value.
    val evals = evalChildren.map(eval =>
      s"""
         |${eval.code}
         |if (!${eval.isNull} && (${ev.isNull} ||
         |  ${ctx.genGreater(dataType, ev.value, eval.value)})) {
         |  ${ev.isNull} = false;
         |  ${ev.value} = ${eval.value};
         |}
      """.stripMargin
    )
    val resultType = ctx.javaType(dataType)
    // Split the snippets across helper methods if they grow too large; each
    // split function threads the running minimum (ev.value) through.
    val codes = ctx.splitExpressionsWithCurrentInputs(
      expressions = evals,
      funcName = "least",
      extraArguments = Seq(resultType -> ev.value),
      returnType = resultType,
      makeSplitFunction = body =>
        s"""
           |$body
           |return ${ev.value};
        """.stripMargin,
      foldFunctions = _.map(funcCall => s"${ev.value} = $funcCall;").mkString("\\n"))
    ev.copy(code =
      s"""
         |${ev.isNull} = true;
         |${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
         |$codes
      """.stripMargin)
  }
}
/**
 * A function that returns the greatest value of all parameters, skipping null values.
 * It takes at least 2 parameters, and returns null iff all parameters are null.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr, ...) - Returns the greatest value of all parameters, skipping null values.",
  examples = """
    Examples:
      > SELECT _FUNC_(10, 9, 2, 4, 3);
       10
  """)
case class Greatest(children: Seq[Expression]) extends Expression {

  // Nullable only if every child is nullable: one non-nullable child
  // guarantees at least one non-null candidate.
  override def nullable: Boolean = children.forall(_.nullable)
  override def foldable: Boolean = children.forall(_.foldable)

  // Ordering for the interpreted path; lazy because dataType is only
  // meaningful once the expression is resolved.
  private lazy val ordering = TypeUtils.getInterpretedOrdering(dataType)

  override def checkInputDataTypes(): TypeCheckResult = {
    if (children.length <= 1) {
      TypeCheckResult.TypeCheckFailure(
        s"input to function $prettyName requires at least two arguments")
    } else if (children.map(_.dataType).distinct.count(_ != NullType) > 1) {
      // All non-NullType children must agree on a single type; NullType is
      // excluded so explicit NULL literals can mix with typed arguments.
      TypeCheckResult.TypeCheckFailure(
        s"The expressions should all have the same type," +
          s" got GREATEST(${children.map(_.dataType.simpleString).mkString(", ")}).")
    } else {
      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
    }
  }

  override def dataType: DataType = children.head.dataType

  // Interpreted evaluation: fold over the children keeping the largest
  // non-null value seen so far (null children are skipped).
  override def eval(input: InternalRow): Any = {
    children.foldLeft[Any](null)((r, c) => {
      val evalc = c.eval(input)
      if (evalc != null) {
        if (r == null || ordering.gt(evalc, r)) evalc else r
      } else {
        r
      }
    })
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val evalChildren = children.map(_.genCode(ctx))
    // isNull must live in a mutable state slot because the per-child snippets
    // below may be split into separate generated methods.
    ev.isNull = ctx.addMutableState(ctx.JAVA_BOOLEAN, ev.isNull)
    // Per child: adopt its value when it is non-null and either nothing has
    // been picked yet or the child's value is greater than the current pick.
    // (Note the argument order to genGreater is swapped relative to Least.)
    val evals = evalChildren.map(eval =>
      s"""
         |${eval.code}
         |if (!${eval.isNull} && (${ev.isNull} ||
         |  ${ctx.genGreater(dataType, eval.value, ev.value)})) {
         |  ${ev.isNull} = false;
         |  ${ev.value} = ${eval.value};
         |}
      """.stripMargin
    )
    val resultType = ctx.javaType(dataType)
    // Split the snippets across helper methods if they grow too large; each
    // split function threads the running maximum (ev.value) through.
    val codes = ctx.splitExpressionsWithCurrentInputs(
      expressions = evals,
      funcName = "greatest",
      extraArguments = Seq(resultType -> ev.value),
      returnType = resultType,
      makeSplitFunction = body =>
        s"""
           |$body
           |return ${ev.value};
        """.stripMargin,
      foldFunctions = _.map(funcCall => s"${ev.value} = $funcCall;").mkString("\\n"))
    ev.copy(code =
      s"""
         |${ev.isNull} = true;
         |${ctx.javaType(dataType)} ${ev.value} = ${ctx.defaultValue(dataType)};
         |$codes
      """.stripMargin)
  }
}
| saltstar/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala | Scala | apache-2.0 | 23,054 |
package endpoints
package documented
package algebra

import scala.language.higherKinds

/**
 * Algebra interface for describing URL including documentation.
 *
 * This interface is modeled after [[endpoints.algebra.Urls]] but some methods
 * take additional parameters carrying the documentation part.
 */
trait Urls {

  /** A query string carrying an `A` information */
  type QueryString[A]

  /** Provides convenient methods on [[QueryString]]. */
  implicit class QueryStringOps[A](first: QueryString[A]) {
    /**
     * Convenient method to concatenate two [[QueryString]]s.
     *
     * {{{
     *   qs[Int]("foo") & qs[String]("baz")
     * }}}
     *
     * The `Tupler` evidence flattens the combined information (e.g. avoids
     * nesting when one side carries `Unit`).
     *
     * @param second `QueryString` to concatenate with this one
     * @tparam B Information carried by the second `QueryString`
     * @return A `QueryString` that carries both `A` and `B` information
     */
    final def & [B](second: QueryString[B])(implicit tupler: Tupler[A, B]): QueryString[tupler.Out] =
      combineQueryStrings(first, second)
  }

  /** Concatenates two `QueryString`s */
  def combineQueryStrings[A, B](first: QueryString[A], second: QueryString[B])(implicit tupler: Tupler[A, B]): QueryString[tupler.Out]

  /**
   * Builds a `QueryString` with one parameter.
   *
   * @param name Parameter’s name
   * @tparam A Type of the value carried by the parameter
   */
  def qs[A](name: String)(implicit value: QueryStringParam[A]): QueryString[A]

  /**
   * Builds a `QueryString` with one optional parameter of type `A`.
   *
   * The resulting query string carries `Option[A]` (presumably a missing
   * parameter maps to `None` — confirm in the interpreters).
   *
   * @param name Parameter’s name
   */
  def optQs[A](name: String)(implicit value: QueryStringParam[A]): QueryString[Option[A]]

  /**
   * A single query string parameter carrying an `A` information.
   */
  type QueryStringParam[A]

  /** Ability to define `String` query string parameters */
  implicit def stringQueryString: QueryStringParam[String]

  /** Ability to define `Int` query string parameters */
  implicit def intQueryString: QueryStringParam[Int]

  /** Query string parameter containing a `Long` value */
  implicit def longQueryString: QueryStringParam[Long]

  /**
   * An URL path segment carrying an `A` information.
   */
  type Segment[A]

  /** Ability to define `String` path segments */
  implicit def stringSegment: Segment[String]

  /** Ability to define `Int` path segments */
  implicit def intSegment: Segment[Int]

  /** Segment containing a `Long` value */
  implicit def longSegment: Segment[Long]

  /**
   * An URL path carrying an `A` information.
   *
   * Note the upper bound: every `Path[A]` is also a complete `Url[A]`
   * (a URL without a query string).
   */
  type Path[A] <: Url[A]

  /** Convenient methods for [[Path]]s. */
  implicit class PathOps[A](first: Path[A]) {
    /** Chains this path with the `second` constant path segment */
    final def / (second: String): Path[A] = chainPaths(first, staticPathSegment(second))
    /** Chains this path with the `second` path segment */
    final def / [B](second: Path[B])(implicit tupler: Tupler[A, B]): Path[tupler.Out] = chainPaths(first, second)
    /** Chains this path with the given [[QueryString]] */
    final def /? [B](qs: QueryString[B])(implicit tupler: Tupler[A, B]): Url[tupler.Out] = urlWithQueryString(first, qs)
  }

  /** Builds a static path segment */
  def staticPathSegment(segment: String): Path[Unit]

  /** Builds a path segment carrying an `A` information
    *
    * @param name Name for the segment (for documentation)
    */
  def segment[A](name: String)(implicit s: Segment[A]): Path[A]

  /** Chains the two paths */
  def chainPaths[A, B](first: Path[A], second: Path[B])(implicit tupler: Tupler[A, B]): Path[tupler.Out]

  /**
   * An empty path.
   *
   * Useful to begin a path definition:
   *
   * {{{
   *   path / "foo" / segment[Int]("some-value")
   * }}}
   *
   * Defined as a single empty static segment.
   */
  val path: Path[Unit] = staticPathSegment("")

  /**
   * An URL carrying an `A` information
   */
  type Url[A]

  /** Builds an URL from the given path and query string */
  def urlWithQueryString[A, B](path: Path[A], qs: QueryString[B])(implicit tupler: Tupler[A, B]): Url[tupler.Out]
}
| Krever/endpoints | openapi/openapi/src/main/scala/endpoints/documented/algebra/Urls.scala | Scala | mit | 4,042 |
package metascala

/**
 * A minimal "object system" encoded on HLists and tuples: an object is a
 * product of method values, methods are looked up by their type, and
 * `_override`/`|=` replace a method value with a new implementation.
 */
object OO {
  import HLists._
  import Nats._
  import Tuples._

  // Marker trait for method values; `Object` is the receiver type.
  trait MethodBase {
    type Object
  }

  trait MethodObj[Obj] extends MethodBase {
    type Object = Obj
  }

  // Zero-argument method on a receiver of type Obj, producing Out.
  trait Method0Base[Obj] extends MethodObj[Obj] {
    type Out
    def apply(obj : Obj) : Out
  }

  // Concrete 0-arity method wrapping a plain function.
  class Method0[Obj, O](fn : Obj => O) extends Method0Base[Obj] {
    type Out = O
    def apply(obj : Obj) = fn(obj)
  }

  // One-argument method on a receiver of type Obj: (Obj, In1) => Out.
  trait Method1Base[Obj] extends MethodObj[Obj] {
    type In1
    type Out
    def apply(obj : Obj, arg : In1) : Out
  }

  // Concrete 1-arity method wrapping a plain function.
  // NOTE(review): In1 = I and Out = O by definition, so the asInstanceOf looks
  // redundant — presumably kept to satisfy path-dependent typing; confirm
  // before removing.
  class Method1[Obj, I, O](fn : (Obj, I) => O) extends Method1Base[Obj] {
    type In1 = I
    type Out = O
    def apply(obj : Obj, arg : In1) : Out = fn(obj, arg).asInstanceOf[Out]
  }

  // Replaces the first element of (exact) type M in the HList, searching from
  // index 0 — i.e. overrides method M on the object.
  def _override[M <: MethodBase, H, T <: HList](obj : HCons[H, T], m : M)(implicit fn : ReplaceByTypeFn[HCons[H, T], _0, M]) : HCons[H, T] =
    obj.replaceByType(_0, m)

  // Method-call syntax for objects encoded as HLists: lookup by method type,
  // invocation, delegation to another instance, and override via `|=`.
  case class RichHCons[H, T <: HList](l : HCons[H, T]) {
    def get[M <: MethodBase](implicit fn : GetByTypeFn[HCons[H, T], _0, M]) : M = l.getByType[_0, M]
    def call[M <: Method0Base[HCons[H, T]]](implicit fn : GetByTypeFn[HCons[H, T], _0, M]) : M#Out = get[M].apply(l)
    def call[M <: Method1Base[HCons[H, T]]](arg : M#In1)(implicit fn : GetByTypeFn[HCons[H, T], _0, M]) : M#Out = applyArg(get[M], arg)
    def delegate[M <: Method0Base[HCons[H, T]]](l2 : HCons[H, T])(implicit fn : GetByTypeFn[HCons[H, T], _0, M]) : M#Out = get[M].apply(l2)
    // NOTE(review): the cast bridges the type-projection M#In1 to the
    // path-dependent m.In1 — deliberate, do not "simplify" away.
    def applyArg[M <: Method1Base[HCons[H, T]]](m : M, arg : M#In1) : M#Out = m(l, arg.asInstanceOf[m.In1])
    def |=[M <: MethodBase](m : M)(implicit fn : ReplaceByTypeFn[HCons[H, T], _0, M]) = _override(l, m)
  }
  implicit def hconsToRichHCons[H, T <: HList](l : HCons[H, T]) : RichHCons[H, T] = RichHCons(l)

  // Same method-call syntax for objects encoded as tuples (Products).
  case class MethodProduct[P <: Product](p : P) {
    def call[M <: Method0Base[P]](implicit fn : Getter[P, M]) : M#Out = get[P, M](p).apply(p)
    def call[M <: Method1Base[P]](arg : M#In1)(implicit fn : Getter[P, M]) : M#Out = applyArg(get[P, M](p), arg)
    def delegate[M <: Method0Base[P]](l2 : P)(implicit fn : Getter[P, M]) : M#Out = get[P, M](p).apply(l2)
    def applyArg[M <: Method1Base[P]](m : M, arg : M#In1) : M#Out = m(p, arg.asInstanceOf[m.In1])
    def |=[M <: MethodBase](m : M)(implicit fn : Replacer[P, M]) = replace(p, m)
  }
  implicit def productToMethodProduct[P <: Product](p : P) = MethodProduct(p)
}
| svn2github/metascala | src/metascala/OO.scala | Scala | bsd-3-clause | 2,423 |
package scala.meta.quasiquotes

import org.jetbrains.plugins.scala.SlowTests
import org.junit.experimental.categories.Category

/**
 * Checks the type the Scala plugin infers for scala.meta statement
 * quasiquotes: each test asserts that the expression between the START/END
 * markers is typed as the tree class named in the trailing `//` comment.
 *
 * @author mutcianm
 * @since 21.10.16.
 */
@Category(Array(classOf[SlowTests]))
class StatApplyTest extends QuasiQuoteTypeInferenceTestBase {

  // q"class ..." infers Defn.Class
  def testClass(): Unit = doTest(
    s"""
       |${START}q"class Foo"$END
       |//Defn.Class
      """.stripMargin
  )

  // q"object ..." infers Defn.Object
  def testObject(): Unit = doTest(
    s"""
       |${START}q"object Foo"$END
       |//Defn.Object
      """.stripMargin
  )

  // q"trait ..." infers Defn.Trait
  def testTrait(): Unit = doTest(
    s"""
       |${START}q"trait Foo"$END
       |//Defn.Trait
      """.stripMargin
  )

  // q"def ..." infers Defn.Def
  def testDefnDef(): Unit = doTest(
    s"""
       |${START}q"def foo = 42"$END
       |//Defn.Def
      """.stripMargin
  )

  // q"val ..." infers Defn.Val
  def testDefnVal(): Unit = doTest(
    s"""
       |${START}q"val foo = 42"$END
       |//Defn.Val
      """.stripMargin
  )
}
| triplequote/intellij-scala | scala/scala-impl/test/scala/meta/quasiquotes/StatApplyTest.scala | Scala | apache-2.0 | 900 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*package org.scalatest.examples.asyncpropspec
import org.scalatest.AsyncPropSpec
import scala.concurrent.Future
import org.scalatest._
import prop._
class AddSpec extends AsyncPropSpec with TableDrivenPropertyChecks {
val examples =
Table(
("a", "b", "sum"),
(1, 2, 3),
(2, 3, 5),
(3, 4, 7)
)
def asyncForAll(table: TableFor3[Int, Int, Int])(fun: ((Int, Int, Int)) => Future[Assertion]): Future[Assertion] = {
Future.sequence(
table.map { case (a, b, expectedSum) =>
fun((a, b, expectedSum))
}
).map { assertions =>
succeed
}
}
def addSoon(addends: Int*): Future[Int] = Future { addends.sum }
property("addSoon will eventually compute a sum of passed Ints") {
asyncForAll(examples) { case (a, b, expectedSum) =>
val futureSum: Future[Int] = addSoon(a, b)
futureSum map { sum => assert(sum == expectedSum) }
}
}
def addNow(addends: Int*): Int = addends.sum
property("addNow will immediately compute a sum of passed Ints") {
forAll(examples) { case (a, b, expectedSum) =>
val sum: Int = addNow(a, b)
// You can also write synchronous tests. The body
// must have result type Assertion:
assert(sum == expectedSum)
}
}
}*/
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/asyncpropspec/AddSpec.scala | Scala | apache-2.0 | 1,872 |
package com.arcusys.valamis.version240.certificate

import com.arcusys.valamis.core.DbNameUtils._
import com.arcusys.valamis.core.SlickProfile
import com.arcusys.valamis.model.PeriodTypes

// Slick table mapping for per-course certificate goals (schema version 2.4.0).
trait CourseGoalTableComponent extends CertificateTableComponent{ self: SlickProfile =>
  import driver.simple._

  // Row shape: (certificateId, courseId, periodValue, periodType, arrangementIndex)
  type CourseGoal = (Long, Long, Int, PeriodTypes.Value, Int)

  class CourseGoalTable(tag: Tag) extends Table[CourseGoal](tag, tblName("CERT_COURSE_GOAL")) {
    // Persist the PeriodTypes enum as its string name.
    implicit val ValidPeriodTypeMapper = MappedColumnType.base[PeriodTypes.PeriodType, String](
      s => s.toString,
      s => PeriodTypes.withName(s)
    )

    def certificateId = column[Long]("CERTIFICATE_ID")
    def courseId = column[Long]("COURSE_ID")
    def periodValue = column[Int]("PERIOD_VALUE")
    // NOTE(review): "PERIOD_TPE" looks like a typo for PERIOD_TYPE, but the name
    // is part of the live schema — changing it would require a DB migration.
    def periodType = column[PeriodTypes.PeriodType]("PERIOD_TPE")
    def arrangementIndex = column[Int]("ARRANGEMENT_INDEX")

    def * = (certificateId, courseId, periodValue, periodType, arrangementIndex)

    // One goal per (certificate, course); rows are deleted with their certificate.
    def PK = primaryKey(pkName("CERT_COURSE_GOAL"), (certificateId, courseId))
    def certificateFK = foreignKey(fkName("CERT_COURSE_GOAL_TO_CERT"), certificateId, certificates)(x => x.id, onDelete = ForeignKeyAction.Cascade)
  }

  val courseGoals = TableQuery[CourseGoalTable]
} | ViLPy/Valamis | valamis-slick-persistence/src/main/scala/com/arcusys/valamis/version240/certificate/CourseGoalTableComponent.scala | Scala | lgpl-3.0 | 1,262 |
package ohnosequencesBundles.statika

import ohnosequences.statika._

/** Statika bundle that installs the yum "Development Tools" package group. */
case object cdevel extends Bundle() {

  def instructions: AnyInstructions = cmd("yum")("groupinstall", "-y", "Development Tools")
}
package com.github.j5ik2o.reactive.redis

import java.net.InetSocketAddress

import akka.actor.ActorSystem
import akka.io.Inet.SO.{ ReceiveBufferSize, SendBufferSize }
import akka.io.Tcp.SO.{ KeepAlive, TcpNoDelay }
import akka.stream.OverflowStrategy
import com.github.j5ik2o.reactive.redis.pool._
import monix.eval.Task
import monix.execution.Scheduler
import redis.clients.jedis.JedisPool
import redis.{ RedisClientPool, RedisServer }

import scala.concurrent.Await
import scala.concurrent.duration.Duration

/**
 * Shared fixture for redis-client benchmarks: boots an embedded redis server
 * and one connection pool per client library under comparison (reactive-redis
 * in several pooling configurations, jedis, rediscala and scala-redis).
 *
 * Call [[setup()]] before a benchmark run and [[tearDown()]] afterwards.
 */
@SuppressWarnings(
  Array("org.wartremover.warts.Null", "org.wartremover.warts.Var", "org.wartremover.warts.Serializable")
)
trait BenchmarkHelper {
  implicit val system: ActorSystem = ActorSystem()
  implicit val scheduler: Scheduler = Scheduler(system.dispatcher)

  val client: RedisClient = RedisClient()
  val sizePerPeer: Int    = 3

  // NOTE: despite the name, this is a duration in *milliseconds* (3 s); it is
  // fed directly to Thread.sleep. The name is kept for binary/source
  // compatibility with subclasses.
  val WAIT_IN_SEC: Int = 1000 * 3

  val redisTestServer: RedisTestServer = new RedisTestServer()

  private var _rediscalaPool: RedisClientPool = _

  def rediscalaPool: RedisClientPool = _rediscalaPool

  private var _poolOfJedisQueue: RedisConnectionPool[Task]   = _
  private var _poolOfJedisActor: RedisConnectionPool[Task]   = _
  private var _poolOfDefaultQueue: RedisConnectionPool[Task] = _
  private var _poolOfDefaultActor: RedisConnectionPool[Task] = _

  def reactiveRedisPoolOfJedisQueue: RedisConnectionPool[Task]   = _poolOfJedisQueue
  def reactiveRedisPoolOfJedisActor: RedisConnectionPool[Task]   = _poolOfJedisActor
  def reactiveRedisPoolOfDefaultQueue: RedisConnectionPool[Task] = _poolOfDefaultQueue
  def reactiveRedisPoolOfDefaultActor: RedisConnectionPool[Task] = _poolOfDefaultActor

  private var _jedisPool: JedisPool = _

  def jedisPool: JedisPool = _jedisPool

  private var _scalaRedisPool: com.redis.RedisClientPool = _

  def scalaRedisPool: com.redis.RedisClientPool = _scalaRedisPool

  /** Hook for subclasses to seed data after all pools are up. */
  def fixture(): Unit

  def setup(): Unit = {
    redisTestServer.start()
    // Give the embedded server time to come up before connecting.
    Thread.sleep(WAIT_IN_SEC)
    _jedisPool = new JedisPool("127.0.0.1", redisTestServer.getPort)
    val peerConfig: PeerConfig =
      PeerConfig(
        new InetSocketAddress("127.0.0.1", redisTestServer.getPort),
        options = Vector(TcpNoDelay(true), KeepAlive(true), SendBufferSize(2048), ReceiveBufferSize(2048)),
        overflowStrategyOnSourceQueueMode = OverflowStrategy.dropNew,
        requestBufferSize = Int.MaxValue
      )
    // _pool = StormpotPool.ofSingle(StormpotConfig(), peerConfig, RedisConnection(_, _))
    // _pool = ScalaPool.ofSingle(ScalaPoolConfig(), peerConfig, RedisConnection(_, _))
    // _pool = FOPPool.ofSingle(FOPConfig(), peerConfig, RedisConnection(_, _))
    //_pool = RedisConnectionPool.ofSingleRoundRobin(sizePerPeer, peerConfig, RedisConnection(_, _))
    _poolOfDefaultQueue =
      CommonsPool.ofSingle(CommonsPoolConfig(), peerConfig.withConnectionSourceQueueMode, RedisConnection.apply)
    _poolOfDefaultActor =
      CommonsPool.ofSingle(CommonsPoolConfig(), peerConfig.withConnectionSourceActorMode, RedisConnection.apply)
    _poolOfJedisQueue =
      CommonsPool.ofSingle(CommonsPoolConfig(), peerConfig.withConnectionSourceQueueMode, RedisConnection.ofJedis)
    _poolOfJedisActor =
      CommonsPool.ofSingle(CommonsPoolConfig(), peerConfig.withConnectionSourceActorMode, RedisConnection.ofJedis)
    _rediscalaPool = _root_.redis.RedisClientPool(List(RedisServer("127.0.0.1", redisTestServer.getPort)))
    _scalaRedisPool = new com.redis.RedisClientPool("127.0.0.1", redisTestServer.getPort)
    Thread.sleep(WAIT_IN_SEC)
    fixture()
  }

  def tearDown(): Unit = {
    _poolOfJedisQueue.dispose()
    _poolOfJedisActor.dispose()
    _poolOfDefaultQueue.dispose()
    _poolOfDefaultActor.dispose()
    // BUG FIX: the jedis and scala-redis pools were previously never closed,
    // leaking their sockets across benchmark runs.
    _jedisPool.close()
    _scalaRedisPool.close
    // The rediscala pool runs on `system`, which is terminated below.
    redisTestServer.stop()
    Await.result(system.terminate(), Duration.Inf)
  }
}
| j5ik2o/reactive-redis | benchmark/src/main/scala/com/github/j5ik2o/reactive/redis/BenchmarkHelper.scala | Scala | mit | 3,830 |
package lila.user

import reactivemongo.api.bson._
import scala.concurrent.duration._

import lila.common.LightUser
import lila.memo.CacheApi._
import lila.rating.{ Perf, PerfType }
import lila.db.dsl._

import User.{ LightCount, LightPerf }

/**
 * User-related caches: leaderboards (top 10 / top 200 per perf / weekly /
 * games played), online top players, bot ids and username autocompletion.
 * Expensive leaderboard queries are persisted through mongoCache; cheap or
 * short-lived ones stay in-memory via cacheApi.
 */
final class Cached(
    userRepo: UserRepo,
    onlineUserIds: () => Set[User.ID],
    mongoCache: lila.memo.MongoCache.Api,
    cacheApi: lila.memo.CacheApi,
    rankingApi: RankingApi
)(implicit
    ec: scala.concurrent.ExecutionContext,
    system: akka.actor.ActorSystem
) {

  implicit private val LightUserBSONHandler  = Macros.handler[LightUser]
  implicit private val LightPerfBSONHandler  = Macros.handler[LightPerf]
  implicit private val LightCountBSONHandler = Macros.handler[LightCount]

  // In-memory, refreshed every 2 minutes; the leaderboard computation itself
  // is capped at 2 minutes and monitored.
  val top10 = cacheApi.unit[Perfs.Leaderboards] {
    _.refreshAfterWrite(2 minutes)
      .buildAsyncFuture { _ =>
        rankingApi
          .fetchLeaderboard(10)
          .withTimeout(2 minutes)
          .monSuccess(_.user.leaderboardCompute)
      }
  }

  // Mongo-backed, keyed by perf id. NOTE(review): the mongo TTL (19 min) is
  // deliberately just under the refresh interval (20 min) — presumably so the
  // persisted entry expires before the next refresh; confirm with MongoCache.
  val top200Perf = mongoCache[Perf.ID, List[User.LightPerf]](
    PerfType.leaderboardable.size,
    "user:top200:perf",
    19 minutes,
    _.toString
  ) { loader =>
    _.refreshAfterWrite(20 minutes)
      .buildAsyncFuture {
        loader {
          rankingApi.topPerf(_, 200)
        }
      }
  }

  // Weekly #1 of each leaderboardable perf, flattened into a single list.
  private val topWeekCache = mongoCache.unit[List[User.LightPerf]](
    "user:top:week",
    9 minutes
  ) { loader =>
    _.refreshAfterWrite(10 minutes)
      .buildAsyncFuture {
        loader { _ =>
          PerfType.leaderboardable
            .map { perf =>
              rankingApi.topPerf(perf.id, 1)
            }
            .sequenceFu
            .dmap(_.flatten)
        }
      }
  }

  def topWeek = topWeekCache.get {}

  // Top 10 users by number of games played.
  val top10NbGame = mongoCache.unit[List[User.LightCount]](
    "user:top:nbGame",
    74 minutes
  ) { loader =>
    _.refreshAfterWrite(75 minutes)
      .buildAsyncFuture {
        loader { _ =>
          userRepo topNbGame 10 dmap (_.map(_.lightCount))
        }
      }
  }

  // Highest-rated currently-online humans (bots excluded), refreshed every minute.
  private val top50OnlineCache = cacheApi.unit[List[User]] {
    _.refreshAfterWrite(1 minute)
      .buildAsyncFuture { _ =>
        userRepo.byIdsSortRatingNoBot(onlineUserIds(), 50)
      }
  }
  def getTop50Online = top50OnlineCache.getUnit

  def rankingsOf(userId: User.ID): lila.rating.UserRankMap = rankingApi.weeklyStableRanking of userId

  private[user] val botIds = cacheApi.unit[Set[User.ID]] {
    _.refreshAfterWrite(10 minutes)
      .buildAsyncFuture(_ => userRepo.botIds)
  }

  private def userIdsLikeFetch(text: String) =
    userRepo.userIdsLikeFilter(text, $empty, 12)

  private val userIdsLikeCache = cacheApi[String, List[User.ID]](1024, "user.like") {
    _.expireAfterWrite(5 minutes).buildAsyncFuture(userIdsLikeFetch)
  }

  // Autocompletion: only short prefixes (< 5 chars) are cached — presumably
  // because the keyspace of longer prefixes is too large to be worth caching.
  def userIdsLike(text: String): Fu[List[User.ID]] = {
    if (text.lengthIs < 5) userIdsLikeCache get text
    else userIdsLikeFetch(text)
  }
}
| luanlv/lila | modules/user/src/main/Cached.scala | Scala | mit | 2,929 |
package roshan

import akka.actor.{ActorSystem, ActorRef}
import roshan.protocols.MapProtocol.AreaMessage
import roshan.db.dbCharacter
import roshan.model.Direction

/** Map-geometry helpers and shared test fixtures. */
object Useful {

  /** Width/height of one map section, in tiles. */
  val tilesPerMap = 20

  var TestActorSystem: Option[ActorSystem] = None
  var isTesting: Boolean = false

  /** Switches the process into testing mode and returns a dedicated actor system. */
  def testing: ActorSystem = {
    isTesting = true
    val system = ActorSystem("TestingSystem")
    TestActorSystem = Some(system)
    system
  }

  val defaultCharacter = dbCharacter(None, 1, 0, 30, Direction.S, Nil, 10, 10)

  // Monotonic id source for test characters (test-only, not thread-safe).
  private var idCounter: Int = 1

  /** Returns a copy of the default character carrying a fresh unique id. */
  def testCharacter: dbCharacter = {
    idCounter += 1
    defaultCharacter.copy(id = Some(idCounter))
  }

  /** Top-left corner of the map section containing (x, y). */
  def mapSection(x: Int, y: Int): (Int, Int) = {
    def snap(coord: Int): Int = (coord / tilesPerMap) * tilesPerMap
    (snap(x), snap(y))
  }

  /** Coordinates of (x, y) relative to the origin of their map section. */
  def relativeToMapSection(x: Int, y: Int): (Int, Int) =
    // JVM remainder identity: c % n == c - (c / n) * n for every Int,
    // so this is exactly the original origin-subtraction formula.
    (x % tilesPerMap, y % tilesPerMap)

  /** Row-major tile index of (x, y) within its map section. */
  def getTileRelativeToMapSection(x: Int, y: Int): Int = {
    val (relX, relY) = relativeToMapSection(x, y)
    relY * tilesPerMap + relX
  }

  /**
   * Delivers an [[AreaMessage]] to `recipient`, recursively splitting its
   * rectangle so that every emitted piece lies within a single map section.
   */
  def splitMessage(message: AreaMessage, recipient: ActorRef): Unit = {
    val topLeft = mapSection(message.x, message.y)
    val bottomRight = mapSection(message.x2, message.y2)
    if (topLeft != bottomRight) {
      // Clip the rectangle to the section holding its top-left corner...
      val clippedX2 = if (message.x2 > topLeft._1 + tilesPerMap) topLeft._1 + tilesPerMap else message.x2
      val clippedY2 = if (message.y2 > topLeft._2 + tilesPerMap) topLeft._2 + tilesPerMap else message.y2
      recipient ! AreaMessage(message.x, message.y, clippedX2, clippedY2, message.message)
      // ...then recurse on the right, bottom, and bottom-right remainders.
      if (message.x2 > clippedX2)
        splitMessage(AreaMessage(clippedX2, message.y, message.x2, message.y2, message.message), recipient)
      if (message.y2 > clippedY2)
        splitMessage(AreaMessage(message.x, clippedY2, message.x2, message.y2, message.message), recipient)
      if (message.x2 > clippedX2 && message.y2 > clippedY2)
        splitMessage(AreaMessage(clippedX2, clippedY2, message.x2, message.y2, message.message), recipient)
    } else recipient ! message
  }
}
| andychase/roshan | src/main/scala/roshan/Useful.scala | Scala | mit | 2,158 |
package p

trait NRoot[A]

object FastComplex {
  final def sqrt(x: Double): Double = Math.sqrt(x)
  final def sqrt[A](a: A)(implicit ev: NRoot[A]): A = ???

  object Inner {
    import java.lang.Math.sqrt
    // Negative compiler test (apparently tracking ticket 8024, per the file
    // name): the call below is rejected with a misleading diagnostic:
    //   error: reference to sqrt is ambiguous;
    //   it is both defined in object FastComplex and imported subsequently by
    sqrt(0d)
  }
}
| som-snytt/dotty | tests/untried/neg/t8024b.scala | Scala | apache-2.0 | 378 |
package scoverage

import java.io.File
import java.util.concurrent.Executors

import org.scalatest.{BeforeAndAfter, FunSuite}

import scala.collection.breakOut
import scala.concurrent._
import scala.concurrent.duration._

/**
 * Verify that [[Invoker.invoked()]] is thread-safe: many concurrent calls
 * must not corrupt the measurement file.
 */
class InvokerConcurrencyTest extends FunSuite with BeforeAndAfter {

  implicit val executor = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(8))
  val measurementDir = new File("target/invoker-test.measurement")

  before {
    deleteMeasurementFiles()
    measurementDir.mkdirs()
  }

  test("calling Invoker.invoked on multiple threads does not corrupt the measurement file") {

    val testIds: Set[Int] = (1 to 1000).toSet

    // Fire 1000 concurrent `invoked` calls at the shared pool to stress the method.
    val futures: List[Future[Unit]] = testIds.map { i: Int =>
      Future {
        Invoker.invoked(i, measurementDir.toString)
      }
    }(breakOut)

    futures.foreach(Await.result(_, 1.second))

    // Now verify that the measurement file is not corrupted by loading it
    val measurementFiles = Invoker.findMeasurementFiles(measurementDir)
    val idsFromFile = Invoker.invoked(measurementFiles).toSet

    // BUG FIX: previously this was a bare `idsFromFile === testIds`, which is a
    // discarded Boolean under TripleEquals and could never fail the test. It
    // must be wrapped in assert() to actually check the result.
    assert(idsFromFile === testIds)
  }

  after {
    deleteMeasurementFiles()
    measurementDir.delete()
  }

  // Removes all measurement files so each run starts from a clean directory.
  private def deleteMeasurementFiles(): Unit = {
    if (measurementDir.isDirectory)
      measurementDir.listFiles().foreach(_.delete())
  }
}
| xudongzheng1225/scalac-scoverage-plugin | scalac-scoverage-runtime/src/test/scala/scoverage/InvokerConcurrencyTest.scala | Scala | apache-2.0 | 1,478 |
package ru.pavkin.todoist.api.core

import ru.pavkin.todoist.api.core.decoder.SingleResponseDecoder
import ru.pavkin.todoist.api.core.model.{AccessToken, TokenExchange}
import ru.pavkin.todoist.api.core.query.SingleQueryRequestDefinition

/**
 * OAuth API wired to an executor: combines the request-description side
 * ([[OAuthAPI]]) with the execution machinery ([[ExecutedAPI]]).
 */
trait ExecutedOAuthAPI[F[_], L[_], P[_], Req, Base]
  extends OAuthAPI[F, P, Base]
    with ExecutedAPI[F, L, P, Req, Base] {

  /**
   * Builds the request definition for OAuth step 3: exchanging a
   * [[TokenExchange]] payload for an [[AccessToken]].
   *
   * @param request the token-exchange payload
   * @param trr     evidence converting the payload to a raw request
   * @param parser  decoder for the [[AccessToken]] response
   */
  def oAuthStep3(request: TokenExchange)
                (implicit trr: ToRawRequest[TokenExchange],
                 parser: SingleResponseDecoder[P, Base, AccessToken]): RequestDefinition[F, P, AccessToken, Base] = {
    // Capture this call's concrete raw request as the implicit evidence
    // SingleQueryRequestDefinition expects.
    implicit val hrr: HasRawRequest[AccessToken] = HasRawRequest(trr.rawRequest(request))
    new SingleQueryRequestDefinition[F, L, P, model.AccessToken, Req, Base](
      requestFactory, executor, flattener, parser
    )
  }
}
| vpavkin/todoist-api-scala | core/src/main/scala/ru/pavkin/todoist/api/core/ExecutedOAuthAPI.scala | Scala | mit | 816 |
package org.bjean.sample.wordcount.processor

import java.nio.file.{Files, Paths}

import org.apache.spark.SparkContext
import org.bjean.sample.support.SparkContextTrait
import org.scalatest.{BeforeAndAfter, Matchers, WordSpec}

import scala.io.Source

/** End-to-end tests for [[WordCountProcessor]] using canned input and expected-output files. */
class WordCountProcessorTest extends WordSpec with Matchers with SparkContextTrait with BeforeAndAfter {

  // Per-test output location: a temp directory is created then immediately
  // deleted, so only a fresh, non-existent path is reserved for the job.
  var outputPath: String = _

  before {
    outputPath = Files.createTempDirectory("sm-processing-output").toAbsolutePath.toString
    Files.delete(Paths.get(outputPath))
    println(s"Output folder will be: ${outputPath}")
  }

  "Running WordCountProcessor" should {
    "output correct count for single word" in testForUseCase("single")
    "output correct count for multiple words" in testForUseCase("multiple")
  }

  // Resolves /input/<name>.txt from the test classpath.
  def inputFilePath(name: String) = getClass.getResource(s"/input/${name}.txt").getPath

  // NOTE(review): `withRawSessionPathArgs` is never read in this method — dead
  // parameter, candidate for removal.
  // NOTE(review): only part-00000 is inspected although the Config requests 4
  // partitions — presumably the processor coalesces its output; confirm.
  def testForUseCase(usecase: String, withRawSessionPathArgs: Boolean = true) = withSparkContext("a_spark_ctx") { (ctx: SparkContext) =>
    val expectedOutput = Source.fromInputStream(getClass.getClassLoader.getResourceAsStream(s"expected_output/${usecase}.txt")).toList
    val args = Config(inputFilePath(usecase), outputPath,4)
    val job = new WordCountProcessor(ctx, args)
    job.runJob()

    val output = Source.fromFile(s"${outputPath}/part-00000").toList
    output should contain theSameElementsAs (expectedOutput)
  }
}
| bjet007/word-count-spark-aws | processing/src/test/scala/org/bjean/sample/wordcount/processor/WordCountProcessorTest.scala | Scala | apache-2.0 | 1,410 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Suite **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.testsuite.javalib
import scala.scalajs.js
import org.scalajs.jasmine.JasmineExpectation
import org.scalajs.jasminetest.JasmineTest
import java.util.{ Formatter, Formattable, FormattableFlags }
import java.lang.{
Double => JDouble,
Float => JFloat,
Integer => JInteger,
Long => JLong,
Byte => JByte,
Short => JShort,
Boolean => JBoolean,
String => JString
}
object FormatterTest extends JasmineTest {
  // Arbitrary reference type used to exercise %b/%h on plain objects.
  class HelperClass

  // Formattable stub that records the arguments of its last formatTo call so
  // tests can assert which flags/width/precision the Formatter passed along.
  class FormattableClass extends Formattable {
    // NOTE(review): `frm` is never assigned — apparently a dead field.
    var frm: Formatter = _
    var flags: Int = _
    var width: Int = _
    var precision: Int = _
    var calls = 0

    def formatTo(frm: Formatter, flags: Int, width: Int, precision: Int): Unit = {
      this.calls += 1
      this.flags = flags
      this.width = width
      this.precision = precision
      frm.out().append("foobar")
    }

    // Asserts the recorded call count and the last flags/width/precision seen.
    def expectCalled(times: Int, flags: Int, width: Int, precision: Int): Unit = {
      expect(this.calls).toEqual(times)
      expect(this.flags).toEqual(flags)
      expect(this.width).toEqual(width)
      expect(this.precision).toEqual(precision)
    }
  }
  // Formats `format` with `args` through a fresh java.util.Formatter and
  // returns a Jasmine expectation over the resulting string.
  def expectF(format: String, args: AnyRef*): JasmineExpectation = {
    val fmt = new Formatter()
    val res = fmt.format(format, args:_*).toString()
    fmt.close()
    expect(res)
  }

  // Like expectF but formats a FormattableClass instance, additionally
  // asserting that formatTo was invoked exactly once with the given
  // flags/width/precision.
  def expectFC(format: String, flags: Int, width: Int,
      precision: Int): JasmineExpectation = {
    val fc = new FormattableClass
    val exp = expectF(format, fc)
    fc.expectCalled(1, flags, width, precision)
    exp
  }

  // Asserts that formatting the given arguments throws.
  def expectThrow(format: String, args: AnyRef*): Unit = {
    val fmt = new Formatter()
    expect(() => fmt.format(format, args:_*)).toThrow
  }
describe("java.util.Formatter") {
// Explicitly define these as `var`'s to avoid any compile-time constant folding
var IntMax: Int = Int.MaxValue
var IntMin: Int = Int.MinValue
var ByteMax: Byte = Byte.MaxValue
var ByteMin: Byte = Byte.MinValue
var ShortMax: Short = Short.MaxValue
var ShortMin: Short = Short.MinValue
it("should provide 'b' conversion") {
expectF("%b", null).toEqual("false")
expectF("%b", true: JBoolean).toEqual(JString.valueOf(true))
expectF("%b", false: JBoolean).toEqual(JString.valueOf(false))
expectF("%b", new HelperClass).toEqual("true")
}
it("should provide 'h' conversion") {
val x = new HelperClass
expectF("%h", x).toEqual(Integer.toHexString(x.hashCode()))
expectF("%H", x).toEqual(Integer.toHexString(x.hashCode()).toUpperCase())
expectF("%h", null).toEqual("null")
}
it("should provide 's' conversion") {
expectFC("%s", 0, -1, -1).toEqual("foobar")
expectFC("%-s", FormattableFlags.LEFT_JUSTIFY, -1, -1).toEqual("foobar")
expectFC("%-10s", FormattableFlags.LEFT_JUSTIFY, 10, -1).toEqual("foobar")
expectFC("%#-10.2s", FormattableFlags.LEFT_JUSTIFY |
FormattableFlags.ALTERNATE, 10, 2).toEqual("foobar")
expectFC("%#10.2S", FormattableFlags.UPPERCASE |
FormattableFlags.ALTERNATE, 10, 2).toEqual("foobar")
expectF("%10s", "hello").toEqual(" hello")
expectF("%-10s", "hello").toEqual("hello ")
expectThrow("%#s", "hello")
}
it("should provide 'c' conversion") {
expectF("%-5c", new Character('!')).toEqual("! ")
}
    // 'd': signed decimal. The '0' flag zero-pads between the sign and digits.
    it("should provide 'd' conversion") {
      expectF("%d", new Integer(5)).toEqual("5")
      expectF("%05d", new Integer(5)).toEqual("00005")
      expectF("%5d", new Integer(-10)).toEqual("  -10")
      expectF("%05d", new Integer(-10)).toEqual("-0010")
    }

    // 'o': octal. Negative values print as the unsigned two's-complement
    // bit pattern of the (widened) int/long, matching java.util.Formatter.
    it("should provide 'o' conversion") {
      expectF("%o", new JInteger(8)).toEqual("10")
      expectF("%05o", new JInteger(16)).toEqual("00020")
      expectF("%5o", new JInteger(-10)).toEqual("37777777766")
      expectF("%05o", new JInteger(-10)).toEqual("37777777766")
      expectF("%o", new JByte(8.toByte)).toEqual("10")
      expectF("%05o", new JByte(16.toByte)).toEqual("00020")
      expectF("%14o", new JByte(-10.toByte)).toEqual("   37777777766")
      expectF("%05o", new JByte(-10.toByte)).toEqual("37777777766")
      expectF("%o", new JShort(8.toShort)).toEqual("10")
      expectF("%05o", new JShort(16.toShort)).toEqual("00020")
      expectF("%5o", new JShort(-10.toShort)).toEqual("37777777766")
      expectF("%015o",new JShort(-10.toShort)).toEqual("000037777777766")
      expectF("%05o", new JLong(-5L)).toEqual("1777777777777777777773")
    }

    // 'x'/'X': hexadecimal. '#' adds the 0x/0X prefix; negatives use the
    // unsigned bit pattern, as with octal.
    it("should provide 'x' conversion") {
      expectF("%0#5x", new JInteger(5)).toEqual("0x005")
      expectF("%#5x", new JInteger(5)).toEqual("  0x5")
      expectF("%#5X", new JInteger(5)).toEqual("  0X5")
      expectF("%x", new JInteger(-3)).toEqual("fffffffd")
      expectF("%x", new JByte(-4.toByte)).toEqual("fffffffc")
      expectF("%0#5x", new JByte(5.toByte)).toEqual("0x005")
      expectF("%#5x", new JByte(5.toByte)).toEqual("  0x5")
      expectF("%#5X", new JByte(5.toByte)).toEqual("  0X5")
      expectF("%x", new JByte(-3.toByte)).toEqual("fffffffd")
      expectF("%0#5x", new JShort(5.toShort)).toEqual("0x005")
      expectF("%#5x", new JShort(5.toShort)).toEqual("  0x5")
      expectF("%#5X", new JShort(5.toShort)).toEqual("  0X5")
      expectF("%x", new JShort(-3.toShort)).toEqual("fffffffd")
      expectF("%x", new JLong(-5L)).toEqual("fffffffffffffffb")
      expectF("%X", new JLong(26L)).toEqual("1A")
    }

    // 'e': scientific notation. '(' wraps negatives in parentheses;
    // padding flags do not apply to Infinity/NaN, which are plain strings.
    it("should provide 'e' conversion") {
      expectF("%e", new JDouble(1000)).toEqual("1.000000e+03")
      expectF("%.0e", new JDouble(1.2e100)).toEqual("1e+100")
      // We use 1.51e100 in this test, since we seem to have a floating
      // imprecision at exactly 1.5e100 that yields to a rounding error
      // towards (1e+100 instead of 2e+100)
      expectF("%.0e", new JDouble(1.51e100)).toEqual("2e+100")
      expectF("%10.2e", new JDouble(1.2e100)).toEqual(" 1.20e+100")
      expectF("%012.4e", new JFloat(1.2e-21f)).toEqual("001.2000e-21")
      expectF("%012.4E", new JFloat(1.2e-21f)).toEqual("001.2000E-21")
      expectF("%(015.4e", new JFloat(-1.2e-21f)).toEqual("(0001.2000e-21)")

      // Tests with infinity and NaN
      expectF("%e", new JDouble(Double.PositiveInfinity)).toEqual("Infinity")
      expectF("%e", new JDouble(Double.NegativeInfinity)).toEqual("-Infinity")
      expectF("%010e", new JDouble(Double.PositiveInfinity)).toEqual("  Infinity")
      expectF("%-10e", new JDouble(Double.PositiveInfinity)).toEqual("Infinity  ")
      expectF("%(e", new JDouble(Double.NegativeInfinity)).toEqual("(Infinity)")
      expectF("%010e", new JDouble(Double.NaN)).toEqual("       NaN")
    }

    // 'g': general float; switches between plain and scientific form
    // depending on the magnitude relative to the precision.
    it("should provide 'g' conversion") {
      expectF("%g", new JDouble(.5e-4)).toEqual("5.00000e-05")
      expectF("%g", new JDouble(3e-4)).toEqual("0.000300000")
      expectF("%.3g", new JDouble(3e-4)).toEqual("0.000300")
      expectF("%.2g", new JDouble(1e-3)).toEqual("0.0010")
      expectF("%g", new JDouble(3e5)).toEqual("300000")
      expectF("%.3g", new JDouble(3e5)).toEqual("3.00e+05")
      expectF("%04g", new JDouble(Double.NaN)).toEqual(" NaN")
    }

    // 'f': fixed-point decimal. Note the float case exposes single-precision
    // rounding (3e10f is not exactly representable).
    it("should provide 'f' conversion") {
      expectF("%f", new JDouble(3.3)).toEqual("3.300000")
      expectF("%0(9.4f", new JDouble(-4.6)).toEqual("(04.6000)")
      expectF("%f", new JFloat(3e10f)).toEqual("30000001024.000000")
      expectF("%f", new JDouble(3e10)).toEqual("30000000000.000000")
      expectF("%04f", new JDouble(Double.NaN)).toEqual(" NaN")
    }

    it("should support '%%'") {
      expectF("%d%%%d", new JInteger(1), new JInteger(2)).toEqual("1%2")
    }

    it("should support '%n'") {
      expectF("%d%n%d", new JInteger(1), new JInteger(2)).toEqual("1\\n2")
    }

    it("should survive `null` and `undefined`") {
      expectF("%s", null).toEqual("null")
      expectF("%s", js.undefined).toEqual("undefined")
    }

    it("should allow 'f' string interpolation to survive `null` and `undefined`") {
      expect(f"${null}%s").toEqual("null")
      expect(f"${js.undefined}%s").toEqual("undefined")
    }

    // Positional arguments: `n$` selects the n-th argument (1-based);
    // `<` re-uses the argument of the previous specifier.
    it("should allow positional arguments") {
      expectF("%2$d %1$d", new JInteger(1), new JInteger(2)).toEqual("2 1")
      expectF("%2$d %2$d %d", new JInteger(1), new JInteger(2)).toEqual("2 2 1")
      expectF("%2$d %<d %d", new JInteger(1), new JInteger(2)).toEqual("2 2 1")
    }

    it("should fail when called after close") {
      val f = new Formatter()
      f.close()
      expect(() => f.toString()).toThrow
    }

    it("should fail with bad format specifier") {
      expectThrow("hello world%")
      expectThrow("%%%")
      expectThrow("%q")
      expectThrow("%1")
      expectThrow("%_f")
    }

    it("should fail with not enough arguments") {
      expectThrow("%f")
      expectThrow("%d%d%d", new JInteger(1), new JInteger(1))
      expectThrow("%10$d", new JInteger(1))
    }
}
}
| jmnarloch/scala-js | test-suite/src/test/scala/org/scalajs/testsuite/javalib/FormatterTest.scala | Scala | bsd-3-clause | 9,510 |
package com.signalcollect.dcop.evaluation
import scala.collection.mutable
import scala.math.Ordering.Implicits._
import com.signalcollect.dcop.modules.UtilityConfig
/**
 * Configuration for Event-As-Variable (EAV) DCOP agents: exposes the agent's
 * pairwise utility table against its neighbours and derived min/max bounds.
 *
 * NOTE(review): `utilities` is keyed by (neighbourId, ownAction, neighbourAction);
 * inferred from the lookups below — confirm against the producing code.
 */
trait EavConfig[AgentId, Action, UtilityType, +Config <: EavConfig[AgentId, Action, UtilityType, Config]] extends UtilityConfig[AgentId, Action, UtilityType, Config] {
  // Ordering used to compare utilities (min/max and conflict detection).
  implicit protected def utilEv: Ordering[UtilityType]

  def agentId: AgentId
  // Allowed actions of each neighbour.
  def domainNeighborhood: collection.Map[AgentId, Set[Action]]
  // Pairwise utility table; entries missing from it fall back to `defaultUtility`.
  def utilities: collection.Map[(AgentId, Action, Action), UtilityType]
  def defaultUtility: UtilityType

  override val centralVariableAssignment: (AgentId, Action) = (agentId, centralVariableValue)

  // Per-neighbour (min, max) utility bounds. When the table is complete for a
  // neighbour the plain min/max is used; otherwise `defaultUtility` is folded
  // in to account for the missing entries.
  val minMaxUtilities = utilities.groupBy(x => x._1._1).map(x => (x._1,
    if (domain.forall(action =>
      domainNeighborhood(x._1).forall(actionNeighbor =>
        x._2.contains((x._1, action, actionNeighbor)))))
      (x._2.values.min, x._2.values.max)
    else
      (x._2.values.fold(defaultUtility)(_ min _), x._2.values.fold(defaultUtility)(_ max _))))

  // Intentionally abstract here (throws if not overridden): subclasses must
  // supply the agent's current action even though a superclass defines it.
  override def centralVariableValue: Action = ???

  /**
   * Neighbours whose achievable utility against `centralVariableValue` is
   * strictly below their known maximum, i.e. neighbours we are "in conflict"
   * with for that action.
   */
  override def expectedConflicts(centralVariableValue: Action) = {
    val conflicts = Set.newBuilder[AgentId]
    domainNeighborhood.withFilter(x => minMaxUtilities.get(x._1) match {
      case Some((minUtility, maxUtility)) =>
        (neighborhood.get(x._1) match {
          case Some(actionNeighbor) =>
            utilities.getOrElse((x._1, centralVariableValue, actionNeighbor), defaultUtility)
          case None => minUtility
        }) < maxUtility
      case None => false
    }).foreach(conflicts += _._1)
    conflicts.result
  }

  // Neighbour assignments in the stable iteration order of
  // `domainNeighborhood`, followed by any remaining neighbours.
  protected def orderedNeighborhood: collection.Map[AgentId, Action] = {
    val builder = mutable.LinkedHashMap.newBuilder[AgentId, Action]
    builder ++= domainNeighborhood.view.collect({
      case (x, _) if neighborhood.contains(x) => (x, neighborhood(x))
    })
    builder ++= neighborhood
    builder.result
  }
}
| flueckiger/dcop-algorithms-evaluation | src/main/scala/com/signalcollect/dcop/evaluation/EavConfig.scala | Scala | apache-2.0 | 2,105 |
package samples
import java.util.Properties
import org.nats._
object SecurePub {

  /**
   * Connects to a NATS server over TLS, publishes a single message on the
   * "hello" subject and exits.
   *
   * Assumes `./truststore` and `./keystore` exist relative to the working
   * directory, both protected with the password "password".
   */
  def main(args: Array[String]): Unit = {
    // Was `var`; these references are never reassigned, so `val` is correct.
    val props = new Properties
    props.put("truststore", "./truststore")
    props.put("truststore_pass", "password")
    // KeyStore is used only when tlsverify is set on the server.
    props.put("keystore", "./keystore")
    props.put("keystore_pass", "password")

    val conn = Conn.connect(props)
    println("Publishing...")
    conn.publish("hello", "world")
    conn.close
    // Force JVM shutdown in case the client left non-daemon threads running.
    sys.exit
  }
} | tyagihas/scala_nats | src/test/scala/samples/SecurePub.scala | Scala | mit | 519 |
// KeYmaeraD proof script for a generalized robot safety property. The tactic
// order below IS the proof; do not reorder or "simplify" any step.
object Script {

  // Loop invariant: positive gain K and horizon e, unit normal (nx, ny), and
  // the robot stays on the safe side of the line through p with normal n.
  val loopInv =
    parseFormula("K() > 0 & e() > 0 & nx()^2 + ny()^2 = 1 & (qx() - px()) * nx() + (qy() - py()) * ny() >=0")

  // Shared closing tactic for the branches that fall to arithmetic after
  // solving the differential equation at the endpoint.
  val easybranchT =
    composelistT(
      hpalpha1T*,
      diffsolveT(RightP(0), Endpoint),
      hpalpha1T*,
      tryruleT(andRight)<(
        easiestT,
        alleasyT
      )
    )

  // Cut used in branch 2: restates the post-ODE safety margin with the
  // (q - p) terms grouped so unsubstitution + arithmetic can close it.
  val cutb2 =
    cutT(
      StandardCut,
      parseFormula(
        "(qx() + K() * (fx() - (fx() * nx() + fy() * ny() + " +
        "((qx() - px()) * nx() + (qy() - py()) * ny() + " +
        "1 / 2 * K() * (FXP * nx() + FYP * ny()) * e()^2) * TMP) * nx()) * s() + " +
        "1 / 2 * K() * FXP * s()^2 - px()) * nx() + " +
        "(qy() + K() * (fy() - (fx() * nx() + fy() * ny() + " +
        "((qx() - px()) * nx() + (qy() - py()) * ny() + " +
        "1 / 2 * K() * (FXP * nx() + FYP * ny()) * e()^2) * TMP) * ny()) * s() + " +
        "1 / 2 * K() * FYP * s()^2 - py()) * ny() >= 0"),
      parseFormula(
        "((qx() - px() ) + K() * (fx() - (fx() * nx() + fy() * ny() + " +
        "((qx() - px()) * nx() + (qy() - py()) * ny() + " +
        "1 / 2 * K() * (FXP * nx() + FYP * ny()) * e()^2) * TMP) * nx()) * s() + " +
        "1 / 2 * K() * FXP * s()^2) * nx() + " +
        "((qy() - py()) + K() * (fy() - (fx() * nx() + fy() * ny() + " +
        "((qx() - px()) * nx() + (qy() - py()) * ny() + " +
        "1 / 2 * K() * (FXP * nx() + FYP * ny()) * e()^2) * TMP) * ny()) * s() + " +
        "1 / 2 * K() * FYP * s()^2) * ny() >= 0")
    )

  // Cut used in branch 5: under TMP * K() * e() = 1 and a positive normal
  // acceleration, the projected distance stays nonnegative along s.
  val cutb5a =
    cutmT(
      StandardCut,
      List(parseFormula("TMP * K() * e() = 1"),
        parseFormula("FXP * nx() + FYP * ny() > 0")),
      parseFormula("((qx() - px()) * nx() + (qy() - py()) * ny()) + " +
        "(K() * ((fx() * nx() + fy() * ny()) - " +
        "((fx() * nx() + fy() * ny()) + ((qx() - px()) * nx() + " +
        "(qy() - py()) * ny() + 1 / 2 * K() * (FXP * nx() + FYP * " +
        "ny()) * e()^2) * TMP)) * s()) " +
        "+ 1 / 2 * K() * (FXP * nx() + FYP * ny())* s()^2 >= 0"))

  // Cut used in branch 6: with nonnegative normal acceleration the
  // zero-control evolution keeps the safety margin.
  val cutb6 =
    cutT(
      StandardCut,
      parseFormula("FXP * nx() + FYP * ny() >= 0"),
      parseFormula("(qx() + K() * (fx() - 0 * nx()) * e() + 1 / 2 * K() * FXP * e()^2 - px()) * nx() + " +
        "(qy() + K() * (fy() - 0 * ny()) * e() + 1 / 2 * K() * FYP * e()^2 - py()) * ny() >= 0")
    )

  // Main proof: loop induction on `loopInv`, then one sub-proof per
  // controller branch (7 branches, labelled inline below).
  val main =
    tryruleT(loopInduction(loopInv))<(
      easiestT,
      composelistT(
        hpalpha1T*,
        tryruleT(andRight)<(
          composelistT(
            hpalpha1T*,
            tryruleT(andRight)<(
              composelistT(
                hpalpha1T*,
                tryruleT(andRight)<(
                  composelistT(
                    hpalpha1T*,
                    tryruleT(andRight)<(
                      composelistT(
                        hpalpha1T*,
                        tryruleT(andRight)<(
                          composelistT(
                            hpalpha1T*,
                            tryruleT(andRight)<(
                              // branch 1
                              easybranchT,
                              // branch 2
                              composelistT(
                                hpalpha1T*,
                                diffsolveT(RightP(0), Endpoint),
                                hpalpha1T*,
                                tryruleT(andRight)<(
                                  easiestT,
                                  composelistT(
                                    substT*,
                                    cutb2<(
                                      composelistT(
                                        tryruleT(unsubstitute(Fn("-", List(Fn("qx", Nil), Fn("px", Nil))))),
                                        tryruleT(unsubstitute(Fn("-", List(Fn("qy", Nil), Fn("py", Nil))))),
                                        arithT
                                      ),
                                      arithT
                                    )
                                  )
                                )
                              )
                            )
                          ),
                          //branch 3
                          composelistT(
                            hpalpha1T*,
                            diffsolveT(RightP(0), Endpoint),
                            hpalpha1T*,
                            tryruleT(andRight)<(
                              easiestT,
                              hidehasfnT("e")& alleasyT
                            )
                          )
                        )
                      ),
                      // branch 4
                      easybranchT
                    )
                  ),
                  // branch 5
                  composelistT(
                    hpalpha1T*,
                    tryruleT(andRight)<(
                      composelistT(hpalpha1T*,
                        tryruleT(andRight)<(
                          composelistT(hpalpha1T*,
                            diffsolveT(RightP(0), Endpoint),
                            hpalpha1T*,
                            tryruleT(andRight)<(
                              easiestT,
                              composelistT(
                                substT*,
                                tryruleatT(hide)(LeftP(14)),
                                tryruleatT(hide)(LeftP(12)),
                                tryruleatT(hide)(LeftP(10)),
                                tryruleatT(hide)(LeftP(6)),
                                tryruleatT(hide)(LeftP(2)),
                                tryruleatT(hide)(LeftP(0)),
                                arithT
                              )
                            )
                          ),
                          composelistT(hpalpha1T*,
                            diffsolveT(RightP(0), Endpoint),
                            hpalpha1T*,
                            tryruleT(andRight)<(
                              easiestT,
                              composelistT(
                                substT*,
                                tryruleatT(hide)(LeftP(9)),
                                tryruleatT(hide)(LeftP(8)),
                                cutb5a<(composelistT(
                                  tryruleatT(hide)(LeftP(6)),
                                  unsubT(parseFormula(
                                    "FN <= 0"),
                                    List(Var("FN"))),
                                  unsubT(parseFormula(
                                    "Q + K() * (A * e() + FNP * e()^2 * (1 / 2)) <= 0"),
                                    List(Var("Q"),Var("FNP"))),
                                  arithT),
                                  composelistT(
                                    tryruleatT(hide)(LeftP(11)),
                                    tryruleatT(hide)(LeftP(10)),
                                    tryruleatT(hide)(LeftP(9)),
                                    tryruleatT(hide)(LeftP(8)),
                                    tryruleatT(hide)(LeftP(7)),
                                    tryruleatT(hide)(LeftP(6)),
                                    tryruleatT(hide)(LeftP(5)),
                                    tryruleatT(hide)(LeftP(4)),
                                    tryruleatT(hide)(LeftP(3)),
                                    tryruleatT(hide)(LeftP(2)),
                                    tryruleatT(hide)(LeftP(1)),
                                    arithT))
                              )
                            )
                          )
                        ), nilT),
                      composelistT(hpalpha1T*,
                        diffsolveT(RightP(0), Endpoint),
                        hpalpha1T*,
                        tryruleT(andRight)<(
                          easiestT,
                          composelistT(substT*,arithT))
                      )))
                  )
                ),
                // branch 6
                composelistT(
                  hpalpha1T*,
                  diffsolveT(RightP(0), Endpoint),
                  hpalpha1T*,
                  tryruleT(andRight)<(
                    easiestT,
                    composelistT(
                      substT*,
                      cutb6<(
                        alleasyT,
                        composelistT(
                          tryruleatT(hide)(LeftP(5)),
                          tryruleatT(hide)(LeftP(6)),
                          arithT
                        )
                      )
                    )
                  )
                )
              )
            ),
            // branch 7
            easybranchT
          )
        ),
        easiestT
      )
    )
}
| keymaerad/KeYmaeraD | examples/medrobot/robot_generalized.dl.scala | Scala | bsd-3-clause | 8,387 |
package im.tox.antox.activities
import android.app.Activity
import android.content.Intent
import android.graphics.Color
import android.os.{Build, Bundle}
import android.preference.PreferenceManager
import android.support.v7.app.AppCompatActivity
import android.view.{View, WindowManager}
import android.widget._
import im.tox.antox.data.UserDB
import im.tox.antox.tox.ToxDoService
import im.tox.antoxnightly.R
/**
 * Login screen: lets the user pick a stored profile and log in, or jump to
 * account creation. Redirects immediately when no profiles exist or a session
 * is already active.
 */
class LoginActivity extends AppCompatActivity with AdapterView.OnItemSelectedListener {

  // Profile name currently highlighted in the spinner; null until the first
  // selection callback fires.
  private var profileSelected: String = _

  protected override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    setContentView(R.layout.activity_login)
    getSupportActionBar.hide()
    if (Build.VERSION.SDK_INT != Build.VERSION_CODES.JELLY_BEAN &&
      Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
      getWindow.setFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED, WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED)
    }
    val preferences = PreferenceManager.getDefaultSharedPreferences(this)
    val db = new UserDB(this)
    if (!db.doUsersExist()) {
      // First run: no profiles stored, go straight to account creation.
      db.close()
      val createAccount = new Intent(getApplicationContext, classOf[CreateAccountActivity])
      startActivity(createAccount)
      finish()
    } else if (preferences.getBoolean("loggedin", false)) {
      // Session already active: start the Tox service and open the main screen.
      db.close()
      val startTox = new Intent(getApplicationContext, classOf[ToxDoService])
      getApplicationContext.startService(startTox)
      val main = new Intent(getApplicationContext, classOf[MainActivity])
      startActivity(main)
      finish()
    } else {
      // Populate the profile spinner for a manual login.
      val profiles = db.getAllProfiles
      db.close()
      val profileSpinner = findViewById(R.id.login_account_name).asInstanceOf[Spinner]
      val adapter = new ArrayAdapter[String](this, android.R.layout.simple_spinner_dropdown_item, profiles)
      adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
      profileSpinner.setAdapter(adapter)
      profileSpinner.setSelection(0)
      profileSpinner.setOnItemSelectedListener(this)
    }
  }

  def onItemSelected(parent: AdapterView[_], view: View, pos: Int, id: Long) {
    profileSelected = parent.getItemAtPosition(pos).toString
    if (parent.getChildAt(0) != null) // getChildAt(pos) returns a view, or null if non-existant
      parent.getChildAt(0).asInstanceOf[TextView].setTextColor(Color.BLACK)
  }

  def onNothingSelected(parent: AdapterView[_]) {
  }

  /**
   * Attempts to log into the selected profile. Copies the profile's details
   * into shared preferences, starts the Tox service and opens the main screen.
   */
  def onClickLogin(view: View) {
    val account = profileSelected
    // Guard against `null` as well: this can be clicked before any
    // selection callback has fired.
    if (account == null || account == "") {
      showToast(getString(R.string.login_must_fill_in))
    } else {
      val db = new UserDB(this)
      if (db.doesUserExist(account)) {
        val details = db.getUserDetails(account)
        db.close()
        val preferences = PreferenceManager.getDefaultSharedPreferences(this)
        val editor = preferences.edit()
        editor.putBoolean("loggedin", true)
        editor.putString("active_account", account)
        editor.putString("nickname", details.nickname)
        editor.putString("password", details.password)
        editor.putString("status", details.status)
        editor.putString("status_message", details.statusMessage)
        editor.putBoolean("logging_enabled", details.loggingEnabled)
        editor.putString("avatar", details.avatarName)
        editor.apply()
        val startTox = new Intent(getApplicationContext, classOf[ToxDoService])
        getApplicationContext.startService(startTox)
        val main = new Intent(getApplicationContext, classOf[MainActivity])
        startActivity(main)
        finish()
      } else {
        // Bug fix: the database handle was previously leaked on this path.
        db.close()
        showToast(getString(R.string.login_bad_login))
      }
    }
  }

  // Shared helper for the short toasts shown on login errors.
  private def showToast(text: String) {
    Toast.makeText(getApplicationContext, text, Toast.LENGTH_SHORT).show()
  }

  def onClickCreateAccount(view: View) {
    val createAccount = new Intent(getApplicationContext, classOf[CreateAccountActivity])
    startActivityForResult(createAccount, 1)
    overridePendingTransition(android.R.anim.fade_in, android.R.anim.fade_out)
  }

  // Closes this screen once account creation (request code 1) succeeds.
  override def onActivityResult(requestCode: Int, resultCode: Int, data: Intent) {
    if (requestCode == 1) {
      if (resultCode == Activity.RESULT_OK) {
        finish()
      }
    }
  }
}
| zetok/Antox | app/src/main/scala/im/tox/antox/activities/LoginActivity.scala | Scala | gpl-3.0 | 4,482 |
package com.twitter.finagle.stress
import com.twitter.conversions.time._
import com.twitter.finagle.util.DefaultTimer
import com.twitter.ostrich.stats.StatsCollection
import com.twitter.util.Duration
import java.net.{InetAddress, InetSocketAddress, SocketAddress}
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.Executors
import org.jboss.netty.bootstrap.ServerBootstrap
import org.jboss.netty.buffer._
import org.jboss.netty.channel._
import org.jboss.netty.channel.group.DefaultChannelGroup
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
import org.jboss.netty.handler.codec.http._
import scala.collection.JavaConversions._
object EmbeddedServer {
  /** Creates a server bound to an ephemeral port on the loopback interface. */
  def apply(): EmbeddedServer = new EmbeddedServer()
}
/**
 * In-process HTTP server used by stress tests. Supports injecting failure
 * modes: unresponsive application, unresponsive connections, garbled output
 * ("belligerent") and artificial write latency.
 */
class EmbeddedServer(private val addr: SocketAddress) {
  def this() = this(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))

  // (Publicly accessible) stats covering this server.
  val stats = new StatsCollection
  // Set once the server is stopped; guards against double-stop.
  val stopped = new AtomicBoolean(false)

  // Server state (failure-injection switches, see become*/setLatency below):
  private[this] var isApplicationNonresponsive = false
  private[this] var isConnectionNonresponsive = false
  private[this] var isBelligerent = false
  private[this] var latency = 0.seconds
  private[this] val channels = new DefaultChannelGroup
  private[this] val executor = Executors.newCachedThreadPool()
  private[this] val bootstrap = new ServerBootstrap(
    new NioServerSocketChannelFactory(executor, executor))

  bootstrap.setPipelineFactory(new ChannelPipelineFactory {
    def getPipeline = {
      val pipeline = Channels.pipeline()

      // "Belligerent" mode: corrupt every outgoing buffer to simulate a
      // misbehaving peer.
      pipeline.addLast("transposer", new SimpleChannelDownstreamHandler {
        override def writeRequested(ctx: ChannelHandlerContext, e: MessageEvent) {
          if (!isBelligerent) {
            super.writeRequested(ctx, e)
            return
          }

          // Garble the message a bit: shift every byte by one.
          val buffer = e.getMessage.asInstanceOf[ChannelBuffer]
          val bytes = new Array[Byte](buffer.readableBytes)
          buffer.getBytes(0, bytes)
          val transposed = bytes map { byte => ((byte + 1).toByte) }
          val transposedBuffer = ChannelBuffers.wrappedBuffer(transposed)
          Channels.write(ctx, e.getFuture, transposedBuffer)
        }
      })

      pipeline.addLast("decoder", new HttpRequestDecoder)
      pipeline.addLast("encoder", new HttpResponseEncoder)

      // Book-keeping: track open channels and count events in `stats`.
      pipeline.addLast("logger", new SimpleChannelHandler {
        override def channelClosed(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
          channels.remove(ctx.getChannel)
          stats.incr("closed")
          super.channelClosed(ctx, e)
        }

        override def channelOpen(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
          if (isConnectionNonresponsive)
            ctx.getChannel.setReadable(false)
          channels.add(ctx.getChannel)
          stats.incr("opened")
          // Bug fix: previously forwarded to super.channelClosed(ctx, e),
          // mis-routing the OPEN event through the handler chain.
          super.channelOpen(ctx, e)
        }

        override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
          stats.incr("requests")
          super.messageReceived(ctx, e)
        }

        override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) {
          stats.incr("exc_%s".format(e.getCause.getClass.getName.split('.').last))
        }
      })

      // Optional artificial latency: delay every write by `latency`.
      pipeline.addLast("latency", new SimpleChannelDownstreamHandler {
        override def writeRequested(ctx: ChannelHandlerContext, e: MessageEvent) {
          if (latency != 0.seconds)
            DefaultTimer.twitter.schedule(latency) { super.writeRequested(ctx, e) }
          else
            super.writeRequested(ctx, e)
        }
      })

      // Trivial application: respond to every request with ten dots, unless
      // the application has been made nonresponsive.
      pipeline.addLast("dots", new SimpleChannelUpstreamHandler {
        override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
          val response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK)
          response.setContent(ChannelBuffers.wrappedBuffer("..........".getBytes))
          response.headers.set("Content-Length", "10")
          if (!isApplicationNonresponsive)
            ctx.getChannel.write(response)
        }
      })

      pipeline
    }
  })

  private[this] var serverChannel = bootstrap.bind(addr)

  /** The actual (possibly ephemeral) address the server is listening on. */
  val boundAddress = serverChannel.getLocalAddress.asInstanceOf[InetSocketAddress]

  /** Stops the server and releases all Netty resources. Idempotent. */
  def stop() {
    if (stopped.getAndSet(true))
      return

    if (serverChannel.isOpen)
      serverChannel.close().awaitUninterruptibly()
    channels.close().awaitUninterruptibly()
    channels.clear()
    bootstrap.releaseExternalResources()
  }

  /** (Re)binds the server socket if it is not currently open. */
  def start() {
    if (!serverChannel.isOpen)
      serverChannel = bootstrap.bind(addr)
  }

  /** Accept connections and read requests, but never respond. */
  def becomeApplicationNonresponsive() {
    isApplicationNonresponsive = true
  }

  /** Stop reading from all current and future connections. */
  def becomeConnectionNonresponsive() {
    isConnectionNonresponsive = true
    channels foreach { _.setReadable(false) }
  }

  /** Start garbling all outgoing bytes. */
  def becomeBelligerent() {
    isBelligerent = true
  }

  /** Delays every write by the given duration. */
  def setLatency(latency: Duration) {
    this.latency = latency
  }

  // TODO: turn responsiveness back on.
}
| Krasnyanskiy/finagle | finagle-stress/src/main/scala/com/twitter/finagle/stress/EmbeddedServer.scala | Scala | apache-2.0 | 5,040 |
package org.psliwa.idea.composerJson.intellij.codeAssist
import com.intellij.psi.{PsiElement, PsiFileSystemItem}
import org.junit.Assert.assertEquals
import org.psliwa.idea.composerJson.ComposerJson
/**
 * Base class for completion tests that assert how file-path references in a
 * composer.json document resolve against the temporary test file system.
 */
abstract class FilePathReferences extends CompletionTest {

  /** Creates `file` in the temp dir and asserts exactly one reference resolves to it. */
  def checkFileReference(file: String, s: String): Unit = {
    myFixture.getTempDirFixture.createFile(file)
    val resolvedCount = getResolvedFileReferences(endsWith(file), s).length
    assertEquals(1, resolvedCount)
  }

  /** Creates `file` in the temp dir and asserts no reference resolves to it. */
  def checkEmptyFileReferences(file: String, s: String): Unit = {
    myFixture.getTempDirFixture.createFile(file)
    val resolvedCount = getResolvedFileReferences(endsWith(file), s).length
    assertEquals(0, resolvedCount)
  }

  // Curried suffix predicate used to select resolved paths.
  private def endsWith(suffix: String)(path: String): Boolean = path.endsWith(suffix)

  /**
   * Configures the fixture with `s`, then collects the canonical paths of all
   * references at the caret that resolve to file-system items and satisfy
   * `fileComparator`.
   */
  def getResolvedFileReferences(fileComparator: String => Boolean,
                                s: String,
                                mapElement: PsiElement => PsiElement = _.getParent): Array[String] = {
    myFixture.configureByText(ComposerJson, s)

    val element = mapElement(myFixture.getFile.findElementAt(myFixture.getCaretOffset))
    element.getReferences
      .map(_.resolve())
      .collect { case item: PsiFileSystemItem => item.getVirtualFile.getCanonicalPath }
      .filter(fileComparator)
  }
}
| psliwa/idea-composer-plugin | src/test/scala/org/psliwa/idea/composerJson/intellij/codeAssist/FilePathReferences.scala | Scala | mit | 1,300 |
package org.orbeon.dom
/**
 * Convenience base implementation of [[Visitor]]: every `visit` is a no-op,
 * so subclasses override only the node types they care about.
 */
abstract class VisitorSupport extends Visitor {
  def visit(node: Document)             : Unit = {}
  def visit(node: Element)              : Unit = {}
  def visit(node: Attribute)            : Unit = {}
  def visit(node: Comment)              : Unit = {}
  def visit(node: Namespace)            : Unit = {}
  def visit(node: ProcessingInstruction): Unit = {}
  def visit(node: Text)                 : Unit = {}
}
| brunobuzzi/orbeon-forms | dom/src/main/scala/org/orbeon/dom/VisitorSupport.scala | Scala | lgpl-2.1 | 396 |
package org.goingok.services
import java.util.UUID
import org.goingok.data.models.{User, UserAuth}
import org.goingok.data.persistence.db.DatabaseOps
import scala.concurrent.Future
import scala.util.{Failure, Success, Try}
/**
* Created by andrew@andrewresearch.net on 12/4/17.
*/
object UserService {

  import org.goingok.GoingOkContext._

  /**
   * Resolves the GoingOK id for a Google identity, creating the user auth and
   * user records on first login.
   *
   * @param google_id    Google account identifier
   * @param google_email Google account email (stored only for new users)
   * @return the existing or newly minted goingok_id
   */
  def getOrCreateGoingokId(google_id: String, google_email: String): Future[UUID] = {
    // Pattern match instead of nonEmpty/.get: exhaustive and no partial access.
    DatabaseOps.selectUserAuthForGoogleId(google_id).map {
      case Some(userAuth) =>
        log.info(s"Authorised user with goingok_id: ${userAuth.goingok_id}")
        userAuth.goingok_id
      case None =>
        val newUserAuth = UserAuth(google_id = google_id, google_email = google_email)
        val newUser = User(newUserAuth.goingok_id, "", "NON-NON-NON")
        log.info(s"Creating new user with goingok_id: ${newUserAuth.goingok_id}")
        // NOTE(review): these inserts are fire-and-forget; a failed insert is
        // silently dropped while the id is still returned — consider
        // sequencing them into the returned Future.
        DatabaseOps.insertUserAuth(newUserAuth)
        DatabaseOps.insertUser(newUser)
        newUserAuth.goingok_id
    }
  }
}
| GoingOK/goingok-server | src/main/scala/org/goingok/services/UserService.scala | Scala | apache-2.0 | 1,030 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.annotators.pos.perceptron
import com.johnsnowlabs.nlp.AnnotatorApproach
import org.apache.spark.ml.PipelineModel
import org.apache.spark.ml.param.{DoubleParam, IntParam, Param}
import org.apache.spark.ml.util.{DefaultParamsReadable, Identifiable}
import org.apache.spark.sql.Dataset
import scala.collection.mutable.{Map => MMap}
/** Trains an averaged Perceptron model to tag words part-of-speech.
* Sets a POS tag to each word within a sentence.
*
* For pretrained models please see the [[PerceptronModel]].
*
* The training data needs to be in a Spark DataFrame, where the column needs to consist of
* [[com.johnsnowlabs.nlp.Annotation Annotations]] of type `POS`. The `Annotation` needs to have member `result`
* set to the POS tag and have a `"word"` mapping to its word inside of member `metadata`.
  * This DataFrame for training can easily be created by the helper class [[com.johnsnowlabs.nlp.training.POS POS]].
* {{{
* POS().readDataset(spark, datasetPath).selectExpr("explode(tags) as tags").show(false)
* +---------------------------------------------+
* |tags |
* +---------------------------------------------+
* |[pos, 0, 5, NNP, [word -> Pierre], []] |
* |[pos, 7, 12, NNP, [word -> Vinken], []] |
* |[pos, 14, 14, ,, [word -> ,], []] |
* |[pos, 31, 34, MD, [word -> will], []] |
* |[pos, 36, 39, VB, [word -> join], []] |
* |[pos, 41, 43, DT, [word -> the], []] |
* |[pos, 45, 49, NN, [word -> board], []] |
* ...
* }}}
*
* For extended examples of usage, see the [[https://github.com/JohnSnowLabs/spark-nlp-workshop/blob/master/jupyter/training/french/Train-Perceptron-French.ipynb Spark NLP Workshop]]
* and [[https://github.com/JohnSnowLabs/spark-nlp/tree/master/src/test/scala/com/johnsnowlabs/nlp/annotators/pos/perceptron PerceptronApproach tests]].
*
* ==Example==
* {{{
* import spark.implicits._
* import com.johnsnowlabs.nlp.base.DocumentAssembler
* import com.johnsnowlabs.nlp.annotator.SentenceDetector
* import com.johnsnowlabs.nlp.annotators.Tokenizer
* import com.johnsnowlabs.nlp.training.POS
* import com.johnsnowlabs.nlp.annotators.pos.perceptron.PerceptronApproach
* import org.apache.spark.ml.Pipeline
*
* val documentAssembler = new DocumentAssembler()
* .setInputCol("text")
* .setOutputCol("document")
*
* val sentence = new SentenceDetector()
* .setInputCols("document")
* .setOutputCol("sentence")
*
* val tokenizer = new Tokenizer()
* .setInputCols("sentence")
* .setOutputCol("token")
*
* val datasetPath = "src/test/resources/anc-pos-corpus-small/test-training.txt"
* val trainingPerceptronDF = POS().readDataset(spark, datasetPath)
*
* val trainedPos = new PerceptronApproach()
* .setInputCols("document", "token")
* .setOutputCol("pos")
* .setPosColumn("tags")
* .fit(trainingPerceptronDF)
*
* val pipeline = new Pipeline().setStages(Array(
* documentAssembler,
* sentence,
* tokenizer,
* trainedPos
* ))
*
* val data = Seq("To be or not to be, is this the question?").toDF("text")
* val result = pipeline.fit(data).transform(data)
*
* result.selectExpr("pos.result").show(false)
* +--------------------------------------------------+
* |result |
* +--------------------------------------------------+
* |[NNP, NNP, CD, JJ, NNP, NNP, ,, MD, VB, DT, CD, .]|
* +--------------------------------------------------+
* }}}
*
* @param uid internal uid required to generate writable annotators
* @groupname anno Annotator types
* @groupdesc anno Required input and expected output annotator types
* @groupname Ungrouped Members
* @groupname param Parameters
* @groupname setParam Parameter setters
* @groupname getParam Parameter getters
* @groupname Ungrouped Members
* @groupprio param 1
* @groupprio anno 2
* @groupprio Ungrouped 3
* @groupprio setParam 4
* @groupprio getParam 5
* @groupdesc param A list of (hyper-)parameter keys this annotator can take. Users can set and get the parameter values through setters and getters, respectively.
* */
class PerceptronApproach(override val uid: String) extends AnnotatorApproach[PerceptronModel]
  with PerceptronTrainingUtils {

  import com.johnsnowlabs.nlp.AnnotatorType._

  /** Averaged Perceptron model to tag words part-of-speech */
  override val description: String = "Averaged Perceptron model to tag words part-of-speech"

  /** Column of Array of POS tags that match tokens
   *
   * @group param
   * */
  val posCol = new Param[String](this, "posCol", "Column of Array of POS tags that match tokens")

  /** Number of iterations in training, converges to better accuracy (Default: `5`)
   *
   * @group param
   * */
  val nIterations = new IntParam(this, "nIterations", "Number of iterations in training, converges to better accuracy")
  setDefault(nIterations, 5)

  /** Minimum number of times a tag must occur on a word to be marked as frequent (Default: `20`)
   *
   * @group param
   * */
  val frequencyThreshold = new IntParam(this, "frequencyThreshold",
    "How many times at least a tag on a word to be marked as frequent")
  setDefault(frequencyThreshold, 20)

  /** Fraction of the total word occurrences a tag must cover to be marked as frequent (Default: `0.97`)
   *
   * @group param
   * */
  val ambiguityThreshold = new DoubleParam(this, "ambiguityThreshold",
    "How much percentage of total amount of words are covered to be marked as frequent")
  setDefault(ambiguityThreshold, 0.97)

  /** Column containing an array of POS Tags matching every token on the line.
   *
   * @group setParam
   * */
  def setPosColumn(value: String): this.type = set(posCol, value)

  /** Number of iterations for training. May improve accuracy but takes longer. Default 5.
   *
   * @group setParam
   * */
  def setNIterations(value: Int): this.type = set(nIterations, value)

  /** Minimum occurrence count for a tag on a word to be marked as frequent.
   *
   * @group setParam
   */
  def setFrequencyThreshold(value: Int): this.type = set(frequencyThreshold, value)

  /** Coverage fraction required for a tag to be marked as frequent.
   *
   * @group setParam
   */
  def setAmbiguityThreshold(value: Double): this.type = set(ambiguityThreshold, value)

  /** Number of iterations for training. May improve accuracy but takes longer (Default: `5`)
   *
   * @group getParam
   * */
  def getNIterations: Int = $(nIterations)

  def this() = this(Identifiable.randomUID("POS"))

  /** Output annotator type: POS
   *
   * @group anno
   * */
  override val outputAnnotatorType: AnnotatorType = POS

  /** Input annotator type: TOKEN, DOCUMENT
   *
   * @group anno
   * */
  override val inputAnnotatorTypes: Array[AnnotatorType] = Array(TOKEN, DOCUMENT)

  /**
   * Trains a model based on a provided CORPUS
   *
   * @return A trained averaged model
   */
  override def train(dataset: Dataset[_], recursivePipeline: Option[PipelineModel]): PerceptronModel = {

    // Tagged sentences extracted from the dataset's POS annotations.
    val taggedSentences = generatesTagBook(dataset)
    // Lookup of unambiguous (frequent) word -> tag mappings.
    val taggedWordBook = buildTagBook(taggedSentences, $(frequencyThreshold), $(ambiguityThreshold))
    // finds all distinct tags and stores them
    val classes = taggedSentences.flatMap(_.tags).distinct

    val initialModel = new TrainingPerceptronLegacy(classes, taggedWordBook, MMap())
    val finalModel = trainPerceptron($(nIterations), initialModel, taggedSentences, taggedWordBook)

    logger.debug("TRAINING: Finished all iterations")

    new PerceptronModel().setModel(finalModel)
  }
}
/**
* This is the companion object of [[PerceptronApproach]]. Please refer to that class for the documentation.
*/
object PerceptronApproach extends DefaultParamsReadable[PerceptronApproach] | JohnSnowLabs/spark-nlp | src/main/scala/com/johnsnowlabs/nlp/annotators/pos/perceptron/PerceptronApproach.scala | Scala | apache-2.0 | 8,452 |
package model.dtos
case class PlatformStats(totalConsultations:Int,
medianAverageCommentsPerConsultations:Int,
medianAverageDaysPerConsultation:Int,
organizationsPerCategory:List[OrganizationStatsGrouped])
| scify/DemocracIT-Web | app/model/dtos/PlatformStats.scala | Scala | apache-2.0 | 282 |
package com.wavesplatform.transaction.assets
import com.wavesplatform.account.{AddressScheme, KeyPair, PrivateKey, PublicKey}
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.crypto
import com.wavesplatform.lang.ValidationError
import com.wavesplatform.transaction.Asset.IssuedAsset
import com.wavesplatform.transaction._
import com.wavesplatform.transaction.serialization.impl.{BaseTxJson, PBTransactionSerializer}
import com.wavesplatform.transaction.validation._
import com.wavesplatform.transaction.validation.impl.UpdateAssetInfoTxValidator
import monix.eval.Coeval
import play.api.libs.json.{JsObject, Json}
import scala.util.{Failure, Success, Try}
/**
 * Transaction (type 17, protobuf-only) that updates the name and description
 * of a previously issued asset. Serialization is delegated entirely to
 * [[PBTransactionSerializer]].
 */
case class UpdateAssetInfoTransaction(
    version: TxVersion,
    sender: PublicKey,
    assetId: IssuedAsset,
    name: String,
    description: String,
    timestamp: TxTimestamp,
    feeAmount: TxAmount,
    feeAsset: Asset,
    proofs: Proofs,
    chainId: Byte
) extends VersionedTransaction
    with FastHashId
    with ProvenTransaction
    with ProtobufOnly { self =>
  // Fee may be paid in Waves or in a sponsored asset.
  override def assetFee: (Asset, TxAmount) = (feeAsset, feeAmount)
  override def builder: UpdateAssetInfoTransaction.type = UpdateAssetInfoTransaction

  // Lazily-memoized protobuf encodings (body = unsigned part).
  override val bodyBytes: Coeval[Array[Byte]] = Coeval.evalOnce(PBTransactionSerializer.bodyBytes(self))
  override val bytes: Coeval[Array[Byte]] = Coeval.evalOnce(PBTransactionSerializer.bytes(self))

  // Base proven-tx JSON plus the fields specific to this transaction type.
  override val json: Coeval[JsObject] =
    Coeval.evalOnce(
      BaseTxJson.toJson(self) ++ Json.obj(
        "chainId" -> self.chainId,
        "assetId" -> (self.assetId: Asset),
        "name" -> self.name,
        "description" -> self.description
      )
    )

  // The updated asset is the only asset this transaction touches.
  override def checkedAssets: Seq[IssuedAsset] = Seq(assetId)
}
object UpdateAssetInfoTransaction extends TransactionParser {
  type TransactionT = UpdateAssetInfoTransaction

  override val typeId: TxType = 17: Byte
  override val supportedVersions: Set[TxVersion] = Set(1)

  /** Signs the transaction body, replacing any existing proofs. */
  implicit def sign(tx: UpdateAssetInfoTransaction, privateKey: PrivateKey): UpdateAssetInfoTransaction =
    tx.copy(proofs = Proofs(crypto.sign(privateKey, tx.bodyBytes())))

  implicit val validator: TxValidator[UpdateAssetInfoTransaction] = UpdateAssetInfoTxValidator

  // NOTE(review): parameter is typed Array[TxType] (alias of Byte) rather
  // than Array[Byte]; equivalent at runtime, but confusing — confirm intent.
  override def parseBytes(bytes: Array[TxType]): Try[UpdateAssetInfoTransaction] =
    PBTransactionSerializer
      .parseBytes(bytes)
      .flatMap {
        case tx: UpdateAssetInfoTransaction => Success(tx)
        case tx: Transaction                => Failure(UnexpectedTransaction(typeId, tx.typeId))
      }

  /** Builds and validates an unsigned transaction (empty-proof variant via default). */
  def create(
      version: Byte,
      sender: PublicKey,
      assetId: ByteStr,
      name: String,
      description: String,
      timestamp: TxTimestamp,
      feeAmount: TxAmount,
      feeAsset: Asset,
      proofs: Proofs,
      chainId: Byte = AddressScheme.current.chainId
  ): Either[ValidationError, UpdateAssetInfoTransaction] = {
    UpdateAssetInfoTransaction(
      version,
      sender,
      IssuedAsset(assetId),
      name,
      description,
      timestamp,
      feeAmount,
      feeAsset,
      proofs,
      chainId
    ).validatedEither
  }

  /** Creates a transaction and signs it with the sender's own key. */
  def selfSigned(
      version: Byte,
      sender: KeyPair,
      assetId: ByteStr,
      name: String,
      description: String,
      timestamp: TxTimestamp,
      feeAmount: TxAmount,
      feeAsset: Asset,
      chainId: Byte = AddressScheme.current.chainId
  ): Either[ValidationError, UpdateAssetInfoTransaction] =
    create(version, sender.publicKey, assetId, name, description, timestamp, feeAmount, feeAsset, Proofs.empty, chainId).map(_.signWith(sender.privateKey))
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/transaction/assets/UpdateAssetInfoTransaction.scala | Scala | mit | 3,644 |
package org.pfcoperez.dailyalgorithm.datastructures
import org.scalatest.{ FlatSpec, Matchers }
class BitsSpec extends FlatSpec with Matchers {

  /** Expands every byte into the bits of its (unpadded) binary string form. */
  def bytes2seqbool(bytes: Seq[Byte]): Seq[Boolean] =
    bytes.flatMap(byte => byte.toBinaryString.map(_ == '1'))

  /** Converts each character of `str` to a byte, then expands it into bits. */
  def text2seqbool(str: String): Seq[Boolean] = bytes2seqbool(str.map(_.toByte))

  "A Bits (BitArray) data structure" should "be able to hold the same content as `Seq[Boolean]`" in {
    val message = """Look again at that dot. That's here. That's home. That's us. On it everyone you love, everyone you know, everyone you ever heard of, every human being who ever was, lived out their lives. The aggregate of our joy and suffering, thousands of confident religions, ideologies, and economic doctrines, every hunter and forager, every hero and coward, every creator and destroyer of civilization, every king and peasant, every young couple in love, every mother and father, hopeful child, inventor and explorer, every teacher of morals, every corrupt politician, every "superstar," every "supreme leader," every saint and sinner in the history of our species lived there-on a mote of dust suspended in a sunbeam"""

    val expectedBits = text2seqbool(message)
    val bits = Bits(expectedBits)

    // Reading every position back must reproduce the source sequence exactly.
    (0L until bits.length).map(bits(_).right.get) should equal(expectedBits)
  }

  it should "act as a monoid where empty is zero and concatenating is combine" in {
    import Bits._
    import cats.Monoid
    import cats.syntax.monoid._

    // Lift a single Boolean into a one-bit Bits value for concise combining.
    implicit def bool2bits(x: Boolean): Bits = Bits(List(x))

    val bits = Monoid[Bits].empty |+| true |+| false |+| true |+| true

    (0L until bits.length).map(bits(_).right.get) should equal(Seq(true, false, true, true))
  }

}
| pfcoperez/algorithmaday | src/test/scala/org/pfcoperez/dailyalgorithm/datastructures/BitsSpec.scala | Scala | gpl-3.0 | 1,751 |
package score.discord.canti.discord
import net.dv8tion.jda.api.entities.{Message, MessageChannel, User}
import score.discord.canti.wrappers.jda.ID
/**
 * Minimal, detached snapshot of a Discord message: just the IDs needed to
 * locate it (message, channel, author) plus its text content.
 *
 * @param messageId ID of the message itself
 * @param chanId    ID of the channel the message was posted in
 * @param senderId  ID of the user who sent the message
 * @param text      the raw message text
 */
case class BareMessage(
messageId: ID[Message],
chanId: ID[MessageChannel],
senderId: ID[User],
text: String
)
| ScoreUnder/canti-bot | src/main/scala/score/discord/canti/discord/BareMessage.scala | Scala | agpl-3.0 | 268 |
package com.softwaremill.bootzooka.api
import com.softwaremill.bootzooka.common.StringJsonWrapper
import com.softwaremill.bootzooka.service.PasswordRecoveryService
import com.softwaremill.bootzooka.service.user.UserService
import org.scalatra.swagger.{StringResponseMessage, Swagger, SwaggerSupport}
import org.scalatra.{AsyncResult, FutureSupport, NoContent}
import scala.concurrent.{ExecutionContext, Future}
/**
 * Servlet handling requests related to password recovery.
 *
 * Exposes two endpoints under the `passwordrecovery` mapping:
 *  - `POST /`      — request a reset code to be mailed to the given login/e-mail
 *  - `POST /:code` — set a new password using a previously issued reset code
 *
 * All handlers are asynchronous (Scalatra `AsyncResult` + `FutureSupport`).
 */
// format: OFF
class PasswordRecoveryServlet(passwordRecoveryService: PasswordRecoveryService, userService: UserService)
(override implicit val swagger: Swagger, implicit val executor: ExecutionContext)
extends JsonServlet with SwaggerMappable with PasswordRecoveryServlet.ApiDocs with FutureSupport {
// format: ON
override def mappingPath = PasswordRecoveryServlet.MappingPath
post("/", operation(requestPasswordReset)) {
// Missing/unparseable login falls back to "" rather than failing the request.
val login = (parsedBody \\ "login").extractOpt[String].getOrElse("")
new AsyncResult() {
// NOTE(review): Right from checkUserExistenceFor appears to mean "no such
// user" here (the same check is used for registration availability) — the
// Right branch responds 404. Confirm against UserService before relying on it.
override val is = userService.checkUserExistenceFor(login, login).flatMap {
case Right(_) => Future { haltWithNotFound("No user with given login/e-mail found.") }
case _ =>
passwordRecoveryService.sendResetCodeToUser(login).map(_ =>
StringJsonWrapper("success"))
}
}
}
post("/:code", operation(resetPassword)) {
val code = params("code")
val password = (parsedBody \\ "password").extractOpt[String].getOrElse("")
if (!password.isEmpty) {
new AsyncResult {
// Invalid/expired code -> 403; success -> 204 No Content.
val is = passwordRecoveryService.performPasswordReset(code, password).map {
case Left(e) => haltWithForbidden(e)
case _ => NoContent()
}
}
}
else {
// Empty password is rejected synchronously, before touching the service.
haltWithBadRequest("missingpassword")
}
}
}
/**
 * Companion: URL mapping constant, Swagger API documentation and the private
 * request-body command classes for the password-recovery endpoints.
 */
object PasswordRecoveryServlet {
val MappingPath = "passwordrecovery"
// only enclosing object's companions have access to this trait
protected trait ApiDocs extends SwaggerSupport {
self: PasswordRecoveryServlet =>
override protected val applicationDescription = "Password recovery"
// Swagger doc for POST / (request a reset code).
protected val requestPasswordReset = (
apiOperation[StringJsonWrapper]("requestPasswordReset")
summary "Request password reset"
parameter bodyParam[PasswordResetRequestCommand]("body").description("User login").required
responseMessages (
StringResponseMessage(200, "OK"),
StringResponseMessage(404, "No user with given login/e-mail found")
)
)
// Swagger doc for POST /:code (perform the reset).
protected val resetPassword = (
apiOperation[Unit]("resetPassword")
summary "Reset password"
parameters (
pathParam[String]("code").description("Password reset code").required,
bodyParam[PasswordResetCommand]("body").description("New password").required
)
responseMessages (
StringResponseMessage(200, "OK"),
StringResponseMessage(400, "Missing password"),
StringResponseMessage(403, "Invalid password reset code")
)
)
}
// Request-body shapes; used only for Swagger schema generation.
private[this] case class PasswordResetRequestCommand(login: String)
private[this] case class PasswordResetCommand(password: String)
} | umitunal/bootzooka | backend/src/main/scala/com/softwaremill/bootzooka/api/PasswordRecoveryServlet.scala | Scala | apache-2.0 | 3,189 |
import slick.jdbc.H2Profile.api._
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Plain-SQL half of the Slick sample: raw `sql`/`sqlu` interpolator queries
 * against the `coffees` and `suppliers` tables. Mixed into `PlainSQL`
 * (self-type), which supplies the `Coffee`/`Supplier` row classes and their
 * result converters.
 */
trait Interpolation { this: PlainSQL.type =>
// DDL: create the `coffees` table (FK to `suppliers`).
def createCoffees: DBIO[Int] =
sqlu"""create table coffees(
name varchar not null,
sup_id int not null,
price double not null,
sales int not null,
total int not null,
foreign key(sup_id) references suppliers(id))"""
// DDL: create the `suppliers` table.
def createSuppliers: DBIO[Int] =
sqlu"""create table suppliers(
id int not null primary key,
name varchar not null,
street varchar not null,
city varchar not null,
state varchar not null,
zip varchar not null)"""
def insertSuppliers: DBIO[Unit] = DBIO.seq(
// Insert some suppliers
sqlu"insert into suppliers values(101, 'Acme, Inc.', '99 Market Street', 'Groundsville', 'CA', '95199')",
sqlu"insert into suppliers values(49, 'Superior Coffee', '1 Party Place', 'Mendocino', 'CA', '95460')",
sqlu"insert into suppliers values(150, 'The High Ground', '100 Coffee Lane', 'Meadows', 'CA', '93966')"
)
def insertCoffees: DBIO[Unit] = {
// Interpolated values become bind parameters, so one prepared statement serves all rows.
def insert(c: Coffee): DBIO[Int] =
sqlu"insert into coffees values (${c.name}, ${c.supID}, ${c.price}, ${c.sales}, ${c.total})"
// Insert some coffees. The SQL statement is the same for all calls:
// "insert into coffees values (?, ?, ?, ?, ?)"
val inserts: Seq[DBIO[Int]] = Seq(
Coffee("Colombian", 101, 7.99, 0, 0),
Coffee("French_Roast", 49, 8.99, 0, 0),
Coffee("Espresso", 150, 9.99, 0, 0),
Coffee("Colombian_Decaf", 101, 8.99, 0, 0),
Coffee("French_Roast_Decaf", 49, 9.99, 0, 0)
).map(insert)
val combined: DBIO[Seq[Int]] = DBIO.sequence(inserts)
// NOTE(review): declared result is DBIO[Unit], so the summed row count is
// discarded via value discarding inside the lambda; if the total was meant
// to be returned, the signature should be DBIO[Int].
combined.map(_.sum)
}
def printAll: DBIO[Unit] =
// Iterate through all coffees and output them
sql"select * from coffees".as[Coffee].map { cs =>
println("Coffees:")
for(c <- cs)
println("* " + c.name + "\\t" + c.supID + "\\t" + c.price + "\\t" + c.sales + "\\t" + c.total)
}
// Coffee/supplier name pairs for all coffees cheaper than `price` (bind parameter).
def namesByPrice(price: Double): DBIO[Seq[(String, String)]] = sql"""
select c.name, s.name
from coffees c, suppliers s
where c.price < $price and s.id = c.sup_id""".as[(String, String)]
def supplierById(id: Int): DBIO[Seq[Supplier]] =
sql"select * from suppliers where id = $id".as[Supplier]
def printParameterized: DBIO[Unit] = {
// Perform a join to retrieve coffee names and supplier names for
// all coffees costing less than $9.00
namesByPrice(9.0).flatMap { l2 =>
println("Parameterized StaticQuery:")
for (t <- l2)
println("* " + t._1 + " supplied by " + t._2)
supplierById(49).map(s => println(s"Supplier #49: $s"))
}
}
def coffeeByName(name: String): DBIO[Option[Coffee]] = {
// `#$` splices the literal text unescaped (no bind parameter) — safe here
// only because `table` is a local constant, never user input.
val table = "coffees"
sql"select * from #$table where name = $name".as[Coffee].headOption
}
def deleteCoffee(name: String): DBIO[Int] =
sqlu"delete from coffees where name = $name"
}
| nafg/slick | samples/slick-plainsql/src/main/scala/Interpolation.scala | Scala | bsd-2-clause | 2,982 |
package io.gatling.amqp.data
/**
* Marker trait for AMQP Requests
*/
trait AmqpRequest
| maiha/gatling-amqp | src/main/scala/io/gatling/amqp/data/AmqpRequest.scala | Scala | mit | 90 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.exhibitor
import java.io.IOException
import java.net.{HttpURLConnection, URL, URLEncoder}
import play.api.libs.json.{JsValue, Json}
import scopt.OptionParser
import scala.io.Source
object Cli {
/**
 * CLI entry point: delegates to [[exec]] and converts any recoverable failure
 * into an error message on stderr plus a non-zero exit code.
 */
def main(args: Array[String]) {
  try {
    exec(args)
  } catch {
    // FIX: catch NonFatal instead of Throwable so fatal JVM errors
    // (OutOfMemoryError, InterruptedException, ...) propagate instead of
    // being reported as ordinary CLI errors.
    case scala.util.control.NonFatal(e) =>
      System.err.println("Error: " + e.getMessage)
      sys.exit(1)
  }
}
/**
 * Interprets the first argument as a command name and dispatches the
 * remaining arguments to the matching handler.
 *
 * @throws RuntimeException if no command or an unknown command is supplied.
 */
def exec(args: Array[String]) {
  if (args.length == 0) {
    handleHelp()
    println()
    throw new RuntimeException("No command supplied")
  }

  val command = args.head
  val commandArgs = args.tail

  command match {
    case "help" => if (commandArgs.isEmpty) handleHelp() else handleHelp(commandArgs.head)
    case "scheduler" => handleScheduler(commandArgs)
    case "add" => handleAdd(commandArgs)
    case "start" => handleStart(commandArgs)
    case "stop" => handleStop(commandArgs)
    case "remove" => handleRemove(commandArgs)
    case "status" => handleStatus(commandArgs)
    case "config" => handleConfig(commandArgs)
    // FIX: an unknown command previously escaped as a raw MatchError;
    // report it explicitly (mirroring the no-command path above).
    case unknown =>
      handleHelp()
      println()
      throw new RuntimeException(s"Unknown command: $unknown")
  }
}
/**
 * Prints usage information: generic help when `command` is empty, the matching
 * scopt parser's usage for a known command, or generic help with a warning for
 * an unknown one.
 */
def handleHelp(command: String = "") {
command match {
case "" =>
println("Usage: <command>\\n")
printGenericHelp()
case "scheduler" => Parsers.scheduler.showUsage
case "add" => Parsers.add.showUsage
case "start" => Parsers.start.showUsage
case "stop" => Parsers.stop.showUsage
case "remove" => Parsers.remove.showUsage
case "status" => Parsers.status.showUsage
case "config" => Parsers.config.showUsage
case _ =>
println(s"Unknown command: $command\\n")
printGenericHelp()
}
}
/**
 * Parses scheduler options, copies them into the global [[Config]] and starts
 * the scheduler; exits with code 1 on a parse failure.
 */
def handleScheduler(args: Array[String]) {
Parsers.scheduler.parse(args, Map()) match {
case Some(config) =>
// Order matters: the API URL must be resolved before any other setup.
resolveApi(config.get("api"))
Config.master = config("master")
Config.user = config("user")
// Optional tuning knobs fall back to Config defaults when absent.
config.get("ensemblemodifyretries").foreach(retries => Config.ensembleModifyRetries = retries.toInt)
config.get("ensemblemodifybackoff").foreach(backoff => Config.ensembleModifyBackoff = backoff.toLong)
config.get("debug").foreach(debug => Config.debug = debug.toBoolean)
Scheduler.start()
case None => sys.exit(1)
}
}
/**
 * `add <id>`: registers a new server with the scheduler's REST API and prints
 * the resulting server description; exits with code 1 on a parse failure.
 */
def handleAdd(args: Array[String]) {
// First positional argument is the server id; usage + exit if missing.
val id = getID(args, () => Parsers.add.showUsage)
Parsers.add.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val server = sendRequest("/add", config).as[ExhibitorServer]
printLine("Server added")
printLine()
printExhibitorServer(server)
case None => sys.exit(1)
}
}
/**
 * `start <id>`: asks the scheduler's REST API to start the server and prints
 * its description; exits with code 1 on a parse failure.
 */
def handleStart(args: Array[String]) {
val id = getID(args, () => Parsers.start.showUsage)
Parsers.start.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val server = sendRequest("/start", config).as[ExhibitorServer]
printLine("Started server")
printLine()
printExhibitorServer(server)
case None => sys.exit(1)
}
}
/**
 * `stop <id>`: asks the scheduler's REST API to stop the server; exits with
 * code 1 on a parse failure.
 */
def handleStop(args: Array[String]) {
val id = getID(args, () => Parsers.stop.showUsage)
Parsers.stop.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val server = sendRequest("/stop", config).as[ExhibitorServer]
printLine(s"Stopped server ${server.id}")
case None => sys.exit(1)
}
}
/**
 * `remove <id>`: removes the server via the scheduler's REST API; exits with
 * code 1 on a parse failure.
 */
def handleRemove(args: Array[String]) {
val id = getID(args, () => Parsers.remove.showUsage)
Parsers.remove.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val server = sendRequest("/remove", config).as[ExhibitorServer]
printLine(s"Removed server ${server.id}")
case None => sys.exit(1)
}
}
/**
 * `status`: fetches and prints the full cluster state from the scheduler's
 * REST API; exits with code 1 on a parse failure.
 */
def handleStatus(args: Array[String]) {
Parsers.status.parse(args, Map()) match {
case Some(config) =>
resolveApi(config.get("api"))
val cluster = sendRequest("/status", config).as[List[ExhibitorServer]]
printCluster(cluster)
case None => sys.exit(1)
}
}
/**
 * `config <id>`: submits configuration changes for the server via the REST
 * API and prints the updated description; exits with code 1 on a parse failure.
 */
def handleConfig(args: Array[String]) {
val id = getID(args, () => Parsers.config.showUsage)
Parsers.config.parse(args.tail, Map("id" -> id)) match {
case Some(config) =>
resolveApi(config.get("api"))
val server = sendRequest("/config", config).as[ExhibitorServer]
printExhibitorServer(server)
case None => sys.exit(1)
}
}
/**
 * Returns the first argument as the server id; when no argument is present,
 * runs `usage` and terminates the process with exit code 1.
 */
private def getID(args: Array[String], usage: () => Unit): String =
  args.headOption.getOrElse {
    usage()
    sys.exit(1) // sys.exit returns Nothing, so this branch type-checks as String
  }
/**
 * Resolves the scheduler API URL into the global `Config.api`, in priority
 * order: already-set Config value, `--api` CLI option, `EM_API` environment
 * variable. Fails if none of them is available.
 */
private def resolveApi(apiOption: Option[String]) {
// Already resolved earlier in this process — keep the existing value.
if (Config.api != null) return
if (apiOption.isDefined) {
Config.api = apiOption.get
return
}
if (System.getenv("EM_API") != null) {
Config.api = System.getenv("EM_API")
return
}
throw new IllegalArgumentException("Undefined API url. Please provide either a CLI --api option or EM_API env.")
}
/**
 * Performs a blocking HTTP GET against the scheduler REST API at
 * `Config.api + "/api" + uri`, passing `params` as a URL-encoded query string,
 * and parses the response body as JSON.
 *
 * @throws java.io.IOException with the HTTP status when the server does not
 *         respond with 200.
 */
private[exhibitor] def sendRequest(uri: String, params: Map[String, String]): JsValue = {
// Builds "k1=v1&k2=v2" with both keys and values URL-encoded; a null value
// produces a bare key with no '=' part.
def queryString(params: Map[String, String]): String = {
var s = ""
params.foreach { case (name, value) =>
if (!s.isEmpty) s += "&"
s += URLEncoder.encode(name, "utf-8")
if (value != null) s += "=" + URLEncoder.encode(value, "utf-8")
}
s
}
val qs: String = queryString(params)
val url: String = Config.api + (if (Config.api.endsWith("/")) "" else "/") + "api" + uri + "?" + qs
val connection: HttpURLConnection = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
var response: String = null
try {
try {
response = Source.fromInputStream(connection.getInputStream).getLines().mkString
}
catch {
// Translate HTTP errors into a readable IOException; re-throw genuine I/O
// failures that happened despite a 200 status.
case e: IOException =>
if (connection.getResponseCode != 200) throw new IOException(connection.getResponseCode + " - " + connection.getResponseMessage)
else throw e
}
} finally {
// Always release the connection, success or failure.
connection.disconnect()
}
Json.parse(response)
}
/** Prints `s` indented by `indent` levels (one space per level). */
private def printLine(s: AnyRef = "", indent: Int = 0) = println(" " * indent + s)
/** Prints the top-level command summary shown by `help` with no argument. */
private def printGenericHelp() {
printLine("Commands:")
printLine("help - print this message.", 1)
printLine("help [cmd] - print command-specific help.", 1)
printLine("scheduler - start scheduler.", 1)
printLine("status - print cluster status.", 1)
printLine("add - add servers to cluster.", 1)
printLine("config - configure servers in cluster.", 1)
printLine("start - start servers in cluster.", 1)
printLine("stop - stop servers in cluster.", 1)
printLine("remove - remove servers in cluster.", 1)
}
/** Prints example syntax for the `--constraints` option of `add`. */
private def printConstraintExamples() {
printLine("constraint examples:")
printLine("like:slave0 - value equals 'slave0'", 1)
printLine("unlike:slave0 - value is not equal to 'slave0'", 1)
printLine("like:slave.* - value starts with 'slave'", 1)
printLine("unique - all values are unique", 1)
}
/** Prints a "cluster:" header followed by every server, indented one level. */
private def printCluster(cluster: List[ExhibitorServer]) {
  printLine("cluster:")
  for (server <- cluster) printExhibitorServer(server, 1)
}
/**
 * Prints one server's id, state, UI endpoint (when hostname and port are
 * known), constraints (when present) and task configuration.
 */
private def printExhibitorServer(server: ExhibitorServer, indent: Int = 0) {
printLine("server:", indent)
printLine(s"id: ${server.id}", indent + 1)
printLine(s"state: ${server.state}", indent + 1)
// The UI endpoint can only be shown once the task has a host and a port.
if (!server.config.hostname.isEmpty && server.config.exhibitorConfig.get("port").isDefined) {
printLine(s"endpoint: ${server.url}/exhibitor/v1/ui/index.html", indent + 1)
}
if (server.constraints.nonEmpty)
printLine(s"constraints: ${Util.formatMap(server.constraints)}", indent + 1)
printTaskConfig(server.config, indent + 1)
printLine()
}
/**
 * Prints the Exhibitor options, shared-config overrides and resource settings
 * of one task configuration.
 */
private def printTaskConfig(config: TaskConfig, indent: Int) {
printLine("exhibitor config:", indent)
config.exhibitorConfig.foreach { case (k, v) =>
printLine(s"$k: $v", indent + 1)
}
printLine("shared config overrides:", indent)
config.sharedConfigOverride.foreach { case (k, v) =>
printLine(s"$k: $v", indent + 1)
}
printLine(s"cpu: ${config.cpus}", indent)
printLine(s"mem: ${config.mem}", indent)
printLine(s"sharedConfigChangeBackoff: ${config.sharedConfigChangeBackoff}", indent)
}
/**
 * scopt option parsers for every CLI command. Each parser folds recognized
 * options into a simple Map[String, String] that the handlers read from.
 */
private object Parsers {
// Options for the `scheduler` command (master/user are mandatory).
val scheduler = new OptionParser[Map[String, String]]("scheduler") {
opt[String]('m', "master").required().text("Mesos Master addresses. Required.").action { (value, config) =>
config.updated("master", value)
}
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
opt[String]('u', "user").required().text("Mesos user. Required.").action { (value, config) =>
config.updated("user", value)
}
opt[Int]("ensemblemodifyretries").optional().text("Number of retries to modify (add/remove server) ensemble. Defaults to 60. Optional.").action { (value, config) =>
config.updated("ensemblemodifyretries", value.toString)
}
opt[Long]("ensemblemodifybackoff").optional().text("Backoff between retries to modify (add/remove server) ensemble in milliseconds. Defaults to 1000. Optional.").action { (value, config) =>
config.updated("ensemblemodifybackoff", value.toString)
}
opt[Boolean]('d', "debug").optional().text("Debug mode. Optional. Defaults to false.").action { (value, config) =>
config.updated("debug", value.toString)
}
}
// Options for `add <id>`; appends constraint examples to the usage text.
val add = new OptionParser[Map[String, String]]("add <id>") {
override def showUsage {
super.showUsage
printLine()
printConstraintExamples()
}
opt[String]('c', "cpu").optional().text(s"CPUs for server. Optional.").action { (value, config) =>
config.updated("cpu", value)
}
opt[String]('m', "mem").optional().text("Memory for server. Optional.").action { (value, config) =>
config.updated("mem", value)
}
opt[String]("constraints").optional().text("Constraints (hostname=like:master,rack=like:1.*). See below. Defaults to 'hostname=unique'. Optional.").action { (value, config) =>
config.updated("constraints", value)
}
opt[Long]('b', "configchangebackoff").optional().text("Backoff between checks whether the shared configuration changed in milliseconds. Defaults to 10000. Optional.").action { (value, config) =>
config.updated("configchangebackoff", value.toString)
}
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
}
// start/stop/remove/status share the minimal parser (only --api).
val start = defaultParser("start <id>")
val stop = defaultParser("stop <id>")
val remove = defaultParser("remove <id>")
val status = defaultParser("status")
// Options for `config <id>`: the full set of Exhibitor, S3, ZooKeeper,
// backup, ACL and shared-config settings, all optional.
val config = new OptionParser[Map[String, String]]("config <id>") {
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
// Exhibitor configs
opt[String]("configtype").optional().text("Config type to use: s3 or zookeeper. Optional.").action { (value, config) =>
config.updated("configtype", value)
}
opt[String]("configcheckms").optional().text("Period (ms) to check for shared config updates. Optional.").action { (value, config) =>
config.updated("configcheckms", value)
}
opt[String]("defaultconfig").optional().text("Full path to a file that contains initial/default values for Exhibitor/ZooKeeper config values. The file is a standard property file. Optional.").action { (value, config) =>
config.updated("defaultconfig", value)
}
opt[String]("headingtext").optional().text("Extra text to display in UI header. Optional.").action { (value, config) =>
config.updated("headingtext", value)
}
opt[String]("hostname").optional().text("Hostname to use for this JVM. Optional.").action { (value, config) =>
config.updated("hostname", value)
}
opt[String]("jquerystyle").optional().text("Styling used for the JQuery-based UI. Optional.").action { (value, config) =>
config.updated("jquerystyle", value)
}
opt[String]("loglines").optional().text("Max lines of logging to keep in memory for display. Default is 1000. Optional.").action { (value, config) =>
config.updated("loglines", value)
}
opt[String]("nodemodification").optional().text("If true, the Explorer UI will allow nodes to be modified (use with caution). Default is true. Optional.").action { (value, config) =>
config.updated("nodemodification", value)
}
opt[String]("prefspath").optional().text("Certain values (such as Control Panel values) are stored in a preferences file. By default, Preferences.userRoot() is used. Optional.").action { (value, config) =>
config.updated("prefspath", value)
}
opt[String]("servo").optional().text("true/false (default is false). If enabled, ZooKeeper will be queried once a minute for its state via the 'mntr' four letter word (this requires ZooKeeper 3.4.x+). Servo will be used to publish this data via JMX. Optional.").action { (value, config) =>
config.updated("servo", value)
}
opt[String]("timeout").optional().text("Connection timeout (ms) for ZK connections. Default is 30000. Optional.").action { (value, config) =>
config.updated("timeout", value)
}
// S3 options
opt[String]("s3credentials").optional().text("Credentials to use for s3backup or s3config. Optional.").action { (value, config) =>
config.updated("s3credentials", value)
}
opt[String]("s3region").optional().text("Region for S3 calls (e.g. \\"eu-west-1\\"). Optional.").action { (value, config) =>
config.updated("s3region", value)
}
// Configuration Options for Type "s3"
opt[String]("s3config").optional().text("The bucket name and key to store the config (s3credentials may be provided as well). Argument is [bucket name]:[key]. Optional.").action { (value, config) =>
config.updated("s3config", value)
}
opt[String]("s3configprefix").optional().text("When using AWS S3 shared config files, the prefix to use for values such as locks. Optional.").action { (value, config) =>
config.updated("s3configprefix", value)
}
// Configuration Options for Type "zookeeper"
opt[String]("zkconfigconnect").optional().text("The initial connection string for ZooKeeper shared config storage. E.g: host1:2181,host2:2181... Optional.").action { (value, config) =>
config.updated("zkconfigconnect", value)
}
opt[String]("zkconfigexhibitorpath").optional().text("Used if the ZooKeeper shared config is also running Exhibitor. This is the URI path for the REST call. The default is: /. Optional.").action { (value, config) =>
config.updated("zkconfigexhibitorpath", value)
}
opt[String]("zkconfigexhibitorport").optional().text("Used if the ZooKeeper shared config is also running Exhibitor. This is the port that Exhibitor is listening on. IMPORTANT: if this value is not set it implies that Exhibitor is not being used on the ZooKeeper shared config. Optional.").action { (value, config) =>
config.updated("zkconfigexhibitorport", value)
}
opt[String]("zkconfigpollms").optional().text("The period in ms to check for changes in the config ensemble. The default is: 10000. Optional.").action { (value, config) =>
config.updated("zkconfigpollms", value)
}
opt[String]("zkconfigretry").optional().text("The retry values to use in the form sleep-ms:retry-qty. The default is: 1000:3. Optional.").action { (value, config) =>
config.updated("zkconfigretry", value)
}
opt[String]("zkconfigzpath").optional().text("The base ZPath that Exhibitor should use. E.g: /exhibitor/config. Optional.").action { (value, config) =>
config.updated("zkconfigzpath", value)
}
// Backup Options
opt[String]("filesystembackup").optional().text("If true, enables file system backup of ZooKeeper log files. Optional.").action { (value, config) =>
config.updated("filesystembackup", value)
}
opt[String]("s3backup").optional().text("If true, enables AWS S3 backup of ZooKeeper log files (s3credentials may be provided as well). Optional.").action { (value, config) =>
config.updated("s3backup", value)
}
// ACL Options
opt[String]("aclid").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's ID. Optional.").action { (value, config) =>
config.updated("aclid", value)
}
opt[String]("aclperms").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's Permissions - a comma list of possible permissions. If this isn't specified the permission is set to ALL. Values: read, write, create, delete, admin. Optional.").action { (value, config) =>
config.updated("aclperms", value)
}
opt[String]("aclscheme").optional().text("Enable ACL for Exhibitor's internal ZooKeeper connection. This sets the ACL's Scheme. Optional.").action { (value, config) =>
config.updated("aclscheme", value)
}
// shared configs
opt[String]("log-index-directory").optional().text("The directory where indexed Zookeeper logs should be kept. Optional.").action { (value, config) =>
config.updated("log-index-directory", value)
}
opt[String]("zookeeper-install-directory").optional().text("The directory where the Zookeeper server is installed. Optional.").action { (value, config) =>
config.updated("zookeeper-install-directory", value)
}
opt[String]("zookeeper-data-directory").optional().text("The directory where Zookeeper snapshot data is stored. Optional.").action { (value, config) =>
config.updated("zookeeper-data-directory", value)
}
opt[String]("zookeeper-log-directory").optional().text("The directory where Zookeeper transaction log data is stored. Optional.").action { (value, config) =>
config.updated("zookeeper-log-directory", value)
}
opt[String]("backup-extra").optional().text("Backup extra shared config. Optional.").action { (value, config) =>
config.updated("backup-extra", value)
}
opt[String]("zoo-cfg-extra").optional().text("Any additional properties to be added to the zoo.cfg file in form: key1\\\\\\\\=value1&key2\\\\\\\\=value2. Optional.").action { (value, config) =>
config.updated("zoo-cfg-extra", value)
}
opt[String]("java-environment").optional().text("Script to write as the 'java.env' file which gets executed as a part of Zookeeper start script. Optional.").action { (value, config) =>
config.updated("java-environment", value)
}
opt[String]("log4j-properties").optional().text("Contents of the log4j.properties file. Optional.").action { (value, config) =>
config.updated("log4j-properties", value)
}
opt[String]("client-port").optional().text("The port that clients use to connect to Zookeeper. Defaults to 2181. Optional.").action { (value, config) =>
config.updated("client-port", value)
}
opt[String]("connect-port").optional().text("The port that other Zookeeper instances use to connect to Zookeeper. Defaults to 2888. Optional.").action { (value, config) =>
config.updated("connect-port", value)
}
opt[String]("election-port").optional().text("The port that other Zookeeper instances use for election. Defaults to 3888. Optional.").action { (value, config) =>
config.updated("election-port", value)
}
opt[String]("check-ms").optional().text("The number of milliseconds between live-ness checks on Zookeeper server. Defaults to 30000. Optional.").action { (value, config) =>
config.updated("check-ms", value)
}
opt[String]("cleanup-period-ms").optional().text("The number of milliseconds between Zookeeper log file cleanups. Defaults to 43200000. Optional.").action { (value, config) =>
config.updated("cleanup-period-ms", value)
}
opt[String]("cleanup-max-files").optional().text("The max number of Zookeeper log files to keep when cleaning up. Defaults to 3. Optional.").action { (value, config) =>
config.updated("cleanup-max-files", value)
}
opt[String]("backup-max-store-ms").optional().text("Backup max store ms shared config. Optional.").action { (value, config) =>
config.updated("backup-max-store-ms", value)
}
opt[String]("backup-period-ms").optional().text("Backup period ms shared config. Optional.").action { (value, config) =>
config.updated("backup-period-ms", value)
}
}
// Minimal parser shared by commands that only accept --api.
private def defaultParser(descr: String): OptionParser[Map[String, String]] = new OptionParser[Map[String, String]](descr) {
opt[String]('a', "api").optional().text("Binding host:port for http/artifact server. Optional if EM_API env is set.").action { (value, config) =>
config.updated("api", value)
}
}
}
}
| samklr/exhibitor-mesos-framework | src/main/scala/ly/stealth/mesos/exhibitor/Cli.scala | Scala | apache-2.0 | 22,525 |
package editors
import play.api.mvc.{BodyParsers, AnyContentAsFormUrlEncoded, AnyContent, BodyParser}
/**
 * Reads data of type `A` as a `Map[String, Seq[String] ]` of field names to
 * submitted values.
 *
 * @tparam A the request body type this reader understands
 */
trait DataReader[A] {
// Extracts form-style key/value data from a body of type `A`.
def read(a: A): Map[String, Seq[String]]
// The Play body parser used to produce an `A` from an incoming request.
def bodyParser: BodyParser[A]
}
/** Default [[DataReader]] instances. */
object DataReader {
// Reads url-encoded form bodies out of Play's AnyContent.
implicit val anyContentReader = new DataReader[AnyContent] {
// NOTE(review): this match is non-exhaustive — any non-form-urlencoded body
// (JSON, multipart, empty, ...) throws a MatchError at runtime. Confirm
// whether a default case (e.g. an empty map or a typed error) is intended.
def read(a: AnyContent) = a match {
case AnyContentAsFormUrlEncoded(data) => data
}
def bodyParser = BodyParsers.parse.anyContent
}
}
| julienrf/editors | library/src/main/scala/editors/DataReader.scala | Scala | mit | 522 |
/**
 * Square-root computation via Newton's method.
 *
 * BUG FIX: `sqrtIter` previously returned `x` (the *input*) once the guess
 * converged, so e.g. `sqrt(4)` yielded `4.0` instead of `2.0`. It now returns
 * the converged `guess`. Also guards `x == 0`, for which the relative-error
 * stop condition divides by zero and the original looped forever, and rejects
 * negative input (for which no real root exists).
 */
object SqrtNewton {
  /** Absolute value of `x`. */
  def abs(x: Double) = if (x < 0) -x else x

  /**
   * Returns an approximation of the square root of `x` (relative error of the
   * square below 0.1%).
   *
   * @throws IllegalArgumentException if `x` is negative.
   */
  def sqrt(x: Double) = {
    require(x >= 0, "sqrt is only defined for non-negative input")

    @annotation.tailrec
    def sqrtIter(guess: Double): Double =
      if (stopCond(guess)) guess // was `x`: returned the input, not the root
      else sqrtIter(approx(guess))

    // Converged when guess^2 is within 0.1% (relative) of x.
    def stopCond(guess: Double) =
      abs(guess * guess - x) / x < 0.001

    // One Newton step: average the guess with x/guess.
    def approx(guess: Double) =
      (guess + x / guess) / 2

    // stopCond divides by x, so x == 0 must be handled separately.
    if (x == 0) 0.0 else sqrtIter(1.0)
  }

  def main(args: Array[String]) {
    println(sqrt(4))
  }
}
| Chasego/nie | proj/lang/Scala/SqrtNewton.scala | Scala | gpl-3.0 | 450 |
package com.twitter.util
import java.{util => ju}
import java.util.LinkedHashMap
import scala.collection.JavaConverters._
import scala.collection.mutable.{Map, MapLike, SynchronizedMap}
/**
 * A wrapper trait for java.util.Map implementations to make them behave as scala Maps.
 * This is useful if you want to have more specifically-typed wrapped objects instead
 * of the generic maps returned by JavaConverters.
 *
 * All operations delegate to `underlying`; mutations performed through this
 * wrapper are visible in the wrapped Java map and vice versa.
 */
trait JMapWrapperLike[A, B, +Repr <: MapLike[A, B, Repr] with Map[A, B]] extends Map[A, B] with MapLike[A, B, Repr] {
// The wrapped Java map that backs every operation.
def underlying: ju.Map[A, B]
override def size = underlying.size
override def get(k: A) = underlying.asScala.get(k)
override def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
override def -=(key: A): this.type = { underlying remove key; this }
override def put(k: A, v: B): Option[B] = underlying.asScala.put(k, v)
override def update(k: A, v: B) { underlying.put(k, v) }
override def remove(k: A): Option[B] = underlying.asScala.remove(k)
override def clear() = underlying.clear()
// NOTE(review): placeholder — concrete subclasses (LruMap and friends)
// override this; calling `empty` on a bare JMapWrapperLike yields null.
override def empty: Repr = null.asInstanceOf[Repr]
override def iterator = underlying.asScala.iterator
}
/** Factory for the LinkedHashMap that backs [[LruMap]]. */
object LruMap {
// initial capacity and load factor are the normal defaults for LinkedHashMap
def makeUnderlying[K, V](maxSize: Int): ju.Map[K, V] = new LinkedHashMap[K, V](
16, /* initial capacity */
0.75f, /* load factor */
true /* access order (as opposed to insertion order) */
) {
// Evict the least-recently-accessed entry once the map exceeds maxSize.
override protected def removeEldestEntry(eldest: ju.Map.Entry[K, V]): Boolean = {
this.size() > maxSize
}
}
}
/**
 * A scala `Map` backed by a [[java.util.LinkedHashMap]] in access order:
 * once more than `maxSize` entries are present, the least-recently-accessed
 * entry is evicted. Not thread-safe; see [[SynchronizedLruMap]].
 */
class LruMap[K, V](val maxSize: Int, val underlying: ju.Map[K, V])
extends JMapWrapperLike[K, V, LruMap[K, V]]
{
override def empty: LruMap[K, V] = new LruMap[K, V](maxSize)
// Convenience constructor: builds the backing access-ordered LinkedHashMap.
def this(maxSize: Int) = this(maxSize, LruMap.makeUnderlying(maxSize))
}
/**
* A synchronized scala `Map` backed by an [[java.util.LinkedHashMap]]
*/
// Wraps the supplied backing map in Collections.synchronizedMap and mixes in
// SynchronizedMap so the scala-side operations are also synchronized.
// NOTE(review): scala.collection.mutable.SynchronizedMap is deprecated in
// newer Scala versions — consider a java.util.concurrent-based alternative.
class SynchronizedLruMap[K, V](maxSize: Int, underlying: ju.Map[K, V])
  extends LruMap[K, V](maxSize, ju.Collections.synchronizedMap(underlying))
  with SynchronizedMap[K, V]
{
  // A fresh, empty synchronized LRU map with the same capacity bound.
  override def empty: SynchronizedLruMap[K, V] = new SynchronizedLruMap[K, V](maxSize)
  def this(maxSize: Int) = this(maxSize, LruMap.makeUnderlying(maxSize))
}
| BuoyantIO/twitter-util | util-collection/src/main/scala/com/twitter/util/LruMap.scala | Scala | apache-2.0 | 2,348 |
/**
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package rx.lang
/**
* This package contains all classes that RxScala users need.
*
* It basically mirrors the structure of package `rx`, but some changes were made to make it more Scala-idiomatic.
*/
package object scala {
  /**
   * Placeholder for extension methods into Observable[T] from other types.
   *
   * Implemented as a value class (AnyVal) so the wrapper allocates nothing
   * at runtime in most call sites.
   */
  implicit class ObservableExtensions[T](val source: Iterable[T]) extends AnyVal {
    // Wraps any Iterable in an Observable emitting its elements via Observable.from.
    def toObservable: Observable[T] = { Observable.from(source) }
  }
}
| zjrstar/RxScala | src/main/scala/rx/lang/scala/package.scala | Scala | apache-2.0 | 1,071 |
import com.amazonaws.services.{ dynamodbv2 => aws }
package object dynamodbv2 {
  // Re-export the AWS DynamoDB v2 model types under package-local aliases so
  // client code can avoid importing com.amazonaws.services.dynamodbv2.model.
  type TableStatus = aws.model.TableStatus
  type KeyType = aws.model.KeyType
  type AttributeAction = aws.model.AttributeAction
  type ProjectionType = aws.model.ProjectionType
  type ReturnConsumedCapacity = aws.model.ReturnConsumedCapacity
  type ComparisonOperator = aws.model.ComparisonOperator
  type Select = aws.model.Select
  // Shorthand for building conditions — presumably a condition factory;
  // see DynamoDBCondition (declared elsewhere in this project).
  val cond = DynamoDBCondition
}
| hirokikonishi/awscala | aws/dynamo/src/main/scala/package.scala | Scala | apache-2.0 | 452 |
package opencl.generator
import ir.ast.{CheckedArrayAccess, Join, Zip, fun, _}
import ir.{ArrayType, ArrayTypeWC, ArrayTypeWSWC, TypeChecker, _}
import lift.arithmetic.{Cst, SizeVar}
import opencl.executor.{Compile, Execute, Executor, TestWithExecutor}
import opencl.ir._
import opencl.ir.pattern.{MapGlb, MapSeq, ReduceSeq, toGlobal}
import org.junit.Assert.{assertArrayEquals, assertEquals}
import org.junit.{Test, _}
// Companion object wiring this suite into the Executor test harness
// (TestWithExecutor is declared elsewhere in the project).
object TestArray extends TestWithExecutor
class TestArray {
  /**
   * Size is not statically known but the capacity is.
   * The array is filled with integers so we don't need to store the offsets.
   * Layout: [size, elt_0, elt_1, …, elt_{κ-1}]
   */
  @Test
  def unknownSizeReduce(): Unit = {
    val capacity = 1024
    val size = 700
    val input = Array.fill(size)(util.Random.nextInt(16))
    val f = fun(
      ArrayTypeWC(Int, capacity),
      in =>
        MapGlb(toGlobal(id(Int))) o ReduceSeq(addI, 0) $ in
    )
    val t = TypeChecker(f)
    assertEquals(t, ArrayTypeWSWC(Int, 1, 1))
    val (output, _) = Execute(128)[Array[Int]](f, input)
    assertArrayEquals(Array(input.sum), output)
  }
  /**
   * Same situation but this time the output is an array of the same shape
   * and not a constant.
   */
  @Test
  def unknownSizeMap(): Unit = {
    val capacity = 1024
    val size = 879
    val iInput = Array.fill(size)(util.Random.nextInt())
    val fInput = Array.fill(size)(util.Random.nextFloat())
    // Builds an identity map lambda for the given scalar element type.
    def mkMapId(st: ScalarType): Lambda1 = fun(
      ArrayTypeWC(st, capacity),
      MapGlb(toGlobal(id(st))) $ _
    )
    val exec = Execute(128)
    val (iOutput, _) = exec[Vector[Int]](mkMapId(Int), iInput)
    assertArrayEquals(iInput, iOutput.toArray)
    val (fOutput, _) = exec[Vector[Float]](mkMapId(Float), fInput)
    assertArrayEquals(fInput, fOutput.toArray, 0f)
  }
  // Same as unknownSizeMap but for Bool elements.
  @Test
  def unknownSizeMapBool(): Unit = {
    val capacity = 1024
    val size = 877
    val input = Array.fill(size)(util.Random.nextBoolean())
    val mapId = fun(
      ArrayTypeWC(Bool, capacity),
      MapGlb(toGlobal(id(Bool))) $ _
    )
    val (bOutput, _) = Execute(128)[Vector[Boolean]](mapId, input)
    assertEquals(input.toVector, bOutput)
  }
  // Same as unknownSizeMap but for Double elements; skipped when the device
  // lacks double-precision support.
  @Test
  def unknownSizeMapDouble(): Unit = {
    Assume.assumeTrue("Needs double support", Executor.supportsDouble())
    val capacity = 1024
    val size = 879
    val input = Array.fill(size)(util.Random.nextDouble())
    val mapId = fun(
      ArrayTypeWC(Double, capacity),
      MapGlb(toGlobal(id(Double))) $ _
    )
    val (dOutput, _) = Execute(128)[Vector[Double]](mapId, input)
    assertArrayEquals(input, dOutput.toArray, 0d)
  }
  /**
   * This time we don't know the size either but we know the shape of the
   * output.
   * Layout: [κ, size, elt_0, elt_1, elt_2, …]
   */
  @Test
  def inputZeroKnowledge(): Unit = {
    val size = 128
    val input = Array.fill(size)(util.Random.nextInt(16))
    val f = fun(
      ArrayType(Int),
      MapGlb(toGlobal(idI)) o ReduceSeq(addI, 0) $ _
    )
    val (output, _) = Execute(128)[Array[Int]](f, input)
    assertArrayEquals(Array(input.sum), output)
  }
  /**
   * Here, you know nothing (Jon Snow).
   */
  @Test
  def zeroKnowledge(): Unit = {
    val f = fun(
      ArrayType(Int),
      in =>
        MapGlb(toGlobal(idI)) $ in
    )
    // Neither size nor capacity known: only compilation is checked here.
    assertEquals(TypeChecker(f), ArrayType(Int))
    Compile(f)
  }
  /**
   * Nested arrays with no size
   */
  @Test
  def nestedArraysNoSize(): Unit = {
    val capacity = 128
    val size1 = 90
    val size2 = 42
    val input = Array.fill(size1, size2)(util.Random.nextFloat())
    val f = fun(
      ArrayTypeWC(ArrayTypeWC(Float, capacity), capacity),
      MapGlb(MapSeq(id(Float))) $ _
    )
    assertEquals(TypeChecker(f), ArrayTypeWC(ArrayTypeWC(Float, capacity), capacity))
    val (output, _) = Execute(capacity)[Vector[Vector[Float]]](f, input)
    assertArrayEquals(input.flatten, output.flatten.toArray, 0.0001f)
  }
  /**
   * If some capacity is not known at compile time in the type of an input
   * we can still allocate it at the last minute.
   */
  @Test
  def unknownInnerCapacity(): Unit = {
    val size = 128
    val N = SizeVar("N")
    val f = fun(
      ArrayTypeWSWC(ArrayType(Int), N),
      Join() o
      MapGlb(MapSeq(toGlobal(idI)) o ReduceSeq(addI, 0)) $ _
    )
    // Ragged rows: each inner array has a random length between 4 and 11.
    val input = Array.fill(size)(Array.fill(4 + util.Random.nextInt(8))(util.Random.nextInt(16)))
    val (output, _) = Execute(size)[Array[Int]](f, input)
    assertArrayEquals(input.map(_.sum), output)
  }
  // Variant of unknownInnerCapacity: counts the true values in ragged Bool rows.
  @Test
  def unknownInnerCapacityBool(): Unit = {
    val size = 256
    val N = SizeVar("N")
    val countTrue = UserFun(
      "countTrue", Array("tot", "b"), "return (b) ? tot + 1 : tot;", Seq(Int, Bool), Int
    )
    val f = fun(
      ArrayTypeWSWC(ArrayType(Bool), N),
      Join() o MapGlb(MapSeq(toGlobal(id(Int))) o ReduceSeq(countTrue, 0)) $ _
    )
    val input = Array.fill(size)(Array.fill(4 + util.Random.nextInt(8))(util.Random.nextBoolean()))
    val exec = Execute(size)
    val (output, _) = exec[Array[Int]](f, input)
    assertArrayEquals(input.map(_.count(b => b)), output)
  }
  /**
   * Nested arrays.
   */
  @Test
  def nestedArraysZeroKnowledge(): Unit = {
    val f = fun(
      ArrayType(ArrayType(Int)),
      in =>
        MapGlb(MapSeq(toGlobal(idI))) $ in
    )
    assertEquals(TypeChecker(f), ArrayType(ArrayType(Int)))
    Compile(f)
  }
  // Ignored variant of arrZipMap — presumably requires an intermediate
  // allocation for the MapSeq(add) result; see arrZipMap for the runnable form.
  @Ignore
  @Test
  def arrZipMapWAllocation(): Unit = {
    val f = fun(
      ArrayType(Float), ArrayType(Float), (p1, p2) =>
      ReduceSeq(add, 0.0f) o MapSeq(add) $ Zip(p1, p2)
    )
    assertEquals(TypeChecker(f), ArrayTypeWSWC(Float, Cst(1)))
    Compile(f)
  }
  // Dot product of two zero-knowledge arrays; Zip truncates to the shorter
  // input (here length 37), checked in both argument orders.
  @Test
  def arrZipMap(): Unit = {
    val f = fun(
      ArrayType(Float), ArrayType(Float), (p1, p2) =>
      toGlobal(MapSeq(id)) o ReduceSeq(fun((init, elem) => add(init, mult(elem._0, elem._1))), 0.0f) $ Zip(p1, p2)
    )
    assertEquals(TypeChecker(f), ArrayTypeWSWC(Float, Cst(1)))
    val p1 = Array.fill(37)(util.Random.nextFloat())
    val p2 = Array.fill(78)(util.Random.nextFloat())
    val exec = Execute(128)
    val (output, _) = exec[Array[Float]](f, p1, p2)
    val (outputRev, _) = exec[Array[Float]](f, p2, p1)
    val gold = (p1 zip p2.slice(0, 78)).map(p => p._1 * p._2).sum
    assertArrayEquals(Array(gold), output, 0.0001f)
    assertArrayEquals(Array(gold), outputRev, 0.0001f)
  }
  // Row-wise dot product over two ragged 2-D arrays with known outer size N.
  @Test
  def twoDArrZipReduce(): Unit = {
    val N = SizeVar("N")
    val f = fun(
      ArrayTypeWSWC(ArrayType(Float), N), ArrayTypeWSWC(ArrayType(Float), N), (p1, p2) =>
      Join() o MapGlb(
        fun(rowPair =>
          MapSeq(toGlobal(id)) o
          ReduceSeq(fun((init, elem) => add(init, mult(elem._0, elem._1))), 0.0f) $
          Zip(rowPair._0, rowPair._1)
        )
      ) $ Zip(p1, p2)
    )
    assertEquals(TypeChecker(f), ArrayTypeWSWC(Float, N))
    val height = 128
    // Both inputs share the same per-row lengths so the row-wise zips line up.
    val als: Array[Int] = Array.fill(height)(util.Random.nextInt(127) + 1)
    val p1 = als.map(Array.fill(_)(util.Random.nextFloat()))
    val p2 = als.map(Array.fill(_)(util.Random.nextFloat()))
    val gold = (p1 zip p2).map {
      case (arr1, arr2) =>
        (arr1 zip arr2)
          .map { case (e1: Float, e2: Float) => e1 * e2 }
          .sum
    }
    val (output, _) = Execute(128)[Array[Float]](f, p1, p2)
    assertArrayEquals(gold, output, 0.001f)
  }
  // Three chained MapSeqs over a partially filled array (size < capacity):
  // add 1, duplicate into a tuple, then sum the tuple — i.e. 2 * (x + 1).
  @Test
  def chainedMaps(): Unit = {
    val capacity = 128
    val size = 87 // random value < capacity
    val mkTuple = UserFun("mkTuple", "x", "return (Tuple){x, x};", Float, TupleType(Float, Float))
    val addTuple = UserFun("addTuple", "t", "return t._0 + t._1;", TupleType(Float, Float), Float)
    val f = fun(
      ArrayTypeWC(Float, capacity),
      arr =>
        MapSeq(addTuple)
        o MapSeq(mkTuple)
        o MapSeq(fun(add.apply(1f, _))) $ arr
    )
    val input = Array.fill(size)(util.Random.nextFloat())
    val exec = Execute(capacity)
    val (output, _) = exec[Vector[Float]](f, input)
    assertArrayEquals(input.map(x => 2 * (x + 1)), output.toArray, 0.001f)
  }
  // Five statically-sized dimensions wrapping an innermost unknown-size array
  // that gets reduced to its sum.
  @Test
  def highDimension(): Unit = {
    val dimSizes = Array(4, 2, 3, 2, 3)
    val inner = ArrayType(Int)
    val kernel = fun(
      dimSizes.foldRight(inner)({ case (size, t) => ArrayTypeWSWC(t, size, size) }),
      array =>
        MapGlb(
          MapSeq(
            MapSeq(
              MapSeq(
                Join() o MapSeq(MapSeq(toGlobal(id(Int))) o ReduceSeq(add(Int), 0))
              )
            )
          )
        ) $ array
    )
    val input = Array.fill(dimSizes(0), dimSizes(1), dimSizes(2), dimSizes(3), dimSizes(4))({
      val len = util.Random.nextInt(8) + 1
      Array.fill(len)(util.Random.nextInt(512))
    })
    val gold = input.map(_.map(_.map(_.map(_.map(_.sum).toVector).toVector).toVector).toVector).toVector
    // Do not use [Array[Int]] here, we want to know if the Decoder can handle such a structure
    val (output, _) = Execute(4, 4)[Vector[Vector[Vector[Vector[Vector[Int]]]]]](kernel, input)
    assertEquals(gold, output)
  }
  // Sparse matrix-vector multiply: per-row gather via CheckedArrayAccess
  // (with 0.0f as the out-of-bounds fallback), then a row-wise reduction.
  @Test
  def basicSpMV(): Unit = {
    val N = SizeVar("VectorLength")
    val M = SizeVar("MatrixHeight")
    val f = fun(
      ArrayTypeWSWC(ArrayType(Int), M),
      ArrayTypeWSWC(ArrayType(Float), M),
      ArrayTypeWSWC(Float, N),
      (arrayIndices, arrayValues, vector) =>
        Zip(arrayIndices, arrayValues) :>>
        MapGlb(fun( rowPair =>
          Zip(rowPair._0, rowPair._1) :>>
          ReduceSeq(fun((acc, rowElem) =>
            add(acc, mult(rowElem._1, CheckedArrayAccess(rowElem._0, 0.0f) $ vector))), 0.0f
          ) :>> toGlobal(MapSeq(id))
        )) :>> Join()
    )
    val height = 128
    val width = 64
    val als: Array[Int] = Array.fill(height)(util.Random.nextInt(width - 1) + 1)
    val indices = als.map(Array.fill(_)(util.Random.nextInt(width)))
    val values = als.map(Array.fill(_)(util.Random.nextFloat()))
    val vector = Array.fill(width)(util.Random.nextFloat())
    val gold = (indices zip values).map{
      case (ixRow, valRow) =>
        (ixRow zip valRow).map{
          case (ix, v) => v * vector(ix)
        }.sum
    }
    val exec = Execute(128)
    val (output, _) = exec[Array[Float]](f, indices, values, vector)
    assertArrayEquals(gold, output, 0.001f)
  }
}
| lift-project/lift | src/test/opencl/generator/TestArray.scala | Scala | mit | 10,346 |
/*
 * This is a draft of an idea for an assertion module.
*
* An assertion provider should provide assert/require so
* that we can use them whenever we assume that a condition should
* always hold. Assertions are very useful concept for system with
* complex state, such as games, because they let us clarify
* our mind and catch errors early and thus simplify debugging.
*
* In release mode, there's an argument for not failing unless
* we really have to, and maybe an unexpected state can still be
* ok, which is why we would not want to crash on every assert even
* if the state is corrupted. That should of course be balanced
* with safety (for saved data) and there is always a risk of
* corrupting player progress. My idea here is to provide an
 * assert and a fatalAssert, where the fatal one would crash even
 * in release mode, but the assert would not.
*
* The idea then is to have various possible implementations, the
* default one would use built-in assert/requires, and just crash
* with an exception. A release one would maybe do nothing. There
 * would be in-between solutions that would involve logging the
* asserts but not crashing, or even better a crash report that
* could be sent with all the asserts that failed.
*/
//trait AssertionsProvider {
//
//
// trait Asserts {
//
// def assert()
//
// // crashes in all cases, even in release mode.
// def fatal()
//
// }
// val Asserts: Asserts
//
//}
| regb/scala-game-library | core/src/main/scala/sgl/util/AssertionsProvider.scala | Scala | mit | 1,451 |
package scalajsreact.template.components.items
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
object Item2Data {
  // The static markup this page renders; built once at object initialization.
  private val markup = <.div("This is Item2 Page ")
  /** Stateless scalajs-react component that always renders the same markup. */
  val component =
    ScalaComponent.builder.static("Item2")(markup).build
  /** Instantiates the component and returns its raw VdomElement. */
  def apply() = component().vdomElement
}
| chandu0101/scalajs-react-template | src/main/scala/scalajsreact/template/components/items/Item2Data.scala | Scala | apache-2.0 | 286 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.json
import java.io.{File, StringWriter}
import java.sql.{Date, Timestamp}
import com.fasterxml.jackson.core.JsonFactory
import org.apache.spark.rdd.RDD
import org.scalactic.Tolerance._
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.datasources.{ResolvedDataSource, LogicalRelation}
import org.apache.spark.sql.execution.datasources.json.InferSchema.compatibleType
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
import testImplicits._
test("Type promotion") {
def checkTypePromotion(expected: Any, actual: Any) {
assert(expected.getClass == actual.getClass,
s"Failed to promote ${actual.getClass} to ${expected.getClass}.")
assert(expected == actual,
s"Promoted value ${actual}(${actual.getClass}) does not equal the expected value " +
s"${expected}(${expected.getClass}).")
}
val factory = new JsonFactory()
def enforceCorrectType(value: Any, dataType: DataType): Any = {
val writer = new StringWriter()
Utils.tryWithResource(factory.createGenerator(writer)) { generator =>
generator.writeObject(value)
generator.flush()
}
Utils.tryWithResource(factory.createParser(writer.toString)) { parser =>
parser.nextToken()
JacksonParser.convertField(factory, parser, dataType)
}
}
val intNumber: Int = 2147483647
checkTypePromotion(intNumber, enforceCorrectType(intNumber, IntegerType))
checkTypePromotion(intNumber.toLong, enforceCorrectType(intNumber, LongType))
checkTypePromotion(intNumber.toDouble, enforceCorrectType(intNumber, DoubleType))
checkTypePromotion(
Decimal(intNumber), enforceCorrectType(intNumber, DecimalType.SYSTEM_DEFAULT))
val longNumber: Long = 9223372036854775807L
checkTypePromotion(longNumber, enforceCorrectType(longNumber, LongType))
checkTypePromotion(longNumber.toDouble, enforceCorrectType(longNumber, DoubleType))
checkTypePromotion(
Decimal(longNumber), enforceCorrectType(longNumber, DecimalType.SYSTEM_DEFAULT))
val doubleNumber: Double = 1.7976931348623157E308d
checkTypePromotion(doubleNumber.toDouble, enforceCorrectType(doubleNumber, DoubleType))
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber)),
enforceCorrectType(intNumber, TimestampType))
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(intNumber.toLong)),
enforceCorrectType(intNumber.toLong, TimestampType))
val strTime = "2014-09-30 12:34:56"
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf(strTime)),
enforceCorrectType(strTime, TimestampType))
val strDate = "2014-10-15"
checkTypePromotion(
DateTimeUtils.fromJavaDate(Date.valueOf(strDate)), enforceCorrectType(strDate, DateType))
val ISO8601Time1 = "1970-01-01T01:00:01.0Z"
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(3601000)),
enforceCorrectType(ISO8601Time1, TimestampType))
checkTypePromotion(DateTimeUtils.millisToDays(3601000),
enforceCorrectType(ISO8601Time1, DateType))
val ISO8601Time2 = "1970-01-01T02:00:01-01:00"
checkTypePromotion(DateTimeUtils.fromJavaTimestamp(new Timestamp(10801000)),
enforceCorrectType(ISO8601Time2, TimestampType))
checkTypePromotion(DateTimeUtils.millisToDays(10801000),
enforceCorrectType(ISO8601Time2, DateType))
}
test("Get compatible type") {
def checkDataType(t1: DataType, t2: DataType, expected: DataType) {
var actual = compatibleType(t1, t2)
assert(actual == expected,
s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
actual = compatibleType(t2, t1)
assert(actual == expected,
s"Expected $expected as the most general data type for $t1 and $t2, found $actual")
}
// NullType
checkDataType(NullType, BooleanType, BooleanType)
checkDataType(NullType, IntegerType, IntegerType)
checkDataType(NullType, LongType, LongType)
checkDataType(NullType, DoubleType, DoubleType)
checkDataType(NullType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(NullType, StringType, StringType)
checkDataType(NullType, ArrayType(IntegerType), ArrayType(IntegerType))
checkDataType(NullType, StructType(Nil), StructType(Nil))
checkDataType(NullType, NullType, NullType)
// BooleanType
checkDataType(BooleanType, BooleanType, BooleanType)
checkDataType(BooleanType, IntegerType, StringType)
checkDataType(BooleanType, LongType, StringType)
checkDataType(BooleanType, DoubleType, StringType)
checkDataType(BooleanType, DecimalType.SYSTEM_DEFAULT, StringType)
checkDataType(BooleanType, StringType, StringType)
checkDataType(BooleanType, ArrayType(IntegerType), StringType)
checkDataType(BooleanType, StructType(Nil), StringType)
// IntegerType
checkDataType(IntegerType, IntegerType, IntegerType)
checkDataType(IntegerType, LongType, LongType)
checkDataType(IntegerType, DoubleType, DoubleType)
checkDataType(IntegerType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(IntegerType, StringType, StringType)
checkDataType(IntegerType, ArrayType(IntegerType), StringType)
checkDataType(IntegerType, StructType(Nil), StringType)
// LongType
checkDataType(LongType, LongType, LongType)
checkDataType(LongType, DoubleType, DoubleType)
checkDataType(LongType, DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT)
checkDataType(LongType, StringType, StringType)
checkDataType(LongType, ArrayType(IntegerType), StringType)
checkDataType(LongType, StructType(Nil), StringType)
// DoubleType
checkDataType(DoubleType, DoubleType, DoubleType)
checkDataType(DoubleType, DecimalType.SYSTEM_DEFAULT, DoubleType)
checkDataType(DoubleType, StringType, StringType)
checkDataType(DoubleType, ArrayType(IntegerType), StringType)
checkDataType(DoubleType, StructType(Nil), StringType)
// DecimalType
checkDataType(DecimalType.SYSTEM_DEFAULT, DecimalType.SYSTEM_DEFAULT,
DecimalType.SYSTEM_DEFAULT)
checkDataType(DecimalType.SYSTEM_DEFAULT, StringType, StringType)
checkDataType(DecimalType.SYSTEM_DEFAULT, ArrayType(IntegerType), StringType)
checkDataType(DecimalType.SYSTEM_DEFAULT, StructType(Nil), StringType)
// StringType
checkDataType(StringType, StringType, StringType)
checkDataType(StringType, ArrayType(IntegerType), StringType)
checkDataType(StringType, StructType(Nil), StringType)
// ArrayType
checkDataType(ArrayType(IntegerType), ArrayType(IntegerType), ArrayType(IntegerType))
checkDataType(ArrayType(IntegerType), ArrayType(LongType), ArrayType(LongType))
checkDataType(ArrayType(IntegerType), ArrayType(StringType), ArrayType(StringType))
checkDataType(ArrayType(IntegerType), StructType(Nil), StringType)
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType, false), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, true), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType), ArrayType(IntegerType, true))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType, false), ArrayType(IntegerType, false))
checkDataType(
ArrayType(IntegerType, false), ArrayType(IntegerType, true), ArrayType(IntegerType, true))
// StructType
checkDataType(StructType(Nil), StructType(Nil), StructType(Nil))
checkDataType(
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(StructField("f1", IntegerType, true) :: Nil))
checkDataType(
StructType(StructField("f1", IntegerType, true) :: Nil),
StructType(Nil),
StructType(StructField("f1", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) ::
StructField("f2", IntegerType, true) :: Nil),
StructType(StructField("f1", LongType, true) :: Nil) ,
StructType(
StructField("f1", LongType, true) ::
StructField("f2", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) :: Nil),
StructType(
StructField("f2", IntegerType, true) :: Nil),
StructType(
StructField("f1", IntegerType, true) ::
StructField("f2", IntegerType, true) :: Nil))
checkDataType(
StructType(
StructField("f1", IntegerType, true) :: Nil),
DecimalType.SYSTEM_DEFAULT,
StringType)
}
test("Complex field and type inferring with null in sampling") {
val jsonDF = sqlContext.read.json(jsonNullStruct)
val expectedSchema = StructType(
StructField("headers", StructType(
StructField("Charset", StringType, true) ::
StructField("Host", StringType, true) :: Nil)
, true) ::
StructField("ip", StringType, true) ::
StructField("nullstr", StringType, true):: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select nullstr, headers.Host from jsonTable"),
Seq(Row("", "1.abc.com"), Row("", null), Row("", null), Row(null, null))
)
}
test("Primitive field and type inferring") {
val jsonDF = sqlContext.read.json(primitiveFieldAndType)
val expectedSchema = StructType(
StructField("bigInteger", DecimalType(20, 0), true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", LongType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
"this is a simple string.")
)
}
test("Complex field and type inferring") {
val jsonDF = sqlContext.read.json(complexFieldAndType1)
val expectedSchema = StructType(
StructField("arrayOfArray1", ArrayType(ArrayType(StringType, true), true), true) ::
StructField("arrayOfArray2", ArrayType(ArrayType(DoubleType, true), true), true) ::
StructField("arrayOfBigInteger", ArrayType(DecimalType(21, 0), true), true) ::
StructField("arrayOfBoolean", ArrayType(BooleanType, true), true) ::
StructField("arrayOfDouble", ArrayType(DoubleType, true), true) ::
StructField("arrayOfInteger", ArrayType(LongType, true), true) ::
StructField("arrayOfLong", ArrayType(LongType, true), true) ::
StructField("arrayOfNull", ArrayType(StringType, true), true) ::
StructField("arrayOfString", ArrayType(StringType, true), true) ::
StructField("arrayOfStruct", ArrayType(
StructType(
StructField("field1", BooleanType, true) ::
StructField("field2", StringType, true) ::
StructField("field3", StringType, true) :: Nil), true), true) ::
StructField("struct", StructType(
StructField("field1", BooleanType, true) ::
StructField("field2", DecimalType(20, 0), true) :: Nil), true) ::
StructField("structWithArrayFields", StructType(
StructField("field1", ArrayType(LongType, true), true) ::
StructField("field2", ArrayType(StringType, true), true) :: Nil), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from jsonTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from jsonTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] from jsonTable"),
Row(new java.math.BigDecimal("922337203685477580700"),
new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from jsonTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from jsonTable"),
Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
// Access elements of an array inside a filed with the type of ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from jsonTable"),
Row("str2", 2.1)
)
// Access elements of an array of structs.
checkAnswer(
sql("select arrayOfStruct[0], arrayOfStruct[1], arrayOfStruct[2], arrayOfStruct[3] " +
"from jsonTable"),
Row(
Row(true, "str1", null),
Row(false, null, null),
Row(null, null, null),
null)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from jsonTable"),
Row(
Row(true, new java.math.BigDecimal("92233720368547758070")),
true,
new java.math.BigDecimal("92233720368547758070")) :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from jsonTable"),
Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] from jsonTable"),
Row(5, null)
)
}
test("GetField operation on complex data type") {
val jsonDF = sqlContext.read.json(complexFieldAndType1)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
Row(true, "str1")
)
// Getting all values of a specific field from an array of structs.
checkAnswer(
sql("select arrayOfStruct.field1, arrayOfStruct.field2 from jsonTable"),
Row(Seq(true, false, null), Seq("str1", null, null))
)
}
test("Type conflict in primitive field values") {
val jsonDF = sqlContext.read.json(primitiveFieldValueTypeConflict)
val expectedSchema = StructType(
StructField("num_bool", StringType, true) ::
StructField("num_num_1", LongType, true) ::
StructField("num_num_2", DoubleType, true) ::
StructField("num_num_3", DoubleType, true) ::
StructField("num_str", StringType, true) ::
StructField("str_bool", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row("true", 11L, null, 1.1, "13.1", "str1") ::
Row("12", null, 21474836470.9, null, null, "true") ::
Row("false", 21474836470L, 92233720368547758070d, 100, "str1", "false") ::
Row(null, 21474836570L, 1.1, 21474836470L, "92233720368547758070", null) :: Nil
)
// Number and Boolean conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_bool - 10 from jsonTable where num_bool > 11"),
Row(2)
)
// Widening to LongType
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 11"),
Row(21474836370L) :: Row(21474836470L) :: Nil
)
checkAnswer(
sql("select num_num_1 - 100 from jsonTable where num_num_1 > 10"),
Row(-89) :: Row(21474836370L) :: Row(21474836470L) :: Nil
)
// Widening to DecimalType
checkAnswer(
sql("select num_num_2 + 1.3 from jsonTable where num_num_2 > 1.1"),
Row(21474836472.2) ::
Row(92233720368547758071.3) :: Nil
)
// Widening to Double
checkAnswer(
sql("select num_num_3 + 1.2 from jsonTable where num_num_3 > 1.1"),
Row(101.2) :: Row(21474836471.2) :: Nil
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 14"),
Row(BigDecimal("92233720368547758071.2"))
)
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str >= 92233720368547758060"),
Row(new java.math.BigDecimal("92233720368547758071.2"))
)
// String and Boolean conflict: resolve the type as string.
checkAnswer(
sql("select * from jsonTable where str_bool = 'str1'"),
Row("true", 11L, null, 1.1, "13.1", "str1")
)
}
// Ignored: documents desired (not yet implemented) type promotion between numbers,
// booleans and strings in boolean expressions. Kept as a spec for future analyzer work.
ignore("Type conflict in primitive field values (Ignored)") {
val jsonDF = sqlContext.read.json(primitiveFieldValueTypeConflict)
jsonDF.registerTempTable("jsonTable")
// Right now, the analyzer does not promote strings in a boolean expression.
// Number and Boolean conflict: resolve the type as boolean in this query.
checkAnswer(
sql("select num_bool from jsonTable where NOT num_bool"),
Row(false)
)
checkAnswer(
sql("select str_bool from jsonTable where NOT str_bool"),
Row(false)
)
// Right now, the analyzer does not know that num_bool should be treated as a boolean.
// Number and Boolean conflict: resolve the type as boolean in this query.
checkAnswer(
sql("select num_bool from jsonTable where num_bool"),
Row(true)
)
checkAnswer(
sql("select str_bool from jsonTable where str_bool"),
Row(false)
)
// The plan of the following DSL is
// Project [(CAST(num_str#65:4, DoubleType) + 1.2) AS num#78]
// Filter (CAST(CAST(num_str#65:4, DoubleType), DecimalType) > 92233720368547758060)
// ExistingRdd [num_bool#61,num_num_1#62L,num_num_2#63,num_num_3#64,num_str#65,str_bool#66]
// We should directly cast num_str to DecimalType and also need to do the right type promotion
// in the Project.
checkAnswer(
jsonDF.
where('num_str >= BigDecimal("92233720368547758060")).
select(('num_str + 1.2).as("num")),
Row(new java.math.BigDecimal("92233720368547758071.2").doubleValue())
)
// The following test will fail. The type of num_str is StringType.
// So, to evaluate num_str + 1.2, we first need to use Cast to convert the type.
// In our test data, one value of num_str is 13.1.
// The result of (CAST(num_str#65:4, DoubleType) + 1.2) for this value is 14.299999999999999,
// which is not 14.3.
// Number and String conflict: resolve the type as number in this query.
checkAnswer(
sql("select num_str + 1.2 from jsonTable where num_str > 13"),
Row(BigDecimal("14.3")) :: Row(BigDecimal("92233720368547758071.2")) :: Nil
)
}
// Conflicting complex values (struct vs number, array vs string, ...) fall back to StringType,
// storing the raw JSON text of the conflicting field.
test("Type conflict in complex field values") {
val jsonDF = sqlContext.read.json(complexFieldValueTypeConflict)
val expectedSchema = StructType(
StructField("array", ArrayType(LongType, true), true) ::
StructField("num_struct", StringType, true) ::
StructField("str_array", StringType, true) ::
StructField("struct", StructType(
StructField("field", StringType, true) :: Nil), true) ::
StructField("struct_array", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(Seq(), "11", "[1,2,3]", Row(null), "[]") ::
Row(null, """{"field":false}""", null, null, "{}") ::
Row(Seq(4, 5, 6), null, "str", Row(null), "[7,8,9]") ::
Row(Seq(7), "{}", """["str1","str2",33]""", Row("str"), """{"field":true}""") :: Nil
)
}
// Mixed element types inside one array: array1 degrades to ArrayType(StringType),
// array2 keeps a struct element type, array3 degrades element-wise to strings.
test("Type conflict in array elements") {
val jsonDF = sqlContext.read.json(arrayElementTypeConflict)
val expectedSchema = StructType(
StructField("array1", ArrayType(StringType, true), true) ::
StructField("array2", ArrayType(StructType(
StructField("field", LongType, true) :: Nil), true), true) ::
StructField("array3", ArrayType(StringType, true), true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(Seq("1", "1.1", "true", null, "[]", "{}", "[2,3,4]",
"""{"field":"str"}"""), Seq(Row(214748364700L), Row(1)), null) ::
Row(null, null, Seq("""{"field":"str"}""", """{"field":1}""")) ::
Row(null, null, Seq("1", "2", "3")) :: Nil
)
// Treat an element as a number.
checkAnswer(
sql("select array1[0] + 1 from jsonTable where array1 is not null"),
Row(2)
)
}
// Fields absent from some records still appear in the inferred (merged) schema as nullable.
test("Handling missing fields") {
val jsonDF = sqlContext.read.json(missingFields)
val expectedSchema = StructType(
StructField("a", BooleanType, true) ::
StructField("b", LongType, true) ::
StructField("c", ArrayType(LongType, true), true) ::
StructField("d", StructType(
StructField("field", BooleanType, true) :: Nil), true) ::
StructField("e", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
}
// Verifies the analyzed plan of a read is a LogicalRelation wrapping JSONRelation,
// and that path / samplingRatio / user schema are threaded through to the relation.
test("jsonFile should be based on JSONRelation") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalFile.toURI.toString
sparkContext.parallelize(1 to 100)
.map(i => s"""{"a": 1, "b": "str$i"}""").saveAsTextFile(path)
val jsonDF = sqlContext.read.option("samplingRatio", "0.49").json(path)
val analyzed = jsonDF.queryExecution.analyzed
assert(
analyzed.isInstanceOf[LogicalRelation],
"The DataFrame returned by jsonFile should be based on LogicalRelation.")
val relation = analyzed.asInstanceOf[LogicalRelation].relation
assert(
relation.isInstanceOf[JSONRelation],
"The DataFrame returned by jsonFile should be based on JSONRelation.")
assert(relation.asInstanceOf[JSONRelation].paths === Array(path))
assert(relation.asInstanceOf[JSONRelation].samplingRatio === (0.49 +- 0.001))
val schema = StructType(StructField("a", LongType, true) :: Nil)
val logicalRelation =
sqlContext.read.schema(schema).json(path)
.queryExecution.analyzed.asInstanceOf[LogicalRelation]
val relationWithSchema = logicalRelation.relation.asInstanceOf[JSONRelation]
assert(relationWithSchema.paths === Array(path))
assert(relationWithSchema.schema === schema)
// With an explicit schema no sampling is needed; ratio stays at the default (1.0).
assert(relationWithSchema.samplingRatio > 0.99)
}
// Round-trip: write one-JSON-object-per-line text, read it back, check inferred schema
// (integers widen to LongType, big integers to DecimalType(20, 0)) and row contents.
test("Loading a JSON dataset from a text file") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).saveAsTextFile(path)
val jsonDF = sqlContext.read.json(path)
val expectedSchema = StructType(
StructField("bigInteger", DecimalType(20, 0), true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", LongType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
assert(expectedSchema === jsonDF.schema)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select * from jsonTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
"this is a simple string.")
)
}
// Same data as above, but the table is registered via SQL DDL
// (CREATE TEMPORARY TABLE ... USING org.apache.spark.sql.json).
test("Loading a JSON dataset from a text file with SQL") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).saveAsTextFile(path)
sql(
s"""
|CREATE TEMPORARY TABLE jsonTableSQL
|USING org.apache.spark.sql.json
|OPTIONS (
| path '$path'
|)
""".stripMargin)
checkAnswer(
sql("select * from jsonTableSQL"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
"this is a simple string.")
)
}
// A user-supplied schema overrides inference, both for file-based and RDD-based input
// (note IntegerType is kept here, unlike the inferred LongType above).
test("Applying schemas") {
val dir = Utils.createTempDir()
dir.delete()
val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\\n", " ")).saveAsTextFile(path)
val schema = StructType(
StructField("bigInteger", DecimalType.SYSTEM_DEFAULT, true) ::
StructField("boolean", BooleanType, true) ::
StructField("double", DoubleType, true) ::
StructField("integer", IntegerType, true) ::
StructField("long", LongType, true) ::
StructField("null", StringType, true) ::
StructField("string", StringType, true) :: Nil)
val jsonDF1 = sqlContext.read.schema(schema).json(path)
assert(schema === jsonDF1.schema)
jsonDF1.registerTempTable("jsonTable1")
checkAnswer(
sql("select * from jsonTable1"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
"this is a simple string.")
)
val jsonDF2 = sqlContext.read.schema(schema).json(primitiveFieldAndType)
assert(schema === jsonDF2.schema)
jsonDF2.registerTempTable("jsonTable2")
checkAnswer(
sql("select * from jsonTable2"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
null,
"this is a simple string.")
)
}
// MapType cannot be inferred from JSON; it must come from a user schema. Covers simple
// value maps, map-key lookup, and maps whose values are structs with array fields.
test("Applying schemas with MapType") {
val schemaWithSimpleMap = StructType(
StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
val jsonWithSimpleMap = sqlContext.read.schema(schemaWithSimpleMap).json(mapType1)
jsonWithSimpleMap.registerTempTable("jsonWithSimpleMap")
checkAnswer(
sql("select map from jsonWithSimpleMap"),
Row(Map("a" -> 1)) ::
Row(Map("b" -> 2)) ::
Row(Map("c" -> 3)) ::
Row(Map("c" -> 1, "d" -> 4)) ::
Row(Map("e" -> null)) :: Nil
)
// Lookup of a missing key yields null rather than an error.
checkAnswer(
sql("select map['c'] from jsonWithSimpleMap"),
Row(null) ::
Row(null) ::
Row(3) ::
Row(1) ::
Row(null) :: Nil
)
val innerStruct = StructType(
StructField("field1", ArrayType(IntegerType, true), true) ::
StructField("field2", IntegerType, true) :: Nil)
val schemaWithComplexMap = StructType(
StructField("map", MapType(StringType, innerStruct, true), false) :: Nil)
val jsonWithComplexMap = sqlContext.read.schema(schemaWithComplexMap).json(mapType2)
jsonWithComplexMap.registerTempTable("jsonWithComplexMap")
checkAnswer(
sql("select map from jsonWithComplexMap"),
Row(Map("a" -> Row(Seq(1, 2, 3, null), null))) ::
Row(Map("b" -> Row(null, 2))) ::
Row(Map("c" -> Row(Seq(), 4))) ::
Row(Map("c" -> Row(null, 3), "d" -> Row(Seq(null), null))) ::
Row(Map("e" -> null)) ::
Row(Map("f" -> Row(null, null))) :: Nil
)
checkAnswer(
sql("select map['a'].field1, map['c'].field2 from jsonWithComplexMap"),
Row(Seq(1, 2, 3, null), null) ::
Row(null, null) ::
Row(null, 4) ::
Row(null, 3) ::
Row(null, null) ::
Row(null, null) :: Nil
)
}
// Dot notation must reach through array elements into struct fields (SPARK-2096).
test("SPARK-2096 Correctly parse dot notations") {
val jsonDF = sqlContext.read.json(complexFieldAndType2)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql("select arrayOfStruct[0].field1, arrayOfStruct[0].field2 from jsonTable"),
Row(true, "str1")
)
checkAnswer(
sql(
"""
|select complexArrayOfStruct[0].field1[1].inner2[0], complexArrayOfStruct[1].field2[0][1]
|from jsonTable
""".stripMargin),
Row("str2", 6)
)
}
// Deeply nested array indexing, including arrays of arrays of structs (SPARK-3390).
test("SPARK-3390 Complex arrays") {
val jsonDF = sqlContext.read.json(complexFieldAndType2)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql(
"""
|select arrayOfArray1[0][0][0], arrayOfArray1[1][0][1], arrayOfArray1[1][1][0]
|from jsonTable
""".stripMargin),
Row(5, 7, 8)
)
checkAnswer(
sql(
"""
|select arrayOfArray2[0][0][0].inner1, arrayOfArray2[1][0],
|arrayOfArray2[1][1][1].inner2[0], arrayOfArray2[2][0][0].inner3[0][0].inner4
|from jsonTable
""".stripMargin),
Row("str1", Nil, "str4", 2)
)
}
// A top-level JSON array is flattened: each element becomes one row (SPARK-3308).
test("SPARK-3308 Read top level JSON arrays") {
val jsonDF = sqlContext.read.json(jsonArray)
jsonDF.registerTempTable("jsonTable")
checkAnswer(
sql(
"""
|select a, b, c
|from jsonTable
""".stripMargin),
Row("str_a_1", null, null) ::
Row("str_a_2", null, null) ::
Row(null, "str_b_3", null) ::
Row("str_a_4", "str_b_4", "str_c_4") :: Nil
)
}
// Unparseable records are surfaced in the configured corrupt-record column (here renamed
// to "_unparsed" via SQLConf) instead of failing the read; well-formed rows keep it null.
test("Corrupt records") {
// Test if we can query corrupt records.
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
withTempTable("jsonTable") {
val jsonDF = sqlContext.read.json(corruptRecords)
jsonDF.registerTempTable("jsonTable")
val schema = StructType(
StructField("_unparsed", StringType, true) ::
StructField("a", StringType, true) ::
StructField("b", StringType, true) ::
StructField("c", StringType, true) :: Nil)
assert(schema === jsonDF.schema)
// In HiveContext, backticks should be used to access columns starting with a underscore.
checkAnswer(
sql(
"""
|SELECT a, b, c, _unparsed
|FROM jsonTable
""".stripMargin),
Row(null, null, null, "{") ::
Row(null, null, null, "") ::
Row(null, null, null, """{"a":1, b:2}""") ::
Row(null, null, null, """{"a":{, b:3}""") ::
Row("str_a_4", "str_b_4", "str_c_4", null) ::
Row(null, null, null, "]") :: Nil
)
// Filtering on the corrupt column isolates the single valid record...
checkAnswer(
sql(
"""
|SELECT a, b, c
|FROM jsonTable
|WHERE _unparsed IS NULL
""".stripMargin),
Row("str_a_4", "str_b_4", "str_c_4")
)
// ...and, inverted, recovers the raw text of every malformed record.
checkAnswer(
sql(
"""
|SELECT _unparsed
|FROM jsonTable
|WHERE _unparsed IS NOT NULL
""".stripMargin),
Row("{") ::
Row("") ::
Row("""{"a":1, b:2}""") ::
Row("""{"a":{, b:3}""") ::
Row("]") :: Nil
)
}
}
}
// Null elements at every nesting level of (nested) arrays must be preserved (SPARK-4068).
test("SPARK-4068: nulls in arrays") {
val jsonDF = sqlContext.read.json(nullsInArrays)
jsonDF.registerTempTable("jsonTable")
val schema = StructType(
StructField("field1",
ArrayType(ArrayType(ArrayType(ArrayType(StringType, true), true), true), true), true) ::
StructField("field2",
ArrayType(ArrayType(
StructType(StructField("Test", LongType, true) :: Nil), true), true), true) ::
StructField("field3",
ArrayType(ArrayType(
StructType(StructField("Test", StringType, true) :: Nil), true), true), true) ::
StructField("field4",
ArrayType(ArrayType(ArrayType(LongType, true), true), true), true) :: Nil)
assert(schema === jsonDF.schema)
checkAnswer(
sql(
"""
|SELECT field1, field2, field3, field4
|FROM jsonTable
""".stripMargin),
Row(Seq(Seq(null), Seq(Seq(Seq("Test")))), null, null, null) ::
Row(null, Seq(null, Seq(Row(1))), null, null) ::
Row(null, null, Seq(Seq(null), Seq(Row("2"))), null) ::
Row(null, null, null, Seq(Seq(null, Seq(1, 2, 3)))) :: Nil
)
}
// Round-trip via DataFrame.toJSON: serialize rows to JSON strings, read them back, and
// query primitive, array, BigDecimal, nested-array and struct fields (SPARK-4228).
test("SPARK-4228 DataFrame to JSON") {
val schema1 = StructType(
StructField("f1", IntegerType, false) ::
StructField("f2", StringType, false) ::
StructField("f3", BooleanType, false) ::
StructField("f4", ArrayType(StringType), nullable = true) ::
StructField("f5", IntegerType, true) :: Nil)
val rowRDD1 = unparsedStrings.map { r =>
val values = r.split(",").map(_.trim)
// f5 is optional: non-numeric fourth column becomes null.
val v5 = try values(3).toInt catch {
case _: NumberFormatException => null
}
Row(values(0).toInt, values(1), values(2).toBoolean, r.split(",").toList, v5)
}
val df1 = sqlContext.createDataFrame(rowRDD1, schema1)
df1.registerTempTable("applySchema1")
val df2 = df1.toDF
val result = df2.toJSON.collect()
// scalastyle:off
assert(result(0) === "{\\"f1\\":1,\\"f2\\":\\"A1\\",\\"f3\\":true,\\"f4\\":[\\"1\\",\\" A1\\",\\" true\\",\\" null\\"]}")
assert(result(3) === "{\\"f1\\":4,\\"f2\\":\\"D4\\",\\"f3\\":true,\\"f4\\":[\\"4\\",\\" D4\\",\\" true\\",\\" 2147483644\\"],\\"f5\\":2147483644}")
// scalastyle:on
val schema2 = StructType(
StructField("f1", StructType(
StructField("f11", IntegerType, false) ::
StructField("f12", BooleanType, false) :: Nil), false) ::
StructField("f2", MapType(StringType, IntegerType, true), false) :: Nil)
val rowRDD2 = unparsedStrings.map { r =>
val values = r.split(",").map(_.trim)
val v4 = try values(3).toInt catch {
case _: NumberFormatException => null
}
Row(Row(values(0).toInt, values(2).toBoolean), Map(values(1) -> v4))
}
val df3 = sqlContext.createDataFrame(rowRDD2, schema2)
df3.registerTempTable("applySchema2")
val df4 = df3.toDF
val result2 = df4.toJSON.collect()
assert(result2(1) === "{\\"f1\\":{\\"f11\\":2,\\"f12\\":false},\\"f2\\":{\\"B2\\":null}}")
assert(result2(3) === "{\\"f1\\":{\\"f11\\":4,\\"f12\\":true},\\"f2\\":{\\"D4\\":2147483644}}")
val jsonDF = sqlContext.read.json(primitiveFieldAndType)
val primTable = sqlContext.read.json(jsonDF.toJSON)
primTable.registerTempTable("primativeTable")
checkAnswer(
sql("select * from primativeTable"),
Row(new java.math.BigDecimal("92233720368547758070"),
true,
1.7976931348623157E308,
10,
21474836470L,
"this is a simple string.")
)
val complexJsonDF = sqlContext.read.json(complexFieldAndType1)
val compTable = sqlContext.read.json(complexJsonDF.toJSON)
compTable.registerTempTable("complexTable")
// Access elements of a primitive array.
checkAnswer(
sql("select arrayOfString[0], arrayOfString[1], arrayOfString[2] from complexTable"),
Row("str1", "str2", null)
)
// Access an array of null values.
checkAnswer(
sql("select arrayOfNull from complexTable"),
Row(Seq(null, null, null, null))
)
// Access elements of a BigInteger array (we use DecimalType internally).
checkAnswer(
sql("select arrayOfBigInteger[0], arrayOfBigInteger[1], arrayOfBigInteger[2] " +
" from complexTable"),
Row(new java.math.BigDecimal("922337203685477580700"),
new java.math.BigDecimal("-922337203685477580800"), null)
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray1[0], arrayOfArray1[1] from complexTable"),
Row(Seq("1", "2", "3"), Seq("str1", "str2"))
)
// Access elements of an array of arrays.
checkAnswer(
sql("select arrayOfArray2[0], arrayOfArray2[1] from complexTable"),
Row(Seq(1.0, 2.0, 3.0), Seq(1.1, 2.1, 3.1))
)
// Access elements of an array inside a filed with the type of ArrayType(ArrayType).
checkAnswer(
sql("select arrayOfArray1[1][1], arrayOfArray2[1][1] from complexTable"),
Row("str2", 2.1)
)
// Access a struct and fields inside of it.
checkAnswer(
sql("select struct, struct.field1, struct.field2 from complexTable"),
Row(
Row(true, new java.math.BigDecimal("92233720368547758070")),
true,
new java.math.BigDecimal("92233720368547758070")) :: Nil
)
// Access an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1, structWithArrayFields.field2 from complexTable"),
Row(Seq(4, 5, 6), Seq("str1", "str2"))
)
// Access elements of an array field of a struct.
checkAnswer(
sql("select structWithArrayFields.field1[1], structWithArrayFields.field2[3] " +
"from complexTable"),
Row(5, null)
)
}
// JSONRelation equality must depend on (input, samplingRatio, schema) so that plan
// caching/sameResult works: relations differing in any component are unequal.
test("JSONRelation equality test") {
val relation0 = new JSONRelation(
Some(empty),
1.0,
Some(StructType(StructField("a", IntegerType, true) :: Nil)),
None, None)(sqlContext)
val logicalRelation0 = LogicalRelation(relation0)
val relation1 = new JSONRelation(
Some(singleRow),
1.0,
Some(StructType(StructField("a", IntegerType, true) :: Nil)),
None, None)(sqlContext)
val logicalRelation1 = LogicalRelation(relation1)
// Same input/schema as relation1, different samplingRatio: still considered equal
// (sampling only affects inference, not the data produced).
val relation2 = new JSONRelation(
Some(singleRow),
0.5,
Some(StructType(StructField("a", IntegerType, true) :: Nil)),
None, None)(sqlContext)
val logicalRelation2 = LogicalRelation(relation2)
val relation3 = new JSONRelation(
Some(singleRow),
1.0,
Some(StructType(StructField("b", IntegerType, true) :: Nil)),
None, None)(sqlContext)
val logicalRelation3 = LogicalRelation(relation3)
assert(relation0 !== relation1)
assert(!logicalRelation0.sameResult(logicalRelation1),
s"$logicalRelation0 and $logicalRelation1 should be considered not having the same result.")
assert(relation1 === relation2)
assert(logicalRelation1.sameResult(logicalRelation2),
s"$logicalRelation1 and $logicalRelation2 should be considered having the same result.")
assert(relation1 !== relation3)
assert(!logicalRelation1.sameResult(logicalRelation3),
s"$logicalRelation1 and $logicalRelation3 should be considered not having the same result.")
assert(relation2 !== relation3)
assert(!logicalRelation2.sameResult(logicalRelation3),
s"$logicalRelation2 and $logicalRelation3 should be considered not having the same result.")
// Two ResolvedDataSources built from identical options must also compare equal.
withTempPath(dir => {
val path = dir.getCanonicalFile.toURI.toString
sparkContext.parallelize(1 to 100)
.map(i => s"""{"a": 1, "b": "str$i"}""").saveAsTextFile(path)
val d1 = ResolvedDataSource(
sqlContext,
userSpecifiedSchema = None,
partitionColumns = Array.empty[String],
provider = classOf[DefaultSource].getCanonicalName,
options = Map("path" -> path))
val d2 = ResolvedDataSource(
sqlContext,
userSpecifiedSchema = None,
partitionColumns = Array.empty[String],
provider = classOf[DefaultSource].getCanonicalName,
options = Map("path" -> path))
assert(d1 === d2)
})
}
// Schema inference over an empty RDD must yield an empty StructType, not throw (SPARK-6245).
test("SPARK-6245 JsonRDD.inferSchema on empty RDD") {
// This is really a test that it doesn't throw an exception
val emptySchema = InferSchema(empty, 1.0, "")
assert(StructType(Seq()) === emptySchema)
}
// MapType columns read from JSON must survive a write/read through parquet (SPARK-7565);
// the corrupt-record column is renamed so corruptRecords can also round-trip.
test("SPARK-7565 MapType in JsonRDD") {
withSQLConf(SQLConf.COLUMN_NAME_OF_CORRUPT_RECORD.key -> "_unparsed") {
withTempDir { dir =>
val schemaWithSimpleMap = StructType(
StructField("map", MapType(StringType, IntegerType, true), false) :: Nil)
val df = sqlContext.read.schema(schemaWithSimpleMap).json(mapType1)
val path = dir.getAbsolutePath
df.write.mode("overwrite").parquet(path)
// order of MapType is not defined
assert(sqlContext.read.parquet(path).count() == 5)
val df2 = sqlContext.read.json(corruptRecords)
df2.write.mode("overwrite").parquet(path)
checkAnswer(sqlContext.read.parquet(path), df2.collect())
}
}
}
// Records containing only empty structs infer to an empty schema (SPARK-8093).
test("SPARK-8093 Erase empty structs") {
val emptySchema = InferSchema(emptyRecords, 1.0, "")
assert(StructType(Seq()) === emptySchema)
}
// Hive-style partition directories (d1=1/col1=abc) become queryable partition columns.
test("JSON with Partition") {
// Writes rdd under parent/<partName>=<partValue> and returns that directory.
def makePartition(rdd: RDD[String], parent: File, partName: String, partValue: Any): File = {
val p = new File(parent, s"$partName=${partValue.toString}")
rdd.saveAsTextFile(p.getCanonicalPath)
p
}
withTempPath(root => {
val d1 = new File(root, "d1=1")
// root/dt=1/col1=abc
val p1_col1 = makePartition(
sparkContext.parallelize(2 to 5).map(i => s"""{"a": 1, "b": "str$i"}"""),
d1,
"col1",
"abc")
// root/dt=1/col1=abd
val p2 = makePartition(
sparkContext.parallelize(6 to 10).map(i => s"""{"a": 1, "b": "str$i"}"""),
d1,
"col1",
"abd")
sqlContext.read.json(root.getAbsolutePath).registerTempTable("test_myjson_with_part")
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abc'"), Row(4))
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1 and col1='abd'"), Row(5))
checkAnswer(sql(
"SELECT count(a) FROM test_myjson_with_part where d1 = 1"), Row(9))
})
}
// Ensures current JSON reader can parse JSON emitted by Spark 1.2.2 through the current
// version, across all supported data types (including UDTs), with a fixed schema.
test("backward compatibility") {
// This test we make sure our JSON support can read JSON data generated by previous version
// of Spark generated through toJSON method and JSON data source.
// The data is generated by the following program.
// Here are a few notes:
// - Spark 1.5.0 cannot save timestamp data. So, we manually added timestamp field (col13)
// in the JSON object.
// - For Spark before 1.5.1, we do not generate UDTs. So, we manually added the UDT value to
// JSON objects generated by those Spark versions (col17).
// - If the type is NullType, we do not write data out.
// Create the schema.
val struct =
StructType(
StructField("f1", FloatType, true) ::
StructField("f2", ArrayType(BooleanType), true) :: Nil)
val dataTypes =
Seq(
StringType, BinaryType, NullType, BooleanType,
ByteType, ShortType, IntegerType, LongType,
FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
DateType, TimestampType,
ArrayType(IntegerType), MapType(StringType, LongType), struct,
new MyDenseVectorUDT())
val fields = dataTypes.zipWithIndex.map { case (dataType, index) =>
StructField(s"col$index", dataType, nullable = true)
}
val schema = StructType(fields)
// One constant per dataTypes entry after col0 (col0 holds the Spark version string).
val constantValues =
Seq(
"a string in binary".getBytes("UTF-8"),
null,
true,
1.toByte,
2.toShort,
3,
Long.MaxValue,
0.25.toFloat,
0.75,
new java.math.BigDecimal(s"1234.23456"),
new java.math.BigDecimal(s"1.23456"),
java.sql.Date.valueOf("2015-01-01"),
java.sql.Timestamp.valueOf("2015-01-01 23:50:59.123"),
Seq(2, 3, 4),
Map("a string" -> 2000L),
Row(4.75.toFloat, Seq(false, true)),
new MyDenseVector(Array(0.25, 2.25, 4.25)))
val data =
Row.fromSeq(Seq("Spark " + sqlContext.sparkContext.version) ++ constantValues) :: Nil
// Data generated by previous versions.
// scalastyle:off
val existingJSONData =
"""{"col0":"Spark 1.2.2","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.3.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.4.1","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"2015-01-01","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" ::
"""{"col0":"Spark 1.5.0","col1":"YSBzdHJpbmcgaW4gYmluYXJ5","col3":true,"col4":1,"col5":2,"col6":3,"col7":9223372036854775807,"col8":0.25,"col9":0.75,"col10":1234.23456,"col11":1.23456,"col12":"16436","col13":"2015-01-01 23:50:59.123","col14":[2,3,4],"col15":{"a string":2000},"col16":{"f1":4.75,"f2":[false,true]},"col17":[0.25,2.25,4.25]}""" :: Nil
// scalastyle:on
// Generate data for the current version.
val df = sqlContext.createDataFrame(sqlContext.sparkContext.parallelize(data, 1), schema)
withTempPath { path =>
df.write.format("json").mode("overwrite").save(path.getCanonicalPath)
// df.toJSON will convert internal rows to external rows first and then generate
// JSON objects. While, df.write.format("json") will write internal rows directly.
val allJSON =
existingJSONData ++
df.toJSON.collect() ++
sparkContext.textFile(path.getCanonicalPath).collect()
Utils.deleteRecursively(path)
sparkContext.parallelize(allJSON, 1).saveAsTextFile(path.getCanonicalPath)
// Read data back with the schema specified.
// Two trailing current-version rows: one from toJSON, one from the json writer.
val col0Values =
Seq(
"Spark 1.2.2",
"Spark 1.3.1",
"Spark 1.3.1",
"Spark 1.4.1",
"Spark 1.4.1",
"Spark 1.5.0",
"Spark 1.5.0",
"Spark " + sqlContext.sparkContext.version,
"Spark " + sqlContext.sparkContext.version)
val expectedResult = col0Values.map { v =>
Row.fromSeq(Seq(v) ++ constantValues)
}
checkAnswer(
sqlContext.read.format("json").schema(schema).load(path.getCanonicalPath),
expectedResult
)
}
}
}
| pronix/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala | Scala | apache-2.0 | 48,610 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical._
import org.apache.flink.table.planner.plan.optimize.program._
import org.apache.flink.table.planner.plan.rules.FlinkBatchRuleSets
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.planner.utils.TableTestBase
import com.google.common.collect.ImmutableSet
import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.rel.rules._
import org.apache.calcite.tools.RuleSets
import org.junit.{Before, Test}
/**
* Test for [[FlinkAggregateRemoveRule]].
*
* Plan tests: each case feeds a SQL query through a custom optimizer program and checks
* (via golden plan files) whether the aggregate node was removed. The rule may only fire
* when the group key is provably unique (see the uniqueKeys statistics on MyTable2/3).
*/
class FlinkAggregateRemoveRuleTest extends TableTestBase {
private val util = batchTestUtil()
@Before
def setup(): Unit = {
// Build the optimizer pipeline by hand; program order is significant.
val programs = new FlinkChainedProgram[BatchOptimizeContext]()
programs.addLast(
// rewrite sub-queries to joins
"subquery_rewrite",
FlinkGroupProgramBuilder.newBuilder[BatchOptimizeContext]
.addProgram(FlinkHepRuleSetProgramBuilder.newBuilder
.setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE)
.setHepMatchOrder(HepMatchOrder.BOTTOM_UP)
.add(FlinkBatchRuleSets.SEMI_JOIN_RULES)
.build(), "rewrite sub-queries to semi/anti join")
.build())
programs.addLast(
"rules",
// use volcano planner because
// rel.getCluster.getPlanner is volcano planner used in FlinkAggregateRemoveRule
FlinkVolcanoProgramBuilder.newBuilder
.add(RuleSets.ofList(
ReduceExpressionsRule.FILTER_INSTANCE,
FlinkAggregateExpandDistinctAggregatesRule.INSTANCE,
FilterCalcMergeRule.INSTANCE,
ProjectCalcMergeRule.INSTANCE,
FilterToCalcRule.INSTANCE,
ProjectToCalcRule.INSTANCE,
FlinkCalcMergeRule.INSTANCE,
FlinkAggregateRemoveRule.INSTANCE,
DecomposeGroupingSetsRule.INSTANCE,
AggregateReduceGroupingRule.INSTANCE,
FlinkLogicalAggregate.BATCH_CONVERTER,
FlinkLogicalCalc.CONVERTER,
FlinkLogicalJoin.CONVERTER,
FlinkLogicalValues.CONVERTER,
FlinkLogicalExpand.CONVERTER,
FlinkLogicalLegacyTableSourceScan.CONVERTER,
FlinkLogicalLegacySink.CONVERTER))
.setRequiredOutputTraits(Array(FlinkConventions.LOGICAL))
.build())
util.replaceBatchProgram(programs)
// MyTable1: no statistics, so its group keys are never known to be unique.
util.addTableSource[(Int, Int, String)]("MyTable1", 'a, 'b, 'c)
// MyTable2/MyTable3: column "a" declared unique, enabling aggregate removal.
util.addTableSource("MyTable2",
Array[TypeInformation[_]](Types.INT, Types.INT, Types.STRING),
Array("a", "b", "c"),
FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("a"))).build()
)
util.addTableSource("MyTable3",
Array[TypeInformation[_]](Types.INT, Types.INT, Types.STRING, Types.STRING),
Array("a", "b", "c", "d"),
FlinkStatistic.builder().uniqueKeys(ImmutableSet.of(ImmutableSet.of("a"))).build()
)
}
@Test
def testAggRemove_GroupKeyIsNotUnique(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, MAX(c) from MyTable1 GROUP BY a")
}
@Test
def testAggRemove_WithoutFilter1(): Unit = {
util.verifyPlan("SELECT a, b + 1, c, s FROM (" +
"SELECT a, MIN(b) AS b, SUM(b) AS s, MAX(c) AS c FROM MyTable2 GROUP BY a)")
}
@Test
def testAggRemove_WithoutFilter2(): Unit = {
util.verifyPlan("SELECT a, SUM(b) AS s FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_WithoutGroupBy1(): Unit = {
// can not remove agg
util.verifyPlan("SELECT MAX(a), SUM(b), MIN(c) FROM MyTable2")
}
@Test
def testAggRemove_WithoutGroupBy2(): Unit = {
util.verifyPlan("SELECT MAX(a), SUM(b), MIN(c) FROM (VALUES (1, 2, 3)) T(a, b, c)")
}
@Test
def testAggRemove_WithoutGroupBy3(): Unit = {
// can not remove agg
util.verifyPlan("SELECT * FROM MyTable2 WHERE EXISTS (SELECT SUM(a) FROM MyTable1 WHERE 1=2)")
}
@Test
def testAggRemove_WithoutGroupBy4(): Unit = {
// can not remove agg
util.verifyPlan("SELECT SUM(a) FROM (SELECT a FROM MyTable2 WHERE 1=2)")
}
@Test
def testAggRemove_WithoutAggCall(): Unit = {
util.verifyPlan("SELECT a, b FROM MyTable2 GROUP BY a, b")
}
@Test
def testAggRemove_WithFilter(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, MIN(c) FILTER (WHERE b > 0), MAX(b) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_Count(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, COUNT(c) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_CountStar(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, COUNT(*) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_GroupSets1(): Unit = {
// a is unique
util.verifyPlan("SELECT a, SUM(b) AS s FROM MyTable3 GROUP BY GROUPING SETS((a, c), (a, d))")
}
@Test
def testAggRemove_GroupSets2(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, SUM(b) AS s FROM MyTable3 GROUP BY GROUPING SETS((a, c), (a), ())")
}
@Test
def testAggRemove_Rollup(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, SUM(b) AS s FROM MyTable3 GROUP BY ROLLUP(a, c, d)")
}
@Test
def testAggRemove_Cube(): Unit = {
// can not remove agg
util.verifyPlan("SELECT a, SUM(b) AS s FROM MyTable3 GROUP BY CUBE(a, c, d)")
}
@Test
def testAggRemove_SingleDistinctAgg1(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_SingleDistinctAgg2(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c) FROM MyTable2 GROUP BY a, b")
}
@Test
def testAggRemove_SingleDistinctAgg_WithNonDistinctAgg1(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT b), SUM(b) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_SingleDistinctAgg_WithNonDistinctAgg2(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT b), SUM(b) FROM MyTable2 GROUP BY a, c")
}
@Test
def testAggRemove_SingleDistinctAgg_WithNonDistinctAgg3(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c), SUM(b) FROM MyTable3 GROUP BY a")
}
@Test
def testAggRemove_SingleDistinctAgg_WithNonDistinctAgg4(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c), SUM(b) FROM MyTable3 GROUP BY a, d")
}
@Test
def testAggRemove_MultiDistinctAggs1(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT b), SUM(DISTINCT b) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_MultiDistinctAggs2(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c), SUM(DISTINCT b) FROM MyTable3 GROUP BY a, d")
}
@Test
def testAggRemove_MultiDistinctAggs3(): Unit = {
util.verifyPlan(
"SELECT a, SUM(DISTINCT b), MAX(DISTINCT b), MIN(DISTINCT c) FROM MyTable2 GROUP BY a")
}
@Test
def testAggRemove_MultiDistinctAggs_WithNonDistinctAgg1(): Unit = {
util.verifyPlan("SELECT a, COUNT(DISTINCT c), SUM(b) FROM MyTable3 GROUP BY a, d")
}
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkAggregateRemoveRuleTest.scala | Scala | apache-2.0 | 7,986 |
package linkchecker.design
import akka.actor.Actor
import scala.concurrent.duration._
import akka.actor.ReceiveTimeout
import akka.actor.ActorSystem
import akka.actor.Props
object MainActor {
  /** Props factory for [[MainActor]]; keeps actor construction encapsulated. */
  def props = Props[MainActor]
}
class MainActor extends Actor {
  import Receptionist._

  // Child actor that performs the actual link checking.
  val receptionist = context.actorOf(Receptionist.props, "receptionist")

  // Kick off a batch of checks as soon as this actor starts.
  receptionist ! Get("http://www.google.it")
  receptionist ! Get("http://www.google.it/1")
  receptionist ! Get("http://www.google.it/2")
  receptionist ! Get("http://www.google.it/3")
  receptionist ! Get("http://www.google.it/4")
  //receptionist ! Get("http://www.repubblica.it")

  // Stop the whole run if no message arrives within a minute.
  context.setReceiveTimeout(60.seconds)

  def receive = {
    case Result(url, set) =>
      // Print the collected links for one crawled URL, sorted for readability.
      println(set.toVector.sorted.mkString(s"Results for '$url':\\n", "\\n", "\\n"))
    case Failed(url) =>
      println(s"Failed to fetch '$url'\\n")
    case ReceiveTimeout =>
      // No activity for 60s: shut this actor (and its children) down.
      context.stop(self)
  }
}
object Main {
  /** Application entry point: boots the actor system and starts the top-level [[MainActor]]. */
  def main(args: Array[String]): Unit = {
    val system = ActorSystem("LinkChecker")
    // The ActorRef is not needed here; MainActor stops itself on timeout.
    system.actorOf(MainActor.props)
  }
} | fabiofumarola/akka-tutorial | src/main/scala/linkchecker/design/MainActor.scala | Scala | cc0-1.0 | 1,096
package com.monovore.decline
import cats.syntax.all._
import com.monovore.decline.Usage._
private[decline] case class Usage(opts: Many[Options] = Prod(), args: Many[Args] = Prod()) {

  /** Renders every combination of an options rendering with an args rendering. */
  def show: List[String] =
    showOptions(opts).flatMap { optPart =>
      showArgs(args).map(argPart => concat(List(optPart, argPart)))
    }
}
private[decline] object Usage {
// TODO: convert arg list representation to 'normal form'... ie. the most restrictive usage
// text we can write that captures all uses
// [<a>] [<b>] --> [<a> [<b>]]
// [<a>] <b> --> <a> <b>
// <a>... <b> -> none
// <a>... [<b>] -> <a...>
// <a>... <b>... -> none
// <a> (<b> | <c> <d>) -> <a> <b>, <a> <c> <d>
// (<a> | <b> <c>) <d> -> <b> <c> <d>
// <a> (<b> | <c> <d>) ->
// command <a> -> <a> command ????
// command [<a>] -> <a> command ????
// command command -> none
// <a>... command -> none
// [<a>...] command -> none (too many!)
// [<a> | <b> <c>] --> [<a> | <b> <c>]
// [<a> | <b> <c>] <d> --> <b> <c> <d>
// if i am mandatory, everyone to the left is interpreted 'as big as possible'
// if i am repeating, everyone on the right is interpreted as 'empty or fail'
// A usage fragment: either a single element, a product (all parts together),
// or a sum (alternatives).
sealed trait Many[A] {
  // Widen to a one-element product/sum so `and`/`or` can combine uniformly.
  def asProd: Prod[A] = Prod(this)
  def asSum: Sum[A] = Sum(this)
}
// A single usage element; renders as the element itself.
case class Just[A](value: A) extends Many[A] {
  override def toString: String = value.toString
}
// All parts must appear together; renders space-separated.
case class Prod[A](allOf: Many[A]*) extends Many[A] {
  override def asProd: Prod[A] = this
  // Concatenates two products into one.
  def and(other: Prod[A]): Prod[A] = Prod(allOf ++ other.allOf: _*)
  override def toString: String = allOf.mkString(" ")
}
// Alternatives; renders as `[a | b]` when one alternative is an empty product
// (the whole group is optional), otherwise as `(a | b)`.
case class Sum[A](anyOf: Many[A]*) extends Many[A] {
  override def asSum: Sum[A] = this
  // Merges two sums of alternatives into one.
  def or(other: Sum[A]): Sum[A] = Sum(anyOf ++ other.anyOf: _*)
  override def toString: String =
    asOptional(anyOf.toList)
      .map(opt => opt.mkString("[", " | ", "]"))
      .getOrElse { anyOf.mkString("(", " | ", ")") }
}
// --foo bar [--baz | -quux <foo> [--quux foo]...]
// A rendered option occurrence, e.g. `--foo <bar>`.
sealed trait Options
object Options {
  // Appears exactly once.
  case class Required(text: String) extends Options
  // May appear one or more times.
  case class Repeated(text: String) extends Options
}
// <path> <path> [subcommand]
// <path> [<string> [<integer>...]]
// A rendered positional argument or subcommand, e.g. `<path>` or `<path>...`.
sealed trait Args
object Args {
  case class Required(metavar: String) extends Args
  case class Repeated(metavar: String) extends Args
  // A named subcommand.
  case class Command(name: String) extends Args
}
def concat(all: Iterable[String]) = all.filter { _.nonEmpty }.mkString(" ")
// Usage for an option that appears exactly once; hidden options
// (non-Normal visibility) render as nothing.
def single(opt: Opt[_]) = opt match {
  case Opt.Flag(names, _, Visibility.Normal) =>
    List(Usage(opts = Just(Options.Required(s"${names.head}"))))
  case Opt.Regular(names, metavar, _, Visibility.Normal) =>
    List(Usage(opts = Just(Options.Required(s"${names.head} <$metavar>"))))
  case Opt.Argument(metavar) =>
    List(Usage(args = Just(Args.Required(s"<$metavar>"))))
  case _ => List()
}
// Usage for an option that may appear multiple times; mirrors `single`
// but marks the occurrence as Repeated.
def repeated(opt: Opt[_]) = opt match {
  case Opt.Flag(names, _, Visibility.Normal) =>
    List(Usage(opts = Just(Options.Repeated(s"${names.head}"))))
  case Opt.Regular(names, metavar, _, Visibility.Normal) =>
    List(Usage(opts = Just(Options.Repeated(s"${names.head} <$metavar>"))))
  case Opt.Argument(metavar) =>
    List(Usage(args = Just(Args.Repeated(s"<$metavar>"))))
  case _ => List()
}
/**
 * If `list` contains an empty product (an alternative that is trivially
 * satisfiable), the whole group is optional: returns the remaining
 * non-empty alternatives. Returns None otherwise.
 */
def asOptional[A](list: List[Many[A]]): Option[List[Many[A]]] = list match {
  case Nil => None
  case Prod() :: tail => Some(tail.filterNot(_ == Prod()))
  case head :: tail => asOptional(tail).map(head :: _)
}
// Renders an args tree to its alternative textual forms.
// NOTE: case order matters — the single-element Sum/Prod cases must precede
// the varargs cases, which otherwise also match one element.
def showArgs(args: Many[Args]): List[String] = args match {
  case Sum() => List()
  case Sum(single) => showArgs(single)
  case Prod(single) => showArgs(single)
  // A product renders as every combination of its parts, space-joined.
  case Prod(many @ _*) => many.toList.traverse(showArgs).map(concat)
  case Sum(many @ _*) =>
    asOptional(many.toList)
      .map(opt => opt.traverse(showArgs).map { _.mkString("[", " | ", "]") })
      .getOrElse(many.flatMap(showArgs).toList)
  case Just(Args.Required(meta)) => List(meta)
  case Just(Args.Repeated(meta)) => List(s"$meta...")
  case Just(Args.Command(command)) => List(command)
}
// Renders an options tree to its alternative textual forms.
def showOptions(opts: Many[Options]): List[String] = opts match {
  case Sum(alternatives @ _*) => {
    asOptional(alternatives.toList)
      .map {
        // A lone repeated option inside an optional group collapses to `[a]...`.
        case Seq(Just(Options.Repeated(a))) => List(s"[$a]...")
        case filtered => filtered.traverse(showOptions).map(_.mkString("[", " | ", "]"))
      }
      .getOrElse { alternatives.toList.flatMap(showOptions) }
  }
  case Just(Options.Required(a)) => List(a)
  // Required at least once, then optionally repeated.
  case Just(Options.Repeated(a)) => List(s"$a [$a]...")
  case Prod(items @ _*) => items.toList.traverse(showOptions).map(concat)
}
// Translates an Opts tree into the list of possible usages.
// An empty result (Opts.Missing) means "no valid invocation".
def fromOpts(opts: Opts[_]): List[Usage] = opts match {
  case Opts.Pure(_) => List(Usage())
  case Opts.Missing => Nil
  case Opts.HelpFlag(a) => fromOpts(a)
  case Opts.Validate(more, _) => fromOpts(more)
  case Opts.Single(opt) => single(opt)
  case Opts.Repeated(opt) => repeated(opt)
  case Opts.Subcommand(command) => List(Usage(args = Just(Args.Command(command.name))))
  // Applicative combination: the cross product of both sides' usages.
  case Opts.App(left, right) =>
    for {
      l <- fromOpts(left)
      r <- fromOpts(right)
    } yield Usage(l.opts.asProd and r.opts.asProd, l.args.asProd and r.args.asProd)
  // Alternatives: merge the last usage of the left with the first of the right
  // when only one of opts/args differs, to render a compact `a | b`.
  case Opts.OrElse(left, right) =>
    (fromOpts(left).reverse, fromOpts(right)) match {
      case (Usage(leftOpts, Prod()) :: ls, Usage(rightOpts, Prod()) :: rs) =>
        ls.reverse ++ List(Usage(opts = leftOpts.asSum or rightOpts.asSum)) ++ rs
      case (Usage(Prod(), leftArgs) :: ls, Usage(Prod(), rightArgs) :: rs) =>
        ls.reverse ++ List(Usage(args = leftArgs.asSum or rightArgs.asSum)) ++ rs
      case (ls, rs) => ls.reverse ++ rs
    }
  // Environment variables don't appear in the usage line.
  case Opts.Env(_, _, _) => List(Usage())
}
}
| bkirwi/decline | core/shared/src/main/scala/com/monovore/decline/Usage.scala | Scala | apache-2.0 | 5,900 |
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Tcp.OutgoingConnection
import akka.stream.scaladsl.{Flow, Sink, Source, Tcp}
import akka.util.ByteString
import scala.concurrent.Future
// Demo of akka-streams materialized-value types; the vals are intentionally
// unused — only their type annotations matter.
object MaterializedValues extends App {
  implicit val system = ActorSystem("twitter")
  implicit val materializer = ActorMaterializer()

  // Emits ints; materializes no useful value (NotUsed).
  val intSource: Source[Int, NotUsed] = Source(List(1, 2, 3))

  // Inputs and outputs are ByteStrings; the materialized value
  // is a Future[OutgoingConnection].
  val killSwitchFlow: Flow[ByteString, ByteString, Future[OutgoingConnection]] =
    Tcp().outgoingConnection("localhost", 8080)

  // First type parameter is the input element, second the materialized value.
  val intSink: Sink[Int, Future[Int]] = Sink.fold[Int, Int](0)(_ + _)
}
| callicles/akka-streams-intro | src/main/scala/MaterializedValues.scala | Scala | mit | 875 |
package com.wacai.config.annotation
import java.io.{File, PrintWriter}
import com.typesafe.config.ConfigFactory
import annotation.switch
import concurrent.duration._
import reflect.macros.whitebox
class Macro(val c: whitebox.Context) {
import Macro._
import c.universe._
import Flag._
// Output directory for generated .conf files, taken from the compiler
// setting `conf.output.dir=...` (falls back to src/main/resources).
// Created on first access if missing.
lazy val outputDir = {
  val f = new File(c.settings
    .find(_.startsWith(OutputDirSettings))
    .map(_.substring(OutputDirSettings.length))
    .getOrElse(DefaultOutputDir))
  if (!f.exists()) f.mkdirs()
  f
}
/**
 * Macro entry point: rewrites the annotated trait so that each initialized
 * val reads its value from a Typesafe Config, and writes a matching
 * `<TraitName>.conf` file with the vals' default values.
 * Aborts unless the annottee is a single trait.
 */
def impl(annottees: c.Expr[Any]*): c.Expr[Any] = {
  val result = annottees.map(_.tree).toList match {
    case (ClassDef(mods, name, a, Template(parents, s, body))) :: Nil if mods.hasFlag(DEFAULTPARAM | TRAIT) =>
      // The generated config file mirrors the trait's structure.
      implicit val out = new PrintWriter(new File(outputDir, s"$name.conf"), "UTF-8")
      try {
        node(0)(s"$name") {
          val imports = q"import scala.collection.JavaConverters._"
          // If a parent is Configurable, reuse its `config`; otherwise
          // embed a reference to the globally loaded config.
          val conf = if (parents exists configurable) {
            q"private val _config = config"
          } else {
            q"private val _config = ${reify(CONFIG).tree}"
          }
          ClassDef(mods, name, a, Template(parents, s, imports :: conf :: body.map {
            case Initialized(vd) => generate(vd, s"$name", 1)
            case t => t
          }))
        }
      } finally out.close()
    case _ =>
      c.abort(c.enclosingPosition, "Annotation is only supported on trait")
  }
  c.Expr[Any](result)
}
// Tree for the SECONDS time unit, spliced into generated Duration reads.
lazy val seconds = reify(SECONDS) tree

// Typechecks a tree and returns its type.
def tpe(t: Tree): Type = c.typecheck(t).tpe

// True when `t` conforms to T.
def is[T: TypeTag](t: Type) = t <:< typeOf[T]

// Wraps a seconds value into a Duration construction tree.
def duration(t: Tree) = q"scala.concurrent.duration.Duration($t, $seconds)"

// True when the (parent) type tree denotes a Configurable.
def configurable(t: Tree): Boolean = is[Configurable](tpe(q"0.asInstanceOf[$t]"))
// Recurses into a nested anonymous class definition (a nested config node),
// rewriting each initialized val one level deeper.
def generate(cd: ClassDef, owner: String, level: Int)(implicit out: PrintWriter): ClassDef = cd match {
  case ClassDef(m, name, a, Template(p, s, body)) =>
    ClassDef(m, name, a, Template(p, s, body map {
      case Initialized(vd) => generate(vd, s"$owner", level + 1)
      case d => d
    }))
  case _ =>
    c.abort(cd.pos, "A anonymous class definition should be here")
}
// Rewrites one initialized val: writes its default value into the .conf file
// and replaces the initializer with a read from `_config` at path `owner.name`.
// Uninitialized or mutable members are rejected.
def generate(vd: ValDef, owner: String, level: Int)(implicit out: PrintWriter): ValDef = vd match {
  case ValDef(mods, _, _, _) if mods.hasFlag(DEFERRED) =>
    c.abort(vd.pos, "value should be initialized")
  case ValDef(mods, _, _, _) if mods.hasFlag(MUTABLE) =>
    c.abort(vd.pos, "var should be val")
  // A val initialized with an anonymous class: a nested config section.
  case ValDef(mods, name, tpt, Block((cd: ClassDef) :: Nil, expr)) =>
    // Un-mangle dots that the parser encoded as $u002E.
    val owner4dot = owner.replaceAll("\\\\$u002E","\\\\.")
    val name4dot = name.toString.replaceAll("\\\\$u002E","\\\\.")
    node(level)(s"$name4dot") {
      ValDef(mods, name, tpt, Block(generate(cd, s"$owner4dot.$name4dot", level) :: Nil, expr))
    }
  // A leaf val: evaluate the default at compile time, emit it, swap in a getter.
  case ValDef(mods, name, tpt, rhs) =>
    try {
      val e = c.eval(c.Expr[Any](Block(q"import scala.concurrent.duration._" :: Nil, rhs)))
      val t = c.typecheck(rhs).tpe
      // Un-mangle hyphens/dots in the config key name.
      val name4tricks = name.toString.replaceAll("\\\\$minus","\\\\-").replaceAll("\\\\$u002E","\\\\.")
      val owner4dot = owner.replaceAll("\\\\$u002E","\\\\.")
      leaf(level)(s"$name4tricks = ${value(t, e)}")
      ValDef(mods, name, tpt, get(t, s"$owner4dot.$name4tricks"))
    } catch {
      case e: IllegalStateException => c.abort(vd.pos, e.getMessage)
      // Evaluation failures leave the val untouched.
      case _: Throwable => vd
    }
  case _ =>
    c.abort(vd.pos, "Unexpect value definition")
}
// Renders a compile-time default value as HOCON text, by its static type:
// Longs as byte sizes, Durations as time spans, collections element-wise.
def value(t: Type, a: Any): String = {
  t match {
    case _ if is[Long](t) => bytes(a.asInstanceOf[Long])
    case _ if is[Duration](t) => time(a.asInstanceOf[Duration])
    case _ if is[List[Long]](t) => a.asInstanceOf[List[Long]].map(bytes).asArray
    case _ if is[List[Duration]](t) => a.asInstanceOf[List[Duration]].map(time).asArray
    case _ if is[List[_]](t) => a.asInstanceOf[List[_]].map(safeString).asArray
    case _ if is[Map[_, _]](t) => a.asInstanceOf[Map[_, _]]
      .map { case (k, v) => s"$k:${safeString(v)}" }
      .asObject
    case _ => safeString(a)
  }
}
// Builds the tree that reads a value of type `t` from `_config` at `path`;
// the accessor must mirror how `value` rendered the default.
def get(t: Type, path: String): Tree = t match {
  case _ if is[Boolean](t) => q"_config.getBoolean($path)"
  case _ if is[Int](t) => q"_config.getInt($path)"
  case _ if is[Long](t) => q"_config.getBytes($path)"
  case _ if is[String](t) => q"_config.getString($path)"
  case _ if is[Double](t) => q"_config.getDouble($path)"
  case _ if is[Duration](t) => duration(q"_config.getDuration($path, $seconds)")
  case _ if is[List[Boolean]](t) => q"_config.getBooleanList($path).asScala.toList"
  case _ if is[List[Int]](t) => q"_config.getIntList($path).asScala.toList"
  case _ if is[List[Long]](t) => q"_config.getBytesList($path).asScala.toList"
  case _ if is[List[String]](t) => q"_config.getStringList($path).asScala.toList"
  case _ if is[List[Double]](t) => q"_config.getDoubleList($path).asScala.toList"
  case _ if is[List[Duration]](t) => q"_config.getDurationList($path, $seconds).asScala.toList.map {l => ${duration(q"l")} }"
  case _ if is[Map[String, String]](t) => q"_config.getObject($path).asScala.map{case(x,y)=>x.toString -> y.unwrapped.toString}.toMap[String,String]"
  case _ => throw new IllegalStateException(s"Unsupported type: $t")
}
// Extractor matching vals that carry an initializer (not DEFERRED).
object Initialized {
  def unapply(t: Tree): Option[ValDef] = t match {
    case v @ ValDef(mods, _, _, _) if !mods.hasFlag(DEFERRED) => Some(v)
    case _ => None
  }
}
}
object Macro {
// Fallback directory for the generated .conf files.
val DefaultOutputDir = "src/main/resources"
// Compiler-settings key prefix selecting the output directory.
val OutputDirSettings = "conf.output.dir="
// Loaded once; referenced by generated code when the trait is not Configurable.
lazy val CONFIG = ConfigFactory.load()
// Indentation unit for the generated .conf file.
private val TAB = " "
// Writes a `name { ... }` section around the output produced by `f`,
// indented to `level`, and returns f's result.
def node[T](level: Int)(name: String)(f: => T)(implicit out: PrintWriter) = {
  out println s"${TAB * level}$name {"
  val r = f
  out println s"${TAB * level}}"
  r
}
// Writes a single `key = value` line, indented to `level`.
def leaf(level: Int)(expr: String)(implicit out: PrintWriter) = {
  out.println(s"${TAB * level}$expr")
}
/**
 * Renders a byte count in HOCON size syntax using the largest unit
 * (B, K, M, G) that divides it exactly, so the rendered text always
 * parses back to exactly `l` bytes.
 *
 * Fixes a truncation bug in the previous version: a value that was a
 * multiple of 1024 but not of the larger unit (e.g. 1536 KiB) was
 * integer-divided into the larger unit ("1M"), silently changing the
 * value written to the generated config.
 */
def bytes(l: Long): String =
  if (l < 1024 || l % 1024 != 0) s"${l}B"
  else if (l % (1024L * 1024) != 0) s"${l / 1024}K"
  else if (l % (1024L * 1024 * 1024) != 0) s"${l / (1024 * 1024)}M"
  else s"${l / (1024L * 1024 * 1024)}G"
// Renders a Duration in HOCON time syntax, keyed on its TimeUnit.
def time(d: Duration): String = d.unit match {
  case NANOSECONDS => s"${d._1}ns"
  case MICROSECONDS => s"${d._1}us"
  case MILLISECONDS => s"${d._1}ms"
  case SECONDS => s"${d._1}s"
  case MINUTES => s"${d._1}m"
  case HOURS => s"${d._1}h"
  case DAYS => s"${d._1}d"
}
// Syntax helpers for rendering collections as HOCON arrays/objects.
implicit class MkString(t: TraversableOnce[_]) {
  def asArray = t.mkString("[", ", ", "]")
  def asObject = t.mkString("{", ", ", "}")
}
// Renders a value as a HOCON string, quoting and JSON-escaping it when it
// contains characters that are not legal in an unquoted HOCON string.
def safeString(input: Any) = {
  // True if the string must be double-quoted (empty, or contains any
  // HOCON-reserved character).
  def quotationNeeded(s: String) = s.isEmpty || List(
    '$', '"', '{', '}', '[', ']',
    ':', '=', ',', '+', '#', '`',
    '^', '?', '!', '@', '*', '&', '\\\\'
  ).exists {s.indexOf(_) != -1}
  // Double-quotes `s` with JSON escaping of specials and control characters.
  def renderJsonString(s: String): String = {
    val sb: StringBuilder = new StringBuilder
    sb.append('"')
    for (c <- s) {
      (c: @switch) match {
        case '"' =>
          sb.append("\\\\\\"")
        case '\\\\' =>
          sb.append("\\\\\\\\")
        case '\\n' =>
          sb.append("\\\\n")
        case '\\b' =>
          sb.append("\\\\b")
        case '\\f' =>
          sb.append("\\\\f")
        case '\\r' =>
          sb.append("\\\\r")
        case '\\t' =>
          sb.append("\\\\t")
        case _ =>
          // Other control characters become \\uXXXX escapes.
          if (Character.isISOControl(c)) sb.append("\\\\u%04x".format(c.toInt))
          else sb.append(c)
      }
    }
    sb.append('"')
    sb.toString()
  }
  val s = input.toString
  if (quotationNeeded(s)) renderJsonString(s) else s
}
}
| wacai/config-annotation | src/main/scala/com/wacai/config/annotation/Macro.scala | Scala | apache-2.0 | 8,197 |
package com.wordnik.swagger.client
import akka.dispatch.{Future, Promise}
import org.json4s.jackson.JsonMethods
// Base class for generated API clients; ties a transport, a serializer and
// a helper that lifts synchronous calls into (pre-2.10 akka.dispatch) Futures.
abstract class ApiClient(client: TransportClient, config: SwaggerConfig) extends JsonMethods {
  protected implicit val execContext = client.execContext
  protected val ser = config.dataFormat
  // Substitutes the serializer's name for the {format} path placeholder.
  protected def addFmt(pth: String) = pth.replace("{format}", ser.name)
  // Runs `fn` eagerly and completes a Promise with its outcome.
  // NOTE(review): a *returned* Throwable (not only a thrown one) is treated
  // as a failure; the returned Promise is usable as a Future in this akka
  // version — verify against the akka.dispatch API in use.
  protected def process[T](fn: => T): Future[T] = {
    val fut = Promise[T]
    try {
      val r = fn
      r match {
        case t: Throwable => fut.complete(Left(t))
        case s => fut.complete(Right(r))
      }
    } catch {
      // NOTE(review): catches all Throwables, including fatal errors.
      case t: Throwable => fut.complete(Left(t))
    }
    fut
  }
}
| casualjim/swagger-async-httpclient | src/main/scala_2.9/com/wordnik/swagger/client/ApiClient.scala | Scala | apache-2.0 | 685 |
package org.openapitools.server.model
/**
 * Generated model carrying a branch's permission flags.
 *
 * @param create flag for the "create" permission
 * @param read flag for the "read" permission
 * @param start flag for the "start" permission
 * @param stop flag for the "stop" permission
 * @param `class` underlying type name (presumably a JSON "_class" discriminator — verify against the API spec)
 */
final case class BranchImplpermissions (
  create: Option[Boolean],
  read: Option[Boolean],
  start: Option[Boolean],
  stop: Option[Boolean],
  // Backticks required: `class` is a Scala keyword.
  `class`: Option[String]
)
| cliffano/swaggy-jenkins | clients/scala-akka-http-server/generated/src/main/scala/org/openapitools/server/model/BranchImplpermissions.scala | Scala | mit | 416 |
/*
* Copyright 2014 Adam Rosenberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.nalloc.bitb.kcits.sandbox.exists
import org.nalloc.bitb.kcits.sandbox.Inspectable
// Sandbox case: calls `exists` with a method value (eta-expanded method)
// as the predicate. b, s, i, l, f, d, st are inherited from Inspectable;
// their exact types are not visible here.
class MethodValue extends Inspectable {
  private[this] val be = b.exists(isZero)
  private[this] val se = s.exists(isZero)
  private[this] val ie = i.exists(isZero)
  private[this] val le = l.exists(isZero)
  private[this] val fe = f.exists(isZero)
  private[this] val de = d.exists(isZero)
  private[this] val ste = st.exists(isEmpty)

  // Zero test generic over any Numeric type.
  private def isZero[T: Numeric](t: T) = implicitly[Numeric[T]].zero == t

  private def isEmpty(s: String) = s == ""
}
| arosenberger/nalloc_2.10 | sandbox/src/main/scala/org/nalloc/bitb/kcits/sandbox/exists/MethodValue.scala | Scala | apache-2.0 | 1,151 |
/*
* Copyright 2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reactiverogue.record
package field
import reactivemongo.bson._
/**
 * A record field whose value is an embedded BsonRecord, (de)serialized
 * as a nested BSONDocument.
 */
class BsonRecordField[OwnerType <: BsonRecord[OwnerType],
    SubRecordType <: BsonRecord[SubRecordType]](
    rec: OwnerType,
    valueMeta: BsonMetaRecord[SubRecordType])(implicit subRecordType: Manifest[SubRecordType])
  extends RequiredRecordField[SubRecordType, OwnerType] {

  // Convenience constructor that also sets an initial value.
  def this(rec: OwnerType, valueMeta: BsonMetaRecord[SubRecordType], value: SubRecordType)(
      implicit subRecordType: Manifest[SubRecordType]) = {
    this(rec, value.meta)
    set(value)
  }

  // Convenience constructor that sets an optional initial value.
  def this(rec: OwnerType, valueMeta: BsonMetaRecord[SubRecordType], value: Option[SubRecordType])(
      implicit subRecordType: Manifest[SubRecordType]) = {
    this(rec, valueMeta)
    setOption(value)
  }

  def owner = rec

  // A fresh empty sub-record is the default.
  def defaultValue = valueMeta.createRecord

  def asBSONValue: BSONValue =
    value.asBSONDocument

  // Only BSONDocuments deserialize; anything else clears the field.
  def setFromBSONValue(value: BSONValue): Option[SubRecordType] = value match {
    case v: BSONDocument => setOption(Some(valueMeta.fromBSONDocument(v)))
    case _ => setOption(None)
  }
}
| whisklabs/reactiverogue | reactiverogue-core/src/main/scala/reactiverogue/record/field/BsonRecordField.scala | Scala | apache-2.0 | 1,704 |
package gapt.expr.formula.hol
import gapt.expr._
import gapt.expr.ty.->:
import gapt.expr.ty.TBase
import gapt.expr.ty.Ty
/**
* Ordering for HOL Formulas (also for FOL)
*/
object HOLOrdering extends HOLOrdering

// Total ordering on lambda expressions: syntactically equal terms compare 0;
// otherwise constructors rank Var < Const < App < Abs, with lexicographic
// comparison (name, then type / subterms) inside each constructor.
class HOLOrdering extends Ordering[Expr] {
  override def compare( x: Expr, y: Expr ): Int = ( x, y ) match {
    case ( x, y ) if x syntaxEquals y => 0
    case ( Var( s1, t1 ), Var( s2, t2 ) ) =>
      s1.toString() compare s2.toString() match {
        case 0 => TAOrdering.compare( t1, t2 )
        case x => x
      }
    case ( Const( s1, t1, _ ), Const( s2, t2, _ ) ) =>
      s1.toString() compare s2.toString() match {
        case 0 => TAOrdering.compare( t1, t2 )
        case x => x
      }
    case ( App( s1, t1 ), App( s2, t2 ) ) =>
      compare( s1, s2 ) match {
        case 0 => compare( t1, t2 )
        case x => x
      }
    case ( Abs( x1, t1 ), Abs( x2, t2 ) ) =>
      compare( x1, x2 ) match {
        case 0 => compare( t1, t2 )
        case x => x
      }
    // Cross-constructor cases implement the Var < Const < App < Abs ranking.
    case ( Var( _, _ ), _ ) => -1
    case ( Const( _, _, _ ), Var( _, _ ) ) => 1
    case ( Const( _, _, _ ), _ ) => -1
    case ( App( _, _ ), Var( _, _ ) ) => 1
    case ( App( _, _ ), Const( _, _, _ ) ) => 1
    case ( App( _, _ ), _ ) => -1
    case ( Abs( _, _ ), Var( _, _ ) ) => 1
    case ( Abs( _, _ ), Const( _, _, _ ) ) => 1
    case ( Abs( _, _ ), App( _, _ ) ) => 1
    case ( Abs( _, _ ), _ ) => -1
    case _ => throw new Exception( "Unhandled comparision of hol epxressions: " + x + " ? " + y )
  }
}
/**
* Ordering on types.
*/
object TAOrdering extends TAOrdering

// Total ordering on types: equal types compare 0; base types rank below
// arrow types; arrows compare argument type first, then result type;
// base types compare by name.
class TAOrdering extends Ordering[Ty] {
  override def compare( x: Ty, y: Ty ): Int = ( x, y ) match {
    case ( x, y ) if x == y => 0
    case ( t1 ->: t2, t3 ->: t4 ) =>
      val r = compare( t1, t3 )
      if ( r == 0 ) compare( t2, t4 ) else r
    case ( _, _ ->: _ ) => -1
    case ( _ ->: _, _ ) => 1
    case ( TBase( x_, _ ), TBase( y_, _ ) ) => x_ compare y_
  }
}
| gapt/gapt | core/src/main/scala/gapt/expr/formula/hol/orderings.scala | Scala | gpl-3.0 | 2,113 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observers
import monix.execution.Ack.{Continue, Stop}
import monix.execution.cancelables.BooleanCancelable
import monix.reactive.{BaseTestSuite, Observer}
import scala.concurrent.Future
import scala.util.Success
// Tests for Observer.feed / onNextAll: pushing an iterable into an observer,
// honoring Continue/Stop acks, cancelation, and stack safety.
object ObserverFeedSuite extends BaseTestSuite {

  // onNextAll delivers every element to a synchronous observer and
  // completes with Continue.
  test("feed synchronous iterable") { implicit s =>
    check1 { (xs: List[Int]) =>
      var sum = 0
      val downstream = new Observer.Sync[Int] {
        def onError(ex: Throwable): Unit = ()
        def onComplete(): Unit = sum += 100
        def onNext(elem: Int) = {
          sum += elem
          Continue
        }
      }
      val ack = downstream.onNextAll(xs)
      s.tick()
      ack.syncTryFlatten(s) == Continue &&
        sum == xs.sum
    }
  }

  // Same as above, but each onNext ack is an asynchronous Future.
  test("feed asynchronous iterable") { implicit s =>
    check1 { (xs: List[Int]) =>
      var sum = 0
      val downstream = new Observer[Int] {
        def onError(ex: Throwable): Unit = ()
        def onComplete(): Unit = sum += 100
        def onNext(elem: Int) = Future {
          sum += elem
          Continue
        }
      }
      val ack = downstream.onNextAll(xs)
      s.tick()
      ack.syncTryFlatten(s) == Continue &&
        sum == xs.sum
    }
  }

  // A synchronous Stop ack halts the feed after the first element.
  test("stop observable synchronously") { implicit s =>
    check1 { (xs: List[Int]) =>
      var sum = 0
      val downstream = new Observer.Sync[Int] {
        def onError(ex: Throwable): Unit = ()
        def onComplete(): Unit = sum += 100
        def onNext(elem: Int) = {
          sum += elem
          Stop
        }
      }
      val ack = downstream.onNextAll(xs); s.tick()
      xs.isEmpty || (ack.syncTryFlatten(s) == Stop && sum == xs.head)
    }
  }

  // An asynchronous Stop ack halts the feed after the first element.
  test("stop observable asynchronously") { implicit s =>
    check1 { (xs: List[Int]) =>
      var sum = 0
      val downstream = new Observer[Int] {
        def onError(ex: Throwable): Unit = ()
        def onComplete(): Unit = sum += 100
        def onNext(elem: Int) = Future {
          sum += elem
          Stop
        }
      }
      val ack = downstream.onNextAll(xs); s.tick()
      xs.isEmpty || (ack.syncTryFlatten(s) == Stop && sum == xs.head)
    }
  }

  // Canceling the provided cancelable stops the feed with Stop.
  test("should be cancelable") { implicit s =>
    check1 { (xs: List[Int]) =>
      var sum = 0
      val downstream = new Observer[Int] {
        def onError(ex: Throwable): Unit = ()
        def onComplete(): Unit = sum += 100
        def onNext(elem: Int) = Future {
          sum += elem
          Continue
        }
      }
      val c = BooleanCancelable()
      val ack = downstream.feed(c, xs)
      c.cancel(); s.tick()
      (xs.length <= 1) || ack.syncTryFlatten(s) == Stop
    }
  }

  // Feeding a very large iterator must not blow the stack.
  test("synchronous feed should be stack safe") { implicit s =>
    val total = 100000000
    val iterator = Iterator.range(0, total)
    var received = 0
    val observer = new Observer[Int] {
      def onNext(elem: Int) = {
        received += 1
        Continue
      }
      def onError(ex: Throwable): Unit = throw ex
      def onComplete(): Unit = ()
    }
    val f = observer.feed(iterator); s.tick()
    assertEquals(received, total)
    assertEquals(f.value, Some(Success(Continue)))
  }
}
| alexandru/monifu | monix-reactive/shared/src/test/scala/monix/reactive/observers/ObserverFeedSuite.scala | Scala | apache-2.0 | 3,829 |
package org.openurp.edu.eams.classroom.service.wrapper
import org.openurp.base.Room
import org.openurp.base.code.RoomUsage
import org.beangle.commons.lang.time.YearWeekTime
/**
 * A room-occupation unit: the rooms involved, the occupied week/time units,
 * the usage category and the id of the occupying user.
 * Java-bean style accessors are kept for interop with existing callers.
 */
class OccupyUnit( val rooms: Iterable[Room],
                  val units: Array[YearWeekTime],
                  protected val usage: RoomUsage,
                  protected val userid: java.lang.Long) {

  // Optional free-form remark; may remain null.
  protected var comment: String = _

  def getComment(): String = comment

  /** Sets the free-form remark attached to this occupation. */
  def setComment(comment: String): Unit = {
    this.comment = comment
  }

  def getUsage(): RoomUsage = usage

  def getUserid(): java.lang.Long = userid
}
| openurp/edu-eams-webapp | core/src/main/scala/org/openurp/edu/eams/classroom/service/wrapper/OccupyUnit.scala | Scala | gpl-3.0 | 567 |
package com.lookout.borderpatrol.session
import com.lookout.borderpatrol.Session
import com.lookout.borderpatrol.session.id.{Marshaller, Generator => IdGenerator}
import com.lookout.borderpatrol.session.secret.InMemorySecretStore
import com.lookout.borderpatrol.session.store.InMemorySessionStore
import org.jboss.netty.handler.codec.http.{HttpMethod, HttpVersion, DefaultHttpRequest}
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.OptionValues._
class SessionSpec extends FlatSpec with Matchers {
  // In-memory secret store seeded with two freshly generated secrets.
  implicit val secretStore = InMemorySecretStore(Secrets(Secret(SecretExpiry.currentExpiry), Secret(SecretExpiry.currentExpiry)))
  implicit val marshaller = Marshaller(secretStore)
  implicit val generator = new IdGenerator

  def mockSessionStore = new InMemorySessionStore

  behavior of "Session"

  // Round-trips a session through JSON and compares field by field.
  it should "be serializable" in {
    val orig = Session("servicename", new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/"))
    val json = orig.asJson
    val sOption = json.asSession
    val deser = sOption.value
    deser.id shouldEqual(orig.id)
    deser.tokens shouldEqual(orig.tokens)
    // The request is mutable, so the deserialized copy is a distinct object.
    deser.originalRequest should not equal(orig.originalRequest) // mutable values
    deser.equals(orig) shouldBe true // overridden
  }
}
| rtyler/borderpatrol | borderpatrol-core/src/test/scala/com/lookout/borderpatrol/session/SessionSpec.scala | Scala | mit | 1,264 |
import akka.actor.{ActorRef, Actor}
import api.WeatherApi
// we don't implement our route structure directly in the service actor because
// we want to be able to test it independently, without having to spin up an actor
class RestActor(val forecastRef: ActorRef) extends Actor
  with WeatherApi {

  // Connect the route-service environment to this actor's context.
  def actorRefFactory = context

  // Use the enclosing actor's dispatcher for Futures and the Scheduler.
  implicit def executionContext = actorRefFactory.dispatcher

  // All messages are handled by running the spray route defined in WeatherApi.
  def receive = runRoute(weatherRoute)
} | sbondor/sunwatch | server/src/main/scala/RestActor.scala | Scala | mit | 671
package models
import java.sql.Connection
import javax.inject.Inject
import javax.inject.Singleton
import play.api.db._
import anorm._
import anorm.SqlParser._
import scala.language.postfixOps
case class Project(id: Option[Long], folder: String, name: String)
@Singleton
class ProjectService @Inject() (dbapi: DBApi, userService: UserService) {
private val db = dbapi.database("default")
// -- Parsers
/**
* Parse a Project from a ResultSet
*/
// Row parser mapping the `project` table's columns to a Project.
val simple = {
  get[Option[Long]]("project.id") ~
  get[String]("project.folder") ~
  get[String]("project.name") map {
    case id~folder~name => Project(id, folder, name)
  }
}
// -- Queries
/**
* Retrieve a Project from id.
*/
// Looks a project up by primary key; None when no row matches.
def findById(id: Long): Option[Project] = {
  db.withConnection { implicit connection =>
    SQL("select * from project where id = {id}").on(
      "id" -> id
    ).as(simple.singleOpt)
  }
}
/**
* Retrieve project for user
*/
// All projects the given user (identified by email) is a member of.
def findInvolving(user: String): Seq[Project] = {
  db.withConnection { implicit connection =>
    SQL(
      """
        select * from project
        join project_member on project.id = project_member.project_id
        where project_member.user_email = {email}
      """
    ).on(
      "email" -> user
    ).as(simple *)
  }
}
/**
* Update a project.
*/
/** Renames project `id` to `newName`. No-op when the id does not exist. */
def rename(id: Long, newName: String): Unit = {
  db.withConnection { implicit connection =>
    SQL("update project set name = {name} where id = {id}").on(
      "id" -> id, "name" -> newName
    ).executeUpdate()
  }
}
/**
* Delete a project.
*/
/** Deletes the project with the given id. No-op when the id does not exist. */
def delete(id: Long): Unit = {
  db.withConnection { implicit connection =>
    SQL("delete from project where id = {id}").on(
      "id" -> id
    ).executeUpdate()
  }
}
/**
* Delete all project in a folder
*/
/** Deletes every project that lives in the given folder. */
def deleteInFolder(folder: String): Unit = {
  db.withConnection { implicit connection =>
    SQL("delete from project where folder = {folder}").on(
      "folder" -> folder
    ).executeUpdate()
  }
}
/**
* Rename a folder
*/
/** Moves every project from `folder` to `newName`. */
def renameFolder(folder: String, newName: String): Unit = {
  db.withConnection { implicit connection =>
    SQL("update project set folder = {newName} where folder = {name}").on(
      "name" -> folder, "newName" -> newName
    ).executeUpdate()
  }
}
/**
* Retrieve project member
*/
// All users that are members of the given project.
def membersOf(project: Long): Seq[User] = {
  db.withConnection { implicit connection =>
    SQL(
      """
        select user.* from user
        join project_member on project_member.user_email = user.email
        where project_member.project_id = {project}
      """
    ).on(
      "project" -> project
    ).as(userService.simple *)
  }
}
/**
* Add a member to the project team.
*/
/** Adds the user (by email) to the project's member table. */
def addMember(project: Long, user: String): Unit = {
  db.withConnection { implicit connection =>
    SQL("insert into project_member values({project}, {user})").on(
      "project" -> project,
      "user" -> user
    ).executeUpdate()
  }
}
/**
* Remove a member from the project team.
*/
/** Removes the user (by email) from the project's member table. */
def removeMember(project: Long, user: String): Unit = {
  db.withConnection { implicit connection =>
    SQL("delete from project_member where project_id = {project} and user_email = {user}").on(
      "project" -> project,
      "user" -> user
    ).executeUpdate()
  }
}
/**
* Check if a user is a member of this project
*/
// True when exactly one membership row links this user to the project.
def isMember(project: Long, user: String): Boolean = {
  db.withConnection { implicit connection =>
    SQL(
      """
        select count(user.email) = 1 from user
        join project_member on project_member.user_email = user.email
        where project_member.project_id = {project} and user.email = {email}
      """
    ).on(
      "project" -> project,
      "email" -> user
    ).as(scalar[Boolean].single)
  }
}
/**
* Create a Project.
*/
// Inserts a new project plus its initial member list in one transaction.
// When the Project carries no id, the next sequence value is allocated.
// Returns a copy of the project with the id filled in.
def create(project: Project, members: Seq[String]): Project = {
  db.withTransaction { implicit connection =>
    // Get the project id
    val id: Long = project.id.getOrElse {
      SQL("select next value for project_seq").as(scalar[Long].single)
    }
    // Insert the project
    SQL(
      """
        insert into project values (
          {id}, {name}, {folder}
        )
      """
    ).on(
      "id" -> id,
      "name" -> project.name,
      "folder" -> project.folder
    ).executeUpdate()
    // Add members
    members.foreach { email =>
      SQL("insert into project_member values ({id}, {email})").on("id" -> id, "email" -> email).executeUpdate()
    }
    project.copy(id = Some(id))
  }
}
}
| scoverage/scoverage-maven-samples | playframework/singlemodule/zentasks/zentasks-scala-2.11/app/models/Project.scala | Scala | apache-2.0 | 4,814 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
 * Performance test for the AuxFile entity.
 *
 * Gatling simulation: authenticates against the JHipster backend (XSRF token
 * captured from the Set-Cookie header), then repeatedly lists, creates, reads
 * and deletes AuxFile resources with 100 users ramped over one minute.
 */
class AuxFileGatlingTest extends Simulation {
  val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
  // Log all HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
  // Log failed HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))
  // Target host is overridable via -DbaseURL=...
  val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""
  val httpConf = http
    .baseURL(baseURL)
    .inferHtmlResources()
    .acceptHeader("*/*")
    .acceptEncodingHeader("gzip, deflate")
    .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
    .connectionHeader("keep-alive")
    .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")
  val headers_http = Map(
    "Accept" -> """application/json"""
  )
  // "${xsrf_token}" is a Gatling session attribute filled in by saveAs below.
  val headers_http_authenticated = Map(
    "Accept" -> """application/json""",
    "X-XSRF-TOKEN" -> "${xsrf_token}"
  )
  val scn = scenario("Test the AuxFile entity")
    // Unauthenticated probe: expected 401, but it yields the XSRF cookie.
    .exec(http("First unauthenticated request")
    .get("/api/account")
    .headers(headers_http)
    .check(status.is(401))
    .check(headerRegex("Set-Cookie", "XSRF-TOKEN=(.*);[\\\\s]").saveAs("xsrf_token"))).exitHereIfFailed
    .pause(10)
    .exec(http("Authentication")
    .post("/api/authentication")
    .headers(headers_http_authenticated)
    .formParam("j_username", "admin")
    .formParam("j_password", "admin")
    .formParam("remember-me", "true")
    .formParam("submit", "Login")
    // Authentication rotates the XSRF token; capture the fresh one.
    .check(headerRegex("Set-Cookie", "XSRF-TOKEN=(.*);[\\\\s]").saveAs("xsrf_token"))).exitHereIfFailed
    .pause(1)
    .exec(http("Authenticated request")
    .get("/api/account")
    .headers(headers_http_authenticated)
    .check(status.is(200)))
    .pause(10)
    .repeat(2) {
      exec(http("Get all auxFiles")
      .get("/api/aux-files")
      .headers(headers_http_authenticated)
      .check(status.is(200)))
      .pause(10 seconds, 20 seconds)
      .exec(http("Create new auxFile")
      .post("/api/aux-files")
      .headers(headers_http_authenticated)
      .body(StringBody("""{"id":null, "creation_date":"2020-01-01T00:00:00.000Z", "filename":"SAMPLE_TEXT", "uri":"SAMPLE_TEXT", "description":"SAMPLE_TEXT"}""")).asJSON
      .check(status.is(201))
      // Location header of the 201 response is the URL of the new resource.
      .check(headerRegex("Location", "(.*)").saveAs("new_auxFile_url"))).exitHereIfFailed
      .pause(10)
      .repeat(5) {
        exec(http("Get created auxFile")
        .get("${new_auxFile_url}")
        .headers(headers_http_authenticated))
        .pause(10)
      }
      .exec(http("Delete created auxFile")
      .delete("${new_auxFile_url}")
      .headers(headers_http_authenticated))
      .pause(10)
  }
  val users = scenario("Users").exec(scn)
  setUp(
    users.inject(rampUsers(100) over (1 minutes))
  ).protocols(httpConf)
}
| CardiacAtlasProject/xpacs-web | src/test/gatling/simulations/AuxFileGatlingTest.scala | Scala | gpl-3.0 | 3,431 |
trait Functor[a] { type MyType[a] } // note: the type member's parameter `a` shadows the trait's — part of this negative test's setup
object Functor { // negative compiler test: the body below must be rejected
  def listFunctor[a]: Functor[a] { type MyType[x] = List[x] } = // result type refines MyType
    new Functor[a] { type List[t] = List[t] } // defines a cyclic alias `List` instead of MyType — the error under test
}
| scala/scala | test/files/neg/t10003.scala | Scala | apache-2.0 | 165 |
package scala.meta
import org.scalatest._
import scala.reflect.runtime.universe._
import scala.meta.internal.ast._
import scala.meta.dialects.Scala211
/**
 * Checks the pretty-printed syntax of explicitly constructed quasi nodes.
 * (Fix: the original closing brace had stray dataset metadata appended after
 * it, which broke compilation; the code itself is unchanged.)
 */
class QuasiSuite extends FunSuite {
  // NOTE(review): this val appears intended to shadow scala.meta's
  // XtensionQuasiquoteTerm implicit so q"..." behaves as desired — confirm.
  val XtensionQuasiquoteTerm = "shadow scala.metq quasiquotes"
  test("$x") {
    assert(Term.Quasi(0, q"x").show[Syntax] === "${x @ Term}")
  }
  test("..$xs") {
    assert(Term.Quasi(1, Term.Quasi(0, q"xs")).show[Syntax] === "..${xs @ Term}")
  }
}
package com.avsystem.commons
package serialization
import com.avsystem.commons.annotation.AnnotationAggregate
import com.avsystem.commons.meta.{AutoOptionalParams, MacroInstances}
import com.avsystem.commons.misc.{AutoNamedEnum, NamedEnumCompanion, TypedKey}
object CodecTestData {
def col[T <: JCollection[Int]](col: T): T = {
col.add(1)
col.add(2)
col.add(3)
col
}
def map[M <: JMap[Int, Int]](map: M): M = {
map.put(1, 1)
map.put(2, 2)
map.put(3, 3)
map
}
def stringMap[M <: JMap[String, Int]](map: M): M = {
map.put("1", 1)
map.put("2", 2)
map.put("3", 3)
map
}
def doubleMap[M <: JMap[Double, Int]](map: M): M = {
map.put(1.0, 1)
map.put(2.0, 2)
map.put(3.0, 3)
map
}
  // --- Java collection fixtures, each pre-populated via the helpers above ---
  val jArrayList: JArrayList[Int] = col(new JArrayList[Int])
  val jLinkedList: JLinkedList[Int] = col(new JLinkedList[Int])
  val jHashSet: JHashSet[Int] = col(new JHashSet[Int])
  val jLinkedHashSet: JLinkedHashSet[Int] = col(new JLinkedHashSet[Int])
  val jTreeSet: JTreeSet[Int] = col(new JTreeSet[Int])
  val jHashMap: JHashMap[String, Int] = stringMap(new JHashMap[String, Int])
  val jIntHashMap: JHashMap[Int, Int] = map(new JHashMap[Int, Int])
  val jDoubleHashMap: JHashMap[Double, Int] = doubleMap(new JHashMap[Double, Int])
  val jLinkedHashMap: JLinkedHashMap[String, Int] = stringMap(new JLinkedHashMap[String, Int])
  // --- Scala standard-library fixtures ---
  val some = Option(42)
  val none = Option.empty[Int]
  val list = List(1, 2, 3)
  val set = Set(1, 2, 3)
  val map = Map("1" -> 1, "2" -> 2, "3" -> 3)
  val hashMap = IHashMap("1" -> 1, "2" -> 2, "3" -> 3)
  val intMap = Map(1 -> 1, 2 -> 2, 3 -> 3)
  val doubleMap = Map(1.0 -> 1, 2.0 -> 2, 3.0 -> 3)
  case class ValueClass(str: String) extends AnyVal
  object ValueClass extends HasGenCodec[ValueClass]
  // Sealed hierarchy with nested cases, used to exercise nested-format codecs.
  sealed trait SealedBase
  object SealedBase {
    case object CaseObject extends SealedBase
    case class CaseClass(str: String) extends SealedBase
    case class Rec(sub: Opt[SealedBase], local: Opt[Rec]) extends SealedBase
    sealed trait InnerBase extends SealedBase
    object InnerBase {
      case object InnerCaseObject extends InnerBase
      case class InnerCaseClass(str: String = "kek") extends InnerBase
    }
    implicit val codec: GenCodec[SealedBase] = GenCodec.materialize[SealedBase]
  }
  // Annotation aggregate renaming the annotated member to "_id" (Mongo-style).
  class mongoId extends AnnotationAggregate {
    @outOfOrder @name("_id")
    final def aggregated: List[StaticAnnotation] = reifyAggregated
  }
  // Flat sealed hierarchy: the case marker is emitted as a field, not a wrapper.
  @flatten sealed trait FlatSealedBase {
    @mongoId def id: String
    @generated @name("upper_id") def upperId: String = id.toUpperCase
  }
  object FlatSealedBase {
    case class FirstCase(id: String, int: Int = 42) extends FlatSealedBase
    case class SecondCase(id: String, dbl: Double, moar: Double*) extends FlatSealedBase
    case object ThirdCase extends FlatSealedBase {
      @generated def id = "third"
    }
    case class RecursiveCase(id: String, sub: Opt[FlatSealedBase]) extends FlatSealedBase
    case class LocallyRecursiveCase(id: String, sub: Opt[LocallyRecursiveCase]) extends FlatSealedBase
    // for Scala 2.11
    implicit val codec: GenCodec[FlatSealedBase] = GenCodec.materialize
  }
  @flatten sealed trait TransparentFlatSealedBase
  case class TransparentCaseWrap(thing: TransparentFlatThing) extends TransparentFlatSealedBase
  object TransparentCaseWrap extends TransparentWrapperCompanion[TransparentFlatThing, TransparentCaseWrap]
  object TransparentFlatSealedBase extends HasGenCodec[TransparentFlatSealedBase]
  case class TransparentFlatThing(num: Int, text: String)
  object TransparentFlatThing extends HasApplyUnapplyCodec[TransparentFlatThing]
  // Base for hand-rolled "case-class-like" types: equality delegates to the
  // constructor argument list captured as varargs.
  abstract class Wrapper[Self <: Wrapper[Self] : ClassTag](private val args: Any*) { this: Self =>
    override def equals(obj: Any): Boolean = obj match {
      case other: Self => args == other.args
      case _ => false
    }
    override def hashCode(): Int = args.hashCode()
  }
  trait Framework {
    type Field
    case class Stuff(field: Field)
  }
  trait BetterFramework extends Framework {
    implicit def fieldCodec: GenCodec[Field]
    implicit val stuffCodec: GenCodec[Stuff] = GenCodec.materialize
  }
  object SomeObject {
    @generated def random: Int = 42
    implicit val codec: GenCodec[SomeObject.type] = GenCodec.materialize[SomeObject.type]
  }
  case class NoArgCaseClass()
  object NoArgCaseClass extends HasGenCodec[NoArgCaseClass]
  case class SingleArgCaseClass(str: String)
  object SingleArgCaseClass extends HasGenCodec[SingleArgCaseClass]
  @transparent
  case class TransparentWrapper(str: String)
  object TransparentWrapper extends HasGenCodec[TransparentWrapper]
  @transparent
  case class TransparentWrapperWithDependency(str: String)
  object TransparentWrapperWithDependency {
    //order matters
    implicit val codec: GenCodec[TransparentWrapperWithDependency] = GenCodec.materialize
    implicit val stringCodec: GenCodec[String] = GenCodec.StringCodec
  }
  @transparent case class StringId(id: String)
  object StringId extends TransparentWrapperCompanion[String, StringId]
  trait HasSomeStr {
    @name("some.str") def str: String
    @generated def someStrLen: Int = str.length
  }
  case class SomeCaseClass(str: String, intList: List[Int]) extends HasSomeStr
  object SomeCaseClass extends HasGenCodec[SomeCaseClass]
  case class Stuff[T](name: String)
  object Stuff {
    // Hand-written codec: only `name` is serialized, the type param is erased.
    implicit val codec: GenCodec[Stuff[_]] = GenCodec.create(
      in => new Stuff[Any](in.readSimple().readString()),
      (out, value) => out.writeSimple().writeString(value.name)
    )
  }
  case class CaseClassWithWildcard(stuff: Stuff[_])
  object CaseClassWithWildcard extends HasGenCodec[CaseClassWithWildcard]
  case class CaseClassWithOptionalFields(
    str: String,
    @optionalParam int: Opt[Int],
    @optionalParam bul: Option[Boolean]
  )
  object CaseClassWithOptionalFields extends HasGenCodec[CaseClassWithOptionalFields]
  case class CaseClassWithAutoOptionalFields(
    str: String,
    int: Opt[Int],
    bul: Option[Boolean],
    nint: NOpt[Opt[Int]],
  )
  object CaseClassWithAutoOptionalFields extends HasGenCodecWithDeps[AutoOptionalParams.type, CaseClassWithAutoOptionalFields]
  class CaseClassLike(val str: String, val intList: List[Int])
    extends Wrapper[CaseClassLike](str, intList)
  object CaseClassLike extends HasGenCodec[CaseClassLike] {
    def apply(@name("some.str") str: String, intList: List[Int]): CaseClassLike = new CaseClassLike(str, intList)
    def unapply(ccl: CaseClassLike): Opt[(String, List[Int])] = (ccl.str, ccl.intList).opt
  }
  class HasInheritedApply(val str: String, val intList: List[Int])
    extends Wrapper[HasInheritedApply](str, intList)
  trait ApplyAndUnapply[A, B, C] {
    protected def doApply(a: A, lb: List[B]): C
    protected def doUnapply(c: C): Option[(A, List[B])]
    def apply(a: A, lb: List[B]): C = doApply(a, lb)
    def unapply(c: C): Option[(A, List[B])] = doUnapply(c)
  }
  object HasInheritedApply extends HasGenCodec[HasInheritedApply] with ApplyAndUnapply[String, Int, HasInheritedApply] {
    protected def doApply(a: String, lb: List[Int]): HasInheritedApply = new HasInheritedApply(a, lb)
    protected def doUnapply(c: HasInheritedApply): Option[(String, List[Int])] = (c.str, c.intList).option
  }
  // Simulates deriving a codec for a type whose companion is not its own:
  // the "fake companion" supplies apply/unapply with swapped parameter order.
  case class ThirdParty(i: Int, s: String)
  object ThirdParty extends HasGenCodecFromAU[ThirdPartyFakeCompanion.type, ThirdParty]
  object ThirdPartyFakeCompanion {
    def apply(str: String, int: Int): ThirdParty = ThirdParty(int, str)
    def unapply(tp: ThirdParty): Opt[(String, Int)] = (tp.s, tp.i).opt
  }
  case class VarargsCaseClass(int: Int, strings: String*)
  object VarargsCaseClass extends HasGenCodec[VarargsCaseClass]
  case class OnlyVarargsCaseClass(strings: String*)
  object OnlyVarargsCaseClass extends HasGenCodec[OnlyVarargsCaseClass]
  class VarargsCaseClassLike(val str: String, val ints: Seq[Int]) extends Wrapper[VarargsCaseClassLike](str, ints)
  object VarargsCaseClassLike extends HasGenCodec[VarargsCaseClassLike] {
    def apply(@name("some.str") str: String, ints: Int*): VarargsCaseClassLike = new VarargsCaseClassLike(str, ints)
    def unapplySeq(vccl: VarargsCaseClassLike): Opt[(String, Seq[Int])] = (vccl.str, vccl.ints).opt
  }
  class OnlyVarargsCaseClassLike(val strings: Seq[String]) extends Wrapper[OnlyVarargsCaseClassLike](strings)
  object OnlyVarargsCaseClassLike extends HasGenCodec[OnlyVarargsCaseClassLike] {
    def apply(strings: String*): OnlyVarargsCaseClassLike = new OnlyVarargsCaseClassLike(strings)
    def unapplySeq(vccl: OnlyVarargsCaseClassLike): Opt[Seq[String]] = vccl.strings.opt
  }
  case class HasDefaults(@transientDefault int: Int = 42, @transientDefault @whenAbsent("dafuq") str: String = "kek")
  object HasDefaults extends HasGenCodec[HasDefaults]
  sealed trait CustomList
  case object CustomTail extends CustomList
  @transparent case class CustomCons(tail: CustomList) extends CustomList
  object CustomCons extends HasGenCodec[CustomCons]
  object CustomList extends HasGenCodec[CustomList]
  // GADT-style expression hierarchies for testing codecs over refined types.
  sealed trait BaseExpr {
    type Value
    def value: Value
  }
  sealed abstract class Expr[T](val value: T) extends BaseExpr {
    type Value = T
  }
  case class IntExpr(int: Int) extends Expr[Int](int)
  case class StringExpr(str: String) extends Expr[String](str)
  case object NullExpr extends Expr[Null](null)
  object BaseExpr {
    implicit val baseCodec: GenCodec[BaseExpr] = GenCodec.materialize
    implicit val stringCodec: GenCodec[Expr[String]] = GenCodec.materialize
    implicit def baseGenericCodec[T]: GenCodec[BaseExpr {type Value = T}] = GenCodec.materialize
  }
  object Expr extends HasGadtCodec[Expr]
  trait RecBound[+T]
  case class RecBounded(int: Int) extends RecBound[RecBounded]
  object RecBounded extends HasGenCodec[RecBounded]
  @flatten sealed trait RecExpr[+T]
  case class IntRecExpr(int: Int) extends RecExpr[Int]
  case class StringRecExpr(str: String) extends RecExpr[String]
  case object NothingRecExpr extends RecExpr[Nothing]
  case class ArbitraryRecExpr[+T](value: T) extends RecExpr[T]
  case class RecBoundedExpr[+T <: RecBound[T]](value: T) extends RecExpr[T]
  case class LazyRecExpr[+T](expr: RecExpr[T]) extends RecExpr[T]
  object RecExpr {
    // Materialization requires the recursive bound; the cast then widens the
    // codec to an arbitrary T (sound thanks to covariance of RecExpr).
    private def mkCodec[T <: RecBound[T] : GenCodec]: GenCodec[RecExpr[T]] = GenCodec.materialize
    implicit def codec[T: GenCodec]: GenCodec[RecExpr[T]] =
      mkCodec[Nothing](GenCodec[T].asInstanceOf[GenCodec[Nothing]]).asInstanceOf[GenCodec[RecExpr[T]]]
  }
  @flatten sealed trait PureGadtExpr[T]
  case class StringLiteral(value: String) extends PureGadtExpr[String]
  case class IntLiteral(value: Int) extends PureGadtExpr[Int]
  case object NullLiteral extends PureGadtExpr[Null]
  case class Plus[T](lhs: PureGadtExpr[T], rhs: PureGadtExpr[T]) extends PureGadtExpr[T]
  object PureGadtExpr extends HasGadtCodec[PureGadtExpr]
  sealed trait Tree[T]
  case class Leaf[T](value: T) extends Tree[T]
  case class Branch[T](left: Tree[T], right: Tree[T]) extends Tree[T]
  object Tree extends HasPolyGenCodec[Tree]
  sealed trait Enumz
  object Enumz {
    @name("Primary")
    case object First extends Enumz
    case object Second extends Enumz
    case object Third extends Enumz
    implicit val codec: GenCodec[Enumz] = GenCodec.materialize[Enumz]
  }
  sealed trait KeyEnumz
  object KeyEnumz {
    @name("Primary")
    case object First extends KeyEnumz
    case object Second extends KeyEnumz
    case object Third extends KeyEnumz
    implicit val codec: GenCodec[KeyEnumz] = GenCodec.forSealedEnum[KeyEnumz]
  }
  sealed abstract class SealedKey[T](implicit val valueCodec: GenCodec[T]) extends TypedKey[T] with AutoNamedEnum
  object SealedKey extends NamedEnumCompanion[SealedKey[_]] {
    case object StringKey extends SealedKey[String]
    case object IntKey extends SealedKey[Int]
    case object BooleanKey extends SealedKey[Boolean]
    val values: List[SealedKey[_]] = caseObjects
  }
  // Flat hierarchy with a customized discriminator field name and a default case.
  @flatten("kejs") sealed trait CustomizedSeal
  @defaultCase(transient = true) case class CustomizedCase(str: String) extends CustomizedSeal
  case class OtherCustomCase(value: Int, flag: Boolean) extends CustomizedSeal
  case object CustomizedObjekt extends CustomizedSeal
  object CustomizedSeal extends HasGenCodec[CustomizedSeal]
  // More than 22 fields: exceeds the tuple/Function22 limit on purpose.
  case class ItsOverTwentyTwo(
    a1: String,
    a2: String,
    a3: String,
    a4: String,
    a5: String,
    a6: String,
    a7: String,
    a8: String,
    a9: String,
    a10: String,
    a11: String,
    a12: String,
    a13: String,
    a14: String,
    a15: String,
    a16: String,
    a17: String,
    a18: String,
    a19: String,
    a20: String,
    a21: String,
    a22: String,
    a23: String
  )
  object ItsOverTwentyTwo extends HasGenCodec[ItsOverTwentyTwo]
  @flatten
  sealed trait Dep
  case class DepCase(str: String) extends Dep
  @flatten
  sealed trait HasColl
  case class HasCollCase(coll: Seq[Dep]) extends HasColl
  object HasColl extends HasRecursiveGenCodec[HasColl]
  sealed trait SealedRefined {
    type X
  }
  object SealedRefined {
    final case class First[Type](foo: Type) extends SealedRefined {
      type X = Type
    }
    implicit def codec[T: GenCodec]: GenCodec[SealedRefined {type X = T}] = GenCodec.materialize
  }
  // Mutually and self-recursive shapes for recursive codec derivation.
  case class StepOne(stepTwo: StepTwo)
  case class StepTwo(stepOne: Opt[StepOne])
  case class OuterThing(inner: InnerThing)
  case class InnerThing(recursiveThing: Opt[OuterThing])
  object OuterThing extends HasRecursiveGenCodec[OuterThing]
  @transparent
  case class ThingId(value: String)
  object ThingId extends StringWrapperCompanion[ThingId]
  // Compile-time check only: codec materialization for a locally defined class.
  locally {
    case class LocalStuff()
    object LocalStuff extends HasGenCodec[LocalStuff]()(MacroInstances.materialize)
  }
}
| AVSystem/scala-commons | commons-core/src/test/scala/com/avsystem/commons/serialization/CodecTestData.scala | Scala | mit | 13,780 |
package fpinscala.datastructures
import java.util.concurrent.atomic.DoubleAccumulator
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]
object Tree {
def size[A](t: Tree[A]): Int = {
t match {
case Leaf(_) => 1
case Branch(left, right) => 1 + size(left) + size(right)
}
}
def maximum(t: Tree[Int]): Int = {
t match {
case Leaf(elem) => elem
case Branch(left, right) => Math.max(maximum(left), maximum(right))
}
}
def depth[A](t: Tree[A]): Int = {
t match {
case Leaf(_) => 0
case Branch(left, right) => 1 + depth(left) max depth(right)
}
}
def map[A,B](t: Tree[A])(f: A => B): Tree[B] = {
t match {
case Leaf(a) => Leaf(f(a))
case Branch(left, right) => Branch(map(left)(f), map(right)(f))
}
}
def fold[A,B](t: Tree[A])(f: A => B)(g: (B, B) => B): B = {
t match {
case Leaf(a) => f(a)
case Branch(left, right) => g(fold(left)(f)(g), fold(right)(f)(g))
}
}
def sizeWithFold[A](t: Tree[A]): Int = {
fold(t)(_ => 1)(1 + _ + _)
}
def depthWithFold[A](t: Tree[A]): Int = {
fold(t)(_ => 0)((a,b) => 1 + a max b)
}
def maximumWithFold(t: Tree[Int]): Int = {
fold(t)(elem => elem)((a,b) => a.max(b))
}
def mapWithFold[A,B](t: Tree[A])(f: A => B): Tree[B] = {
fold(t)(elem => Leaf(f(elem)): Tree[B])((a,b) => Branch(a, b))
}
} | P3trur0/fpinscala | exercises/src/main/scala/fpinscala/datastructures/Tree.scala | Scala | mit | 1,500 |
object Test { // negative compiler test: each literal below contains a malformed unicode escape and must be rejected
  "\\u"
  "\\u "
  "\\uuuu"
  "\\uuuuu"
  "\\u123"
  "\\uu123"
  """\\u"""
  """\\u """
  """\\uuuu"""
  """\\uuuuu"""
  """\\u123"""
  """\\uu123"""
} // fix: removed stray dataset metadata that had been appended after this closing brace
package loom
import play.api.{Mode, Play, Logger, Application}
import loom.utils.Memcached
/**
*
* @author chaosky
*/
/**
 * Application lifecycle hooks: initializes/tears down Memcached and records
 * which database backend is configured via `database.type`.
 */
object Global {
  // s -> scala; non-prod modes get the mode name prepended so that
  // environments never share cache keys.
  val cacheNameSpace = Play.mode(play.api.Play.current) match {
    case Mode.Prod => "s/loom/"
    case m => m.toString + "/s/loom/"
  }
  // Database flags consumed elsewhere; set exactly once in onStart.
  var isDbMysql: Boolean = false
  var isDbH2: Boolean = false
  def onStart(app: Application) {
    Logger.info("application start")
    Memcached.init()
    val dbType = Play.maybeApplication.flatMap(_.configuration.getString("database.type")).getOrElse("None")
    Logger.debug("db type " + dbType)
    dbType match {
      case "mysql" => isDbMysql = true
      case "h2" => isDbH2 = true
      case other =>
        // Fix: the original match had no default case, so any unrecognized or
        // missing database.type (including the "None" fallback above) threw a
        // MatchError during application startup.
        Logger.warn("unrecognized database.type '" + other + "'; no database flag set")
    }
  }
  def onStop(app: Application) {
    Logger.info("application stop")
    Memcached.destroy()
  }
}
| chaosky/loom | app/loom/Global.scala | Scala | mit | 799 |
package com.github.sammyrulez
import akka.actor.ActorSystem
import com.github.sammyrulez.models.Example
import com.mongodb.casbah.Imports._
import com.novus.salat.dao.SalatDAO
import org.bson.types.ObjectId
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatra.test.scalatest._
import org.scalatest.FunSuiteLike
import com.github.SalatConfig.ctx
@RunWith(classOf[JUnitRunner])
// Integration test: requires a MongoDB instance listening on mongoHost:27017.
// A fixture document is inserted at construction time, before any test runs.
class ControllerTest extends ScalatraSuite with FunSuiteLike {
  // `HelloWorldServlet` is your app which extends ScalatraServlet
  val mongoHost = "127.0.0.1"
  private val actorSystem: ActorSystem = ActorSystem("MySpec")
  private val mongoDb: MongoDB = MongoClient(mongoHost, 27017).getDB("test")
  object RestDAO extends SalatDAO[Example, ObjectId](collection = mongoDb("test_collection"))
  // Seed one document so the GET below has something to return.
  RestDAO.insert(new Example(new ObjectId,false,"me","myself"))
  val indexController = new IndexController(actorSystem, mongoDb)
  addServlet(indexController, "/api/example")
  test("simple get list ") {
    get("/api/example") {
      status should equal (200)
      println(body)
      body should include ("\\"name\\":\\"myself\\"")
    }
  }
  // Shut the actor system down after the suite so the JVM can exit cleanly.
  protected override def afterAll(): Unit = {
    super.afterAll()
    actorSystem.shutdown()
  }
}
| sammyrulez/resting-scalable-hamster | src/test/scala/com/github/sammyrulez/ControllerTest.scala | Scala | mit | 1,262 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package base
package types
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScQuotedType
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScExpressionImplBase
/** PSI element implementation backing [[ScQuotedType]], built directly from its AST node. */
class ScQuotedTypeImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScQuotedType {
  // Label used for this element in PSI tree dumps.
  override def toString: String = "QuotedType"
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/base/types/ScQuotedTypeImpl.scala | Scala | apache-2.0 | 425 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2015 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package funnel
package zeromq
/**
 * A payload for transmitting via a ∅ socket
 */
case class Transported(
  /** every message should have a unique serial which is used by recipient to track message order */
  serial: Serial,
  /** the scheme by which this packet was encoded */
  scheme: Scheme,
  /** The version of the scheme */
  version: Version,
  /** optional aggregation window this packet belongs to */
  window: Option[Window],
  /** An optional "topic" for this packet, which can be used for filtering subscriptions */
  topic: Option[Topic],
  /** The actual payload */
  bytes: Array[Byte]
){
  /**
   * Renders the header frame for this packet: `scheme/version[/window[/topic]]`.
   * When a topic is present without a window, `Windows.unknown` is inserted as
   * a placeholder segment so the topic always sits in the fourth position.
   */
  def header: String = {
    val prefix = s"$scheme/$version"
    val suffix = (window, topic) match {
      case (None, None)       => ""
      case (Some(w), None)    => s"/$w"
      case (None, Some(t))    => s"/${Windows.unknown}/$t"
      case (Some(w), Some(t)) => s"/$w/$t"
    }
    prefix + suffix
  }
}
object Transported {
  import Versions._
  // Parser machinery (scalaparsers) used to decode the header string.
  val P = new scalaparsers.Parsing[Unit] {}
  import scalaparsers.ParseState
  import scalaparsers.Supply
  import scalaparsers.Pos
  import scalaparsers.Err
  import P._
  val slash = ch('/')
  // A header segment: one or more characters up to the next '/'.
  val notSlash: Parser[String] = satisfy(_ != '/').many map(_.mkString)
  val scheme: Parser[Scheme] = notSlash map Schemes.fromString
  val version: Parser[Version] = notSlash map Versions.fromString
  val window: Parser[Option[Window]] = notSlash map Windows.fromString
  // this is overly complicated in order to handle the case when we have a topic in an unknown window
  val topic: Parser[Option[Topic]] =
    slash.optional flatMap {
      case None => unit(None)
      case Some(s) => (notSlash map {t => Some(Topic(t))}).orElse(Some(Topic("")))
    }
  // Parses the optional "/window[/topic]" tail of the header.
  val windowTopic: Parser[(Option[Window], Option[Topic])] =
    (slash.optional).flatMap {
      case None => unit((None, None))
      case Some(_) => for {
        w <- window
        t <- topic
      } yield(w -> t)
    }
  // Full header parser; yields a curried constructor awaiting payload + serial.
  val headerParse: Parser[Array[Byte] => Serial => Transported] =
    for {
      s <- scheme << slash
      v <- version
      wt <- windowTopic
    } yield (bytes => serial => Transported(serial, s, v, wt._1, wt._2, bytes))
  /**
   * Reconstructs the Transported instance on the receive side given the header
   * and payload. An unparseable header falls back to unknown scheme/version
   * rather than failing, preserving the payload bytes.
   */
  def apply(h: String, serial: Serial, bytes: Array[Byte]): Transported = {
    headerParse.run(ParseState(
      loc = Pos.start("header", h),
      input = h,
      s = (),
      layoutStack = List()), Supply.create) match {
      case Left(e) => Transported(serial, Schemes.unknown, Versions.unknown, None, None, bytes)
      case Right(f) => f._2(bytes)(serial)
    }
  }
}
/**
 * A typeclass for preparing an `A` to be written to a ∅ socket: it must be
 * able to produce a [[Transported]] packet for any instance of `A` together
 * with its message serial.
 */
abstract class Transportable[A] {
  def apply(a: A, serial: Serial): Transported
}
object Transportable {
  // create a Transportable from a (A, Serial) => Transported function
  def apply[A](f: (A,Serial) => Transported): Transportable[A] =
    new Transportable[A] { def apply(a: A, s: Serial) = f(a,s) }
}
| neigor/funnel | zeromq/src/main/scala/Transported.scala | Scala | apache-2.0 | 3,999 |
package org.apache.spark.ml.bundle.ops
import ml.combust.bundle.BundleContext
import ml.combust.bundle.op.OpModel
import ml.combust.bundle.serializer.GraphSerializer
import ml.combust.bundle.dsl._
import org.apache.spark.ml.bundle.{ParamSpec, SimpleParamSpec, SimpleSparkOp, SparkBundleContext}
import org.apache.spark.ml.{PipelineModel, Transformer}
/**
* Created by hollinwilkins on 8/21/16.
*/
class PipelineOp extends SimpleSparkOp[PipelineModel] {
override val Model: OpModel[SparkBundleContext, PipelineModel] = new OpModel[SparkBundleContext, PipelineModel] {
override val klazz: Class[PipelineModel] = classOf[PipelineModel]
override def opName: String = Bundle.BuiltinOps.pipeline
override def store(model: Model, obj: PipelineModel)
(implicit context: BundleContext[SparkBundleContext]): Model = {
val nodes = GraphSerializer(context).write(obj.stages).get
model.withValue("nodes", Value.stringList(nodes))
}
override def load(model: Model)
(implicit context: BundleContext[SparkBundleContext]): PipelineModel = {
val nodes = GraphSerializer(context).read(model.value("nodes").getStringList).
map(_.map(_.asInstanceOf[Transformer])).get.toArray
new PipelineModel(uid = "", stages = nodes)
}
}
override def sparkLoad(uid: String, shape: NodeShape, model: PipelineModel): PipelineModel = {
new PipelineModel(uid = uid, stages = model.stages)
}
override def sparkInputs(obj: PipelineModel): Seq[ParamSpec] = Seq()
override def sparkOutputs(obj: PipelineModel): Seq[SimpleParamSpec] = Seq()
override def load(node: Node, model: PipelineModel)(implicit context: BundleContext[SparkBundleContext]): PipelineModel = {
new PipelineModel(uid = node.name, stages = model.stages)
}
}
| combust/mleap | mleap-spark/src/main/scala/org/apache/spark/ml/bundle/ops/PipelineOp.scala | Scala | apache-2.0 | 1,819 |
package edu.rice.habanero.benchmarks.sieve
import akka.actor.{ActorRef, Props}
import edu.rice.habanero.actors.{AkkaActor, AkkaActorState}
import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner}
/**
 * Sieve-of-Eratosthenes prime benchmark over a pipeline of Akka actors: a
 * producer streams odd candidates through a chain of filter actors, each of
 * which holds up to `numMaxLocalPrimes` discovered primes.
 *
 * @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu)
 */
object SieveAkkaActorBenchmark {
  def main(args: Array[String]) {
    BenchmarkRunner.runBenchmark(args, new SieveAkkaActorBenchmark)
  }
  private final class SieveAkkaActorBenchmark extends Benchmark {
    def initialize(args: Array[String]) {
      SieveConfig.parseArgs(args)
    }
    def printArgInfo() {
      SieveConfig.printArgs()
    }
    def runIteration() {
      val system = AkkaActorState.newActorSystem("Sieve")
      // Producer generates odd candidates up to N; the first filter is seeded
      // with the prime 2 and holds up to M primes before chaining a new actor.
      val producerActor = system.actorOf(Props(new NumberProducerActor(SieveConfig.N)))
      AkkaActorState.startActor(producerActor)
      val filterActor = system.actorOf(Props(new PrimeFilterActor(1, 2, SieveConfig.M)))
      AkkaActorState.startActor(filterActor)
      // Hand the head of the filter chain to the producer to start the run.
      producerActor ! filterActor
      AkkaActorState.awaitTermination(system)
    }
    def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) {
    }
  }
  // Message wrapper for a prime candidate.
  case class LongBox(value: Long)
  private class NumberProducerActor(limit: Long) extends AkkaActor[AnyRef] {
    override def process(msg: AnyRef) {
      msg match {
        case filterActor: ActorRef =>
          // Stream every odd number in [3, limit) then signal termination.
          var candidate: Long = 3
          while (candidate < limit) {
            filterActor ! LongBox(candidate)
            candidate += 2
          }
          filterActor ! "EXIT"
          exit()
      }
    }
  }
  private class PrimeFilterActor(val id: Int, val myInitialPrime: Long, numMaxLocalPrimes: Int) extends AkkaActor[AnyRef] {
    // Next actor in the chain; null until this actor's local storage fills up.
    var nextFilterActor: ActorRef = null
    val localPrimes = new Array[Long](numMaxLocalPrimes)
    var availableLocalPrimes = 1
    localPrimes(0) = myInitialPrime
    private def handleNewPrime(newPrime: Long): Unit = {
      if (SieveConfig.debug)
        println("Found new prime number " + newPrime)
      if (availableLocalPrimes < numMaxLocalPrimes) {
        // Store locally if there is space
        localPrimes(availableLocalPrimes) = newPrime
        availableLocalPrimes += 1
      } else {
        // Create a new actor to store the new prime
        nextFilterActor = context.system.actorOf(Props(new PrimeFilterActor(id + 1, newPrime, numMaxLocalPrimes)))
        AkkaActorState.startActor(nextFilterActor)
      }
    }
    override def process(msg: AnyRef) {
      try {
        msg match {
          case candidate: LongBox =>
            // Candidate survives this stage if no locally held prime divides it.
            val locallyPrime = SieveConfig.isLocallyPrime(candidate.value, localPrimes, 0, availableLocalPrimes)
            if (locallyPrime) {
              if (nextFilterActor != null) {
                // Pass along the chain to detect for 'primeness'
                nextFilterActor ! candidate
              } else {
                // Found a new prime!
                handleNewPrime(candidate.value)
              }
            }
          case x: String =>
            if (nextFilterActor != null) {
              // Signal next actor for termination
              nextFilterActor ! x
            } else {
              // Last actor in the chain: every upstream actor is full, so the
              // total is (full actors) * capacity + primes held here.
              val totalPrimes = ((id - 1) * numMaxLocalPrimes) + availableLocalPrimes
              println("Total primes = " + totalPrimes)
            }
            if (SieveConfig.debug)
              println("Terminating prime actor for number " + myInitialPrime)
            exit()
        }
      } catch {
        case e: Exception => e.printStackTrace()
      }
    }
  }
}
| shamsmahmood/savina | src/main/scala/edu/rice/habanero/benchmarks/sieve/SieveAkkaActorBenchmark.scala | Scala | gpl-2.0 | 3,585 |
package uk.co.appministry.scathon.models.sse
import java.nio.charset.StandardCharsets
import java.util.Scanner
object ServerSentEventParser {
  // One "field: value" line; the colon may be followed by a single optional space.
  private val linePattern = """([^:]+): ?(.*)""".r
  /** Decodes the bytes as UTF-8 and parses them as an event frame. */
  def parse(bytes: Array[Byte]): Option[ServerSentEvent] =
    parse(new String(bytes, StandardCharsets.UTF_8))
  /**
   * Parses a single server-sent event. Recognized field lines (id, event,
   * data, repeat) are folded into the event until a blank line — the event
   * terminator — is seen; everything after the terminator is ignored, as are
   * unrecognized or malformed lines. Returns None when no terminator appears.
   */
  def parse(message: String): Option[ServerSentEvent] = {
    val scanner = new Scanner(message)
    var current = ServerSentEvent()
    var terminated = false
    while (scanner.hasNextLine) {
      scanner.nextLine().trim() match {
        case "" =>
          terminated = true
        case linePattern(field, value) if !terminated =>
          current = field match {
            case "id" => current.copy(id = Some(value))
            case "event" => current.copy(eventType = Some(value))
            case "data" => current.copy(data = Some(value))
            case "repeat" => current.copy(repeat = Some(value))
            case _ => current
          }
        case _ =>
          () // malformed line, or content after the terminator: skipped
      }
    }
    if (terminated) Some(current) else None
  }
}
case class ServerSentEvent(val id: Option[String] = None,
                           val eventType: Option[String] = None,
                           val data: Option[String] = None,
                           val repeat: Option[String] = None) {
  /** Serializes back to wire form: one line per present field, then the blank-line terminator. */
  override def toString(): String = {
    val fields = List(
      "id" -> id,
      "event" -> eventType,
      "data" -> data,
      "repeat" -> repeat
    )
    fields.collect { case (label, Some(v)) => s"$label: $v" }.mkString("\n") + "\n\n"
  }
}
| AppMinistry/scathon | scathon-models/src/main/scala/uk/co/appministry/scathon/models/sse/ServerSentEvent.scala | Scala | apache-2.0 | 1,789 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.dllib.utils.T
import org.scalatest.{FlatSpec, Matchers}
@com.intel.analytics.bigdl.tags.Serial
class DiceCoefficientCriterionSpec extends FlatSpec with Matchers {

  /** Checks `actual` against `expected` element-wise with a 1e-5 tolerance. */
  private def assertTensorsAlmostEqual(actual: Tensor[Float], expected: Tensor[Float]): Unit = {
    actual.map(expected, (lhs, rhs) => {
      lhs should be (rhs +- 1e-5f)
      lhs
    })
    ()
  }

  /** Runs one forward/backward pass and verifies the loss and the gradient. */
  private def checkForwardBackward(input: Tensor[Float],
                                   target: Tensor[Float],
                                   epsilon: Float,
                                   expectedLoss: Float,
                                   expectedGradInput: Tensor[Float]): Unit = {
    val criterion = DiceCoefficientCriterion[Float](epsilon = epsilon)
    val loss = criterion.forward(input, target)
    val gradInput = criterion.backward(input, target)
    loss should be (expectedLoss +- 1e-5f)
    assertTensorsAlmostEqual(gradInput, expectedGradInput)
  }

  "A DiceCoefficientCriterionSpec" should "generate correct output and gradInput vector input" in {
    checkForwardBackward(
      input = Tensor[Float](Storage[Float](Array(0.1f, 0.2f))),
      target = Tensor[Float](Storage[Float](Array(0.2f, 0.3f))),
      epsilon = 1.0f,
      expectedLoss = 0.35555553f,
      expectedGradInput = Tensor[Float](Storage[Float](Array(0.13580247f, 0.02469136f))))
  }

  "A DiceCoefficientCriterionSpec" should "generate correct output and gradInput scala input" in {
    checkForwardBackward(
      input = Tensor[Float](Storage[Float](Array(0.1f))),
      target = Tensor[Float](Storage[Float](Array(0.2f))),
      epsilon = 1.0f,
      expectedLoss = 0.2f,
      expectedGradInput = Tensor[Float](Storage[Float](Array(0.30769231f))))
  }

  "A DiceCoefficientCriterionSpec" should "generate correct output and gradInput batch input" in {
    checkForwardBackward(
      input = Tensor[Float](Storage[Float](Array(0.3f, 0.8f, 0.7f, 1.3f)),
        storageOffset = 1, size = Array(2, 2)),
      target = Tensor[Float](Storage[Float](Array(1.5f, 2.5f, 3.5f, 4.5f)),
        storageOffset = 1, size = Array(2, 2)),
      epsilon = 1.0f,
      expectedLoss = -0.28360658884f,
      expectedGradInput = Tensor[Float](
        Storage[Float](Array(-0.16662188f, -0.33055633f, -0.24545453f, -0.3363636f)),
        storageOffset = 1, size = Array(2, 2)))
  }

  "A DiceCoefficientCriterionSpec" should "generate pass gradient check" in {
    val input = Tensor[Float](Array(3, 3, 3)).rand
    val target = Tensor[Float](Array(3, 3, 3)).rand
    val criterion = DiceCoefficientCriterion[Float](epsilon = 0.1f)
    println("gradient check for input")
    val gradChecker = new GradientChecker(1e-2, 1)
    // NOTE(review): the check result was never asserted in the original; preserved as-is.
    gradChecker.checkCriterion[Float](criterion, input, target)
    ()
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/DiceCoefficientCriterionSpec.scala | Scala | apache-2.0 | 3,563 |
package com.signalcollect.graphproviders
/**
 * Required for integration testing. Returns an undirected grid-structured graph.
 * Vertices are numbered 1..width*height in row-major order (rows of `width`).
 * Example Grid(2,2): Edges=(1,3), (3,1), (1,2), (2,1), (2,4), (4,2), (3,4), (4,3)
 * 1-2
 * | |
 * 3-4
 */
class Grid(val width: Int, height: Int) extends Traversable[(Int, Int)] with Serializable {

  /** Emits every directed edge (both directions of each undirected edge). */
  def foreach[U](f: ((Int, Int)) => U) = {
    val max = width * height
    for (n <- 1 to max) {
      // Vertical edge to the vertex directly below, if any.
      if (n + width <= max) {
        f(n, n + width)
        f(n + width, n)
      }
      // Horizontal edge to the right-hand neighbour. A vertex sits in the
      // last column iff its number is a multiple of `width`.
      // FIX: the guard previously tested `n % height`, which produced edges
      // wrapping across row boundaries for non-square grids.
      if (n % width != 0) {
        f(n, n + 1)
        f(n + 1, n)
      }
    }
  }
}
/**
 * Undirected torus (wrap-around grid) of `width` x `height` vertices with a
 * 4-neighbourhood. Vertices are numbered x + y * width in row-major order.
 */
class Torus(val width: Int, height: Int) extends Traversable[(Int, Int)] with Serializable {

  /** Emits each directed edge (from, to); both directions of every undirected edge appear. */
  def foreach[U](f: ((Int, Int)) => U) = {
    val max = width * height
    for (y <- 0 until height) {
      for (x <- 0 until width) {
        val flattenedCurrentId = flatten((x, y), width)
        for (neighbor <- neighbors(x, y, width, height).map(flatten(_, width))) {
          f(flattenedCurrentId, neighbor)
        }
      }
    }
  }

  /** The four von-Neumann neighbours (up, left, right, down) with wrap-around. */
  def neighbors(x: Int, y: Int, width: Int, height: Int): List[(Int, Int)] = {
    List(
      (x, decrease(y, height)),
      (decrease(x, width), y), (increase(x, width), y),
      (x, increase(y, height)))
  }

  /**
   * Decrements `counter` with wrap-around in [0, limit).
   * FIX: previously wrapped to `width - 1` regardless of `limit`, which broke
   * the y-dimension wrap for non-square tori.
   */
  def decrease(counter: Int, limit: Int): Int = {
    if (counter - 1 >= 0) counter - 1 else limit - 1
  }

  /**
   * Increments `counter` with wrap-around in [0, limit).
   * FIX: previously compared against `width` regardless of `limit`.
   */
  def increase(counter: Int, limit: Int): Int = {
    if (counter + 1 >= limit) 0 else counter + 1
  }

  /** Row-major flattening of a 2-D coordinate to a vertex id. */
  def flatten(coordinates: (Int, Int), width: Int): Int = {
    coordinates._1 + coordinates._2 * width
  }
}
package mur
import scala.collection.mutable
import scala.util.parsing.input.{Position, Positional}
// Intermediate representation for expressions
// Intermediate representation (AST) for expressions; every node carries a
// source position via Positional for error reporting.
sealed trait Expr extends Positional
case class Literal(value: AnyVal) extends Expr // numeric literal: 10, 3.14
case class Brackets(expr: Expr) extends Expr // parenthesised sub-expression: ()
case class Id(name: String) extends Expr // identifier - a string like abc123
case class Sequence(begin: Expr, end: Expr) extends Expr // integer range: {1, 100}
// Arithmetic operator like +,-,*,/,^. Example: 1 + 2
sealed trait Op extends Expr {
  def left: Expr // left operand - 1 in the example
  def right: Expr // right operand - 2 in the example
}
case class Plus(left: Expr, right: Expr) extends Op // 1 + 2
case class Minus(left: Expr, right: Expr) extends Op // 2.0 - 1
case class Mul(left: Expr, right: Expr) extends Op // 3 * 4
case class Div(left: Expr, right: Expr) extends Op // 6/3.3
case class Pow(left: Expr, right: Expr) extends Op // 2 ^ 8
// Applying a lambda function to each element of a sequence:
// map(sequence, x -> x + 1)
case class MapSeq(seq: Expr, x: Id, expr: Expr) extends Expr
// Reducing a sequence by applying of a lambda function:
// reduce(sequence, init, x y -> x + y)
case class ReduceSeq(seq: Expr, init: Expr, x: Id, y: Id, expr: Expr) extends Expr
// Summing all elements of a sequence: sum(sequence)
case class SumSeq(seq: Expr) extends Expr
object Expr {
  // Result of calculation of an expression: value or error
  type Result = Either[Error, ExprValue]

  /**
   * Recursively evaluates `expr` in `context`, returning either the resulting
   * value or the first error encountered (errors carry the position of the
   * sub-expression that produced them).
   */
  def calc(expr: Expr, context: Context): Result = {
    // Stamp the context with the current node's position so that any error
    // reported below points at this sub-expression.
    val ctx = context.copy(pos = expr.pos)
    expr match {
      case Literal(d: Double) => Right(Real(d))
      case Literal(i: Int) => Right(Num(i))
      case Literal(v) => error(ctx, s"invalid literal type (${v.getClass.getName})")
      case Brackets(expr) => calc(expr, ctx)
      case Id(name) =>
        ctx.ids.get(name) match { // Look up the identifier in the ids map in context
          case None => error(ctx, s"identifier `$name` is not defined")
          case Some(value) => Right(value)
        }
      case seq: Sequence =>
        // Materialise begin and end of the expression. Supported only Nums
        val (begin, end) = (calc(seq.begin, ctx), calc(seq.end, ctx))
        (begin, end) match {
          case (Right(Num(bv)), Right(Num(ev))) =>
            if (bv <= ev) // Supported only ascending sequence of numbers
              Right(Range(bv, ev))
            else
              error(ctx, s"wrong params of the sequence. It should be ${bv} <= ${ev}")
          case (error @ Left(_), _) => error
          case (_, error @ Left(_)) => error
          case (_, _) => error(ctx, s"wrong type of sequence begin or/and end")
        }
      case op: Op =>
        // Materialisation of left operand and after that right operand even if
        // the left operand is invalid.
        val (left, right) = (calc(op.left, ctx), calc(op.right, ctx))
        (left, right) match {
          case (error @ Left(_), _) => error
          case (_, error @ Left(_)) => error
          case (Right(lvalue), Right(rvalue)) =>
            op match {
              case _: Plus => ExprValue.plus(ctx, lvalue, rvalue)
              case _: Minus => ExprValue.minus(ctx, lvalue, rvalue)
              case _: Mul => ExprValue.mul(ctx, lvalue, rvalue)
              case _: Div => ExprValue.div(ctx, lvalue, rvalue)
              case _: Pow => ExprValue.pow(ctx, lvalue, rvalue)
            }
        }
      case map: MapSeq => MapReduce.calc(map, ctx)
      case reduce: ReduceSeq => MapReduce.calc(reduce, ctx)
      case sum: SumSeq => MapReduce.calc(sum, ctx)
    }
  }

  /** Wraps `msg` into an error result carrying the current source position. */
  def error(ctx: Context, msg: String): Result = {
    Left(Error(pos = ctx.pos, msg = msg))
  }

  /** Collects the positions of `expr` and all of its sub-expressions into `set`. */
  def positions(expr: Expr, set: mutable.Set[Position]): Unit = {
    set.add(expr.pos)
    expr match {
      case brackets: Brackets =>
        positions(brackets.expr, set)
      case seq: Sequence =>
        positions(seq.begin, set)
        positions(seq.end, set)
      case op: Op =>
        positions(op.left, set)
        positions(op.right, set)
      case map: MapSeq =>
        positions(map.seq, set)
        positions(map.x, set)
        positions(map.expr, set)
      case reduce: ReduceSeq =>
        positions(reduce.seq, set)
        positions(reduce.init, set)
        positions(reduce.x, set)
        positions(reduce.y, set)
        positions(reduce.expr, set)
      case sum: SumSeq =>
        // FIX: SumSeq previously fell through to the default case, so the
        // position of its inner sequence was never collected (inconsistent
        // with MapSeq/ReduceSeq above).
        positions(sum.seq, set)
      case _ => ()
    }
  }
}
| MaxGekk/mur | src/main/scala/mur/Expr.scala | Scala | bsd-2-clause | 4,379 |
package play.api.cache.redis.configuration
private[configuration] object Equals {
  // $COVERAGE-OFF$

  /** True iff `a` and `b` agree on every projection in `property`. */
  @inline
  def check[T](a: T, b: T)(property: (T => Any)*): Boolean =
    property.forall(p => p(a) == p(b))

  // $COVERAGE-ON$
}
| KarelCemus/play-redis | src/main/scala/play/api/cache/redis/configuration/Equals.scala | Scala | mpl-2.0 | 262 |
/*
* Copyright 2011-2012 The myBatis Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.scala.mapping
/** Fully Qualified Identifier */
private[scala] case class FQI(spaceId : String, localId : String) {

  /** The fully qualified form, i.e. "<spaceId>.<localId>". */
  def id = spaceId + "." + localId

  /** Shortest identifier valid when referenced from `externalSpaceId`:
    * the bare local id inside the owning space, the qualified id elsewhere. */
  def resolveIn(externalSpaceId : String) : String =
    if (externalSpaceId == spaceId) localId else id
}
| tempbottle/scala-1 | mybatis-scala-core/src/main/scala/org/mybatis/scala/mapping/FQI.scala | Scala | apache-2.0 | 929 |
/*
* DataTypes.scala is part of grado_informatica_tfg_naturallanguageprocessing (grado_informatica_TFG_NaturalLanguageProcessing).
*
* grado_informatica_TFG_NaturalLanguageProcessing is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* grado_informatica_TFG_NaturalLanguageProcessing is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with grado_informatica_TFG_NaturalLanguageProcessing. If not, see <http://www.gnu.org/licenses/>.
*/
package com.elbauldelprogramador.nlp.utils
/**
* Created by Alejandro Alcalde <contacto@elbauldelprogramador.com> on 8/23/16.
*/
object DataTypes {
  // Parallel per-sentence vectors — presumably (tokens, POS tags, integer
  // head/dependency indices); TODO confirm against the parser's usage.
  type SentenceTokens = (Vector[String], Vector[String], Vector[Int])
  // String-keyed frequency counter.
  type Counter = Map[String, Int]
} | algui91/NLP_Dependency_Parsing | src/main/scala/com/elbauldelprogramador/nlp/utils/DataTypes.scala | Scala | gpl-3.0 | 1,193 |
package com.ing.baker.runtime.serialization.protomappings
import com.ing.baker.runtime.akka.actor.protobuf
import com.ing.baker.runtime.scaladsl.IngredientInstance
import com.ing.baker.runtime.serialization.ProtoMap.{ctxFromProto, ctxToProto, versioned}
import com.ing.baker.runtime.serialization.ProtoMap
import scala.util.Try
/** Protobuf (de)serialisation for [[IngredientInstance]]. */
class IngredientInstanceMapping extends ProtoMap[IngredientInstance, protobuf.Ingredient] {

  val companion = protobuf.Ingredient

  /** Serialises the ingredient's name and value; the middle field is unused. */
  override def toProto(data: IngredientInstance): protobuf.Ingredient = {
    val serialisedValue = ctxToProto(data.value)
    protobuf.Ingredient(Option(data.name), None, Some(serialisedValue))
  }

  /** Deserialises an ingredient, failing when `name` or `value` is absent. */
  override def fromProto(message: protobuf.Ingredient): Try[IngredientInstance] =
    versioned(message.name, "name").flatMap { name =>
      versioned(message.value, "value").flatMap(ctxFromProto).map { value =>
        IngredientInstance(name, value)
      }
    }
}
| ing-bank/baker | core/baker-interface/src/main/scala/com/ing/baker/runtime/serialization/protomappings/IngredientInstanceMapping.scala | Scala | mit | 894 |
package models
package product
import javax.persistence.Entity
import javax.persistence.MappedSuperclass
import models.BaseEntity
import javax.persistence.Basic
import javax.persistence.Column
import java.util.{Set => JSet}
import java.util.LinkedHashSet
import javax.persistence.OneToMany
import javax.persistence.CascadeType
import javax.persistence.Transient
import scala.collection.JavaConversions._
import models.product.traits.BasicDetails
import javax.persistence.OneToOne
@Entity
class Product extends BaseEntity with BasicDetails {
  // Non-nullable column; products are enabled by default.
  @Column(nullable = false)
  var enabled: Boolean = true
  // Non-nullable column; products are not featured by default.
  @Column(nullable = false)
  var featured: Boolean = false
  // All SKUs of this product (owning side is Sku.product); SKUs are
  // deleted together with the product.
  @OneToMany(mappedBy = "product", cascade = Array(CascadeType.REMOVE))
  var allSkus: JSet[Sku] = new LinkedHashSet
  // The subset of SKUs whose `enabled` flag is set.
  def activeSkus = {
    allSkus.filter(_.enabled)
  }
  // Mandatory default SKU; removed together with the product.
  @OneToOne(optional = false, cascade = Array(CascadeType.REMOVE))
  var defaultSku: Sku = _
}
} | Bhashit/play-commerce | app/models/product/Product.scala | Scala | mit | 933 |
package distributer
import java.io.{File, FileInputStream, RandomAccessFile}
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.util.jar.{JarEntry, JarInputStream}
/**
* Created by #GrowinScala
*/
object JarFileHandler {

  private val ClassExtension = ".class"

  /**
   * Returns the fully qualified names of all `.class` entries in `file`
   * (path separators converted to dots, extension stripped), or an empty
   * list when the jar cannot be read.
   *
   * FIX: the input stream is now closed via try/finally, so it no longer
   * leaks when reading an entry throws.
   */
  def getClassNames(file: File): List[String] = {
    try {
      val jis: JarInputStream = new JarInputStream(new FileInputStream(file))
      try {
        @annotation.tailrec
        def collect(acc: List[String]): List[String] =
          Option(jis.getNextJarEntry) match {
            case None => acc.reverse
            case Some(entry) =>
              val originalName = entry.getName
              jis.closeEntry()
              if (originalName.endsWith(ClassExtension)) {
                val correctedName =
                  originalName
                    .replace(ClassExtension, "")
                    .replace('/', '.')
                    .replace('\\', '.')
                collect(correctedName :: acc)
              } else {
                collect(acc)
              }
          }
        collect(Nil)
      } finally {
        jis.close()
      }
    } catch {
      case e: Exception =>
        // Preserve the original best-effort behaviour: log and return empty.
        e.printStackTrace()
        List()
    }
  }

  /**
   * Reads the whole jar into a byte array using a read-only memory mapping.
   *
   * FIX: the RandomAccessFile is now closed via try/finally (closing it also
   * closes its channel), so the file handle no longer leaks on failure; the
   * dead commented-out code was removed.
   */
  def getJarBytes(file: File): Array[Byte] = {
    val raf = new RandomAccessFile(file, "r")
    try {
      val roChannel: FileChannel = raf.getChannel
      val roBuf = roChannel.map(FileChannel.MapMode.READ_ONLY, 0, roChannel.size())
      roBuf.clear()
      val jarAsBytes = Array.ofDim[Byte](roBuf.capacity())
      roBuf.get(jarAsBytes, 0, jarAsBytes.length)
      jarAsBytes
    } finally {
      raf.close()
    }
  }
}
| exocute/Toolkit | src/main/scala/distributer/JarFileHandler.scala | Scala | bsd-2-clause | 1,989 |
package apdl.parser
import scala.io.Source
import scala.util.matching.Regex
/*
* Basically, all we are doing here is to replace the @include <file> by the content of the <file> itself...
*/
class IncludeProcessor {

  /** Matches `@include "<path>.apdl"` directives; group 1 captures the path. */
  val include : Regex = "@include \"(.*\\.apdl)\"".r

  /**
   * Replaces every `@include "file.apdl"` directive in `code` with the content
   * of the referenced file, surrounded by newlines.
   *
   * Fixes over the previous version:
   *  - the opened `Source` is now closed (it used to leak a file handle);
   *  - the content is passed through `Regex.quoteReplacement`, so `$` and `\`
   *    characters inside an included file are inserted verbatim instead of
   *    being interpreted as group references by `replaceAllIn`.
   */
  def process(code: String): String = {
    include.replaceAllIn(code, rm => {
      val source = Source.fromFile(rm.group(1))
      try Regex.quoteReplacement(s"\n${source.mkString}\n")
      finally source.close()
    })
  }
}
| SnipyJulmy/APDL | src/main/scala/apdl/parser/IncludeProcessor.scala | Scala | lgpl-3.0 | 404 |
package com.xhachi.gae4s.taskqueue
import com.google.appengine.api.taskqueue.{TaskHandle, TaskOptions, TransactionalTaskException}
import com.google.appengine.tools.development.testing.{LocalDatastoreServiceTestConfig, LocalTaskQueueTestConfig}
import com.xhachi.gae4s.datastore.Datastore
import com.xhachi.gae4s.tests.AppEngineTestSuite
import org.scalatest.FunSuite
class TaskQueueTest extends FunSuite with AppEngineTestSuite {
  // Registers both local test services: the datastore service is required
  // because several tests enqueue tasks from inside datastore transactions.
  override def getConfig = new LocalDatastoreServiceTestConfig :: new LocalTaskQueueTestConfig :: super.getConfig
  // The default queue reports the name "default".
  test("TaskQueueのデフォルトの名称が取得できること") {
    val name = TaskQueue.queueName
    assert(name == "default")
  }
  // A named queue reports the name it was created with.
  test("TaskQueueの名称が取得できること") {
    val name = TaskQueue("queue1").queueName
    assert(name == "queue1")
  }
  // A task can be added to the queue.
  test("TaskQueueにaddできること") {
    TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
  }
  // A task added outside a transaction can be deleted (the countdown keeps
  // it pending so the delete happens before execution).
  test("TaskQueueにaddしてdeleteできること") {
    val handle = TaskQueue.add(TaskOptions.Builder.withUrl("/task/null").countdownMillis(10000))
    val actual = TaskQueue.delete(handle)
    assert(actual)
  }
  // A task added inside a transaction cannot be deleted while it is open.
  test("TaskQueueにトランザクション内でaddしてdeleteできないこと") {
    Datastore.tx {
      val handle = TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
      val actual = TaskQueue.delete(handle)
      assert(!actual)
    }
  }
  // A transactionally-added task can be deleted once the transaction ends.
  test("TaskQueueにトランザクション内でaddしてトランザクション外でdeleteできること") {
    val handle: TaskHandle = Datastore.tx {
      TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
    }
    val actual = TaskQueue.delete(handle)
    assert(actual)
  }
  // Queue statistics can be fetched.
  test("TaskQueueの統計情報を取得できること") {
    TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
    val statistics = TaskQueue.fetchStatistics
    assert(statistics != null)
  }
  // Five tasks can be added within a single transaction.
  test("TaskQueueにトランザクション内で5回addできること") {
    Datastore.tx {
      1 to 5 foreach {
        _ => TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
      }
    }
  }
  // The sixth transactional add fails with TransactionalTaskException.
  test("TaskQueueにトランザクション内で6回addしたらエラーになること") {
    Datastore.tx {
      1 to 5 foreach {
        _ => TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
      }
      intercept[TransactionalTaskException] {
        TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
      }
    }
  }
  // addWithoutTx is excluded from the surrounding transaction, so the
  // five-task transactional limit is not exceeded here.
  test("TaskQueue#addWithoutTxがトランザクションから除外されていること") {
    Datastore.tx {
      TaskQueue.addWithoutTx(TaskOptions.Builder.withUrl("/task/null"))
      1 to 5 foreach {
        _ => TaskQueue.add(TaskOptions.Builder.withUrl("/task/null"))
      }
    }
  }
  // Deleting a task that was never registered fails.
  test("登録してないタスクを削除すると失敗すること") {
    assert(!TaskQueue.delete("hoge"))
  }
  // A task registered under a name can be deleted by that name.
  test("名前付きのタスクを登録して削除すると成功すること") {
    TaskQueue.add(TaskOptions.Builder.withUrl("/task/null").taskName("hoge"))
    assert(TaskQueue.delete("hoge"))
  }
} | thachi/gae4s | core/src/test/scala/com/xhachi/gae4s/taskqueue/TaskQueueTest.scala | Scala | apache-2.0 | 3,088 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.logical
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.plan.optimize.program.{FlinkBatchProgram, FlinkHepRuleSetProgramBuilder, HEP_RULES_EXECUTION_TYPE}
import org.apache.flink.table.planner.utils.{TableConfigUtils, TableTestBase}
import org.apache.calcite.plan.hep.HepMatchOrder
import org.apache.calcite.tools.RuleSets
import org.junit.{Before, Test}
/**
 * Test for [[FlinkSemiAntiJoinFilterTransposeRule]].
 */
class FlinkSemiAntiJoinFilterTransposeRuleTest extends TableTestBase {
  private val util = batchTestUtil()
  @Before
  def setup(): Unit = {
    // Run the default rewrite program first, then append a HEP program
    // (rule-sequence, bottom-up) containing the project- and
    // filter-transpose rules so their effect can be observed in the plan.
    util.buildBatchProgram(FlinkBatchProgram.DEFAULT_REWRITE)
    val calciteConfig = TableConfigUtils.getCalciteConfig(util.tableEnv.getConfig)
    calciteConfig.getBatchProgram.get.addLast(
      "rules",
      FlinkHepRuleSetProgramBuilder.newBuilder
        .setHepRulesExecutionType(HEP_RULES_EXECUTION_TYPE.RULE_SEQUENCE)
        .setHepMatchOrder(HepMatchOrder.BOTTOM_UP)
        .add(RuleSets.ofList(
          FlinkSemiAntiJoinProjectTransposeRule.INSTANCE,
          FlinkSemiAntiJoinFilterTransposeRule.INSTANCE))
        .build()
    )
    // Two three-column sources used by the IN / NOT IN sub-queries below.
    util.addTableSource[(Int, Long, String)]("MyTable1", 'a, 'b, 'c)
    util.addTableSource[(Int, Long, String)]("MyTable2", 'd, 'e, 'f)
  }
  @Test
  def testSemiJoinFilterTranspose(): Unit = {
    // IN sub-query over a filtered input (semi join above a Filter).
    val sqlQuery =
      """
        |SELECT * FROM (SELECT * FROM MyTable1 WHERE a > 10) t
        | WHERE b IN (SELECT e FROM MyTable2)
      """.stripMargin
    util.verifyRelPlan(sqlQuery)
  }
  @Test
  def testAntiJoinFilterTranspose(): Unit = {
    // NOT IN sub-query over a filtered input (anti join above a Filter).
    val sqlQuery =
      """
        |SELECT * FROM (SELECT * FROM MyTable1 WHERE a > 10) t
        | WHERE b NOT IN (SELECT e FROM MyTable2)
      """.stripMargin
    util.verifyRelPlan(sqlQuery)
  }
}
| apache/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/rules/logical/FlinkSemiAntiJoinFilterTransposeRuleTest.scala | Scala | apache-2.0 | 2,693 |
package org.joda.time
/**
 * Writable counterpart of [[ReadableInterval]]: mutators for the interval's
 * endpoints, its chronology, and duration/period-based adjustments.
 */
trait ReadWritableInterval extends ReadableInterval {
  // Sets both endpoints from millisecond instants.
  def setInterval(startInstant: Long, endInstant: Long): Unit
  // Copies both endpoints from another interval.
  def setInterval(interval: ReadableInterval): Unit
  // Sets both endpoints from instants.
  def setInterval(startInstant: ReadableInstant,
                  endInstant: ReadableInstant): Unit
  // Sets the chronology used to interpret the interval.
  def setChronology(chrono: Chronology): Unit
  // Sets the start endpoint from a millisecond instant.
  def setStartMillis(millisInstant: Long): Unit
  // Sets the start endpoint from an instant.
  def setStart(instant: ReadableInstant): Unit
  // Sets the end endpoint from a millisecond instant.
  def setEndMillis(millisInstant: Long): Unit
  // Sets the end endpoint from an instant.
  def setEnd(instant: ReadableInstant): Unit
  // Presumably moves the end so the interval spans `duration` after the
  // start — confirm against the implementing class.
  def setDurationAfterStart(duration: ReadableDuration): Unit
  // Presumably moves the start so the interval spans `duration` before the
  // end — confirm against the implementing class.
  def setDurationBeforeEnd(duration: ReadableDuration): Unit
  // Period-based variant of setDurationAfterStart.
  def setPeriodAfterStart(period: ReadablePeriod): Unit
  // Period-based variant of setDurationBeforeEnd.
  def setPeriodBeforeEnd(period: ReadablePeriod): Unit
}
| mdedetrich/soda-time | shared/src/main/scala/org/joda/time/ReadWritableInterval.scala | Scala | bsd-2-clause | 773 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import FailureMessages._
import Matchers._
import org.scalactic.CheckedEquality
class ShouldContainOnlyTypeCheckSpec extends Spec with CheckedEquality {
// Checking for a specific size
object `The 'contain only (1, <element>)' syntax` {
object `should give a type error if the types are not compatible` {
      def `on Array` {
        // Each statement is a compile-time-only check: combining an Int
        // collection with String expectations (or vice versa) must fail to
        // type-check under CheckedEquality, in every and/or/not combination.
        """Array(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
        """Array(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
        """Array(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
        """Array(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
        """Array(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
        """Array(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
        """Array(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
        """Array(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
        """Array(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
        """Array(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
        """Array(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
        """Array(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
        """Array(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
        """Array(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
        """Array(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
        """Array(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
        """Array(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
        """Array(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
        """Array(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
        """Array(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
        """Array(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
        """Array(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
        """Array(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
        """Array(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
        """Array(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
        """Array(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
        """Array(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
        """Array(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
      }
/*
def `on scala.collection.immutable.Set` {
"""Set(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
"""Set(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
"""Set(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
"""Set(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
"""Set(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
"""Set(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
"""Set(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
"""Set(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
"""Set(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
"""Set(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
"""Set(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
"""Set(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
"""Set(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
"""Set(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
"""Set(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
"""Set(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
"""Set(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
"""Set(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
"""Set(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""Set(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
"""Set(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
"""Set(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
"""Set(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
"""Set(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
"""Set(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""Set(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
"""Set(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
"""Set(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
}
def `on scala.collection.mutable.Set` {
import scala.collection.mutable
"""mutable.Set(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
"""mutable.Set(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
"""mutable.Set(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
"""mutable.Set(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
"""mutable.Set(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
"""mutable.Set(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""mutable.Set(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
"""mutable.Set(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
}
def `on scala.collection.Set` {
val set: scala.collection.Set[Int] = Set(1, 2)
"""set should contain only ("1", "2")""" shouldNot typeCheck
"""set should (contain only ("1", "2"))""" shouldNot typeCheck
"""set should not { contain only ("1", "3") }""" shouldNot typeCheck
"""set should not contain only ("1", "3")""" shouldNot typeCheck
"""set should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
"""set should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
"""set should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
"""set should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
"""set should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
"""set should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
"""set should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
"""set should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
"""set should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
"""set should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
"""set should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
"""set should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
"""set should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
"""set should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
"""set should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""set should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
"""set should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
"""set should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
"""set should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
"""set should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
"""set should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""set should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
"""set should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
"""set should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
}
    def `on scala.collection.immutable.HashSet` {
      // Compile-time negative tests: each quoted snippet uses String elements
      // (or mixes String and Int) in `contain only` against a HashSet[Int];
      // `shouldNot typeCheck` asserts the snippet fails to type check.
      import scala.collection.immutable.HashSet
      """HashSet(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
      """HashSet(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
      """HashSet(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
      """HashSet(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
      // and/or combinations: one operand's element type mismatches the set.
      """HashSet(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
      """HashSet(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
      """HashSet(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
      """HashSet(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
      """HashSet(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
      """HashSet(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
      """HashSet(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
      """HashSet(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
      """HashSet(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
      """HashSet(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
      """HashSet(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
      """HashSet(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """HashSet(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
      """HashSet(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
      """HashSet(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """HashSet(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
      """HashSet(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
      """HashSet(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
      """HashSet(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
      """HashSet(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
      """HashSet(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """HashSet(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
      """HashSet(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
      """HashSet(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
    }
    def `on scala.collection.mutable.HashSet` {
      // Compile-time negative tests: same matrix as the immutable HashSet block,
      // exercised against mutable.HashSet[Int]; every snippet mixes String and
      // Int element types, so `shouldNot typeCheck` must hold for each.
      import scala.collection.mutable
      """mutable.HashSet(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
      // and/or combinations: one operand's element type mismatches the set.
      """mutable.HashSet(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """mutable.HashSet(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
      """mutable.HashSet(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
    }
    def `on List` {
      // Compile-time negative tests: `contain only` with String elements (or a
      // String/Int mix) applied to List[Int] must fail to type check;
      // `shouldNot typeCheck` asserts that for every quoted snippet.
      """List(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
      """List(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
      """List(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
      """List(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
      // and/or combinations: one operand's element type mismatches the list.
      """List(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
      """List(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
      """List(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
      """List(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
      """List(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
      """List(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
      """List(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
      """List(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
      """List(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
      """List(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
      """List(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
      """List(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """List(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
      """List(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
      """List(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """List(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
      """List(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
      """List(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
      """List(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
      """List(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
      """List(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """List(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
      """List(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
      """List(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
    }
    def `on Vector` {
      // Compile-time negative tests: same matrix as the List block, exercised
      // against Vector[Int]; every snippet mixes String and Int element types,
      // so `shouldNot typeCheck` must hold for each.
      """Vector(1, 2) should contain only ("1", "2")""" shouldNot typeCheck
      """Vector(1, 2) should (contain only ("1", "2"))""" shouldNot typeCheck
      """Vector(1, 2) should not { contain only ("1", "3") }""" shouldNot typeCheck
      """Vector(1, 2) should not contain only ("1", "3")""" shouldNot typeCheck
      // and/or combinations: one operand's element type mismatches the vector.
      """Vector(1, 2) should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
      """Vector(1, 2) should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
      """Vector(1, 2) should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
      """Vector(1, 2) should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
      """Vector(1, 2) should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
      """Vector(1, 2) should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
      """Vector(1, 2) should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
      """Vector(1, 2) should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
      """Vector(1, 2) should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
      """Vector(1, 2) should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
      """Vector(1, 2) should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
      """Vector(1, 2) should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """Vector(1, 2) should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
      """Vector(1, 2) should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
      """Vector(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """Vector(1, 2) should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
      """Vector(1, 2) should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
      """Vector(1, 2) should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
      """Vector(1, 2) should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
      """Vector(1, 2) should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
      """Vector(1, 2) should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """Vector(1, 2) should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
      """Vector(1, 2) should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
      """Vector(1, 2) should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
    }
    def `on java.util.List` {
      // Compile-time negative tests against a Java collection: the snippets
      // reference this local javaList: java.util.List[Int] and apply
      // `contain only` with String elements, which must fail to type check.
      val javaList: java.util.List[Int] = new java.util.ArrayList
      javaList.add(1)
      javaList.add(2)
      """javaList should contain only ("1", "2")""" shouldNot typeCheck
      """javaList should (contain only ("1", "2"))""" shouldNot typeCheck
      """javaList should not { contain only ("1", "3") }""" shouldNot typeCheck
      """javaList should not contain only ("1", "3")""" shouldNot typeCheck
      // and/or combinations: one operand's element type mismatches the list.
      """javaList should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
      """javaList should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
      """javaList should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
      """javaList should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
      """javaList should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
      """javaList should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
      """javaList should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
      """javaList should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
      """javaList should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
      """javaList should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
      """javaList should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
      """javaList should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """javaList should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
      """javaList should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
      """javaList should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """javaList should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
      """javaList should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
      """javaList should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
      """javaList should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
      """javaList should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
      """javaList should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
      """javaList should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
      """javaList should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
      """javaList should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
    }
    def `on scala.collection.immutable.Map ` {
      // Compile-time negative tests for maps: `contain only` with entries whose
      // key and/or value types do not match the map's (String -> Int here, or
      // Int -> String in the swapped variant) must fail to type check.
      """Map("one" -> 1, "two" -> 2) should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only (1 -> 1, 2 -> 2))""" shouldNot typeCheck
      """Map(1 -> "one", 2 -> "two") should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only ("1" -> "1", "two" -> "two"))""" shouldNot typeCheck
      """Map(1 -> "one", 2 -> "two") should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should not { contain only (1 -> 1, 3 -> 3) }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should not contain only (1 -> 1, 3 -> 3)""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should not { contain only ("1" -> "1", "three" -> "three") }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should not contain only ("1" -> "1", "three" -> "three")""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
      // and/or combinations: one operand's entry type mismatches the map.
      """Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
      """Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
    }
def `on scala.collection.mutable.Map ` {
import scala.collection.mutable
"""mutable.Map("one" -> 1, "two" -> 2) should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only (1 -> 1, 2 -> 2))""" shouldNot typeCheck
"""mutable.Map(1 -> "one", 2 -> "two") should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only ("1" -> "1", "two" -> "two"))""" shouldNot typeCheck
"""mutable.Map(1 -> "one", 2 -> "two") should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should not { contain only (1 -> 1, 3 -> 3) }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should not contain only (1 -> 1, 3 -> 3)""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should not { contain only ("1" -> "1", "three" -> "three") }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should not contain only ("1" -> "1", "three" -> "three")""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { contain only ("1", "cat" -> 77) or (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((contain only ("1", "cat" -> 77)) or (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (contain only ("1", "cat" -> 77) or contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""mutable.Map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
}
def `on scala.collection.Map ` {
val map: scala.collection.Map[String, Int] = Map("one" -> 1, "two" -> 2)
"""map("one" -> 1, "two" -> 2) should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only (1 -> 1, 2 -> 2))""" shouldNot typeCheck
"""map(1 -> "one", 2 -> "two") should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only ("1" -> "1", "two" -> "two"))""" shouldNot typeCheck
"""map(1 -> "one", 2 -> "two") should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should not { contain only (1 -> 1, 3 -> 3) }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should not contain only (1 -> 1, 3 -> 3)""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should not { contain only ("1" -> "1", "three" -> "three") }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should not contain only ("1" -> "1", "three" -> "three")""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""map("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
}
    def `on scala.collection.immutable.HashMap ` {
      // Compile-time negative tests for HashMap, same matrix as the immutable
      // Map block: `contain only` with entries whose key and/or value types do
      // not match the map's must fail to type check.
      import scala.collection.immutable.HashMap
      """HashMap("one" -> 1, "two" -> 2) should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only (1 -> 1, 2 -> 2))""" shouldNot typeCheck
      """HashMap(1 -> "one", 2 -> "two") should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> "1", "two" -> "two"))""" shouldNot typeCheck
      """HashMap(1 -> "one", 2 -> "two") should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should not { contain only (1 -> 1, 3 -> 3) }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should not contain only (1 -> 1, 3 -> 3)""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should not { contain only ("1" -> "1", "three" -> "three") }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should not contain only ("1" -> "1", "three" -> "three")""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
      // and/or combinations: one operand's entry type mismatches the map.
      """HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "two" -> 2) and (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "two" -> 2)) and (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "two" -> 2) and contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
      // Negated forms combined with and/or must be rejected the same way.
      """HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
      """HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
    }
def `on scala.collection.mutable.HashMap ` {
import scala.collection.mutable
"""mutable.HashMap("one" -> 1, "two" -> 2) should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only (1 -> 1, 2 -> 2))""" shouldNot typeCheck
"""mutable.HashMap(1 -> "one", 2 -> "two") should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should contain only ("1" -> "1", "two" -> "two")""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> "1", "two" -> "two"))""" shouldNot typeCheck
"""mutable.HashMap(1 -> "one", 2 -> "two") should contain only (1 -> 1, 2 -> 2)""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should not { contain only (1 -> 1, 3 -> 3) }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should not contain only (1 -> 1, 3 -> 3)""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should not { contain only ("1" -> "1", "three" -> "three") }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should not contain only ("1" -> "1", "three" -> "three")""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { contain only ("1", "two" -> 2) and (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((contain only ("1", "two" -> 2)) and (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only ("1", "two" -> 2) and contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { contain only ("1", "two" -> 2) and (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((contain only ("1", "two" -> 2)) and (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only ("1", "two" -> 2) and contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only (1 -> 1, 1 -> 1)) }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only (1 -> 1, 1 -> 1)))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only (1 -> 1, 1 -> 1))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { contain only ("1" -> 1, "cat" -> 77) or (contain only ("1" -> "1", "one" -> "one")) }""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((contain only ("1" -> 1, "cat" -> 77)) or (contain only ("1" -> "1", "one" -> "one")))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (contain only ("1" -> 1, "cat" -> 77) or contain only ("1" -> "1", "one" -> "one"))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "five" -> 5) } and not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "five" -> 5)) and (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "five" -> 5) and not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only (1 -> 1, 3 -> 3) }}""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only (1 -> 1, 3 -> 3)))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only (1 -> 1, 3 -> 3))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should { not { contain only ("1" -> 1, "two" -> 2) } or not { contain only ("1" -> "1", "three" -> "three") }}""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should ((not contain only ("1" -> 1, "two" -> 2)) or (not contain only ("1" -> "1", "three" -> "three")))""" shouldNot typeCheck
"""mutable.HashMap("one" -> 1, "two" -> 2) should (not contain only ("1" -> 1, "two" -> 2) or not contain only ("1" -> "1", "three" -> "three"))""" shouldNot typeCheck
}
def `on java.util.Set` {
val javaSet: java.util.Set[Int] = new java.util.HashSet
javaSet.add(1)
javaSet.add(2)
"""javaSet should contain only ("1", "2")""" shouldNot typeCheck
"""javaSet should (contain only ("1", "2"))""" shouldNot typeCheck
"""javaSet should not { contain only ("1", "3") }""" shouldNot typeCheck
"""javaSet should not contain only ("1", "3")""" shouldNot typeCheck
"""javaSet should { contain only ("1", "2") and (contain only (1, 1)) }""" shouldNot typeCheck
"""javaSet should ((contain only ("1", "2")) and (contain only (1, 1)))""" shouldNot typeCheck
"""javaSet should (contain only ("1", "2") and contain only (1, 1))""" shouldNot typeCheck
"""javaSet should { contain only (1, 2) and (contain only ("1", "1")) }""" shouldNot typeCheck
"""javaSet should ((contain only (1, 2)) and (contain only ("1", "1")))""" shouldNot typeCheck
"""javaSet should (contain only (1, 2) and contain only ("1", "1"))""" shouldNot typeCheck
"""javaSet should { contain only ("1", "77") or (contain only (1, 2)) }""" shouldNot typeCheck
"""javaSet should ((contain only ("1", "77")) or (contain only (1, 2)))""" shouldNot typeCheck
"""javaSet should (contain only ("1", "77") or contain only (1, 2))""" shouldNot typeCheck
"""javaSet should { contain only (1, 77) or (contain only ("1", "2")) }""" shouldNot typeCheck
"""javaSet should ((contain only (1, 77)) or (contain only ("1", "2")))""" shouldNot typeCheck
"""javaSet should (contain only (1, 77) or contain only ("1", "2"))""" shouldNot typeCheck
"""javaSet should { not { contain only ("1", "5") } and not { contain only (1, 3) }}""" shouldNot typeCheck
"""javaSet should ((not contain only ("1", "5")) and (not contain only (1, 3)))""" shouldNot typeCheck
"""javaSet should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""javaSet should { not { contain only (1, 5) } and not { contain only ("1", "3") }}""" shouldNot typeCheck
"""javaSet should ((not contain only (1, 5)) and (not contain only ("1", "3")))""" shouldNot typeCheck
"""javaSet should { not { contain only (1, 1) } or not { contain only ("1", "3") }}""" shouldNot typeCheck
"""javaSet should ((not contain only ("1", "1")) or (not contain only (1, 3)))""" shouldNot typeCheck
"""javaSet should (not contain only ("1", "3") or not contain only (1, 2))""" shouldNot typeCheck
"""javaSet should (not contain only ("1", "5") and not contain only (1, 3))""" shouldNot typeCheck
"""javaSet should ((not contain only (1, 1)) or (not contain only ("1", "3")))""" shouldNot typeCheck
"""javaSet should (not contain only (1, 3) or not contain only ("1", "2"))""" shouldNot typeCheck
"""javaSet should (not contain only (1, 5) and not contain only ("1", "3"))""" shouldNot typeCheck
}
def `on java.util.Map` {
val javaMap: java.util.Map[String, Int] = new java.util.HashMap
javaMap.put("one",1)
javaMap.put("two", 2)
"""javaMap should contain only (Entry(3, 3), Entry(2, 2))""" shouldNot typeCheck
"""javaMap should (contain only (Entry(3, 3), Entry(2, 2)))""" shouldNot typeCheck
"""javaMap should contain only (Entry(3, 3), Entry("two", "two"))""" shouldNot typeCheck
"""javaMap should contain only (Entry(3, 3), Entry("two", "two"))""" shouldNot typeCheck
"""javaMap should (contain only (Entry(3, 3), Entry("two", "two")))""" shouldNot typeCheck
"""javaMap should contain only (Entry(3, 3), Entry(2, 2))""" shouldNot typeCheck
"""javaMap should not { contain only (Entry(1, 1), Entry(3, 3)) }""" shouldNot typeCheck
"""javaMap should not contain only (Entry(1, 1), Entry(3, 3))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry(1, 1), Entry(3, 3)))""" shouldNot typeCheck
"""javaMap should not { contain only (Entry(1, 1), Entry("three", "three")) }""" shouldNot typeCheck
"""javaMap should not contain only (Entry(1, 1), Entry("three", "three"))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry(1, 1), Entry("three", "three")))""" shouldNot typeCheck
"""javaMap should { contain only (Entry("one", 1), Entry("two", 2)) and (contain only (Entry(2, 2), Entry(1, 1))) }""" shouldNot typeCheck
"""javaMap should ((contain only (Entry("one", 1), Entry("two", 2))) and (contain only (Entry(2, 2), Entry(1, 1))))""" shouldNot typeCheck
"""javaMap should (contain only (Entry("one", 1), Entry("two", 2)) and contain only (Entry(2, 2), Entry(1, 1)))""" shouldNot typeCheck
"""javaMap should { contain only (Entry("one", 1), Entry("two", 2)) and (contain only (Entry("two", "two"), Entry("one", "one"))) }""" shouldNot typeCheck
"""javaMap should ((contain only (Entry("one", 1), Entry("two", 2))) and (contain only (Entry("two", "two"), Entry("one", "one"))))""" shouldNot typeCheck
"""javaMap should (contain only (Entry("one", 1), Entry("two", 2)) and contain only (Entry("two", "two"), Entry("one", "one")))""" shouldNot typeCheck
"""javaMap should { contain only (Entry("dog", 99), Entry("cat", 77)) or (contain only (Entry(2, 2), Entry(1, 1))) }""" shouldNot typeCheck
"""javaMap should ((contain only (Entry("dog", 99), Entry("cat", 77))) or (contain only (Entry(2, 2), Entry(1, 1))))""" shouldNot typeCheck
"""javaMap should (contain only (Entry("dog", 99), Entry("cat", 77)) or contain only (Entry(2, 2), Entry(1, 1)))""" shouldNot typeCheck
"""javaMap should { contain only (Entry("dog", 99), Entry("cat", 77)) or (contain only (Entry("two", "two"), Entry("one", "one"))) }""" shouldNot typeCheck
"""javaMap should ((contain only (Entry("dog", 99), Entry("cat", 77))) or (contain only (Entry("two", "two"), Entry("one", "one"))))""" shouldNot typeCheck
"""javaMap should (contain only (Entry("dog", 99), Entry("cat", 77)) or contain only (Entry("two", "two"), Entry("one", "one")))""" shouldNot typeCheck
"""javaMap should { not { contain only (Entry("one", 1), Entry("five", 5)) } and not { contain only (Entry(1, 1), Entry(3, 3)) }}""" shouldNot typeCheck
"""javaMap should ((not contain only (Entry("one", 1), Entry("five", 5))) and (not contain only (Entry(1, 1), Entry(3, 3))))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry("one", 1), Entry("five", 5)) and not contain only (Entry(1, 1), Entry(3, 3)))""" shouldNot typeCheck
"""javaMap should { not { contain only (Entry("one", 1), Entry("five", 5)) } and not { contain only (Entry("one", "one"), Entry("three", "three")) }}""" shouldNot typeCheck
"""javaMap should ((not contain only (Entry("one", 1), Entry("five", 5))) and (not contain only (Entry("one", "one"), Entry("three", "three"))))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry("one", 1), Entry("five", 5)) and not contain only (Entry("one", "one"), Entry("three", "three")))""" shouldNot typeCheck
"""javaMap should { not { contain only (Entry("one", 1), Entry(Entry("two", 2))) } or not { contain only (Entry(1, 1), Entry(3, 3)) }}""" shouldNot typeCheck
"""javaMap should ((not contain only (Entry("one", 1), Entry(Entry("two", 2)))) or (not contain only (Entry(1, 1), Entry(3, 3))))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry("one", 1), Entry(Entry("two", 2))) or not contain only (Entry(1, 1), Entry(3, 3)))""" shouldNot typeCheck
"""javaMap should { not { contain only (Entry("one", 1), Entry(Entry("two", 2))) } or not { contain only (Entry("one", "one"), Entry("three", "three")) }}""" shouldNot typeCheck
"""javaMap should ((not contain only (Entry("one", 1), Entry(Entry("two", 2)))) or (not contain only (Entry("one", "one"), Entry("three", "three"))))""" shouldNot typeCheck
"""javaMap should (not contain only (Entry("one", 1), Entry(Entry("two", 2))) or not contain only (Entry("one", "one"), Entry("three", "three")))""" shouldNot typeCheck
}
*/
}
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/ShouldContainOnlyTypeCheckSpec.scala | Scala | apache-2.0 | 63,352 |
/*
* Copyright (C) 2015 Noorq, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.action
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.reflect.runtime.universe
import play.api.mvc.ActionBuilder
import play.api.mvc.ActionFilter
import play.api.mvc.ActionTransformer
import play.api.mvc.Request
import play.api.mvc.Result
import play.api.mvc.Results
import play.api.mvc.WrappedRequest
import scaldi.Injectable
import scaldi.Injector
import services.DomainService
import services.DomainId
import services.DomainContext
/** Request wrapper that carries the resolved [[DomainContext]] (if any) for the target domain along with the original HTTP request. */
class DomainRequest[A](val domainContext: Option[DomainContext], request: Request[A]) extends WrappedRequest[A](request)
/**
 * Action builder that resolves the [[DomainContext]] for the given
 * internationalized domain name (IDN) and attaches it to every incoming
 * request as a [[DomainRequest]].
 *
 * @param domIdn        internationalized domain name identifying the tenant
 * @param domainService service used to look the domain up
 */
class DomainAction(domIdn: String, domainService: DomainService) extends
  ActionBuilder[DomainRequest]
  with ActionTransformer[Request, DomainRequest] {

  def transform[A](request: Request[A]) = {
    // Normalize the IDN form into the canonical domain id before the lookup.
    val resolvedId = com.mailrest.maildal.util.DomainId.INSTANCE.fromDomainIdn(domIdn)
    // The lookup may come back empty; DomainRequest keeps the Option as-is so
    // downstream filters can decide how to react to an unknown domain.
    domainService.lookupDomain(resolvedId).map(ctx => new DomainRequest[A](ctx, request))
  }
}
/**
 * Filter that authorizes a [[DomainRequest]] against the domain's API key.
 *
 * A request is accepted when either the `X-Auth-Token` header or HTTP Basic
 * credentials (user "api", password = API key) match the domain's key.
 * Requests for unknown domains are rejected with 404.
 *
 * All three methods follow the ActionFilter convention: `None` means
 * "authorized, continue", `Some(result)` short-circuits with that result.
 */
object DomainAuthAction extends ActionFilter[DomainRequest] {

  /** Accepts the request if HTTP Basic credentials are user "api" with the domain's API key; otherwise demands authentication. */
  def authByBasic[A](input: DomainRequest[A], domainContext: DomainContext): Option[Result] = {
    // `exists` replaces the previous filter-then-match on the Option,
    // which only inspected whether the filtered value was defined.
    val authorized = BasicAuthHelper.getCredentials(input)
      .exists { case (user, secret) => user == "api" && secret == domainContext.apiKey }
    if (authorized) None else Some(BasicAuthHelper.requestAuth)
  }

  /** Accepts the request if the X-Auth-Token header carries the API key; otherwise falls back to HTTP Basic auth. */
  def authByToken[A](input: DomainRequest[A], domainContext: DomainContext): Option[Result] = {
    if (input.headers.get("X-Auth-Token").contains(domainContext.apiKey)) None
    else authByBasic(input, domainContext)
  }

  /** Entry point of the filter: 404 when no domain context was resolved, token/basic auth otherwise. */
  def filter[A](input: DomainRequest[A]): Future[Option[Result]] = Future.successful {
    input.domainContext match {
      case Some(ctx) => authByToken(input, ctx)
      case None => Some(Results.NotFound)
    }
  }
}
| mailrest/mailrest | app/controllers/action/DomainAction.scala | Scala | apache-2.0 | 2,567 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.api.common.typeinfo.{TypeInformation, Types}
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.expressions._
import org.apache.flink.table.functions.BuiltInFunctionDefinitions._
import org.apache.flink.table.functions._
import org.apache.flink.table.planner.expressions.{E => PlannerE, UUID => PlannerUUID}
import org.apache.flink.table.planner.functions.InternalFunctionDefinitions.THROW_EXCEPTION
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo
import org.apache.flink.table.types.logical.LogicalTypeRoot.{CHAR, DECIMAL, SYMBOL}
import org.apache.flink.table.types.logical.utils.LogicalTypeChecks._
import _root_.scala.collection.JavaConverters._
/**
* Visitor implementation for converting [[Expression]]s to [[PlannerExpression]]s.
*/
class PlannerExpressionConverter private extends ApiExpressionVisitor[PlannerExpression] {
override def visit(call: CallExpression): PlannerExpression = {
val definition = call.getFunctionDefinition
translateCall(
definition, call.getChildren.asScala,
() =>
definition match {
case ROW | ARRAY | MAP => ApiResolvedCallExpression(call)
case _ =>
if (definition.getKind == FunctionKind.AGGREGATE ||
definition.getKind == FunctionKind.TABLE_AGGREGATE) {
ApiResolvedAggregateCallExpression(call)
} else {
ApiResolvedCallExpression(call)
}
})
}
override def visit(unresolvedCall: UnresolvedCallExpression): PlannerExpression = {
val definition = unresolvedCall.getFunctionDefinition
translateCall(
definition,
unresolvedCall.getChildren.asScala,
() => throw new TableException(s"Unsupported function definition: $definition"))
}
  /**
   * Translates a function call on already-resolved children into a legacy
   * [[PlannerExpression]].
   *
   * @param func                   definition of the called function
   * @param children               child expressions of the call
   * @param unknownFunctionHandler invoked when the definition matches none of
   *                               the built-in cases below
   */
  private def translateCall(
      func: FunctionDefinition,
      children: Seq[Expression],
      unknownFunctionHandler: () => PlannerExpression)
    : PlannerExpression = {

    // special case: requires individual handling of child expressions
    // (these consume a TypeLiteralExpression or a window reference child
    // directly instead of recursively visiting it)
    func match {
      case CAST =>
        assert(children.size == 2)
        return Cast(
          children.head.accept(this),
          fromDataTypeToTypeInfo(
            children(1).asInstanceOf[TypeLiteralExpression].getOutputDataType))

      case REINTERPRET_CAST =>
        assert(children.size == 3)
        return Reinterpret(
          children.head.accept(this),
          fromDataTypeToTypeInfo(
            children(1).asInstanceOf[TypeLiteralExpression].getOutputDataType),
          getValue[Boolean](children(2).accept(this)))

      case WINDOW_START =>
        assert(children.size == 1)
        val windowReference = translateWindowReference(children.head)
        return WindowStart(windowReference)

      case WINDOW_END =>
        assert(children.size == 1)
        val windowReference = translateWindowReference(children.head)
        return WindowEnd(windowReference)

      case PROCTIME =>
        assert(children.size == 1)
        val windowReference = translateWindowReference(children.head)
        return ProctimeAttribute(windowReference)

      case ROWTIME =>
        assert(children.size == 1)
        val windowReference = translateWindowReference(children.head)
        return RowtimeAttribute(windowReference)

      case THROW_EXCEPTION =>
        assert(children.size == 2)
        return ThrowException(
          children.head.accept(this),
          fromDataTypeToTypeInfo(
            children(1).asInstanceOf[TypeLiteralExpression].getOutputDataType))

      case _ =>
    }

    // regular case: children are converted first, then the call is mapped
    val args = children.map(_.accept(this))

    func match {
      // user-defined functions carry their own runtime class
      case sfd: ScalarFunctionDefinition =>
        val call = PlannerScalarFunctionCall(
          sfd.getScalarFunction,
          args)
        //it configures underlying state
        call.validateInput()
        call

      case tfd: TableFunctionDefinition =>
        PlannerTableFunctionCall(
          tfd.toString,
          tfd.getTableFunction,
          args,
          tfd.getResultType)

      case afd: AggregateFunctionDefinition =>
        AggFunctionCall(
          afd.getAggregateFunction,
          afd.getResultTypeInfo,
          afd.getAccumulatorTypeInfo,
          args)

      case tafd: TableAggregateFunctionDefinition =>
        AggFunctionCall(
          tafd.getTableAggregateFunction,
          tafd.getResultTypeInfo,
          tafd.getAccumulatorTypeInfo,
          args)

      // built-in functions: a 1:1 mapping onto legacy planner expressions;
      // `assert` below throws ValidationException on a wrong argument count
      case fd: FunctionDefinition =>
        fd match {
          case FLATTEN =>
            assert(args.size == 1)
            Flattening(args.head)

          case GET =>
            assert(args.size == 2)
            val expr = GetCompositeField(args.head, getValue(args.last))
            //it configures underlying state
            expr.validateInput()
            expr

          // logic functions
          case AND =>
            assert(args.size >= 2)
            args.reduceLeft(And)

          case OR =>
            assert(args.size >= 2)
            args.reduceLeft(Or)

          case NOT =>
            assert(args.size == 1)
            Not(args.head)

          // comparison functions
          case EQUALS =>
            assert(args.size == 2)
            EqualTo(args.head, args.last)

          case GREATER_THAN =>
            assert(args.size == 2)
            GreaterThan(args.head, args.last)

          case GREATER_THAN_OR_EQUAL =>
            assert(args.size == 2)
            GreaterThanOrEqual(args.head, args.last)

          case LESS_THAN =>
            assert(args.size == 2)
            LessThan(args.head, args.last)

          case LESS_THAN_OR_EQUAL =>
            assert(args.size == 2)
            LessThanOrEqual(args.head, args.last)

          case NOT_EQUALS =>
            assert(args.size == 2)
            NotEqualTo(args.head, args.last)

          case IN =>
            assert(args.size > 1)
            In(args.head, args.drop(1))

          case IS_NULL =>
            assert(args.size == 1)
            IsNull(args.head)

          case IS_NOT_NULL =>
            assert(args.size == 1)
            IsNotNull(args.head)

          case IS_TRUE =>
            assert(args.size == 1)
            IsTrue(args.head)

          case IS_FALSE =>
            assert(args.size == 1)
            IsFalse(args.head)

          case IS_NOT_TRUE =>
            assert(args.size == 1)
            IsNotTrue(args.head)

          case IS_NOT_FALSE =>
            assert(args.size == 1)
            IsNotFalse(args.head)

          case IF =>
            assert(args.size == 3)
            If(args.head, args(1), args.last)

          case BETWEEN =>
            assert(args.size == 3)
            Between(args.head, args(1), args.last)

          case NOT_BETWEEN =>
            assert(args.size == 3)
            NotBetween(args.head, args(1), args.last)

          // aggregate functions
          case DISTINCT =>
            assert(args.size == 1)
            DistinctAgg(args.head)

          case AVG =>
            assert(args.size == 1)
            Avg(args.head)

          case COUNT =>
            assert(args.size == 1)
            Count(args.head)

          case MAX =>
            assert(args.size == 1)
            Max(args.head)

          case MIN =>
            assert(args.size == 1)
            Min(args.head)

          case SUM =>
            assert(args.size == 1)
            Sum(args.head)

          case SUM0 =>
            assert(args.size == 1)
            Sum0(args.head)

          case STDDEV_POP =>
            assert(args.size == 1)
            StddevPop(args.head)

          case STDDEV_SAMP =>
            assert(args.size == 1)
            StddevSamp(args.head)

          case VAR_POP =>
            assert(args.size == 1)
            VarPop(args.head)

          case VAR_SAMP =>
            assert(args.size == 1)
            VarSamp(args.head)

          case COLLECT =>
            assert(args.size == 1)
            Collect(args.head)

          // string functions
          case CHAR_LENGTH =>
            assert(args.size == 1)
            CharLength(args.head)

          case INIT_CAP =>
            assert(args.size == 1)
            InitCap(args.head)

          case LIKE =>
            assert(args.size == 2)
            Like(args.head, args.last)

          case LOWER =>
            assert(args.size == 1)
            Lower(args.head)

          case LOWERCASE =>
            assert(args.size == 1)
            Lower(args.head)

          case SIMILAR =>
            assert(args.size == 2)
            Similar(args.head, args.last)

          case SUBSTRING =>
            assert(args.size == 2 || args.size == 3)
            if (args.size == 2) {
              new Substring(args.head, args.last)
            } else {
              Substring(args.head, args(1), args.last)
            }

          case REPLACE =>
            assert(args.size == 3)
            Replace(args.head, args(1), args.last)

          case TRIM =>
            assert(args.size == 4)
            // the first two (boolean literal) args select the trim mode
            val removeLeading = getValue[Boolean](args.head)
            val removeTrailing = getValue[Boolean](args(1))
            val trimMode = if (removeLeading && removeTrailing) {
              PlannerTrimMode.BOTH
            } else if (removeLeading) {
              PlannerTrimMode.LEADING
            } else if (removeTrailing) {
              PlannerTrimMode.TRAILING
            } else {
              throw new TableException("Unsupported trim mode.")
            }
            Trim(trimMode, args(2), args(3))

          case UPPER =>
            assert(args.size == 1)
            Upper(args.head)

          case UPPERCASE =>
            assert(args.size == 1)
            Upper(args.head)

          case POSITION =>
            assert(args.size == 2)
            Position(args.head, args.last)

          case OVERLAY =>
            assert(args.size == 3 || args.size == 4)
            if (args.size == 3) {
              new Overlay(args.head, args(1), args.last)
            } else {
              Overlay(
                args.head,
                args(1),
                args(2),
                args.last)
            }

          case CONCAT =>
            Concat(args)

          case CONCAT_WS =>
            assert(args.nonEmpty)
            ConcatWs(args.head, args.tail)

          case LPAD =>
            assert(args.size == 3)
            Lpad(args.head, args(1), args.last)

          case RPAD =>
            assert(args.size == 3)
            Rpad(args.head, args(1), args.last)

          case REGEXP_EXTRACT =>
            assert(args.size == 2 || args.size == 3)
            if (args.size == 2) {
              RegexpExtract(args.head, args.last)
            } else {
              RegexpExtract(args.head, args(1), args.last)
            }

          case FROM_BASE64 =>
            assert(args.size == 1)
            FromBase64(args.head)

          case TO_BASE64 =>
            assert(args.size == 1)
            ToBase64(args.head)

          case BuiltInFunctionDefinitions.UUID =>
            assert(args.isEmpty)
            PlannerUUID()

          case LTRIM =>
            assert(args.size == 1)
            LTrim(args.head)

          case RTRIM =>
            assert(args.size == 1)
            RTrim(args.head)

          case REPEAT =>
            assert(args.size == 2)
            Repeat(args.head, args.last)

          case REGEXP_REPLACE =>
            assert(args.size == 3)
            RegexpReplace(args.head, args(1), args.last)

          // arithmetic functions
          case PLUS =>
            assert(args.size == 2)
            Plus(args.head, args.last)

          case MINUS =>
            assert(args.size == 2)
            Minus(args.head, args.last)

          case DIVIDE =>
            assert(args.size == 2)
            Div(args.head, args.last)

          case TIMES =>
            assert(args.size == 2)
            Mul(args.head, args.last)

          case ABS =>
            assert(args.size == 1)
            Abs(args.head)

          case CEIL =>
            assert(args.size == 1 || args.size == 2)
            if (args.size == 1) {
              Ceil(args.head)
            } else {
              TemporalCeil(args.head, args.last)
            }

          case EXP =>
            assert(args.size == 1)
            Exp(args.head)

          case FLOOR =>
            assert(args.size == 1 || args.size == 2)
            if (args.size == 1) {
              Floor(args.head)
            } else {
              TemporalFloor(args.head, args.last)
            }

          case LOG10 =>
            assert(args.size == 1)
            Log10(args.head)

          case LOG2 =>
            assert(args.size == 1)
            Log2(args.head)

          case LN =>
            assert(args.size == 1)
            Ln(args.head)

          case LOG =>
            assert(args.size == 1 || args.size == 2)
            if (args.size == 1) {
              Log(args.head)
            } else {
              Log(args.head, args.last)
            }

          case POWER =>
            assert(args.size == 2)
            Power(args.head, args.last)

          case MOD =>
            assert(args.size == 2)
            Mod(args.head, args.last)

          case SQRT =>
            assert(args.size == 1)
            Sqrt(args.head)

          case MINUS_PREFIX =>
            assert(args.size == 1)
            UnaryMinus(args.head)

          case SIN =>
            assert(args.size == 1)
            Sin(args.head)

          case COS =>
            assert(args.size == 1)
            Cos(args.head)

          case SINH =>
            assert(args.size == 1)
            Sinh(args.head)

          case TAN =>
            assert(args.size == 1)
            Tan(args.head)

          case TANH =>
            assert(args.size == 1)
            Tanh(args.head)

          case COT =>
            assert(args.size == 1)
            Cot(args.head)

          case ASIN =>
            assert(args.size == 1)
            Asin(args.head)

          case ACOS =>
            assert(args.size == 1)
            Acos(args.head)

          case ATAN =>
            assert(args.size == 1)
            Atan(args.head)

          case ATAN2 =>
            assert(args.size == 2)
            Atan2(args.head, args.last)

          case COSH =>
            assert(args.size == 1)
            Cosh(args.head)

          case DEGREES =>
            assert(args.size == 1)
            Degrees(args.head)

          case RADIANS =>
            assert(args.size == 1)
            Radians(args.head)

          case SIGN =>
            assert(args.size == 1)
            Sign(args.head)

          case ROUND =>
            assert(args.size == 2)
            Round(args.head, args.last)

          case PI =>
            assert(args.isEmpty)
            Pi()

          case BuiltInFunctionDefinitions.E =>
            assert(args.isEmpty)
            PlannerE()

          case RAND =>
            assert(args.isEmpty || args.size == 1)
            if (args.isEmpty) {
              new Rand()
            } else {
              Rand(args.head)
            }

          case RAND_INTEGER =>
            assert(args.size == 1 || args.size == 2)
            if (args.size == 1) {
              new RandInteger(args.head)
            } else {
              RandInteger(args.head, args.last)
            }

          case BIN =>
            assert(args.size == 1)
            Bin(args.head)

          case HEX =>
            assert(args.size == 1)
            Hex(args.head)

          case TRUNCATE =>
            assert(args.size == 1 || args.size == 2)
            if (args.size == 1) {
              new Truncate(args.head)
            } else {
              Truncate(args.head, args.last)
            }

          // temporal functions
          case EXTRACT =>
            assert(args.size == 2)
            Extract(args.head, args.last)

          case CURRENT_DATE =>
            assert(args.isEmpty)
            CurrentDate()

          case CURRENT_TIME =>
            assert(args.isEmpty)
            CurrentTime()

          case CURRENT_TIMESTAMP =>
            assert(args.isEmpty)
            CurrentTimestamp()

          case LOCAL_TIME =>
            assert(args.isEmpty)
            LocalTime()

          case LOCAL_TIMESTAMP =>
            assert(args.isEmpty)
            LocalTimestamp()

          case TEMPORAL_OVERLAPS =>
            assert(args.size == 4)
            TemporalOverlaps(
              args.head,
              args(1),
              args(2),
              args.last)

          case DATE_TIME_PLUS =>
            assert(args.size == 2)
            Plus(args.head, args.last)

          case DATE_FORMAT =>
            assert(args.size == 2)
            DateFormat(args.head, args.last)

          case TIMESTAMP_DIFF =>
            assert(args.size == 3)
            TimestampDiff(args.head, args(1), args.last)

          // collection functions
          case AT =>
            assert(args.size == 2)
            ItemAt(args.head, args.last)

          case CARDINALITY =>
            assert(args.size == 1)
            Cardinality(args.head)

          case ARRAY_ELEMENT =>
            assert(args.size == 1)
            ArrayElement(args.head)

          // ordering
          case ORDER_ASC =>
            assert(args.size == 1)
            Asc(args.head)

          case ORDER_DESC =>
            assert(args.size == 1)
            Desc(args.head)

          // hash functions
          case MD5 =>
            assert(args.size == 1)
            Md5(args.head)

          case SHA1 =>
            assert(args.size == 1)
            Sha1(args.head)

          case SHA224 =>
            assert(args.size == 1)
            Sha224(args.head)

          case SHA256 =>
            assert(args.size == 1)
            Sha256(args.head)

          case SHA384 =>
            assert(args.size == 1)
            Sha384(args.head)

          case SHA512 =>
            assert(args.size == 1)
            Sha512(args.head)

          case SHA2 =>
            assert(args.size == 2)
            Sha2(args.head, args.last)

          // over windows: args are (agg, partitionBy*, orderBy, preceding, following)
          case OVER =>
            assert(args.size >= 4)
            OverCall(
              args.head,
              args.slice(4, args.size),
              args(1),
              args(2),
              args(3)
            )

          case UNBOUNDED_RANGE =>
            assert(args.isEmpty)
            UnboundedRange()

          case UNBOUNDED_ROW =>
            assert(args.isEmpty)
            UnboundedRow()

          case CURRENT_RANGE =>
            assert(args.isEmpty)
            CurrentRange()

          case CURRENT_ROW =>
            assert(args.isEmpty)
            CurrentRow()

          case STREAM_RECORD_TIMESTAMP =>
            assert(args.isEmpty)
            StreamRecordTimestamp()

          case _ =>
            unknownFunctionHandler()
        }
    }
  }
override def visit(literal: ValueLiteralExpression): PlannerExpression = {
if (hasRoot(literal.getOutputDataType.getLogicalType, SYMBOL)) {
val plannerSymbol = getSymbol(literal.getValueAs(classOf[TableSymbol]).get())
return SymbolPlannerExpression(plannerSymbol)
}
val typeInfo = getLiteralTypeInfo(literal)
if (literal.isNull) {
Null(typeInfo)
} else {
Literal(
literal.getValueAs(typeInfo.getTypeClass).get(),
typeInfo)
}
}
/**
* This method makes the planner more lenient for new data types defined for literals.
*/
private def getLiteralTypeInfo(literal: ValueLiteralExpression): TypeInformation[_] = {
val logicalType = literal.getOutputDataType.getLogicalType
if (hasRoot(logicalType, DECIMAL)) {
if (literal.isNull) {
return Types.BIG_DEC
}
val value = literal.getValueAs(classOf[java.math.BigDecimal]).get()
if (hasPrecision(logicalType, value.precision()) && hasScale(logicalType, value.scale())) {
return Types.BIG_DEC
}
}
else if (hasRoot(logicalType, CHAR)) {
if (literal.isNull) {
return Types.STRING
}
val value = literal.getValueAs(classOf[java.lang.String]).get()
if (hasLength(logicalType, value.length)) {
return Types.STRING
}
}
fromDataTypeToTypeInfo(literal.getOutputDataType)
}
  /** Maps an API [[TableSymbol]] (time interval/point units) onto its planner-specific counterpart; fails for unknown symbols. */
  private def getSymbol(symbol: TableSymbol): PlannerSymbol = symbol match {
    case TimeIntervalUnit.YEAR => PlannerTimeIntervalUnit.YEAR
    case TimeIntervalUnit.YEAR_TO_MONTH => PlannerTimeIntervalUnit.YEAR_TO_MONTH
    case TimeIntervalUnit.QUARTER => PlannerTimeIntervalUnit.QUARTER
    case TimeIntervalUnit.MONTH => PlannerTimeIntervalUnit.MONTH
    case TimeIntervalUnit.WEEK => PlannerTimeIntervalUnit.WEEK
    case TimeIntervalUnit.DAY => PlannerTimeIntervalUnit.DAY
    case TimeIntervalUnit.DAY_TO_HOUR => PlannerTimeIntervalUnit.DAY_TO_HOUR
    case TimeIntervalUnit.DAY_TO_MINUTE => PlannerTimeIntervalUnit.DAY_TO_MINUTE
    case TimeIntervalUnit.DAY_TO_SECOND => PlannerTimeIntervalUnit.DAY_TO_SECOND
    case TimeIntervalUnit.HOUR => PlannerTimeIntervalUnit.HOUR
    case TimeIntervalUnit.SECOND => PlannerTimeIntervalUnit.SECOND
    case TimeIntervalUnit.HOUR_TO_MINUTE => PlannerTimeIntervalUnit.HOUR_TO_MINUTE
    case TimeIntervalUnit.HOUR_TO_SECOND => PlannerTimeIntervalUnit.HOUR_TO_SECOND
    case TimeIntervalUnit.MINUTE => PlannerTimeIntervalUnit.MINUTE
    case TimeIntervalUnit.MINUTE_TO_SECOND => PlannerTimeIntervalUnit.MINUTE_TO_SECOND
    case TimePointUnit.YEAR => PlannerTimePointUnit.YEAR
    case TimePointUnit.MONTH => PlannerTimePointUnit.MONTH
    case TimePointUnit.DAY => PlannerTimePointUnit.DAY
    case TimePointUnit.HOUR => PlannerTimePointUnit.HOUR
    case TimePointUnit.MINUTE => PlannerTimePointUnit.MINUTE
    case TimePointUnit.SECOND => PlannerTimePointUnit.SECOND
    case TimePointUnit.QUARTER => PlannerTimePointUnit.QUARTER
    case TimePointUnit.WEEK => PlannerTimePointUnit.WEEK
    case TimePointUnit.MILLISECOND => PlannerTimePointUnit.MILLISECOND
    case TimePointUnit.MICROSECOND => PlannerTimePointUnit.MICROSECOND

    case _ =>
      throw new TableException("Unsupported symbol: " + symbol)
  }
  /** Converts a resolved field reference, translating its data type to legacy type information. */
  override def visit(fieldReference: FieldReferenceExpression): PlannerExpression = {
    PlannerResolvedFieldReference(
      fieldReference.getName,
      fromDataTypeToTypeInfo(fieldReference.getOutputDataType))
  }
  /** Converts an unresolved reference; only the name is carried over, resolution happens later. */
  override def visit(fieldReference: UnresolvedReferenceExpression)
    : PlannerExpression = {
    UnresolvedFieldReference(fieldReference.getName)
  }
  /**
   * Type literals are consumed directly by their parent call (see the CAST /
   * REINTERPRET_CAST / THROW_EXCEPTION handling in translateCall); visiting
   * one on its own is unsupported.
   */
  override def visit(typeLiteral: TypeLiteralExpression): PlannerExpression = {
    throw new TableException("Unsupported type literal expression: " + typeLiteral)
  }
override def visit(tableRef: TableReferenceExpression): PlannerExpression = {
TableReference(
tableRef.asInstanceOf[TableReferenceExpression].getName,
tableRef.asInstanceOf[TableReferenceExpression].getQueryOperation
)
}
  /** Converts a local (e.g. window) reference, keeping its name and legacy type information. */
  override def visit(local: LocalReferenceExpression): PlannerExpression =
    PlannerLocalReference(local.getName, fromDataTypeToTypeInfo(local.getOutputDataType))
  /** Lookup calls must already be resolved before translation; reaching this is an error. */
  override def visit(lookupCall: LookupCallExpression): PlannerExpression =
    throw new TableException("Unsupported function call: " + lookupCall)
/**
 * Fallback for expressions that are not part of the public API.
 * Already-converted planner expressions pass through unchanged and RexNode
 * wrappers are re-wrapped; anything else is rejected.
 */
override def visitNonApiExpression(other: Expression): PlannerExpression = other match {
  // already converted planner expressions will pass this visitor without modification
  case planner: PlannerExpression => planner
  case rex: RexNodeExpression => RexPlannerExpression(rex.getRexNode)
  case _ => throw new TableException("Unrecognized expression: " + other)
}
// Extracts the value of a planner Literal, cast (unchecked) to the expected type.
// Callers must guarantee that `literal` is in fact a Literal holding a T;
// a wrong assumption surfaces as a ClassCastException at runtime.
private def getValue[T](literal: PlannerExpression): T = {
literal.asInstanceOf[Literal].value.asInstanceOf[T]
}
/** Arity check helper: raises a ValidationException when the condition fails. */
private def assert(condition: Boolean): Unit =
  if (!condition) {
    throw new ValidationException("Invalid number of arguments for function.")
  }
/**
 * Translates a window alias into a planner expression. Local references become
 * resolved window references; unresolved references are kept unresolved, which
 * is needed because of how a DataStream is converted into a Table.
 */
private def translateWindowReference(reference: Expression): PlannerExpression = {
  reference match {
    case local: LocalReferenceExpression =>
      val typeInfo = fromDataTypeToTypeInfo(local.getOutputDataType)
      WindowReference(local.getName, Some(typeInfo))
    case unresolved: UnresolvedReferenceExpression =>
      UnresolvedFieldReference(unresolved.getName)
    case _ =>
      throw new ValidationException(s"Expected LocalReferenceExpression. Got: $reference")
  }
}
}
// Singleton access point; the converter itself is stateless, so one shared
// instance is sufficient.
object PlannerExpressionConverter {
val INSTANCE: PlannerExpressionConverter = new PlannerExpressionConverter
}
| GJL/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/PlannerExpressionConverter.scala | Scala | apache-2.0 | 25,253 |
package controllers
import play.api.mvc._
// Handles user logout: clears the session and redirects to the index page.
object LogoutController extends Controller {
// Drops the current session entirely and flashes an informational message.
def logout = Action {implicit request =>
Redirect(routes.IndexController.index).withNewSession.flashing("level" -> "info", "msg" -> "You have been successfully logged out.")
}
} | semberal/homelibrary | app/controllers/LogoutController.scala | Scala | apache-2.0 | 279 |
/*
* Copyright 2014 JHC Systems Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sqlest.sql
import sqlest.ast._
// SQL Server statement builder; currently inherits all behaviour from the base builder.
trait SQLServerStatementBuilder extends base.StatementBuilder
// Default singleton instance for direct use.
object SQLServerStatementBuilder extends SQLServerStatementBuilder
| DavidGregory084/sqlest | sqlest/src/main/scala/sqlest/sql/SQLServerStatementBuilder.scala | Scala | apache-2.0 | 775 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.workspace.modules.linking
import de.fuberlin.wiwiss.silk.config.LinkSpecification
import de.fuberlin.wiwiss.silk.workspace.modules.ModuleTask
import de.fuberlin.wiwiss.silk.evaluation.ReferenceLinks
import de.fuberlin.wiwiss.silk.workspace.Project
/**
 * A linking task, which interlinks two data sets.
 *
 * Instances are effectively immutable: the update methods return a fresh task
 * (constructed through the companion, which re-loads its cache) instead of
 * mutating this one.
 */
class LinkingTask private(val linkSpec: LinkSpecification, val referenceLinks: ReferenceLinks, val cache: LinkingCaches) extends ModuleTask {

  /** The task name is taken from the link specification identifier. */
  val name = linkSpec.id

  /** Returns a copy of this task with a new link specification. */
  def updateLinkSpec(linkSpec: LinkSpecification, project: Project) =
    LinkingTask(project, linkSpec, referenceLinks, cache)

  /** Returns a copy of this task with new reference links. */
  def updateReferenceLinks(referenceLinks: ReferenceLinks, project: Project) =
    LinkingTask(project, linkSpec, referenceLinks, cache)
}
object LinkingTask {
/**
* Constructs a new linking task and starts loading the cache.
*
* @param project the project the task belongs to; used to load the task's cache
* @param linkSpec the link specification that defines the interlinking
* @param referenceLinks positive/negative reference links for evaluation (empty by default)
* @param cache the caches backing this task; loading is started before the task is returned
*/
def apply(project: Project, linkSpec: LinkSpecification, referenceLinks: ReferenceLinks = ReferenceLinks(), cache: LinkingCaches = new LinkingCaches()) = {
val task = new LinkingTask(linkSpec, referenceLinks, cache)
// Warm the cache eagerly so it is ready once the task is used.
task.cache.load(project, task)
task
}
} | fusepoolP3/p3-silk | silk-workspace/src/main/scala/de/fuberlin/wiwiss/silk/workspace/modules/linking/LinkingTask.scala | Scala | apache-2.0 | 1,724 |
package tmvault.util
/**
 * A minimal synchronous stand-in for scala.concurrent.Future: the value is
 * evaluated eagerly at construction and every combinator applies immediately
 * on the calling thread.
 *
 * Note: `filter`/`withFilter` deliberately ignore the predicate and return
 * this future unchanged.
 */
class Future[+T](private[util] val value: T) {
  def withFilter(p: T => Boolean): Future[T] = this
  def filter(p: T => Boolean): Future[T] = this
  def map[U](f: T => U): Future[U] = new Future(f(value))
  def flatMap[U](f: T => Future[U]): Future[U] = f(value)
}
/** Factory helpers mirroring the scala.concurrent.Future companion. */
object Future {
  /** Wraps an already-computed value. */
  def successful[T](value: T): Future[T] = new Future(value)
  /** Evaluates `f` immediately (synchronously) and wraps the result. */
  def apply[T](f: => T): Future[T] = new Future(f)
}
// Mirror of scala.concurrent.Await for the synchronous Future above: the value
// is already computed, so the duration argument is ignored.
object Await {
def result[T](f:Future[T],d:scala.concurrent.duration.FiniteDuration) : T = f.value
}
// Marker trait standing in for scala.concurrent.ExecutionContext; it carries no behaviour.
trait ExecutionContext
object ExecutionContext {
// Single shared context instance.
private val _global = new ExecutionContext {}
def global = _global
object Implicits {
// Implicit mirror of scala.concurrent.ExecutionContext.Implicits.global.
implicit def global = _global
}
} | rklaehn/tmvault | tmvault/src/main/scala/tmvault/util/Future.scala | Scala | apache-2.0 | 701 |
/*
* This file is part of AckCord, licensed under the MIT License (MIT).
*
* Copyright (c) 2019 Katrix
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package ackcord
import java.time.Instant
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import ackcord.cachehandlers.CacheTypeRegistry
import ackcord.commands.{AbstractCommandSettings, CommandSettings, CoreCommands}
import ackcord.data.PresenceStatus
import ackcord.data.raw.RawActivity
import ackcord.requests.Ratelimiter
import akka.actor.typed.scaladsl.AskPattern._
import akka.actor.typed.scaladsl.Behaviors
import akka.actor.typed.{ActorRef, ActorSystem}
import akka.stream.OverflowStrategy
import akka.util.Timeout
/**
* Settings used when connecting to Discord.
* @param token The token for the bot.
* @param largeThreshold The large threshold.
* @param shardNum The shard index of this shard.
* @param shardTotal The amount of shards.
* @param idleSince If the bot has been idle, set the time since.
* @param activity Send an activity when connecting.
* @param status The status to use when connecting.
* @param afk If the bot should be afk when connecting.
* @param system The actor system to use.
* @param commandSettings The command settings to use.
* @param requestSettings The request settings to use.
*/
class ClientSettings(
token: String,
largeThreshold: Int = 50,
shardNum: Int = 0,
shardTotal: Int = 1,
idleSince: Option[Instant] = None,
activity: Option[RawActivity] = None,
status: PresenceStatus = PresenceStatus.Online,
afk: Boolean = false,
val system: ActorSystem[Nothing] = ActorSystem(Behaviors.ignore, "AckCord"),
val commandSettings: AbstractCommandSettings = CommandSettings(needsMention = true, prefixes = Set.empty),
val requestSettings: RequestSettings = RequestSettings()
//TODO: Allow setting ignored and cacheTypeRegistry here at some point
) extends GatewaySettings(token, largeThreshold, shardNum, shardTotal, idleSince, activity, status, afk) {
// All Futures below run on the actor system's default dispatcher.
implicit val executionContext: ExecutionContext = system.executionContext
/**
* Create a [[DiscordClient]] from these settings.
* First resolves the gateway websocket URI, then wires up a single shard and
* asks the client actor for its ratelimiter to build the request pipeline.
*/
def createClient(): Future[DiscordClient] = {
implicit val actorSystem: ActorSystem[Nothing] = system
DiscordShard.fetchWsGateway.flatMap { uri =>
val cache = Cache.create
val clientActor = actorSystem.systemActorOf(
DiscordClientActor(Seq(DiscordShard(uri, this, cache, Nil, CacheTypeRegistry.default)), cache),
"DiscordClient"
)
// NOTE(review): 1 second may be tight for the ratelimiter ask on a loaded system - confirm.
implicit val timeout: Timeout = Timeout(1.second)
clientActor.ask[DiscordClientActor.GetRatelimiterReply](DiscordClientActor.GetRatelimiter).map {
case DiscordClientActor.GetRatelimiterReply(ratelimiter) =>
val requests = requestSettings.toRequests(token, ratelimiter)
val commands = CoreCommands.create(commandSettings, cache, requests)
new DiscordClientCore(
cache,
commands,
requests,
clientActor
)
}
}
}
/**
* Create a [[DiscordClient]] from these settings while letting Discord
* set the shard amount.
* Same wiring as [[createClient]], but Discord reports how many shards to spawn.
*/
def createClientAutoShards(): Future[DiscordClient] = {
implicit val actorSystem: ActorSystem[Nothing] = system
DiscordShard.fetchWsGatewayWithShards(token).flatMap {
case (uri, receivedShardTotal) =>
val cache = Cache.create
val shards = DiscordShard.many(uri, receivedShardTotal, this, cache, Nil, CacheTypeRegistry.default)
val clientActor = actorSystem.systemActorOf(DiscordClientActor(shards, cache), "DiscordClient")
implicit val timeout: Timeout = Timeout(1.second)
clientActor.ask[DiscordClientActor.GetRatelimiterReply](DiscordClientActor.GetRatelimiter).map {
case DiscordClientActor.GetRatelimiterReply(ratelimiter) =>
val requests = requestSettings.toRequests(token, ratelimiter)
val commands = CoreCommands.create(commandSettings, cache, requests)
new DiscordClientCore(
cache,
commands,
requests,
clientActor
)
}
}
}
// NOTE(review): this includes the raw token - avoid logging this value where it could leak credentials.
override def toString: String =
s"ClientSettings($token, $largeThreshold, $shardNum, $shardTotal, $idleSince, " +
s"$activity, $status, $afk, $executionContext, $system, $commandSettings, $requestSettings)"
}
object ClientSettings {
/**
* Settings used when connecting to Discord.
* @param token The token for the bot.
* @param largeThreshold The large threshold.
* @param shardNum The shard index of this shard.
* @param shardTotal The amount of shards.
* @param idleSince If the bot has been idle, set the time since.
* @param gameStatus Send some presence when connecting.
* @param status The status to use when connecting.
* @param afk If the bot should be afk when connecting.
* @param system The actor system to use.
* @param commandSettings The command settings to use.
* @param requestSettings The request settings to use.
*/
def apply(
token: String,
// NOTE(review): default here is 100 but the class constructor defaults to 50 - confirm which is intended.
largeThreshold: Int = 100,
shardNum: Int = 0,
shardTotal: Int = 1,
idleSince: Option[Instant] = None,
gameStatus: Option[RawActivity] = None,
status: PresenceStatus = PresenceStatus.Online,
afk: Boolean = false,
system: ActorSystem[Nothing] = ActorSystem(Behaviors.ignore, "AckCord"),
commandSettings: AbstractCommandSettings = CommandSettings(needsMention = true, prefixes = Set.empty),
requestSettings: RequestSettings = RequestSettings()
): ClientSettings =
new ClientSettings(
token,
largeThreshold,
shardNum,
shardTotal,
idleSince,
gameStatus,
status,
afk,
system,
commandSettings,
requestSettings
)
}
/**
* @param millisecondPrecision Whether to use millisecond precision for ratelimits
*                             (presumably Discord's ratelimit header precision - confirm in RequestHelper).
* @param relativeTime Whether ratelimit reset times are treated as relative
*                     (assumption - verify against RequestHelper).
* @param parallelism Parallelism to use for requests.
* @param bufferSize The buffer size to use for waiting requests.
* @param maxRetryCount The maximum amount of times a request will be retried.
* Only affects requests that uses retries.
* @param overflowStrategy The overflow strategy to use when the buffer is full.
* @param maxAllowedWait The max allowed wait time before giving up on a request.
*/
case class RequestSettings(
millisecondPrecision: Boolean = true,
relativeTime: Boolean = false,
parallelism: Int = 4,
bufferSize: Int = 32,
maxRetryCount: Int = 3,
overflowStrategy: OverflowStrategy = OverflowStrategy.backpressure,
maxAllowedWait: FiniteDuration = 2.minutes
) {
/** Builds a [[RequestHelper]] from these settings with the given token and ratelimiter actor. */
def toRequests(token: String, ratelimitActor: ActorRef[Ratelimiter.Command])(
implicit system: ActorSystem[Nothing]
): RequestHelper =
new RequestHelper(
BotAuthentication(token),
ratelimitActor,
millisecondPrecision,
relativeTime,
parallelism,
maxRetryCount,
bufferSize,
overflowStrategy,
maxAllowedWait
)
}
| Katrix-/AckCord | ackCord/src/main/scala/ackcord/ClientSettings.scala | Scala | mit | 8,086 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.{Map => jMap}
import org.apache.accumulo.core.client.IteratorSetting
import org.geotools.factory.Hints
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
 * Mixin trait providing support for sampling features.
 *
 * The current implementation keeps every nth feature, so sampling
 * percentages above 0.5 have no additional effect.
 */
trait SamplingIterator {

  /**
   * Configure a sampling function based on the iterator configuration.
   *
   * @param options iterator options
   * @return sampling function, if defined
   */
  def sample(options: jMap[String, String]): Option[(SimpleFeature) => Boolean] = {
    import scala.collection.JavaConverters._
    sample(options.asScala.toMap)
  }

  /**
   * Configure a sampling function based on the iterator configuration.
   *
   * @param options iterator options
   * @return sampling function, if defined
   */
  def sample(options: Map[String, String]): Option[(SimpleFeature) => Boolean] = {
    import SamplingIterator.{SAMPLE_BY_OPT, SAMPLE_OPT}
    val sampleBy = options.get(SAMPLE_BY_OPT).map(_.toInt)
    options.get(SAMPLE_OPT).map(_.toInt).map(nth => SamplingIterator.sample(nth, sampleBy))
  }
}
object SamplingIterator {

  val SAMPLE_OPT = "sample"
  val SAMPLE_BY_OPT = "sample-by"

  /** Configures sampling from the query hints, if sampling was requested. */
  def configure(is: IteratorSetting, sft: SimpleFeatureType, hints: Hints): Unit = {
    import org.locationtech.geomesa.accumulo.index.QueryHints.RichHints
    hints.getSampling.foreach(configure(is, sft, _))
  }

  /**
   * Configures sampling options on the iterator setting.
   *
   * @param sampling percentage of features to keep, in the exclusive range (0, 1),
   *                 plus an optional attribute name used to thread the sampling
   */
  def configure(is: IteratorSetting, sft: SimpleFeatureType, sampling: (Float, Option[String])): Unit = {
    val (percent, by) = sampling
    require(percent > 0 && percent < 1f, "Sampling must be a percentage between (0, 1)")
    // percent is already a Float, so no conversion is needed before dividing
    val nth = (1 / percent).toInt
    if (nth > 1) {
      is.addOption(SAMPLE_OPT, nth.toString)
      // only thread by the attribute if it actually exists in the schema
      by.map(sft.indexOf).filter(_ != -1).foreach(i => is.addOption(SAMPLE_BY_OPT, i.toString))
    }
  }

  /**
   * Returns a sampling function that will indicate if a feature should be kept or discarded
   *
   * @param nth will keep every nth feature; values <= 1 keep every feature
   * @param field field to use for threading of samples
   * @return sampling function
   */
  def sample(nth: Int, field: Option[Int]): (SimpleFeature) => Boolean = {
    if (nth <= 1) {
      // 'every 1st feature' means keeping them all; the previous implementation
      // incorrectly dropped features in this (normally unreachable) case
      (_) => true
    } else {
      field match {
        case None =>
          var i = 1
          (_) => { val keep = i == 1; i = if (i >= nth) 1 else i + 1; keep }
        case Some(f) =>
          // per-key counters so each thread-by value is sampled independently
          val counts = scala.collection.mutable.HashMap.empty[String, Int].withDefaultValue(1)
          (sf) => {
            val value = sf.getAttribute(f)
            val key = if (value == null) "" else value.toString
            val count = counts(key)
            if (count < nth) { counts(key) = count + 1; count == 1 } else { counts(key) = 1; false }
          }
      }
    }
  }
}
| mdzimmerman/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/SamplingIterator.scala | Scala | apache-2.0 | 3,346 |
package cromwell.engine.backend.jes
import java.nio.file.Paths
import com.google.api.services.genomics.model.Disk
import cromwell.engine.backend.runtimeattributes.DiskType
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.prop.Tables.Table
import org.scalatest.{FlatSpec, Matchers, TryValues}
import scala.util.Failure
// Unit tests for JesAttachedDisk: parsing, round-trip stringification, rejection
// of malformed specs, and conversion to a Google Genomics Disk.
class JesAttachedDiskSpec extends FlatSpec with Matchers with TryValues {
// Valid mount specifications paired with their expected parsed representation.
val validTable = Table(
("unparsed", "parsed"),
("/mnt 3 SSD", JesEmptyMountedDisk(DiskType.SSD, 3, Paths.get("/mnt"))),
("/mnt/my_path 10 HDD", JesEmptyMountedDisk(DiskType.HDD, 10, Paths.get("/mnt/my_path"))),
("local-disk 100 SSD", JesWorkingDisk(DiskType.SSD, 100)),
("local-disk 100 LOCAL", JesWorkingDisk(DiskType.LOCAL, 100))
)
it should "parse" in {
forAll(validTable) { (unparsed, parsed) =>
JesAttachedDisk.parse(unparsed).get shouldEqual parsed
}
}
// Round-trip: toString must reproduce the original unparsed form.
it should "stringify" in {
forAll(validTable) { (unparsed, parsed) =>
parsed.toString shouldEqual unparsed
}
}
// Malformed size, malformed type, bad mount point, and garbage input.
val invalidTable = Table(
("unparsed"),
("local-disk BAD HDD"),
("local-disk 10 BAD"),
("BAD 100 SSD"),
("foobar")
)
it should "reject malformed disk mounts" in {
forAll(invalidTable) { (unparsed) =>
JesAttachedDisk.parse(unparsed) should be(a[Failure[_]])
}
}
it should "convert to Google Disk" in {
// NOTE(review): the expected name looks like a hash derived from the mount point -
// confirm it is stable in JesAttachedDisk before relying on this exact value.
val disk = new Disk().setName("d-39de0dbcfb68c8735bd088c62fa061a4")
.setType(DiskType.SSD.googleTypeName).setAutoDelete(true).setSizeGb(100).setMountPoint("/mnt")
JesAttachedDisk.parse("/mnt 100 SSD").get.toGoogleDisk shouldEqual disk
}
}
| cowmoo/cromwell | engine/src/test/scala/cromwell/engine/backend/jes/JesAttachedDiskSpec.scala | Scala | bsd-3-clause | 1,666 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization
import java.io.InputStream
import java.io.OutputStream
import java.io.Serializable
import java.nio.ByteBuffer
import org.apache.hadoop.io.serializer.{Serialization, Deserializer, Serializer, WritableSerialization}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.{Serializer => KSerializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import cascading.kryo.KryoSerialization;
import cascading.tuple.hadoop.TupleSerialization
import cascading.tuple.hadoop.io.BufferedInputStream
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
import scala.collection.immutable.HashMap
import com.twitter.scalding.DateRange
import com.twitter.scalding.RichDate
import com.twitter.scalding.Args
import com.twitter.chill._
import org.objenesis.strategy.StdInstantiatorStrategy;
class KryoHadoop extends KryoSerialization {
/** TODO!!!
* Deal with this issue. The problem is grouping by Kryo serialized
* objects silently breaks the results. If Kryo gets in front of TupleSerialization
* (and possibly Writable, unclear at this time), grouping is broken.
* There are two issues here:
* 1) Kryo objects not being compared properly.
* 2) Kryo being used instead of cascading.
*
* We must identify each and fix these bugs.
*/
val highPrioritySerializations = List(new WritableSerialization, new TupleSerialization)
// Only accept classes that none of the higher-priority serializations can handle,
// so Writable/Tuple serialization always wins over Kryo (see TODO above).
override def accept(klass : Class[_]) = {
highPrioritySerializations.forall { x: Serialization[_] => !x.accept(klass) }
}
// Builds a Kryo whose default serializer falls back to ObjectSerializer for
// classes it accepts (presumably Scala singletons/objects - confirm in ObjectSerializer).
override def newKryo() : Kryo = {
val k = new Kryo {
lazy val objSer = new ObjectSerializer[AnyRef]
override def newDefaultSerializer(cls : Class[_]) : KSerializer[_] = {
if(objSer.accepts(cls)) {
objSer
}
else {
super.newDefaultSerializer(cls)
}
}
}
// StdInstantiatorStrategy can create instances without invoking a constructor.
k.setInstantiatorStrategy(new StdInstantiatorStrategy());
k
}
// Registers custom serializers; registration order matters for Kryo class ids,
// so do not reorder these calls casually.
override def decorateKryo(newK : Kryo) {
// These are scalding objects:
newK.register(classOf[RichDate], new RichDateSerializer())
newK.register(classOf[DateRange], new DateRangeSerializer())
newK.register(classOf[Args], new ArgsSerializer)
// Some of the monoids from Algebird that we use:
newK.register(classOf[com.twitter.algebird.AveragedValue], new AveragedValueSerializer)
newK.register(classOf[com.twitter.algebird.DecayedValue], new DecayedValueSerializer)
newK.register(classOf[com.twitter.algebird.HyperLogLogMonoid], new HLLMonoidSerializer)
newK.register(classOf[com.twitter.algebird.Moments], new MomentsSerializer)
newK.addDefaultSerializer(classOf[com.twitter.algebird.HLL], new HLLSerializer)
// Register all the chill serializers:
KryoSerializer.registerAll(newK)
//Add commonly used types with Fields serializer:
registeredTypes.foreach { cls => newK.register(cls) }
/**
* Pipes can be swept up into closures inside of case classes. This can generally
* be safely ignored. If the case class has a method that actually accesses something
* in the job, you will get a null pointer exception, so it shouldn't cause data corruption.
* a more robust solution is to use Spark's closure cleaner approach on every object that
* is serialized, but that's very expensive.
*/
newK.addDefaultSerializer(classOf[cascading.pipe.Pipe], new SingletonSerializer(null))
// keeping track of references is costly for memory, and often triggers OOM on Hadoop
val useRefs = getConf.getBoolean("scalding.kryo.setreferences", false)
newK.setReferences(useRefs)
}
// Types to pre-register.
// TODO: this was cargo-culted from spark. We should actually measure to see the best
// choices for the common use cases. Since hadoop tells us the class we are deserializing
// the benefit of this is much less than spark
def registeredTypes : List[Class[_]] = {
List(
// Arrays
Array(1), Array(1.0), Array(1.0f), Array(1L), Array(""), Array(("", "")),
Array(new java.lang.Object), Array(1.toByte), Array(true), Array('c'),
// Options and Either
Some(1), Left(1), Right(1)
).map { _.getClass }
}
}
| stripe/scalding | scalding-core/src/main/scala/com/twitter/scalding/serialization/KryoHadoop.scala | Scala | apache-2.0 | 4,773 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
/**
* Called when the run has been finished. TODO: Justify this one.
*
* @author Bill Venners
*/
private[scalatest] trait RunDoneListener {
// Default implementation is a no-op; implementers override to react to run completion.
def done(): Unit = ()
}
| dotty-staging/scalatest | scalatest/src/main/scala/org/scalatest/tools/RunDoneListener.scala | Scala | apache-2.0 | 803 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.