code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.wegtam.tensei.agent.analyzer
import akka.actor.{ Actor, ActorLogging }
import com.wegtam.tensei.agent.adt.ParserDataContainer
import org.dfasdl.utils.DataElementProcessors
import org.w3c.dom.Element
object GenericAnalyzer {

  // Marker trait for all messages understood by analyzer actors.
  sealed trait GenericAnalyzerMessages

  // NOTE(review): this container is named "Numeric..." although it lives in the
  // generic analyzer's companion and its message extends GenericAnalyzerMessages.
  // Looks like a leftover from a copy/rename — confirm intent before renaming,
  // since renaming would break every sender importing this path.
  object NumericAnalyzerMessages {

    /**
      * A message with the data that should be analyzed by the analyzer.
      *
      * @param data The complete container with additional information.
      */
    case class AnalyzeData(data: ParserDataContainer) extends GenericAnalyzerMessages
  }
}
/**
  * A generic analyzer that represents the base class of all specific analyzers.
  *
  * @param elementId The ID of the element that should be analyzed by the analyzer.
  * @param element The element of the DFASDL that represents the container for the data.
  * @param percent The amount of data that should be analyzed from the total amount of elements.
  */
// NOTE(review): `abstract case class` is deprecated Scala practice (abstract
// case classes get no companion `apply` and case-class inheritance is
// discouraged); consider `abstract class` — confirm first that no subclass or
// caller relies on the generated `copy`/`unapply`/equality.
abstract case class GenericAnalyzer(elementId: String, element: Element, percent: Int = 100)
    extends Actor
    with ActorLogging
    with DataElementProcessors
| Tensei-Data/tensei-agent | src/main/scala/com/wegtam/tensei/agent/analyzer/GenericAnalyzer.scala | Scala | agpl-3.0 | 1,863 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.{lang => jl}
import java.util.Locale
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.NumberConverter
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Base class for math constants such as pi and e. These expressions take no
 * input, never return null, and are marked foldable so the optimizer replaces
 * them with literals during constant folding — hence no code generation
 * ([[CodegenFallback]]) is needed.
 *
 * @param c The constant value this expression evaluates to.
 * @param name The short name of the function.
 */
abstract class LeafMathExpression(c: Double, name: String)
  extends LeafExpression with CodegenFallback with Serializable {

  // A constant always folds, is never null, and is always a double.
  override def foldable: Boolean = true
  override def nullable: Boolean = false
  override def dataType: DataType = DoubleType

  // Evaluation ignores the input row entirely.
  override def eval(input: InternalRow): Any = c

  override def prettyName: String = name
  override def toString: String = name + "()"
}
/**
 * Base class for one-argument math functions. Inputs are implicitly cast to
 * double (`ExpectsInputTypes` via [[ImplicitCastInputTypes]]) and a null input
 * yields a null result ([[NullIntolerant]]).
 *
 * @param f The math function applied in the interpreted path.
 * @param name The short (upper-case) SQL name of the function.
 */
abstract class UnaryMathExpression(val f: Double => Double, name: String)
  extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant with Serializable {

  override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType)
  override def dataType: DataType = DoubleType
  override def nullable: Boolean = true

  // Prefer the alias the function was registered under, if any.
  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)
  override def toString: String = s"$prettyName($child)"

  // Name of the corresponding method in java.lang.Math, used by codegen.
  def funcName: String = name.toLowerCase(Locale.ROOT)

  protected override def nullSafeEval(input: Any): Any = f(input.asInstanceOf[Double])

  // Generated code calls java.lang.Math directly instead of going through f.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, d => s"java.lang.Math.${funcName}($d)")
}
/**
 * Base class for logarithm-style unary math functions whose domain has a lower
 * bound: inputs at or below `yAsymptote` evaluate to null rather than NaN or
 * -Infinity, matching Hive semantics.
 */
abstract class UnaryLogExpression(f: Double => Double, name: String)
  extends UnaryMathExpression(f, name) {

  override def nullable: Boolean = true

  // values less than or equal to yAsymptote eval to null in Hive, instead of NaN or -Infinity
  protected val yAsymptote: Double = 0.0

  // Interpreted path: null out-of-domain inputs before applying f.
  protected override def nullSafeEval(input: Any): Any = {
    val d = input.asInstanceOf[Double]
    if (d <= yAsymptote) null else f(d)
  }

  // Codegen mirrors nullSafeEval. Note it emits java.lang.StrictMath, not
  // java.lang.Math as the base class does — this matches the interpreted path
  // of the concrete subclasses (e.g. Log passes StrictMath.log as f).
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, c =>
      s"""
        if ($c <= $yAsymptote) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = java.lang.StrictMath.${funcName}($c);
        }
      """
    )
  }
}
/**
 * Base class for two-argument math functions taking and returning doubles.
 * Arguments are implicitly cast to double and a null on either side yields a
 * null result ([[NullIntolerant]]).
 *
 * @param f The math function applied in the interpreted path.
 * @param name The short (upper-case) SQL name of the function.
 */
abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
  extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant with Serializable {

  override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)
  override def dataType: DataType = DoubleType

  // Prefer the alias the function was registered under, if any.
  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)
  override def toString: String = s"$prettyName($left, $right)"

  protected override def nullSafeEval(input1: Any, input2: Any): Any =
    f(input1.asInstanceOf[Double], input2.asInstanceOf[Double])

  // Generated code calls the same-named java.lang.Math method directly.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val javaFunc = name.toLowerCase(Locale.ROOT)
    defineCodeGen(ctx, ev, (a, b) => s"java.lang.Math.${javaFunc}($a, $b)")
  }
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Leaf math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * Euler's number. Note that there is no code generation because this is only
 * evaluated by the optimizer during constant folding.
 */
@ExpressionDescription(
  usage = "_FUNC_() - Returns Euler's number, e.",
  examples = """
    Examples:
      > SELECT _FUNC_();
       2.718281828459045
  """,
  since = "1.5.0",
  group = "math_funcs")
// Folds to the literal math.E (see LeafMathExpression: foldable, non-null).
case class EulerNumber() extends LeafMathExpression(math.E, "E")

/**
 * Pi. Note that there is no code generation because this is only
 * evaluated by the optimizer during constant folding.
 */
@ExpressionDescription(
  usage = "_FUNC_() - Returns pi.",
  examples = """
    Examples:
      > SELECT _FUNC_();
       3.141592653589793
  """,
  since = "1.5.0",
  group = "math_funcs")
// Folds to the literal math.Pi (see LeafMathExpression: foldable, non-null).
case class Pi() extends LeafMathExpression(math.Pi, "PI")
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Unary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the inverse cosine (a.k.a. arc cosine) of `expr`, as if computed by
      `java.lang.Math._FUNC_`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(1);
       0.0
      > SELECT _FUNC_(2);
       NaN
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.acos; codegen emits java.lang.Math.acos.
case class Acos(child: Expression) extends UnaryMathExpression(math.acos, "ACOS")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the inverse sine (a.k.a. arc sine) the arc sin of `expr`,
      as if computed by `java.lang.Math._FUNC_`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
      > SELECT _FUNC_(2);
       NaN
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.asin; codegen emits java.lang.Math.asin.
case class Asin(child: Expression) extends UnaryMathExpression(math.asin, "ASIN")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the inverse tangent (a.k.a. arc tangent) of `expr`, as if computed by
      `java.lang.Math._FUNC_`
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.atan; codegen emits java.lang.Math.atan.
case class Atan(child: Expression) extends UnaryMathExpression(math.atan, "ATAN")

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the cube root of `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(27.0);
       3.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.cbrt; codegen emits java.lang.Math.cbrt.
case class Cbrt(child: Expression) extends UnaryMathExpression(math.cbrt, "CBRT")
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the smallest integer not smaller than `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(-0.1);
       0
      > SELECT _FUNC_(5);
       5
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Ceil(child: Expression) extends UnaryMathExpression(math.ceil, "CEIL") {

  // Decimals keep a decimal result: scale 0 passes through unchanged, otherwise
  // the scale is dropped and precision widened by one to fit the carry.
  // All other inputs produce a LongType result.
  override def dataType: DataType = child.dataType match {
    case dt @ DecimalType.Fixed(_, 0) => dt
    case DecimalType.Fixed(precision, scale) =>
      DecimalType.bounded(precision - scale + 1, 0)
    case _ => LongType
  }

  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(DoubleType, DecimalType, LongType))

  // Long input is already integral (identity); double goes through math.ceil
  // then truncates to long; decimals delegate to Decimal.ceil.
  protected override def nullSafeEval(input: Any): Any = child.dataType match {
    case LongType => input.asInstanceOf[Long]
    case DoubleType => f(input.asInstanceOf[Double]).toLong
    case DecimalType.Fixed(_, _) => input.asInstanceOf[Decimal].ceil
  }

  // Codegen mirrors nullSafeEval for each input type.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    child.dataType match {
      case DecimalType.Fixed(_, 0) => defineCodeGen(ctx, ev, c => s"$c")
      case DecimalType.Fixed(_, _) =>
        defineCodeGen(ctx, ev, c => s"$c.ceil()")
      case LongType => defineCodeGen(ctx, ev, c => s"$c")
      case _ => defineCodeGen(ctx, ev, c => s"(long)(java.lang.Math.${funcName}($c))")
    }
  }
}
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the cosine of `expr`, as if computed by
      `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - angle in radians
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.cos; codegen emits java.lang.Math.cos.
case class Cos(child: Expression) extends UnaryMathExpression(math.cos, "COS")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the hyperbolic cosine of `expr`, as if computed by
      `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - hyperbolic angle
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.cosh; codegen emits java.lang.Math.cosh.
case class Cosh(child: Expression) extends UnaryMathExpression(math.cosh, "COSH")
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns inverse hyperbolic cosine of `expr`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(1);
       0.0
      > SELECT _FUNC_(0);
       NaN
  """,
  since = "3.0.0",
  group = "math_funcs")
// acosh(x) computed via the closed form log(x + sqrt(x^2 - 1)); there is no
// java.lang.Math.acosh, so codegen spells the same formula out explicitly.
case class Acosh(child: Expression)
  extends UnaryMathExpression((x: Double) => StrictMath.log(x + math.sqrt(x * x - 1.0)), "ACOSH") {
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Mirrors the interpreted path: StrictMath.log over Math.sqrt.
    defineCodeGen(ctx, ev,
      c => s"java.lang.StrictMath.log($c + java.lang.Math.sqrt($c * $c - 1.0))")
  }
}
/**
 * Convert a num from one base to another
 *
 * @param numExpr the number to be converted
 * @param fromBaseExpr from which base
 * @param toBaseExpr to which base
 */
@ExpressionDescription(
  usage = "_FUNC_(num, from_base, to_base) - Convert `num` from `from_base` to `to_base`.",
  examples = """
    Examples:
      > SELECT _FUNC_('100', 2, 10);
       4
      > SELECT _FUNC_(-10, 16, -10);
       -16
  """,
  since = "1.5.0",
  group = "math_funcs")
case class Conv(numExpr: Expression, fromBaseExpr: Expression, toBaseExpr: Expression)
  extends TernaryExpression with ImplicitCastInputTypes with NullIntolerant {

  override def children: Seq[Expression] = Seq(numExpr, fromBaseExpr, toBaseExpr)
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, IntegerType, IntegerType)
  override def dataType: DataType = StringType

  // Nullable because NumberConverter.convert may yield null (see the null
  // check in the generated code below).
  override def nullable: Boolean = true

  // Delegates the actual base conversion to NumberConverter.
  override def nullSafeEval(num: Any, fromBase: Any, toBase: Any): Any = {
    NumberConverter.convert(
      num.asInstanceOf[UTF8String].getBytes,
      fromBase.asInstanceOf[Int],
      toBase.asInstanceOf[Int])
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Strip the trailing '$' so generated Java references the singleton class.
    val numconv = NumberConverter.getClass.getName.stripSuffix("$")
    nullSafeCodeGen(ctx, ev, (num, from, to) =>
      s"""
       ${ev.value} = $numconv.convert($num.getBytes(), $from, $to);
       if (${ev.value} == null) {
         ${ev.isNull} = true;
       }
       """
    )
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns e to the power of `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Exp(child: Expression) extends UnaryMathExpression(StrictMath.exp, "EXP") {
  // Override codegen to use StrictMath (the base class would emit
  // java.lang.Math), matching the interpreted path above.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c => s"java.lang.StrictMath.exp($c)")
  }
}

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns exp(`expr`) - 1.",
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Expm1(child: Expression) extends UnaryMathExpression(StrictMath.expm1, "EXPM1") {
  // Override codegen to use StrictMath (the base class would emit
  // java.lang.Math), matching the interpreted path above.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, c => s"java.lang.StrictMath.expm1($c)")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the largest integer not greater than `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(-0.1);
       -1
      > SELECT _FUNC_(5);
       5
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Floor(child: Expression) extends UnaryMathExpression(math.floor, "FLOOR") {

  // Same result-type rules as Ceil: decimals stay decimal (scale dropped,
  // precision widened by one unless scale is already 0); others become long.
  override def dataType: DataType = child.dataType match {
    case dt @ DecimalType.Fixed(_, 0) => dt
    case DecimalType.Fixed(precision, scale) =>
      DecimalType.bounded(precision - scale + 1, 0)
    case _ => LongType
  }

  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(DoubleType, DecimalType, LongType))

  // Long input is already integral (identity); double goes through math.floor
  // then truncates to long; decimals delegate to Decimal.floor.
  protected override def nullSafeEval(input: Any): Any = child.dataType match {
    case LongType => input.asInstanceOf[Long]
    case DoubleType => f(input.asInstanceOf[Double]).toLong
    case DecimalType.Fixed(_, _) => input.asInstanceOf[Decimal].floor
  }

  // Codegen mirrors nullSafeEval for each input type.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    child.dataType match {
      case DecimalType.Fixed(_, 0) => defineCodeGen(ctx, ev, c => s"$c")
      case DecimalType.Fixed(_, _) =>
        defineCodeGen(ctx, ev, c => s"$c.floor()")
      case LongType => defineCodeGen(ctx, ev, c => s"$c")
      case _ => defineCodeGen(ctx, ev, c => s"(long)(java.lang.Math.${funcName}($c))")
    }
  }
}
object Factorial {

  // Precomputed 0! through 20!; 21! no longer fits in a signed 64-bit long.
  private val factorials: Array[Long] =
    (1 to 20).scanLeft(1L)((acc, i) => acc * i).toArray

  /**
   * Returns n! for 0 <= n <= 20, or Long.MaxValue when n is larger than the
   * biggest factorial representable as a Long. Callers are expected to have
   * range-checked n already; negative values are not supported here.
   */
  def factorial(n: Int): Long =
    if (n < factorials.length) factorials(n) else Long.MaxValue
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the factorial of `expr`. `expr` is [0..20]. Otherwise, null.",
  examples = """
    Examples:
      > SELECT _FUNC_(5);
       120
  """,
  since = "1.5.0",
  group = "math_funcs")
case class Factorial(child: Expression)
  extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant {

  override def inputTypes: Seq[DataType] = Seq(IntegerType)
  override def dataType: DataType = LongType

  // A non-null input outside [0, 20] still evaluates to null, so the
  // expression is nullable regardless of the child's nullability.
  override def nullable: Boolean = true

  // Null for out-of-range inputs, otherwise the precomputed factorial.
  protected override def nullSafeEval(input: Any): Any = {
    val n = input.asInstanceOf[jl.Integer].intValue()
    if (n >= 0 && n <= 20) Factorial.factorial(n) else null
  }

  // Codegen performs the same range check and delegates to the companion.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, eval => {
      s"""
        if ($eval > 20 || $eval < 0) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} =
            org.apache.spark.sql.catalyst.expressions.Factorial.factorial($eval);
        }
      """
    })
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the natural logarithm (base e) of `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(1);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Log(child: Expression) extends UnaryLogExpression(StrictMath.log, "LOG") {
  // Displayed as "ln" unless registered under a different alias.
  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("ln")
}

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the logarithm of `expr` with base 2.",
  examples = """
    Examples:
      > SELECT _FUNC_(2);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Log2(child: Expression)
  extends UnaryLogExpression((x: Double) => StrictMath.log(x) / StrictMath.log(2), "LOG2") {
  // Custom codegen: there is no single StrictMath method for log base 2, so
  // emit the same log-quotient used by the interpreted path.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, c =>
      s"""
        if ($c <= $yAsymptote) {
          ${ev.isNull} = true;
        } else {
          ${ev.value} = java.lang.StrictMath.log($c) / java.lang.StrictMath.log(2);
        }
      """
    )
  }
}

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the logarithm of `expr` with base 10.",
  examples = """
    Examples:
      > SELECT _FUNC_(10);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Log10(child: Expression) extends UnaryLogExpression(StrictMath.log10, "LOG10")

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns log(1 + `expr`).",
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Log1p(child: Expression) extends UnaryLogExpression(StrictMath.log1p, "LOG1P") {
  // log1p is defined down to (but excluding) -1, so shift the null asymptote.
  protected override val yAsymptote: Double = -1.0
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the double value that is closest in value to the argument and is equal to a mathematical integer.",
  examples = """
    Examples:
      > SELECT _FUNC_(12.3456);
       12.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// scalastyle:on line.size.limit
case class Rint(child: Expression) extends UnaryMathExpression(math.rint, "ROUND") {
  // SQL name is ROUND but the java.lang.Math method and display name are rint.
  override def funcName: String = "rint"
  override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("rint")
}

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns -1.0, 0.0 or 1.0 as `expr` is negative, 0 or positive.",
  examples = """
    Examples:
      > SELECT _FUNC_(40);
       1.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.signum; codegen emits java.lang.Math.signum.
case class Signum(child: Expression) extends UnaryMathExpression(math.signum, "SIGNUM")

@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the sine of `expr`, as if computed by `java.lang.Math._FUNC_`.",
  arguments = """
    Arguments:
      * expr - angle in radians
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.sin; codegen emits java.lang.Math.sin.
case class Sin(child: Expression) extends UnaryMathExpression(math.sin, "SIN")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns hyperbolic sine of `expr`, as if computed by `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - hyperbolic angle
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.sinh; codegen emits java.lang.Math.sinh.
case class Sinh(child: Expression) extends UnaryMathExpression(math.sinh, "SINH")
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns inverse hyperbolic sine of `expr`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "3.0.0",
  group = "math_funcs")
// asinh(x) via the closed form log(x + sqrt(x^2 + 1)). Negative infinity is
// special-cased because the closed form would evaluate -Inf + Inf = NaN.
case class Asinh(child: Expression)
  extends UnaryMathExpression((x: Double) => x match {
    case Double.NegativeInfinity => Double.NegativeInfinity
    case _ => StrictMath.log(x + math.sqrt(x * x + 1.0)) }, "ASINH") {
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Mirrors the interpreted path, including the -Infinity special case.
    defineCodeGen(ctx, ev, c =>
      s"$c == Double.NEGATIVE_INFINITY ? Double.NEGATIVE_INFINITY : " +
        s"java.lang.StrictMath.log($c + java.lang.Math.sqrt($c * $c + 1.0))")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the square root of `expr`.",
  examples = """
    Examples:
      > SELECT _FUNC_(4);
       2.0
  """,
  since = "1.1.1",
  group = "math_funcs")
// Interpreted path uses scala.math.sqrt; codegen emits java.lang.Math.sqrt.
case class Sqrt(child: Expression) extends UnaryMathExpression(math.sqrt, "SQRT")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the tangent of `expr`, as if computed by `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - angle in radians
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.tan; codegen emits java.lang.Math.tan.
case class Tan(child: Expression) extends UnaryMathExpression(math.tan, "TAN")
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the cotangent of `expr`, as if computed by `1/java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - angle in radians
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(1);
       0.6420926159343306
  """,
  since = "2.3.0",
  group = "math_funcs")
case class Cot(child: Expression)
  extends UnaryMathExpression((x: Double) => 1 / math.tan(x), "COT") {
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // NOTE(review): unlike sibling overrides (e.g. Exp, Acosh) the lambda here
    // emits a full "${ev.value} = ...;" statement rather than a bare
    // expression — verify against defineCodeGen whether this produces a
    // harmless double assignment in the generated Java.
    defineCodeGen(ctx, ev, c => s"${ev.value} = 1 / java.lang.Math.tan($c);")
  }
}
@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns the hyperbolic tangent of `expr`, as if computed by
      `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * expr - hyperbolic angle
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Interpreted path uses scala.math.tanh; codegen emits java.lang.Math.tanh.
case class Tanh(child: Expression) extends UnaryMathExpression(math.tanh, "TANH")

@ExpressionDescription(
  usage = """
    _FUNC_(expr) - Returns inverse hyperbolic tangent of `expr`.
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0);
       0.0
      > SELECT _FUNC_(2);
       NaN
  """,
  since = "3.0.0",
  group = "math_funcs")
case class Atanh(child: Expression)
  // SPARK-28519: more accurate express for 1/2 * ln((1 + x) / (1 - x))
  extends UnaryMathExpression((x: Double) =>
    0.5 * (StrictMath.log1p(x) - StrictMath.log1p(-x)), "ATANH") {
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Mirrors the interpreted log1p-based formulation above.
    defineCodeGen(ctx, ev,
      c => s"0.5 * (java.lang.StrictMath.log1p($c) - java.lang.StrictMath.log1p(- $c))")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts radians to degrees.",
  arguments = """
    Arguments:
      * expr - angle in radians
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(3.141592653589793);
       180.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class ToDegrees(child: Expression) extends UnaryMathExpression(math.toDegrees, "DEGREES") {
  // The java.lang.Math method name (toDegrees) differs from the SQL name.
  override def funcName: String = "toDegrees"
}

@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts degrees to radians.",
  arguments = """
    Arguments:
      * expr - angle in degrees
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(180);
       3.141592653589793
  """,
  since = "1.4.0",
  group = "math_funcs")
case class ToRadians(child: Expression) extends UnaryMathExpression(math.toRadians, "RADIANS") {
  // The java.lang.Math method name (toRadians) differs from the SQL name.
  override def funcName: String = "toRadians"
}
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the string representation of the long value `expr` represented in binary.",
  examples = """
    Examples:
      > SELECT _FUNC_(13);
       1101
      > SELECT _FUNC_(-13);
       1111111111111111111111111111111111111111111111111111111111110011
      > SELECT _FUNC_(13.3);
       1101
  """,
  since = "1.5.0",
  group = "math_funcs")
// scalastyle:on line.size.limit
case class Bin(child: Expression)
  extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant with Serializable {

  override def inputTypes: Seq[DataType] = Seq(LongType)
  override def dataType: DataType = StringType

  // Delegates to java.lang.Long.toBinaryString (two's complement for negatives).
  protected override def nullSafeEval(input: Any): Any =
    UTF8String.fromString(jl.Long.toBinaryString(input.asInstanceOf[Long]))

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c) =>
      s"UTF8String.fromString(java.lang.Long.toBinaryString($c))")
  }
}
object Hex {

  // 'ABCDEF0123456789' digits used for encoding, stored as ASCII bytes.
  val hexDigits = Array[Char](
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'
  ).map(_.toByte)

  // lookup table to translate '0' -> 0 ... 'F'/'f' -> 15; -1 marks invalid digits
  val unhexDigits = {
    val array = Array.fill[Byte](128)(-1)
    (0 to 9).foreach(i => array('0' + i) = i.toByte)
    (0 to 5).foreach(i => array('A' + i) = (i + 10).toByte)
    (0 to 5).foreach(i => array('a' + i) = (i + 10).toByte)
    array
  }

  /** Encodes each input byte as two upper-case hex characters. */
  def hex(bytes: Array[Byte]): UTF8String = {
    val length = bytes.length
    val value = new Array[Byte](length * 2)
    var i = 0
    while (i < length) {
      value(i * 2) = Hex.hexDigits((bytes(i) & 0xF0) >> 4)
      value(i * 2 + 1) = Hex.hexDigits(bytes(i) & 0x0F)
      i += 1
    }
    UTF8String.fromBytes(value)
  }

  /** Encodes a long as its hex representation with no leading zeros. */
  def hex(num: Long): UTF8String = {
    // Extract the hex digits of num into value[] from right to left
    val value = new Array[Byte](16)
    var numBuf = num
    var len = 0
    do {
      len += 1
      value(value.length - len) = Hex.hexDigits((numBuf & 0xF).toInt)
      numBuf >>>= 4
    } while (numBuf != 0)
    UTF8String.fromBytes(java.util.Arrays.copyOfRange(value, value.length - len, value.length))
  }

  /**
   * Decodes a hex string (as ASCII bytes) into the bytes it represents.
   * Odd-length input is treated as if padded with a leading '0'.
   * Returns null on any non-hex character (including any byte >= 0x80).
   */
  def unhex(bytes: Array[Byte]): Array[Byte] = {
    val out = new Array[Byte]((bytes.length + 1) >> 1)
    var i = 0
    // j tracks the next output slot independently of i; previously the loop
    // below used out(i / 2), which for odd-length input (where i starts at 1)
    // overwrote out(0) and never filled the last output byte.
    var j = 0
    if ((bytes.length & 0x01) != 0) {
      // padding with '0': the first character alone forms the first byte
      if (bytes(0) < 0) {
        return null
      }
      val v = Hex.unhexDigits(bytes(0))
      if (v == -1) {
        return null
      }
      out(0) = v
      i += 1
      j += 1
    }
    // two characters form the hex value.
    while (i < bytes.length) {
      if (bytes(i) < 0 || bytes(i + 1) < 0) {
        return null
      }
      val first = Hex.unhexDigits(bytes(i))
      val second = Hex.unhexDigits(bytes(i + 1))
      if (first == -1 || second == -1) {
        return null
      }
      out(j) = (((first << 4) | second) & 0xFF).toByte
      i += 2
      j += 1
    }
    out
  }
}
/**
 * If the argument is an INT or binary, hex returns the number as a STRING in hexadecimal format.
 * Otherwise if the number is a STRING, it converts each character into its hex representation
 * and returns the resulting STRING. Negative numbers would be treated as two's complement.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts `expr` to hexadecimal.",
  examples = """
    Examples:
      > SELECT _FUNC_(17);
       11
      > SELECT _FUNC_('Spark SQL');
       537061726B2053514C
  """,
  since = "1.5.0",
  group = "math_funcs")
case class Hex(child: Expression)
  extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant {

  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(LongType, BinaryType, StringType))

  override def dataType: DataType = StringType

  // Dispatch on the child's type: longs use the numeric overload, binary and
  // string inputs encode their raw bytes.
  protected override def nullSafeEval(num: Any): Any = child.dataType match {
    case LongType => Hex.hex(num.asInstanceOf[Long])
    case BinaryType => Hex.hex(num.asInstanceOf[Array[Byte]])
    case StringType => Hex.hex(num.asInstanceOf[UTF8String].getBytes)
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (c) => {
      // Strip the trailing '$' so generated Java references the singleton class.
      val hex = Hex.getClass.getName.stripSuffix("$")
      s"${ev.value} = " + (child.dataType match {
        case StringType => s"""$hex.hex($c.getBytes());"""
        case _ => s"""$hex.hex($c);"""
      })
    })
  }
}
/**
 * Performs the inverse operation of HEX.
 * Resulting characters are returned as a byte array.
 */
@ExpressionDescription(
  usage = "_FUNC_(expr) - Converts hexadecimal `expr` to binary.",
  examples = """
    Examples:
      > SELECT decode(_FUNC_('537061726B2053514C'), 'UTF-8');
       Spark SQL
  """,
  since = "1.5.0",
  group = "math_funcs")
case class Unhex(child: Expression)
  extends UnaryExpression with ImplicitCastInputTypes with NullIntolerant {

  override def inputTypes: Seq[AbstractDataType] = Seq(StringType)

  // Nullable because Hex.unhex returns null for non-hex input (see the null
  // propagation in the generated code below).
  override def nullable: Boolean = true
  override def dataType: DataType = BinaryType

  protected override def nullSafeEval(num: Any): Any =
    Hex.unhex(num.asInstanceOf[UTF8String].getBytes)

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    nullSafeCodeGen(ctx, ev, (c) => {
      // Strip the trailing '$' so generated Java references the singleton class.
      val hex = Hex.getClass.getName.stripSuffix("$")
      s"""
        ${ev.value} = $hex.unhex($c.getBytes());
        ${ev.isNull} = ${ev.value} == null;
       """
    })
  }
}
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
// Binary math functions
////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////
@ExpressionDescription(
  usage = """
    _FUNC_(exprY, exprX) - Returns the angle in radians between the positive x-axis of a plane
      and the point given by the coordinates (`exprX`, `exprY`), as if computed by
      `java.lang.Math._FUNC_`.
  """,
  arguments = """
    Arguments:
      * exprY - coordinate on y-axis
      * exprX - coordinate on x-axis
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(0, 0);
       0.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Atan2(left: Expression, right: Expression)
  extends BinaryMathExpression(math.atan2, "ATAN2") {

  // With codegen, the values returned by -0.0 and 0.0 are different. Adding
  // +0.0 normalizes -0.0 to 0.0 so the interpreted and generated paths agree.
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    math.atan2(input1.asInstanceOf[Double] + 0.0, input2.asInstanceOf[Double] + 0.0)
  }

  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Same -0.0 normalization as the interpreted path above.
    defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.Math.atan2($c1 + 0.0, $c2 + 0.0)")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Raises `expr1` to the power of `expr2`.",
  examples = """
    Examples:
      > SELECT _FUNC_(2, 3);
       8.0
  """,
  since = "1.4.0",
  group = "math_funcs")
case class Pow(left: Expression, right: Expression)
  extends BinaryMathExpression(StrictMath.pow, "POWER") {
  // Override codegen to use StrictMath (the base class would emit
  // java.lang.Math.power from the SQL name), matching the interpreted path.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (c1, c2) => s"java.lang.StrictMath.pow($c1, $c2)")
  }
}
/**
 * Bitwise left shift.
 *
 * @param left the base number to shift.
 * @param right number of bits to left shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise left shift.",
  examples = """
    Examples:
      > SELECT _FUNC_(2, 1);
       4
  """,
  since = "1.5.0",
  group = "math_funcs")
case class ShiftLeft(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant {

  // The base may be INT or BIGINT; the shift distance is always an INT.
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)

  // The result has the same type as the shifted base.
  override def dataType: DataType = left.dataType

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val bits = input2.asInstanceOf[jl.Integer]
    input1 match {
      case l: jl.Long => l << bits
      case i: jl.Integer => i << bits
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, (base, shift) => s"$base << $shift")
}
/**
 * Bitwise (signed) right shift.
 *
 * @param left the base number to shift.
 * @param right number of bits to right shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise (signed) right shift.",
  examples = """
    Examples:
      > SELECT _FUNC_(4, 1);
       2
  """,
  since = "1.5.0",
  group = "bitwise_funcs")
case class ShiftRight(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant {

  // The base may be INT or BIGINT; the shift distance is always an INT.
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)

  // The result has the same type as the shifted base.
  override def dataType: DataType = left.dataType

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val bits = input2.asInstanceOf[jl.Integer]
    input1 match {
      case l: jl.Long => l >> bits
      case i: jl.Integer => i >> bits
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
    defineCodeGen(ctx, ev, (base, shift) => s"$base >> $shift")
}
/**
 * Bitwise unsigned right shift, for integer and long data type.
 *
 * @param left the base number.
 * @param right the number of bits to right shift.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Bitwise unsigned right shift.",
  examples = """
    Examples:
      > SELECT _FUNC_(4, 1);
       2
  """,
  since = "1.5.0",
  group = "bitwise_funcs")
case class ShiftRightUnsigned(left: Expression, right: Expression)
  extends BinaryExpression with ImplicitCastInputTypes with NullIntolerant {

  // Base operand: int or long; shift distance: always int.
  override def inputTypes: Seq[AbstractDataType] =
    Seq(TypeCollection(IntegerType, LongType), IntegerType)

  // Result type follows the base operand.
  override def dataType: DataType = left.dataType

  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val distance = input2.asInstanceOf[jl.Integer]
    input1 match {
      case l: jl.Long => l >>> distance
      case i: jl.Integer => i >>> distance
    }
  }

  override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (base, distance) => s"$base >>> $distance")
  }
}
@ExpressionDescription(
  usage = "_FUNC_(expr1, expr2) - Returns sqrt(`expr1`**2 + `expr2`**2).",
  examples = """
    Examples:
      > SELECT _FUNC_(3, 4);
       5.0
  """,
  since = "1.4.0",
  group = "math_funcs")
// Both interpreted evaluation and code generation come from
// BinaryMathExpression, wired to math.hypot (which per the JDK spec computes
// sqrt(x^2 + y^2) without intermediate overflow or underflow).
case class Hypot(left: Expression, right: Expression)
  extends BinaryMathExpression(math.hypot, "HYPOT")
/**
 * Computes the logarithm of a number.
 *
 * @param left the logarithm base, default to e.
 * @param right the number to compute the logarithm of.
 */
@ExpressionDescription(
  usage = "_FUNC_(base, expr) - Returns the logarithm of `expr` with `base`.",
  examples = """
    Examples:
      > SELECT _FUNC_(10, 100);
       2.0
  """,
  since = "1.5.0",
  group = "math_funcs")
case class Logarithm(left: Expression, right: Expression)
  extends BinaryMathExpression((c1, c2) => StrictMath.log(c2) / StrictMath.log(c1), "LOG") {
  /**
   * Natural log, i.e. using e as the base.
   */
  def this(child: Expression) = {
    this(EulerNumber(), child)
  }
  // Nullable because non-positive bases or operands evaluate to null below.
  override def nullable: Boolean = true
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val dLeft = input1.asInstanceOf[Double]
    val dRight = input2.asInstanceOf[Double]
    // Unlike Hive, we support Log base in (0.0, 1.0]
    if (dLeft <= 0.0 || dRight <= 0.0) null else StrictMath.log(dRight) / StrictMath.log(dLeft)
  }
  // When the base is e (the natural-log auxiliary constructor above), emit a
  // single log call instead of the change-of-base division; the null guards
  // mirror nullSafeEval.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    if (left.isInstanceOf[EulerNumber]) {
      nullSafeCodeGen(ctx, ev, (c1, c2) =>
        s"""
          if ($c2 <= 0.0) {
            ${ev.isNull} = true;
          } else {
            ${ev.value} = java.lang.StrictMath.log($c2);
          }
        """)
    } else {
      nullSafeCodeGen(ctx, ev, (c1, c2) =>
        s"""
          if ($c1 <= 0.0 || $c2 <= 0.0) {
            ${ev.isNull} = true;
          } else {
            ${ev.value} = java.lang.StrictMath.log($c2) / java.lang.StrictMath.log($c1);
          }
        """)
    }
  }
}
/**
 * Round the `child`'s result to `scale` decimal place when `scale` >= 0
 * or round at integral part when `scale` < 0.
 *
 * Child of IntegralType would round to itself when `scale` >= 0.
 * Child of FractionalType whose value is NaN or Infinite would always round to itself.
 *
 * Round's dataType would always equal to `child`'s dataType except for DecimalType,
 * which would lead scale decrease from the origin DecimalType.
 *
 * @param child expr to be round, all [[NumericType]] is allowed as Input
 * @param scale new scale to be round to, this should be a constant int at runtime
 * @param mode rounding mode (e.g. HALF_UP, HALF_EVEN)
 * @param modeStr rounding mode string name (e.g. "ROUND_HALF_UP", "ROUND_HALF_EVEN")
 */
abstract class RoundBase(child: Expression, scale: Expression,
    mode: BigDecimal.RoundingMode.Value, modeStr: String)
  extends BinaryExpression with Serializable with ImplicitCastInputTypes {
  override def left: Expression = child
  override def right: Expression = scale
  // round of Decimal would eval to null if it fails to `changePrecision`
  override def nullable: Boolean = true
  override def foldable: Boolean = child.foldable
  // Lazy because it reads `_scale`, which forces evaluation of `scale` below.
  override lazy val dataType: DataType = child.dataType match {
    // if the new scale is bigger which means we are scaling up,
    // keep the original scale as `Decimal` does
    case DecimalType.Fixed(p, s) => DecimalType(p, if (_scale > s) s else _scale)
    case t => t
  }
  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType, IntegerType)
  // `scale` must be foldable so it can be evaluated once, up front, into `scaleV`.
  override def checkInputDataTypes(): TypeCheckResult = {
    super.checkInputDataTypes() match {
      case TypeCheckSuccess =>
        if (scale.foldable) {
          TypeCheckSuccess
        } else {
          TypeCheckFailure("Only foldable Expression is allowed for scale arguments")
        }
      case f => f
    }
  }
  // Avoid repeated evaluation since `scale` is a constant int,
  // avoid unnecessary `child` evaluation in both codegen and non-codegen eval
  // by checking if scaleV == null as well.
  private lazy val scaleV: Any = scale.eval(EmptyRow)
  private lazy val _scale: Int = scaleV.asInstanceOf[Int]
  override def eval(input: InternalRow): Any = {
    if (scaleV == null) { // if scale is null, no need to eval its child at all
      null
    } else {
      val evalE = child.eval(input)
      if (evalE == null) {
        null
      } else {
        nullSafeEval(evalE)
      }
    }
  }
  // not overriding since _scale is a constant int at runtime
  def nullSafeEval(input1: Any): Any = {
    dataType match {
      case DecimalType.Fixed(_, s) =>
        val decimal = input1.asInstanceOf[Decimal]
        // Overflow cannot happen, so no need to control nullOnOverflow
        decimal.toPrecision(decimal.precision, s, mode)
      case ByteType =>
        BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte
      case ShortType =>
        BigDecimal(input1.asInstanceOf[Short]).setScale(_scale, mode).toShort
      case IntegerType =>
        BigDecimal(input1.asInstanceOf[Int]).setScale(_scale, mode).toInt
      case LongType =>
        BigDecimal(input1.asInstanceOf[Long]).setScale(_scale, mode).toLong
      case FloatType =>
        val f = input1.asInstanceOf[Float]
        if (f.isNaN || f.isInfinite) {
          f
        } else {
          BigDecimal(f.toDouble).setScale(_scale, mode).toFloat
        }
      case DoubleType =>
        val d = input1.asInstanceOf[Double]
        if (d.isNaN || d.isInfinite) {
          d
        } else {
          BigDecimal(d).setScale(_scale, mode).toDouble
        }
    }
  }
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    val ce = child.genCode(ctx)
    val evaluationCode = dataType match {
      case DecimalType.Fixed(_, s) =>
        s"""
           |${ev.value} = ${ce.value}.toPrecision(${ce.value}.precision(), $s,
           |  Decimal.$modeStr(), true);
           |${ev.isNull} = ${ev.value} == null;
         """.stripMargin
      // For the integral types below, rounding at a non-negative scale is the
      // identity (see class doc), so codegen only does real work when _scale < 0.
      case ByteType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).byteValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case ShortType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).shortValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case IntegerType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).intValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case LongType =>
        if (_scale < 0) {
          s"""
          ${ev.value} = new java.math.BigDecimal(${ce.value}).
            setScale(${_scale}, java.math.BigDecimal.${modeStr}).longValue();"""
        } else {
          s"${ev.value} = ${ce.value};"
        }
      case FloatType => // if child eval to NaN or Infinity, just return it.
        s"""
          if (Float.isNaN(${ce.value}) || Float.isInfinite(${ce.value})) {
            ${ev.value} = ${ce.value};
          } else {
            ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
              setScale(${_scale}, java.math.BigDecimal.${modeStr}).floatValue();
          }"""
      case DoubleType => // if child eval to NaN or Infinity, just return it.
        s"""
          if (Double.isNaN(${ce.value}) || Double.isInfinite(${ce.value})) {
            ${ev.value} = ${ce.value};
          } else {
            ${ev.value} = java.math.BigDecimal.valueOf(${ce.value}).
              setScale(${_scale}, java.math.BigDecimal.${modeStr}).doubleValue();
          }"""
    }
    val javaType = CodeGenerator.javaType(dataType)
    if (scaleV == null) { // if scale is null, no need to eval its child at all
      ev.copy(code = code"""
        boolean ${ev.isNull} = true;
        $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};""")
    } else {
      ev.copy(code = code"""
        ${ce.code}
        boolean ${ev.isNull} = ${ce.isNull};
        $javaType ${ev.value} = ${CodeGenerator.defaultValue(dataType)};
        if (!${ev.isNull}) {
          $evaluationCode
        }""")
    }
  }
}
/**
 * Round an expression to d decimal places using HALF_UP rounding mode.
 * round(2.5) == 3.0, round(3.5) == 4.0.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_UP rounding mode.",
  examples = """
    Examples:
      > SELECT _FUNC_(2.5, 0);
       3
  """,
  since = "1.5.0",
  group = "math_funcs")
// scalastyle:on line.size.limit
case class Round(child: Expression, scale: Expression)
  extends RoundBase(child, scale, BigDecimal.RoundingMode.HALF_UP, "ROUND_HALF_UP")
    with Serializable with ImplicitCastInputTypes {
  // Convenience constructor: round to the nearest integer (scale 0).
  def this(child: Expression) = this(child, Literal(0))
}
/**
 * Round an expression to d decimal places using HALF_EVEN rounding mode,
 * also known as Gaussian rounding or bankers' rounding.
 * round(2.5) = 2.0, round(3.5) = 4.0.
 */
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(expr, d) - Returns `expr` rounded to `d` decimal places using HALF_EVEN rounding mode.",
  examples = """
    Examples:
      > SELECT _FUNC_(2.5, 0);
       2
  """,
  since = "2.0.0",
  group = "math_funcs")
// scalastyle:on line.size.limit
case class BRound(child: Expression, scale: Expression)
  extends RoundBase(child, scale, BigDecimal.RoundingMode.HALF_EVEN, "ROUND_HALF_EVEN")
    with Serializable with ImplicitCastInputTypes {
  // Convenience constructor: round to the nearest integer (scale 0).
  def this(child: Expression) = this(child, Literal(0))
}
object WidthBucket {
  /**
   * Assigns `value` to one of `numBucket` equal-width buckets spanning the
   * range between `min` and `max` (the bounds may be given in descending
   * order). Returns null when `numBucket` is non-positive or Long.MaxValue,
   * when `value` is NaN, when the bounds coincide, or when either bound is
   * NaN/Infinite. Values before the range map to 0 and values past it map
   * to `numBucket` + 1.
   */
  def computeBucketNumber(value: Double, min: Double, max: Double, numBucket: Long): jl.Long = {
    def unusable(d: Double): Boolean = jl.Double.isNaN(d) || jl.Double.isInfinite(d)
    if (numBucket <= 0 || numBucket == Long.MaxValue || jl.Double.isNaN(value) ||
      min == max || unusable(min) || unusable(max)) {
      null
    } else {
      val lower = math.min(min, max)
      val upper = math.max(min, max)
      val ascending = min < max
      val beforeRange = if (ascending) value < lower else value > upper
      val pastRange = if (ascending) value >= upper else value <= lower
      if (beforeRange) {
        0L
      } else if (pastRange) {
        numBucket + 1L
      } else {
        // Linear interpolation into the buckets, measured from the bound the
        // caller supplied first.
        val distance = if (ascending) value - lower else upper - value
        (numBucket.toDouble * distance / (upper - lower)).toLong + 1L
      }
    }
  }
}
/**
 * Returns the bucket number into which the value of this expression would fall
 * after being evaluated. Note that input arguments must follow conditions listed below;
 * otherwise, the method will return null.
 *  - `numBucket` must be greater than zero and be less than Long.MaxValue
 *  - `value`, `min`, and `max` cannot be NaN
 *  - `min` bound cannot equal `max`
 *  - `min` and `max` must be finite
 *
 * Note: If `minValue` > `maxValue`, a return value is as follows;
 *  if `value` > `minValue`, it returns 0.
 *  if `value` <= `maxValue`, it returns `numBucket` + 1.
 *  otherwise, it returns (`numBucket` * (`minValue` - `value`) / (`minValue` - `maxValue`)) + 1
 *
 * @param value is the expression to compute a bucket number in the histogram
 * @param minValue is the minimum value of the histogram
 * @param maxValue is the maximum value of the histogram
 * @param numBucket is the number of buckets
 */
@ExpressionDescription(
  usage = """
    _FUNC_(value, min_value, max_value, num_bucket) - Returns the bucket number to which
      `value` would be assigned in an equiwidth histogram with `num_bucket` buckets,
      in the range `min_value` to `max_value`."
  """,
  examples = """
    Examples:
      > SELECT _FUNC_(5.3, 0.2, 10.6, 5);
       3
      > SELECT _FUNC_(-2.1, 1.3, 3.4, 3);
       0
      > SELECT _FUNC_(8.1, 0.0, 5.7, 4);
       5
      > SELECT _FUNC_(-0.9, 5.2, 0.5, 2);
       3
  """,
  since = "3.1.0",
  group = "math_funcs")
case class WidthBucket(
    value: Expression,
    minValue: Expression,
    maxValue: Expression,
    numBucket: Expression)
  extends QuaternaryExpression with ImplicitCastInputTypes with NullIntolerant {
  override def children: Seq[Expression] = Seq(value, minValue, maxValue, numBucket)
  // All bounds are cast to double, the bucket count to long.
  override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType, DoubleType, DoubleType, LongType)
  override def dataType: DataType = LongType
  // Nullable because computeBucketNumber returns null for invalid arguments.
  override def nullable: Boolean = true
  override def prettyName: String = "width_bucket"
  override protected def nullSafeEval(input: Any, min: Any, max: Any, numBucket: Any): Any = {
    WidthBucket.computeBucketNumber(
      input.asInstanceOf[Double],
      min.asInstanceOf[Double],
      max.asInstanceOf[Double],
      numBucket.asInstanceOf[Long])
  }
  // Generated code delegates to the same companion-object method used by the
  // interpreted path, so both evaluation modes share one implementation.
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    defineCodeGen(ctx, ev, (input, min, max, numBucket) =>
      "org.apache.spark.sql.catalyst.expressions.WidthBucket" +
        s".computeBucketNumber($input, $min, $max, $numBucket)")
  }
}
| witgo/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala | Scala | apache-2.0 | 47,350 |
package dotty.tools.dotc.semanticdb
import dotty.tools.dotc.semanticdb.internal._
import scala.annotation.internal.sharable
object TextDocuments {
  // Deserializes a TextDocuments message from a raw protobuf payload.
  def parseFrom(in: Array[Byte]): TextDocuments = {
    parseFrom(SemanticdbInputStream.newInstance(in))
  }
  // Deserializes by merging the stream's fields into the empty default instance.
  def parseFrom(in: SemanticdbInputStream): TextDocuments = {
    defaultInstance.mergeFrom(in)
  }
  // The empty message (no documents); also the base onto which parses merge.
  val defaultInstance: TextDocuments = TextDocuments(Nil)
}
final case class TextDocuments(documents: Seq[TextDocument]) extends SemanticdbMessage[TextDocuments] derives Eql {
  // Cached wire size of this message; 0 is the "not yet computed" sentinel.
  @sharable
  private var __serializedSizeCachedValue: Int = 0
  private def __computeSerializedValue(): Int = {
    var __size = 0
    documents.foreach { __item =>
      val __value = __item
      // Each document is encoded as a length-delimited field:
      // 1 tag byte + varint length + the nested message payload.
      __size += 1 +
        SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) +
        __value.serializedSize
    }
    __size
  }
  final override def serializedSize: Int = {
    var read = __serializedSizeCachedValue
    if (read == 0) {
      read = __computeSerializedValue()
      __serializedSizeCachedValue = read
    }
    read
  }
  def writeTo(`_output__`: SemanticdbOutputStream): Unit = {
    documents.foreach { __v =>
      val __m = __v
      // Field number 1, wire type 2 (length-delimited).
      _output__.writeTag(1, 2)
      _output__.writeUInt32NoTag(__m.serializedSize)
      __m.writeTo(_output__)
    };
  }
  def mergeFrom(`_input__`: SemanticdbInputStream): TextDocuments = {
    // Start from the documents already held, appending any parsed from input.
    val __documents = (Vector.newBuilder[TextDocument] ++= this.documents)
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        // Tag 0 marks the end of the stream.
        case 0 => _done__ = true
        // Tag 10 == (1 << 3) | 2: field 1, length-delimited — one document.
        case 10 =>
          __documents += LiteParser.readMessage(_input__, TextDocument.defaultInstance)
        // Unknown fields are skipped rather than failing the parse.
        case tag => _input__.skipField(tag)
      }
    }
    TextDocuments(documents = __documents.result())
  }
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/semanticdb/TextDocuments.scala | Scala | apache-2.0 | 1,831 |
package com.seanshubin.detangler.domain
import java.nio.charset.Charset
import java.nio.file.{LinkOption, Path}
import com.seanshubin.detangler.contract.test.FilesNotImplemented
/**
 * In-memory file-system stand-in for tests: file contents are looked up in
 * `fileContentByName` keyed by the path's string form, and rendered to bytes
 * with `charset`.
 */
class FilesStub(fileContentByName: Map[String, String], charset: Charset) extends FilesNotImplemented {
  override def exists(path: Path, options: LinkOption*): Boolean =
    fileContentByName.contains(path.toString)

  // Throws NoSuchElementException for an unknown path (Map.apply semantics).
  override def readAllBytes(path: Path): Seq[Byte] =
    fileContentByName(path.toString).getBytes(charset).toSeq
}
| SeanShubin/detangler | domain/src/test/scala/com/seanshubin/detangler/domain/FilesStub.scala | Scala | unlicense | 587 |
package example
import java.util.NoSuchElementException
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* This class implements a ScalaTest test suite for the methods in object
* `Lists` that need to be implemented as part of this assignment. A test
* suite is simply a collection of individual tests for some specific
* component of a program.
*
* A test suite is created by defining a class which extends the type
* `org.scalatest.FunSuite`. When running ScalaTest, it will automatically
* find this class and execute all of its tests.
*
* Adding the `@RunWith` annotation enables the test suite to be executed
* inside eclipse using the built-in JUnit test runner.
*
* You have two options for running this test suite:
*
* - Start the sbt console and run the "test" command
* - Right-click this file in eclipse and chose "Run As" - "JUnit Test"
*/
@RunWith(classOf[JUnitRunner])
class ListsSuite extends FunSuite {

  /**
   * Tests are written using the `test` operator which takes two arguments:
   *
   * - A description of the test. This description has to be unique, no two
   *   tests can have the same description.
   * - The test body, a piece of Scala code that implements the test
   *
   * The most common way to implement a test body is using the method `assert`
   * which tests that its argument evaluates to `true`. So one of the simplest
   * successful tests is the following:
   */
  test("one plus one is two")(assert(1 + 1 == 2))

  /**
   * In Scala, it is allowed to pass an argument to a method using the block
   * syntax, i.e. `{ argument }` instead of parentheses `(argument)`.
   *
   * This allows tests to be written in a more readable manner:
   */
  test("one plus one is three?") {
    assert(1 + 1 != 3) // 1 + 1 is two, so the inequality holds.
  }

  /**
   * One problem with a plain `assert` is that on failure ScalaTest will
   * only tell you that a test failed, but it will not tell you what was
   * the reason for the failure. The output looks like this:
   *
   * {{{
   *   [info] - one plus one is three? *** FAILED ***
   * }}}
   *
   * This situation can be improved by using a special equality operator
   * `===` instead of `==` (this is only possible in ScalaTest). A failing
   * `===` assertion reports the values involved, for example:
   *
   * {{{
   *   [info] - details why one plus one is not three *** FAILED ***
   *   [info]   2 did not equal 3 (ListsSuite.scala:67)
   * }}}
   *
   * We recommend to always use the `===` equality operator when writing tests.
   */
  test("details why one plus one is not three") {
    // Fixed: assert the inequality the test name describes (was `1 + 1 === 3`,
    // a deliberately failing placeholder).
    assert(1 + 1 !== 3)
  }

  /**
   * In order to test the exceptional behavior of a methods, ScalaTest offers
   * the `intercept` operation.
   *
   * In the following example, we test the fact that the method `intNotZero`
   * throws an `IllegalArgumentException` if its argument is `0`.
   */
  test("intNotZero throws an exception if its argument is 0") {
    intercept[IllegalArgumentException] {
      intNotZero(0)
    }
  }

  /** Returns `x` unchanged, rejecting zero with an IllegalArgumentException. */
  def intNotZero(x: Int): Int = {
    if (x == 0) throw new IllegalArgumentException("zero is not allowed")
    else x
  }

  /**
   * Now we finally write some tests for the list functions that have to be
   * implemented for this assignment. We fist import all members of the
   * `List` object.
   */
  import Lists._

  /**
   * We only provide two very basic tests for you. Write more tests to make
   * sure your `sum` and `max` methods work as expected.
   *
   * In particular, write tests for corner cases: negative numbers, zeros,
   * empty lists, lists with repeated elements, etc.
   *
   * It is allowed to have multiple `assert` statements inside one test,
   * however it is recommended to write an individual `test` statement for
   * every tested aspect of a method.
   */
  test("sum of a few numbers") {
    assert(sum(List(1,2,0)) === 3)
  }

  test("sum of a few numbers with negative") {
    assert(sum(List(1,-2,0)) === -1)
  }

  test("sum of an empty list") {
    assert(sum(List()) === 0)
  }

  test("max of a few numbers") {
    assert(max(List(3, 7, 2)) === 7)
  }

  test("max of negative numbers") {
    assert(max(List(-3, -17, -4)) === -3)
  }

  // `max` is specified to fail on an empty list rather than return a sentinel.
  test("max of an empty list") {
    intercept[NoSuchElementException] {
      max(List())
    }
  }
}
| avenezia/Functional-Programming-Principles-in-Scala | week1/list_example/src/test/scala/example/ListsSuite.scala | Scala | apache-2.0 | 4,416 |
// NOTE(review): this appears to be test data for reference-resolution tests;
// the inline /* resolved: ... */ and /* */ markers look like expectations
// parsed by the test harness — confirm before editing them.
println(/* resolved: false */ String.getClass)
println(classOf[/* */ String])
"1.0".asInstanceOf[String]./* */ toFloat
/*
* This file is part of Kiama.
*
* Copyright (C) 2009-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.lambda2
import LambdaTree.Exp
import org.kiama.util.{Emitter, ErrorEmitter, OutputEmitter,
ParsingREPLWithConfig, REPLConfig}
/**
 * Configuration for the Lambda REPL.
 */
abstract class LambdaConfig (args : Array[String]) extends REPLConfig (args) {
    // --mechanism command-line option selecting the evaluation strategy;
    // defaults to "reduce". The driver's :eval command validates values
    // against Evaluators.mechanisms before switching.
    lazy val mechanism = opt[String] ("mechanism", descr = "Evaluation mechanism",
                                      default = Some ("reduce"))
}
/**
 * A simple typed lambda calculus read-eval-print-loop that offers
 * choice from among multiple evaluation mechanisms. The lambda calculus
 * supported and the strategies used are heavily based on "Building
 * Interpreters with Rewriting Strategies", Eelco Dolstra and Eelco
 * Visser, LDTA 2002 (published in Volume 65/3 of Electronic Notes in
 * Theoretical Computer Science, Elsevier).
 */
class LambdaDriver extends ParsingREPLWithConfig[Exp,LambdaConfig] with SyntaxAnalyser {
    import Evaluators.{evaluatorFor, mechanisms}
    import LambdaTree.LambdaTree
    import PrettyPrinter.formattedLayout
    import org.kiama.util.{Emitter, Console}
    import org.kiama.util.Messaging.report
    // Builds a config whose output and error emitters are fixed to the
    // supplied ones (defaults write to standard out/err).
    def createConfig (args : Array[String],
                      out : Emitter = new OutputEmitter,
                      err : Emitter = new ErrorEmitter) : LambdaConfig =
        new LambdaConfig (args) {
            lazy val output = out
            lazy val error = err
        }
    val banner = "Enter lambda calculus expressions for evaluation (:help for help)"
    /**
     * Process a user input line by intercepting meta-level commands to
     * update the evaluation mechanisms. By default we just parse what
     * they type into an expression.
     */
    override def processline (line : String, console : Console, config : LambdaConfig) : Option[LambdaConfig] = {
        // Shorthand access to the output emitter
        val output = config.output
        /*
         * Print help about the available commands.
         */
        def printHelp () {
            output.emitln ("""exp                  print the result of evaluating exp
                |:eval                list the available evaluation mechanisms
                |:eval <mechanism>    change to using <mechanism> to evaluate
                |:help                print this help message
                |:quit                quit this REPL""".stripMargin)
        }
        line match {
            case Command (Array (":help")) =>
                printHelp ()
                Some (config)
            // Returning None terminates the REPL loop.
            case Command (Array (":quit")) =>
                None
            case Command (Array (":eval")) =>
                output.emitln ("Available evaluation mechanisms:")
                for (mech <- mechanisms) {
                    output.emit (s"    $mech")
                    if (mech == config.mechanism ())
                        output.emitln (" (current)")
                    else
                        output.emitln
                }
                Some (config)
            // Switch mechanism by building a fresh config from "-m <mech>".
            case Command (Array (":eval", mech)) =>
                if (mechanisms contains mech)
                    Some (createAndInitConfig (Array ("-m", mech), output))
                else {
                    output.emitln (s"unknown evaluation mechanism: $mech")
                    Some (config)
                }
            // Otherwise it's an expression for evaluation
            case _ =>
                super.processline (line, console, config)
        }
    }
    /**
     * Extractor for commands, splits the line into separate words.
     */
    object Command {
        def unapply (line : String) : Option[Array[String]] = {
            Some (line split ' ')
        }
    }
    /**
     * Process an expression.
     */
    def process (e : Exp, config : LambdaConfig) {
        // Make an analyser for a tree for this expression
        val tree = new LambdaTree (e)
        val analyser = new Analyser (tree)
        // First conduct a semantic analysis check: compute the expression's
        // type and see if any errors occurred
        val messages = analyser.errors
        if (messages.length == 0) {
            // If everything is OK, evaluate the expression
            val evaluator = evaluatorFor (config.mechanism ())
            config.output.emitln (formattedLayout (evaluator.eval (e)))
        } else {
            // Otherwise report the errors and reset for next expression
            report (messages, config.error)
        }
    }
}
/**
 * Main object for Lambda REPL.
 */
// Entry point: the REPL behaviour all comes from LambdaDriver.
object Lambda extends LambdaDriver
| adeze/kiama | library/src/org/kiama/example/lambda2/Lambda.scala | Scala | gpl-3.0 | 5,380 |
package im.actor.server.models
import java.time.LocalDateTime
import org.joda.time.DateTime
// Group-membership record: links userId to groupId and records which user
// issued the invitation and when, plus the (optional) time the user joined.
// NOTE(review): invitedAt is a Joda-Time DateTime while joinedAt is a
// java.time.LocalDateTime — mixed date-time libraries; confirm intentional.
case class GroupUser(groupId: Int, userId: Int, inviterUserId: Int, invitedAt: DateTime, joinedAt: Option[LocalDateTime])
| boneyao/actor-platform | actor-server/actor-models/src/main/scala/im/actor/server/models/GroupUser.scala | Scala | mit | 217 |
package builder
import play.api.libs.json.{JsArray, JsBoolean, JsLookupResult, JsNumber, JsObject, JsString, JsValue, JsNull}
import scala.util.{Failure, Success, Try}
/**
 * Lenient JSON helpers: numeric and boolean values may arrive either natively
 * or encoded as strings (e.g. "true", "123"), and parsed strings are trimmed
 * with empty results treated as absent.
 */
object JsonUtil {
  /**
   * Validates the shape of `json` against the field names listed in each
   * category (required strings, optional booleans, arrays of objects, ...).
   * Returns one human-readable message per violation (empty when valid);
   * `prefix`, when given, is prepended to every message. Keys present in the
   * object but absent from every category are reported as unrecognized.
   * `optionalAnys` only widens the recognized-key set — no shape is enforced.
   */
  def validate(
    json: JsValue,
    strings: Seq[String] = Nil,
    optionalStrings: Seq[String] = Nil,
    anys: Seq[String] = Nil,
    arrayOfAnys: Seq[String] = Nil,
    arrayOfObjects: Seq[String] = Nil,
    optionalArraysOfStrings: Seq[String] = Nil,
    optionalArraysOfObjects: Seq[String] = Nil,
    optionalObjects: Seq[String] = Nil,
    objects: Seq[String] = Nil,
    optionalBooleans: Seq[String] = Nil,
    optionalNumbers: Seq[String] = Nil,
    optionalAnys: Seq[String] = Nil,
    prefix: Option[String] = None
  ): Seq[String] = {
    val keys = strings ++ anys ++ optionalStrings ++ arrayOfAnys ++ arrayOfObjects ++ optionalArraysOfStrings ++ optionalArraysOfObjects ++ optionalObjects ++ objects ++ optionalBooleans ++ optionalNumbers ++ optionalAnys
    // Unknown-key errors only apply when the top-level value is an object.
    val unrecognized = json.asOpt[JsObject] match {
      case None => Seq.empty
      case Some(v) => unrecognizedFieldsErrors(v, keys, prefix)
    }
    unrecognized ++
      strings.flatMap { field =>
        (json \ field).toOption match {
          case Some(o: JsString) => {
            // Required strings must also be non-empty after trimming.
            parseString(o.value) match {
              case None => Some(withPrefix(prefix, s"$field must be a non empty string"))
              case Some(_) => None
            }
          }
          case Some(_) => Some(withPrefix(prefix, s"$field must be a string"))
          case None => Some(withPrefix(prefix, s"Missing $field"))
        }
      } ++
      anys.flatMap { field =>
        (json \ field).toOption match {
          case Some(_) => None
          case None => Some(withPrefix(prefix, s"Missing $field"))
        }
      } ++
      optionalStrings.flatMap { field =>
        (json \ field).toOption match {
          case Some(_: JsString) => None
          case Some(_) => Some(withPrefix(prefix, s"$field, if present, must be a string"))
          case None => None
        }
      } ++
      optionalBooleans.flatMap { field =>
        (json \ field).toOption match {
          case Some(_: JsBoolean) => None
          case Some(o: JsString) => {
            // Strings are accepted if they are exactly "true" or "false".
            parseBoolean(o.value) match {
              case None => Some(withPrefix(prefix, s"$field, if present, must be a boolean or the string 'true' or 'false'"))
              case Some(_) => None
            }
          }
          case Some(_) => Some(withPrefix(prefix, s"$field, if present, must be a boolean"))
          case None => None
        }
      } ++
      optionalNumbers.flatMap { field =>
        (json \ field).toOption match {
          case Some(_: JsNumber) => None
          case Some(o: JsString) => {
            // String-encoded numbers are validated as Longs (so "1.5" fails here).
            parseLong(o.value) match {
              case None => Some(withPrefix(prefix, s"$field, if present, must be a number"))
              case Some(_) => None
            }
          }
          case Some(_) => Some(withPrefix(prefix, s"$field, if present, must be a number"))
          case None => None
        }
      } ++
      arrayOfAnys.flatMap { field =>
        (json \ field).toOption match {
          case Some(_) => None
          case None => Some(withPrefix(prefix, s"Missing $field"))
        }
      } ++
      arrayOfObjects.flatMap { field =>
        (json \ field).toOption match {
          case Some(o: JsArray) => validateArrayOfObjects(withPrefix(prefix, s"elements of $field"), o.value.toSeq)
          case Some(_) => Some(withPrefix(prefix, s"$field must be an array"))
          case None => Some(withPrefix(prefix, s"Missing $field"))
        }
      } ++
      optionalArraysOfStrings.flatMap { field =>
        (json \ field).toOption match {
          case Some(o: JsArray) => validateArrayOfStrings(withPrefix(prefix, s"elements of $field"), o.value.toSeq)
          case Some(_) => Some(withPrefix(prefix, s"$field must be an array"))
          case None => None
        }
      } ++
      optionalArraysOfObjects.flatMap { field =>
        (json \ field).toOption match {
          case Some(o: JsArray) => validateArrayOfObjects(withPrefix(prefix, s"elements of $field"), o.value.toSeq)
          case Some(_) => Some(withPrefix(prefix, s"$field, if present, must be an array"))
          case None => None
        }
      } ++
      optionalObjects.flatMap { field =>
        (json \ field).toOption match {
          case Some(_: JsObject) => None
          case Some(_) => Some(withPrefix(prefix, s"$field, if present, must be an object"))
          case None => None
        }
      } ++
      objects.flatMap { field =>
        (json \ field).toOption match {
          case Some(_: JsObject) => None
          case Some(_) => Some(withPrefix(prefix, s"$field, must be an object"))
          case None => Some(withPrefix(prefix, s"Missing $field"))
        }
      }
  }
  // NOTE(review): only the FIRST element is inspected; a mixed-type array
  // whose head is a string passes without errors.
  private def validateArrayOfStrings(
    prefix: String,
    js: Seq[JsValue]
  ): Option[String] = {
    js.headOption match {
      case None => None
      case Some(o) => {
        o match {
          case _: JsString => None
          case _ => Some(s"${prefix} must be strings")
        }
      }
    }
  }
  // NOTE(review): like validateArrayOfStrings, this only inspects the head.
  private def validateArrayOfObjects(
    prefix: String,
    js: Seq[JsValue]
  ): Option[String] = {
    js.headOption match {
      case None => None
      case Some(o) => {
        o match {
          case _: JsObject => None
          case _ => Some(s"${prefix} must be objects")
        }
      }
    }
  }
  // Reports keys present in the object but not in the allowed `fields` list.
  private def unrecognizedFieldsErrors(
    json: JsObject,
    fields: Seq[String],
    prefix: Option[String] = None
  ): Seq[String] = {
    val keys = json.value map { case (key, _) => key }
    keys.filter { k => !fields.contains(k) }.toList match {
      case Nil => Nil
      case one :: Nil => Seq(withPrefix(prefix, s"Unrecognized element[$one]"))
      case multiple => Seq(withPrefix(prefix, s"Unrecognized elements[${multiple.sorted.mkString(", ")}]"))
    }
  }
  // Strings are trimmed; any other JsValue is rendered via its toString form.
  def asOptString(value: JsValue): Option[String] = {
    value match {
      case v: JsString => parseString(v.value)
      case v: JsValue => parseString(v.toString)
    }
  }
  def asOptString(value: JsLookupResult): Option[String] = {
    asOptJsValue(value).flatMap { asOptString }
  }
  def asOptJsValue(value: JsLookupResult): Option[JsValue] = {
    value.toOption
  }
  def asOptBoolean(value: JsValue): Option[Boolean] = {
    asOptString(value).flatMap { parseBoolean }
  }
  def asOptBoolean(value: JsLookupResult): Option[Boolean] = {
    value.toOption.flatMap { asOptBoolean }
  }
  // JsNull -> Nil; arrays keep only elements that parse to non-empty strings.
  def asSeqOfString(value: JsValue): Seq[String] = {
    value match {
      case JsNull => Nil
      case a: JsArray => a.value.flatMap(v=> asOptString(v)).toSeq
      case JsString(text) => parseString(text).toSeq
      case v => parseString(v.toString()).toSeq
    }
  }
  def asSeqOfString(value: JsLookupResult): Seq[String] = asSeqOfString(value.getOrElse(JsNull))
  // Accepts only the exact strings "true" and "false" (no trimming here).
  def parseBoolean(value: String): Option[Boolean] = {
    if (value == "true") {
      Some(true)
    } else if (value == "false") {
      Some(false)
    } else {
      None
    }
  }
  // True when the whole string parses as a Long (so "1.5" is NOT numeric).
  def isNumeric(value: String): Boolean = {
    Try(value.toLong) match {
      case Success(_) => true
      case Failure(_) => false
    }
  }
  def asOptLong(value: JsValue): Option[Long] = {
    asOptString(value).flatMap { parseLong }
  }
  def asOptLong(value: JsLookupResult): Option[Long] = {
    value.toOption.flatMap { asOptLong }
  }
  // None on parse failure.
  def parseBigDecimal(value: String): Option[BigDecimal] = {
    Try(BigDecimal(value)) match {
      case Success(v) => Some(v)
      case Failure(_) => None
    }
  }
  // None on parse failure.
  private def parseLong(value: String): Option[Long] = {
    Try(value.toLong) match {
      case Success(v) => Some(v)
      case Failure(_) => None
    }
  }
  // True when `field` exists at the top level of `json`, even if its value is null.
  def hasKey(json: JsValue, field: String): Boolean = {
    (json \ field).toOption match {
      case None => false
      case Some(_) => true
    }
  }
  // Trims the input and treats an empty result as absent.
  private def parseString(value: String): Option[String] = {
    Some(value.trim).filter(_.nonEmpty)
  }
  private def withPrefix(prefix: Option[String], message: String): String = {
    prefix match {
      case None => message
      case Some(value) => s"$value $message"
    }
  }
}
| apicollective/apibuilder | core/src/main/scala/core/builder/JsonUtil.scala | Scala | mit | 8,149 |
package services.neo4j
import javax.inject.{Inject, Named}
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.{Done, NotUsed}
import models.twitter.Status
import play.api.libs.json.{JsArray, JsValue, Json}
import play.api.libs.ws.{StreamedResponse, WSAuthScheme, WSClient, WSResponse}
import streams.Identifier
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <a.robertofischer@gmail.com> on 5/5/2017
*/
class TimeLineSink @Inject()(wsClient: WSClient,
@Named("Neo4jURI") neo4jURI: String,
@Named("Neo4jUserName") neo4jUserName: String,
@Named("Neo4jPassword") neo4jPassword: String)
(implicit context: ExecutionContext,
system: ActorSystem) extends NodeIdExtractor {
//------------------------------------------------------------------------------------------//
// Implicits
//------------------------------------------------------------------------------------------//
private implicit val materializer = ActorMaterializer()
//------------------------------------------------------------------------------------------//
// Public
//------------------------------------------------------------------------------------------//
/**
 * Streams the identifiers of all user nodes stored in Neo4j.
 * Delegates the raw HTTP streaming to `_getAllUsers` and extracts the ids
 * via the inherited `extractIdFromUser` helper.
 */
def getAllUsers: Future[Source[Identifier, NotUsed]] =
  extractIdFromUser(_getAllUsers())
/**
 * Persists a stream of (identifier, timeline JSON) pairs into Neo4j.
 * The identifier part of each pair is dropped; the JSON payloads are batched
 * in groups of 1000 and each batch is written via [[mergeStatuses]], with up
 * to one in-flight request per available processor.
 *
 * @param source stream of id/status-JSON pairs to persist
 * @return completes when the whole stream has been consumed
 */
def insertTimelines(source: Source[(Identifier, JsValue), NotUsed]): Future[Done] = {
  source
    .map(_._2) // only the JSON payload is needed for the merge
    .grouped(1000) // batch to limit the number of Neo4j transactions
    .map(JsArray)
    .mapAsyncUnordered(Runtime.getRuntime.availableProcessors())(mergeStatuses)
    .runWith(Sink.ignore)
}
/**
 * Writes one batch of Twitter statuses into Neo4j via a single Cypher
 * statement posted to the transactional HTTP endpoint.
 *
 * The statement UNWINDs the batch and, per status, MERGEs the status and its
 * author, then conditionally (via the FOREACH-on-CASE idiom) merges retweeted
 * and quoted statuses, reply targets, hashtags, user mentions, URLs and
 * symbols, creating the corresponding relationships.
 *
 * @param statuses JSON array of raw Twitter status objects
 * @return the raw WS response from the Neo4j commit endpoint
 */
private def mergeStatuses(statuses: JsArray)(implicit executionContext: ExecutionContext): Future[WSResponse] = {
  val query = {
    // Merge the retweeted status and its author, if the field is present.
    val mergeRetweetedStatus =
      s"""FOREACH (x IN CASE WHEN exists(status.retweeted_status) THEN [1] ELSE [] END |
         |MERGE (o:${Schema.NodeLabels.User} {id: status.retweeted_status.user.id_str})
         |MERGE (t:${Schema.NodeLabels.Status} {id: status.retweeted_status.id_str})
         |SET t.createdAt = status.retweeted_status.created_at,
         |t.createdAtSinceEpoch = status.retweeted_status.created_at_since_epoch,
         |t.text = status.retweeted_status.text,
         |t.retweetCount = status.retweeted_status.retweet_count,
         |t.favoriteCount = status.retweeted_status.favorite_count
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.Retweeted}]-> (t)
         |CREATE UNIQUE (u)-[:${Schema.RelationshipTypes.RetweetedByUser}]-> (t)
         |CREATE UNIQUE (o)-[:${Schema.RelationshipTypes.Tweeted}]-> (t)
         |)""".stripMargin
    // Merge the quoted status and its author, if present.
    val mergeQuotedStatus =
      s"""FOREACH (x IN CASE WHEN exists(status.quoted_status) THEN [1] ELSE [] END |
         |MERGE (o:${Schema.NodeLabels.User} {id: status.quoted_status.user.id_str})
         |MERGE (t:${Schema.NodeLabels.Status} {id: status.quoted_status.id_str})
         |SET t.createdAt = status.quoted_status.created_at,
         |t.createdAtSinceEpoch = status.quoted_status.created_at_since_epoch,
         |t.text = status.quoted_status.text,
         |t.retweetCount = status.quoted_status.retweet_count,
         |t.favoriteCount = status.quoted_status.favorite_count
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.Quoted}]-> (t)
         |CREATE UNIQUE (o)-[:${Schema.RelationshipTypes.Tweeted}]-> (t)
         |)""".stripMargin
    // Link the status to the user and status it replies to, if both ids exist.
    val mergeReply =
      s"""FOREACH (x IN CASE WHEN exists(status.in_reply_to_user_id_str) AND exists(status.in_reply_to_status_id_str)
         |THEN [1] ELSE [] END |
         |MERGE (o:${Schema.NodeLabels.User} {id: status.in_reply_to_user_id_str})
         |MERGE (t:${Schema.NodeLabels.Status} {id: status.in_reply_to_status_id_str})
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.ReplyTo}]-> (t)
         |)""".stripMargin
    // Hashtags are stored lower-cased; the relationship keeps the text indices.
    val mergeHashTags =
      s"""FOREACH (hashTag IN status.entities.hashtags |
         |MERGE (h:${Schema.NodeLabels.HashTag} {name: toLower(hashTag.text)})
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.Tags}{
         |startIndex: hashTag.indices[0],
         |endIndex: hashTag.indices[1]
         |}]->(h)
         |)""".stripMargin
    // Mentioned users, with the indices of the mention in the status text.
    val mergeUserMentions =
      s"""FOREACH (userMention IN status.entities.user_mentions |
         |MERGE (o:${Schema.NodeLabels.User} {id: userMention.id_str})
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.Mentions}{
         |startIndex: userMention.indices[0],
         |endIndex: userMention.indices[1]
         |}]->(o)
         |)""".stripMargin
    // URLs are linked to their domain node as well.
    val mergeUrlLinks =
      s"""FOREACH (url IN status.entities.urls |
         |MERGE (o:${Schema.NodeLabels.URL} {name: url.expanded_url})
         |MERGE (d:${Schema.NodeLabels.Domain} {name: url.domain})
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.LinksTo}{
         |startIndex: url.indices[0],
         |endIndex: url.indices[1]
         |}]->(o)
         |CREATE UNIQUE (o)-[:${Schema.RelationshipTypes.BelongsTo}]->(d)
         |)""".stripMargin
    // Cashtag-style symbols mentioned in the status.
    val mergeSymbols =
      s"""FOREACH (symbol IN status.entities.symbols |
         |MERGE (d:${Schema.NodeLabels.Symbol} {name: symbol.text})
         |CREATE UNIQUE (s)-[:${Schema.RelationshipTypes.Mentions}{
         |startIndex: symbol.indices[0],
         |endIndex: symbol.indices[1]
         |}]->(d)
         |)""".stripMargin
    // Root fragment: binds `status`, `s` (the status node) and `u` (its author),
    // which the fragments above refer to. Must come first in the concatenation.
    val mergeStatusAndUser =
      s"""UNWIND $$json AS status
         |MERGE (s:${Schema.NodeLabels.Status} {id: status.id_str})
         |SET s.createdAt = status.created_at,
         |s.text = status.text,
         |s.retweetCount = status.retweet_count,
         |s.favoriteCount = status.favorite_count,
         |s.createdAtSinceEpoch = status.created_at_since_epoch
         |MERGE (u:${Schema.NodeLabels.User} {id: status.user.id_str})
         |CREATE UNIQUE (u)-[:${Schema.RelationshipTypes.Tweeted}]->(s)""".stripMargin
    // Statuses are normalized through the Status.transformer before being bound
    // to the $json statement parameter.
    val json = Json.obj("json" -> Json.toJson(statuses.value.map(_.transform(Status.transformer).get)))
    Json.obj(
      "statement" ->
        (mergeStatusAndUser +
          mergeRetweetedStatus +
          mergeQuotedStatus +
          mergeReply +
          mergeHashTags +
          mergeUserMentions +
          mergeUrlLinks +
          mergeSymbols),
      "parameters" -> json
    )
  }
  wsClient.url(neo4jURI + "/db/data/transaction/commit")
    .withAuth(neo4jUserName, neo4jPassword, WSAuthScheme.BASIC)
    .withHeaders(
      "Accept" -> "application/json; charset=UTF-8",
      "Content-Type" -> "application/json"
    )
    .post {
      Json.obj(
        "statements" -> Json.toJson(Vector(query))
      )
    }
}
//------------------------------------------------------------------------------------------//
// Private
//------------------------------------------------------------------------------------------//
// private def _getUserIdsWhereTimeLineNotAdded(): Future[StreamedResponse] = {
/**
 * Fetches all user nodes from Neo4j as a streamed HTTP response.
 * Uses the transactional commit endpoint with the "X-Stream" header so the
 * server streams results instead of buffering them.
 */
private def _getAllUsers(): Future[StreamedResponse] = {
  val query = Json.obj(
    "statement" ->
      s"""MATCH (user:${Schema.NodeLabels.User})
         |RETURN user""".stripMargin
  )
  wsClient.url(neo4jURI + "/db/data/transaction/commit")
    .withMethod("POST")
    .withAuth(neo4jUserName, neo4jPassword, WSAuthScheme.BASIC)
    .withHeaders(
      "Accept" -> "application/json; charset=UTF-8",
      "Content-Type" -> "application/json",
      "X-Stream" -> "true" // ask Neo4j to stream the result set
    )
    .withBody(Json.obj(
      "statements" -> Json.toJson(Vector(query))
    ))
    .stream()
}
} | Queendimimi/twitter_extractor | app/services/neo4j/TimeLineSink.scala | Scala | apache-2.0 | 7,954 |
package au.com.dius.pact.model
/**
 * Result of matching an actual request against the expected interactions of a
 * pact. Instances are ordered by "goodness": full match > partial match >
 * mismatch, so the best candidate can be selected with a comparison.
 */
sealed trait RequestMatch extends Ordered[RequestMatch] {
  // Overridden to true only by FullRequestMatch.
  def allMatched = false

  // Relative quality of this match; higher is better.
  protected def goodness: Int

  def compare(that: RequestMatch): Int = goodness.compare(that.goodness)

  /** The matched interaction, if this is a full match; None otherwise. */
  def toOption: Option[Interaction] = this match {
    case FullRequestMatch(inter) => Some(inter)
    case _ => None
  }

  /**
   * Take the first total match, or merge partial matches, or take the best available.
   */
  def merge(other: RequestMatch): RequestMatch = (this, other) match {
    case (a @ FullRequestMatch(_), FullRequestMatch(_)) => a
    case (a @ PartialRequestMatch(problems1),
      b @ PartialRequestMatch(problems2)) => PartialRequestMatch(a.problems ++ b.problems)
    case (a, b) => if (a > b) a else b
  }
}
/** The request fully matched the given interaction. */
case class FullRequestMatch(interaction: Interaction) extends RequestMatch {
  override def allMatched = true
  override protected def goodness = 2
}
object PartialRequestMatch {
  /** Convenience constructor for a partial match against a single interaction. */
  def apply(expected: Interaction, mismatches: Seq[RequestPartMismatch]): PartialRequestMatch =
    PartialRequestMatch(Map(expected -> mismatches))
}
/**
 * A partial match: for each candidate interaction, the (non-empty) list of
 * ways the actual request failed to match it.
 *
 * @param problems candidate interactions mapped to their mismatch lists
 */
case class PartialRequestMatch(problems: Map[Interaction, Seq[RequestPartMismatch]]) extends RequestMatch {
  /**
   * Human-readable summary: one section per candidate interaction, listing
   * each mismatch indented underneath it.
   */
  // Replaces the previous var/loop string accumulation with a functional
  // build; iteration order over `problems` is unchanged.
  def description(): String =
    problems.map { case (interaction, mismatches) =>
      interaction.description + ":\\n" +
        mismatches.map(mismatch => "  " + mismatch.description + "\\n").mkString
    }.mkString

  // These invariants should be enforced by a better use of the type system. NonEmptyList, etc
  require(problems.nonEmpty, "Partial match must contain some failed matches")
  require(problems.values.forall(_.nonEmpty), "Mismatch lists shouldn't be empty")

  override protected def goodness = 1
}
/** Worst outcome: the request matched no expected interaction at all. */
case object RequestMismatch extends RequestMatch {
  override protected def goodness = 0
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.Arrays
import java.util.Objects
import org.apache.spark._
import org.apache.spark.rdd.RDD
/**
* A Partitioner that might group together one or more partitions from the parent.
*
* @param parent a parent partitioner
* @param partitionStartIndices indices of partitions in parent that should create new partitions
* in child (this should be an array of increasing partition IDs). For example, if we have a
* parent with 5 partitions, and partitionStartIndices is [0, 2, 4], we get three output
* partitions, corresponding to partition ranges [0, 1], [2, 3] and [4] of the parent partitioner.
*/
class CoalescedPartitioner(val parent: Partitioner, val partitionStartIndices: Array[Int])
  extends Partitioner {

  // Maps each parent partition index to the index of the coalesced (child)
  // partition that contains it. Lazy and @transient so it is recomputed after
  // deserialization instead of being shipped with the partitioner.
  @transient private lazy val parentPartitionMapping: Array[Int] = {
    val n = parent.numPartitions
    val result = new Array[Int](n)
    for (i <- 0 until partitionStartIndices.length) {
      val start = partitionStartIndices(i)
      // Child partition i covers parent partitions [start, end), where end is
      // the start of the next child partition (or n for the last one).
      val end = if (i < partitionStartIndices.length - 1) partitionStartIndices(i + 1) else n
      for (j <- start until end) {
        result(j) = i
      }
    }
    result
  }

  override def numPartitions: Int = partitionStartIndices.size

  override def getPartition(key: Any): Int = {
    // Delegate to the parent, then translate to the coalesced partition id.
    parentPartitionMapping(parent.getPartition(key))
  }

  override def hashCode(): Int =
    31 * Objects.hashCode(parent) + Arrays.hashCode(partitionStartIndices)

  override def equals(other: Any): Boolean = other match {
    case c: CoalescedPartitioner =>
      c.parent == parent && Arrays.equals(c.partitionStartIndices, partitionStartIndices)
    case _ =>
      false
  }
}
// A reduce-side partition spanning the parent map-output partitions
// [startIndexInParent, endIndexInParent) — end exclusive, as computed in
// CustomShuffledRDD.getPartitions.
private[spark] class CustomShuffledRDDPartition(
    val index: Int, val startIndexInParent: Int, val endIndexInParent: Int)
  extends Partition {

  override def hashCode(): Int = index

  override def equals(other: Any): Boolean = super.equals(other)
}
/**
* A special ShuffledRDD that supports a ShuffleDependency object from outside and launching reduce
* tasks that read multiple map output partitions.
*/
class CustomShuffledRDD[K, V, C](
    var dependency: ShuffleDependency[K, V, C],
    partitionStartIndices: Array[Int])
  extends RDD[(K, C)](dependency.rdd.context, Seq(dependency)) {

  // Convenience constructor: one output partition per reduce partition of the
  // dependency (i.e. no coalescing).
  def this(dep: ShuffleDependency[K, V, C]) = {
    this(dep, (0 until dep.partitioner.numPartitions).toArray)
  }

  override def getDependencies: Seq[Dependency[_]] = List(dependency)

  // Expose a partitioner reflecting the coalesced layout so downstream
  // operations see consistent partitioning.
  override val partitioner = {
    Some(new CoalescedPartitioner(dependency.partitioner, partitionStartIndices))
  }

  override def getPartitions: Array[Partition] = {
    val n = dependency.partitioner.numPartitions
    Array.tabulate[Partition](partitionStartIndices.length) { i =>
      val startIndex = partitionStartIndices(i)
      // The range ends where the next child partition starts (or at n).
      val endIndex = if (i < partitionStartIndices.length - 1) partitionStartIndices(i + 1) else n
      new CustomShuffledRDDPartition(i, startIndex, endIndex)
    }
  }

  override def compute(p: Partition, context: TaskContext): Iterator[(K, C)] = {
    val part = p.asInstanceOf[CustomShuffledRDDPartition]
    val metrics = context.taskMetrics().createTempShuffleReadMetrics()
    // Read the whole range of map output partitions for this child in one go.
    SparkEnv.get.shuffleManager.getReader(
      dependency.shuffleHandle, part.startIndexInParent, part.endIndexInParent, context, metrics)
      .read()
      .asInstanceOf[Iterator[(K, C)]]
  }

  override def clearDependencies(): Unit = {
    super.clearDependencies()
    dependency = null
  }
}
| goldmedal/spark | core/src/test/scala/org/apache/spark/scheduler/CustomShuffledRDD.scala | Scala | apache-2.0 | 4,296 |
object Script {
// this is the proof script for robot_quantified8.dl
// a theorem about the safety of the control algorithm that enforces a
// multiple virtual fixtures in 2D, with force feedback
// Safety condition for boundary w(): the tool position q projected onto the
// boundary normal, plus the displacement contributed by the force-feedback
// terms over time QQ, stays non-negative.
val safestr =
  "(w() /= end() ==> "+
  "(qx() - px(w())) * nx(w()) + (qy() - py(w())) * ny(w()) + " +
  "K() * (fx() * nx(w()) + fy() * ny(w())) * GG * QQ + " +
  "1 / 2 * K() * (FXP * nx(w()) + FYP * ny(w())) * GG * QQ^2 >= 0)"
// Static ("old") safety condition, without the force-feedback terms.
val oldsafestr =
  "(w() /= end() ==> "+
  "(qx() - px(w())) * nx(w()) + (qy() - py(w())) * ny(w()) >= 0)"
// Same two conditions with the boundary index abstracted as the schema
// variable WW, for use under unification/substitution.
val oldsafestrW =
  "(WW /= end() ==> "+
  "(qx() - px(WW)) * nx(WW) + (qy() - py(WW)) * ny(WW) >= 0)"
val safestrW =
  "(WW /= end() ==> "+
  "(qx() - px(WW)) * nx(WW) + (qy() - py(WW)) * ny(WW) + " +
  "K() * (fx() * nx(WW) + fy() * ny(WW)) * GG * QQ + " +
  "1 / 2 * K() * (FXP * nx(WW) + FYP * ny(WW)) * GG * QQ^2 >= 0)"
// Box modalities iterating w() over the linked list of boundaries starting at
// AA; the AZ variant stops before reaching ZZ.
val iteratestrA = "[w() := AA; { w() := next(w())}*] "
val iteratestrAZ = "[w() := AA; {?next(w()) /= ZZ; w() := next(w())}*] "
// This is the invariant for the outer loop of the controller. Each
// loop is an epsilon-step in the machine where the controller enforces
// control and continuous time evolves
// Conjuncts: positive constants, unit boundary normals, a well-founded
// boundary list (next(end()) = end(), next injective off the end), and the
// static safety condition for every boundary reachable from first().
val loopInvOuter =
  parseFormula("K() > 0 & dd() > 0 & e() > 0 & g() >= 0 &" +
    "(forall i : B. nx(i)^2 + ny(i)^2 = 1) & " +
    " next(end()) = end() & " +
    "(forall h : B . forall g :B . " +
    "(next(h) /= end()) ==> (h /= g) ==> (next(h) /= next(g))) & " +
    "[m() := first(); {m() := next(m())}*] " +
    "((m() /= end()) ==> " +
    "((qx() - px(m())) * nx(m()) + (qy() - py(m())) * ny(m()) >= 0))")
// This is the invariant for the nested inner loop of the
// controller. This is a loop over the different virtal fixture
// boundaries, where each loop is the controller creating force
// feedback and enforcing safety for one boundary
// Extends the outer invariant: boundaries from ZZ onward and from first()
// onward satisfy the static condition, gain GG is non-negative, and — once
// past the first boundary — the already-processed prefix (before ZZ) stays
// safe under the force-feedback condition for all times q in [0, e()].
val loopInvInner =
  parseFormula("K() > 0 & dd() > 0 & e() > 0 & " +
    "(forall i : B. nx(i)^2 + ny(i)^2 = 1) & " +
    "next(end()) = end() & " +
    "(forall h : B . forall g :B . " +
    "(next(h) /= end()) ==> (h /= g) ==> (next(h) /= next(g))) & " +
    "([m() := ZZ; { m() := next(m()) }*] " +
    "((m() /= end()) ==> " +
    "((qx() - px(m())) * nx(m()) + (qy() - py(m())) * ny(m()) >= 0))) &" +
    "(GG >= 0)&" +
    "([m() := first(); { m() := next(m()) }*] " +
    "((m() /= end()) ==> " +
    "((qx() - px(m())) * nx(m()) + (qy() - py(m())) * ny(m()) >= 0))) &" +
    "((ZZ /= first()) ==> (forall q : Real . ((0<=q)&(q<=e())) ==> " +
    "[w() := first(); {?next(w()) /= ZZ; w() := next(w())}*] " +
    "((w() /= end()) ==> " +
    "((qx() - px(w())) * nx(w()) + (qy() - py(w())) * ny(w()) + " +
    "K() * (fx() * nx(w()) + fy() * ny(w()))* GG * q + " +
    "(1 / 2) * K() * (FXP * nx(w()) + FYP * ny(w())) * GG * q^2 >= 0))))")
// This is an invariant used for an induction in the postcondition
// branch of the nested inner loop. The postcondition is where
// continuous time evolves. This invariant helps rearrange the terms of
// the boxed modality so that we can unsubstitute common terms, i.e.
// fnp, fn, etc.. so that arithT completes quickly (see quickaritTh)
// Pins SS to the elapsed time s() and QXX/QYY to the post-flow positions, so
// the modality's postcondition can be unsubstituted into the shape expected
// by quickarith2T.
val rearrangeInv = parseFormula(
  "(SS = s()) &" +
  "(QXX = qx() + K() * fx() * GG * s() + 1/2 * K() * FXP * GG * s()^2) &"+
  "(QYY = qy() + K() * fy() * GG * s() + 1/2 * K() * FYP * GG * s()^2) &"+
  "(next(end()) = end()) & " +
  "(YY = end()) & " +
  "[w() := ZZ; {?next(w()) /= YY; w() := next(w())}*]" +
  "((w() /= end()) ==> " +
  "((qx() - px(w())) * nx(w()) + (qy() - py(w())) * ny(w()) + " +
  "K() * (fx() * nx(w()) + fy() * ny(w())) * GG * SS + " +
  "(1/2) * K() * (FXP * nx(w()) + FYP * ny(w())) * GG * SS^2 >= 0))"
)
// This is an invariant used for an induction in the inductive "step"
// branch of the nested inner loop. This helps show that if the
// modalities in invariant loopInvInner hold at one step, then they
// hold at the next step as well.
// It essentially breaks the modality into two pieces: one that is the
// collection of virtual fixture boundaries that we made safe before,
// and the other is the collection of virtual fixture boundaries that
// we just made safe during this most recent step.
// Conjunction of: the already-safe prefix (up to ZZ) under the damped
// condition, the single boundary WW under the damped condition, and the whole
// list under the static condition.
val breakInv = parseFormula("(" + iteratestrAZ + safestr + ") & (" +
  safestrW + ") & (" +
  iteratestrA + oldsafestr + ")")
// Just the prefix of boundaries made safe so far, under the damped condition.
val furtherdamped = parseFormula(iteratestrAZ + safestr)
// Assumptions relating the reduced gain GGN and the unchanged time QQ to
// their previous values.
val gqassumptionsstr = "GGN <= GG & GGN >= 0 & QQ = QQO &"
val furtherdampedctxt = parseFormula(
  gqassumptionsstr + "(" + iteratestrAZ + safestr + ") & ("+
  iteratestrA + oldsafestr + ")")
// Closes a goal whose remaining conclusion follows directly after hiding the
// spurious succedent and substituting equals.
val obviousimpT =
  composelistT(
    tryruleatT(hide)(RightP(1)),
    substT*,
    nonarithcloseT
  )
//"(forall h:B.forall g:B.next(h) /= end() ==> h /= g ==> next(h) /= next(g)) &" +
// Step formula: ZZ is a real boundary, OO its successor, WW is safe, and if
// the prefix is non-trivial it remains safe under the damped condition.
val onemore = parseFormula(
  "(ZZ /= end()) & (OO = next(ZZ)) &" + "(" + safestrW +
  " & ((AA /= ZZ) ==>" + iteratestrAZ + safestr + "))" )
// instantiate forall left
// Instantiates a universal in the antecedent with the term i found on the
// left of a disequation f(i) /= _ in the (singleton) succedent.
val fooz = new Tactic("fooz"){
  def apply(nd:Nodes.OrNode) = {
    val Sequent(sig, cs, ss) = nd.goal
    ss match {
      case List(Atom(R("/=", List(Fn(v,List(i)), _)))) =>
        tryruleT(allLeft(i))(nd)
      case _ => None
    }
  }
}
// instantiate forall right
// Same as fooz, but takes the instantiation term from the right-hand side of
// the succedent disequation. (Still applies allLeft, despite the name.)
val fooz2 = new Tactic("fooz2"){
  def apply(nd:Nodes.OrNode) = {
    val Sequent(sig, cs, ss) = nd.goal
    ss match {
      case List(Atom(R("/=", List(_, Fn(v,List(i)))))) =>
        tryruleT(allLeft(i))(nd)
      case _ => None
    }
  }
}
// instantiate forall left
// Scans the antecedent for a fact of the form 0 <= i and instantiates a
// universal with that i; does nothing if no such fact exists.
val fooz3 = new Tactic("fooz3"){
  def apply(nd:Nodes.OrNode) = {
    val Sequent(sig, cs, ss) = nd.goal
    var res : Option[Term] = None
    for (z <- cs) {
      z match {
        case Atom(R("<=", List(Num(Exact.zero),i))) => res = Some(i)
        case _ => ()
      }
    }
    res match {
      case None => nilT(nd)
      case Some(tm) => tryruleT(allLeft(tm))(nd)
    }
  }
}
// instantiate forall left
// Scans the antecedent for i = next(j) and instantiates a universal with j.
val fooz4 = new Tactic("fooz4"){
  def apply(nd:Nodes.OrNode) = {
    val Sequent(sig, cs, ss) = nd.goal
    var res : Option[Term] = None
    for (z <- cs) {
      z match {
        case Atom(R("=", List(i,Fn("next", List(j))))) =>
          res = Some(j)
        case _ => ()
      }
    }
    res match {
      case None => nilT(nd)
      case Some(tm) => tryruleT(allLeft(tm))(nd)
    }
  }
}
// instantiate forall left
// Scans the antecedent for i /= end() and instantiates a universal with i.
val fooz5 = new Tactic("fooz5"){
  def apply(nd:Nodes.OrNode) = {
    val Sequent(sig, cs, ss) = nd.goal
    var res : Option[Term] = None
    for (z <- cs) {
      z match {
        case Atom(R("/=", List(i,Fn("end", List())))) =>
          res = Some(i)
        case _ => ()
      }
    }
    res match {
      case None => nilT(nd)
      case Some(tm) => tryruleT(allLeft(tm))(nd)
    }
  }
}
// Arithmetic closer: normalizes the goal (substitute, nullarize, split
// conjunctions), then tries to unsubstitute the common force-feedback term
// shape so the final arithmetic call stays tractable.
val quickarithT : Tactic =
  composelistT(
    substT*,
    nullarizeT*,
    tryruleT(andLeft)*,
    (unsubT(
      parseFormula("D0 + K() * AA * (FN * e() + FNP * e()^2 * (1 / 2)) >= 0"),
      List(Var("FN"),Var("FNP"),Var("D0"))) | unitT),
    arithT
  )
// Like quickarithT, but tries a larger menu of unsubstitution patterns
// (distance-plus-feedback shapes, discriminant shapes, signs of FNP) before
// calling the arithmetic decision procedure.
val quickarith2T : Tactic =
  composelistT(
    substT*,
    nullarizeT*,
    tryruleT(andLeft)*,
    (unsubT(
      parseFormula("D0 + K() * FN * AA * SS + (1 / 2) * K() * FNP * AA* (SS)^2 >= 0"),
      List(Var("FN"),Var("FNP"),Var("D0"))) |
     unsubT(
       parseFormula("D0 + K() * AA * (FN * e() + FNP * e()^2 * (1 / 2)) >= 0"),
       List(Var("FN"),Var("FNP"),Var("D0"))) |
     unsubT(
       parseFormula("D0 + K() * AA * (FN * e() + FNP * e()^2 * (1 / 2)) <= 0"),
       List(Var("FN"),Var("FNP"),Var("D0"))) |
     unsubT(
       parseFormula("(K() * AA * FN)^2 - 2 * K() * AA * FNP * D0 <= 0"),
       List(Var("FN"),Var("FNP"),Var("D0"))) |
     unsubT(
       parseFormula("(K() * AA * FN)^2 - 2 * K() * AA * FNP * D0 >= 0"),
       List(Var("FN"),Var("FNP"),Var("D0"))) |
     unsubT(
       parseFormula("FNP <= 0"),
       List(Var("FNP"))) |
     unsubT(
       parseFormula("FNP >= 0"),
       List(Var("FNP"))) | unitT),
    arithT
  )
// Closes goals where two iteration modalities are syntactically identical
// after renaming the bound iteration variable on both sides to "ii".
val identiboxT =
  composelistT(
    tryruleT(seq)*,
    substT*,
    tryruleT(renameAssign("ii")),
    tryruleatT(renameAssign("ii"))(RightP(0)),
    substT*,
    nonarithcloseT
  )
// Unrolls one iteration of the loop modality and then closes via identiboxT.
val nextloopT =
  composelistT(
    tryruleT(iterate),
    tryruleT(andLeft),
    tryruleT(seq)*,
    identiboxT
  )
// Proves one unrolled step of the boundary loop: splits into the base
// conjuncts (closed easily), the unrolled-iteration branch (closed by
// matching identical boxes), and the remaining-loop branch (nextloopT).
val onestepT =
  composelistT(
    tryruleT(seq)*,
    tryruleT(check)*,
    tryruleT(impRight)*,
    tryruleT(assign)*,
    tryruleT(andRight)<(
      tryruleT(andRight)<(
        easiestT,
        composelistT(
          tryruleT(iterate),
          tryruleT(andLeft)*,
          tryruleT(seq)*,
          tryruleT(check),
          tryruleatT(impLeft)(LeftP(5))<(
            composelistT(
              identiboxT
            ),
            composelistT(
              tryruleatT(hide)(RightP(1)),
              substT*,
              nonarithcloseT
            )
          )
        )
      ),
      composelistT(
        tryruleatT(reorder)(LeftP(5)),
        nextloopT
      ))
  )
// Variant of onestepT that first case-splits on whether next(AA) equals
// next(BB) (via the injectivity of next off the end of the list) before
// unrolling and matching the boxes.
val onestep2T =
  composelistT(
    tryruleT(seq)*,
    tryruleT(check)*,
    tryruleT(impRight)*,
    tryruleatT(assign)(RightP(0)),
    tryruleT(andRight)<(
      easiestT,
      tryruleT(andRight)<(
        nonarithcloseT,
        composelistT(
          tryruleT(impRight),
          tryruleT(seq)*,
          substT*,
          cutT(StandardCut,
            parseFormula("next(AA) /= next(BB)"),
            parseFormula("AA /= BB | AA = BB"))<(
            composelistT(
              tryruleT(orRight),
              tryruleatT(notEquals)(RightP(0)),
              tryruleT(not),
              nonarithcloseT
            ),
            tryruleT(orLeft)<(
              composelistT(
                tryruleatT(hide)(LeftP(3)),
                tryruleT(impLeft)<(
                  composelistT(
                    tryruleT(seq)*,
                    tryruleT(assign),
                    tryruleT(iterate),
                    tryruleT(andLeft)*,
                    tryruleT(seq)*,
                    tryruleT(check),
                    tryruleatT(impLeft)(LeftP(5))<(
                      composelistT(
                        tryruleatT(commuteEquals)(LeftP(3)),
                        identiboxT
                      ),
                      composelistT(
                        tryruleatT(hide)(RightP(1)),
                        substT*,
                        nonarithcloseT
                      )
                    )
                  ),
                  composelistT(
                    tryruleatT(hide)(RightP(1)),
                    nonarithcloseT
                  )
                )
              ),
              composelistT(
                tryruleatT(hide)(LeftP(5)),
                tryruleatT(hide)(LeftP(3)),
                tryruleatT(hide)(RightP(0)),
                substT*,
                nonarithcloseT
              )
            )
          )
        )
      )
    )
  )
// Finishes a branch by unrolling the two remaining iteration modalities,
// discharging their guards, and closing the leftover goal arithmetically.
val concludeT =
  composelistT(
    tryruleT(impRight)*,
    hpalphaT*,
    tryruleT(iterate),
    tryruleT(andLeft),
    tryruleatT(hide)(LeftP(5)),
    substT*,
    tryruleT(impLeft)<(
      composelistT(
        tryruleT(iterate),
        tryruleT(andLeft),
        tryruleatT(hide)(LeftP(5)),
        tryruleT(impLeft)<(
          composelistT(
            tryruleatT(hide)(LeftP(0)),
            quickarith2T
          ),
          composelistT(
            tryruleatT(hide)(RightP(1)),
            substT*,
            nonarithcloseT
          )
        )
      ),
      nonarithcloseT),
    nilT
  )
// Simpler variant: a single unrolling suffices and both resulting branches
// close without arithmetic.
val conclude2T =
  composelistT(
    tryruleT(impRight)*,
    hpalphaT*,
    tryruleT(iterate),
    tryruleT(andLeft),
    tryruleatT(hide)(LeftP(3)),
    substT*,
    tryruleT(impLeft)<(
      nonarithcloseT,
      nonarithcloseT),
    nilT
  )
// Handles the gain-reduction step: cuts in GGN <= GGO, hides all context not
// needed for the arithmetic side condition, then runs a loop induction with
// the furtherdampedctxt invariant over the already-safe prefix of boundaries.
// NOTE(review): the long hide chains below are position-sensitive — they
// depend on the exact antecedent layout at this point in the proof.
val equivboxT =
  composelistT(
    tryruleT(seq)*,
    tryruleatT(assign)(RightP(0)),
    cutmT(StandardCut,
      List(parseFormula("AA = GGN"),
        parseFormula("GGO >= 0")),
      parseFormula("GGN <= GGO"))<(
      composelistT(
        substT*,
        tryruleatT(hide)(LeftP(33+2)),
        tryruleatT(hide)(LeftP(32+2)),
        tryruleatT(hide)(LeftP(31+2)),
        tryruleatT(hide)(LeftP(30+2)),
        tryruleatT(hide)(LeftP(29+2)),
        tryruleatT(hide)(LeftP(28+2)),
        tryruleatT(hide)(LeftP(27+2)),
        tryruleatT(hide)(LeftP(26+2)),
        tryruleatT(hide)(LeftP(25+2)),
        tryruleatT(hide)(LeftP(24+2)),
        tryruleatT(hide)(LeftP(23+2)),
        tryruleatT(hide)(LeftP(22+2)),
        tryruleatT(hide)(LeftP(21+2)),
        tryruleatT(hide)(LeftP(20+2)),
        tryruleatT(hide)(LeftP(19+2)),
        tryruleatT(hide)(LeftP(18+2)),
        tryruleatT(hide)(LeftP(17+2)),
        tryruleatT(hide)(LeftP(16+2)),
        tryruleatT(hide)(LeftP(15+2)),
        tryruleatT(hide)(LeftP(14+2)),
        tryruleatT(hide)(LeftP(13+2)),
        tryruleatT(hide)(LeftP(12+2)),
        tryruleatT(hide)(LeftP(11+2)),
        tryruleatT(hide)(LeftP(10+2)),
        tryruleatT(hide)(LeftP(9+2)),
        tryruleatT(hide)(LeftP(7)),
        tryruleatT(hide)(LeftP(6)),
        tryruleatT(hide)(LeftP(5)),
        tryruleatT(hide)(LeftP(4)),
        tryruleatT(hide)(LeftP(3)),
        tryruleatT(hide)(LeftP(2)),
        tryruleatT(hide)(LeftP(1)),
        tryruleatT(hide)(LeftP(0)),
        nullarizeT*,
        arithT
      ),
      composelistT(
        substT*,
        // Instantiate the invariant with the freshest fxp/fyp constants and
        // run the induction.
        newestT("fyp",
          fyp_string =>
          newestT("fxp",
            fxp_string =>
            unifyT(parseFormula("GGN <= GG"),
              Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
                Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
                  furtherdampedctxt)),
              (w => unifyT(parseFormula("QQ = QQO"), w,
                (x => unifyT(parseFormula("ZZ /= end()"), x,
                  (y => unifyT(parseFormula("AA = first()"), y,
                    (z => tryruleT(loopInduction(z)))))))))))))<(
          tryruleT(andRight)<(
            tryruleT(andRight)<(
              tryruleT(andRight)<(
                tryruleT(andRight)<(
                  composelistT(
                    tryruleatT(hide)(LeftP(35+2)),
                    tryruleatT(hide)(LeftP(34+2)),
                    tryruleatT(hide)(LeftP(33+2)),
                    tryruleatT(hide)(LeftP(32+2)),
                    tryruleatT(hide)(LeftP(31+2)),
                    tryruleatT(hide)(LeftP(30+2)),
                    tryruleatT(hide)(LeftP(29+2)),
                    tryruleatT(hide)(LeftP(28+2)),
                    tryruleatT(hide)(LeftP(27+2)),
                    tryruleatT(hide)(LeftP(26+2)),
                    tryruleatT(hide)(LeftP(25+2)),
                    tryruleatT(hide)(LeftP(24+2)),
                    tryruleatT(hide)(LeftP(23+2)),
                    tryruleatT(hide)(LeftP(22+2)),
                    tryruleatT(hide)(LeftP(21+2)),
                    tryruleatT(hide)(LeftP(20+2)),
                    tryruleatT(hide)(LeftP(19+2)),
                    tryruleatT(hide)(LeftP(18+2)),
                    tryruleatT(hide)(LeftP(17+2)),
                    tryruleatT(hide)(LeftP(16+2)),
                    tryruleatT(hide)(LeftP(15+2)),
                    tryruleatT(hide)(LeftP(14+2)),
                    tryruleatT(hide)(LeftP(13+2)),
                    tryruleatT(hide)(LeftP(12+2)),
                    tryruleatT(hide)(LeftP(11+2)),
                    tryruleatT(hide)(LeftP(10+2)),
                    tryruleatT(hide)(LeftP(9+2)),
                    tryruleatT(hide)(LeftP(8)),
                    tryruleatT(hide)(LeftP(7)),
                    tryruleatT(hide)(LeftP(6)),
                    tryruleatT(hide)(LeftP(5)),
                    tryruleatT(hide)(LeftP(4)),
                    tryruleatT(hide)(LeftP(3)),
                    tryruleatT(hide)(LeftP(2)),
                    tryruleatT(hide)(LeftP(1)),
                    nullarizeT*,
                    arithT
                  ),
                  composelistT(
                    tryruleatT(allLeft(Fn("first",List())))(LeftP(21)),
                    tryruleatT(reorder)(LeftP(20)),
                    fooz5,
                    tryruleT(impLeft)<(
                      tryruleT(impLeft)<(
                        composelistT(
                          tryruleatT(hide)(LeftP(38)),
                          tryruleatT(hide)(LeftP(36)),
                          tryruleatT(hide)(LeftP(34)),
                          tryruleatT(hide)(LeftP(32)),
                          tryruleatT(hide)(LeftP(30)),
                          tryruleatT(hide)(LeftP(29)),
                          tryruleatT(hide)(LeftP(27)),
                          tryruleatT(hide)(LeftP(25)),
                          tryruleatT(hide)(LeftP(23)),
                          tryruleatT(hide)(LeftP(18)),
                          tryruleatT(hide)(LeftP(17)),
                          tryruleatT(hide)(LeftP(16)),
                          tryruleatT(hide)(LeftP(11)),
                          tryruleatT(hide)(LeftP(8)),
                          tryruleatT(hide)(LeftP(5)),
                          tryruleatT(hide)(LeftP(1)),
                          quickarith2T
                        ),
                        obviousimpT
                      ),
                      obviousimpT
                    )
                  )
                ),
                nonarithcloseT
              ),
              identiboxT
            ),
            composelistT(
              tryruleatT(reorder)(LeftP(21+2)),
              identiboxT
            )
          ),
          composelistT(
            tryruleT(andLeft)*,
            tryruleatT(reorder)(LeftP(2)),
            onestepT
          ),
          composelistT(
            tryruleT(andLeft)*,
            tryruleT(seq)*,
            tryruleT(assign)*,
            tryruleatT(iterate)(LeftP(3)),
            tryruleT(andLeft)*,
            tryruleatT(hide)(LeftP(4)),
            tryruleatT(iterate)(LeftP(4)),
            tryruleT(andLeft)*,
            tryruleatT(hide)(LeftP(5)),
            tryruleT(impRight),
            tryruleT(impLeft)<(
              tryruleT(impLeft)<(
                composelistT(
                  tryruleatT(hide)(LeftP(0)),
                  quickarith2T
                ),
                composelistT(
                  tryruleatT(hide)(RightP(1)),
                  tryruleT(notEquals)*,
                  tryruleT(not)*,
                  tryruleatT(commuteEquals)(LeftP(6)),
                  substT*,
                  nonarithcloseT)
              ),
              composelistT(
                tryruleatT(hide)(RightP(1)),
                tryruleT(notEquals)*,
                tryruleT(not)*,
                tryruleatT(commuteEquals)(LeftP(6)),
                substT*,
                nonarithcloseT)
            )
          )
        )
      )
    )
// This is the proof that shows one modality with our desired
// postcondition implies another, whose terms are identical but
// rearranged.
// Implemented as a loop induction over rearrangeInv, with the fresh
// g/fxp/fyp/qx/qy constants substituted in before unification.
val rearrangepf =
  composelistT(
    newestT("g",
      g_string =>
      newestT("fyp",
        fyp_string =>
        newestT("fxp",
          fxp_string =>
          newestT("qx",
            qx_string =>
            newestT("qy",
              qy_string =>
              unifyT(parseFormula("SS = s()"),
                Prover.substitute_Formula("GG", Fn(g_string, Nil),
                  Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
                    Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
                      Prover.substitute_Formula("QXX", Fn(qx_string, Nil),
                        Prover.substitute_Formula("QYY", Fn(qy_string, Nil),
                          rearrangeInv))))),
                (x => unifyT(parseFormula("YY = end()"), x,
                  (y => unifyT(parseFormula("ZZ = first()"), y,
                    (z => tryruleT(loopInduction(z)))))))))))))<(
      tryruleT(andRight)<(
        easiestT,
        identiboxT // added here 1
      ),
      composelistT(
        tryruleT(andLeft)*,
        tryruleT(assign),
        tryruleT(andRight)<(
          easiestT,
          composelistT(
            tryruleT(seq)*,
            tryruleatT(hide)(LeftP(2)),
            tryruleatT(hide)(LeftP(1)),
            tryruleatT(hide)(LeftP(0)),
            tryruleatT(assign)(LeftP(2)),
            tryruleT(iterate),
            tryruleT(andLeft),
            tryruleT(seq)*,
            tryruleT(check)*,
            tryruleatT(reorder)(LeftP(1)),
            // Case split on whether the current boundary's successor is the
            // end of the list.
            newestT("w",
              w_string =>
              cutT(StandardCut,parseFormula("II = end()"),
                Prover.substitute_Formula("WW", Fn(w_string, Nil),
                  parseFormula("next(WW) = II | next(WW) /= II"))))<(
              composelistT(
                tryruleT(orRight),
                tryruleT(notEquals),
                tryruleT(not),
                nonarithcloseT
              ),
              tryruleT(orLeft)<(
                composelistT(
                  tryruleatT(hide)(LeftP(4)),
                  tryruleatT(hide)(LeftP(3)),
                  substT*,
                  tryruleT(assign)*,
                  unifyT(parseFormula("WW = next(MM)"),
                    parseFormula("next(end()) = end() & WW = end()"),
                    ( y => tryruleT(loopInduction(y))))<(
                    composelistT(substT*,easiestT),
                    composelistT(
                      substT*,
                      easiestT,
                      tryruleatT(substitute)(LeftP(2)),
                      tryruleatT(substitute)(LeftP(2)),
                      nonarithcloseT
                    ),
                    composelistT(
                      substT*,
                      easiestT,
                      tryruleT(notEquals),
                      tryruleT(not),
                      substT*,
                      nonarithcloseT
                    )
                  )
                ),
                composelistT(
                  tryruleatT(hide)(LeftP(3)),
                  tryruleT(impLeft)<(
                    composelistT(
                      tryruleatT(commuteEquals)(LeftP(5)),
                      substT*,
                      identiboxT // added here
                    ),
                    composelistT(
                      tryruleatT(hide)(RightP(1)),
                      nonarithcloseT
                    )
                  )
                )
              )
            )
          )
        )
      ),
      composelistT(
        tryruleT(andLeft)*,
        tryruleT(impRight)*,
        tryruleT(seq)*,
        tryruleT(assign)*,
        tryruleT(iterate),
        tryruleT(andLeft),
        tryruleatT(hide)(LeftP(7)),
        substT*,
        tryruleT(impLeft)<(
          composelistT(tryruleatT(hide)(LeftP(0)),nullarizeT*,arithT),
          composelistT(tryruleatT(hide)(RightP(1)),nonarithcloseT)
        )
      )
    )
  )
// Rotates the antecedent so the zero-branch cluster can close; see the
// in-line rationale below for why this replaces (rather than adds to) an
// existing antecedent.
val uglyrotation : Tactic =
  composelistT(
    //
    // Rotation to allow proving of zero branch cluster
    //
    // The zero-branch cluster needs another piece of context.
    // We thread the necessary state through the proof in a
    // straightforward manner up to this point. Until this point,
    // the proof takes apart the structure of the HP, and organizes
    // itself around this structure, preparing to prove its safety property.
    // Starting at this point, the proof begins to refer to specific pieces
    // of context, and for its success depends on the order of antecedents.
    // Consequently, threading the state through past this point will break
    // the rest of the proof, and require revisiting all of its different
    // cuts and inductions.
    //
    // To avoid this, I observe that one of the antecedents, necessary for
    // the postcondition in one of the outer inductive loops, has been carried
    // forward to this point. Replacing it with the new piece of context leaves
    // the positions of each of the other antecedents should leave the rest
    // of the proof intact, while making this context available to whatever
    // branches need it to close.
    //
    // It goes without saying that this is extremely brittle.
    //
    tryruleT(assign),
    tryruleT(iterate),
    tryruleatT(andLeft)(LeftP(14)),
    tryruleatT(hide)(LeftP(15)),
    tryruleatT(hide)(LeftP(13)),
    tryruleatT(reorder)(LeftP(13)),
    tryruleatT(reorder)(LeftP(34)),
    cutT(StandardCut, parseFormula("AA = WW"),
      parseFormula("forall i : B . " + oldsafestrW))<(
      composelistT(
        substT,
        tryruleT(allRight),
        nonarithcloseT
      ),
      composelistT(
        substT,
        tryruleatT(hide)(LeftP(1)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        tryruleatT(reorder)(LeftP(13)),
        unitT // ready to go
      )
    )
  )
// Used in the inductive "step" part of induction for the nested inner
// loop. Show that the if second modal term in the nested inner loop's
// invariant hold in one step, it holds in the next.
val inductivesteppf : Tactic =
composelistT(
uglyrotation, // see notes above
tryruleT(impRight),
tryruleT(allRight),
tryruleT(impRight),
substT*,
cutT(StandardCut, parseFormula("AA = next(BB)"),
parseFormula("(BB = first()) | (BB /= first())"))<(
composelistT(
tryruleT(orRight),
tryruleT(notEquals)*,
tryruleatT(not)(RightP(1)),
nonarithcloseT
),
composelistT(
tryruleT(orLeft)<(
composelistT(
tryruleatT(hide)(LeftP(16+2)),
tryruleatT(hide)(LeftP(15+2)),
substT*,
tryruleT(andLeft),
tryruleT(seq)*,
tryruleatT(assign)(RightP(0)),
tryruleatT(iterate)(RightP(0)),
tryruleT(andRight)<(
composelistT(
substT*,
tryruleatT(allLeft(Fn("first",List())))(LeftP(13)),
tryruleatT(hide)(LeftP(14)),
tryruleatT(allLeft(Fn("first",List())))(LeftP(14)),
tryruleatT(hide)(LeftP(15)),
tryruleT(impRight),
tryruleT(impLeft)<(
composelistT(
tryruleatT(hide)(LeftP(24)),
tryruleatT(hide)(LeftP(23)),
tryruleatT(hide)(LeftP(22)),
tryruleatT(hide)(LeftP(20)),
tryruleatT(hide)(LeftP(19)),
tryruleatT(hide)(LeftP(18)),
tryruleatT(hide)(LeftP(17)),
tryruleatT(hide)(LeftP(12)),
tryruleatT(hide)(LeftP(11)),
tryruleatT(hide)(LeftP(5)),
tryruleatT(hide)(LeftP(0)),
quickarith2T
),
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
),
composelistT(
hpalphaT*,
tryruleatT(reorder)(LeftP(6+2)),
tryruleatT(reorder)(LeftP(6+2)),
tryruleatT(hide)(RightP(0)),
substT*,
nonarithcloseT
)
)
),
// got to here
tryruleT(impLeft)<(
composelistT(
tryruleT(andLeft),
tryruleT(seq)*,
tryruleatT(assign)(RightP(0)),
tryruleatT(reorder)(LeftP(22+2)),
newestT("g",
g_string =>
cutmT(StandardCut,
List(parseFormula("0 <= QQ"),
parseFormula("WW /= first()"),
parseFormula("FNP = FXP * nx(AA) + FYP * ny(BB)")),
Prover.substitute_Formula("GG", Fn(g_string, Nil),
parseFormula(safestrW))))<(
composelistT(
tryruleatT(allLeft(Fn("first",List())))(LeftP(17)),
tryruleatT(hide)(LeftP(18)),
tryruleT(impLeft)<(
composelistT(
tryruleT(impRight),
tryruleatT(hide)(LeftP(34)),
tryruleatT(hide)(LeftP(32)),
tryruleatT(hide)(LeftP(30)),
tryruleatT(hide)(LeftP(28)),
tryruleatT(hide)(LeftP(26)),
tryruleatT(hide)(LeftP(25)),
tryruleatT(hide)(LeftP(23)),
tryruleatT(hide)(LeftP(21)), // get rid of boxen
tryruleatT(hide)(LeftP(20)),
tryruleatT(hide)(LeftP(18)),
tryruleatT(hide)(LeftP(17)),
tryruleatT(hide)(LeftP(13)),
tryruleatT(hide)(LeftP(12)),
tryruleatT(hide)(LeftP(11)),
tryruleatT(hide)(LeftP(6)),
tryruleatT(hide)(LeftP(3)),
tryruleatT(hide)(LeftP(0)),
substT*,
quickarith2T
),
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
),
newestT("fyp",
fyp_string =>
newestT("fxp",
fxp_string =>
newestT("g",
g_string =>
unifyT(parseFormula("ZZ = next(WW)"),
Prover.substitute_Formula("GG", Fn(g_string, Nil),
Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
Prover.substitute_Formula("FYP", Fn(fyp_string, Nil), breakInv))),
(x => unifyT(parseFormula("QQ <= e()"), x,
(y => unifyT(parseFormula("AA = first()"), y,
(z => tryruleT(loopInduction(z)))))))))))<(
tryruleT(andRight)<(
tryruleT(andRight)<(
composelistT(
fooz3,
tryruleatT(hide)(LeftP(21+2)),
tryruleT(impLeft)<(
composelistT(
newestT("fxp", fxp_string =>
newestT("fyp", fyp_string =>
newestT("g", g_string =>
cutmT(StandardCut,
List(
parseFormula(safestrW),
parseFormula("ZZ /= end()")
),
Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
Prover.substitute_Formula("GG", Fn(g_string, Nil),
Prover.substitute_Formula("AA", Fn("first", Nil),
furtherdamped))))))))<(
// this cut asserts (using a box modality) that all previous
// boundaries are still safe given the update to damping that we made
// with the current boundary
composelistT(
tryruleT(seq)*,
tryruleatT(assign)(RightP(0)),
tryruleatT(reorder)(LeftP(36)),
newestT("w", w_string =>
newestT("fxp", fxp_string =>
newestT("fyp", fyp_string =>
newestT("q", q_string =>
unifyT(parseFormula("QQO <= e()"),
Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
Prover.substitute_Formula("QQ", Fn(q_string, Nil),
Prover.substitute_Formula("AA", Fn(w_string, Nil),
furtherdampedctxt)))),
(v => unifyT(parseFormula("ZZ /= end()"), v,
(x => unifyT(parseFormula("GGN = GG"), x,
(y => tryruleT(loopInduction(y))))))))))))<(
tryruleT(andRight)<(
tryruleT(andRight)<(
tryruleT(impLeft)<(
composelistT(
tryruleatT(reorder)(LeftP(19)),
fooz5,
tryruleatT(hide)(LeftP(1)),
tryruleatT(allLeft(Fn("first",List())))(LeftP(21)),
tryruleT(impLeft)<(
composelistT(
tryruleatT(hide)(LeftP(40)),
tryruleatT(hide)(LeftP(39)),
tryruleatT(hide)(LeftP(38)),
tryruleatT(hide)(LeftP(37)),
tryruleatT(hide)(LeftP(35)),
tryruleatT(hide)(LeftP(33)),
tryruleatT(hide)(LeftP(31)),
tryruleatT(hide)(LeftP(29)),
tryruleatT(hide)(LeftP(28)),
tryruleatT(hide)(LeftP(26)),
tryruleatT(hide)(LeftP(24)),
tryruleatT(hide)(LeftP(22)),
tryruleatT(hide)(LeftP(21)),
tryruleatT(hide)(LeftP(17)),
tryruleatT(hide)(LeftP(16)),
tryruleatT(hide)(LeftP(10)),
tryruleatT(hide)(LeftP(7)),
tryruleatT(hide)(LeftP(4)),
// try this:
tryruleT(andRight)<(
tryruleT(andRight)<(
quickarith2T,
quickarith2T
),
quickarith2T
)
),
obviousimpT
)
),
easiestT
),
equivboxT
),
composelistT(
tryruleatT(reorder)(LeftP(21+2)),
identiboxT
)
),
composelistT(
tryruleT(andLeft)*,
tryruleatT(reorder)(LeftP(2)),
onestepT
),
composelistT(
tryruleT(andLeft)*,
tryruleatT(reorder)(LeftP(2)),
concludeT
)
)
),
// we use the modal assertion that we just made, plus an assertion
// about the next boundary that is currently being controlled for, to
// assert the inductive step: if in the current step all previous
// boundaries were safe, then in the next step all previous boundaries
// are safe, plus one more
composelistT(
tryruleT(seq)*,
tryruleatT(assign)(RightP(0)),
substT*,
newestT("fxp", fxp_string =>
newestT("fyp", fyp_string =>
unifyT(parseFormula("AA = first()"),
Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
onemore)),
(v => unifyT(parseFormula("OO = next(ZZ)"), v,
(x => unifyT(parseFormula(safestrW), x,
(y => tryruleT(loopInduction(y))))))))))<(
tryruleT(andRight)<(
easiestT,
tryruleT(andRight)<(
easiestT,
composelistT(tryruleT(impRight),identiboxT)
)
),
composelistT(
tryruleT(andLeft)*,
tryruleatT(reorder)(LeftP(2)),
onestep2T
),
composelistT(
tryruleT(andLeft)*,
tryruleatT(reorder)(LeftP(3)),
tryruleT(impRight),
cutmT(StandardCut,
List(parseFormula("WW /= end()"),
parseFormula("JJ = next(II)")),
parseFormula("WW = II | WW /= II"))<(
composelistT(
tryruleT(orRight),
tryruleatT(notEquals)(RightP(1)),
tryruleT(not),
nonarithcloseT
),
tryruleT(orLeft)<(
composelistT(
tryruleatT(hide)(LeftP(2)),
substT*,
tryruleT(impLeft)<(
nonarithcloseT,
nonarithcloseT
)
),
tryruleT(impLeft)<(
conclude2T,
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
)
)
)
)
)
) // end of cut
),
composelistT(
tryruleatT(hide)(LeftP(19)),
tryruleatT(hide)(RightP(1)),
substT*,
easiestT
)
)
),
composelistT(
tryruleT(impRight),
tryruleT(impLeft)<(
nonarithcloseT,
nonarithcloseT
)
)
),
identiboxT
),
composelistT( // inductive step
tryruleT(seq)*,
tryruleT(andLeft)*,
hpalphaT*,
tryruleT(andRight)<(
tryruleT(andRight)<(
composelistT(
tryruleT(seq)*,
substT*,
//iterate here
tryruleT(iterate),
tryruleT(andLeft),
tryruleT(seq)*,
tryruleT(check)*,
tryruleatT(impLeft)(LeftP(2))<(
identiboxT,
composelistT(
tryruleatT(hide)(RightP(1)),
substT*,
nonarithcloseT
)
)
),
easiestT
),
composelistT(
tryruleatT(reorder)(LeftP(3)),
nextloopT
)
)
),
composelistT( // postcondition
tryruleT(andLeft)*,
tryruleT(seq),
tryruleT(assign),
tryruleT(iterate),
tryruleT(andLeft),
tryruleatT(hide)(LeftP(1)),
substT*,
nonarithcloseT
)
) // end of induction
) // end of cut
),
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
)
)
),
nilT,
nilT
)
  // Tactic for the "tail" step of the boundary induction: unwinds one
  // iteration of the loop (assign + iterate), discharges the loop check,
  // and closes both resulting branches.
  // NOTE(review): tactic combinator script — statement order is
  // proof-relevant, so the code is left byte-for-byte unchanged.
  val tailstepT : Tactic =
    composelistT(
      tryruleT(seq)*,
      substT,
      tryruleatT(assign)(LeftP(3)),
      tryruleatT(iterate)(LeftP(3)),
      tryruleT(andLeft),
      tryruleT(seq)*,
      tryruleT(check),
      tryruleT(impLeft)<(
        // Branch 1: rename the bound variable on both sides and close.
        composelistT(
          tryruleatT(reorder)(LeftP(5)),
          tryruleatT(renameAssign("ii"))(LeftP(5)),
          tryruleatT(renameAssign("ii"))(RightP(0)),
          substT*,
          nonarithcloseT
        ),
        // Branch 2: discharge the negated check condition.
        composelistT(
          tryruleatT(hide)(RightP(1)),
          substT,
          tryruleT(notEquals)*,
          tryruleatT(not)(RightP(0)),
          tryruleatT(not)(LeftP(1)),
          substT,
          nonarithcloseT
        )
      )
    )
val brT : Tactic =
composelistT(
tryruleatT(seq)(RightP(0)),
tryruleatT(check)(RightP(0)),
tryruleT(impRight),
tryruleatT(assign)(RightP(0))*,
tryruleT(andRight)<(
tryruleT(andRight)<(
tryruleT(andRight)<(
tryruleT(andRight)<(
easiestT,
composelistT(
tryruleT(assign),
tryruleT(iterate),
tryruleT(andLeft)*,
identiboxT
)
),
composelistT(
tryruleT(assign),
tryruleT(iterate),
tryruleatT(andLeft)(LeftP(14)),
tryruleatT(hide)(LeftP(15)),
tryruleatT(reorder)(LeftP(11)),
fooz5,
substT*,
tryruleT(impLeft)<(
composelistT(
tryruleatT(hide)(LeftP(31)),
tryruleatT(hide)(LeftP(29)),
tryruleatT(hide)(LeftP(27)),
tryruleatT(hide)(LeftP(25)),
tryruleatT(hide)(LeftP(23)),
tryruleatT(hide)(LeftP(21)),
tryruleatT(hide)(LeftP(19)),
tryruleatT(hide)(LeftP(17)),
tryruleatT(hide)(LeftP(16)),
tryruleatT(hide)(LeftP(13)),
tryruleatT(hide)(LeftP(12)),
tryruleatT(hide)(LeftP(8)),
tryruleatT(hide)(LeftP(7)),
tryruleatT(hide)(LeftP(6)),
tryruleatT(hide)(LeftP(1)),
substT*,
quickarith2T
),
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
)
),
composelistT(
tryruleatT(hide)(LeftP(14)),
identiboxT
)
),
inductivesteppf
)
)
  // Parameterized tactic: strips the leading check/assignments of a branch
  // and then applies `brT` to both alternatives of the trailing choice.
  // The `foo` parameter is accepted but not used in the body below —
  // presumably kept for interface compatibility with sibling tactics
  // (TODO confirm against callers).
  val easybranchT : Tactic => Tactic =
    foo =>
      composelistT(
        tryruleatT(seq)(RightP(0))*,
        tryruleT(check),
        tryruleT(impRight),
        tryruleT(assignAnyRight)*,
        tryruleT(check)*,
        tryruleT(impRight)*,
        tryruleatT(assign)(RightP(0)),
        tryruleatT(choose)(RightP(0)),
        tryruleT(andRight)<(
          brT
          ,
          brT)
      )
  // Alpha-rule sweeper restricted to the right-hand side: tries each
  // non-branching hybrid-program / propositional rule in turn until one
  // applies. Used with `*` to normalize a goal before case splits.
  val hpalphaRT : Tactic =
    (tryruleT(seq) |
       tryruleatT(assign)(RightP(0)) |
       tryruleatT(check)(RightP(0)) |
       tryruleatT(assignAnyRight)(RightP(0)) |
       tryruleatT(qassign)(RightP(0)) |
       tryruleatT(choose)(RightP(0)) |
       tryruleT(impRight) |
       tryruleT(allRight) |
       tryruleT(existsLeft) |
       tryruleT(orRight) |
       tryruleT(not)
     )
// tryruleT(andLeft) |
  // Safety proof skeleton: normalizes the goal, rotates the antecedent into
  // a fixed position (the repeated `reorder` at LeftP(24) cycles hypotheses
  // — presumably to line indices up for the branch tactic; TODO confirm),
  // then splits into seven conjunctive branches, each solved by the
  // supplied `solvebranch` tactic.
  // NOTE(review): tactic script left byte-for-byte unchanged.
  val safeproofz : Tactic => Tactic =
    solvebranch =>
    composelistT(
      hpalphaRT*,
      substT,
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleatT(reorder)(LeftP(24)),
      tryruleT(andRight)<(
        composelistT(
          hpalphaRT*,
          tryruleT(andRight)<(
            composelistT(
              hpalphaRT*,
              tryruleT(andRight)<(
                composelistT(
                  hpalphaRT*,
                  tryruleT(andRight)<(
                    composelistT(
                      hpalphaRT*,
                      tryruleT(andRight)<(
                        composelistT(
                          hpalphaRT*,
                          tryruleT(andRight)<(
                            // branch 1
                            solvebranch
                              ,
                            // branch 2
                            solvebranch
                          )),
                        //branch 3
                        solvebranch
                      )),
                    // branch 4
                    solvebranch
                  )),
                // branch 5
                solvebranch
              )),
            // branch 6
            solvebranch
          )),
        // branch 7
        solvebranch
      ))
  // Instantiation of the safety skeleton with the "easy branch" solver.
  val safeproofu1 =
    safeproofz(easybranchT(unitT))
// This proves the safety of a single, arbitrarily oriented boundary
val singleboundarypf =
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
tryruleT(seq)*,
tryruleT(check),
tryruleT(impRight),
tryruleT(andLeft),
safeproofu1
)
,
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
composelistT(
hpalphaRT*,
tryruleT(andRight)<(
safeproofu1
,
safeproofu1
)),
safeproofu1
)),
safeproofu1
)),
safeproofu1
)),
safeproofu1
)),
safeproofu1
)),
safeproofu1
)))),
composelistT(
tryruleT(seq)*,
tryruleT(check),
tryruleT(impRight),
tryruleT(andLeft),
safeproofu1
)
))
val main =
// outer loop
tryruleT(loopInduction(loopInvOuter))<(
// outer loop base case
tryruleT(andRight)<(
easiestT,
composelistT(
tryruleT(seq)*,
tryruleatT(renameAssign("ii"))(LeftP(7)),
tryruleatT(renameAssign("ii"))(RightP(0)),
nonarithcloseT
)
),
composelistT(
tryruleT(andLeft)*,
tryruleT(seq)*,
tryruleT(assignAnyRight)*,
tryruleatT(assign)(RightP(0)),
tryruleatT(assign)(RightP(0)),
tryruleatT(reorder)(LeftP(8)),
newestT("fyp",
fyp_string =>
newestT("fxp",
fxp_string =>
unifyT(parseFormula("GG = 1"),
Prover.substitute_Formula("FYP", Fn(fyp_string, Nil),
Prover.substitute_Formula("FXP", Fn(fxp_string, Nil),
loopInvInner)),
(y => unifyT(parseFormula("ZZ = first()"),
y,
(x => tryruleT(loopInduction(x))))))))<(
// nilT, // get rid of me -- scaffolding
// /////
// composelistT(
// tryruleT(andLeft)*,
// singleboundarypf,
// nilT
// ),
// /////
// nilT // get rid of me -- scaffolding
// inner loop base case
tryruleT(andRight)<(
tryruleT(andRight)<(
tryruleT(andRight)<(
tryruleT(andRight)<(
composelistT(
easiestT,
substT*,
tryruleatT(hide)(LeftP(8)),
tryruleatT(hide)(LeftP(7)),
tryruleatT(hide)(LeftP(6)),
tryruleatT(hide)(LeftP(5)),
tryruleatT(hide)(LeftP(4)),
arithT
),
identiboxT
),
composelistT(
tryruleatT(hide)(LeftP(9)),
tryruleatT(hide)(LeftP(8)),
tryruleatT(hide)(LeftP(7)),
tryruleatT(hide)(LeftP(6)),
tryruleatT(hide)(LeftP(5)),
substT*,
arithT
)
),
identiboxT
),
composelistT(
tryruleT(impRight),
tryruleatT(hide)(LeftP(8)),
cutT(StandardCut,parseFormula("ZZ=first()"),
parseFormula("first()/=first()"))<(
composelistT(substT*,easiestT),
easiestT
)
)
),
// inner loop inductive step
composelistT(
tryruleT(andLeft)*,
singleboundarypf,
nilT
),
// inner loop postcondition
composelistT(
tryruleT(andLeft)*,
tryruleatT(hide)(LeftP(6)),
tryruleT(seq)*,
tryruleT(check)*,
tryruleT(impRight),
tryruleatT(assign)(RightP(0)),
diffsolveT(RightP(0), Endpoint),
cutT(StandardCut, parseFormula("AA = end()"),
parseFormula("AA = first() | AA /= first()"))<(
composelistT(
tryruleT(orRight),
tryruleT(notEquals),
tryruleT(not),
nonarithcloseT),
composelistT(
tryruleT(assign),
tryruleatT(assign)(RightP(0)),
tryruleT(andRight)<(
easiestT,
tryruleT(orLeft)<(
composelistT(
tryruleatT(hide)(LeftP(12)),
tryruleatT(hide)(LeftP(11)),
tryruleT(seq)*,
tryruleT(assign),
substT*,
tryruleatT(reorder)(LeftP(11)),
unifyT(parseFormula("ZZ = end()"),
parseFormula("ZZ = end() & next(end()) = end()"),
m_string =>
tryruleT(loopInduction(m_string)))<(
easiestT,
composelistT(
tryruleT(andLeft),
tryruleT(assign),
tryruleT(andRight)<(
composelistT(
substT*,nonarithcloseT
),
nonarithcloseT
)
),
composelistT(
tryruleT(andLeft),
tryruleT(impRight),
substT*,
nonarithcloseT
)
)
),
composelistT(
tryruleT(impLeft)<(
composelistT(
tryruleT(allLeft(Fn("s",List()))),
tryruleT(impLeft)<(
composelistT(
tryruleatT(hide)(LeftP(14)),
substT*,
tryruleT(seq)*,
tryruleatT(assign)(RightP(0)),
rearrangepf
),
composelistT(
tryruleatT(hide)(RightP(1)),
substT*,
// tryruleatT(hide)(LeftP(13)),
tryruleatT(hide)(LeftP(12)),
tryruleatT(hide)(LeftP(11)),
tryruleatT(hide)(LeftP(10)),
tryruleatT(hide)(LeftP(9)),
tryruleatT(hide)(LeftP(8)),
tryruleatT(hide)(LeftP(7)),
tryruleatT(hide)(LeftP(6)),
tryruleatT(hide)(LeftP(5)),
tryruleatT(hide)(LeftP(4)),
tryruleatT(hide)(LeftP(3)),
tryruleatT(hide)(LeftP(0)),
tryruleT(andRight)<(arithT,arithT)
)
)
),
composelistT(
tryruleatT(hide)(RightP(1)),
nonarithcloseT
)
)
)
)
)
)
)
)
) // end of inner loop
),
// outer loop invariant implies postcondition
composelistT(
tryruleT(andLeft)*,
tryruleT(seq)*,
tryruleatT(renameAssign("ii"))(LeftP(7)),
tryruleatT(renameAssign("ii"))(RightP(0)),
nonarithcloseT
)
)
}
| keymaerad/KeYmaeraD | examples/medrobot/robot_quantified9.dl.scala | Scala | bsd-3-clause | 45,246 |
/*
* Copyright 2011 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.scrooge.mustache
import HandlebarLoader._
import java.util.logging.Level
import java.util.logging.Logger
import java.util.Properties
import scala.collection.concurrent.TrieMap
import scala.io.Source
/**
 * Loads mustache templates from the classpath and caches the parsed
 * [[Handlebar]] per template name.
 *
 * @param prefix resource-path prefix prepended to every template name
 * @param suffix resource-name suffix appended to every template name
 *               (defaults to ".mustache")
 * @param commentFct renders the comment token for a given comment position;
 *                   used to build the generated-file header
 */
class HandlebarLoader(
  prefix: String,
  suffix: String = ".mustache",
  commentFct: (CommentStyle => String) = scalaJavaCommentFunction) {

  // Parsed templates keyed by bare template name. TrieMap gives
  // thread-safe lookup/update without external locking.
  private[this] val cache = new TrieMap[String, Handlebar]

  /**
   * Returns the parsed template for `name`, loading the classpath resource
   * `prefix + name + suffix` on first access and caching the result.
   *
   * @throws NoSuchElementException if the resource does not exist
   */
  def apply(name: String): Handlebar = {
    val fullName = prefix + name + suffix
    cache.getOrElseUpdate(
      name,
      getClass.getResourceAsStream(fullName) match {
        case null =>
          throw new NoSuchElementException("template not found: " + fullName)
        case inputStream =>
          try {
            new Handlebar(Source.fromInputStream(inputStream).getLines().mkString("\\n"))
          } catch {
            case e: Exception =>
              println("Exception parsing template at " + fullName)
              throw e
          } finally {
            // Fix: the resource stream was previously leaked; always release it.
            inputStream.close()
          }
      }
    )
  }

  // Header stamped at the top of every generated file. Version information
  // comes from the generator's build.properties; each missing property
  // falls back to "?".
  val header: String = {
    val p = new Properties
    val resource = getClass.getResource("/com/twitter/scrooge-generator/build.properties")
    if (resource == null)
      Logger
        .getLogger("scrooge-generator")
        .log(Level.CONFIG, "Scrooge's build.properties not found")
    else {
      // Fix: Properties.load does not close the stream it is given,
      // so close it explicitly to avoid leaking the resource handle.
      val in = resource.openStream()
      try p.load(in)
      finally in.close()
    }

    Seq(
      commentFct(BlockBegin),
      commentFct(BlockContinuation) + "Generated by Scrooge",
      commentFct(BlockContinuation) + " version: %s".format(p.getProperty("version", "?")),
      commentFct(BlockContinuation) + " rev: %s".format(p.getProperty("build_revision", "?")),
      commentFct(BlockContinuation) + " built at: %s".format(p.getProperty("build_name", "?")),
      commentFct(BlockEnd)
    ).mkString("\\n")
  }
}
object HandlebarLoader {

  /** Marker for the position of a line within a generated comment block. */
  sealed abstract class CommentStyle
  case object BlockBegin extends CommentStyle
  case object BlockContinuation extends CommentStyle
  case object BlockEnd extends CommentStyle
  case object SingleLineComment extends CommentStyle

  /**
   * Renders the Scala/Java comment token for the given comment position.
   * This is the default `commentFct` used by [[HandlebarLoader]].
   */
  def scalaJavaCommentFunction(commentStyle: CommentStyle): String =
    commentStyle match {
      case SingleLineComment => "// "
      case BlockBegin => "/**"
      case BlockEnd => " */\\n"
      case BlockContinuation => " * "
    }
}
| twitter/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/mustache/HandlebarLoader.scala | Scala | apache-2.0 | 2,885 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions
import org.apache.flink.table.api._
import org.apache.flink.table.expressions.utils.ScalarTypesTestBase
import org.junit.Test
class ScalarFunctionsTest extends ScalarTypesTestBase {
// ----------------------------------------------------------------------------------------------
// String functions
// ----------------------------------------------------------------------------------------------
@Test
def testOverlay(): Unit = {
testAllApis(
"xxxxxtest".overlay("xxxx", 6),
"'xxxxxtest'.overlay('xxxx', 6)",
"OVERLAY('xxxxxtest' PLACING 'xxxx' FROM 6)",
"xxxxxxxxx")
testAllApis(
"xxxxxtest".overlay("xxxx", 6, 2),
"'xxxxxtest'.overlay('xxxx', 6, 2)",
"OVERLAY('xxxxxtest' PLACING 'xxxx' FROM 6 FOR 2)",
"xxxxxxxxxst")
}
@Test
def testPosition(): Unit = {
testAllApis(
"test".position("xxxtest"),
"'test'.position('xxxtest')",
"POSITION('test' IN 'xxxtest')",
"4")
testAllApis(
"testx".position("xxxtest"),
"'testx'.position('xxxtest')",
"POSITION('testx' IN 'xxxtest')",
"0")
}
@Test
def testSubstring(): Unit = {
testAllApis(
'f0.substring(2),
"f0.substring(2)",
"SUBSTRING(f0, 2)",
"his is a test String.")
testAllApis(
'f0.substring(2, 5),
"f0.substring(2, 5)",
"SUBSTRING(f0, 2, 5)",
"his i")
testAllApis(
'f0.substring(1, 'f7),
"f0.substring(1, f7)",
"SUBSTRING(f0, 1, f7)",
"Thi")
testAllApis(
'f0.substring(1.cast(Types.BYTE), 'f7),
"f0.substring(1.cast(BYTE), f7)",
"SUBSTRING(f0, CAST(1 AS TINYINT), f7)",
"Thi")
testSqlApi(
"SUBSTRING(f0 FROM 2 FOR 1)",
"h")
testSqlApi(
"SUBSTRING(f0 FROM 2)",
"his is a test String.")
}
@Test
def testReplace(): Unit = {
testAllApis(
'f0.replace(" ", "_"),
"f0.replace(' ', '_')",
"REPLACE(f0, ' ', '_')",
"This_is_a_test_String.")
testAllApis(
'f0.replace("i", ""),
"f0.replace('i', '')",
"REPLACE(f0, 'i', '')",
"Ths s a test Strng.")
testAllApis(
'f33.replace("i", ""),
"f33.replace('i', '')",
"REPLACE(f33, 'i', '')",
"null")
testAllApis(
'f0.replace(nullOf(Types.STRING), ""),
"f0.replace(nullOf(STRING), '')",
"REPLACE(f0, NULLIF('', ''), '')",
"null")
testAllApis(
'f0.replace(" ", nullOf(Types.STRING)),
"f0.replace(' ', nullOf(STRING))",
"REPLACE(f0, ' ', NULLIF('', ''))",
"null")
}
@Test
def testTrim(): Unit = {
testAllApis(
'f8.trim(),
"f8.trim()",
"TRIM(f8)",
"This is a test String.")
testAllApis(
'f8.trim(removeLeading = true, removeTrailing = true, " "),
"trim(f8)",
"TRIM(f8)",
"This is a test String.")
testAllApis(
'f8.trim(removeLeading = false, removeTrailing = true, " "),
"f8.trim(TRAILING, ' ')",
"TRIM(TRAILING FROM f8)",
" This is a test String.")
testAllApis(
'f0.trim(removeLeading = true, removeTrailing = true, "."),
"trim(BOTH, '.', f0)",
"TRIM(BOTH '.' FROM f0)",
"This is a test String")
}
@Test
def testCharLength(): Unit = {
testAllApis(
'f0.charLength(),
"f0.charLength()",
"CHAR_LENGTH(f0)",
"22")
testAllApis(
'f0.charLength(),
"charLength(f0)",
"CHARACTER_LENGTH(f0)",
"22")
}
@Test
def testUpperCase(): Unit = {
testAllApis(
'f0.upperCase(),
"f0.upperCase()",
"UPPER(f0)",
"THIS IS A TEST STRING.")
}
@Test
def testLowerCase(): Unit = {
testAllApis(
'f0.lowerCase(),
"f0.lowerCase()",
"LOWER(f0)",
"this is a test string.")
}
@Test
def testInitCap(): Unit = {
testAllApis(
'f0.initCap(),
"f0.initCap()",
"INITCAP(f0)",
"This Is A Test String.")
}
@Test
def testConcat(): Unit = {
testAllApis(
'f0 + 'f0,
"f0 + f0",
"f0||f0",
"This is a test String.This is a test String.")
}
@Test
def testLike(): Unit = {
testAllApis(
'f0.like("Th_s%"),
"f0.like('Th_s%')",
"f0 LIKE 'Th_s%'",
"true")
testAllApis(
'f0.like("%is a%"),
"f0.like('%is a%')",
"f0 LIKE '%is a%'",
"true")
}
@Test
def testNotLike(): Unit = {
testAllApis(
!'f0.like("Th_s%"),
"!f0.like('Th_s%')",
"f0 NOT LIKE 'Th_s%'",
"false")
testAllApis(
!'f0.like("%is a%"),
"!f0.like('%is a%')",
"f0 NOT LIKE '%is a%'",
"false")
}
@Test
def testLikeWithEscape(): Unit = {
testSqlApi(
"f23 LIKE '&%Th_s%' ESCAPE '&'",
"true")
testSqlApi(
"f23 LIKE '&%%is a%' ESCAPE '&'",
"true")
testSqlApi(
"f0 LIKE 'Th_s%' ESCAPE '&'",
"true")
testSqlApi(
"f0 LIKE '%is a%' ESCAPE '&'",
"true")
}
@Test
def testNotLikeWithEscape(): Unit = {
testSqlApi(
"f23 NOT LIKE '&%Th_s%' ESCAPE '&'",
"false")
testSqlApi(
"f23 NOT LIKE '&%%is a%' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT LIKE 'Th_s%' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT LIKE '%is a%' ESCAPE '&'",
"false")
}
@Test
def testSimilar(): Unit = {
testAllApis(
'f0.similar("_*"),
"f0.similar('_*')",
"f0 SIMILAR TO '_*'",
"true")
testAllApis(
'f0.similar("This (is)? a (test)+ Strin_*"),
"f0.similar('This (is)? a (test)+ Strin_*')",
"f0 SIMILAR TO 'This (is)? a (test)+ Strin_*'",
"true")
}
@Test
def testNotSimilar(): Unit = {
testAllApis(
!'f0.similar("_*"),
"!f0.similar('_*')",
"f0 NOT SIMILAR TO '_*'",
"false")
testAllApis(
!'f0.similar("This (is)? a (test)+ Strin_*"),
"!f0.similar('This (is)? a (test)+ Strin_*')",
"f0 NOT SIMILAR TO 'This (is)? a (test)+ Strin_*'",
"false")
}
@Test
def testSimilarWithEscape(): Unit = {
testSqlApi(
"f24 SIMILAR TO '&*&__*' ESCAPE '&'",
"true")
testSqlApi(
"f0 SIMILAR TO '_*' ESCAPE '&'",
"true")
testSqlApi(
"f24 SIMILAR TO '&*&_This (is)? a (test)+ Strin_*' ESCAPE '&'",
"true")
testSqlApi(
"f0 SIMILAR TO 'This (is)? a (test)+ Strin_*' ESCAPE '&'",
"true")
}
@Test
def testNotSimilarWithEscape(): Unit = {
testSqlApi(
"f24 NOT SIMILAR TO '&*&__*' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT SIMILAR TO '_*' ESCAPE '&'",
"false")
testSqlApi(
"f24 NOT SIMILAR TO '&*&_This (is)? a (test)+ Strin_*' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT SIMILAR TO 'This (is)? a (test)+ Strin_*' ESCAPE '&'",
"false")
}
@Test
def testMultiConcat(): Unit = {
testAllApis(concat("xx", 'f33), "concat('xx', f33)", "CONCAT('xx', f33)", "null")
testAllApis(
concat("AA", "BB", "CC", "---"),
"concat('AA','BB','CC','---')",
"CONCAT('AA','BB','CC','---')",
"AABBCC---")
testAllApis(
concat("x~x", "b~b", "c~~~~c", "---"),
"concat('x~x','b~b','c~~~~c','---')",
"CONCAT('x~x','b~b','c~~~~c','---')",
"x~xb~bc~~~~c---")
}
@Test
def testConcatWs(): Unit = {
testAllApis(
concat_ws('f33, "AA"),
"concat_ws(f33, 'AA')",
"CONCAT_WS(f33, 'AA')",
"null")
testAllApis(
concat_ws("~~~~", "AA"),
"concat_ws('~~~~','AA')",
"concat_ws('~~~~','AA')",
"AA")
testAllApis(
concat_ws("~", "AA", "BB"),
"concat_ws('~','AA','BB')",
"concat_ws('~','AA','BB')",
"AA~BB")
testAllApis(
concat_ws("~", 'f33, "AA", "BB", "", 'f33, "CC"),
"concat_ws('~',f33, 'AA','BB','',f33, 'CC')",
"concat_ws('~',f33, 'AA','BB','',f33, 'CC')",
"AA~BB~~CC")
testAllApis(
concat_ws("~~~~", "Flink", 'f33, "xx", 'f33, 'f33),
"concat_ws('~~~~','Flink', f33, 'xx', f33, f33)",
"CONCAT_WS('~~~~','Flink', f33, 'xx', f33, f33)",
"Flink~~~~xx")
}
@Test
def testLPad(): Unit = {
testSqlApi("LPAD('hi',4,'??')", "??hi")
testSqlApi("LPAD('hi',1,'??')", "h")
testSqlApi("LPAD('',1,'??')", "?")
testSqlApi("LPAD('',30,'??')", "??????????????????????????????")
testSqlApi("LPAD('111',-2,'??')", "null")
testSqlApi("LPAD(f33,1,'??')", "null")
testSqlApi("LPAD('\\u0061\\u0062',1,'??')", "a") // the unicode of ab is \\u0061\\u0062
testSqlApi("LPAD('⎨⎨',1,'??')", "⎨")
testSqlApi("LPAD('äääääääää',2,'??')", "ää")
testSqlApi("LPAD('äääääääää',10,'??')", "?äääääääää")
testSqlApi("LPAD('Hello', -1, 'x') IS NULL", "true")
testSqlApi("LPAD('Hello', -1, 'x') IS NOT NULL", "false")
testAllApis(
"äää".lpad(13, "12345"),
"'äää'.lpad(13, '12345')",
"LPAD('äää',13,'12345')",
"1234512345äää")
}
@Test
def testRPad(): Unit = {
testSqlApi("RPAD('hi',4,'??')", "hi??")
testSqlApi("RPAD('hi',1,'??')", "h")
testSqlApi("RPAD('',1,'??')", "?")
testSqlApi("RPAD('1',30,'??')", "1?????????????????????????????")
testSqlApi("RPAD('111',-2,'??')", "null")
testSqlApi("RPAD(f33,1,'??')", "null")
testSqlApi("RPAD('\\u0061\\u0062',1,'??')", "a") // the unicode of ab is \\u0061\\u0062
testSqlApi("RPAD('üö',1,'??')", "ü")
testAllApis(
"äää".rpad(13, "12345"),
"'äää'.rpad(13, '12345')",
"RPAD('äää',13,'12345')",
"äää1234512345")
}
@Test
def testHex(): Unit = {
testAllApis(
100.hex(),
"100.hex()",
"HEX(100)",
"64")
testAllApis(
'f2.hex(),
"f2.hex()",
"HEX(f2)",
"2A")
testAllApis(
nullOf(Types.BYTE).hex(),
"hex(nullOf(BYTE))",
"HEX(CAST(NULL AS TINYINT))",
"null")
testAllApis(
'f3.hex(),
"f3.hex()",
"HEX(f3)",
"2B")
testAllApis(
'f4.hex(),
"f4.hex()",
"HEX(f4)",
"2C")
testAllApis(
'f7.hex(),
"f7.hex()",
"HEX(f7)",
"3")
testAllApis(
12.hex(),
"12.hex()",
"HEX(12)",
"C")
testAllApis(
10.hex(),
"10.hex()",
"HEX(10)",
"A")
testAllApis(
0.hex(),
"0.hex()",
"HEX(0)",
"0")
testAllApis(
"ö".hex(),
"'ö'.hex()",
"HEX('ö')",
"C3B6")
testAllApis(
'f32.hex(),
"f32.hex()",
"HEX(f32)",
"FFFFFFFFFFFFFFFF")
testAllApis(
'f0.hex(),
"f0.hex()",
"HEX(f0)",
"546869732069732061207465737420537472696E672E")
testAllApis(
'f8.hex(),
"f8.hex()",
"HEX(f8)",
"20546869732069732061207465737420537472696E672E20")
testAllApis(
'f23.hex(),
"f23.hex()",
"HEX(f23)",
"25546869732069732061207465737420537472696E672E")
testAllApis(
'f24.hex(),
"f24.hex()",
"HEX(f24)",
"2A5F546869732069732061207465737420537472696E672E")
testAllApis(
"你好".hex(),
"'你好'.hex()",
"HEX('你好')",
"E4BDA0E5A5BD"
)
}
@Test
def testBin(): Unit = {
testAllApis(
nullOf(Types.BYTE).bin(),
"bin(nullOf(BYTE))",
"BIN((CAST(NULL AS TINYINT)))",
"null")
testAllApis(
'f2.bin(),
"f2.bin()",
"BIN(f2)",
"101010")
testAllApis(
'f3.bin(),
"f3.bin()",
"BIN(f3)",
"101011")
testAllApis(
'f4.bin(),
"f4.bin()",
"BIN(f4)",
"101100")
testAllApis(
'f7.bin(),
"f7.bin()",
"BIN(f7)",
"11")
testAllApis(
12.bin(),
"12.bin()",
"BIN(12)",
"1100")
testAllApis(
10.bin(),
"10.bin()",
"BIN(10)",
"1010")
testAllApis(
0.bin(),
"0.bin()",
"BIN(0)",
"0")
testAllApis(
'f32.bin(),
"f32.bin()",
"BIN(f32)",
"1111111111111111111111111111111111111111111111111111111111111111")
}
@Test
def testRegexpReplace(): Unit = {
testAllApis(
"foobar".regexpReplace("oo|ar", "abc"),
"'foobar'.regexpReplace('oo|ar', 'abc')",
"regexp_replace('foobar', 'oo|ar', 'abc')",
"fabcbabc")
testAllApis(
"foofar".regexpReplace("^f", ""),
"'foofar'.regexpReplace('^f', '')",
"regexp_replace('foofar', '^f', '')",
"oofar")
testAllApis(
"foobar".regexpReplace("^f*.*r$", ""),
"'foobar'.regexpReplace('^f*.*r$', '')",
"regexp_replace('foobar', '^f*.*r$', '')",
"")
testAllApis(
"foo1bar2".regexpReplace("\\\\d", ""),
"'foo1bar2'.regexpReplace('\\\\d', '')",
"regexp_replace('foobar', '\\\\d', '')",
"foobar")
testAllApis(
"foobar".regexpReplace("\\\\w", ""),
"'foobar'.regexpReplace('\\\\w', '')",
"regexp_replace('foobar', '\\\\w', '')",
"")
testAllApis(
"fooobar".regexpReplace("oo", "$"),
"'fooobar'.regexpReplace('oo', '$')",
"regexp_replace('fooobar', 'oo', '$')",
"f$obar")
testAllApis(
"foobar".regexpReplace("oo", "\\\\"),
"'foobar'.regexpReplace('oo', '\\\\')",
"regexp_replace('foobar', 'oo', '\\\\')",
"f\\\\bar")
testAllApis(
'f33.regexpReplace("oo|ar", ""),
"f33.regexpReplace('oo|ar', '')",
"REGEXP_REPLACE(f33, 'oo|ar', '')",
"null")
testAllApis(
"foobar".regexpReplace('f33, ""),
"'foobar'.regexpReplace(f33, '')",
"REGEXP_REPLACE('foobar', f33, '')",
"null")
testAllApis(
"foobar".regexpReplace("oo|ar", 'f33),
"'foobar'.regexpReplace('oo|ar', f33)",
"REGEXP_REPLACE('foobar', 'oo|ar', f33)",
"null")
// This test was added for the null literal problem in string expression parsing (FLINK-10463).
testAllApis(
nullOf(Types.STRING).regexpReplace("oo|ar", 'f33),
"nullOf(STRING).regexpReplace('oo|ar', f33)",
"REGEXP_REPLACE(CAST(NULL AS VARCHAR), 'oo|ar', f33)",
"null")
}
@Test
def testRegexpExtract(): Unit = {
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 2),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 2)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 2)",
"bar")
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 0),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 0)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 0)",
"foothebar")
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 1),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 1)",
"the")
testAllApis(
"foothebar".regexpExtract("foo([\\\\w]+)", 1),
"'foothebar'.regexpExtract('foo([\\\\w]+)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo([\\\\w]+)', 1)",
"thebar")
testAllApis(
"foothebar".regexpExtract("foo([\\\\d]+)", 1),
"'foothebar'.regexpExtract('foo([\\\\d]+)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo([\\\\d]+)', 1)",
"null")
testAllApis(
'f33.regexpExtract("foo(.*?)(bar)", 2),
"f33.regexpExtract('foo(.*?)(bar)', 2)",
"REGEXP_EXTRACT(f33, 'foo(.*?)(bar)', 2)",
"null")
testAllApis(
"foothebar".regexpExtract('f33, 2),
"'foothebar'.regexpExtract(f33, 2)",
"REGEXP_EXTRACT('foothebar', f33, 2)",
"null")
//test for optional group index
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)"),
"'foothebar'.regexpExtract('foo(.*?)(bar)')",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)')",
"foothebar")
}
  // FROM_BASE64: decodes a base64 string; null input yields null,
  // and multi-byte UTF-8 round-trips correctly.
  @Test
  def testFromBase64(): Unit = {
    testAllApis(
      'f35.fromBase64(),
      "f35.fromBase64()",
      "from_base64(f35)",
      "hello world")

    testAllApis(
      'f35.fromBase64(),
      "f35.fromBase64()",
      "FROM_BASE64(f35)",
      "hello world")

    //null test
    testAllApis(
      'f33.fromBase64(),
      "f33.fromBase64()",
      "FROM_BASE64(f33)",
      "null")

    testAllApis(
      "5L2g5aW9".fromBase64(),
      "'5L2g5aW9'.fromBase64()",
      "FROM_BASE64('5L2g5aW9')",
      "你好"
    )
  }

  // TO_BASE64: encodes a string; empty input encodes to empty,
  // null input yields null.
  @Test
  def testToBase64(): Unit = {
    testAllApis(
      'f0.toBase64(),
      "f0.toBase64()",
      "TO_BASE64(f0)",
      "VGhpcyBpcyBhIHRlc3QgU3RyaW5nLg==")

    testAllApis(
      'f8.toBase64(),
      "f8.toBase64()",
      "TO_BASE64(f8)",
      "IFRoaXMgaXMgYSB0ZXN0IFN0cmluZy4g")

    testAllApis(
      "".toBase64(),
      "''.toBase64()",
      "TO_BASE64('')",
      "")

    //null test
    testAllApis(
      'f33.toBase64(),
      "f33.toBase64()",
      "TO_BASE64(f33)",
      "null")

    testAllApis(
      "你好".toBase64(),
      "'你好'.toBase64()",
      "TO_BASE64('你好')",
      "5L2g5aW9"
    )
  }

  // UUID: result is not deterministic, so only structural properties are
  // asserted — total length 36 and dashes at positions 9, 14, 19, 24.
  @Test
  def testUUID(): Unit = {
    testAllApis(
      uuid().charLength(),
      "uuid().charLength",
      "CHARACTER_LENGTH(UUID())",
      "36")

    testAllApis(
      uuid().substring(9, 1),
      "uuid().substring(9, 1)",
      "SUBSTRING(UUID(), 9, 1)",
      "-")

    testAllApis(
      uuid().substring(14, 1),
      "uuid().substring(14, 1)",
      "SUBSTRING(UUID(), 14, 1)",
      "-")

    testAllApis(
      uuid().substring(19, 1),
      "uuid().substring(19, 1)",
      "SUBSTRING(UUID(), 19, 1)",
      "-")

    testAllApis(
      uuid().substring(24, 1),
      "uuid().substring(24, 1)",
      "SUBSTRING(UUID(), 24, 1)",
      "-")
  }
  // LTRIM: strips leading whitespace only — trailing spaces survive (first
  // case). Covers already-trimmed input, empty string, and NULL -> NULL.
  @Test
  def testLTrim(): Unit = {
    testAllApis(
      'f8.ltrim(),
      "f8.ltrim",
      "LTRIM(f8)",
      "This is a test String. ")
    testAllApis(
      'f0.ltrim(),
      "f0.ltrim",
      "LTRIM(f0)",
      "This is a test String.")
    testAllApis(
      "".ltrim(),
      "''.ltrim()",
      "LTRIM('')",
      "")
    testAllApis(
      'f33.ltrim(),
      "f33.ltrim",
      "LTRIM(f33)",
      "null")
  }
  // RTRIM: strips trailing whitespace only — leading spaces survive (first
  // case). Covers already-trimmed input, empty string, and NULL -> NULL.
  @Test
  def testRTrim(): Unit = {
    testAllApis(
      'f8.rtrim(),
      "f8.rtrim",
      "RTRIM(f8)",
      " This is a test String.")
    testAllApis(
      'f0.rtrim(),
      "f0.rtrim",
      "RTRIM(f0)",
      "This is a test String.")
    testAllApis(
      "".rtrim(),
      "''.rtrim()",
      "RTRIM('')",
      "")
    testAllApis(
      'f33.rtrim(),
      "f33.rtrim",
      "RTRIM(f33)",
      "null")
  }
@Test
def testRepeat(): Unit = {
testAllApis(
'f0.repeat(1),
"f0.repeat(1)",
"REPEAT(f0, 1)",
"This is a test String.")
testAllApis(
'f0.repeat(2),
"f0.repeat(2)",
"REPEAT(f0, 2)",
"This is a test String.This is a test String.")
testAllApis(
'f0.repeat(0),
"f0.repeat(0)",
"REPEAT(f0, 0)",
"")
testAllApis(
'f0.repeat(-1),
"f0.repeat(-1)",
"REPEAT(f0, -1)",
"")
testAllApis(
'f33.repeat(2),
"f33.repeat(2)",
"REPEAT(f33, 2)",
"null")
testAllApis(
"".repeat(1),
"''.repeat(1)",
"REPEAT('', 2)",
"")
}
// ----------------------------------------------------------------------------------------------
// Math functions
// ----------------------------------------------------------------------------------------------
  // MOD: remainder for column/column, column/literal, and the Table API `%`
  // operator. f4 holds 44 and f7 holds 3 (per the expected values), so the
  // last case's literal strings "mod(44, 3)" / "MOD(44, 3)" are equivalent
  // to the column expression 'f4 % 3 — presumably intentional to also cover
  // literal operands; TODO confirm against the shared test data.
  @Test
  def testMod(): Unit = {
    testAllApis(
      'f4.mod('f7),
      "f4.mod(f7)",
      "MOD(f4, f7)",
      "2")
    testAllApis(
      'f4.mod(3),
      "mod(f4, 3)",
      "MOD(f4, 3)",
      "2")
    testAllApis(
      'f4 % 3,
      "mod(44, 3)",
      "MOD(44, 3)",
      "2")
  }
  // EXP over every numeric field width (byte f2=42, short f3=43, long f4=44,
  // float f5=4.5, double f6=4.6, int f7=3) plus an integer literal; expected
  // values are computed with scala.math.exp so they track the JVM exactly.
  @Test
  def testExp(): Unit = {
    testAllApis(
      'f2.exp(),
      "f2.exp()",
      "EXP(f2)",
      math.exp(42.toByte).toString)
    testAllApis(
      'f3.exp(),
      "f3.exp()",
      "EXP(f3)",
      math.exp(43.toShort).toString)
    testAllApis(
      'f4.exp(),
      "f4.exp()",
      "EXP(f4)",
      math.exp(44.toLong).toString)
    testAllApis(
      'f5.exp(),
      "f5.exp()",
      "EXP(f5)",
      math.exp(4.5.toFloat).toString)
    testAllApis(
      'f6.exp(),
      "f6.exp()",
      "EXP(f6)",
      math.exp(4.6).toString)
    testAllApis(
      'f7.exp(),
      "exp(f7)",
      "EXP(f7)",
      math.exp(3).toString)
    testAllApis(
      3.exp(),
      "exp(3)",
      "EXP(3)",
      math.exp(3).toString)
  }
  // LOG10 over byte/short/long/float/double fields; expected values mirror
  // scala.math.log10 on the same inputs.
  @Test
  def testLog10(): Unit = {
    testAllApis(
      'f2.log10(),
      "f2.log10()",
      "LOG10(f2)",
      math.log10(42.toByte).toString)
    testAllApis(
      'f3.log10(),
      "f3.log10()",
      "LOG10(f3)",
      math.log10(43.toShort).toString)
    testAllApis(
      'f4.log10(),
      "f4.log10()",
      "LOG10(f4)",
      math.log10(44.toLong).toString)
    testAllApis(
      'f5.log10(),
      "f5.log10()",
      "LOG10(f5)",
      math.log10(4.5.toFloat).toString)
    testAllApis(
      'f6.log10(),
      "f6.log10()",
      "LOG10(f6)",
      math.log10(4.6).toString)
  }
  // LOG2 on a field, on arithmetic expressions ('f6 - 'f6 cancels to 0 so
  // the composite arguments are effectively constants 100 and f6+20), and on
  // an integer literal. Expected values are hard-coded decimal strings.
  @Test
  def testLog2(): Unit = {
    testAllApis(
      'f6.log2(),
      "f6.log2",
      "LOG2(f6)",
      "2.2016338611696504")
    testAllApis(
      ('f6 - 'f6 + 100).log2(),
      "(f6 - f6 + 100).log2()",
      "LOG2(f6 - f6 + 100)",
      "6.643856189774725")
    testAllApis(
      ('f6 + 20).log2(),
      "(f6+20).log2",
      "LOG2(f6+20)",
      "4.620586410451877")
    testAllApis(
      10.log2(),
      "10.log2",
      "LOG2(10)",
      "3.3219280948873626")
  }
  // POWER across all numeric type pairings (byte/short/long/float/double/int
  // and BigDecimal f22 = 2); expected values mirror scala.math.pow. The
  // BigDecimal cases cast to DOUBLE on the SQL side as a CALCITE-1467
  // workaround (see TODO below).
  @Test
  def testPower(): Unit = {
    // f7: int , f4: long, f6: double
    testAllApis(
      'f2.power('f7),
      "f2.power(f7)",
      "POWER(f2, f7)",
      math.pow(42.toByte, 3).toString)
    testAllApis(
      'f3.power('f6),
      "f3.power(f6)",
      "POWER(f3, f6)",
      math.pow(43.toShort, 4.6D).toString)
    testAllApis(
      'f4.power('f5),
      "f4.power(f5)",
      "POWER(f4, f5)",
      math.pow(44.toLong, 4.5.toFloat).toString)
    // NOTE(review): duplicate of the previous case — presumably accidental.
    testAllApis(
      'f4.power('f5),
      "f4.power(f5)",
      "POWER(f4, f5)",
      math.pow(44.toLong, 4.5.toFloat).toString)
    // f5: float
    testAllApis('f5.power('f5),
      "f5.power(f5)",
      "power(f5, f5)",
      math.pow(4.5F, 4.5F).toString)
    testAllApis('f5.power('f6),
      "f5.power(f6)",
      "power(f5, f6)",
      math.pow(4.5F, 4.6D).toString)
    testAllApis('f5.power('f7),
      "f5.power(f7)",
      "power(f5, f7)",
      math.pow(4.5F, 3).toString)
    testAllApis('f5.power('f4),
      "f5.power(f4)",
      "power(f5, f4)",
      math.pow(4.5F, 44L).toString)
    // f22: bigDecimal
    // TODO delete casting in SQL when CALCITE-1467 is fixed
    testAllApis(
      'f22.cast(Types.DOUBLE).power('f5),
      "f22.cast(DOUBLE).power(f5)",
      "power(CAST(f22 AS DOUBLE), f5)",
      math.pow(2, 4.5F).toString)
    testAllApis(
      'f22.cast(Types.DOUBLE).power('f6),
      "f22.cast(DOUBLE).power(f6)",
      "power(CAST(f22 AS DOUBLE), f6)",
      math.pow(2, 4.6D).toString)
    testAllApis(
      'f22.cast(Types.DOUBLE).power('f7),
      "f22.cast(DOUBLE).power(f7)",
      "power(CAST(f22 AS DOUBLE), f7)",
      math.pow(2, 3).toString)
    testAllApis(
      'f22.cast(Types.DOUBLE).power('f4),
      "f22.cast(DOUBLE).power(f4)",
      "power(CAST(f22 AS DOUBLE), f4)",
      math.pow(2, 44L).toString)
    // BigDecimal as the exponent (uncast on the SQL side)
    testAllApis(
      'f6.power('f22.cast(Types.DOUBLE)),
      "f6.power(f22.cast(DOUBLE))",
      "power(f6, f22)",
      math.pow(4.6D, 2).toString)
  }
  // SQRT over double/int/long/BigDecimal/float fields and literals. The
  // float case expects math.pow(4.5F, 0.5) — presumably because the runtime
  // evaluates float sqrt via POWER; TODO confirm. Last case pins the SQL
  // rewrite of a decimal-literal sqrt to POWER (see FLINK-4621 note).
  @Test
  def testSqrt(): Unit = {
    testAllApis(
      'f6.sqrt(),
      "f6.sqrt",
      "SQRT(f6)",
      math.sqrt(4.6D).toString)
    testAllApis(
      'f7.sqrt(),
      "f7.sqrt",
      "SQRT(f7)",
      math.sqrt(3).toString)
    testAllApis(
      'f4.sqrt(),
      "f4.sqrt",
      "SQRT(f4)",
      math.sqrt(44L).toString)
    testAllApis(
      'f22.cast(Types.DOUBLE).sqrt(),
      "f22.cast(DOUBLE).sqrt",
      "SQRT(CAST(f22 AS DOUBLE))",
      math.sqrt(2.0).toString)
    testAllApis(
      'f5.sqrt(),
      "f5.sqrt",
      "SQRT(f5)",
      math.pow(4.5F, 0.5).toString)
    testAllApis(
      25.sqrt(),
      "25.sqrt()",
      "SQRT(25)",
      "5.0")
    testAllApis(
      2.2.sqrt(),
      "2.2.sqrt()",
      "POWER(CAST(2.2 AS DOUBLE), CAST(0.5 AS DOUBLE))", // TODO fix FLINK-4621
      math.sqrt(2.2).toString)
  }
  // COSH on literals 0 and -1 and on long/double/int/BigDecimal fields.
  // Note: in "-1.cosh()" the minus is part of the numeric literal, so this
  // is (-1).cosh(), matching the expected math.cosh(-1).
  @Test
  def testCosh(): Unit = {
    testAllApis(
      0.cosh(),
      "0.cosh()",
      "COSH(0)",
      math.cosh(0).toString
    )
    testAllApis(
      -1.cosh(),
      "-1.cosh()",
      "COSH(-1)",
      math.cosh(-1).toString
    )
    testAllApis(
      'f4.cosh(),
      "f4.cosh",
      "COSH(f4)",
      math.cosh(44L).toString)
    testAllApis(
      'f6.cosh(),
      "f6.cosh",
      "COSH(f6)",
      math.cosh(4.6D).toString)
    testAllApis(
      'f7.cosh(),
      "f7.cosh",
      "COSH(f7)",
      math.cosh(3).toString)
    testAllApis(
      'f22.cosh(),
      "f22.cosh",
      "COSH(f22)",
      math.cosh(2.0).toString)
  }
  // LN (natural log) over byte/short/long/float/double fields; expected
  // values mirror scala.math.log on the same inputs.
  @Test
  def testLn(): Unit = {
    testAllApis(
      'f2.ln(),
      "f2.ln()",
      "LN(f2)",
      math.log(42.toByte).toString)
    testAllApis(
      'f3.ln(),
      "f3.ln()",
      "LN(f3)",
      math.log(43.toShort).toString)
    testAllApis(
      'f4.ln(),
      "f4.ln()",
      "LN(f4)",
      math.log(44.toLong).toString)
    testAllApis(
      'f5.ln(),
      "f5.ln()",
      "LN(f5)",
      math.log(4.5.toFloat).toString)
    testAllApis(
      'f6.ln(),
      "f6.ln()",
      "LN(f6)",
      math.log(4.6).toString)
  }
  // ABS: positive fields f2-f6 pass through unchanged; f9-f13 presumably
  // hold the negated counterparts (-42, -43, ...) mapped back to positives
  // — TODO confirm against the shared test data. f15 is a negative
  // BigDecimal with full precision preserved.
  @Test
  def testAbs(): Unit = {
    testAllApis(
      'f2.abs(),
      "f2.abs()",
      "ABS(f2)",
      "42")
    testAllApis(
      'f3.abs(),
      "f3.abs()",
      "ABS(f3)",
      "43")
    testAllApis(
      'f4.abs(),
      "f4.abs()",
      "ABS(f4)",
      "44")
    testAllApis(
      'f5.abs(),
      "f5.abs()",
      "ABS(f5)",
      "4.5")
    testAllApis(
      'f6.abs(),
      "f6.abs()",
      "ABS(f6)",
      "4.6")
    testAllApis(
      'f9.abs(),
      "f9.abs()",
      "ABS(f9)",
      "42")
    testAllApis(
      'f10.abs(),
      "f10.abs()",
      "ABS(f10)",
      "43")
    testAllApis(
      'f11.abs(),
      "f11.abs()",
      "ABS(f11)",
      "44")
    testAllApis(
      'f12.abs(),
      "f12.abs()",
      "ABS(f12)",
      "4.5")
    testAllApis(
      'f13.abs(),
      "f13.abs()",
      "ABS(f13)",
      "4.6")
    testAllApis(
      'f15.abs(),
      "f15.abs()",
      "ABS(f15)",
      "1231.1231231321321321111")
  }
  // Numeric FLOOR/CEIL: float keeps its fractional type ("4.0"/"5.0"),
  // integer short is a fixed point of both, and the negative BigDecimal f15
  // (-1231.123...) floors away from zero (-1232) and ceils toward zero
  // (-1231).
  @Test
  def testArithmeticFloorCeil(): Unit = {
    testAllApis(
      'f5.floor(),
      "f5.floor()",
      "FLOOR(f5)",
      "4.0")
    testAllApis(
      'f5.ceil(),
      "f5.ceil()",
      "CEIL(f5)",
      "5.0")
    testAllApis(
      'f3.floor(),
      "f3.floor()",
      "FLOOR(f3)",
      "43")
    testAllApis(
      'f3.ceil(),
      "f3.ceil()",
      "CEIL(f3)",
      "43")
    testAllApis(
      'f15.floor(),
      "f15.floor()",
      "FLOOR(f15)",
      "-1232")
    testAllApis(
      'f15.ceil(),
      "f15.ceil()",
      "CEIL(f15)",
      "-1231")
  }
  // SIN over byte/short/long/float/double fields and a large negative
  // BigDecimal; expected values mirror scala.math.sin.
  @Test
  def testSin(): Unit = {
    testAllApis(
      'f2.sin(),
      "f2.sin()",
      "SIN(f2)",
      math.sin(42.toByte).toString)
    testAllApis(
      'f3.sin(),
      "f3.sin()",
      "SIN(f3)",
      math.sin(43.toShort).toString)
    testAllApis(
      'f4.sin(),
      "f4.sin()",
      "SIN(f4)",
      math.sin(44.toLong).toString)
    testAllApis(
      'f5.sin(),
      "f5.sin()",
      "SIN(f5)",
      math.sin(4.5.toFloat).toString)
    testAllApis(
      'f6.sin(),
      "f6.sin()",
      "SIN(f6)",
      math.sin(4.6).toString)
    testAllApis(
      'f15.sin(),
      "sin(f15)",
      "SIN(f15)",
      math.sin(-1231.1231231321321321111).toString)
  }
  // COS over byte/short/long/float/double fields and a large negative
  // BigDecimal; expected values mirror scala.math.cos.
  @Test
  def testCos(): Unit = {
    testAllApis(
      'f2.cos(),
      "f2.cos()",
      "COS(f2)",
      math.cos(42.toByte).toString)
    testAllApis(
      'f3.cos(),
      "f3.cos()",
      "COS(f3)",
      math.cos(43.toShort).toString)
    testAllApis(
      'f4.cos(),
      "f4.cos()",
      "COS(f4)",
      math.cos(44.toLong).toString)
    testAllApis(
      'f5.cos(),
      "f5.cos()",
      "COS(f5)",
      math.cos(4.5.toFloat).toString)
    testAllApis(
      'f6.cos(),
      "f6.cos()",
      "COS(f6)",
      math.cos(4.6).toString)
    testAllApis(
      'f15.cos(),
      "cos(f15)",
      "COS(f15)",
      math.cos(-1231.1231231321321321111).toString)
  }
  // SINH on literals 0 and -1 and on long/double/int/BigDecimal fields
  // (sinh is odd, so the -1 case also exercises sign handling).
  @Test
  def testSinh(): Unit = {
    testAllApis(
      0.sinh(),
      "0.sinh()",
      "SINH(0)",
      math.sinh(0).toString)
    testAllApis(
      -1.sinh(),
      "-1.sinh()",
      "SINH(-1)",
      math.sinh(-1).toString)
    testAllApis(
      'f4.sinh(),
      "f4.sinh",
      "SINH(f4)",
      math.sinh(44L).toString)
    testAllApis(
      'f6.sinh(),
      "f6.sinh",
      "SINH(f6)",
      math.sinh(4.6D).toString)
    testAllApis(
      'f7.sinh(),
      "f7.sinh",
      "SINH(f7)",
      math.sinh(3).toString)
    testAllApis(
      'f22.sinh(),
      "f22.sinh",
      "SINH(f22)",
      math.sinh(2.0).toString)
  }
  // TAN over byte/short/long/float/double fields and a large negative
  // BigDecimal; expected values mirror scala.math.tan.
  @Test
  def testTan(): Unit = {
    testAllApis(
      'f2.tan(),
      "f2.tan()",
      "TAN(f2)",
      math.tan(42.toByte).toString)
    testAllApis(
      'f3.tan(),
      "f3.tan()",
      "TAN(f3)",
      math.tan(43.toShort).toString)
    testAllApis(
      'f4.tan(),
      "f4.tan()",
      "TAN(f4)",
      math.tan(44.toLong).toString)
    testAllApis(
      'f5.tan(),
      "f5.tan()",
      "TAN(f5)",
      math.tan(4.5.toFloat).toString)
    testAllApis(
      'f6.tan(),
      "f6.tan()",
      "TAN(f6)",
      math.tan(4.6).toString)
    testAllApis(
      'f15.tan(),
      "tan(f15)",
      "TAN(f15)",
      math.tan(-1231.1231231321321321111).toString)
  }
  // TANH on literals 0 and -1 and on long/double/int/BigDecimal fields.
  @Test
  def testTanh(): Unit = {
    testAllApis(
      0.tanh(),
      "0.tanh()",
      "TANH(0)",
      math.tanh(0).toString)
    testAllApis(
      -1.tanh(),
      "-1.tanh()",
      "TANH(-1)",
      math.tanh(-1).toString)
    testAllApis(
      'f4.tanh(),
      "f4.tanh",
      "TANH(f4)",
      math.tanh(44L).toString)
    testAllApis(
      'f6.tanh(),
      "f6.tanh",
      "TANH(f6)",
      math.tanh(4.6D).toString)
    testAllApis(
      'f7.tanh(),
      "f7.tanh",
      "TANH(f7)",
      math.tanh(3).toString)
    testAllApis(
      'f22.tanh(),
      "f22.tanh",
      "TANH(f22)",
      math.tanh(2.0).toString)
  }
  // COT (cotangent): the JVM has no math.cot, so expected values are
  // computed as 1 / tan(x). Covers all numeric field widths plus a large
  // negative BigDecimal.
  @Test
  def testCot(): Unit = {
    testAllApis(
      'f2.cot(),
      "f2.cot()",
      "COT(f2)",
      (1.0d / math.tan(42.toByte)).toString)
    testAllApis(
      'f3.cot(),
      "f3.cot()",
      "COT(f3)",
      (1.0d / math.tan(43.toShort)).toString)
    testAllApis(
      'f4.cot(),
      "f4.cot()",
      "COT(f4)",
      (1.0d / math.tan(44.toLong)).toString)
    testAllApis(
      'f5.cot(),
      "f5.cot()",
      "COT(f5)",
      (1.0d / math.tan(4.5.toFloat)).toString)
    testAllApis(
      'f6.cot(),
      "f6.cot()",
      "COT(f6)",
      (1.0d / math.tan(4.6)).toString)
    testAllApis(
      'f15.cot(),
      "cot(f15)",
      "COT(f15)",
      (1.0d / math.tan(-1231.1231231321321321111)).toString)
  }
  // ASIN over fields f25-f31, whose values all lie in asin's domain [-1, 1].
  // Note 0.42.toByte and 0.45.toFloat in the expected values: the byte case
  // truncates to 0 — presumably matching the declared field types of the
  // shared test row; TODO confirm.
  @Test
  def testAsin(): Unit = {
    testAllApis(
      'f25.asin(),
      "f25.asin()",
      "ASIN(f25)",
      math.asin(0.42.toByte).toString)
    testAllApis(
      'f26.asin(),
      "f26.asin()",
      "ASIN(f26)",
      math.asin(0.toShort).toString)
    testAllApis(
      'f27.asin(),
      "f27.asin()",
      "ASIN(f27)",
      math.asin(0.toLong).toString)
    testAllApis(
      'f28.asin(),
      "f28.asin()",
      "ASIN(f28)",
      math.asin(0.45.toFloat).toString)
    testAllApis(
      'f29.asin(),
      "f29.asin()",
      "ASIN(f29)",
      math.asin(0.46).toString)
    testAllApis(
      'f30.asin(),
      "f30.asin()",
      "ASIN(f30)",
      math.asin(1).toString)
    testAllApis(
      'f31.asin(),
      "f31.asin()",
      "ASIN(f31)",
      math.asin(-0.1231231321321321111).toString)
  }
  // ACOS over fields f25-f31 (all within acos's domain [-1, 1]); same field
  // set and conventions as testAsin above.
  @Test
  def testAcos(): Unit = {
    testAllApis(
      'f25.acos(),
      "f25.acos()",
      "ACOS(f25)",
      math.acos(0.42.toByte).toString)
    testAllApis(
      'f26.acos(),
      "f26.acos()",
      "ACOS(f26)",
      math.acos(0.toShort).toString)
    testAllApis(
      'f27.acos(),
      "f27.acos()",
      "ACOS(f27)",
      math.acos(0.toLong).toString)
    testAllApis(
      'f28.acos(),
      "f28.acos()",
      "ACOS(f28)",
      math.acos(0.45.toFloat).toString)
    testAllApis(
      'f29.acos(),
      "f29.acos()",
      "ACOS(f29)",
      math.acos(0.46).toString)
    testAllApis(
      'f30.acos(),
      "f30.acos()",
      "ACOS(f30)",
      math.acos(1).toString)
    testAllApis(
      'f31.acos(),
      "f31.acos()",
      "ACOS(f31)",
      math.acos(-0.1231231321321321111).toString)
  }
  // ATAN over fields f25-f31; same field set and conventions as testAsin.
  @Test
  def testAtan(): Unit = {
    testAllApis(
      'f25.atan(),
      "f25.atan()",
      "ATAN(f25)",
      math.atan(0.42.toByte).toString)
    testAllApis(
      'f26.atan(),
      "f26.atan()",
      "ATAN(f26)",
      math.atan(0.toShort).toString)
    testAllApis(
      'f27.atan(),
      "f27.atan()",
      "ATAN(f27)",
      math.atan(0.toLong).toString)
    testAllApis(
      'f28.atan(),
      "f28.atan()",
      "ATAN(f28)",
      math.atan(0.45.toFloat).toString)
    testAllApis(
      'f29.atan(),
      "f29.atan()",
      "ATAN(f29)",
      math.atan(0.46).toString)
    testAllApis(
      'f30.atan(),
      "f30.atan()",
      "ATAN(f30)",
      math.atan(1).toString)
    testAllApis(
      'f31.atan(),
      "f31.atan()",
      "ATAN(f31)",
      math.atan(-0.1231231321321321111).toString)
  }
  // ATAN2(y, x) over fields f25-f31, including both argument orders for the
  // byte/short pair and the zero/zero case (atan2(0, 0) is defined as 0).
  @Test
  def testAtan2(): Unit = {
    testAllApis(
      atan2('f25, 'f26),
      "atan2(f25, f26)",
      "ATAN2(f25, f26)",
      math.atan2(0.42.toByte, 0.toByte).toString)
    testAllApis(
      atan2('f26, 'f25),
      "atan2(f26, f25)",
      "ATAN2(f26, f25)",
      math.atan2(0.toShort, 0.toShort).toString)
    testAllApis(
      atan2('f27, 'f27),
      "atan2(f27, f27)",
      "ATAN2(f27, f27)",
      math.atan2(0.toLong, 0.toLong).toString)
    testAllApis(
      atan2('f28, 'f28),
      "atan2(f28, f28)",
      "ATAN2(f28, f28)",
      math.atan2(0.45.toFloat, 0.45.toFloat).toString)
    testAllApis(
      atan2('f29, 'f29),
      "atan2(f29, f29)",
      "ATAN2(f29, f29)",
      math.atan2(0.46, 0.46).toString)
    testAllApis(
      atan2('f30, 'f30),
      "atan2(f30, f30)",
      "ATAN2(f30, f30)",
      math.atan2(1, 1).toString)
    testAllApis(
      atan2('f31, 'f31),
      "atan2(f31, f31)",
      "ATAN2(f31, f31)",
      math.atan2(-0.1231231321321321111, -0.1231231321321321111).toString)
  }
  // DEGREES (radians -> degrees) over all numeric field widths plus a large
  // negative BigDecimal; expected values mirror scala.math.toDegrees.
  @Test
  def testDegrees(): Unit = {
    testAllApis(
      'f2.degrees(),
      "f2.degrees()",
      "DEGREES(f2)",
      math.toDegrees(42.toByte).toString)
    testAllApis(
      'f3.degrees(),
      "f3.degrees()",
      "DEGREES(f3)",
      math.toDegrees(43.toShort).toString)
    testAllApis(
      'f4.degrees(),
      "f4.degrees()",
      "DEGREES(f4)",
      math.toDegrees(44.toLong).toString)
    testAllApis(
      'f5.degrees(),
      "f5.degrees()",
      "DEGREES(f5)",
      math.toDegrees(4.5.toFloat).toString)
    testAllApis(
      'f6.degrees(),
      "f6.degrees()",
      "DEGREES(f6)",
      math.toDegrees(4.6).toString)
    testAllApis(
      'f15.degrees(),
      "degrees(f15)",
      "DEGREES(f15)",
      math.toDegrees(-1231.1231231321321321111).toString)
  }
  // RADIANS (degrees -> radians), mirror image of testDegrees above, with
  // expected values from scala.math.toRadians.
  @Test
  def testRadians(): Unit = {
    testAllApis(
      'f2.radians(),
      "f2.radians()",
      "RADIANS(f2)",
      math.toRadians(42.toByte).toString)
    testAllApis(
      'f3.radians(),
      "f3.radians()",
      "RADIANS(f3)",
      math.toRadians(43.toShort).toString)
    testAllApis(
      'f4.radians(),
      "f4.radians()",
      "RADIANS(f4)",
      math.toRadians(44.toLong).toString)
    testAllApis(
      'f5.radians(),
      "f5.radians()",
      "RADIANS(f5)",
      math.toRadians(4.5.toFloat).toString)
    testAllApis(
      'f6.radians(),
      "f6.radians()",
      "RADIANS(f6)",
      math.toRadians(4.6).toString)
    testAllApis(
      'f15.radians(),
      "radians(f15)",
      "RADIANS(f15)",
      math.toRadians(-1231.1231231321321321111).toString)
  }
  // SIGN: 1 for a positive long, 1.0 for a positive double (result keeps the
  // input's numeric type), and -1 for the negative BigDecimal f15.
  @Test
  def testSign(): Unit = {
    testAllApis(
      'f4.sign(),
      "f4.sign()",
      "SIGN(f4)",
      1.toString)
    testAllApis(
      'f6.sign(),
      "f6.sign()",
      "SIGN(f6)",
      1.0.toString)
    testAllApis(
      'f15.sign(),
      "sign(f15)",
      "SIGN(f15)",
      (-1).toString)
  }
  // ROUND(value, places) where the places argument is itself a column:
  // rounding a double up, a negative double to more places than it has
  // (identity), and a long with a negative places column (rounds 44 -> 40;
  // f32 presumably holds -1 — TODO confirm against the test data).
  @Test
  def testRound(): Unit = {
    testAllApis(
      'f29.round('f30),
      "f29.round(f30)",
      "ROUND(f29, f30)",
      0.5.toString)
    testAllApis(
      'f31.round('f7),
      "f31.round(f7)",
      "ROUND(f31, f7)",
      (-0.123).toString)
    testAllApis(
      'f4.round('f32),
      "f4.round(f32)",
      "ROUND(f4, f32)",
      40.toString)
  }
  // PI: constant; note the SQL form is the keyword "PI" (no parentheses).
  @Test
  def testPi(): Unit = {
    testAllApis(
      pi(),
      "pi()",
      "PI",
      math.Pi.toString)
  }
  // RAND(seed) / RAND_INTEGER(seed, bound): with a fixed seed the result is
  // deterministic and must equal java.util.Random seeded identically (seed 3
  // comes from column f7, bound 44 from f4 cast to INT). A fresh Random is
  // built per case because each call consumes one value from the sequence.
  @Test
  def testRandAndRandInteger(): Unit = {
    val random1 = new java.util.Random(1)
    testAllApis(
      rand(1),
      "rand(1)",
      "RAND(1)",
      random1.nextDouble().toString)
    val random2 = new java.util.Random(3)
    testAllApis(
      rand('f7),
      "rand(f7)",
      "RAND(f7)",
      random2.nextDouble().toString)
    val random3 = new java.util.Random(1)
    testAllApis(
      randInteger(1, 10),
      "randInteger(1, 10)",
      "RAND_INTEGER(1, 10)",
      random3.nextInt(10).toString)
    val random4 = new java.util.Random(3)
    testAllApis(
      randInteger('f7, 'f4.cast(Types.INT)),
      "randInteger(f7, f4.cast(INT))",
      "RAND_INTEGER(f7, CAST(f4 AS INT))",
      random4.nextInt(44).toString)
  }
  // E(): Euler's number. The second case repeats the first with lower-case
  // "e()" to verify case-insensitive function resolution.
  @Test
  def testE(): Unit = {
    testAllApis(
      e(),
      "E()",
      "E()",
      math.E.toString)
    testAllApis(
      e(),
      "e()",
      "e()",
      math.E.toString)
  }
  // LOG: one-argument form is natural log; two-argument SQL form is
  // LOG(base, value) while the Table API method is value.log(base) — note
  // the swapped argument order between the two syntaxes. testTableApi cases
  // additionally cover the free-function log(...) spelling.
  @Test
  def testLog(): Unit = {
    testAllApis(
      'f6.log(),
      "f6.log",
      "LOG(f6)",
      "1.5260563034950492"
    )
    testTableApi(
      log('f6),
      "log(f6)",
      "1.5260563034950492"
    )
    // f6 - f6 cancels to 0, so this is effectively log base 10 of 100
    testAllApis(
      ('f6 - 'f6 + 100).log('f6 - 'f6 + 10),
      "(f6 - f6 + 100).log(f6 - f6 + 10)",
      "LOG(f6 - f6 + 10, f6 - f6 + 100)",
      "2.0"
    )
    testAllApis(
      ('f6 + 20).log(),
      "(f6+20).log",
      "LOG(f6+20)",
      "3.202746442938317"
    )
    testAllApis(
      10.log(),
      "10.log",
      "LOG(10)",
      "2.302585092994046"
    )
    testAllApis(
      100.log(10),
      "100.log(10)",
      "LOG(10, 100)",
      "2.0"
    )
    testTableApi(
      log(10, 100),
      "log(10, 100)",
      "2.0"
    )
  }
  // TRUNCATE(value[, places]): truncates toward zero (no rounding, unlike
  // ROUND). Covers column-valued places, float/decimal inputs, the
  // one-argument form (truncate to integer), negative places (zeroing
  // trailing digits: 42 -> 40, 42 -> 0), and NULL in either argument
  // propagating to a NULL result.
  @Test
  def testTruncate(): Unit = {
    testAllApis(
      'f29.truncate('f30),
      "f29.truncate(f30)",
      "truncate(f29, f30)",
      "0.4")
    testAllApis(
      'f31.truncate('f7),
      "f31.truncate(f7)",
      "truncate(f31, f7)",
      "-0.123")
    testAllApis(
      'f4.truncate('f32),
      "f4.truncate(f32)",
      "truncate(f4, f32)",
      "40")
    testAllApis(
      'f28.cast(Types.DOUBLE).truncate(1),
      "f28.cast(DOUBLE).truncate(1)",
      "truncate(cast(f28 as DOUBLE), 1)",
      "0.4")
    testAllApis(
      'f31.cast(Types.DECIMAL).truncate(2),
      "f31.cast(DECIMAL).truncate(2)",
      "truncate(cast(f31 as decimal), 2)",
      "-0.12")
    testAllApis(
      'f36.cast(Types.DECIMAL).truncate(),
      "f36.cast(DECIMAL).truncate()",
      "truncate(42.324)",
      "42")
    testAllApis(
      'f5.cast(Types.FLOAT).truncate(),
      "f5.cast(FLOAT).truncate()",
      "truncate(cast(f5 as float))",
      "4.0")
    testAllApis(
      42.truncate(-1),
      "42.truncate(-1)",
      "truncate(42, -1)",
      "40")
    testAllApis(
      42.truncate(-3),
      "42.truncate(-3)",
      "truncate(42, -3)",
      "0")
    // The validation parameter is null
    testAllApis(
      'f33.cast(Types.INT).truncate(1),
      "f33.cast(INT).truncate(1)",
      "truncate(cast(null as integer), 1)",
      "null")
    testAllApis(
      43.21.truncate('f33.cast(Types.INT)),
      "43.21.truncate(f33.cast(INT))",
      "truncate(43.21, cast(null as integer))",
      "null")
    testAllApis(
      'f33.cast(Types.DOUBLE).truncate(1),
      "f33.cast(DOUBLE).truncate(1)",
      "truncate(cast(null as double), 1)",
      "null")
    testAllApis(
      'f33.cast(Types.INT).truncate(1),
      "f33.cast(INT).truncate(1)",
      "truncate(cast(null as integer))",
      "null")
    testAllApis(
      'f33.cast(Types.DOUBLE).truncate(),
      "f33.cast(DOUBLE).truncate()",
      "truncate(cast(null as double))",
      "null")
  }
// ----------------------------------------------------------------------------------------------
// Temporal functions
// ----------------------------------------------------------------------------------------------
  // EXTRACT over date (f16 = 1996-11-10), time (f17 = 06:55:44), timestamp
  // (f18 = 1996-11-10 06:55:44), a day-time interval (f19) and a year-month
  // interval (f20) — values inferred from the expected results; TODO confirm
  // against the shared test row. The second half covers SQL-only time units
  // (MILLENNIUM, CENTURY, DOY, DOW, WEEK) and the shorthand functions
  // YEAR/QUARTER/MONTH/WEEK/DAYOFYEAR/DAYOFMONTH/DAYOFWEEK/HOUR/MINUTE/SECOND.
  @Test
  def testExtract(): Unit = {
    testAllApis(
      'f16.extract(TimeIntervalUnit.YEAR),
      "f16.extract(YEAR)",
      "EXTRACT(YEAR FROM f16)",
      "1996")
    testAllApis(
      'f16.extract(TimeIntervalUnit.QUARTER),
      "f16.extract(QUARTER)",
      "EXTRACT(QUARTER FROM f16)",
      "4")
    testAllApis(
      'f16.extract(TimeIntervalUnit.MONTH),
      "extract(f16, MONTH)",
      "EXTRACT(MONTH FROM f16)",
      "11")
    testAllApis(
      'f16.extract(TimeIntervalUnit.WEEK),
      "extract(f16, WEEK)",
      "EXTRACT(WEEK FROM f16)",
      "45")
    testAllApis(
      'f16.extract(TimeIntervalUnit.DAY),
      "f16.extract(DAY)",
      "EXTRACT(DAY FROM f16)",
      "10")
    testAllApis(
      'f18.extract(TimeIntervalUnit.YEAR),
      "f18.extract(YEAR)",
      "EXTRACT(YEAR FROM f18)",
      "1996")
    testAllApis(
      'f18.extract(TimeIntervalUnit.QUARTER),
      "f18.extract(QUARTER)",
      "EXTRACT(QUARTER FROM f18)",
      "4")
    testAllApis(
      'f16.extract(TimeIntervalUnit.QUARTER),
      "f16.extract(QUARTER)",
      "EXTRACT(QUARTER FROM f16)",
      "4")
    testAllApis(
      'f18.extract(TimeIntervalUnit.MONTH),
      "f18.extract(MONTH)",
      "EXTRACT(MONTH FROM f18)",
      "11")
    testAllApis(
      'f18.extract(TimeIntervalUnit.WEEK),
      "f18.extract(WEEK)",
      "EXTRACT(WEEK FROM f18)",
      "45")
    testAllApis(
      'f18.extract(TimeIntervalUnit.DAY),
      "f18.extract(DAY)",
      "EXTRACT(DAY FROM f18)",
      "10")
    testAllApis(
      'f18.extract(TimeIntervalUnit.HOUR),
      "f18.extract(HOUR)",
      "EXTRACT(HOUR FROM f18)",
      "6")
    testAllApis(
      'f17.extract(TimeIntervalUnit.HOUR),
      "f17.extract(HOUR)",
      "EXTRACT(HOUR FROM f17)",
      "6")
    testAllApis(
      'f18.extract(TimeIntervalUnit.MINUTE),
      "f18.extract(MINUTE)",
      "EXTRACT(MINUTE FROM f18)",
      "55")
    testAllApis(
      'f17.extract(TimeIntervalUnit.MINUTE),
      "f17.extract(MINUTE)",
      "EXTRACT(MINUTE FROM f17)",
      "55")
    testAllApis(
      'f18.extract(TimeIntervalUnit.SECOND),
      "f18.extract(SECOND)",
      "EXTRACT(SECOND FROM f18)",
      "44")
    testAllApis(
      'f17.extract(TimeIntervalUnit.SECOND),
      "f17.extract(SECOND)",
      "EXTRACT(SECOND FROM f17)",
      "44")
    // f19 is a day-time interval: DAY extracts the full day count (16979)
    testAllApis(
      'f19.extract(TimeIntervalUnit.DAY),
      "f19.extract(DAY)",
      "EXTRACT(DAY FROM f19)",
      "16979")
    testAllApis(
      'f19.extract(TimeIntervalUnit.HOUR),
      "f19.extract(HOUR)",
      "EXTRACT(HOUR FROM f19)",
      "7")
    testAllApis(
      'f19.extract(TimeIntervalUnit.MINUTE),
      "f19.extract(MINUTE)",
      "EXTRACT(MINUTE FROM f19)",
      "23")
    testAllApis(
      'f19.extract(TimeIntervalUnit.SECOND),
      "f19.extract(SECOND)",
      "EXTRACT(SECOND FROM f19)",
      "33")
    // f20 is a year-month interval
    testAllApis(
      'f20.extract(TimeIntervalUnit.MONTH),
      "f20.extract(MONTH)",
      "EXTRACT(MONTH FROM f20)",
      "1")
    testAllApis(
      'f20.extract(TimeIntervalUnit.QUARTER),
      "f20.extract(QUARTER)",
      "EXTRACT(QUARTER FROM f20)",
      "1")
    testAllApis(
      'f20.extract(TimeIntervalUnit.YEAR),
      "f20.extract(YEAR)",
      "EXTRACT(YEAR FROM f20)",
      "2")
    // test SQL only time units
    testSqlApi(
      "EXTRACT(MILLENNIUM FROM f18)",
      "2")
    testSqlApi(
      "EXTRACT(MILLENNIUM FROM f16)",
      "2")
    testSqlApi(
      "EXTRACT(CENTURY FROM f18)",
      "20")
    testSqlApi(
      "EXTRACT(CENTURY FROM f16)",
      "20")
    testSqlApi(
      "EXTRACT(DOY FROM f18)",
      "315")
    testSqlApi(
      "EXTRACT(DOY FROM f16)",
      "315")
    testSqlApi(
      "EXTRACT(DOW FROM f18)",
      "1")
    testSqlApi(
      "EXTRACT(DOW FROM f16)",
      "1")
    testSqlApi(
      "EXTRACT(WEEK FROM f18)",
      "45")
    testSqlApi(
      "EXTRACT(WEEK FROM f16)",
      "45")
    testSqlApi(
      "YEAR(f18)",
      "1996")
    testSqlApi(
      "YEAR(f16)",
      "1996")
    testSqlApi(
      "QUARTER(f18)",
      "4")
    testSqlApi(
      "QUARTER(f16)",
      "4")
    testSqlApi(
      "MONTH(f18)",
      "11")
    testSqlApi(
      "MONTH(f16)",
      "11")
    testSqlApi(
      "WEEK(f18)",
      "45")
    testSqlApi(
      "WEEK(f16)",
      "45")
    testSqlApi(
      "DAYOFYEAR(f18)",
      "315")
    testSqlApi(
      "DAYOFYEAR(f16)",
      "315")
    testSqlApi(
      "DAYOFMONTH(f18)",
      "10")
    testSqlApi(
      "DAYOFMONTH(f16)",
      "10")
    testSqlApi(
      "DAYOFWEEK(f18)",
      "1")
    testSqlApi(
      "DAYOFWEEK(f16)",
      "1")
    testSqlApi(
      "HOUR(f17)",
      "6")
    testSqlApi(
      "HOUR(f19)",
      "7")
    testSqlApi(
      "MINUTE(f17)",
      "55")
    testSqlApi(
      "MINUTE(f19)",
      "23")
    testSqlApi(
      "SECOND(f17)",
      "44")
    testSqlApi(
      "SECOND(f19)",
      "33")
  }
  // Temporal FLOOR/CEIL (`FLOOR(x TO unit)` / `CEIL(x TO unit)`): truncates
  // or rounds a timestamp (f18 = 1996-11-10 06:55:44), a time (f17 =
  // 06:55:44), or a date (f16 = 1996-11-10) to the given unit boundary.
  // Note CEIL of a value already on a boundary is identity (last time case).
  @Test
  def testTemporalFloor(): Unit = {
    testAllApis(
      'f18.floor(TimeIntervalUnit.YEAR),
      "f18.floor(YEAR)",
      "FLOOR(f18 TO YEAR)",
      "1996-01-01 00:00:00.000")
    testAllApis(
      'f18.floor(TimeIntervalUnit.MONTH),
      "f18.floor(MONTH)",
      "FLOOR(f18 TO MONTH)",
      "1996-11-01 00:00:00.000")
    testAllApis(
      'f18.floor(TimeIntervalUnit.DAY),
      "f18.floor(DAY)",
      "FLOOR(f18 TO DAY)",
      "1996-11-10 00:00:00.000")
    testAllApis(
      'f18.floor(TimeIntervalUnit.MINUTE),
      "f18.floor(MINUTE)",
      "FLOOR(f18 TO MINUTE)",
      "1996-11-10 06:55:00.000")
    testAllApis(
      'f18.floor(TimeIntervalUnit.SECOND),
      "f18.floor(SECOND)",
      "FLOOR(f18 TO SECOND)",
      "1996-11-10 06:55:44.000")
    testAllApis(
      'f17.floor(TimeIntervalUnit.HOUR),
      "f17.floor(HOUR)",
      "FLOOR(f17 TO HOUR)",
      "06:00:00")
    testAllApis(
      'f17.floor(TimeIntervalUnit.MINUTE),
      "f17.floor(MINUTE)",
      "FLOOR(f17 TO MINUTE)",
      "06:55:00")
    testAllApis(
      'f17.floor(TimeIntervalUnit.SECOND),
      "f17.floor(SECOND)",
      "FLOOR(f17 TO SECOND)",
      "06:55:44")
    testAllApis(
      'f16.floor(TimeIntervalUnit.YEAR),
      "f16.floor(YEAR)",
      "FLOOR(f16 TO YEAR)",
      "1996-01-01")
    testAllApis(
      'f16.floor(TimeIntervalUnit.MONTH),
      "f16.floor(MONTH)",
      "FLOOR(f16 TO MONTH)",
      "1996-11-01")
    testAllApis(
      'f18.ceil(TimeIntervalUnit.YEAR),
      "f18.ceil(YEAR)",
      "CEIL(f18 TO YEAR)",
      "1997-01-01 00:00:00.000")
    testAllApis(
      'f18.ceil(TimeIntervalUnit.MONTH),
      "f18.ceil(MONTH)",
      "CEIL(f18 TO MONTH)",
      "1996-12-01 00:00:00.000")
    testAllApis(
      'f18.ceil(TimeIntervalUnit.DAY),
      "f18.ceil(DAY)",
      "CEIL(f18 TO DAY)",
      "1996-11-11 00:00:00.000")
    testAllApis(
      'f18.ceil(TimeIntervalUnit.MINUTE),
      "f18.ceil(MINUTE)",
      "CEIL(f18 TO MINUTE)",
      "1996-11-10 06:56:00.000")
    testAllApis(
      'f18.ceil(TimeIntervalUnit.SECOND),
      "f18.ceil(SECOND)",
      "CEIL(f18 TO SECOND)",
      "1996-11-10 06:55:45.000")
    testAllApis(
      'f17.ceil(TimeIntervalUnit.HOUR),
      "f17.ceil(HOUR)",
      "CEIL(f17 TO HOUR)",
      "07:00:00")
    testAllApis(
      'f17.ceil(TimeIntervalUnit.MINUTE),
      "f17.ceil(MINUTE)",
      "CEIL(f17 TO MINUTE)",
      "06:56:00")
    // already on a second boundary -> identity
    testAllApis(
      'f17.ceil(TimeIntervalUnit.SECOND),
      "f17.ceil(SECOND)",
      "CEIL(f17 TO SECOND)",
      "06:55:44")
    testAllApis(
      'f16.ceil(TimeIntervalUnit.YEAR),
      "f16.ceil(YEAR)",
      "CEIL(f16 TO YEAR)",
      "1997-01-01")
    testAllApis(
      'f16.ceil(TimeIntervalUnit.MONTH),
      "f16.ceil(MONTH)",
      "CEIL(f16 TO MONTH)",
      "1996-12-01")
  }
  // Current-time functions return non-deterministic values, so only minimal
  // string-length bounds on the cast output are asserted, plus one
  // deterministic property: LOCALTIMESTAMP compared with itself within a
  // single query evaluates equal.
  @Test
  def testCurrentTimePoint(): Unit = {
    // current time points are non-deterministic
    // we just test the format of the output
    // manual test can be found in NonDeterministicTests
    testAllApis(
      currentDate().cast(Types.STRING).charLength() >= 5,
      "currentDate().cast(STRING).charLength() >= 5",
      "CHAR_LENGTH(CAST(CURRENT_DATE AS VARCHAR)) >= 5",
      "true")
    testAllApis(
      currentTime().cast(Types.STRING).charLength() >= 5,
      "currentTime().cast(STRING).charLength() >= 5",
      "CHAR_LENGTH(CAST(CURRENT_TIME AS VARCHAR)) >= 5",
      "true")
    testAllApis(
      currentTimestamp().cast(Types.STRING).charLength() >= 12,
      "currentTimestamp().cast(STRING).charLength() >= 12",
      "CHAR_LENGTH(CAST(CURRENT_TIMESTAMP AS VARCHAR)) >= 12",
      "true")
    testAllApis(
      localTimestamp().cast(Types.STRING).charLength() >= 12,
      "localTimestamp().cast(STRING).charLength() >= 12",
      "CHAR_LENGTH(CAST(LOCALTIMESTAMP AS VARCHAR)) >= 12",
      "true")
    testAllApis(
      localTime().cast(Types.STRING).charLength() >= 5,
      "localTime().cast(STRING).charLength() >= 5",
      "CHAR_LENGTH(CAST(LOCALTIME AS VARCHAR)) >= 5",
      "true")
    // comparisons are deterministic
    testAllApis(
      localTimestamp() === localTimestamp(),
      "localTimestamp() === localTimestamp()",
      "LOCALTIMESTAMP = LOCALTIMESTAMP",
      "true")
  }
  // OVERLAPS: period intersection for (start, interval) and (start, end)
  // pairs over TIME, DATE, and TIMESTAMP. Covers touching/contained periods
  // (true), disjoint periods (false), a reversed (end < start) second period
  // (normalized, still true), and a millisecond-adjacent false case.
  @Test
  def testOverlaps(): Unit = {
    testAllApis(
      temporalOverlaps("2:55:00".toTime, 1.hour, "3:30:00".toTime, 2.hours),
      "temporalOverlaps('2:55:00'.toTime, 1.hour, '3:30:00'.toTime, 2.hours)",
      "(TIME '2:55:00', INTERVAL '1' HOUR) OVERLAPS (TIME '3:30:00', INTERVAL '2' HOUR)",
      "true")
    testAllApis(
      temporalOverlaps("9:00:00".toTime, "9:30:00".toTime, "9:29:00".toTime, "9:31:00".toTime),
      "temporalOverlaps(toTime('9:00:00'), '9:30:00'.toTime, '9:29:00'.toTime, '9:31:00'.toTime)",
      "(TIME '9:00:00', TIME '9:30:00') OVERLAPS (TIME '9:29:00', TIME '9:31:00')",
      "true")
    testAllApis(
      temporalOverlaps("9:00:00".toTime, "10:00:00".toTime, "10:15:00".toTime, 3.hours),
      "temporalOverlaps('9:00:00'.toTime, '10:00:00'.toTime, '10:15:00'.toTime, 3.hours)",
      "(TIME '9:00:00', TIME '10:00:00') OVERLAPS (TIME '10:15:00', INTERVAL '3' HOUR)",
      "false")
    testAllApis(
      temporalOverlaps("2011-03-10".toDate, 10.days, "2011-03-19".toDate, 10.days),
      "temporalOverlaps(toDate('2011-03-10'), 10.days, '2011-03-19'.toDate, 10.days)",
      "(DATE '2011-03-10', INTERVAL '10' DAY) OVERLAPS (DATE '2011-03-19', INTERVAL '10' DAY)",
      "true")
    testAllApis(
      temporalOverlaps("2011-03-10 05:02:02".toTimestamp, 0.milli,
        "2011-03-10 05:02:02".toTimestamp, "2011-03-10 05:02:01".toTimestamp),
      "temporalOverlaps(toTimestamp('2011-03-10 05:02:02'), 0.milli, " +
        "'2011-03-10 05:02:02'.toTimestamp, '2011-03-10 05:02:01'.toTimestamp)",
      "(TIMESTAMP '2011-03-10 05:02:02', INTERVAL '0' SECOND) OVERLAPS " +
        "(TIMESTAMP '2011-03-10 05:02:02', TIMESTAMP '2011-03-10 05:02:01')",
      "true")
    testAllApis(
      temporalOverlaps("2011-03-10 02:02:02.001".toTimestamp, 0.milli,
        "2011-03-10 02:02:02.002".toTimestamp, "2011-03-10 02:02:02.002".toTimestamp),
      "temporalOverlaps('2011-03-10 02:02:02.001'.toTimestamp, 0.milli, " +
        "'2011-03-10 02:02:02.002'.toTimestamp, '2011-03-10 02:02:02.002'.toTimestamp)",
      "(TIMESTAMP '2011-03-10 02:02:02.001', INTERVAL '0' SECOND) OVERLAPS " +
        "(TIMESTAMP '2011-03-10 02:02:02.002', TIMESTAMP '2011-03-10 02:02:02.002')",
      "false")
  }
  // TIMESTAMPDIFF: data-driven over every time-point unit. dataMap keys are
  // (SQL unit keyword, Table API TimePointUnit, legacy SQL_TSI_* alias);
  // each unit carries exactly four cases whose operand-type combination is
  // selected by position: 0 = (timestamp, timestamp), 1 = (date, timestamp),
  // 2 = (timestamp, date), 3 = (date, date). Negative expected values cover
  // end-before-start. Each case is additionally re-run through testSqlApi
  // with the SQL_TSI_* alias. Final two cases: NULL in either operand
  // propagates to a NULL result.
  @Test
  def testTimestampDiff(): Unit = {
    val dataMap = Map(
      ("DAY", TimePointUnit.DAY, "SQL_TSI_DAY") -> Seq(
        ("2018-07-03 11:11:11", "2018-07-05 11:11:11", "2"), // timestamp, timestamp
        ("2016-06-15", "2016-06-16 11:11:11", "1"), // date, timestamp
        ("2016-06-15 11:00:00", "2016-06-19", "3"), // timestamp, date
        ("2016-06-15", "2016-06-18", "3") // date, date
      ),
      ("HOUR", TimePointUnit.HOUR, "SQL_TSI_HOUR") -> Seq(
        ("2018-07-03 11:11:11", "2018-07-04 12:12:11", "25"),
        ("2016-06-15", "2016-06-16 11:11:11", "35"),
        ("2016-06-15 11:00:00", "2016-06-19", "85"),
        ("2016-06-15", "2016-06-12", "-72")
      ),
      ("MINUTE", TimePointUnit.MINUTE, "SQL_TSI_MINUTE") -> Seq(
        ("2018-07-03 11:11:11", "2018-07-03 12:10:11", "59"),
        ("2016-06-15", "2016-06-16 11:11:11", "2111"),
        ("2016-06-15 11:00:00", "2016-06-19", "5100"),
        ("2016-06-15", "2016-06-18", "4320")
      ),
      ("SECOND", TimePointUnit.SECOND, "SQL_TSI_SECOND") -> Seq(
        ("2018-07-03 11:11:11", "2018-07-03 11:12:12", "61"),
        ("2016-06-15", "2016-06-16 11:11:11", "126671"),
        ("2016-06-15 11:00:00", "2016-06-19", "306000"),
        ("2016-06-15", "2016-06-18", "259200")
      ),
      ("WEEK", TimePointUnit.WEEK, "SQL_TSI_WEEK") -> Seq(
        ("2018-05-03 11:11:11", "2018-07-03 11:12:12", "8"),
        ("2016-04-15", "2016-07-16 11:11:11", "13"),
        ("2016-04-15 11:00:00", "2016-09-19", "22"),
        ("2016-08-15", "2016-06-18", "-8")
      ),
      ("MONTH", TimePointUnit.MONTH, "SQL_TSI_MONTH") -> Seq(
        ("2018-07-03 11:11:11", "2018-09-05 11:11:11", "2"),
        ("2016-06-15", "2018-06-16 11:11:11", "24"),
        ("2016-06-15 11:00:00", "2018-05-19", "23"),
        ("2016-06-15", "2018-03-18", "21")
      ),
      ("QUARTER", TimePointUnit.QUARTER, "SQL_TSI_QUARTER") -> Seq(
        ("2018-01-03 11:11:11", "2018-09-05 11:11:11", "2"),
        ("2016-06-15", "2018-06-16 11:11:11", "8"),
        ("2016-06-15 11:00:00", "2018-05-19", "7"),
        ("2016-06-15", "2018-03-18", "7")
      )
    )
    for ((unitParts, dataParts) <- dataMap) {
      for ((data,index) <- dataParts.zipWithIndex) {
        index match {
          case 0 => // timestamp, timestamp
            testAllApis(
              timestampDiff(unitParts._2, data._1.toTimestamp, data._2.toTimestamp),
              s"timestampDiff(${unitParts._1}, '${data._1}'.toTimestamp, '${data._2}'.toTimestamp)",
              s"TIMESTAMPDIFF(${unitParts._1}, TIMESTAMP '${data._1}', TIMESTAMP '${data._2}')",
              data._3
            )
            testSqlApi( // sql tsi
              s"TIMESTAMPDIFF(${unitParts._3}, TIMESTAMP '${data._1}', TIMESTAMP '${data._2}')",
              data._3
            )
          case 1 => // date, timestamp
            testAllApis(
              timestampDiff(unitParts._2, data._1.toDate, data._2.toTimestamp),
              s"timestampDiff(${unitParts._1}, '${data._1}'.toDate, '${data._2}'.toTimestamp)",
              s"TIMESTAMPDIFF(${unitParts._1}, DATE '${data._1}', TIMESTAMP '${data._2}')",
              data._3
            )
            testSqlApi( // sql tsi
              s"TIMESTAMPDIFF(${unitParts._3}, DATE '${data._1}', TIMESTAMP '${data._2}')",
              data._3
            )
          case 2 => // timestamp, date
            testAllApis(
              timestampDiff(unitParts._2, data._1.toTimestamp, data._2.toDate),
              s"timestampDiff(${unitParts._1}, '${data._1}'.toTimestamp, '${data._2}'.toDate)",
              s"TIMESTAMPDIFF(${unitParts._1}, TIMESTAMP '${data._1}', DATE '${data._2}')",
              data._3
            )
            testSqlApi( // sql tsi
              s"TIMESTAMPDIFF(${unitParts._3}, TIMESTAMP '${data._1}', DATE '${data._2}')",
              data._3
            )
          case 3 => // date, date
            testAllApis(
              timestampDiff(unitParts._2, data._1.toDate, data._2.toDate),
              s"timestampDiff(${unitParts._1}, '${data._1}'.toDate, '${data._2}'.toDate)",
              s"TIMESTAMPDIFF(${unitParts._1}, DATE '${data._1}', DATE '${data._2}')",
              data._3
            )
            testSqlApi( // sql tsi
              s"TIMESTAMPDIFF(${unitParts._3}, DATE '${data._1}', DATE '${data._2}')",
              data._3
            )
        }
      }
    }
    testAllApis(
      timestampDiff(TimePointUnit.DAY, nullOf(Types.SQL_TIMESTAMP),
        "2016-02-24 12:42:25".toTimestamp),
      "timestampDiff(DAY, nullOf(SQL_TIMESTAMP), '2016-02-24 12:42:25'.toTimestamp)",
      "TIMESTAMPDIFF(DAY, CAST(NULL AS TIMESTAMP), TIMESTAMP '2016-02-24 12:42:25')",
      "null"
    )
    testAllApis(
      timestampDiff(TimePointUnit.DAY, "2016-02-24 12:42:25".toTimestamp,
        nullOf(Types.SQL_TIMESTAMP)),
      "timestampDiff(DAY, '2016-02-24 12:42:25'.toTimestamp, nullOf(SQL_TIMESTAMP))",
      "TIMESTAMPDIFF(DAY, TIMESTAMP '2016-02-24 12:42:25', CAST(NULL AS TIMESTAMP))",
      "null"
    )
  }
  @Test
  def testTimestampAdd(): Unit = {
    // Offsets (first tuple element) applied by every interval unit below to a
    // fixed base timestamp (second element); the expected-result Seqs are
    // positionally aligned with this Seq.
    val data = Seq(
      (1, "2017-11-29 22:58:58.998"),
      (3, "2017-11-29 22:58:58.998"),
      (-1, "2017-11-29 22:58:58.998"),
      (-61, "2017-11-29 22:58:58.998"),
      (-1000, "2017-11-29 22:58:58.998")
    )
    val YEAR = Seq(
      "2018-11-29 22:58:58.998",
      "2020-11-29 22:58:58.998",
      "2016-11-29 22:58:58.998",
      "1956-11-29 22:58:58.998",
      "1017-11-29 22:58:58.998")
    val QUARTER = Seq(
      "2018-03-01 22:58:58.998",
      "2018-08-31 22:58:58.998",
      "2017-08-29 22:58:58.998",
      "2002-08-29 22:58:58.998",
      "1767-11-29 22:58:58.998")
    val MONTH = Seq(
      "2017-12-29 22:58:58.998",
      "2018-03-01 22:58:58.998",
      "2017-10-29 22:58:58.998",
      "2012-10-29 22:58:58.998",
      "1934-07-29 22:58:58.998")
    val WEEK = Seq(
      "2017-12-06 22:58:58.998",
      "2017-12-20 22:58:58.998",
      "2017-11-22 22:58:58.998",
      "2016-09-28 22:58:58.998",
      "1998-09-30 22:58:58.998")
    val DAY = Seq(
      "2017-11-30 22:58:58.998",
      "2017-12-02 22:58:58.998",
      "2017-11-28 22:58:58.998",
      "2017-09-29 22:58:58.998",
      "2015-03-05 22:58:58.998")
    val HOUR = Seq(
      "2017-11-29 23:58:58.998",
      "2017-11-30 01:58:58.998",
      "2017-11-29 21:58:58.998",
      "2017-11-27 09:58:58.998",
      "2017-10-19 06:58:58.998")
    val MINUTE = Seq(
      "2017-11-29 22:59:58.998",
      "2017-11-29 23:01:58.998",
      "2017-11-29 22:57:58.998",
      "2017-11-29 21:57:58.998",
      "2017-11-29 06:18:58.998")
    val SECOND = Seq(
      "2017-11-29 22:58:59.998",
      "2017-11-29 22:59:01.998",
      "2017-11-29 22:58:57.998",
      "2017-11-29 22:57:57.998",
      "2017-11-29 22:42:18.998")
    // we do not support FRAC_SECOND, MICROSECOND, SQL_TSI_FRAC_SECOND, SQL_TSI_MICROSECOND
    // Maps each supported SQL interval keyword (plain and SQL_TSI_* spelling) to
    // its expected results.
    val intervalMapResults = Map(
      "YEAR" -> YEAR,
      "SQL_TSI_YEAR" -> YEAR,
      "QUARTER" -> QUARTER,
      "SQL_TSI_QUARTER" -> QUARTER,
      "MONTH" -> MONTH,
      "SQL_TSI_MONTH" -> MONTH,
      "WEEK" -> WEEK,
      "SQL_TSI_WEEK" -> WEEK,
      "DAY" -> DAY,
      "SQL_TSI_DAY" -> DAY,
      "HOUR" -> HOUR,
      "SQL_TSI_HOUR" -> HOUR,
      "MINUTE" -> MINUTE,
      "SQL_TSI_MINUTE" -> MINUTE,
      "SECOND" -> SECOND,
      "SQL_TSI_SECOND" -> SECOND
    )
    // Builds the Table-API interval expression and its string (Table API "expression
    // string") form for a given SQL interval keyword and count.
    def intervalCount(interval: String, count: Int): (Expression, String) = interval match {
      case "YEAR" => (count.years, s"$count.years")
      case "SQL_TSI_YEAR" => (count.years, s"$count.years")
      case "QUARTER" => (count.quarters, s"$count.quarters")
      case "SQL_TSI_QUARTER" => (count.quarters, s"$count.quarters")
      case "MONTH" => (count.months, s"$count.months")
      case "SQL_TSI_MONTH" => (count.months, s"$count.months")
      case "WEEK" => (count.weeks, s"$count.weeks")
      case "SQL_TSI_WEEK" => (count.weeks, s"$count.weeks")
      case "DAY" => (count.days, s"$count.days")
      case "SQL_TSI_DAY" => (count.days, s"$count.days")
      case "HOUR" => (count.hours, s"$count.hours")
      case "SQL_TSI_HOUR" => (count.hours, s"$count.hours")
      case "MINUTE" => (count.minutes, s"$count.minutes")
      case "SQL_TSI_MINUTE" => (count.minutes, s"$count.minutes")
      case "SECOND" => (count.seconds, s"$count.seconds")
      case "SQL_TSI_SECOND" => (count.seconds, s"$count.seconds")
    }
    // Cross-check every interval unit against every offset through the Table API,
    // the expression-string API, and SQL.
    for ((interval, result) <- intervalMapResults) {
      for (i <- 0 to 4) {
        val (offset, ts) = data(i)
        val timeInterval = intervalCount(interval, offset)
        testAllApis(
          timeInterval._1 + ts.toTimestamp,
          s"${timeInterval._2} + '$ts'.toTimestamp",
          s"TIMESTAMPADD($interval, $offset, TIMESTAMP '$ts')",
          result(i))
      }
    }
    // A null interval or a null timestamp propagates to a null result.
    testAllApis(
      "2016-02-24 12:42:25".toTimestamp + nullOf(Types.INTERVAL_MILLIS),
      "'2016-02-24 12:42:25'.toTimestamp + nullOf(INTERVAL_MILLIS)",
      "TIMESTAMPADD(HOUR, CAST(NULL AS INTEGER), TIMESTAMP '2016-02-24 12:42:25')",
      "null")
    testAllApis(
      nullOf(Types.SQL_TIMESTAMP) + -200.hours,
      "nullOf(SQL_TIMESTAMP) + -200.hours",
      "TIMESTAMPADD(HOUR, -200, CAST(NULL AS TIMESTAMP))",
      "null")
    testAllApis(
      nullOf(Types.SQL_TIMESTAMP) + 3.months,
      "nullOf(SQL_TIMESTAMP) + 3.months",
      "TIMESTAMPADD(MONTH, 3, CAST(NULL AS TIMESTAMP))",
      "null")
    // TIMESTAMPADD with DATE returns a TIMESTAMP value for sub-day intervals.
    testAllApis("2016-06-15".toDate + 1.month,
      "'2016-06-15'.toDate + 1.month",
      "timestampadd(MONTH, 1, date '2016-06-15')",
      "2016-07-15")
    testAllApis("2016-06-15".toDate + 1.day,
      "'2016-06-15'.toDate + 1.day",
      "timestampadd(DAY, 1, date '2016-06-15')",
      "2016-06-16")
    testAllApis("2016-06-15".toTimestamp - 1.hour,
      "'2016-06-15'.toTimestamp - 1.hour",
      "timestampadd(HOUR, -1, date '2016-06-15')",
      "2016-06-14 23:00:00.000")
    testAllApis("2016-06-15".toTimestamp + 1.minute,
      "'2016-06-15'.toTimestamp + 1.minute",
      "timestampadd(MINUTE, 1, date '2016-06-15')",
      "2016-06-15 00:01:00.000")
    testAllApis("2016-06-15".toTimestamp - 1.second,
      "'2016-06-15'.toTimestamp - 1.second",
      "timestampadd(SQL_TSI_SECOND, -1, date '2016-06-15')",
      "2016-06-14 23:59:59.000")
    testAllApis("2016-06-15".toTimestamp + 1.second,
      "'2016-06-15'.toTimestamp + 1.second",
      "timestampadd(SECOND, 1, date '2016-06-15')",
      "2016-06-15 00:00:01.000")
    testAllApis(nullOf(Types.SQL_TIMESTAMP) + 1.second,
      "nullOf(SQL_TIMESTAMP) + 1.second",
      "timestampadd(SECOND, 1, cast(null as date))",
      "null")
    testAllApis(nullOf(Types.SQL_TIMESTAMP) + 1.day,
      "nullOf(SQL_TIMESTAMP) + 1.day",
      "timestampadd(DAY, 1, cast(null as date))",
      "null")
    // Round to the last day of previous month
    testAllApis("2016-05-31".toDate + 1.month,
      "'2016-05-31'.toDate + 1.month",
      "timestampadd(MONTH, 1, date '2016-05-31')",
      "2016-06-30")
    testAllApis("2016-01-31".toDate + 5.month,
      "'2016-01-31'.toDate + 5.month",
      "timestampadd(MONTH, 5, date '2016-01-31')",
      "2016-06-30")
    testAllApis("2016-03-31".toDate - 1.month,
      "'2016-03-31'.toDate - 1.month",
      "timestampadd(MONTH, -1, date '2016-03-31')",
      "2016-02-29")
    testAllApis("2016-03-31".toDate - 1.week,
      "'2016-03-31'.toDate - 1.week",
      "timestampadd(WEEK, -1, date '2016-03-31')",
      "2016-03-24")
    // test TIMESTAMPADD with positive time interval in various granularity.
    // Sub-day units wrap around midnight; day-or-coarser units leave a TIME unchanged.
    testSqlApi("TIMESTAMPADD(SECOND, 1, time '23:59:59')", "00:00:00")
    testSqlApi("TIMESTAMPADD(MINUTE, 1, time '00:00:00')", "00:01:00")
    testSqlApi("TIMESTAMPADD(MINUTE, 1, time '23:59:59')", "00:00:59")
    testSqlApi("TIMESTAMPADD(HOUR, 1, time '23:59:59')", "00:59:59")
    testSqlApi("TIMESTAMPADD(DAY, 15, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(WEEK, 3, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(MONTH, 6, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(QUARTER, 1, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(YEAR, 10, time '23:59:59')", "23:59:59")
    // test TIMESTAMPADD with negative time interval in various granularity.
    testSqlApi("TIMESTAMPADD(SECOND, -1, time '00:00:00')", "23:59:59")
    testSqlApi("TIMESTAMPADD(MINUTE, -1, time '00:00:00')", "23:59:00")
    testSqlApi("TIMESTAMPADD(MINUTE, -1, time '00:00:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(HOUR, -1, time '00:00:00')", "23:00:00")
    testSqlApi("TIMESTAMPADD(DAY, -1, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(WEEK, -1, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(MONTH, -1, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(QUARTER, -1, time '23:59:59')", "23:59:59")
    testSqlApi("TIMESTAMPADD(YEAR, -1, time '23:59:59')", "23:59:59")
  }
// ----------------------------------------------------------------------------------------------
// Hash functions
// ----------------------------------------------------------------------------------------------
@Test
def testHashFunctions(): Unit = {
val expectedMd5 = "098f6bcd4621d373cade4e832627b4f6"
val expectedSha1 = "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
val expectedSha224 = "90a3ed9e32b2aaf4c61c410eb925426119e1a9dc53d4286ade99a809"
val expectedSha256 = "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
val expectedSha384 = "768412320f7b0aa5812fce428dc4706b3cae50e02a64caa16a7" +
"82249bfe8efc4b7ef1ccb126255d196047dfedf17a0a9"
val expectedSha512 = "ee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a" +
"5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff"
testAllApis(
"test".md5(),
"md5('test')",
"MD5('test')",
expectedMd5)
testAllApis(
"test".sha1(),
"sha1('test')",
"SHA1('test')",
expectedSha1)
// sha224
testAllApis(
"test".sha224(),
"sha224('test')",
"SHA224('test')",
expectedSha224)
// sha-2 224
testAllApis(
"test".sha2(224),
"sha2('test', 224)",
"SHA2('test', 224)",
expectedSha224)
// sha256
testAllApis(
"test".sha256(),
"sha256('test')",
"SHA256('test')",
expectedSha256)
// sha-2 256
testAllApis(
"test".sha2(256),
"sha2('test', 256)",
"SHA2('test', 256)",
expectedSha256)
// sha384
testAllApis(
"test".sha384(),
"sha384('test')",
"SHA384('test')",
expectedSha384)
// sha-2 384
testAllApis(
"test".sha2(384),
"sha2('test', 384)",
"SHA2('test', 384)",
expectedSha384)
// sha512
testAllApis(
"test".sha512(),
"sha512('test')",
"SHA512('test')",
expectedSha512)
// sha-2 512
testAllApis(
"test".sha2(512),
"sha2('test', 512)",
"SHA2('test', 512)",
expectedSha512)
// null tests
testAllApis(
'f33.md5(),
"md5(f33)",
"MD5(f33)",
"null")
testAllApis(
'f33.sha1(),
"sha1(f33)",
"SHA1(f33)",
"null")
testAllApis(
'f33.sha224(),
"sha224(f33)",
"SHA2(f33, 224)",
"null")
testAllApis(
'f33.sha2(224),
"sha2(f33, 224)",
"SHA2(f33, 224)",
"null")
testAllApis(
'f33.sha256(),
"sha256(f33)",
"SHA2(f33, 256)",
"null")
testAllApis(
'f33.sha384(),
"sha384(f33)",
"SHA2(f33, 384)",
"null")
testAllApis(
'f33.sha512(),
"sha512(f33)",
"SHA2(f33, 512)",
"null")
testAllApis(
"test".sha2(nullOf(Types.INT)),
"sha2('test', nullOf(INT))",
"SHA2('test', CAST(NULL AS INT))",
"null")
// non-constant bit length
testAllApis(
"test".sha2('f34),
"sha2('test', f34)",
"SHA2('test', f34)",
expectedSha256)
}
// ----------------------------------------------------------------------------------------------
// Other functions
// ----------------------------------------------------------------------------------------------
@Test
def testIsTrueIsFalse(): Unit = {
testAllApis(
'f1.isTrue,
"f1.isTrue",
"f1 IS TRUE",
"true")
testAllApis(
'f21.isTrue,
"f21.isTrue",
"f21 IS TRUE",
"false")
testAllApis(
false.isFalse,
"false.isFalse",
"FALSE IS FALSE",
"true")
testAllApis(
'f21.isFalse,
"f21.isFalse",
"f21 IS FALSE",
"false")
testAllApis(
'f1.isNotTrue,
"f1.isNotTrue",
"f1 IS NOT TRUE",
"false")
testAllApis(
'f21.isNotTrue,
"f21.isNotTrue",
"f21 IS NOT TRUE",
"true")
testAllApis(
false.isNotFalse,
"false.isNotFalse",
"FALSE IS NOT FALSE",
"false")
testAllApis(
'f21.isNotFalse,
"f21.isNotFalse",
"f21 IS NOT FALSE",
"true")
}
}
| tzulitai/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/expressions/ScalarFunctionsTest.scala | Scala | apache-2.0 | 70,550 |
import sbt._
import Keys._
object FinagleApns extends Build {

  // Centralised dependency versions, so upgrades touch a single place.
  object V {
    val finagle = "6.27.0"
  }

  // Resolver selection for publishing: snapshot versions go to the snapshots
  // repository, everything else to releases, on the internal Nexus.
  val publishToHopperNexus: Def.Initialize[Option[sbt.Resolver]] =
    version { (v: String) =>
      val nexus = "http://nexus.lab.mtl/nexus/"
      if (v.trim.endsWith("SNAPSHOT"))
        Some("snapshots" at nexus + "content/repositories/snapshots")
      else
        Some("releases" at nexus + "content/repositories/releases")
    }

  // Compile/test dependencies shared by every module.
  val baseSettings = Defaults.coreDefaultSettings ++ Seq(
    libraryDependencies ++= Seq(
      "com.twitter" %% "finagle-core" % V.finagle,
      "org.scalatest" %% "scalatest" % "2.2.6" % "test"
    )
  )

  // NOTE(review): the Build trait and the <<= operator are sbt 0.13 idioms,
  // removed in sbt 1.x; migrating requires moving this to build.sbt.
  lazy val buildSettings = Seq(
    organization := "com.hopper",
    scalaVersion := "2.11.8",
    publishTo <<= publishToHopperNexus
  )

  // Single-module root project.
  lazy val root = Project(id = "finagle-apns",
    base = file("."),
    settings = baseSettings ++ buildSettings
  )
}
| hopper/finagle-apns | project/Build.scala | Scala | apache-2.0 | 919 |
package smarthouse.restapi.http.routes
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.PathMatchers.IntNumber
import de.heikoseeberger.akkahttpcirce.CirceSupport
import io.circe.generic.auto._
import io.circe.syntax._
import smarthouse.restapi.http.SecurityDirectives
import smarthouse.restapi.models.UserEntityUpdate
import smarthouse.restapi.services.{AuthService, UsersService}
import scala.concurrent.ExecutionContext
/**
 * HTTP routes rooted at `/users`:
 *   - `GET  /users`                  lists all users
 *   - `GET  /users/me`               returns the authenticated user
 *   - `POST /users/me`               updates the authenticated user's own entity
 *   - `GET | POST | DELETE /users/<id>`  CRUD on a user by numeric id
 *
 * `authService` backs the `authenticate` directive provided by
 * [[SecurityDirectives]]; `usersService` supplies the persistence operations
 * imported below.
 */
class UsersServiceRoute(val authService: AuthService,
                        usersService: UsersService
                       )(implicit executionContext: ExecutionContext) extends CirceSupport with SecurityDirectives {

  import StatusCodes._
  import usersService._

  val route = pathPrefix("users") {
    pathEndOrSingleSlash {
      get {
        complete(getUsers().map(_.asJson))
      }
    } ~
    pathPrefix("me") {
      pathEndOrSingleSlash {
        authenticate { loggedUser =>
          get {
            complete(loggedUser)
          } ~
          post {
            entity(as[UserEntityUpdate]) { userUpdate =>
              // NOTE(review): loggedUser.id.get throws if the id is absent —
              // confirm authenticated users always carry a persisted id.
              complete(updateUser(loggedUser.id.get, userUpdate).map(_.asJson))
            }
          }
        }
      }
    } ~
    pathPrefix(IntNumber) { id =>
      pathEndOrSingleSlash {
        get {
          complete(getUserById(id).map(_.asJson))
        } ~
        post {
          entity(as[UserEntityUpdate]) { userUpdate =>
            complete(updateUser(id, userUpdate).map(_.asJson))
          }
        } ~
        delete {
          // The deletion result is irrelevant to the response, hence `_`.
          onSuccess(deleteUser(id)) { _ =>
            complete(NoContent)
          }
        }
      }
    }
  }
}
| andrewobukhov/smart-house | src/main/scala/smarthouse/restapi/http/routes/UsersServiceRoute.scala | Scala | mit | 1,792 |
package bloomfilter.mutable._128bit
import org.openjdk.jmh.annotations.{Benchmark, Param, Scope, State}
import scala.util.Random
@State(Scope.Benchmark)
class ArrayByteItemBenchmark {
  private val itemsExpected = 1000000L
  private val falsePositiveRate = 0.01
  private val random = new Random()
  private val bf = BloomFilter[Array[Byte]](itemsExpected, falsePositiveRate)

  @Param(Array("1024"))
  var length: Int = _

  // Allocated in setup(): JMH injects @Param values only after the state object
  // is constructed, so `new Array[Byte](length)` in a field initializer would
  // read the default length of 0 and the benchmark would exercise an empty item.
  private var item: Array[Byte] = _

  @org.openjdk.jmh.annotations.Setup
  def setup(): Unit = {
    item = new Array[Byte](length)
    random.nextBytes(item)
    bf.add(item)
  }

  /** Measures re-adding an already-present item to the filter. */
  @Benchmark
  def myPut(): Unit = {
    bf.add(item)
  }

  /** Measures a membership query for a present item. */
  @Benchmark
  def myGet(): Unit = {
    bf.mightContain(item)
  }
}
| alexandrnikitin/bloom-filter-scala | benchmarks/src/main/scala/bloomfilter/mutable/_128bit/ArrayByteItemBenchmark.scala | Scala | mit | 644 |
// Type class mapping a type T to an associated result type U.
trait Assoc[T] {
  type U
  def foo(t: T): U
}

// Phantom link between T and the singleton type of its Assoc instance;
// only resolved implicitly, never evaluated (see the erased val below).
trait Link[T, A]

case class Foo(i: Int)
object Foo {
  println(s"Foo companion")
  // Erased: participates in implicit search at compile time but produces no
  // runtime value, which is why `null` is an acceptable right-hand side.
  erased implicit val barLink: Link[Foo, FooAssoc.type] = null
}

// The Assoc instance for Foo; prints on initialization so the test's output
// shows when (and whether) this object is forced.
implicit object FooAssoc extends Assoc[Foo] {
  println(s"FooAssoc")
  type U = Int
  def foo(t: Foo): Int = t.i
}
import compiletime.summonFrom
// Resolves the Assoc-style companion for T in two steps: summon the Link to
// learn the associated singleton type `s`, then summon the `s` value itself.
// Being inline, each call site gets a return type narrowed from the declared
// upper bound Any to the concrete `s`.
inline def link[T] <: Any =
  summonFrom {
    case _: Link[T, s] =>
      summonFrom {
        case stuff: s => stuff
      }
  }
object Test {
  println(s"Test")
  def main(args: Array[String]): Unit = {
    val foo = Foo(23)
    println(s"foo: $foo")
    // link[Foo] inlines to the FooAssoc instance via the erased Link, so
    // `assoc.foo` is statically known to return Int.
    val assoc = link[Foo]
    val res: Int = assoc.foo(foo)
    println(s"assoc: ${res}")
  }
}
| som-snytt/dotty | tests/run-custom-args/companion-loading.scala | Scala | apache-2.0 | 698 |
/*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.partitioning
import com.fasterxml.jackson.annotation.JsonProperty
import com.stratio.cassandra.lucene.IndexException
import org.apache.cassandra.config.CFMetaData
import org.apache.cassandra.db._
import org.apache.cassandra.dht.Token
/** [[Partitioner]] based on the partition key token. Rows will be stored in an index partition
 * determined by the hash of the partition key token. Partition-directed searches will be routed to
 * a single partition, increasing performance. However, token range searches will be routed to all
 * the partitions, with a slightly lower performance.
 *
 * This partitioner guarantees an excellent load balancing between index partitions.
 *
 * @param partitions the number of index partitions per node
 * @author Andres de la Pena `adelapena@stratio.com`
 */
case class PartitionerOnToken(partitions: Int) extends Partitioner {

  if (partitions <= 0) throw new IndexException(
    s"The number of partitions should be strictly positive but found $partitions")

  /** Returns the index partition for the specified token, always in [0, partitions).
    *
    * The remainder is taken before `Math.abs`: `Math.abs(Long.MinValue)` overflows and
    * remains negative, so the previous `abs(value) % partitions` form could return a
    * negative partition for the minimum token. `abs(value % partitions)` is safe and
    * agrees with the old result for every non-overflowing token value.
    */
  private[this] def partition(token: Token): Int =
    Math.abs(token.getTokenValue.asInstanceOf[Long] % partitions).toInt

  /** @inheritdoc */
  override def numPartitions: Int = partitions

  /** @inheritdoc */
  override def partition(key: DecoratedKey): Int = partition(key.getToken)

  /** @inheritdoc */
  override def partitions(command: ReadCommand): List[Int] = command match {
    case c: SinglePartitionReadCommand => List(partition(c.partitionKey))
    case c: PartitionRangeReadCommand =>
      val range = c.dataRange()
      val start = range.startKey().getToken
      val stop = range.stopKey().getToken
      // A degenerate range (start == stop and not the ring minimum) hits one partition;
      // any real token range may span data in every partition.
      if (start.equals(stop) && !start.isMinimum) List(partition(start)) else allPartitions
    case _ => throw new IndexException(s"Unsupported read command type: ${command.getClass}")
  }
}
/** Companion object for [[PartitionerOnToken]]. */
object PartitionerOnToken {

  /** [[PartitionerOnToken]] builder.
    *
    * @param partitions the number of index partitions per node
    */
  case class Builder(@JsonProperty("partitions") partitions: Int) extends Partitioner.Builder {
    // Table metadata is not needed: partitioning depends only on the token value.
    override def build(metadata: CFMetaData): PartitionerOnToken = PartitionerOnToken(partitions)
  }
}
| adelapena/cassandra-lucene-index | plugin/src/main/scala/com/stratio/cassandra/lucene/partitioning/PartitionerOnToken.scala | Scala | apache-2.0 | 2,898 |
package gr.cslab.ece.ntua.musqle.plan.hypergraph
import gr.cslab.ece.ntua.musqle.engine.Engine
import gr.cslab.ece.ntua.musqle.plan.spark.{MuSQLEJoin, MuSQLEScan}
import scala.collection.mutable
abstract class DPJoinPlan(val left: DPJoinPlan, val right: DPJoinPlan, val engine: Engine,
                          var cost: Double, val info: QueryInfo) {
  // Unique id drawn from the companion's global counter.
  final val resultNumber: Int = DPJoinPlan.getResultNumber
  // Name under which this node's intermediate result is materialized.
  final val tmpName: String = s"mtpmres$resultNumber"
  // A node is a join exactly when both children are present.
  final val isJoin: Boolean = (left != null && right !=null)
  var isRoot: Boolean = false
  var projections: mutable.HashSet[String] = new mutable.HashSet[String]

  // Placeholder SQL rendering; concrete subclasses are expected to override this.
  def toSQL: String = "Some SQL Text..."
  def getRowsEstimation: Long = engine.getRowsEstimation(this)
  def explain() = println(this.print(""))
  def print(indent: String): String
  def getCost: Double

  def getInterResults(): mutable.HashSet[DPJoinPlan] = { getInterResults(this) }

  // Collects the plan nodes that must be materialized as intermediate results.
  private def getInterResults(plan: DPJoinPlan): mutable.HashSet[DPJoinPlan] = {
    val s = mutable.HashSet[DPJoinPlan]()
    if (plan.isRoot) s.add(plan)
    plan match {
      // NOTE(review): the Move and default branches return fresh sets, discarding
      // any root entry just added to `s`; only the Join branch propagates it.
      // Confirm this asymmetry is intentional.
      case move: Move => mutable.HashSet(move)
      case join: Join => s.union(getInterResults(plan.left)).union(getInterResults(plan.right))
      case _ => mutable.HashSet.empty
    }
  }

  def toAbstract(): AbstractPlan = { AbstractPlan(getVertices(this), Seq()) }

  // Names of all base tables reachable from `plan` (Move keeps its child's tables).
  def getVertices(plan: DPJoinPlan): mutable.HashSet[String] = {
    plan match {
      case scan: MuSQLEScan => mutable.HashSet(scan.tableName)
      case join: MuSQLEJoin => getVertices(join.left) ++ getVertices(join.right)
      case move: Move => getVertices(move.left)
    }
  }
}
/** Companion state for [[DPJoinPlan]]: a global counter used to name
  * intermediate results, plus an accumulator for time spent estimating costs.
  */
object DPJoinPlan {

  // Monotonically increasing id handed out to each new plan node.
  private var resultNumber = 0

  /** Resets the intermediate-result counter back to zero. */
  def zeroResultNumber: Unit = resultNumber = 0

  /** Advances the counter and returns its new value (first call yields 1). */
  def getResultNumber: Int = {
    resultNumber = resultNumber + 1
    resultNumber
  }

  // Total seconds accumulated by cost-estimation calls (see Join.getCost).
  var totalGetCost = 0.0
}
/**
 * A table scan operation (A vertex in the graph)
 * @param table The table to be loaded
 * @param engine The engine which hosts the table
 * */
class Scan(val table: Vertex, override val engine: Engine, override val info: QueryInfo)
  extends DPJoinPlan(null, null, engine, 5, info){
  // NOTE(review): the constructor passes a hard-coded initial cost of 5, but the
  // effective cost is delegated to the engine below — confirm the constant matters.
  override def print(indent: String): String = s"$indent*Scan $this, " +
    s"Engine: [$engine], Cost: [${getCost}], [${this.tmpName}] "

  override def getCost: Double = engine.getCost(this)
}
/**
 * A Join between two [[DPJoinPlan]]s
 * @param left The left subplan
 * @param right The right subplan
 * @param vars The join keys
 * @param engine The engine in which the join will be executed
 * */
class Join(override val left: DPJoinPlan, override val right: DPJoinPlan, val vars: mutable.HashSet[Int],
           override val engine: Engine, override val info: QueryInfo)
  extends DPJoinPlan(null, null, engine, left.getCost + right.getCost, info){
  override def print(indent: String): String = s"${indent}" +
    s"Join [${left.tmpName}, ${right.tmpName}] " +
    s"on ${vars} , Engine: [$engine], Cost: [$getCost], [$tmpName]" +
    s"\\n${left.print(indent + "\\t")}" +
    s"\\n${right.print(indent + "\\t")}"

  // NOTE(review): getCost re-evaluates both subtrees on every call (no caching)
  // and times itself, adding the elapsed seconds to DPJoinPlan.totalGetCost.
  override def getCost: Double = {
    val start = System.currentTimeMillis()
    val cost = left.getCost + right.getCost + engine.getCost(this)
    val elapsed = System.currentTimeMillis() - start
    DPJoinPlan.totalGetCost += elapsed / 1000.0
    cost
  }
}
/**
 * A Move of a [[DPJoinPlan]] to another [[Engine]]
 * @param dpJoinPlan The plan to be moved
 * @param engine The destination [[Engine]]
 * */
class Move(val dpJoinPlan: DPJoinPlan, override val engine: Engine, override val info: QueryInfo)
  extends DPJoinPlan(dpJoinPlan, null, engine, dpJoinPlan.getCost, info){
  def print(indent: String): String = s"${indent}Move [${dpJoinPlan.tmpName}] from ${dpJoinPlan.engine} " +
    s"to $engine, Cost $cost [$tmpName]\\n${dpJoinPlan.print(indent + "\\t")}"
  def compareTo(o: DPJoinPlan): Int = cost.compareTo(o.getCost)
  // Cost of the moved subplan plus the engine's transfer cost.
  def getCost: Double = dpJoinPlan.getCost + engine.getMoveCost(dpJoinPlan)
  // Row estimate comes from the source engine, since the data originates there.
  override def getRowsEstimation: Long = left.engine.getRowsEstimation(left)
} | gsvic/MuSQLE | src/main/scala/gr/cslab/ece/ntua/musqle/plan/hypergraph/DPJoinPlan.scala | Scala | apache-2.0 | 4,071 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.resource
import scala.collection.mutable
import org.apache.spark.resource.ResourceProfile._
import org.apache.spark.resource.ResourceUtils._
/**
 * A set of task resource requests. This is used in conjunction with the ResourceProfile to
 * programmatically specify the resources needed for an RDD that will be applied at the
 * stage level.
 *
 * This api is currently private until the rest of the pieces are in place and then it
 * will become public.
 */
private[spark] class TaskResourceRequests() extends Serializable {

  // Keyed by resource name (e.g. CPUS or a custom "resource.gpu"); a later
  // request for the same name overwrites the earlier one.
  private val _taskResources = new mutable.HashMap[String, TaskResourceRequest]()

  /** Immutable snapshot of the accumulated requests, keyed by resource name. */
  def requests: Map[String, TaskResourceRequest] = _taskResources.toMap

  /**
   * Specify number of cpus per Task.
   *
   * @param amount Number of cpus to allocate per Task.
   */
  def cpus(amount: Int): this.type = {
    val t = new TaskResourceRequest(CPUS, amount)
    _taskResources(CPUS) = t
    this
  }

  /**
   * Amount of a particular custom resource(GPU, FPGA, etc) to use. The resource names supported
   * correspond to the regular Spark configs with the prefix removed. For instance, resources
   * like GPUs are resource.gpu (spark configs spark.task.resource.gpu.*)
   *
   * @param rName Name of the resource.
   * @param amount Amount requesting as a Double to support fractional resource requests.
   *               Valid values are less than or equal to 0.5 or whole numbers. This essentially
   *               lets you configure X number of tasks to run on a single resource,
   *               ie amount equals 0.5 translates into 2 tasks per resource address.
   */
  def resource(rName: String, amount: Double): this.type = {
    val t = new TaskResourceRequest(rName, amount)
    _taskResources(rName) = t
    this
  }

  /** Adds a pre-built request, replacing any existing one for the same resource. */
  def addRequest(treq: TaskResourceRequest): this.type = {
    _taskResources(treq.resourceName) = treq
    this
  }

  override def toString: String = {
    s"Task resource requests: ${_taskResources}"
  }
}
| jkbradley/spark | core/src/main/scala/org/apache/spark/resource/TaskResourceRequests.scala | Scala | apache-2.0 | 2,809 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.time.ZoneId
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.datasources.DataSource
import org.apache.spark.sql.test.SharedSparkSession
class ResolvedDataSourceSuite extends SharedSparkSession {

  /** Resolves a data source name to its provider class, with a fixed timezone option. */
  private def getProvidingClass(name: String): Class[_] =
    DataSource(
      sparkSession = spark,
      className = name,
      options = Map(DateTimeUtils.TIMEZONE_OPTION -> ZoneId.systemDefault().getId)
    ).providingClass

  /** Asserts that each alias resolves to the expected provider class. */
  private def assertResolvesTo(expected: Class[_], aliases: String*): Unit =
    aliases.foreach(alias => assert(getProvidingClass(alias) === expected))

  test("jdbc") {
    assertResolvesTo(
      classOf[org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider],
      "jdbc",
      "org.apache.spark.sql.execution.datasources.jdbc",
      "org.apache.spark.sql.jdbc")
  }

  test("json") {
    assertResolvesTo(
      classOf[org.apache.spark.sql.execution.datasources.json.JsonFileFormat],
      "json",
      "org.apache.spark.sql.execution.datasources.json",
      "org.apache.spark.sql.json")
  }

  test("parquet") {
    assertResolvesTo(
      classOf[org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat],
      "parquet",
      "org.apache.spark.sql.execution.datasources.parquet",
      "org.apache.spark.sql.parquet")
  }

  test("csv") {
    assertResolvesTo(
      classOf[org.apache.spark.sql.execution.datasources.csv.CSVFileFormat],
      "csv",
      "com.databricks.spark.csv")
  }

  test("avro: show deploy guide for loading the external avro module") {
    for (provider <- Seq("avro", "org.apache.spark.sql.avro")) {
      val message = intercept[AnalysisException] {
        getProvidingClass(provider)
      }.getMessage
      assert(message.contains(s"Failed to find data source: $provider"))
      assert(message.contains("Please deploy the application as per the deployment section of"))
    }
  }

  test("kafka: show deploy guide for loading the external kafka module") {
    val message = intercept[AnalysisException] {
      getProvidingClass("kafka")
    }.getMessage
    assert(message.contains("Failed to find data source: kafka"))
    assert(message.contains("Please deploy the application as per the deployment section of"))
  }

  test("error message for unknown data sources") {
    val error = intercept[ClassNotFoundException] {
      getProvidingClass("asfdwefasdfasdf")
    }
    assert(error.getMessage.contains("Failed to find data source: asfdwefasdfasdf."))
  }
}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/sql/sources/ResolvedDataSourceSuite.scala | Scala | apache-2.0 | 4,135 |
package com.crobox.clickhouse.dsl.language
import com.crobox.clickhouse.DslIntegrationSpec
import com.crobox.clickhouse.dsl._
class CollectionFunctionTest extends DslIntegrationSpec {

  it should "succeed for IN functions" in {
    // Membership checks against both a plain Scala collection and a DSL tuple.
    val values = Seq(1, 4, 6, 9)
    val valuesTuple = tuple(1, 4, 6, 9)
    val present = 4
    val absent = 3

    // The rendered result is "1" for a match and "0" otherwise.
    r(const(present).in(values)) shouldBe "1"
    r(const(present).notIn(valuesTuple)) shouldBe "0"
    r(const(absent).globalIn(valuesTuple)) shouldBe "0"
    r(const(absent).globalNotIn(values)) shouldBe "1"
  }
}
| crobox/clickhouse-scala-client | dsl/src/test/scala/com/crobox/clickhouse/dsl/language/CollectionFunctionTest.scala | Scala | lgpl-3.0 | 597 |
/*
# Copyright 2016 Georges Lipka
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
*/
package com.glipka.easyReactJS.react
import scala.scalajs.js
import scala.scalajs.js._
import org.scalajs.dom.html
import js.{ UndefOr, Any, Function => JFn }
import js.annotation.{ JSBracketAccess, JSName }
import js.{ Any => jAny }
// https://github.com/DefinitelyTyped/DefinitelyTyped/blob/master/react/react.d.ts
// Scala.js facade for React's component class shape; member names and types
// must mirror the JavaScript side exactly, so only comments may change here.
@js.native
class ComponentClass[P](props: P, context: Any) extends js.Any {
  var propTypes: ValidationMap[P] = js.native
  var contextTypes: ValidationMap[P] = js.native
  var childContextTypes: ValidationMap[P] = js.native
  // Default prop values applied when the caller omits them.
  var defaultProps: P = js.native
  // Name used by React for display/debugging purposes.
  var displayName: String = js.native
}
| glipka/Easy-React-With-ScalaJS | src/main/scala/com/glipka/easyReactJS/react/ComponentClass.scala | Scala | apache-2.0 | 1,206 |
package gapt.expr.formula.fol
import gapt.expr._
import gapt.expr.formula.All
import gapt.expr.formula.And
import gapt.expr.formula.Atom
import gapt.expr.formula.Ex
import gapt.expr.formula.Formula
import gapt.expr.formula.Imp
import gapt.expr.formula.Neg
import gapt.expr.formula.Or
import gapt.expr.formula.hol.HOLFunction
import gapt.expr.ty.Ti
import gapt.expr.util.freeVariables
import gapt.expr.ty.To
import gapt.expr.ty.Ty
import gapt.proofs.SequentProof
import gapt.utils.Logger
import scala.util.matching.Regex
/**
* This is implements some heuristics to convert a fol formula obtained by
* [[gapt.expr.formula.fol.replaceAbstractions]] and [[gapt.expr.formula.fol.reduceHolToFol]] back to its original signature.
* Sometimes, types have to be guessed and the code is poorly tested, so it is unclear
* how general it is. It works (and is necessary) during the acnf creation of the n-tape proof.
*
* To extract a signature, use the `undoHol2Fol.getSignature`, to to the back translation use
* `undoHol2Fol.backtranslate`.
*/
object undoHol2Fol {
val logger = Logger( "undoHol2fol" )
type Signature = ( Map[String, Set[Const]], Map[String, Set[Var]] )
  /**
   * Translate the fol formula e to a hol formula over the given signature for constants and variables.
   *
   * @param e the fol formula.
   * @param sig_vars a mapping fol name to hol var with appropriate type
   * @param sig_consts a mapping fol name to hol const with appropriate type
   * @param abssymbol_map a mapping fol constant name to a lambda expression (obtained by replaceAbstractions)
   * @return the changed formula
   */
  def backtranslate(
    e: Expr,
    sig_vars: Map[String, List[Var]],
    sig_consts: Map[String, List[Const]],
    abssymbol_map: Hol2FolDefinitions ): Formula =
    // Delegates to the general overload with expected type `To` (formulas are
    // boolean-typed) and casts the result back down to Formula.
    backtranslate( e.asInstanceOf[Expr], sig_vars, sig_consts, abssymbol_map, Some( To ) ).asInstanceOf[Formula]
/**
* We do some dirty stuff in here to translate a prover9 term back to the richer type signature of hol proofs, undoing
* replace abstractions at the same time.
*/
def backtranslate(
  e: Expr,
  sig_vars: Map[String, List[Var]],
  sig_consts: Map[String, List[Const]],
  abssymbol_map: Hol2FolDefinitions,
  expected_type: Option[Ty] ): Expr = {
  e match {
    // --------------- logical structure ------------------------
    // Atoms: replace the first-order head symbol by the first constant of the
    // same name recorded in the signature, then recurse into the arguments.
    case Atom( Const( name, _, _ ), args ) if sig_consts contains name.toString =>
      val args_ = args.map( backtranslate( _, sig_vars, sig_consts, abssymbol_map, None ) )
      val head = sig_consts( name.toString )( 0 )
      Atom( head, args_ )
    /* case Equation(s, t) =>
      Equation(backtranslate( s, sig_vars, sig_consts, abssymbol_map, None ) ,
               backtranslate( t, sig_vars, sig_consts, abssymbol_map, None ) )
      */
    // Connectives: structural recursion into the immediate subformulas.
    case Neg( f ) => Neg( backtranslate( f, sig_vars, sig_consts, abssymbol_map ) )
    case And( f, g ) => And( backtranslate( f, sig_vars, sig_consts, abssymbol_map ), backtranslate( g, sig_vars, sig_consts, abssymbol_map ) )
    case Or( f, g ) => Or( backtranslate( f, sig_vars, sig_consts, abssymbol_map ), backtranslate( g, sig_vars, sig_consts, abssymbol_map ) )
    case Imp( f, g ) => Imp( backtranslate( f, sig_vars, sig_consts, abssymbol_map ), backtranslate( g, sig_vars, sig_consts, abssymbol_map ) )
    case All( x, f ) =>
      val f_ = backtranslate( f, sig_vars, sig_consts, abssymbol_map )
      // Re-bind the quantifier to the unique free variable of the same name in
      // the translated body; if none exists, keep the original binder.
      val xcandidates = freeVariables( f_ ).toList.filter( _.name == x.name )
      xcandidates match {
        case Nil => All( Var( x.name, x.ty ).asInstanceOf[Var], f_ )
        case List( x_ ) => All( x_, f_ )
        case _ => throw new Exception( "We have not more than one free variable with name " + x.name + xcandidates.mkString( ": (", ", ", ")" ) )
      }
    case Ex( x, f ) =>
      val f_ = backtranslate( f, sig_vars, sig_consts, abssymbol_map )
      // Same rebinding strategy as in the All case above.
      val xcandidates = freeVariables( f_ ).toList.filter( _.name == x.name )
      xcandidates match {
        case Nil => Ex( Var( x.name, x.ty ).asInstanceOf[Var], f_ )
        case List( x_ ) => Ex( x_, f_ )
        case _ => throw new Exception( "We have not more than one free variable with name " + x.name + xcandidates.mkString( ": (", ", ", ")" ) )
      }
    // --------------- term structure ------------------------
    //cases for term replacement
    // An abstracted symbol is expanded back to its defining term, re-abstracting
    // the definition's free variables so the result is closed.
    case Const( name, _, _ ) if abssymbol_map.lookupByName( name ).isDefined =>
      val Some( qterm_ ) = abssymbol_map.lookupByName( name )
      val qterm: Expr = freeVariables( qterm_ ).toList.foldRight( qterm_ )( ( v, term ) => Abs( v, term ) )
      expected_type match {
        case Some( expt ) =>
          require( qterm.ty == expt, "We did a replacement of the symbol " + name + " by " + qterm + " but the type " + qterm.ty + " is not the expected type " + expected_type )
          qterm
        case None =>
          qterm
      }
    // Applied abstracted symbol: expand the definition and re-apply the
    // back-translated arguments.
    case HOLFunction( Const( name, _, _ ), args ) if abssymbol_map.lookupByName( name ).isDefined =>
      val Some( qterm_ ) = abssymbol_map.lookupByName( name )
      val qterm: Expr = freeVariables( qterm_ ).toList.foldRight( qterm_ )( ( v, term ) => Abs( v, term ) )
      val btargs = args.map( x => backtranslate( x.asInstanceOf[Expr], sig_vars, sig_consts, abssymbol_map, None ) )
      val r = btargs.foldLeft( qterm )( ( term, nextarg ) => App( term, nextarg ) )
      expected_type match {
        case Some( expt ) =>
          // NOTE(review): this checks qterm.ty, not r.ty, against the expected
          // type — presumably intentional (pre-application check); confirm.
          require( qterm.ty == expt, "We did a replacement of the symbol " + name + " by " + qterm + " but the type " + qterm.ty + " is not the expected type " + expected_type )
          r
        case None =>
          r
      }
    //normal ones
    case HOLFunction( Const( name, _, _ ), args ) if sig_consts contains name =>
      val btargs = args.map( x => backtranslate( x.asInstanceOf[Expr], sig_vars, sig_consts, abssymbol_map, None ) )
      val head = sig_consts( name )( 0 ) //we have to pick a candidate somehow, lets go for the first
      HOLFunction( head, btargs )
    case Var( name, Ti ) if sig_vars contains name =>
      val head = sig_vars( name )( 0 ) //we have to pick a candidate somehow, lets go for the first
      head
    case Const( name, Ti, _ ) if sig_consts contains name =>
      val head = sig_consts( name )( 0 ) //we have to pick a candidate somehow, lets go for the first
      head
    // Variables matching the ivy naming scheme (v0, v1, ...) get type i.
    case Var( ivy_varname( name ), Ti ) =>
      logger.debug( "Guessing that the variable " + name + " comes from ivy, assigning type i." )
      Var( name, Ti ).asInstanceOf[Var]
    case Var( name, Ti ) =>
      throw new Exception( "No signature information for variable " + e )
    case Const( name, _, _ ) =>
      throw new Exception( "No signature information for const " + e )
    case _ =>
      throw new Exception( "Could not convert subterm " + e )
  }
}
val ivy_varname: Regex = """(v[0-9]+)""".r
/**
 * Collects the signature of all expressions occurring in the conclusions of
 * the subproofs of the given proof.
 *
 * @param proof the proof to traverse
 * @param extract maps a sequent element to the expression whose signature is collected
 */
def getSignature[F, T <: SequentProof[F, T]]( proof: SequentProof[F, T], extract: F => Expr ): Signature = {
  val exprs = for ( p <- proof.subProofs; f <- p.conclusion.elements map extract ) yield f
  getSignature( exprs.toList )
}
/**
 * Folds the single-expression signature collection over a list of expressions,
 * accumulating the constant and variable maps across all of them.
 */
def getSignature( fs: List[Expr] ): Signature =
  fs.foldLeft( ( Map[String, Set[Const]](), Map[String, Set[Var]]() ) )( ( maps, e ) => {
    //println("next "+maps._1.size+":"+maps._2.size)
    getSignature( e, maps._1, maps._2 )
  } )
/**
 * Collects the signature of a single expression.
 *
 * @param e the expression to traverse
 * @param csig accumulator mapping constant names to all constants of that name seen so far
 * @param vsig accumulator mapping variable names to all variables of that name seen so far
 * @return the input maps extended by every constant and variable occurring in e
 */
def getSignature( e: Expr, csig: Map[String, Set[Const]], vsig: Map[String, Set[Var]] ): ( Map[String, Set[Const]], Map[String, Set[Var]] ) = e match {
  case v: Var =>
    val name = v.name
    vsig.get( name ) match {
      case Some( list ) if list contains v =>
        ( csig, vsig )
      case Some( list ) =>
        ( csig, vsig + ( ( name, list + v ) ) )
      case None =>
        ( csig, vsig + ( ( name, Set( v ) ) ) )
    }
  case c: Const =>
    val name = c.name
    csig.get( name ) match {
      case Some( list ) if list contains c =>
        ( csig, vsig )
      case Some( list ) =>
        ( csig + ( ( name, list + c ) ), vsig )
      case None =>
        ( csig + ( ( name, Set( c ) ) ), vsig )
    }
  case App( s, t ) =>
    // Every branch of this function returns supersets of the maps it was
    // given, so threading the accumulators through both subterms already
    // yields the union; the previous explicit re-merge of the two
    // intermediate maps recomputed the same result in an extra O(n) pass.
    val ( cm1, vm1 ) = getSignature( s, csig, vsig )
    getSignature( t, cm1, vm1 )
  case Abs( x @ Var( name, _ ), s ) =>
    // The bound variable is recorded in the signature as well.
    val ( cm1, vm1 ) = getSignature( s, csig, vsig )
    vm1.get( name ) match {
      case None =>
        ( cm1, vm1 + ( ( name, Set( x.asInstanceOf[Var] ) ) ) )
      case Some( list ) =>
        ( cm1, vm1 + ( ( name, list + x.asInstanceOf[Var] ) ) )
    }
}
}
| gapt/gapt | core/src/main/scala/gapt/expr/formula/fol/hol2fol_heuristics.scala | Scala | gpl-3.0 | 9,322 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.util
import java.net.URL
object ClassLoaders {
  val log = Log(getClass); import log._

  /**
   * Returns the default class loaders to use for loading which is the current threads context class loader
   * and the class loader which loaded scalate-core by default
   */
  def defaultClassLoaders: List[ClassLoader] = {
    List(Thread.currentThread.getContextClassLoader, classOf[Logging].getClassLoader)
  }

  /**
   * Tries to load the named class on the given class loaders and returns the
   * first successfully loaded class, if any.
   */
  def findClass(className: String, classLoaders: Traversable[ClassLoader] = defaultClassLoaders): Option[Class[_]] = {
    // A loader that cannot resolve the class yields None so the next one is consulted.
    def tryLoadClass(classLoader: ClassLoader): Option[Class[_]] = {
      try {
        Some(classLoader.loadClass(className))
      } catch {
        case e: Exception => None
      }
    }
    classLoaders.flatMap(tryLoadClass(_)).headOption
  }

  /**
   * Tries to find the named resource on the given class loaders and returns
   * the first URL found, if any.
   */
  def findResource(name: String, classLoaders: Traversable[ClassLoader] = defaultClassLoaders): Option[URL] = {
    // ClassLoader.getResource returns null for a missing resource; wrap it in
    // Option instead of threading nulls through the collection as before.
    // (The inner helper was also misleadingly named tryLoadClass.)
    def tryGetResource(classLoader: ClassLoader): Option[URL] = {
      try {
        Option(classLoader.getResource(name))
      } catch {
        case e: Exception => None
      }
    }
    classLoaders.flatMap(tryGetResource(_)).headOption
  }

  /**
   * Loads the given named class on the given class loaders or fails with a ClassNotFoundException
   */
  def loadClass(className: String, classLoaders: Traversable[ClassLoader]) = findClass(className, classLoaders) match {
    case Some(c) => c
    case _ => throw new ClassNotFoundException(className + " not found in class loaders: " + classLoaders)
  }

  /**
   * Evaluates the given block using the context class loader; then restores the context class loader to its
   * previous value
   */
  def withContextClassLoader[T](classLoader: ClassLoader)(block: => T): T = {
    def thread = Thread.currentThread
    val old = thread.getContextClassLoader
    try {
      thread.setContextClassLoader(classLoader)
      block
    } finally {
      // Always restore, even if the block throws.
      thread.setContextClassLoader(old)
    }
  }
} | dnatic09/scalate | scalate-util/src/main/scala/org/fusesource/scalate/util/ClassLoaders.scala | Scala | apache-2.0 | 2,915 |
package org.squeryl.test.mutablerelations
import org.squeryl.test.PrimitiveTypeModeForTests._
import org.squeryl._
import org.squeryl.dsl.CompositeKey2
/** Base trait for the test entities: a surrogate Long primary key (0 until persisted). */
trait SchoolDb2Object extends KeyedEntity[Long] {
  val id: Long = 0
}
/** A professor; `courses` is the stateful left-side view of the many-to-many relation. */
class Professor(val lastName: String) extends SchoolDb2Object {
  lazy val courses = SchoolDb2.courseAssignments.leftStateful(this)
}
/** A course, linked to a subject (one-to-many), professors and students (many-to-many). */
class Course(val subjectId: Long) extends SchoolDb2Object {

  // Zero-arg constructor needed by the ORM / tests.
  def this() = this(0)

  // Lets support the case where a course can have more than one professor
  lazy val professors = SchoolDb2.courseAssignments.rightStateful(this)

  lazy val students = SchoolDb2.courseSubscriptions.leftStateful(this)

  lazy val subject = SchoolDb2.subjectToCourses.rightStateful(this)
}
/** A student; `courses` is the stateful right-side view of courseSubscriptions. */
class Student(val firstName: String, val lastName: String) extends SchoolDb2Object {
  lazy val courses = SchoolDb2.courseSubscriptions.rightStateful(this)
}
/** A subject; `courses` is the stateful left-side view of the one-to-many relation. */
class Subject(val name: String) extends SchoolDb2Object {
  lazy val courses = SchoolDb2.subjectToCourses.leftStateful(this)
}
/** Join row between a course and a student; composite primary key (courseId, studentId). */
class CourseSubscription(val courseId: Long, val studentId: Long, val grade: Float) extends KeyedEntity[CompositeKey2[Long,Long]] {
  def id = compositeKey(courseId, studentId)
}
/** Join row between a course and a professor; composite primary key (courseId, professorId). */
class CourseAssignment(val courseId: Long, val professorId: Long) extends KeyedEntity[CompositeKey2[Long,Long]] {
  def id = compositeKey(courseId, professorId)
}
/** Test schema: four tables, two many-to-many relations and one one-to-many relation. */
object SchoolDb2 extends Schema {

  val professors = table[Professor]()

  val students = table[Student]()

  val courses = table[Course]()

  val subjects = table[Subject]()

  // professors <-> courses through CourseAssignment
  val courseAssignments =
    manyToManyRelation(professors, courses).
      via[CourseAssignment]((p,c,a) => (a.professorId === p.id, a.courseId === c.id))

  // courses <-> students through CourseSubscription
  val courseSubscriptions =
    manyToManyRelation(courses, students).
      via[CourseSubscription]((c,s,cs) => (cs.studentId === s.id, c.id === cs.courseId))

  // each course belongs to exactly one subject
  val subjectToCourses =
    oneToManyRelation(subjects, courses).
      via((s,c) => c.subjectId === s.id)

  // the default constraint for all foreign keys in this schema :
  override def applyDefaultForeignKeyPolicy(foreignKeyDeclaration: ForeignKeyDeclaration) =
    foreignKeyDeclaration.constrainReference()

  override def drop = {
    // Release session resources before dropping the schema.
    Session.cleanupResources
    super.drop
  }
}
import org.squeryl.framework._
/**
 * Tests for the stateful (mutable) relation views of SchoolDb2.
 * NOTE(review): the class name "Metable" looks like a typo for "Mutable" —
 * renaming would change the public API, so it is only flagged here.
 */
abstract class SchoolDb2MetableRelations extends SchemaTester with QueryTester with RunTestsInsideTransaction {
  self: DBConnector =>

  val schema = SchoolDb2

  // Fresh fixture data for each test: professors, subjects and two courses.
  def instance = new {
    import schema._
    val professeurTournesol = professors.insert(new Professor("Tournesol"))
    val madProfessor = professors.insert(new Professor("Mad Professor"))

    val philosophy = subjects.insert(new Subject("Philosophy"))
    val chemistry = subjects.insert(new Subject("Chemistry"))
    val physics = subjects.insert(new Subject("Physic"))
    val computationTheory = subjects.insert(new Subject("Computation Theory"))

    val chemistryCourse = courses.insert(new Course(chemistry.id))
    val physicsCourse = courses.insert(new Course(physics.id))
  }

  test("Many2ManyAssociationFromLeftSide"){
    import SchoolDb2._
    val i = instance
    import i._

    courseAssignments.Count.toLong shouldBe 0

    professeurTournesol.courses.associate(physicsCourse)

    val c1 = professeurTournesol.courses.head : Course

    c1.id shouldBe physicsCourse.id

    val ca = professeurTournesol.courses.associations.head : CourseAssignment

    ca.courseId shouldBe physicsCourse.id

    professeurTournesol.courses.dissociateAll shouldBe 1

    professeurTournesol.courses.dissociateAll shouldBe 0

    courseAssignments.Count.toLong shouldBe 0
  }

  test("Many2ManyAssociationFromRightSide"){
    import SchoolDb2._
    val i = instance
    import i._

    courseAssignments.Count.toLong shouldBe 0

    physicsCourse.professors.associate(professeurTournesol)

    val profT = physicsCourse.professors.head : Professor

    professeurTournesol.lastName shouldBe profT.lastName

    // the left-side stateful view must be refreshed to see the new association
    professeurTournesol.courses.refresh

    val ca = professeurTournesol.courses.associations.head : CourseAssignment

    ca.courseId shouldBe physicsCourse.id

    physicsCourse.professors.dissociateAll shouldBe 1

    physicsCourse.professors.dissociateAll shouldBe 0

    courseAssignments.Count.toLong shouldBe 0

    // test dissociate :

    physicsCourse.professors.associate(professeurTournesol)

    physicsCourse.professors.head : Professor

    professeurTournesol.courses.refresh

    physicsCourse.professors.dissociate(professeurTournesol) shouldBe true

    physicsCourse.professors.dissociate(professeurTournesol) shouldBe false
  }

  test("OneToMany"){
    import SchoolDb2._
    val i = instance
    import i._

    val philosophyCourse10AMWednesday = new Course
    val philosophyCourse2PMWednesday = new Course
    val philosophyCourse3PMFriday = new Course

    philosophy.courses.associate(philosophyCourse10AMWednesday)
    philosophy.courses.associate(philosophyCourse2PMWednesday)
    philosophy.courses.associate(philosophyCourse3PMFriday)

    chemistry.courses.associate(new Course)

    val s = from(subjects)(s0 =>
      where(s0.id notIn(Seq(computationTheory.id, physics.id)))
      select(s0)
    )

    // Count courses per remaining subject (chemistry already had one course
    // from the fixture, hence 2 in total for it).
    var cnt = 0
    for(s0 <- s ) {
      var sCnt = 0
      for(c <- s0.courses) {
        cnt += 1
        sCnt += 1
      }
      if(s0.id == philosophy.id)
        3 shouldBe sCnt
      else if(s0.id == chemistry.id)
        2 shouldBe sCnt
      else
        org.squeryl.internals.Utils.throwError("unknown subject : " + s0)
    }

    5 shouldBe cnt

    philosophy.courses.map(_.id).toSet shouldBe Set(philosophyCourse10AMWednesday.id, philosophyCourse2PMWednesday.id, philosophyCourse3PMFriday.id)

    // no need to refresh :
    //philosophyCourse2PMWednesday.subject.refresh
    // since the relation is lazy and we haven't touched it yet...
    philosophyCourse2PMWednesday.subject.one.get.name shouldBe philosophy.name

    // verify that a reassociation does an update and not an insert :
    val pk1 = philosophyCourse3PMFriday.id
    computationTheory.courses.associate(philosophyCourse3PMFriday)
    pk1 shouldBe philosophyCourse3PMFriday.id

    philosophy.courses.refresh

    // verify that the reassociation worked, which means that
    // 1) : the set of philosophy.courses was reduced properly
    philosophy.courses.map(_.id).toSet shouldBe Set(philosophyCourse10AMWednesday.id, philosophyCourse2PMWednesday.id)

    // 2) philosophyCourse3PMFriday.subject points to the proper subject
    computationTheory.name shouldBe philosophyCourse3PMFriday.subject.one.get.name
  }
}
| xuwei-k/Squeryl | src/test/scala/org/squeryl/test/mutablerelations/SchoolDbMutableRelations.scala | Scala | apache-2.0 | 6,838 |
package selects
import japgolly.scalajs.react.vdom.prefix_<^.{<, _}
import japgolly.scalajs.react.{BackendScope, Callback, ReactComponentB, _}
import shared._
/** A scalajs-react dropdown for choosing an entity type from a fixed list. */
object EntitySelect {

  // The selectable entity names; duplicated entries ("Function", "Interface")
  // appear as given in the original list — TODO confirm whether intentional.
  val entityList = List("Item", "Label", "Meta", "Section", "Term", "Actor", "App", "Component", "Domain", "Module", "Product", "Release",
    "Risk", "Service", "Stakeholder", "System", "User", "Class", "Data", "Input", "Member", "Output", "Relationship", "Design", "Screen", "MockUp",
    "Function", "Interface", "Epic", "Feature", "Goal", "Idea", "Issue", "Req", "Ticket", "WorkPackage", "Breakpoint", "Barrier", "Quality", "Target",
    "Function", "Interface", "Scenario", "Task", "Test", "Story", "UseCase", "VariantPoint", "Variant")

  // Visual style of the <select>; colours differ for model vs. non-model values.
  def selectStyle(P: Props) = Seq(
    ^.className := "form-control pull-right",
    ^.width := "155px",
    ^.color := {
      if (P.isModelValue) "black" else "#047BEA"
    },
    ^.borderBottomLeftRadius := "5px",
    ^.borderTopLeftRadius := "5px",
    ^.background := {
      if (P.isModelValue) "#cedbe7" else "white"
    },
    ^.textAlign.center,
    ^.textAlignLast.center
  )

  // value: the externally supplied selection; setNewEntity: callback to the owner.
  case class Props(value: String, setNewEntity: Option[Entity] => Callback, isModelValue: Boolean)

  // Locally tracked selection; empty until the user changes the select.
  case class State(value: String)

  class Backend($: BackendScope[Props, State]) {

    def render(P: Props, S: State) =
      <.select(
        selectStyle(P),
        ^.value := {
          // fall back to the prop value until the user has made a choice
          if (S.value.isEmpty) P.value else S.value
        },
        ^.onChange ==> onChange(P, S)
      )(
        entityList.map(x => <.option(^.font := "bold", x))
      )

    def onChange(P: Props, S: State)(e: ReactEventI): Callback = {
      // NOTE(review): preventDefault() runs eagerly when the handler is invoked,
      // before the returned Callback executes — consider e.preventDefaultCB;
      // confirm against the scalajs-react version in use.
      e.preventDefault()
      val newEntity = e.target.value
      P.setNewEntity(Some(Entity(newEntity))) >> $.setState(s = S.copy(value = newEntity))
    }
  }

  val component = ReactComponentB[Props]("EntitySelect")
    .initialState(State(value = ""))
    .renderBackend[Backend]
    .build

  def apply(value: String, setNewEntity: Option[Entity] => Callback, isModelValue: Boolean) = component.set()(Props(value, setNewEntity, isModelValue))
}
| reqT/reqT-webapp | client/src/main/scala/selects/EntitySelect.scala | Scala | apache-2.0 | 2,098 |
/** Copyright 2009 Steve Jenson under the Apache 2.0 License */
package com.saladwithsteve.namey
import com.twitter.commons.Stats.Counter
import com.twitter.commons.Stats.Timing
import java.util.concurrent.atomic.AtomicInteger
/** Shared runtime counters/timers for the name server; updated elsewhere in the server code. */
object NameServerStats {
  // Presumably counts bytes written to clients — confirm against the session code.
  val bytesWritten = new Counter
  // Sessions opened since startup (AtomicInteger: read-modify-write from multiple threads).
  val totalSessions = new AtomicInteger(0)
  val closedSessions = new Counter
  val sessionErrors = new Counter
  // Timing statistics for name resolutions.
  val nameResolutionLatency = new Timing
}
| stevej/namey | src/main/scala/com/saladwithsteve/namey/NameServerStats.scala | Scala | apache-2.0 | 442 |
package org.eso.ias.monitor
import java.util.concurrent.{Executors, ScheduledExecutorService, ThreadFactory, TimeUnit}
import com.typesafe.scalalogging.Logger
import org.eso.ias.heartbeat.HeartbeatProducerType._
import org.eso.ias.heartbeat.consumer.{HbKafkaConsumer, HbListener, HbMsg}
import org.eso.ias.logging.IASLogger
import org.eso.ias.types.Alarm
import scala.collection.mutable.{Map => MutableMap}
/**
* Monitors the HBs of
* - plugins
* - converters
* - clients
* - sink connectors (Kafka connectors does not publish HBs)
* - supervisor
*
* [[HbMonitor()]] gets the HBs using a HB consumer and checks
* the time when they have been produced and received against the passed threshold to
* generate alarms.
*
* A different alarm is generated depending on the type of the missing HB: the IDs of the
* missing HBs are passed ass properties
*
* When a HB is received, an entry is updated in the relative map.
* A thread runs periodically to check if all the HBs have been received and,
* if any of them is missing, sets an alarm.
* The entry in the map is a boolean used as a watch dog: the thread periodically resets the watch dog
* and checks if some of the boolean have not been set.
*
* Notification of alarms is done indirectly by setting the state of the alarm in the [[MonitorAlarm]].
*
* TODO: at the present, the copnverters do not publish a fullRunnibngId but their plain ID
* this needs to be changed when [[https://github.com/IntegratedAlarmSystem-Group/ias/issues/145 #145]]
* will be fixed
*
* @param hbConsumer The consumer of HBs
* @param pluginIds The IDs of the plugins whose IDs must be monitored
* @param converterIds The IDs of the converters whose IDs must be monitored
* @param clientIds The IDs of the clients whose IDs must be monitored
* @param sinkIds The IDs of the sink connectors whose IDs must be monitored
* Kafka sink connectors does not publish HBs and must be monitored elsewhere;
* however the IAs has sink conenctors like the email sender that publishes HBs
* @param supervisorIds The IDs of the supervisors whose IDs must be monitored
* @param coreToolIds The IDs of the IAS core tools whose IDs must be monitored
* @param threshold An alarm is emitted if the HB has not been received before the threshold elapses
* (in seconds)
* @param pluginsAlarmPriority the priority of the alarm for faulty plugins
* @param convertersAlarmPriority the priority of the alarm for faulty converters
* @param clientsAlarmPriority the priority of the alarm for faulty clients
* @param sinksAlarmPriority the priority of the alarm for faulty sink connectors
* @param supervisorsAlarmPriority the priority of the alarm for faulty supervisors
*/
class HbMonitor(
  val hbConsumer: HbKafkaConsumer,
  val pluginIds: Set[String],
  val converterIds: Set[String],
  val clientIds: Set[String],
  val sinkIds: Set[String],
  val supervisorIds: Set[String],
  val coreToolIds: Set[String],
  val threshold: Long,
  val pluginsAlarmPriority: Alarm=Alarm.getSetDefault,
  val convertersAlarmPriority: Alarm=Alarm.getSetDefault,
  val clientsAlarmPriority: Alarm=Alarm.getSetDefault,
  val sinksAlarmPriority: Alarm=Alarm.getSetDefault,
  val supervisorsAlarmPriority: Alarm=Alarm.getSetDefault,
  val coreToolAlarmPriority: Alarm = Alarm.getSetDefault) extends HbListener with Runnable {

  require(Option(hbConsumer).isDefined)
  require(threshold>0,"Invalid negative threshold")
  require(Option(pluginIds).isDefined)
  require(Option(converterIds).isDefined)
  require(Option(clientIds).isDefined)
  require(Option(sinkIds).isDefined)
  require(Option(supervisorIds).isDefined)
  require(Option(coreToolIds).isDefined)

  // Each map entry is a watchdog flag: set to true by hbReceived when a HB
  // arrives, reset to false by the periodic thread after each check.

  /** The map to store the last HB of plugins */
  private val pluginsHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  pluginIds.foreach(pluginsHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} plugins to monitor: {}",pluginsHbMsgs.keySet.size,pluginsHbMsgs.keySet.mkString(","))

  /** The map to store the last HB of converters */
  private val convertersHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  converterIds.foreach(convertersHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} converters to monitor: {}",convertersHbMsgs.keySet.size,convertersHbMsgs.keySet.mkString(","))

  /** The map to store the last HB of clients */
  private val clientsHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  clientIds.foreach(clientsHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} clients to monitor: {}",clientsHbMsgs.keySet.size,clientsHbMsgs.keySet.mkString(","))

  /** The map to store the last HB of sink connectors */
  private val sinksHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  sinkIds.foreach(sinksHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} sink clients to monitor: {}",sinksHbMsgs.keySet.size,sinksHbMsgs.keySet.mkString(","))

  /** The map to store the last HB of supervisors */
  private val supervisorsHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  supervisorIds.foreach(supervisorsHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} supervisors to monitor: {}",supervisorsHbMsgs.keySet.size,supervisorsHbMsgs.keySet.mkString(","))

  /** The map to store the last HB of IAS core tools */
  private val coreToolsHbMsgs: MutableMap[String, Boolean] = MutableMap.empty
  coreToolIds.foreach(coreToolsHbMsgs.put(_,false))
  HbMonitor.logger.debug("{} IAS core tools to monitor: {}",coreToolsHbMsgs.keySet.size,coreToolsHbMsgs.keySet.mkString(","))

  /** A list with all the maps of HB */
  private val hbMaps= List(pluginsHbMsgs,convertersHbMsgs,sinksHbMsgs,clientsHbMsgs,supervisorsHbMsgs,coreToolsHbMsgs)

  /** The factory to generate the periodic thread */
  private val factory: ThreadFactory = new ThreadFactory {
    override def newThread(runnable: Runnable): Thread = {
      val t: Thread = new Thread(runnable,"HbMonitor-Thread")
      // daemon: must not keep the JVM alive on shutdown
      t.setDaemon(true)
      t
    }
  }

  /** The executor to periodically run the thread */
  private val schedExecutorSvc: ScheduledExecutorService = Executors.newSingleThreadScheduledExecutor(factory)

  /** Starts consuming HBs and schedules the periodic watchdog check every `threshold` seconds. */
  def start(): Unit = {
    HbMonitor.logger.debug("Starting up")
    hbConsumer.addListener(this)
    hbConsumer.start()
    HbMonitor.logger.debug("HB consumer started")
    schedExecutorSvc.scheduleWithFixedDelay(this,threshold,threshold,TimeUnit.SECONDS)
    HbMonitor.logger.debug("Thread scheduled every {} seconds",threshold)
    HbMonitor.logger.info("Started")
  }

  /** Stops the HB consumer and the periodic check. */
  def shutdown(): Unit = {
    HbMonitor.logger.debug("Shutting down")
    hbConsumer.shutdown()
    HbMonitor.logger.debug("HB consumer shut down")
    schedExecutorSvc.shutdown()
    HbMonitor.logger.info("Shut down")
  }

  /**
   * An heartbeat has been consumed from the HB topic
   *
   * Synchronized with run() since both touch the watchdog maps.
   *
   * @param hbMsg The HB consumed from the HB topic
   */
  override def hbReceived(hbMsg: HbMsg): Unit = synchronized {
    HbMonitor.logger.debug("HB received: {} of type {}",hbMsg.hb.stringRepr,hbMsg.hb.hbType)
    hbMsg.hb.hbType match {
      case PLUGIN => pluginsHbMsgs.put(hbMsg.hb.name,true)
      case SUPERVISOR => supervisorsHbMsgs.put(hbMsg.hb.name,true)
      case SINK => sinksHbMsgs.put(hbMsg.hb.name,true)
      case CONVERTER => convertersHbMsgs.put(hbMsg.hb.name,true)
      case CLIENT => clientsHbMsgs.put(hbMsg.hb.name,true)
      case CORETOOL => coreToolsHbMsgs.put(hbMsg.hb.name,true)
      case idType => HbMonitor.logger.warn("Unknown HB type to monitor: {} from fullRunningId {}",idType,hbMsg.hb)
    }
  }

  /** The thread that periodically checks for missing HBs */
  override def run(): Unit = synchronized {
    HbMonitor.logger.debug("Checking reception of HBs")

    // Return the IDs of the alarms in the map that have not been updated
    // These IDs are set as properties in the alarms
    def faultyIds(m: MutableMap[String, Boolean]): List[String] = m.filterKeys(k => !m(k)).keys.toList

    // Update the passed alarm
    def updateAlarm(alarm: MonitorAlarm, faultyIds: List[String], priority: Alarm=Alarm.getSetDefault): Unit = {
      HbMonitor.logger.debug("Updating alarm {} with faulty ids {} and priority {}",
        alarm.id,
        faultyIds.mkString(","),
        priority)
      if (faultyIds.isEmpty) alarm.set(Alarm.cleared(), faultyIds.mkString(","))
      else alarm.set(priority, faultyIds.mkString(","))
    }

    updateAlarm(MonitorAlarm.PLUGIN_DEAD,faultyIds(pluginsHbMsgs),pluginsAlarmPriority)
    updateAlarm(MonitorAlarm.SUPERVISOR_DEAD,faultyIds(supervisorsHbMsgs),supervisorsAlarmPriority)
    updateAlarm(MonitorAlarm.CONVERTER_DEAD,faultyIds(convertersHbMsgs),convertersAlarmPriority)
    updateAlarm(MonitorAlarm.SINK_DEAD,faultyIds(sinksHbMsgs),sinksAlarmPriority)
    updateAlarm(MonitorAlarm.CLIENT_DEAD,faultyIds(clientsHbMsgs),clientsAlarmPriority)
    updateAlarm(MonitorAlarm.CORETOOL_DEAD,faultyIds(coreToolsHbMsgs),coreToolAlarmPriority)

    // reset the maps to be ready for the next iteration
    hbMaps.foreach(m => m.keySet.foreach(k => m(k)=false))
  }
}
/** Companion object */
object HbMonitor {
  /** The logger, shared by the class and its companion */
  val logger: Logger = IASLogger.getLogger(HbMonitor.getClass)
}
| IntegratedAlarmSystem-Group/ias | Monitor/src/main/scala/org/eso/ias/monitor/HbMonitor.scala | Scala | lgpl-3.0 | 9,445 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.calculations.ProfitOrLossFinancialYearCalculator
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
case class AC436(value: Option[Int]) extends CtBoxIdentifier(name = "Previous Profit or loss") with CtOptionalInteger
/** Derives AC436 from the retriever's boxes via ProfitOrLossFinancialYearCalculator. */
object AC436 extends ProfitOrLossFinancialYearCalculator {
  def calculate(boxRetriever: Frs105AccountsBoxRetriever): AC436 = {
    import boxRetriever._
    // The inputs ac13..ac35 are fed to the shared calculator — see
    // ProfitOrLossFinancialYearCalculator.calculateAC436 for the formula.
    calculateAC436(ac13, ac406, ac411, ac416, ac421, ac426, ac35)
  }
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC436.scala | Scala | apache-2.0 | 1,305 |
package games.math
import java.nio.FloatBuffer
/**
* Ported from LWJGL source code
*/
/**
 * A mutable 3x3 float matrix (ported from LWJGL).
 *
 * Storage is column-major: in a field name `mCR`, C is the column index and
 * R the row index (see apply/update). A default-constructed matrix is the
 * identity (diagonal fields initialised to 1).
 */
class Matrix3f extends Matrix {
  private[math] var m00, m11, m22: Float = 1
  private[math] var m01, m02, m10, m12, m20, m21: Float = 0

  /** Builds the matrix from row-major scalar arguments aRC (row R, column C). */
  def this(a00: Float, a01: Float, a02: Float, a10: Float, a11: Float, a12: Float, a20: Float, a21: Float, a22: Float) = {
    this()
    // Internally stored as Column-major
    m00 = a00
    m10 = a01
    m20 = a02

    m01 = a10
    m11 = a11
    m21 = a12

    m02 = a20
    m12 = a21
    m22 = a22
  }

  /** Builds the matrix from its three columns. */
  def this(col0: Vector3f, col1: Vector3f, col2: Vector3f) = {
    this()
    m00 = col0.x
    m01 = col0.y
    m02 = col0.z

    m10 = col1.x
    m11 = col1.y
    m12 = col1.z

    m20 = col2.x
    m21 = col2.y
    m22 = col2.z
  }

  /** Copy constructor. */
  def this(m: Matrix3f) = {
    this()
    Matrix3f.set(m, this)
  }

  /** Returns the element at (row, col); throws IndexOutOfBoundsException outside 0..2. */
  def apply(row: Int, col: Int): Float = (row, col) match {
    case (0, 0) => m00
    case (0, 1) => m10
    case (0, 2) => m20
    case (1, 0) => m01
    case (1, 1) => m11
    case (1, 2) => m21
    case (2, 0) => m02
    case (2, 1) => m12
    case (2, 2) => m22
    case _ => throw new IndexOutOfBoundsException
  }

  /** Sets the element at (row, col); throws IndexOutOfBoundsException outside 0..2. */
  def update(row: Int, col: Int, v: Float): Unit = (row, col) match {
    case (0, 0) => m00 = v
    case (0, 1) => m10 = v
    case (0, 2) => m20 = v
    case (1, 0) => m01 = v
    case (1, 1) => m11 = v
    case (1, 2) => m21 = v
    case (2, 0) => m02 = v
    case (2, 1) => m12 = v
    case (2, 2) => m22 = v
    case _ => throw new IndexOutOfBoundsException
  }

  /** Loads 9 floats from the buffer in the given element order; returns this. */
  def load(src: FloatBuffer, order: MajorOrder): Matrix3f = order match {
    case RowMajor =>
      m00 = src.get()
      m10 = src.get()
      m20 = src.get()
      m01 = src.get()
      m11 = src.get()
      m21 = src.get()
      m02 = src.get()
      m12 = src.get()
      m22 = src.get()
      this
    case ColumnMajor =>
      m00 = src.get()
      m01 = src.get()
      m02 = src.get()
      m10 = src.get()
      m11 = src.get()
      m12 = src.get()
      m20 = src.get()
      m21 = src.get()
      m22 = src.get()
      this
  }

  /** Stores the 9 floats into the buffer in the given element order; returns this. */
  def store(dst: FloatBuffer, order: MajorOrder): Matrix3f = order match {
    case RowMajor =>
      dst.put(m00)
      dst.put(m10)
      dst.put(m20)
      dst.put(m01)
      dst.put(m11)
      dst.put(m21)
      dst.put(m02)
      dst.put(m12)
      dst.put(m22)
      this
    case ColumnMajor =>
      dst.put(m00)
      dst.put(m01)
      dst.put(m02)
      dst.put(m10)
      dst.put(m11)
      dst.put(m12)
      dst.put(m20)
      dst.put(m21)
      dst.put(m22)
      this
  }

  /** Resets this matrix to the identity; returns this. */
  def setIdentity(): Matrix3f = {
    m00 = 1
    m11 = 1
    m22 = 1

    m01 = 0
    m02 = 0
    m10 = 0
    m12 = 0
    m20 = 0
    m21 = 0
    this
  }

  /** Sets every element to zero; returns this. */
  def setZero(): Matrix3f = {
    m00 = 0
    m01 = 0
    m02 = 0
    m10 = 0
    m11 = 0
    m12 = 0
    m20 = 0
    m21 = 0
    m22 = 0
    this
  }

  /** Returns a copy of the requested column. */
  def column(colIdx: Int): Vector3f = {
    val ret = new Vector3f
    Matrix3f.getColumn(this, colIdx, ret)
    ret
  }

  /** Returns a copy of the requested row. */
  def row(rowIdx: Int): Vector3f = {
    val ret = new Vector3f
    Matrix3f.getRow(this, rowIdx, ret)
    ret
  }

  /** Inverts this matrix in place (no-op if singular); returns this. */
  def invert(): Matrix3f = {
    Matrix3f.invert(this, this)
    this
  }

  /** Returns the inverse as a new matrix; this is unchanged. */
  def invertedCopy(): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.invert(this, ret)
    ret
  }

  /** Negates this matrix in place; returns this. */
  def negate(): Matrix3f = {
    Matrix3f.negate(this, this)
    this
  }

  /** Returns the negation as a new matrix; this is unchanged. */
  def negatedCopy(): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.negate(this, ret)
    ret
  }

  /** Transposes this matrix in place; returns this. */
  def transpose(): Matrix3f = {
    Matrix3f.transpose(this, this)
    this
  }

  /** Returns the transpose as a new matrix; this is unchanged. */
  def transposedCopy(): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.transpose(this, ret)
    ret
  }

  /** Returns the determinant (cofactor expansion along the first row). */
  def determinant(): Float = {
    m00 * (m11 * m22 - m12 * m21) + m01 * (m12 * m20 - m10 * m22) + m02 * (m10 * m21 - m11 * m20)
  }

  /** Returns a deep copy of this matrix. */
  def copy(): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.set(this, ret)
    ret
  }

  def +(m: Matrix3f): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.add(this, m, ret)
    ret
  }

  def +=(m: Matrix3f): Unit = {
    Matrix3f.add(this, m, this)
  }

  def -(m: Matrix3f): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.sub(this, m, ret)
    ret
  }

  def -=(m: Matrix3f): Unit = {
    Matrix3f.sub(this, m, this)
  }

  def *(m: Matrix3f): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.mult(this, m, ret)
    ret
  }

  def *=(m: Matrix3f): Unit = {
    Matrix3f.mult(this, m, this)
  }

  def *(v: Float): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.mult(this, v, ret)
    ret
  }

  def *=(v: Float): Unit = {
    Matrix3f.mult(this, v, this)
  }

  def /(v: Float): Matrix3f = {
    val ret = new Matrix3f
    Matrix3f.div(this, v, ret)
    ret
  }

  def /=(v: Float): Unit = {
    Matrix3f.div(this, v, this)
  }

  /** Matrix-vector product. */
  def *(v: Vector3f): Vector3f = {
    val ret = new Vector3f
    Matrix3f.mult(this, v, ret)
    ret
  }

  /** Alias of `*(v: Vector3f)`. */
  def transform(v: Vector3f): Vector3f = {
    val ret = new Vector3f
    Matrix3f.mult(this, v, ret)
    ret
  }

  /** Extracts the upper-left 2x2 submatrix. */
  def toCartesian(): Matrix2f = {
    val ret = new Matrix2f
    Matrix3f.setCartesian(this, ret)
    ret
  }

  /** Embeds this matrix into a 4x4 homogeneous matrix. */
  def toHomogeneous(): Matrix4f = {
    val ret = new Matrix4f
    Matrix3f.setHomogeneous(this, ret)
    ret
  }

  override def toString: String = {
    var sb = ""
    sb += m00 + " " + m10 + " " + m20 + "\\n"
    sb += m01 + " " + m11 + " " + m21 + "\\n"
    sb += m02 + " " + m12 + " " + m22 + "\\n"
    sb
  }

  /**
   * Structural equality on all nine components.
   *
   * Fixed: the previous implementation's `if (obj == null) false` and
   * `if (!obj.isInstanceOf[Matrix3f]) false` were discarded expressions, not
   * early returns, so equals(null) threw a NullPointerException and comparing
   * against a non-Matrix3f threw a ClassCastException. A pattern match
   * returns false in both cases, as the Object.equals contract requires.
   */
  override def equals(obj: Any): Boolean = obj match {
    case o: Matrix3f =>
      m00 == o.m00 &&
        m01 == o.m01 &&
        m02 == o.m02 &&
        m10 == o.m10 &&
        m11 == o.m11 &&
        m12 == o.m12 &&
        m20 == o.m20 &&
        m21 == o.m21 &&
        m22 == o.m22
    case _ => false
  }

  override def hashCode(): Int = {
    m00.hashCode ^ m01.hashCode ^ m02.hashCode ^
      m10.hashCode ^ m11.hashCode ^ m12.hashCode ^
      m20.hashCode ^ m21.hashCode ^ m22.hashCode
  }
}
object Matrix3f {
/** Copies every component of src into dst. */
def set(src: Matrix3f, dst: Matrix3f): Unit = {
  dst.m00 = src.m00
  dst.m01 = src.m01
  dst.m02 = src.m02

  dst.m10 = src.m10
  dst.m11 = src.m11
  dst.m12 = src.m12

  dst.m20 = src.m20
  dst.m21 = src.m21
  dst.m22 = src.m22
}
/** Writes column colIdx of src into dst; throws IndexOutOfBoundsException outside 0..2. */
def getColumn(src: Matrix3f, colIdx: Int, dst: Vector3f): Unit = colIdx match {
  case 0 =>
    dst.x = src.m00
    dst.y = src.m01
    dst.z = src.m02
  case 1 =>
    dst.x = src.m10
    dst.y = src.m11
    dst.z = src.m12
  case 2 =>
    dst.x = src.m20
    dst.y = src.m21
    dst.z = src.m22
  case _ => throw new IndexOutOfBoundsException
}
/** Writes row rowIdx of src into dst; throws IndexOutOfBoundsException outside 0..2. */
def getRow(src: Matrix3f, rowIdx: Int, dst: Vector3f): Unit = rowIdx match {
  case 0 =>
    dst.x = src.m00
    dst.y = src.m10
    dst.z = src.m20
  case 1 =>
    dst.x = src.m01
    dst.y = src.m11
    dst.z = src.m21
  case 2 =>
    dst.x = src.m02
    dst.y = src.m12
    dst.z = src.m22
  case _ => throw new IndexOutOfBoundsException
}
/** Writes src into column colIdx of dst; throws IndexOutOfBoundsException outside 0..2. */
def setColumn(src: Vector3f, dst: Matrix3f, colIdx: Int): Unit = colIdx match {
  case 0 =>
    dst.m00 = src.x
    dst.m01 = src.y
    dst.m02 = src.z
  case 1 =>
    dst.m10 = src.x
    dst.m11 = src.y
    dst.m12 = src.z
  case 2 =>
    dst.m20 = src.x
    dst.m21 = src.y
    dst.m22 = src.z
  case _ => throw new IndexOutOfBoundsException
}
def setRow(src: Vector3f, dst: Matrix3f, rowIdx: Int): Unit = rowIdx match {
case 0 =>
dst.m00 = src.x
dst.m10 = src.y
dst.m20 = src.z
case 1 =>
dst.m01 = src.x
dst.m11 = src.y
dst.m21 = src.z
case 2 =>
dst.m02 = src.x
dst.m12 = src.y
dst.m22 = src.z
case _ => throw new IndexOutOfBoundsException
}
def setCartesian(src: Matrix3f, dst: Matrix2f): Unit = {
dst.m00 = src.m00
dst.m01 = src.m01
dst.m10 = src.m10
dst.m11 = src.m11
}
def setHomogeneous(src: Matrix3f, dst: Matrix4f): Unit = {
dst.m00 = src.m00
dst.m01 = src.m01
dst.m02 = src.m02
dst.m03 = 0f
dst.m10 = src.m10
dst.m11 = src.m11
dst.m12 = src.m12
dst.m13 = 0f
dst.m20 = src.m20
dst.m21 = src.m21
dst.m22 = src.m22
dst.m23 = 0f
dst.m30 = 0f
dst.m31 = 0f
dst.m32 = 0f
dst.m33 = 1f
}
def negate(src: Matrix3f, dst: Matrix3f): Unit = {
dst.m00 = -src.m00
dst.m01 = -src.m01
dst.m02 = -src.m02
dst.m10 = -src.m10
dst.m11 = -src.m11
dst.m12 = -src.m12
dst.m20 = -src.m20
dst.m21 = -src.m21
dst.m22 = -src.m22
}
def invert(src: Matrix3f, dst: Matrix3f): Unit = {
val determinant = src.determinant
if (determinant != 0) {
val determinant_inv = 1f / determinant
val t00 = src.m11 * src.m22 - src.m12 * src.m21
val t01 = -src.m10 * src.m22 + src.m12 * src.m20
val t02 = src.m10 * src.m21 - src.m11 * src.m20
val t10 = -src.m01 * src.m22 + src.m02 * src.m21
val t11 = src.m00 * src.m22 - src.m02 * src.m20
val t12 = -src.m00 * src.m21 + src.m01 * src.m20
val t20 = src.m01 * src.m12 - src.m02 * src.m11
val t21 = -src.m00 * src.m12 + src.m02 * src.m10
val t22 = src.m00 * src.m11 - src.m01 * src.m10
dst.m00 = t00 * determinant_inv
dst.m11 = t11 * determinant_inv
dst.m22 = t22 * determinant_inv
dst.m01 = t10 * determinant_inv
dst.m10 = t01 * determinant_inv
dst.m20 = t02 * determinant_inv
dst.m02 = t20 * determinant_inv
dst.m12 = t21 * determinant_inv
dst.m21 = t12 * determinant_inv
}
}
def transpose(src: Matrix3f, dst: Matrix3f): Unit = {
val m00 = src.m00
val m01 = src.m10
val m02 = src.m20
val m10 = src.m01
val m11 = src.m11
val m12 = src.m21
val m20 = src.m02
val m21 = src.m12
val m22 = src.m22
dst.m00 = m00
dst.m01 = m01
dst.m02 = m02
dst.m10 = m10
dst.m11 = m11
dst.m12 = m12
dst.m20 = m20
dst.m21 = m21
dst.m22 = m22
}
def add(left: Matrix3f, right: Matrix3f, dst: Matrix3f): Unit = {
dst.m00 = left.m00 + right.m00
dst.m01 = left.m01 + right.m01
dst.m02 = left.m02 + right.m02
dst.m10 = left.m10 + right.m10
dst.m11 = left.m11 + right.m11
dst.m12 = left.m12 + right.m12
dst.m20 = left.m20 + right.m20
dst.m21 = left.m21 + right.m21
dst.m22 = left.m22 + right.m22
}
def sub(left: Matrix3f, right: Matrix3f, dst: Matrix3f): Unit = {
dst.m00 = left.m00 - right.m00
dst.m01 = left.m01 - right.m01
dst.m02 = left.m02 - right.m02
dst.m10 = left.m10 - right.m10
dst.m11 = left.m11 - right.m11
dst.m12 = left.m12 - right.m12
dst.m20 = left.m20 - right.m20
dst.m21 = left.m21 - right.m21
dst.m22 = left.m22 - right.m22
}
def mult(left: Matrix3f, right: Matrix3f, dst: Matrix3f): Unit = {
val m00 = left.m00 * right.m00 + left.m10 * right.m01 + left.m20 * right.m02
val m01 = left.m01 * right.m00 + left.m11 * right.m01 + left.m21 * right.m02
val m02 = left.m02 * right.m00 + left.m12 * right.m01 + left.m22 * right.m02
val m10 = left.m00 * right.m10 + left.m10 * right.m11 + left.m20 * right.m12
val m11 = left.m01 * right.m10 + left.m11 * right.m11 + left.m21 * right.m12
val m12 = left.m02 * right.m10 + left.m12 * right.m11 + left.m22 * right.m12
val m20 = left.m00 * right.m20 + left.m10 * right.m21 + left.m20 * right.m22
val m21 = left.m01 * right.m20 + left.m11 * right.m21 + left.m21 * right.m22
val m22 = left.m02 * right.m20 + left.m12 * right.m21 + left.m22 * right.m22
dst.m00 = m00
dst.m01 = m01
dst.m02 = m02
dst.m10 = m10
dst.m11 = m11
dst.m12 = m12
dst.m20 = m20
dst.m21 = m21
dst.m22 = m22
}
def mult(left: Matrix3f, right: Vector3f, dst: Vector3f): Unit = {
val x = left.m00 * right.x + left.m10 * right.y + left.m20 * right.z
val y = left.m01 * right.x + left.m11 * right.y + left.m21 * right.z
val z = left.m02 * right.x + left.m12 * right.y + left.m22 * right.z
dst.x = x
dst.y = y
dst.z = z
}
def mult(left: Matrix3f, right: Float, dst: Matrix3f): Unit = {
dst.m00 = left.m00 * right
dst.m01 = left.m01 * right
dst.m02 = left.m02 * right
dst.m10 = left.m10 * right
dst.m11 = left.m11 * right
dst.m12 = left.m12 * right
dst.m20 = left.m20 * right
dst.m21 = left.m21 * right
dst.m22 = left.m22 * right
}
def div(left: Matrix3f, right: Float, dst: Matrix3f): Unit = {
dst.m00 = left.m00 / right
dst.m01 = left.m01 / right
dst.m02 = left.m02 / right
dst.m10 = left.m10 / right
dst.m11 = left.m11 / right
dst.m12 = left.m12 / right
dst.m20 = left.m20 / right
dst.m21 = left.m21 / right
dst.m22 = left.m22 / right
}
/**
* Generates the homogeneous rotation matrix for a given angle (in degrees) around the origin
*/
def rotate2D(angle: Float): Matrix3f = {
val ret = new Matrix3f
setRotate2D(angle, ret)
ret
}
def setRotate2D(angle: Float, dst: Matrix3f): Unit = {
val radAngle = Math.toRadians(angle)
val c = Math.cos(radAngle).toFloat
val s = Math.sin(radAngle).toFloat
dst.m00 = c
dst.m10 = -s
dst.m20 = 0f
dst.m01 = s
dst.m11 = c
dst.m21 = 0f
dst.m02 = 0f
dst.m12 = 0f
dst.m22 = 1f
}
/**
* Generates the homogeneous translation matrix for a given translation vector
*/
def translate2D(mov: Vector2f): Matrix3f = {
val ret = new Matrix3f
setTranslate2D(mov, ret)
ret
}
def setTranslate2D(mov: Vector2f, dst: Matrix3f): Unit = {
dst.m00 = 1f
dst.m10 = 0f
dst.m20 = mov.x
dst.m01 = 0f
dst.m11 = 1f
dst.m21 = mov.y
dst.m02 = 0f
dst.m12 = 0f
dst.m22 = 1f
}
/**
* Generates the homogeneous scaling matrix for a given scale vector around the origin
*/
def scale2D(scale: Vector2f): Matrix3f = {
val ret = new Matrix3f
setScale2D(scale, ret)
ret
}
def setScale2D(scale: Vector2f, dst: Matrix3f): Unit = {
dst.m00 = scale.x
dst.m10 = 0f
dst.m20 = 0f
dst.m01 = 0f
dst.m11 = scale.y
dst.m21 = 0f
dst.m02 = 0f
dst.m12 = 0f
dst.m22 = 1f
}
/**
* Generates the non-homogeneous rotation matrix for a given angle (in degrees) and a given unitary axis around the origin
*/
def rotate3D(angle: Float, axis: Vector3f): Matrix3f = {
val ret = new Matrix3f
setRotate3D(angle, axis, ret)
ret
}
def setRotate3D(angle: Float, axis: Vector3f, dst: Matrix3f): Unit = {
val radAngle = Math.toRadians(angle)
val c = Math.cos(radAngle).toFloat
val s = Math.sin(radAngle).toFloat
val x = axis.x
val y = axis.y
val z = axis.z
dst.m00 = x * x * (1 - c) + c
dst.m10 = x * y * (1 - c) - z * s
dst.m20 = x * z * (1 - c) + y * s
dst.m01 = y * x * (1 - c) + z * s
dst.m11 = y * y * (1 - c) + c
dst.m21 = y * z * (1 - c) - x * s
dst.m02 = x * z * (1 - c) - y * s
dst.m12 = y * z * (1 - c) + x * s
dst.m22 = z * z * (1 - c) + c
}
/**
* Generates the non-homogeneous scaling matrix for a given scale vector around the origin
*/
def scale3D(scale: Vector3f): Matrix3f = {
val ret = new Matrix3f
setScale3D(scale, ret)
ret
}
def setScale3D(scale: Vector3f, dst: Matrix3f): Unit = {
dst.m00 = scale.x
dst.m10 = 0f
dst.m20 = 0f
dst.m01 = 0f
dst.m11 = scale.y
dst.m21 = 0f
dst.m02 = 0f
dst.m12 = 0f
dst.m22 = scale.z
}
/**
* Generates the homogeneous projection matrix given the details of the orthographic projection
*/
def ortho2D(left: Float, right: Float, bottom: Float, top: Float): Matrix3f = {
val ret = new Matrix3f
setOrtho2D(left, right, bottom, top, ret)
ret
}
def setOrtho2D(left: Float, right: Float, bottom: Float, top: Float, dst: Matrix3f): Unit = {
dst.m00 = 2 / (right - left)
dst.m10 = 0f
dst.m20 = -(right + left) / (right - left)
dst.m01 = 0f
dst.m11 = 2 / (top - bottom)
dst.m21 = -(top + bottom) / (top - bottom)
dst.m02 = 0f
dst.m12 = 0f
dst.m22 = 1f
}
}
| joelross/scalajs-games | demo/shared/src/main/scala/games/math/Matrix3f.scala | Scala | bsd-3-clause | 16,144 |
package controllers
import play.api._
import play.api.libs.json._
import play.api.mvc._
import play.api.libs.concurrent.Execution.Implicits._
import play.Logger
import utils._
import play.api.libs.iteratee.Enumerator
import play.api.cache.Cached
import play.api.Play.current
import traits.UrlFromConfig
import scala.async.Async.{ async, await }
object Standards extends Controller {
  val standardsUtil = new StandardsUtil with UrlFromConfig
  /**
   * Builds a 200 response that streams `data` as an application/json body
   * in 256-byte chunks.
   *
   * Extracted to remove the response-building code that was duplicated in
   * `standards` and `standard`; the async/await macro was also replaced by
   * a plain `map` on the Future (equivalent here, no suspension points
   * besides the single await).
   */
  private def jsonStream(data: java.io.InputStream): SimpleResult =
    SimpleResult(
      header = ResponseHeader(200, Map("Content-Type" -> "application/json")),
      body = Enumerator.fromStream(data, 256))
  /** GET: streams the full standards listing as JSON. */
  def standards() =
    Action.async { request =>
      standardsUtil.standards().map(jsonStream)
    }
  /** GET: streams the standard identified by `standardId` as JSON. */
  def standard(standardId: String) =
    Action.async { request =>
      standardsUtil.getStandard(standardId).map(jsonStream)
    }
} | adlnet/LR-Search | app/controllers/Standards.scala | Scala | apache-2.0 | 1,053 |
/*
* Copyright 2017 Guy Van den Broeck <guyvdb@cs.ucla.edu>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.ucla.cs.starai.sdd.manager.normalized
import edu.ucla.cs.starai.logic.VTreeLeafImpl
import edu.ucla.cs.starai.logic._
import edu.ucla.cs.starai.util.Child
import edu.ucla.cs.starai.sdd.manager.GoogleWeakCache
import edu.ucla.cs.starai.sdd.manager.UniqueNodesCache
import edu.ucla.cs.starai.sdd.manager.HardCache
// Internal implementation hierarchy: sealed, so the leaf/inner-node classes
// below are the only subclasses. Child[SDDManagerImpl] supplies the parent
// link that SDDManagerBigINodeImpl wires up via setParent in its constructor.
sealed abstract class SDDManagerImpl extends SDDManager with Child[SDDManagerImpl]{
}
object SDDManagerImpl {
  /**
   * Recursively builds a manager tree mirroring the shape of `vtree`:
   * a leaf manager per vtree leaf and an inner-node manager per internal
   * node (left and right sub-managers built first).
   */
  def apply(vtree: VTree.Some): SDDManagerImpl =
    vtree.kind.fold(
      leaf => new SDDManagerLeafImpl(leaf.variable),
      inode => new SDDManagerBigINodeImpl(apply(inode.vl), apply(inode.vr))
    )
}
// Leaf manager for a single variable. The early-initializer block
// ("extends { val variable = ... }") assigns `variable` before the
// mixed-in trait constructors run.
final class SDDManagerLeafImpl(_variable: Variable)
extends { val variable = _variable }
with SDDManagerImpl with SDDManagerLeaf{
}
// Inner-node manager over left/right child managers. The early-initializer
// block creates the unique-nodes cache before the SDDManagerINode trait
// constructor runs; the constructor body then registers this node as the
// parent of both children.
final class SDDManagerBigINodeImpl(val vl: SDDManagerImpl, val vr: SDDManagerImpl) extends {
  val uniqueNodesCache = new GoogleWeakCache[ManagedSDD]
} with SDDManagerImpl with SDDManagerINode {
  // Side effects at construction time: wire up the parent links.
  vl.setParent(this)
  vr.setParent(this)
} | UCLA-StarAI/ScalaDD | src/main/scala/edu/ucla/cs/starai/sdd/manager/normalized/SDDManagerImpl.scala | Scala | apache-2.0 | 1,720 |
package fs2
import cats.effect.IO
import java.security.MessageDigest
import org.scalacheck.Gen
import hash._
class HashSpec extends Fs2Spec {
  /** Reference digest of `str` computed directly with java.security.MessageDigest. */
  def digest(algo: String, str: String): List[Byte] =
    MessageDigest.getInstance(algo).digest(str.getBytes).toList
  /**
   * Feeds `str` through the fs2 hash pipe `h` in randomly sized chunks and
   * checks the output against the MessageDigest reference for `algo` —
   * the digest must not depend on chunk boundaries.
   */
  def checkDigest[A](h: Pipe[Pure, Byte, Byte], algo: String, str: String) = {
    // Random chunk size in [1, str.length] so boundaries vary across runs.
    val n =
      if (str.length > 0) Gen.choose(1, str.length).sample.getOrElse(1) else 1
    val s =
      if (str.isEmpty) Stream.empty
      else
        str.getBytes
          .grouped(n)
          .foldLeft(Stream.empty.covaryOutput[Byte])(
            (acc, c) => acc ++ Stream.chunk(Chunk.bytes(c)))
    s.through(h).toList shouldBe digest(algo, str)
  }
  "digests" - {
    "md2" in forAll { (s: String) =>
      checkDigest(md2, "MD2", s)
    }
    "md5" in forAll { (s: String) =>
      checkDigest(md5, "MD5", s)
    }
    "sha1" in forAll { (s: String) =>
      checkDigest(sha1, "SHA-1", s)
    }
    "sha256" in forAll { (s: String) =>
      checkDigest(sha256, "SHA-256", s)
    }
    "sha384" in forAll { (s: String) =>
      checkDigest(sha384, "SHA-384", s)
    }
    "sha512" in forAll { (s: String) =>
      checkDigest(sha512, "SHA-512", s)
    }
  }
  // SHA-1 always produces a 20-byte digest, even for empty input.
  "empty input" in {
    Stream.empty.through(sha1).toList should have size (20)
  }
  "zero or one output" in forAll { (lb: List[Array[Byte]]) =>
    lb.foldLeft(Stream.empty.covaryOutput[Byte])((acc, b) => acc ++ Stream.chunk(Chunk.bytes(b)))
      .through(sha1)
      .toList should have size (20)
  }
  // Runs the same hashed stream from 100 parallel threads and checks every
  // evaluation yields the same digest as a single sequential run.
  "thread-safety" in {
    val s = Stream
      .range(1, 100)
      .covary[IO]
      .flatMap(i => Stream.chunk(Chunk.bytes(i.toString.getBytes)))
      .through(sha512)
    // avoid using .par here because it's not source-compatible across 2.12/2.13
    // (2.13 needs an import, but in 2.12 the same import won't compile)
    val vec = collection.parallel.immutable.ParVector.fill(100)(s)
    val res = s.compile.toVector.unsafeRunSync()
    vec.map(_.compile.toVector.unsafeRunSync()) shouldBe Vector.fill(100)(res)
  }
}
| zaneli/fs2 | core/jvm/src/test/scala/fs2/HashSpec.scala | Scala | mit | 2,054 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.python
import org.apache.spark.api.python.PythonEvalType
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistribution, Distribution, Partitioning}
import org.apache.spark.sql.execution.{BinaryExecNode, CoGroupedIterator, SparkPlan}
import org.apache.spark.sql.types.StructType
/**
* Physical node for [[org.apache.spark.sql.catalyst.plans.logical.FlatMapCoGroupsInPandas]]
*
* The input dataframes are first Cogrouped. Rows from each side of the cogroup are passed to the
* Python worker via Arrow. As each side of the cogroup may have a different schema we send every
* group in its own Arrow stream.
* The Python worker turns the resulting record batches to `pandas.DataFrame`s, invokes the
* user-defined function, and passes the resulting `pandas.DataFrame`
* as an Arrow record batch. Finally, each record batch is turned to
* Iterator[InternalRow] using ColumnarBatch.
*
* Note on memory usage:
* Both the Python worker and the Java executor need to have enough memory to
* hold the largest cogroup. The memory on the Java side is used to construct the
* record batches (off heap memory). The memory on the Python side is used for
* holding the `pandas.DataFrame`. It's possible to further split one group into
* multiple record batches to reduce the memory footprint on the Java side, this
* is left as future work.
*/
case class FlatMapCoGroupsInPandasExec(
    leftGroup: Seq[Attribute],
    rightGroup: Seq[Attribute],
    func: Expression,
    output: Seq[Attribute],
    left: SparkPlan,
    right: SparkPlan)
  extends BasePandasGroupExec(func, output) with BinaryExecNode {

  // Output rows follow the left child's partitioning.
  override def outputPartitioning: Partitioning = left.outputPartitioning

  // Each side must be clustered by its grouping keys; a side with no keys
  // degenerates to a single group, so all its tuples must be co-located.
  override def requiredChildDistribution: Seq[Distribution] = {
    val leftDist = if (leftGroup.isEmpty) AllTuples else ClusteredDistribution(leftGroup)
    val rightDist = if (rightGroup.isEmpty) AllTuples else ClusteredDistribution(rightGroup)
    leftDist :: rightDist :: Nil
  }

  // Sort each side by its keys so CoGroupedIterator can merge the two
  // grouped streams in a single pass.
  override def requiredChildOrdering: Seq[Seq[SortOrder]] = {
    leftGroup
      .map(SortOrder(_, Ascending)) :: rightGroup.map(SortOrder(_, Ascending)) :: Nil
  }

  override protected def doExecute(): RDD[InternalRow] = {
    // NOTE(review): resolveArgOffsets / groupAndProject / executePython come
    // from BasePandasGroupExec (not visible here) — they appear to dedup the
    // grouping columns and compute UDF argument offsets; confirm against that
    // base class.
    val (leftDedup, leftArgOffsets) = resolveArgOffsets(left, leftGroup)
    val (rightDedup, rightArgOffsets) = resolveArgOffsets(right, rightGroup)

    // Map cogrouped rows to ArrowPythonRunner results, Only execute if partition is not empty
    left.execute().zipPartitions(right.execute())  { (leftData, rightData) =>
      if (leftData.isEmpty && rightData.isEmpty) Iterator.empty else {

        val leftGrouped = groupAndProject(leftData, leftGroup, left.output, leftDedup)
        val rightGrouped = groupAndProject(rightData, rightGroup, right.output, rightDedup)
        // Align the two grouped streams on the grouping key and drop the key,
        // keeping only the (left rows, right rows) pair for each group.
        val data = new CoGroupedIterator(leftGrouped, rightGrouped, leftGroup)
          .map { case (_, l, r) => (l, r) }

        val runner = new CogroupedArrowPythonRunner(
          chainedFunc,
          PythonEvalType.SQL_COGROUPED_MAP_PANDAS_UDF,
          Array(leftArgOffsets ++ rightArgOffsets),
          StructType.fromAttributes(leftDedup),
          StructType.fromAttributes(rightDedup),
          sessionLocalTimeZone,
          pythonRunnerConf)

        executePython(data, runner)
      }
    }
  }
}
| bdrillard/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/python/FlatMapCoGroupsInPandasExec.scala | Scala | apache-2.0 | 4,293 |
package edu.berkeley.nlp.coref
import edu.berkeley.nlp.coref.config.CorefSystemConfiguration
import org.jobimtext.coref.berkeley.ThesaurusCollection
import edu.berkeley.nlp.coref.lang.CorefLanguagePack
/**
* TODO convert nulls to options
* @param languagePack
*/
class MentionPropertyComputer(val languagePack: CorefLanguagePack) {
  // NOTE(review): these fields are mutable and null-initialized (see the TODO
  // above about converting to Option); presumably callers must assign them
  // before use — confirm at the call sites.
  var ngComputer: NumberGenderComputer = null
  var thesauri: ThesaurusCollection = null
  var config: CorefSystemConfiguration = null
} | timfeu/berkeleycoref-thesaurus | src/main/java/edu/berkeley/nlp/coref/MentionPropertyComputer.scala | Scala | gpl-3.0 | 474 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.status.api.v1.sql
import java.util.Date
import scala.collection.mutable.ArrayBuffer
import org.scalatest.PrivateMethodTester
import org.apache.spark.{JobExecutionStatus, SparkFunSuite}
import org.apache.spark.sql.execution.ui.{SparkPlanGraph, SparkPlanGraphCluster, SparkPlanGraphEdge, SparkPlanGraphNode, SQLExecutionUIData, SQLPlanMetric}
object SqlResourceSuite {

  // Fixture constants shared between the plan-graph fixture and the
  // expected Node/Metric values below.
  val SCAN_TEXT = "Scan text"
  val FILTER = "Filter"
  val WHOLE_STAGE_CODEGEN_1 = "WholeStageCodegen (1)"
  val DURATION = "duration"
  val NUMBER_OF_OUTPUT_ROWS = "number of output rows"
  val METADATA_TIME = "metadata time"
  val NUMBER_OF_FILES_READ = "number of files read"
  val SIZE_OF_FILES_READ = "size of files read"
  val PLAN_DESCRIPTION = "== Physical Plan ==\\nCollectLimit (3)\\n+- * Filter (2)\\n +- Scan text..."
  val DESCRIPTION = "csv at MyDataFrames.scala:57"

  val nodeIdAndWSCGIdMap: Map[Long, Option[Long]] = Map(1L -> Some(1L))

  // Graph fixture: a WholeStageCodegen cluster containing a Filter node,
  // plus a standalone Scan node, connected by one edge.
  val filterNode = new SparkPlanGraphNode(1, FILTER, "",
    metrics = Seq(SQLPlanMetric(NUMBER_OF_OUTPUT_ROWS, 1, "")))
  val nodes: Seq[SparkPlanGraphNode] = Seq(
    new SparkPlanGraphCluster(0, WHOLE_STAGE_CODEGEN_1, "",
      nodes = ArrayBuffer(filterNode),
      metrics = Seq(SQLPlanMetric(DURATION, 0, ""))),
    new SparkPlanGraphNode(2, SCAN_TEXT, "",
      metrics = Seq(
        SQLPlanMetric(METADATA_TIME, 2, ""),
        SQLPlanMetric(NUMBER_OF_FILES_READ, 3, ""),
        SQLPlanMetric(NUMBER_OF_OUTPUT_ROWS, 4, ""),
        SQLPlanMetric(SIZE_OF_FILES_READ, 5, ""))))

  val edges: Seq[SparkPlanGraphEdge] = Seq(SparkPlanGraphEdge(3, 2))

  // Same graph with the codegen cluster node removed (codegen "off" view).
  val nodesWhenCodegenIsOff: Seq[SparkPlanGraphNode] =
    SparkPlanGraph(nodes, edges).allNodes.filterNot(_.name == WHOLE_STAGE_CODEGEN_1)

  val metrics: Seq[SQLPlanMetric] = {
    Seq(SQLPlanMetric(DURATION, 0, ""),
      SQLPlanMetric(NUMBER_OF_OUTPUT_ROWS, 1, ""),
      SQLPlanMetric(METADATA_TIME, 2, ""),
      SQLPlanMetric(NUMBER_OF_FILES_READ, 3, ""),
      SQLPlanMetric(NUMBER_OF_OUTPUT_ROWS, 4, ""),
      SQLPlanMetric(SIZE_OF_FILES_READ, 5, ""))
  }

  // Completed execution fixture. completionTime - submissionTime =
  // 1586768888999 - 1586768888233 = 766 ms, asserted as `duration` below.
  val sqlExecutionUIData: SQLExecutionUIData = {
    def getMetricValues() = {
      Map[Long, String](
        0L -> "0 ms",
        1L -> "1",
        2L -> "2 ms",
        3L -> "1",
        4L -> "1",
        5L -> "330.0 B"
      )
    }

    new SQLExecutionUIData(
      executionId = 0,
      description = DESCRIPTION,
      details = "",
      physicalPlanDescription = PLAN_DESCRIPTION,
      metrics = metrics,
      submissionTime = 1586768888233L,
      completionTime = Some(new Date(1586768888999L)),
      jobs = Map[Int, JobExecutionStatus](
        0 -> JobExecutionStatus.SUCCEEDED,
        1 -> JobExecutionStatus.SUCCEEDED),
      stages = Set[Int](),
      metricValues = getMetricValues()
    )
  }

  /** Expected Node values for the fixture graph when codegen is on. */
  private def getNodes(): Seq[Node] = {
    val node = Node(0, WHOLE_STAGE_CODEGEN_1,
      wholeStageCodegenId = None, metrics = Seq(Metric(DURATION, "0 ms")))
    val node2 = Node(1, FILTER,
      wholeStageCodegenId = Some(1), metrics = Seq(Metric(NUMBER_OF_OUTPUT_ROWS, "1")))
    val node3 = Node(2, SCAN_TEXT, wholeStageCodegenId = None,
      metrics = Seq(Metric(METADATA_TIME, "2 ms"),
        Metric(NUMBER_OF_FILES_READ, "1"),
        Metric(NUMBER_OF_OUTPUT_ROWS, "1"),
        Metric(SIZE_OF_FILES_READ, "330.0 B")))

    // reverse order because of supporting execution order by aligning with Spark-UI
    Seq(node3, node2, node)
  }

  /** Expected Node values for the fixture graph when codegen is off. */
  private def getExpectedNodesWhenWholeStageCodegenIsOff(): Seq[Node] = {
    val node = Node(1, FILTER, metrics = Seq(Metric(NUMBER_OF_OUTPUT_ROWS, "1")))
    val node2 = Node(2, SCAN_TEXT,
      metrics = Seq(Metric(METADATA_TIME, "2 ms"),
        Metric(NUMBER_OF_FILES_READ, "1"),
        Metric(NUMBER_OF_OUTPUT_ROWS, "1"),
        Metric(SIZE_OF_FILES_READ, "330.0 B")))

    // reverse order because of supporting execution order by aligning with Spark-UI
    Seq(node2, node)
  }

  /** Asserts every field of `executionData` against the fixture expectations. */
  private def verifyExpectedExecutionData(executionData: ExecutionData,
                                          nodes: Seq[Node],
                                          edges: Seq[SparkPlanGraphEdge],
                                          planDescription: String): Unit = {

    assert(executionData.id == 0)
    assert(executionData.status == "COMPLETED")
    assert(executionData.description == DESCRIPTION)
    assert(executionData.planDescription == planDescription)
    assert(executionData.submissionTime == new Date(1586768888233L))
    assert(executionData.duration == 766L)
    assert(executionData.successJobIds == Seq[Int](0, 1))
    assert(executionData.runningJobIds == Seq[Int]())
    assert(executionData.failedJobIds == Seq.empty)
    assert(executionData.nodes == nodes)
    assert(executionData.edges == edges)
  }

}
/**
* Sql Resource Public API Unit Tests.
*/
class SqlResourceSuite extends SparkFunSuite with PrivateMethodTester {

  import SqlResourceSuite._

  val sqlResource = new SqlResource()
  // PrivateMethodTester handle used to reflectively invoke the private
  // SqlResource.prepareExecutionData(uiData, graph, details, planDescription).
  val prepareExecutionData = PrivateMethod[ExecutionData]('prepareExecutionData)

  // details = false: nodes/edges are omitted; planDescription = false: blank plan.
  test("Prepare ExecutionData when details = false and planDescription = false") {
    val executionData =
      sqlResource invokePrivate prepareExecutionData(
        sqlExecutionUIData, SparkPlanGraph(Seq.empty, Seq.empty), false, false)
    verifyExpectedExecutionData(executionData, edges = Seq.empty,
      nodes = Seq.empty, planDescription = "")
  }

  // details = true: nodes/edges populated from the fixture graph.
  test("Prepare ExecutionData when details = true and planDescription = false") {
    val executionData =
      sqlResource invokePrivate prepareExecutionData(
        sqlExecutionUIData, SparkPlanGraph(nodes, edges), true, false)
    verifyExpectedExecutionData(
      executionData,
      nodes = getNodes(),
      edges,
      planDescription = "")
  }

  // planDescription = true additionally carries the physical plan text.
  test("Prepare ExecutionData when details = true and planDescription = true") {
    val executionData =
      sqlResource invokePrivate prepareExecutionData(
        sqlExecutionUIData, SparkPlanGraph(nodes, edges), true, true)
    verifyExpectedExecutionData(
      executionData,
      nodes = getNodes(),
      edges = edges,
      planDescription = PLAN_DESCRIPTION)
  }

  // With the codegen cluster filtered out, expected nodes have no WSCG entry.
  test("Prepare ExecutionData when details = true and planDescription = false and WSCG = off") {
    val executionData =
      sqlResource invokePrivate prepareExecutionData(
        sqlExecutionUIData, SparkPlanGraph(nodesWhenCodegenIsOff, edges), true, false)
    verifyExpectedExecutionData(
      executionData,
      nodes = getExpectedNodesWhenWholeStageCodegenIsOff(),
      edges = edges,
      planDescription = "")
  }

  // "WholeStageCodegen (1)" should parse to WSCG id 1.
  test("Parse wholeStageCodegenId from nodeName") {
    val getWholeStageCodegenId = PrivateMethod[Option[Long]]('getWholeStageCodegenId)
    val wholeStageCodegenId =
      sqlResource invokePrivate getWholeStageCodegenId(WHOLE_STAGE_CODEGEN_1)
    assert(wholeStageCodegenId == Some(1))
  }

}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/status/api/v1/sql/SqlResourceSuite.scala | Scala | apache-2.0 | 7,551 |
package com.sksamuel.elastic4s.searches.queries.funcscorer
import org.elasticsearch.index.query.functionscore.{RandomScoreFunctionBuilder, ScoreFunctionBuilders}
object RandomScoreFunctionBuilderFn {
  /**
   * Translates our random-score definition into the Elasticsearch builder,
   * carrying over the seed and, when present, the weight (as a Float).
   */
  def apply(random: RandomScoreFunctionDefinition): RandomScoreFunctionBuilder = {
    val builder = ScoreFunctionBuilders.randomFunction(random.seed)
    for (w <- random.weight) builder.setWeight(w.toFloat)
    builder
  }
}
| tyth/elastic4s | elastic4s-tcp/src/main/scala/com/sksamuel/elastic4s/searches/queries/funcscorer/RandomScoreFunctionBuilderFn.scala | Scala | apache-2.0 | 431 |
/**
* Copyright 2014 Gianluca Amato <gamato@unich.it>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of a
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.domains
/**
* This is a trait for testing those domains where the bottom element
* represents emptyness in each fiber.
* @author Gianluca Amato <gamato@unich.it>
*/
trait EmptyExistsSuite extends CartesianFiberedDomainSuite {
  describe("The bottom") {
    it("is empty") {
      // For every sampled fiber, the bottom element of that fiber must
      // report isEmpty.
      forAll(someFibers) { (f) =>
        assert(dom.bottom(f).isEmpty)
      }
    }
  }
}
| francescaScozzari/Jandom | core/src/test/scala/it/unich/jandom/domains/EmptyExistsSuite.scala | Scala | lgpl-3.0 | 1,169 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.health
import akka.actor.Actor
trait Health { this: Actor=>

  /**
   * Creates the health-check child actor under this actor's context,
   * registered with the name defined in [[Health.HealthName]].
   * NOTE(review): side-effecting — calling it twice would attempt to create
   * a second child with the same name.
   */
  def startHealth = {
    // Create the health check system
    context.actorOf(HealthCheckActor.props, Health.HealthName)
  }
}
object Health {
  // Actor name under which the HealthCheckActor child is registered.
  val HealthName = "health"
}
| pcross616/wookiee | wookiee-core/src/main/scala/com/webtrends/harness/health/Health.scala | Scala | apache-2.0 | 1,008 |
package com.twitter.finagle.service
import com.twitter.conversions.time._
import com.twitter.finagle._
import com.twitter.finagle.client.Transporter
import com.twitter.finagle.service.exp.FailureAccrualPolicy
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.util.DefaultLogger
import com.twitter.finagle.util.InetSocketAddressUtil.unconnected
import com.twitter.logging.Level
import com.twitter.util.{Duration, Time, Timer, TimerTask, Try}
import java.util.logging.Logger
import java.net.SocketAddress
import scala.util.Random
object FailureAccrualFactory {
private[finagle] def wrapper(
statsReceiver: StatsReceiver,
failureAccrualPolicy: FailureAccrualPolicy,
label: String,
logger: Logger,
endpoint: SocketAddress
)(timer: Timer): ServiceFactoryWrapper = {
new ServiceFactoryWrapper {
def andThen[Req, Rep](factory: ServiceFactory[Req, Rep]) =
new FailureAccrualFactory(factory, failureAccrualPolicy, timer, statsReceiver.scope("failure_accrual"), label, logger, endpoint)
}
}
private[this] val rng = new Random
private[finagle] val defaultConsecutiveFailures = 5
// Use equalJittered backoff in order to wait more time in between
// each revival attempt on successive failures; if an endpoint has failed
// previous requests, it is likely to do so again. The recent
// "failure history" should influence how long to mark the endpoint
// dead for.
private[finagle] val jitteredBackoff: Stream[Duration] = Backoff.equalJittered(5.seconds, 300.seconds)
private[finagle] val defaultPolicy = () => FailureAccrualPolicy.consecutiveFailures(defaultConsecutiveFailures, jitteredBackoff)
/**
* Add jitter in `markDeadFor` to reduce correlation.
* Return a () => Duration type that can be used in Param.
*/
def perturb(
markDeadFor: Duration,
perturbation: Float = 0.1f,
rand: Random = rng
): () => Duration =
() => {
val ms = markDeadFor.inMilliseconds
(ms + ms*rand.nextFloat()*perturbation).toInt.milliseconds
}
val role = Stack.Role("FailureAccrual")
/**
* An ADT representing a [[FailureAccrualFactory]]s [[Stack.Param]], which is one of the following:
*
* 1. [[Param.Configured]] - configures failure accrual
* 2. [[Param.Replaced]] - replaces the standard implementation with the given one
* 3. [[Param.Disabled]] - completely disables this role in the underlying stack
*/
sealed trait Param {
def mk(): (Param, Stack.Param[Param]) = (this, Param.param)
}
private[finagle] object Param {
case class Configured(failureAccrualPolicy: () => FailureAccrualPolicy) extends Param
case class Replaced(factory: Timer => ServiceFactoryWrapper) extends Param
case object Disabled extends Param
implicit val param: Stack.Param[Param] = Stack.Param(Param.Configured(defaultPolicy))
}
// -Implementation notes-
//
// We have to provide these wrapper functions that produce params instead of calling constructors
// on case classes by the following reasons:
//
// 1. The param inserted into Stack.Params should be casted to its base type in order to tell
// the compiler what implicit value to look up.
// 2. It's not possible to construct a triply-nested Scala class in Java using the sane API.
// See http://stackoverflow.com/questions/30809070/accessing-scala-nested-classes-from-java
/**
* Configures the [[FailureAccrualFactory]].
*
* Note there is a Java-friendly method in the API that takes `Duration` as a value, not a function.
*
* @param numFailures The number of consecutive failures before marking an endpoint as dead.
* @param markDeadFor The duration to mark an endpoint as dead.
*/
def Param(numFailures: Int, markDeadFor: () => Duration): Param =
Param.Configured(() => FailureAccrualPolicy.consecutiveFailures(
numFailures, Backoff.fromFunction(markDeadFor)))
/**
* Configures the [[FailureAccrualFactory]].
*
* @param numFailures The number of consecutive failures before marking an endpoint as dead.
* @param markDeadFor The duration to mark an endpoint as dead.
*/
def Param(numFailures: Int, markDeadFor: Duration): Param =
Param.Configured(() => FailureAccrualPolicy.consecutiveFailures(numFailures,
Backoff.const(markDeadFor)))
/**
* Configures the [[FailureAccrualFactory]].
*
* @param failureAccrualPolicy The policy to use to determine when to mark an endpoint as dead.
*/
def Param(failureAccrualPolicy: FailureAccrualPolicy): Param =
Param.Configured(() => failureAccrualPolicy)
/**
* Configures the [[FailureAccrualFactory]].
*
* Used by the memcache client's default params so clients don't share the policy.
*
* @param failureAccrualPolicy The policy to use to determine when to mark an endpoint as dead.
*/
private[finagle] def Param(failureAccrualPolicy: () => FailureAccrualPolicy): Param =
Param.Configured(failureAccrualPolicy)
/**
* Replaces the [[FailureAccrualFactory]] with the [[ServiceFactoryWrapper]]
* returned by the given function `factory`.
*/
private[finagle] def Replaced(factory: Timer => ServiceFactoryWrapper): Param =
Param.Replaced(factory)
/**
* Replaces the [[FailureAccrualFactory]] with the given [[ServiceFactoryWrapper]] `factory`.
*/
private[finagle] def Replaced(factory: ServiceFactoryWrapper): Param =
Param.Replaced(_ => factory)
/**
* Disables the [[FailureAccrualFactory]].
*/
private[finagle] val Disabled: Param = Param.Disabled
  /**
   * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.service.FailureAccrualFactory]].
   */
  def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
    new Stack.Module6[param.Stats,
      FailureAccrualFactory.Param,
      param.Timer,
      param.Label,
      param.Logger,
      Transporter.EndpointAddr,
      ServiceFactory[Req, Rep]] {
      val role = FailureAccrualFactory.role
      val description = "Backoff from hosts that we cannot successfully make requests to"
      def make(
        _stats: param.Stats,
        _param: FailureAccrualFactory.Param,
        _timer: param.Timer,
        _label: param.Label,
        _logger: param.Logger,
        _endpoint: Transporter.EndpointAddr,
        next: ServiceFactory[Req, Rep]
      ) = _param match {
        // Standard configuration: wrap `next` in failure accrual driven by a
        // freshly created policy (the thunk `p` is invoked here, once).
        case Param.Configured(p) =>
          val param.Timer(timer) = _timer
          val param.Stats(statsReceiver) = _stats
          val param.Label(label) = _label
          val param.Logger(logger) = _logger
          val Transporter.EndpointAddr(endpoint) = _endpoint
          wrapper(statsReceiver, p(), label, logger, endpoint)(timer) andThen next
        // A custom wrapper entirely replaces the failure accrual behavior.
        case Param.Replaced(f) =>
          val param.Timer(timer) = _timer
          f(timer) andThen next
        // Failure accrual disabled: pass the stack through unchanged.
        case Param.Disabled => next
      }
    }
  // The FailureAccrualFactory transitions between Alive, Dead, ProbeOpen,
  // and ProbeClosed. The factory starts in the Alive state. After numFailures
  // failures, the factory transitions to Dead. When it is revived,
  // it transitions to ProbeOpen. After a request is received,
  // it transitions to ProbeClosed and cannot accept any further requests until
  // the initial request is satisfied. If the request is successful, it
  // transitions back to Alive, otherwise Dead.
  //
  // The transitions can be visualized using the state diagram:
  //
  // ,<-----------.
  // Alive |
  // | ,---ProbeClosed
  // ∨ ∨ ^
  // Dead |
  // `---> ProbeOpen
  protected[finagle] sealed trait State
  /** Endpoint is considered healthy; requests flow to the underlying factory. */
  protected[finagle] object Alive extends State
  /** Endpoint is marked dead; status is Busy until the revival timer fires. */
  protected[finagle] object Dead extends State
  /** Revival timer fired; the next request becomes the probe. */
  protected[finagle] object ProbeOpen extends State
  /** A probe request is in flight; status is Busy until it completes. */
  protected[finagle] object ProbeClosed extends State
}
/**
 * A [[com.twitter.finagle.ServiceFactory]] that accrues failures, marking
 * itself unavailable when deemed unhealthy according to its parametrization.
 *
 * State transitions (Alive/Dead/ProbeOpen/ProbeClosed) are described on the
 * companion object's `State` definitions.
 *
 * TODO: treat different failures differently (eg. connect failures
 * vs. not), enable different backoff strategies.
 */
class FailureAccrualFactory[Req, Rep] private[finagle](
    underlying: ServiceFactory[Req, Rep],
    failureAccrualPolicy: FailureAccrualPolicy,
    timer: Timer,
    statsReceiver: StatsReceiver,
    label: String = "",
    logger: Logger = DefaultLogger,
    endpoint: SocketAddress = unconnected)
  extends ServiceFactory[Req, Rep] {
  import FailureAccrualFactory._

  // Convenience constructor: consecutive-failures policy with a constant
  // mark-dead duration.
  def this(
    underlying: ServiceFactory[Req, Rep],
    numFailures: Int,
    markDeadFor: Duration,
    timer: Timer,
    statsReceiver: StatsReceiver,
    label: String,
    logger: Logger,
    endpoint: SocketAddress
  ) = this(
    underlying,
    FailureAccrualPolicy.consecutiveFailures(numFailures, Backoff.const(markDeadFor)),
    timer,
    statsReceiver,
    label,
    logger,
    endpoint)

  // Current position in the state machine. Writes happen inside
  // `synchronized` blocks; @volatile allows lock-free reads in apply()/status.
  @volatile private[this] var state: State = Alive

  // Pending task that will flip the factory to ProbeOpen; guarded by `synchronized`.
  private[this] var reviveTimerTask: Option[TimerTask] = None

  private[this] val removalCounter = statsReceiver.counter("removals")
  private[this] val revivalCounter = statsReceiver.counter("revivals")

  // Records a failed request; may transition to Dead if the policy decides so.
  private[this] def didFail() = synchronized {
    state match {
      case Alive | ProbeClosed =>
        failureAccrualPolicy.markDeadOnFailure() match {
          case Some(duration) => markDeadFor(duration)
          case None =>
        }
      case ProbeOpen =>
        // Requests should not complete while ProbeOpen: the first request in
        // that state immediately flips to ProbeClosed (see apply()).
        logger.log(Level.DEBUG, "FailureAccrualFactory request failed in the " +
          "ProbeOpen state, but requests should only fail when FailureAccrualFactory " +
          "is in the Alive, ProbeClosed, or Dead state.")
      case _ =>
    }
  }

  private[this] val actOnFailure: Throwable => Unit = { _ => didFail() }

  // Classifies each response via isSuccess and updates accrual state.
  private[this] val actOnResponse: Try[Rep] => Unit = { rep =>
    if (isSuccess(rep)) didSucceed() else didFail()
  }

  // Records a successful request; always returns the factory to Alive, and
  // counts a revival when the success completed a probe.
  protected def didSucceed() = synchronized {
    // Only count revivals when the probe succeeds.
    state match {
      case ProbeClosed =>
        revivalCounter.incr()
        failureAccrualPolicy.revived()
      case ProbeOpen =>
        logger.log(Level.DEBUG, "FailureAccrualFactory request succeeded in the " +
          "ProbeOpen state, but requests should only succeed when FailureAccrualFactory " +
          "is in the Alive, ProbeClosed, or Dead state.")
      case _ =>
    }
    state = Alive
    failureAccrualPolicy.recordSuccess()
  }

  // Transitions to Dead and schedules the move to ProbeOpen after `duration`.
  private[this] def markDeadFor(duration: Duration) = synchronized {
    removalCounter.incr()
    val timerTask = timer.schedule(duration.fromNow) { startProbing() }
    state = Dead
    reviveTimerTask = Some(timerTask)
    if (logger.isLoggable(Level.DEBUG))
      logger.log(Level.DEBUG, s"""FailureAccrualFactory marking connection to "$label" as dead. Remote Address: ${endpoint.toString}""")
    didMarkDead()
  }

  /**
   * Called by FailureAccrualFactory after marking an endpoint dead. Override
   * this method to perform additional actions.
   */
  protected def didMarkDead() = {}

  /**
   * Enter 'Probing' state.
   * The service must satisfy one request before accepting more.
   */
  protected def startProbing() = synchronized {
    state = ProbeOpen
    cancelReviveTimerTasks()
  }

  // A response counts as a success iff it is a Return; subclasses may override.
  protected def isSuccess(response: Try[Rep]): Boolean = response.isReturn

  def apply(conn: ClientConnection) = {
    underlying(conn).map { service =>
      new Service[Req, Rep] {
        def apply(request: Req) = {
          // If service has just been revived, accept no further requests.
          // Note: Another request may have come in before state transitions to
          // ProbeClosed, so > 1 requests may be processing while in the
          // ProbeClosed state. The result of first to complete will determine
          // whether the factory transitions to Alive (successful) or Dead
          // (unsuccessful).
          state match {
            case ProbeOpen =>
              // Double-checked under the lock: only one caller flips the state.
              synchronized {
                state match {
                  case ProbeOpen => state = ProbeClosed
                  case _ =>
                }
              }
            case _ =>
          }
          service(request).respond(actOnResponse)
        }
        override def close(deadline: Time) = service.close(deadline)
        override def status = Status.worst(service.status,
          FailureAccrualFactory.this.status)
      }
    }.onFailure(actOnFailure)
  }

  // Busy while dead or mid-probe; otherwise defer to the underlying factory.
  override def status = state match {
    case Alive | ProbeOpen => underlying.status
    case Dead | ProbeClosed => Status.Busy
  }

  // Exposed for subclasses/tests to observe the current state.
  protected[this] def getState: State = state

  private[this] def cancelReviveTimerTasks(): Unit = synchronized {
    reviveTimerTask.foreach(_.cancel())
    reviveTimerTask = None
  }

  // Closing the factory also cancels any pending revival task.
  def close(deadline: Time) = underlying.close(deadline).ensure {
    cancelReviveTimerTasks()
  }

  override val toString = "failure_accrual_%s".format(underlying.toString)

  @deprecated("Please call the FailureAccrualFactory constructor that supplies a StatsReceiver", "6.22.1")
  def this(
    underlying: ServiceFactory[Req, Rep],
    numFailures: Int,
    markDeadFor: Duration,
    timer: Timer,
    label: String
  ) = this(
    underlying,
    FailureAccrualPolicy.consecutiveFailures(numFailures, Backoff.const(markDeadFor)),
    timer,
    NullStatsReceiver,
    label)
}
| a-manumohan/finagle | finagle-core/src/main/scala/com/twitter/finagle/service/FailureAccrualFactory.scala | Scala | apache-2.0 | 13,325 |
package fs2
package async
import fs2.util.Task
import java.util.concurrent.atomic.AtomicLong
class SignalSpec extends Fs2Spec {
  "Signal" - {
    "get/set/discrete" in {
      forAll { (vs0: List[Long]) =>
        // Map 0 -> 1 so no test value collides with the signal's initial
        // value (0L), which also doubles as the "no update seen yet" sentinel
        // for `r` below.
        val vs = vs0 map { n => if (n == 0) 1 else n }
        val s = async.signalOf[Task,Long](0L).unsafeRun()
        // Mirrors the latest value observed on the discrete stream.
        val r = new AtomicLong(0)
        val u = s.discrete.map(r.set).run.async.unsafeRunAsyncFuture()
        assert(vs.forall { v =>
          s.set(v).unsafeRun()
          while (s.get.unsafeRun() != v) {} // wait for set to arrive
          // can't really be sure when the discrete subscription will be set up,
          // but once we've gotten one update (r != 0), we know we're subscribed
          // and should see result of all subsequent calls to set
          if (r.get != 0) { while (r.get != v) {} }
          true
        })
      }
    }
    "discrete" in {
      // verifies that discrete always receives the most recent value, even when updates occur rapidly
      forAll { (v0: Long, vsTl: List[Long]) =>
        val vs = v0 :: vsTl
        val s = async.signalOf[Task,Long](0L).unsafeRun()
        val r = new AtomicLong(0)
        // The artificial sleep makes the consumer slower than the producer, so
        // intermediate values may be skipped; only the latest must arrive.
        val u = s.discrete.map { i => Thread.sleep(10); r.set(i) }.run.async.unsafeRunAsyncFuture()
        vs.foreach { v => s.set(v).unsafeRun() }
        val last = vs.last
        // Spin until the final value is observed (the test hangs on failure).
        while (r.get != last) {}
        true
      }
    }
  }
}
| japgolly/scalaz-stream | core/src/test/scala/fs2/async/SignalSpec.scala | Scala | mit | 1,413 |
package de.zalando.model
import de.zalando.apifirst.Application._
import de.zalando.apifirst.Domain._
import de.zalando.apifirst.ParameterPlace
import de.zalando.apifirst.naming._
import de.zalando.apifirst.Hypermedia._
import de.zalando.apifirst.Http._
import de.zalando.apifirst.Security
import java.net.URL
import Security._
//noinspection ScalaStyle
// NOTE(review): this object appears to be machine-generated from an
// `options.yaml` API definition (it assembles a StrictModel from static
// type/parameter tables); prefer regenerating over hand-editing — TODO
// confirm with the generator setup.
object options_yaml extends WithModel {
  // Named type definitions, keyed by reference path.
  def types = Map[Reference, Type](
    Reference("⌿definitions⌿Basic") →
      TypeDef(Reference("⌿definitions⌿Basic"),
        Seq(
          Field(Reference("⌿definitions⌿Basic⌿id"), Lng(TypeMeta(Some("int64"), List()))),
          Field(Reference("⌿definitions⌿Basic⌿required"), TypeRef(Reference("⌿definitions⌿Basic⌿required"))),
          Field(Reference("⌿definitions⌿Basic⌿optional"), TypeRef(Reference("⌿definitions⌿Basic⌿optional")))
        ), TypeMeta(Some("Named types: 3"), List())),
    Reference("⌿definitions⌿Basic⌿required") →
      Arr(Str(None, TypeMeta(None, List())), TypeMeta(None, List()), "csv"),
    Reference("⌿definitions⌿Basic⌿optional") →
      Opt(TypeRef(Reference("⌿definitions⌿Basic⌿required")), TypeMeta(None, List()))
  )
  // This spec declares no operation parameters, discriminators, security
  // definitions, state transitions, or API calls.
  def parameters = Map[ParameterRef, Parameter](
  )
  def basePath: String = "/api"
  def discriminators: DiscriminatorLookupTable = Map[Reference, Reference](
  )
  def securityDefinitions: SecurityDefinitionsTable = Map[String, Security.Definition](
  )
  def stateTransitions: StateTransitionsTable = Map[State, Map[State, TransitionProperties]]()
  def calls: Seq[ApiCall] = Seq()
  def packageName: Option[String] = None
  def model = new StrictModel(calls, types, parameters, discriminators, basePath, packageName, stateTransitions, securityDefinitions)
} | zalando/play-swagger | play-scala-generator/src/test/scala/model/resources.options_yaml.scala | Scala | mit | 1,733 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package varys.framework.slave.ui
import akka.dispatch.Await
import akka.pattern.ask
import akka.util.duration._
import javax.servlet.http.HttpServletRequest
import net.liftweb.json.JsonAST.JValue
import scala.xml.Node
import varys.framework.JsonProtocol
import varys.framework.{RequestSlaveState, SlaveState}
import varys.ui.UIUtils
import varys.Utils
private[varys] class IndexPage(parent: SlaveWebUI) {
  val slaveActor = parent.slave.self
  val slave = parent.slave
  val timeout = parent.timeout

  /**
   * Queries the slave actor for its current state, blocking for at most
   * 30 seconds. Shared by both the JSON and HTML renderers (the two methods
   * previously duplicated this ask/await sequence).
   */
  private def currentSlaveState(): SlaveState =
    Await.result((slaveActor ? RequestSlaveState)(timeout).mapTo[SlaveState], 30 seconds)

  /** Renders the slave's state as JSON (REST endpoint). */
  def renderJson(request: HttpServletRequest): JValue =
    JsonProtocol.writeSlaveState(currentSlaveState())

  /** Renders the slave's HTML index page with basic details and a link back to the master. */
  def render(request: HttpServletRequest): Seq[Node] = {
    val slaveState = currentSlaveState()
    val content =
      <div class="row-fluid"> <!-- Slave Details -->
        <div class="span12">
          <ul class="unstyled">
            <li><strong>ID:</strong> {slaveState.slaveId}</li>
            <li><strong>
              Master URL:</strong> {slaveState.masterUrl}
            </li>
            <li><strong>RxBps:</strong> {slaveState.rxBps}</li>
            <li><strong>TxBps:</strong> {slaveState.txBps}</li>
          </ul>
          <p><a href={slaveState.masterWebUiUrl}>Back to Master</a></p>
        </div>
      </div>
    UIUtils.basicVarysPage(content, "Varys Slave at %s:%s".format(
      slaveState.host, slaveState.port))
  }
}
| frankfzw/varys | core/src/main/scala/varys/framework/slave/ui/IndexPage.scala | Scala | apache-2.0 | 2,439 |
/*
* Copyright 2020 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.util
import com.google.protobuf.Message
import com.spotify.scio.coders.{AvroBytesUtil, Coder, CoderMaterializer}
import org.apache.avro.Schema
import org.apache.avro.generic.GenericRecord
import scala.reflect.{classTag, ClassTag}
object ProtobufUtil {

  /**
   * A Coder for Protobuf [[Message]] s encoded as Avro [[GenericRecord]] s. This must be in
   * implicit scope when using [[ProtobufUtil.toAvro]], for example:
   *
   * `implicit val avroMessageCoder: Coder[GenericRecord] = ProtobufUtil.AvroMessageCoder`
   */
  lazy val AvroMessageCoder: Coder[GenericRecord] =
    Coder.avroGenericRecordCoder(AvroBytesUtil.schema)

  /** The Avro [[Schema]] corresponding to an Avro-encoded Protobuf [[Message]]. */
  lazy val AvroMessageSchema: Schema = AvroBytesUtil.schema

  /**
   * A metadata map containing information about the underlying Protobuf schema of the
   * [[Message]] bytes encoded inside [[AvroMessageSchema]] 's `bytes` field.
   *
   * @tparam T
   *   subclass of [[Message]]
   */
  def schemaMetadataOf[T <: Message: ClassTag]: Map[String, AnyRef] = {
    import me.lyh.protobuf.generic
    // Widen the class tag to Message so the generic schema extractor accepts it.
    val messageTag = classTag[T].asInstanceOf[ClassTag[Message]]
    val jsonSchema = generic.Schema.of[Message](messageTag).toJson
    Map("protobuf.generic.schema" -> jsonSchema)
  }

  /**
   * A function that converts a Protobuf [[Message]] of type `T` into a [[GenericRecord]] whose
   * [[Schema]] is a single byte array field, corresponding to the serialized bytes in `T`.
   */
  def toAvro[T <: Message: ClassTag]: T => GenericRecord = {
    // Materialize the Beam coder once; the returned closure reuses it per element.
    val beamCoder = CoderMaterializer.beamWithDefault(Coder.protoMessageCoder[T])
    (message: T) => AvroBytesUtil.encode(beamCoder, message)
  }
}
| spotify/scio | scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala | Scala | apache-2.0 | 2,301 |
package com.infinitemule.hopperhack.foursq.storm
import com.infinitemule.hopperhack.foursq._
import FoursquareJsonProtocol._
import com.infinitemule.hopperhack.storm.StormBolt
import com.infinitemule.hopperhack.finagle.{FinagleHttpClientService,
FinagleHttpRequestBuilder,
FinagleHttpsClientService}
import com.twitter.finagle.Service
import com.twitter.util.{Await, Future}
import org.jboss.netty.buffer.ChannelBuffers
import org.jboss.netty.handler.codec.http.{HttpRequest, HttpResponse}
import backtype.storm.tuple.Tuple
import java.net.URL
import spray.json._
/**
 * Since I was using Foursquare's "public" API (i.e. their web site) I wanted to pretend
 * I was a browser.
 */
object FoursquareStorm {
  /** User-Agent header value mimicking desktop Firefox; sent with every request builder below. */
  val ua = "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) Gecko/20100101 Firefox/22.0"
}
/**
 * Bolt that takes a Foursqaure tiny URL (i.e. 4sq.com/djeIjX3k) and
 * parses the HTTP response to find the redirect link, which is the "real" link
 * to the Foursquare checkin.
 *
 * Notes:
 * - I noticed that if I made the Finagle Service a member of the class,
 * I would get a NotSerializable exception. My guess is that Storm
 * needs these all members to be serializable so that it can distribute
 * them on the cluster. This is why the service is created in execute()
 * rather than at construction time; the previous service is closed first
 * so each tuple does not leak a connection pool.
 */
class FoursquareCheckinUrlResolverBolt(val host: String) extends StormBolt {

  override val fields = List("checkinUrl")

  // Holds the service built for the most recent tuple; null until execute()
  // has run at least once (see class note for why it is not built eagerly).
  var client: Service[HttpRequest, HttpResponse] = _

  override def execute(tuple: Tuple) = {
    // Release the previous tuple's service before replacing it; the prior
    // request has completed (Await.ready below), so closing here is safe.
    if (client != null) client.close()
    client = FinagleHttpClientService(host)

    val url = new URL(tuple.getStringByField("link")).getPath()
    val request = new FinagleHttpRequestBuilder(host, url).userAgent(FoursquareStorm.ua).get()
    val response: Future[HttpResponse] = client(request)

    response.onSuccess { resp: HttpResponse =>
      // Our URL is in the Location header.
      val location = resp.getHeader("Location")
      // I noiced tht some tiny URLs point to things other than checkins,
      // so we need to capture only the ones that contain "checkin"
      if (location.contains("checkin")) {
        emitAndAck(tuple, location)
      }
    }

    response.onFailure { cause: Throwable =>
      println("FAILED with " + cause)
    }

    Await.ready(response)
  }

  override def cleanup() = {
    // execute() may never have run on this task, in which case client is null.
    if (client != null) client.close()
  }
}
/**
 * Bolt that takes the resolved URL and fetches the checkin web page
 * and emits the embedded JSON document found in the script tag of the page.
 */
class FoursquareCheckinFetcherBolt extends StormBolt {

  override val fields = List("checkinUrl", "checkinHtml")

  val host = "foursquare.com"

  // Holds the service built for the most recent tuple; null until execute()
  // has run (Finagle services are built per tuple — see the resolver bolt).
  var client: Service[HttpRequest, HttpResponse] = _

  override def execute(tuple: Tuple) = {
    // Release the previous tuple's service before replacing it; the prior
    // request has completed (Await.ready below), so closing here is safe.
    if (client != null) client.close()
    client = FinagleHttpsClientService(host)

    // Rebuild an absolute https URL from the path portion after the host.
    val url = "https://" + host + tuple.getStringByField("checkinUrl").split(host)(1)
    val request = new FinagleHttpRequestBuilder(host, url).userAgent(FoursquareStorm.ua).get()
    val response: Future[HttpResponse] = client(request)

    response.onSuccess { resp: HttpResponse =>
      emitAndAck(tuple, url, processHtml(resp.getContent().toString("UTF-8")))
    }

    response.onFailure { cause: Throwable =>
      println("FAILED with " + cause)
    }

    Await.ready(response)
  }

  override def cleanup() = {
    // execute() may never have run on this task, in which case client is null.
    if (client != null) client.close()
  }

  /**
   * Extracts the checkin JSON embedded in the page's script tag.
   *
   * This should be extracted into a separate class so that
   * it can be tested properly.
   */
  private def processHtml(html: String) = {
    // First we chop out our JSON from the page by splitting
    // where the JSON starts and ends. Then we need to
    // put quotes around checkin and fullVenue so that is becomes
    // a valid JSON object.
    "{" + html
      .split("'\\#checkinDetailPage'\\)\\.get\\(0\\), ")(1)
      .split(",canZoomMap")(0)
      .replaceFirst("checkin", "\"checkin\"")
      .replaceFirst("fullVenue", "\"fullVenue\"") + "}"
  }
}
/**
 * Bolt that parses a checkin JSON object into Foursquare API objects and
 * emits them as a single checkin record.
 */
class FoursquareCheckinParserBolt extends StormBolt {

  override val fields = List("checkin")

  override def execute(tuple: Tuple) = {
    val checkinUrl = tuple.getStringByField("checkinUrl")
    val html = tuple.getStringByField("checkinHtml")
    val parsed = html.asJson.convertTo[FoursquareCheckinResponse]
    val record = FoursquareCheckinRecord(checkinUrl, parsed.checkin, parsed.fullVenue)
    emitAndAck(tuple, record)
  }
}
/**
 * Bolt that takes a Foursquare checkin and submits it to the client app using
 * a web service.
 */
class FoursquareCheckinRepositoryBolt(host: String, port: Int, path: String) extends StormBolt {

  override val fields = List("saveResult")

  // Holds the service built for the most recent tuple; null until execute()
  // has run (Finagle services are built per tuple — see the resolver bolt).
  var client: Service[HttpRequest, HttpResponse] = _

  override def execute(tuple: Tuple) = {
    val record = tuple.getValue(0).asInstanceOf[FoursquareCheckinRecord]

    // Release the previous tuple's service before replacing it; the prior
    // request has completed (Await.ready below), so closing here is safe.
    if (client != null) client.close()
    client = FinagleHttpClientService(host, port)

    val request = new FinagleHttpRequestBuilder(host, path)
      .userAgent(FoursquareStorm.ua)
      .content(record.toJson.toString)
      .contentType("application/json")
      .post()

    val response: Future[HttpResponse] = client(request)

    response.onSuccess { resp: HttpResponse =>
      // The result here should really be parsed to see if there
      // were an error response.
      val result = resp.getContent().toString("UTF-8")
      emitAndAck(tuple, result)
    }

    // Don't do this: Handle the exception properly.
    response.onFailure { cause: Throwable =>
      println("FAILED with " + cause)
    }

    Await.ready(response)
  }

  override def cleanup() = {
    // Release the last service created by execute(), if any.
    if (client != null) client.close()
  }
}
| infinitemule/poursquare-storm | src/main/scala/com/infinitemule/hopperhack/foursq/storm/FoursquareStorm.scala | Scala | mit | 6,103 |
package org.atnos
package object eff {

  // Infix aliases for the member typeclasses, so effect rows can be written
  // e.g. `M <= R` (Member), `M /= R` (MemberInOut), `M |= R` (MemberIn).
  type <=[M[_], R] = Member.<=[M, R]
  type /=[M[_], R] = MemberInOut./=[M, R]
  type |=[M[_], R] = MemberIn.|=[M, R]

  // One import target per effect, each mixing that effect's creation and
  // interpretation traits.
  object eff extends EffCreation with EffInterpretation
  object reader extends ReaderCreation with ReaderInterpretation
  object writer extends WriterCreation with WriterInterpretation
  object state extends StateCreation with StateInterpretation with StateImplicits
  object eval extends EvalCreation with EvalInterpretation
  object option extends OptionCreation with OptionInterpretation
  object list extends ListCreation with ListInterpretation
  object either extends EitherCreation with EitherInterpretation with EitherImplicits
  object validate extends ValidateCreation with ValidateInterpretation
  object choose extends ChooseCreation with ChooseInterpretation
  object safe extends SafeCreation with SafeInterpretation
  object future extends FutureCreation with FutureInterpretation
  object memo extends MemoCreation with MemoInterpretation
  object batch extends Batch

  // Only the creation half of every effect, for callers that build programs
  // without interpreting them.
  object create extends
    ReaderCreation with
    WriterCreation with
    StateCreation with
    EvalCreation with
    OptionCreation with
    ListCreation with
    EitherCreation with
    ValidateCreation with
    ChooseCreation with
    FutureCreation with
    MemoCreation with
    EffCreation with
    SafeCreation

  // The kitchen sink: every effect's full API plus the core Eff combinators
  // in a single import.
  object all extends
    ReaderEffect with
    WriterEffect with
    StateEffect with
    EvalEffect with
    OptionEffect with
    ListEffect with
    EitherEffect with
    ValidateEffect with
    ChooseEffect with
    SafeEffect with
    MemoEffect with
    Batch with
    EffInterpretation with
    EffCreation with
    EffImplicits

  // Low-level interpretation combinators.
  object interpret extends
    Interpret with
    Batch
}
| etorreborre/eff-cats | shared/src/main/scala/org/atnos/eff/package.scala | Scala | mit | 1,952 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.assertion
import io.gatling.commons.stats.assertion._
import io.gatling.core.config.GatlingConfiguration
/** First step of the assertion DSL: selects which metric of `path` to assert on. */
final class AssertionWithPath(path: AssertionPath, configuration: GatlingConfiguration) {
  def responseTime: AssertionWithPathAndTimeMetric = new AssertionWithPathAndTimeMetric(path, ResponseTime, configuration)
  def allRequests: AssertionWithPathAndCountMetric = new AssertionWithPathAndCountMetric(path, AllRequests)
  def failedRequests: AssertionWithPathAndCountMetric = new AssertionWithPathAndCountMetric(path, FailedRequests)
  def successfulRequests: AssertionWithPathAndCountMetric = new AssertionWithPathAndCountMetric(path, SuccessfulRequests)
  def requestsPerSec: AssertionWithPathAndTarget[Double] = new AssertionWithPathAndTarget[Double](path, MeanRequestsPerSecondTarget)
}
/** Selects how the time metric's distribution is summarized before comparison. */
final class AssertionWithPathAndTimeMetric(path: AssertionPath, metric: TimeMetric, configuration: GatlingConfiguration) {

  // Every summary statistic funnels through the same TimeTarget constructor.
  private def withSelection(selection: TimeSelection) =
    new AssertionWithPathAndTarget[Int](path, TimeTarget(metric, selection))

  def min: AssertionWithPathAndTarget[Int] = withSelection(Min)
  def max: AssertionWithPathAndTarget[Int] = withSelection(Max)
  def mean: AssertionWithPathAndTarget[Int] = withSelection(Mean)
  def stdDev: AssertionWithPathAndTarget[Int] = withSelection(StandardDeviation)

  // The four named percentiles come from the charting configuration.
  def percentile1: AssertionWithPathAndTarget[Int] = percentile(configuration.charting.indicators.percentile1)
  def percentile2: AssertionWithPathAndTarget[Int] = percentile(configuration.charting.indicators.percentile2)
  def percentile3: AssertionWithPathAndTarget[Int] = percentile(configuration.charting.indicators.percentile3)
  def percentile4: AssertionWithPathAndTarget[Int] = percentile(configuration.charting.indicators.percentile4)

  def percentile(value: Double): AssertionWithPathAndTarget[Int] = withSelection(Percentiles(value))
}
/** Chooses whether a count metric is asserted as an absolute count or a percentage. */
final class AssertionWithPathAndCountMetric(path: AssertionPath, metric: CountMetric) {
  def count: AssertionWithPathAndTarget[Long] = new AssertionWithPathAndTarget[Long](path, CountTarget(metric))
  def percent: AssertionWithPathAndTarget[Double] = new AssertionWithPathAndTarget[Double](path, PercentTarget(metric))
}
/** Final step of the assertion DSL: attaches a comparison condition to `target`. */
final class AssertionWithPathAndTarget[T: Numeric](path: AssertionPath, target: Target) {

  private val num = implicitly[Numeric[T]]

  // All conditions compare doubles; convert thresholds up front.
  private def toD(value: T): Double = num.toDouble(value)

  /** Builds the finished [[Assertion]] from the given condition. */
  def next(condition: Condition): Assertion =
    Assertion(path, target, condition)

  def lt(threshold: T): Assertion = next(Lt(toD(threshold)))
  def lte(threshold: T): Assertion = next(Lte(toD(threshold)))
  def gt(threshold: T): Assertion = next(Gt(toD(threshold)))
  def gte(threshold: T): Assertion = next(Gte(toD(threshold)))

  @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
  def between(min: T, max: T, inclusive: Boolean = true): Assertion =
    next(Between(toD(min), toD(max), inclusive))

  @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
  def around(mean: T, plusOrMinus: T, inclusive: Boolean = true): Assertion =
    between(num.minus(mean, plusOrMinus), num.plus(mean, plusOrMinus), inclusive)

  @SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
  def deviatesAround(mean: T, percentDeviation: Double, inclusive: Boolean = true): Assertion = {
    // Width of the allowed band: floor(mean * deviation), converted back to T.
    val plusOrMinus = num.fromInt((toD(mean) * percentDeviation).floor.toInt)
    around(mean, plusOrMinus, inclusive)
  }

  def is(value: T): Assertion = next(Is(toD(value)))

  def in(set: Set[T]): Assertion = next(In(set.map(toD).toList))

  def in(values: T*): Assertion = in(values.toSet)
}
| gatling/gatling | gatling-core/src/main/scala/io/gatling/core/assertion/AssertionBuilders.scala | Scala | apache-2.0 | 4,271 |
package org.scalawiki
import org.scalawiki.dto.Site
import org.specs2.mutable.Specification
import org.specs2.specification.BeforeAfterAll
import scala.language.postfixOps
import scala.sys.process._
import scala.util.Properties
trait WithDocker extends BeforeAfterAll {

  val win = Properties.isWin

  // Master switch: when false, beforeAll/afterAll do nothing and the spec skips.
  val dockerTestEnabled = false

  // MediaWiki CLI installer command, run inside the mediawiki container and
  // pointed at the `database` container.
  val install = "docker exec scalawiki_mediawiki_1 " +
    "php maintenance/install.php SomeWiki admin --pass 123 " +
    "--dbserver database --dbuser wikiuser --dbpass example --installdbpass root_pass --installdbuser root " +
    "--server http://localhost:8080 --scriptpath="

  // Returns the mysql client's exit code (0 once the my_wiki database is
  // reachable); the ProcessLogger discards stdout/stderr.
  def checkMysql() = {
    Seq("docker", "exec", "scalawiki_database_1",
      "mysql", "--user=root", "--password=root_pass", "-s", "-e", "use my_wiki") ! ProcessLogger(_ => (), _ => ())
  }

  override def beforeAll: Unit = {
    if (dockerTestEnabled) {
      // Recreate the compose stack from scratch, wait until mysql accepts
      // connections, then run the MediaWiki installer.
      s"docker-compose rm -fsv" !

      s"docker-compose up -d" !

      println(s"waiting for mysql to be alive")
      while (checkMysql() != 0) {
        Thread.sleep(1000)
      }
      install !
    }
  }

  override def afterAll: Unit = {
    if (dockerTestEnabled) {
      s"docker-compose down" !
    }
  }
}
class DockerSpec extends Specification with WithDocker {
  "docker" should {
    "check mediawiki version" in {
      if (dockerTestEnabled) {
        // Talks to the wiki started by WithDocker.beforeAll on localhost.
        val bot = MwBot.create(Site.localhost.copy(scriptPath = ""), None)
        bot.mediaWikiVersion.version.toDouble must be >= 1.31
      } else {
        ok("skip")
      }
    }
  }
}
| intracer/scalawiki | scalawiki-core/src/test/scala/org/scalawiki/DockerSpec.scala | Scala | apache-2.0 | 1,529 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx
import org.apache.spark.SparkContext
import org.apache.spark.graphx.Graph._
import org.apache.spark.graphx.impl.EdgePartition
import org.apache.spark.rdd._
import org.scalatest.FunSuite
class GraphOpsSuite extends FunSuite with LocalSparkContext {

  test("joinVertices") {
    withSpark { sc =>
      val vertices =
        sc.parallelize(Seq[(VertexId, String)]((1, "one"), (2, "two"), (3, "three")), 2)
      val edges = sc.parallelize((Seq(Edge(1, 2, "onetwo"))))
      val g: Graph[String, String] = Graph(vertices, edges)
      val tbl = sc.parallelize(Seq[(VertexId, Int)]((1, 10), (2, 20)))
      // Vertices present in tbl get the joined attribute appended; vertex 3
      // has no entry in tbl and must be left unchanged.
      val g1 = g.joinVertices(tbl) { (vid: VertexId, attr: String, u: Int) => attr + u }
      val v = g1.vertices.collect().toSet
      assert(v === Set((1, "one10"), (2, "two20"), (3, "three")))
    }
  }

  test("collectNeighborIds") {
    withSpark { sc =>
      // A 100-vertex directed ring: x -> (x+1) % 100.
      val chain = (0 until 100).map(x => (x, (x+1)%100) )
      val rawEdges = sc.parallelize(chain, 3).map { case (s,d) => (s.toLong, d.toLong) }
      val graph = Graph.fromEdgeTuples(rawEdges, 1.0).cache()
      val nbrs = graph.collectNeighborIds(EdgeDirection.Either).cache()
      assert(nbrs.count === chain.size)
      assert(graph.numVertices === nbrs.count)
      // On a ring every vertex has exactly two neighbors in Either direction:
      // its successor (vid+1 mod 100) and its predecessor (vid-1, or 99 for 0).
      nbrs.collect.foreach { case (vid, nbrs) => assert(nbrs.size === 2) }
      nbrs.collect.foreach { case (vid, nbrs) =>
        val s = nbrs.toSet
        assert(s.contains((vid + 1) % 100))
        assert(s.contains(if (vid > 0) vid - 1 else 99 ))
      }
    }
  }

  test ("filter") {
    withSpark { sc =>
      val n = 5
      // A star graph: center 0 with one out-edge to each of vertices 1..n.
      val vertices = sc.parallelize((0 to n).map(x => (x:VertexId, x)))
      val edges = sc.parallelize((1 to n).map(x => Edge(0, x, x)))
      val graph: Graph[Int, Int] = Graph(vertices, edges).cache()
      // Keep only vertices with positive out-degree: just the center (0),
      // so no edges survive either.
      val filteredGraph = graph.filter(
        graph => {
          val degrees: VertexRDD[Int] = graph.outDegrees
          graph.outerJoinVertices(degrees) {(vid, data, deg) => deg.getOrElse(0)}
        },
        vpred = (vid: VertexId, deg:Int) => deg > 0
      ).cache()
      val v = filteredGraph.vertices.collect().toSet
      assert(v === Set((0,0)))
      // the map is necessary because of object-reuse in the edge iterator
      val e = filteredGraph.edges.map(e => Edge(e.srcId, e.dstId, e.attr)).collect().toSet
      assert(e.isEmpty)
    }
  }
}
| dotunolafunmiloye/spark | graphx/src/test/scala/org/apache/spark/graphx/GraphOpsSuite.scala | Scala | apache-2.0 | 3,167 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.delegation
import org.apache.flink.annotation.VisibleForTesting
import org.apache.flink.api.dag.Transformation
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.table.api.config.{ExecutionConfigOptions, TableConfigOptions}
import org.apache.flink.table.api.{PlannerType, SqlDialect, TableConfig, TableEnvironment, TableException}
import org.apache.flink.table.catalog._
import org.apache.flink.table.connector.sink.DynamicTableSink
import org.apache.flink.table.delegation.{Executor, Parser, Planner}
import org.apache.flink.table.descriptors.{ConnectorDescriptorValidator, DescriptorProperties}
import org.apache.flink.table.factories.{ComponentFactoryService, FactoryUtil, TableFactoryUtil}
import org.apache.flink.table.operations.OutputConversionModifyOperation.UpdateMode
import org.apache.flink.table.operations._
import org.apache.flink.table.planner.JMap
import org.apache.flink.table.planner.calcite._
import org.apache.flink.table.planner.catalog.CatalogManagerCalciteSchema
import org.apache.flink.table.planner.connectors.DynamicSinkUtils
import org.apache.flink.table.planner.connectors.DynamicSinkUtils.validateSchemaAndApplyImplicitCast
import org.apache.flink.table.planner.expressions.PlannerTypeInferenceUtilImpl
import org.apache.flink.table.planner.hint.FlinkHints
import org.apache.flink.table.planner.plan.nodes.calcite.LogicalLegacySink
import org.apache.flink.table.planner.plan.nodes.exec.processor.{ExecNodeGraphProcessor, ProcessorContext}
import org.apache.flink.table.planner.plan.nodes.exec.serde.SerdeContext
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNodeGraph, ExecNodeGraphGenerator}
import org.apache.flink.table.planner.plan.nodes.physical.FlinkPhysicalRel
import org.apache.flink.table.planner.plan.optimize.Optimizer
import org.apache.flink.table.planner.plan.reuse.SubplanReuser
import org.apache.flink.table.planner.plan.utils.SameRelObjectShuttle
import org.apache.flink.table.planner.sinks.DataStreamTableSink
import org.apache.flink.table.planner.sinks.TableSinkUtils.{inferSinkPhysicalSchema, validateLogicalPhysicalTypesCompatible, validateTableSink}
import org.apache.flink.table.planner.utils.InternalConfigOptions.{TABLE_QUERY_START_EPOCH_TIME, TABLE_QUERY_START_LOCAL_TIME}
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil
import org.apache.flink.table.sinks.TableSink
import org.apache.flink.table.types.utils.LegacyTypeInfoDataTypeConverter
import org.apache.calcite.jdbc.CalciteSchemaBuilder.asRootSchema
import org.apache.calcite.plan.{RelTrait, RelTraitDef}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.hint.RelHint
import org.apache.calcite.tools.FrameworkConfig
import java.lang.{Long => JLong}
import java.util
import java.util.TimeZone
import _root_.scala.collection.JavaConversions._
/**
* Implementation of a [[Planner]]. It supports only streaming use cases.
* (The new [[org.apache.flink.table.sources.InputFormatTableSource]] should work, but will be
* handled as streaming sources, and no batch specific optimizations will be applied).
*
* @param executor instance of [[Executor]], needed to extract
* [[StreamExecutionEnvironment]] for
* [[org.apache.flink.table.sources.StreamTableSource.getDataStream]]
* @param config mutable configuration passed from corresponding [[TableEnvironment]]
* @param functionCatalog catalog of functions
* @param catalogManager manager of catalog meta objects such as tables, views, databases etc.
* @param isStreamingMode Determines if the planner should work in a batch (false}) or
* streaming (true) mode.
*/
abstract class PlannerBase(
    executor: Executor,
    config: TableConfig,
    val functionCatalog: FunctionCatalog,
    val catalogManager: CatalogManager,
    isStreamingMode: Boolean)
  extends Planner {

  // temporary utility until we don't use planner expressions anymore
  functionCatalog.setPlannerTypeInferenceUtil(PlannerTypeInferenceUtilImpl.INSTANCE)

  // Cached parser instance; rebuilt by getParser whenever the SQL dialect changes.
  private var parser: Parser = _
  // Dialect the cached `parser` was created for.
  private var currentDialect: SqlDialect = getTableConfig.getSqlDialect

  // Shared planning context: Calcite schema, type factory, parser/planner factories.
  @VisibleForTesting
  private[flink] val plannerContext: PlannerContext =
    new PlannerContext(
      config,
      functionCatalog,
      catalogManager,
      asRootSchema(new CatalogManagerCalciteSchema(catalogManager, isStreamingMode)),
      getTraitDefs.toList
    )

  // NOTE(review): not referenced in this file — presumably used by subclasses; verify before removing.
  private val sqlExprToRexConverterFactory = plannerContext.getSqlExprToRexConverterFactory

  /** Returns the [[FlinkRelBuilder]] of this TableEnvironment. */
  private[flink] def getRelBuilder: FlinkRelBuilder = {
    val currentCatalogName = catalogManager.getCurrentCatalog
    val currentDatabase = catalogManager.getCurrentDatabase
    plannerContext.createRelBuilder(currentCatalogName, currentDatabase)
  }

  /** Returns the Calcite [[FrameworkConfig]] of this TableEnvironment. */
  @VisibleForTesting
  private[flink] def createFlinkPlanner: FlinkPlannerImpl = {
    val currentCatalogName = catalogManager.getCurrentCatalog
    val currentDatabase = catalogManager.getCurrentDatabase
    plannerContext.createFlinkPlanner(currentCatalogName, currentDatabase)
  }

  /** Returns the [[FlinkTypeFactory]] of this TableEnvironment. */
  private[flink] def getTypeFactory: FlinkTypeFactory = plannerContext.getTypeFactory

  /** Returns specific RelTraitDefs depends on the concrete type of this TableEnvironment. */
  protected def getTraitDefs: Array[RelTraitDef[_ <: RelTrait]]

  /** Returns specific query [[Optimizer]] depends on the concrete type of this TableEnvironment. */
  protected def getOptimizer: Optimizer

  def getTableConfig: TableConfig = config

  def getFlinkContext: FlinkContext = plannerContext.getFlinkContext

  // NOTE(review): assumes `executor` is always an ExecutorBase — confirm for custom Executor impls.
  private[flink] def getExecEnv: StreamExecutionEnvironment = {
    executor.asInstanceOf[ExecutorBase].getExecutionEnvironment
  }

  /** Creates a fresh [[Parser]] for the currently configured SQL dialect via factory discovery. */
  def createNewParser: Parser = {
    val parserProps = Map(TableConfigOptions.TABLE_SQL_DIALECT.key() ->
      getTableConfig.getSqlDialect.name().toLowerCase)
    ComponentFactoryService.find(classOf[ParserFactory], parserProps)
      .create(catalogManager, plannerContext)
  }

  /** Returns the cached parser, recreating it if the SQL dialect changed since last use. */
  override def getParser: Parser = {
    if (parser == null || getTableConfig.getSqlDialect != currentDialect) {
      parser = createNewParser
      currentDialect = getTableConfig.getSqlDialect
    }
    parser
  }

  /**
   * Translates the given modify operations into runtime [[Transformation]]s:
   * RelNode conversion -> optimization -> ExecNode graph -> transformations.
   * Internal query-start config is installed before and removed after translation.
   */
  override def translate(
      modifyOperations: util.List[ModifyOperation]): util.List[Transformation[_]] = {
    validateAndOverrideConfiguration()
    if (modifyOperations.isEmpty) {
      return List.empty[Transformation[_]]
    }
    val relNodes = modifyOperations.map(translateToRel)
    val optimizedRelNodes = optimize(relNodes)
    val execGraph = translateToExecNodeGraph(optimizedRelNodes)
    val transformations = translateToPlan(execGraph)
    cleanupInternalConfigurations()
    transformations
  }

  /**
   * Converts a relational tree of [[ModifyOperation]] into a Calcite relational expression.
   */
  @VisibleForTesting
  private[flink] def translateToRel(modifyOperation: ModifyOperation): RelNode = {
    val dataTypeFactory = catalogManager.getDataTypeFactory
    modifyOperation match {
      case s: UnregisteredSinkModifyOperation[_] =>
        val input = getRelBuilder.queryOperation(s.getChild).build()
        val sinkSchema = s.getSink.getTableSchema
        // validate query schema and sink schema, and apply cast if possible
        val query = validateSchemaAndApplyImplicitCast(
          input,
          catalogManager.getSchemaResolver.resolve(sinkSchema.toSchema),
          null,
          dataTypeFactory,
          getTypeFactory)
        LogicalLegacySink.create(
          query,
          s.getSink,
          "UnregisteredSink",
          ConnectorCatalogTable.sink(s.getSink, !isStreamingMode))

      case collectModifyOperation: CollectModifyOperation =>
        val input = getRelBuilder.queryOperation(modifyOperation.getChild).build()
        DynamicSinkUtils.convertCollectToRel(getRelBuilder, input, collectModifyOperation)

      case catalogSink: CatalogSinkModifyOperation =>
        val input = getRelBuilder.queryOperation(modifyOperation.getChild).build()
        val identifier = catalogSink.getTableIdentifier
        val dynamicOptions = catalogSink.getDynamicOptions
        // The sink may be a legacy TableSink or a new DynamicTableSink; handle both.
        getTableSink(identifier, dynamicOptions).map {
          case (table, sink: TableSink[_]) =>
            // check the logical field type and physical field type are compatible
            val queryLogicalType = FlinkTypeFactory.toLogicalRowType(input.getRowType)
            // validate logical schema and physical schema are compatible
            validateLogicalPhysicalTypesCompatible(table, sink, queryLogicalType)
            // validate TableSink
            validateTableSink(catalogSink, identifier, sink, table.getPartitionKeys)
            // validate query schema and sink schema, and apply cast if possible
            val query = validateSchemaAndApplyImplicitCast(
              input,
              table.getResolvedSchema,
              catalogSink.getTableIdentifier,
              dataTypeFactory,
              getTypeFactory)
            // Dynamic table options are attached as an OPTIONS hint on the sink.
            val hints = new util.ArrayList[RelHint]
            if (!dynamicOptions.isEmpty) {
              hints.add(RelHint.builder("OPTIONS").hintOptions(dynamicOptions).build)
            }
            LogicalLegacySink.create(
              query,
              hints,
              sink,
              identifier.toString,
              table,
              catalogSink.getStaticPartitions.toMap)
          case (table, sink: DynamicTableSink) =>
            DynamicSinkUtils.convertSinkToRel(getRelBuilder, input, catalogSink, sink, table)
        } match {
          case Some(sinkRel) => sinkRel
          case None =>
            throw new TableException(s"Sink ${catalogSink.getTableIdentifier} does not exists")
        }

      case externalModifyOperation: ExternalModifyOperation =>
        val input = getRelBuilder.queryOperation(modifyOperation.getChild).build()
        DynamicSinkUtils.convertExternalToRel(getRelBuilder, input, externalModifyOperation)

      // legacy
      case outputConversion: OutputConversionModifyOperation =>
        val input = getRelBuilder.queryOperation(outputConversion.getChild).build()
        // Map the requested update mode to (emit UPDATE_BEFORE, emit change flag).
        val (needUpdateBefore, withChangeFlag) = outputConversion.getUpdateMode match {
          case UpdateMode.RETRACT => (true, true)
          case UpdateMode.APPEND => (false, false)
          case UpdateMode.UPSERT => (false, true)
        }
        val typeInfo = LegacyTypeInfoDataTypeConverter.toLegacyTypeInfo(outputConversion.getType)
        val inputLogicalType = FlinkTypeFactory.toLogicalRowType(input.getRowType)
        val sinkPhysicalSchema = inferSinkPhysicalSchema(
          outputConversion.getType,
          inputLogicalType,
          withChangeFlag)
        // validate query schema and sink schema, and apply cast if possible
        val query = validateSchemaAndApplyImplicitCast(
          input,
          catalogManager.getSchemaResolver.resolve(sinkPhysicalSchema.toSchema),
          null,
          dataTypeFactory,
          getTypeFactory)
        val tableSink = new DataStreamTableSink(
          FlinkTypeFactory.toTableSchema(query.getRowType),
          typeInfo,
          needUpdateBefore,
          withChangeFlag)
        LogicalLegacySink.create(
          query,
          tableSink,
          "DataStreamTableSink",
          ConnectorCatalogTable.sink(tableSink, !isStreamingMode))

      case _ =>
        throw new TableException(s"Unsupported ModifyOperation: $modifyOperation")
    }
  }

  /** Optimizes several RelNode trees together (allows cross-query subplan sharing). */
  @VisibleForTesting
  private[flink] def optimize(relNodes: Seq[RelNode]): Seq[RelNode] = {
    val optimizedRelNodes = getOptimizer.optimize(relNodes)
    require(optimizedRelNodes.size == relNodes.size)
    optimizedRelNodes
  }

  /** Optimizes a single RelNode tree. */
  @VisibleForTesting
  private[flink] def optimize(relNode: RelNode): RelNode = {
    val optimizedRelNodes = getOptimizer.optimize(Seq(relNode))
    require(optimizedRelNodes.size == 1)
    optimizedRelNodes.head
  }

  /**
   * Converts [[FlinkPhysicalRel]] DAG to [[ExecNodeGraph]],
   * tries to reuse duplicate sub-plans and transforms the graph based on the given processors.
   */
  @VisibleForTesting
  private[flink] def translateToExecNodeGraph(optimizedRelNodes: Seq[RelNode]): ExecNodeGraph = {
    val nonPhysicalRel = optimizedRelNodes.filterNot(_.isInstanceOf[FlinkPhysicalRel])
    if (nonPhysicalRel.nonEmpty) {
      throw new TableException("The expected optimized plan is FlinkPhysicalRel plan, " +
        s"actual plan is ${nonPhysicalRel.head.getClass.getSimpleName} plan.")
    }
    require(optimizedRelNodes.forall(_.isInstanceOf[FlinkPhysicalRel]))
    // Rewrite same rel object to different rel objects
    // in order to get the correct dag (dag reuse is based on object not digest)
    val shuttle = new SameRelObjectShuttle()
    val relsWithoutSameObj = optimizedRelNodes.map(_.accept(shuttle))
    // reuse subplan
    val reusedPlan = SubplanReuser.reuseDuplicatedSubplan(relsWithoutSameObj, config)
    // convert FlinkPhysicalRel DAG to ExecNodeGraph
    val generator = new ExecNodeGraphGenerator()
    val execGraph = generator.generate(reusedPlan.map(_.asInstanceOf[FlinkPhysicalRel]))
    // process the graph
    val context = new ProcessorContext(this)
    val processors = getExecNodeGraphProcessors
    processors.foldLeft(execGraph)((graph, processor) => processor.process(graph, context))
  }

  /** Post-processors applied to the ExecNode graph; provided by concrete planners. */
  protected def getExecNodeGraphProcessors: Seq[ExecNodeGraphProcessor]

  /**
   * Translates an [[ExecNodeGraph]] into a [[Transformation]] DAG.
   *
   * @param execGraph The node graph to translate.
   * @return The [[Transformation]] DAG that corresponds to the node DAG.
   */
  protected def translateToPlan(execGraph: ExecNodeGraph): util.List[Transformation[_]]

  /**
   * Resolves `objectIdentifier` to its resolved catalog table plus a sink instance:
   * either a legacy [[TableSink]] or a [[DynamicTableSink]], depending on the table's options.
   * Returns None when the table does not exist or provides no sink.
   */
  private def getTableSink(
      objectIdentifier: ObjectIdentifier,
      dynamicOptions: JMap[String, String])
    : Option[(ResolvedCatalogTable, Any)] = {
    val optionalLookupResult =
      JavaScalaConversionUtil.toScala(catalogManager.getTable(objectIdentifier))
    if (optionalLookupResult.isEmpty) {
      return None
    }
    val lookupResult = optionalLookupResult.get
    lookupResult.getTable match {
      case connectorTable: ConnectorCatalogTable[_, _] =>
        val resolvedTable = lookupResult.getResolvedTable.asInstanceOf[ResolvedCatalogTable]
        JavaScalaConversionUtil.toScala(connectorTable.getTableSink) match {
          case Some(sink) => Some(resolvedTable, sink)
          case None => None
        }
      case regularTable: CatalogTable =>
        val resolvedTable = lookupResult.getResolvedTable.asInstanceOf[ResolvedCatalogTable]
        // Merge statement-level dynamic options into the table's own options, if any.
        val tableToFind = if (dynamicOptions.nonEmpty) {
          resolvedTable.copy(FlinkHints.mergeTableOptions(dynamicOptions, resolvedTable.getOptions))
        } else {
          resolvedTable
        }
        val catalog = catalogManager.getCatalog(objectIdentifier.getCatalogName)
        val isTemporary = lookupResult.isTemporary
        if (isLegacyConnectorOptions(objectIdentifier, resolvedTable.getOrigin, isTemporary)) {
          val tableSink = TableFactoryUtil.findAndCreateTableSink(
            catalog.orElse(null),
            objectIdentifier,
            tableToFind.getOrigin,
            getTableConfig.getConfiguration,
            isStreamingMode,
            isTemporary)
          Option(resolvedTable, tableSink)
        } else {
          val tableSink = FactoryUtil.createTableSink(
            catalog.orElse(null),
            objectIdentifier,
            tableToFind,
            getTableConfig.getConfiguration,
            getClassLoader,
            isTemporary)
          Option(resolvedTable, tableSink)
        }
      case _ => None
    }
  }

  /**
   * Checks whether the [[CatalogTable]] uses legacy connector sink options.
   */
  private def isLegacyConnectorOptions(
      objectIdentifier: ObjectIdentifier,
      catalogTable: CatalogTable,
      isTemporary: Boolean) = {
    // normalize option keys
    val properties = new DescriptorProperties(true)
    properties.putProperties(catalogTable.getOptions)
    if (properties.containsKey(ConnectorDescriptorValidator.CONNECTOR_TYPE)) {
      true
    } else {
      val catalog = catalogManager.getCatalog(objectIdentifier.getCatalogName)
      try {
        // try to create legacy table source using the options,
        // some legacy factories uses the new 'connector' key
        TableFactoryUtil.findAndCreateTableSink(
          catalog.orElse(null),
          objectIdentifier,
          catalogTable,
          getTableConfig.getConfiguration,
          isStreamingMode,
          isTemporary)
        // success, then we will use the legacy factories
        true
      } catch {
        // fail, then we will use new factories
        case _: Throwable => false
      }
    }
  }

  /** Serializes the optimized ExecNode graph of the given operations to a JSON plan (streaming only). */
  override def getJsonPlan(modifyOperations: util.List[ModifyOperation]): String = {
    if (!isStreamingMode) {
      throw new TableException("Only streaming mode is supported now.")
    }
    validateAndOverrideConfiguration()
    val relNodes = modifyOperations.map(translateToRel)
    val optimizedRelNodes = optimize(relNodes)
    val execGraph = translateToExecNodeGraph(optimizedRelNodes)
    val jsonPlan = ExecNodeGraph.createJsonPlan(execGraph, createSerdeContext)
    cleanupInternalConfigurations()
    jsonPlan
  }

  /** Deserializes a JSON plan back into Transformations (streaming only). */
  override def translateJsonPlan(jsonPlan: String): util.List[Transformation[_]] = {
    if (!isStreamingMode) {
      throw new TableException("Only streaming mode is supported now.")
    }
    validateAndOverrideConfiguration()
    val execGraph = ExecNodeGraph.createExecNodeGraph(jsonPlan, createSerdeContext)
    val transformations = translateToPlan(execGraph)
    cleanupInternalConfigurations()
    transformations
  }

  /** Builds the context used for (de)serializing ExecNode graphs. */
  protected def createSerdeContext: SerdeContext = {
    val planner = createFlinkPlanner
    new SerdeContext(
      planner.config.getContext.asInstanceOf[FlinkContext],
      getClassLoader,
      plannerContext.getTypeFactory,
      planner.operatorTable
    )
  }

  // User classloader: the thread context classloader set by the TableEnvironment.
  private def getClassLoader: ClassLoader = {
    Thread.currentThread().getContextClassLoader
  }

  /**
   * Different planner has different rules. Validate the planner and runtime mode is consistent with
   * the configuration before planner do optimization with [[ModifyOperation]] or other works.
   */
  protected def validateAndOverrideConfiguration(): Unit = {
    val configuration = config.getConfiguration
    if (!configuration.get(TableConfigOptions.TABLE_PLANNER).equals(PlannerType.BLINK)) {
      throw new IllegalArgumentException(
        "Mismatch between configured planner and actual planner. " +
          "Currently, the 'table.planner' can only be set when instantiating the " +
          "table environment. Subsequent changes are not supported. " +
          "Please instantiate a new TableEnvironment if necessary.");
    }
    // Add query start time to TableConfig, these config are used internally,
    // these configs will be used by temporal functions like CURRENT_TIMESTAMP,LOCALTIMESTAMP.
    val epochTime :JLong = System.currentTimeMillis()
    configuration.set(TABLE_QUERY_START_EPOCH_TIME, epochTime)
    val localTime :JLong = epochTime +
      TimeZone.getTimeZone(config.getLocalTimeZone).getOffset(epochTime)
    configuration.set(TABLE_QUERY_START_LOCAL_TIME, localTime)
    getExecEnv.configure(
      configuration,
      Thread.currentThread().getContextClassLoader)
    // Use config parallelism to override env parallelism.
    val defaultParallelism = getTableConfig.getConfiguration.getInteger(
      ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM)
    if (defaultParallelism > 0) {
      getExecEnv.getConfig.setParallelism(defaultParallelism)
    }
  }

  /**
   * Cleanup all internal configuration after plan translation finished.
   */
  protected def cleanupInternalConfigurations(): Unit = {
    val configuration = config.getConfiguration
    configuration.removeConfig(TABLE_QUERY_START_EPOCH_TIME)
    configuration.removeConfig(TABLE_QUERY_START_LOCAL_TIME)
  }
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/PlannerBase.scala | Scala | apache-2.0 | 21,234 |
package com.realizationtime.btdogg.parsing
import java.nio.charset.StandardCharsets
import java.nio.file.Path
import java.util
import com.realizationtime.btdogg.commons.FileEntry.{TorrentDir, TorrentFile}
import com.realizationtime.btdogg.commons.{FileEntry, ParsingResult, TKey, TorrentData}
import the8472.mldht.cli.TorrentInfo
import scala.collection.JavaConverters._
import scala.compat.java8.OptionConverters
import scala.util.{Failure, Success}
object FileParser {
def parse(key: TKey, path: Path): ParsingResult[TorrentData] = {
try {
val ti: TorrentInfo = new TorrentInfo(path)
ti.decode()
val name = OptionConverters.toScala(ti.name())
val totalSize: Long = ti.totalSize()
val flatFiles: List[FlatFile] = ti.files().asScala.toList.asInstanceOf[List[java.util.Map[String, Any]]]
.map(_.asScala.toMap)
.map(FlatFile(_))
.filter(_.isDefined)
.map(_.get)
if (flatFiles.isEmpty) {
if (name.isDefined)
ParsingResult(key, path, Success(TorrentData.singleFile(name, totalSize)))
else
ParsingResult(key, path, Failure(NoFilesFound(key)))
} else {
val treeFiles: List[FileEntry] = flatToTree(flatFiles)
ParsingResult(key, path, Success(TorrentData(name, totalSize, treeFiles)))
}
} catch {
case ex: Throwable => ParsingResult(key, path, Failure(ex))
}
}
  // One file from the torrent: its path components (outermost first) and size in bytes.
  private case class FlatFile(path: List[String], size: Long) {
    // Same file viewed one directory level deeper (first path component removed);
    // used by flatToTree when recursing into a subdirectory.
    lazy val dropHead: FlatFile = FlatFile(path.tail, size)
  }
  private object FlatFile {
    /** Extracts a [[FlatFile]] from one bencoded file-info dictionary.
      * Prefers the "path.utf-8" key over "path"; returns None when neither is present
      * or the path list is empty. A missing "length" defaults to 0 bytes.
      */
    def apply(fileInfo: Map[String, Any]): Option[FlatFile] = {
      val pathBytes: List[Array[Byte]] = fileInfo.get("path.utf-8").orElse(fileInfo.get("path"))
        .map(_.asInstanceOf[util.List[Array[Byte]]].asScala.toList)
        .getOrElse(List())
      if (pathBytes.isEmpty)
        None
      else {
        // Path components arrive as raw bytes from the bencoded file; decode as UTF-8.
        val path: List[String] = pathBytes.map(new String(_, StandardCharsets.UTF_8))
        val size = fileInfo.get("length").map(_.asInstanceOf[Long]).getOrElse(0L)
        Some(FlatFile(path, size))
      }
    }
  }
  /** Signals a torrent that declares neither a file list nor a name. */
  case class NoFilesFound(key: TKey) extends RuntimeException(s"No files found in torrent: $key")
def flatToTree(flatFiles: List[FlatFile]): List[FileEntry] = {
val classed = flatFiles.groupBy(f => {
if (f.path.length == 1)
classOf[TorrentFile]
else
classOf[TorrentDir]
}).withDefaultValue(List())
val files: List[TorrentFile] = classed(classOf[TorrentFile]).map(flat => TorrentFile(flat.path.head, flat.size)).sortBy(_.name)
val dirs = classed(classOf[TorrentDir])
val dirsTree: List[TorrentDir] = dirs.groupBy(_.path.head)
.mapValues(_.map(_.dropHead))
.toList
.sortBy(_._1)
.map { case (dirName, dirContent) =>
TorrentDir(dirName, flatToTree(dirContent))
}
files ++ dirsTree
}
} | bwrega/btdogg | src/main/scala/com/realizationtime/btdogg/parsing/FileParser.scala | Scala | mit | 2,879 |
package tests
import org.scalatest.{FlatSpec, MustMatchers, OptionValues, WordSpecLike}
import razie.diesel.dom.{AExprIdent, CExpr, WTypes}
import razie.wiki.parser.ExprParser
/** A simple parser for expressions */
class SimpleExprParser extends ExprParser {
  // Parses the entire input as a single expression AST.
  // Throws IllegalArgumentException on any parse failure.
  def apply(input: String) = {
    parseAll(expr, input) match {
      case Success(value, _) => value
      // don't change the format of this message
      case NoSuccess(msg, next) => throw new IllegalArgumentException("CANNOT PARSE: "+input)
    }
  }
}
/**
* run like: sbt 'testOnly *TestExprParser'
*/
class TestExprParser extends WordSpecLike with MustMatchers with OptionValues {

  // Helper: parse `s` with a fresh SimpleExprParser (throws on parse failure).
  def p(s:String) = (new SimpleExprParser).apply(s)

  "CExpr parser" should {
    "parse numbers" in {
      assert(p( "3" ).isInstanceOf[CExpr])
      assert(p( "3" ).getType == WTypes.NUMBER)
    }
    "parse strings" in {
      assert(p( "\\"a string\\"" ).isInstanceOf[CExpr])
      assert(p( "\\"a string\\"" ).getType == WTypes.STRING)
    }
  }

  "id parser" should {
    "parse id" in {
      // Both bare and quoted identifiers must parse to AExprIdent.
      assert(p( "anid" ).isInstanceOf[AExprIdent])
      assert(p( "'an id'" ).isInstanceOf[AExprIdent])
    }
  }
}
| razie/wikireactor | diesel/src/test/scala/tests/TestExprParser.scala | Scala | apache-2.0 | 1,169 |
package org.apache.spark.ml
import org.apache.spark.ml.classification.{DecisionTreeClassificationModel, LogisticRegressionModel}
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.regression.GeneralizedLinearRegressionModel
import org.apache.spark.ml.regression.DecisionTreeRegressionModel
import org.apache.spark.ml.tree._
import org.apache.spark.mllib.random.RandomDataGenerator
import org.apache.spark.mllib.tree.impurity.ImpurityCalculator
/**
* Helper for creating MLlib models which have private constructors.
*/
object ModelBuilder {

  /** Builds a [[LogisticRegressionModel]] (its constructor is private to Spark ML).
    * Uses the fixed uid "lr".
    */
  def newLogisticRegressionModel(
      coefficients: Vector,
      intercept: Double): LogisticRegressionModel = {
    new LogisticRegressionModel("lr", coefficients, intercept)
  }

  /** Builds a [[GeneralizedLinearRegressionModel]] with the fixed uid "glr-uid". */
  def newGLR(
      coefficients: Vector,
      intercept: Double): GeneralizedLinearRegressionModel =
    new GeneralizedLinearRegressionModel("glr-uid", coefficients, intercept)

  /** Builds a classification model backed by a random balanced decision tree.
    *
    * @param depth        tree depth (must be <= featureArity.length, enforced downstream).
    * @param numClasses   number of classes; must be >= 2.
    * @param featureArity per-feature arity: 0 = continuous, >= 2 = categorical.
    * @param seed         RNG seed for reproducible tree structure.
    */
  def newDecisionTreeClassificationModel(
      depth: Int,
      numClasses: Int,
      featureArity: Array[Int],
      seed: Long): DecisionTreeClassificationModel = {
    require(numClasses >= 2, s"DecisionTreeClassificationModel requires numClasses >= 2," +
      s" but was given $numClasses")
    val rootNode = TreeBuilder.randomBalancedDecisionTree(depth = depth, labelType = numClasses,
      featureArity = featureArity, seed = seed)
    new DecisionTreeClassificationModel(rootNode, numFeatures = featureArity.length,
      numClasses = numClasses)
  }

  /** Builds a regression model backed by a random balanced decision tree
    * (labelType = 0 selects regression in TreeBuilder).
    */
  def newDecisionTreeRegressionModel(
      depth: Int,
      featureArity: Array[Int],
      seed: Long): DecisionTreeRegressionModel = {
    val rootNode = TreeBuilder.randomBalancedDecisionTree(depth = depth, labelType = 0,
      featureArity = featureArity, seed = seed)
    new DecisionTreeRegressionModel(rootNode, numFeatures = featureArity.length)
  }
}
/**
* Helpers for creating random decision trees.
*/
object TreeBuilder {

  /**
   * Generator for a pair of distinct class labels from the set {0,...,numClasses-1}.
   * Pairs are useful for trees to make sure sibling leaf nodes make different predictions.
   * @param numClasses Number of classes.
   */
  private class ClassLabelPairGenerator(val numClasses: Int)
    extends RandomDataGenerator[Pair[Double, Double]] {

    require(numClasses >= 2,
      s"ClassLabelPairGenerator given label numClasses = $numClasses, but numClasses should be >= 2.")

    private val rng = new java.util.Random()

    // Rejection-samples the second label until it differs from the first.
    override def nextValue(): Pair[Double, Double] = {
      val left = rng.nextInt(numClasses)
      var right = rng.nextInt(numClasses)
      while (right == left) {
        right = rng.nextInt(numClasses)
      }
      new Pair[Double, Double](left, right)
    }

    override def setSeed(seed: Long): Unit = {
      rng.setSeed(seed)
    }

    // Note: the copy shares no RNG state and is unseeded until setSeed is called.
    override def copy(): ClassLabelPairGenerator = new ClassLabelPairGenerator(numClasses)
  }

  /**
   * Generator for a pair of real-valued labels.
   * Pairs are useful for trees to make sure sibling leaf nodes make different predictions.
   */
  private class RealLabelPairGenerator() extends RandomDataGenerator[Pair[Double, Double]] {

    private val rng = new java.util.Random()

    override def nextValue(): Pair[Double, Double] =
      new Pair[Double, Double](rng.nextDouble(), rng.nextDouble())

    override def setSeed(seed: Long): Unit = {
      rng.setSeed(seed)
    }

    override def copy(): RealLabelPairGenerator = new RealLabelPairGenerator()
  }

  /**
   * Creates a random decision tree structure.
   * @param depth Depth of tree to build. Must be <= numFeatures.
   * @param labelType Value 0 indicates regression. Integers >= 2 indicate numClasses for
   *                  classification.
   * @param featureArity Array of length numFeatures indicating feature type.
   *                     Value 0 indicates continuous feature.
   *                     Other values >= 2 indicate a categorical feature,
   *                     where the value is the number of categories.
   * @return root node of tree
   */
  def randomBalancedDecisionTree(
      depth: Int,
      labelType: Int,
      featureArity: Array[Int],
      seed: Long): Node = {
    require(depth >= 0, s"randomBalancedDecisionTree given depth < 0.")
    val numFeatures = featureArity.length
    require(depth <= numFeatures,
      s"randomBalancedDecisionTree requires depth <= featureArity.size," +
        s" but depth = $depth and featureArity.size = $numFeatures")
    val isRegression = labelType == 0
    if (!isRegression) {
      require(labelType >= 2, s"labelType must be >= 2 for classification. 0 indicates regression.")
    }
    // Seed a master RNG, then derive the label generator's seed from it so the whole
    // tree is reproducible from `seed` alone.
    val rng = new scala.util.Random()
    rng.setSeed(seed)
    val labelGenerator = if (isRegression) {
      new RealLabelPairGenerator()
    } else {
      new ClassLabelPairGenerator(labelType)
    }
    labelGenerator.setSeed(rng.nextLong)

    // We use a dummy impurityCalculator for all nodes.
    val impurityCalculator = if (isRegression) {
      ImpurityCalculator.getCalculator("variance", Array.fill[Double](3)(0.0))
    } else {
      ImpurityCalculator.getCalculator("gini", Array.fill[Double](labelType)(0.0))
    }
    randomBalancedDecisionTreeHelper(depth, featureArity, impurityCalculator,
      labelGenerator, Set.empty, rng)
  }

  /**
   * Create an internal node. Either create the leaf nodes beneath it, or recurse as needed.
   * @param subtreeDepth Depth of subtree to build. Depth 0 means this is a leaf node.
   * @param featureArity Indicates feature type. Value 0 indicates continuous feature.
   *                     Other values >= 2 indicate a categorical feature,
   *                     where the value is the number of categories.
   * @param impurityCalculator Dummy impurity calculator to use at all tree nodes
   * @param usedFeatures Features appearing in the path from the tree root to the node
   *                     being constructed.
   * @param labelGenerator Generates pairs of distinct labels.
   * @return
   */
  private def randomBalancedDecisionTreeHelper(
      subtreeDepth: Int,
      featureArity: Array[Int],
      impurityCalculator: ImpurityCalculator,
      labelGenerator: RandomDataGenerator[Pair[Double, Double]],
      usedFeatures: Set[Int],
      rng: scala.util.Random): Node = {
    if (subtreeDepth == 0) {
      // This case only happens for a depth 0 tree.
      return new LeafNode(prediction = 0.0, impurity = 0.0, impurityStats = impurityCalculator)
    }

    val numFeatures = featureArity.length
    // Should not happen.
    assert(usedFeatures.size < numFeatures, s"randomBalancedDecisionTreeSplitNode ran out of " +
      s"features for splits.")

    // Make node internal: pick a feature not already used on the path to this node.
    var feature: Int = rng.nextInt(numFeatures)
    while (usedFeatures.contains(feature)) {
      feature = rng.nextInt(numFeatures)
    }
    val split: Split = if (featureArity(feature) == 0) {
      // continuous feature
      new ContinuousSplit(featureIndex = feature, threshold = rng.nextDouble())
    } else {
      // categorical feature
      // Put nCatsSplit categories on left, and the rest on the right.
      // nCatsSplit is in {1,...,arity-1}.
      val nCatsSplit = rng.nextInt(featureArity(feature) - 1) + 1
      val splitCategories: Array[Double] =
        rng.shuffle(Range(0,featureArity(feature)).toList).toArray.map(_.toDouble).take(nCatsSplit)
      new CategoricalSplit(featureIndex = feature,
        _leftCategories = splitCategories, numCategories = featureArity(feature))
    }

    val (leftChild: Node, rightChild: Node) = if (subtreeDepth == 1) {
      // Add leaf nodes. Assign these jointly so they make different predictions.
      val predictions = labelGenerator.nextValue()
      val leftChild = new LeafNode(prediction = predictions._1, impurity = 0.0,
        impurityStats = impurityCalculator)
      val rightChild = new LeafNode(prediction = predictions._2, impurity = 0.0,
        impurityStats = impurityCalculator)
      (leftChild, rightChild)
    } else {
      val leftChild = randomBalancedDecisionTreeHelper(subtreeDepth - 1, featureArity,
        impurityCalculator, labelGenerator, usedFeatures + feature, rng)
      val rightChild = randomBalancedDecisionTreeHelper(subtreeDepth - 1, featureArity,
        impurityCalculator, labelGenerator, usedFeatures + feature, rng)
      (leftChild, rightChild)
    }
    new InternalNode(prediction = 0.0, impurity = 0.0, gain = 0.0, leftChild = leftChild,
      rightChild = rightChild, split = split, impurityStats = impurityCalculator)
  }
}
| levyx/spark-sql-perf | src/main/scala/org/apache/spark/ml/ModelBuilder.scala | Scala | apache-2.0 | 8,574 |
package com.ox.bigdata.util.ftp
import java.io._
import com.ox.bigdata.util.log.LogSupport
import com.ox.bigdata.util.sftp.SFTPManager
import org.apache.commons.net.PrintCommandListener
import org.apache.commons.net.ftp.{FTP, FTPClient, FTPFile, FTPReply}
/** Strategy for uploading a local file to a remote server. */
trait UploadSupport {
  // Returns true when the upload completed successfully.
  def upload(serverinfo: FtpServerInfo, local: String, remote: String): Boolean
}
/** Upload strategy backed by plain FTP, delegating to a per-call FtpManager. */
trait FTPUploadSupport extends UploadSupport {
  override def upload(serverinfo: FtpServerInfo, local: String, remote: String): Boolean = {
    FtpManager(serverinfo).upload(local, remote)
  }
}
class FtpManager(val server: String, val port: String, val user: String, val password: String) extends LogSupport {
  /** Lists the names of entries directly under `parentPath` on the FTP server.
    *
    * @param parentPath     remote directory to list.
    * @param withParentPath when true, each returned name is prefixed with `parentPath`.
    * @param timeout        FTP data-connection timeout (milliseconds).
    * @return entry names, or an empty array when the directory is empty or listing fails
    *         (connection/login errors are logged by `using`, not thrown).
    */
  def getFileNames(parentPath: String, withParentPath: Boolean = false,
                   timeout: Int = FtpManager.FTP_DATA_TIMEOUT_DEFAULT): Array[String] = {
    var result = Array[String]()
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        ftp.setDataTimeout(timeout)
        // Normalize to a trailing slash so prefixed names form valid remote paths.
        val vparentPath = if (parentPath.endsWith("/")) parentPath else parentPath + "/"
        val files = ftp.listFiles(vparentPath)
        if (files != null && files.nonEmpty) {
          result = files.map(e =>
            if (withParentPath) vparentPath + e.getName else e.getName)
        }
        //close(ftp)
    }
    result
  }
  /** Uploads the local file at `local` to remote path `remote`,
    * creating remote parent directories as needed.
    * @return true when the transfer succeeded; false on failure
    *         (connection/login errors are logged by `using`, not thrown).
    */
  def upload(local: String, remote: String): Boolean = {
    var isUploadSuccess: Boolean = false
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        isUploadSuccess = upload(ftp, local, remote)
    }
    isUploadSuccess
  }
  /** Uploads the contents of `localStream` to remote path `remote`.
    * The stream is closed by the private upload on the success path;
    * the transfer result is discarded.
    */
  def upload(localStream: InputStream, remote: String): Unit = {
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        upload(ftp, localStream, remote)
    }
  }
  /** Downloads remote file `src` into local directory `dst`
    * (binary mode, passive connection, 1 MiB buffer).
    */
  def download(src: String, dst: String, timeout: Int = FtpManager.FTP_DATA_TIMEOUT_DEFAULT): Unit = {
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        ftp.setFileType(FTP.BINARY_FILE_TYPE)
        ftp.enterLocalPassiveMode()
        ftp.setBufferSize(1024 * 1024)
        ftp.setDataTimeout(timeout)
        download(ftp, src, dst)
    }
  }
  /** Downloads every remote file listed in `src` into local directory `dst`
    * over a single FTP session. Per-file retrieveFile results are discarded,
    * so individual failures are not reported.
    */
  def downloadFiles(src: List[String], dst: String, timeout: Int = FtpManager.FTP_DATA_TIMEOUT_DEFAULT): Unit = {
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        ftp.setFileType(FTP.BINARY_FILE_TYPE)
        ftp.enterLocalPassiveMode()
        ftp.setBufferSize(1024 * 1024)
        ftp.setDataTimeout(timeout)
        src.map(e => {
          // Local file name = last path segment of the remote path ('/' or '\' separated).
          var index = e.lastIndexOf('/')
          if (index == -1) index = e.lastIndexOf('\\')
          val fileName = e.substring(index + 1, e.length)
          val localFile = new File(dst + "/" + fileName)
          val is = new FileOutputStream(localFile)
          ftp.retrieveFile(e, is)
          // if (!localFile.exists()) {
          // log.error("Failed to download file " + e)
          // }
          is.close()
        })
        // NOTE(review): redundant — `using` also disconnects in its finally block.
        close(ftp)
    }
  }
  /** Downloads all files with extension `ext` found under `srcDir` into `baseDstDir`,
    * delegating to the private recursive overload (defined further down this class).
    */
  def downloadByExt(srcDir: String, baseDstDir: String, ext: String): Unit = {
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        downloadByExt(ftp, "", srcDir, baseDstDir, ext)
    }
  }
  /** Deletes the remote file at `pathname`.
    * Connection/login errors are logged by `using`, not thrown; the deletion
    * result itself is discarded.
    */
  def delete(pathname: String): Unit = {
    using {
      ftp =>
        connect(ftp)
        login(ftp)
        delete(ftp, pathname)
    }
  }
private def delete(ftp: FTPClient, pathname: String): Boolean = {
ftp.deleteFile(pathname)
ftp.logout()
}
  /** Runs `f` with a fresh [[FTPClient]], logging (not rethrowing) IO, connection
    * and login failures, and always disconnecting afterwards.
    */
  private def using(f: FTPClient => Unit): Unit = {
    val ftp = new FTPClient()
    try {
      f(ftp)
    } catch {
      case e: IOException =>
        e.printStackTrace()
        LOG.error(s"Could not connect to server due to ${e.getMessage}")
      case e: ConnectionException => LOG.error(e.getMessage)
      case e: LoginException => LOG.error(e.getMessage)
    } finally {
      // Always release the control connection, even after a failure.
      close(ftp)
    }
  }
  /** Disconnects the client if it is connected, swallowing (but printing) IO errors.
    * Safe to call multiple times.
    */
  private def close(ftp: FTPClient) {
    if (ftp.isConnected) {
      try {
        ftp.disconnect()
      }
      catch {
        case e: IOException =>
          e.printStackTrace()
      }
    }
  }
  /** Creates every parent directory of `remote` on the server, one MKD per level.
    * @return `remote` normalized: leading '.' characters and one leading '/' stripped.
    */
  private def MakeRemoteDirectory(ftp: FTPClient, remote: String): String = {
    // Strips leading '.' characters, then at most one leading '/'.
    def remotepathVerified(path: String): String = path.take(1) match {
      case "." => remotepathVerified(path.drop(1))
      case "/" => path.drop(1)
      case _ => path
    }
    val checkedRemotePath = remotepathVerified(remote)
    val directories = checkedRemotePath.split('/')
    // Create "./a", then "./a/b", ... for every component except the final file name.
    // makeDirectory results are ignored (existing directories simply fail the MKD).
    directories.init.foldLeft(".")((dir, a) => {
      ftp.makeDirectory(dir + "/" + a)
      dir + "/" + a
    })
    checkedRemotePath
  }
private def upload(ftp: FTPClient, local: String, remote: String): Boolean = {
val input = new FileInputStream(local)
upload(ftp, input, remote)
}
private def upload(ftp: FTPClient, localStream: InputStream, remote: String): Boolean = {
ftp.setFileType(FTP.BINARY_FILE_TYPE)
val remotePath = MakeRemoteDirectory(ftp, remote)
val isUploadSuccessful = ftp.storeFile(s"./$remotePath", localStream)
localStream.close()
ftp.noop() // check that control connection is working OK
ftp.logout()
isUploadSuccessful
}
private def download(ftp: FTPClient, src: String, dst: String): Boolean = {
ftp.setFileType(FTP.BINARY_FILE_TYPE)
var index = src.lastIndexOf('/')
if (index == -1) index = src.lastIndexOf('\\')
val fileName = src.substring(index + 1, src.length)
val localFile = new File(dst + "/" + fileName)
val localStream = new FileOutputStream(localFile)
val isDownloadSuccessful = ftp.retrieveFile(src, localStream)
localStream.close()
ftp.noop() // check that control connection is working OK
ftp.logout()
isDownloadSuccessful
}
private def login(ftp: FTPClient): Unit = {
if (!ftp.login(user, password)) {
ftp.logout()
throw new LoginException("FTP server refused login.")
}
}
private def connect(ftp: FTPClient): Unit = {
if (port == "") {
ftp.connect(server)
} else {
ftp.connect(server, port.toInt)
}
val reply = ftp.getReplyCode
ftp.addProtocolCommandListener(new PrintCommandListener(new PrintWriter(System.out))) //Open the debug info
if (!FTPReply.isPositiveCompletion(reply)) {
throw new ConnectionException("Ftp server refused connection")
}
}
private def getFileName(fullPath: String): String = {
var index = fullPath.lastIndexOf('/')
if (index == -1) index = fullPath.lastIndexOf('\\')
fullPath.substring(index + 1, fullPath.length)
}
  /**
   * Recursively downloads all files whose names end with `ext` from the remote
   * directory tree, mirroring the directory layout under `baseDstDir`.
   *
   * @param relativePath path of the current remote directory relative to the starting directory
   * @param srcDir       remote directory to descend into
   * @param baseDstDir   local base directory downloaded files are written under
   * @param ext          file-name suffix filter, e.g. ".csv"
   */
  private def downloadByExt(ftp: FTPClient, relativePath: String, srcDir: String, baseDstDir: String, ext: String): Unit = {
    // Descend into srcDir one segment at a time; empty segments (from a
    // leading '/') are skipped.
    val array = srcDir.split("/").toList
    array.foreach(x => if (x != "") ftp.changeWorkingDirectory(x))
    ftp.listFiles.foreach(f => {
      if (f.isFile) {
        if (f.getName.endsWith(ext)) {
          val dst = baseDstDir + "/" + relativePath
          val src = f.getName
          val file = new File(dst + "/" + f.getName)
          // Only fetch files that do not already exist locally, creating the
          // mirrored local directory first.
          if (!file.exists) {
            if (!file.getParentFile.exists) file.getParentFile.mkdirs
            // NOTE(review): download() calls ftp.logout() after the transfer,
            // which appears to end the session for any remaining files in this
            // loop and for the recursion below — confirm intended behavior.
            download(ftp, src, dst)
          }
          //ftp.deleteFile(src) // delete file on the FTP after download to local
        }
      }
      else if (f.isDirectory) {
        // Recurse into the sub-directory, extending the relative path so the
        // local mirror keeps the same layout.
        val relativeDir = if (relativePath == "") f.getName else relativePath + "/" + f.getName
        downloadByExt(ftp, relativeDir, f.getName, baseDstDir, ext)
      }
    })
    // Step back up one level so the caller's working directory is restored
    // (the recursive calls descend exactly one segment).
    ftp.changeToParentDirectory()
  }
def deleteDirectory(remote: String): Int = {
var isDeleteDirectorySuccess: Int = 0
using {
ftp =>
connect(ftp)
login(ftp)
ftp.setFileType(FTP.BINARY_FILE_TYPE)
val remoteWithoutPoint: String = remote.take(1) match {
case "." => remote.drop(1)
case _ => remote
}
//ftp.deleteFile(s"./$remoteWithoutPoint")//delete file (nonempty dir)
//ftp.rmd(s"./$remoteWithoutPoint")//delete dir
isDeleteDirectorySuccess = deleteDirectory(ftp, remoteWithoutPoint)
ftp.noop() // check that control connection is working OK
ftp.logout()
}
isDeleteDirectorySuccess
}
def listDirectories(remote: String): Array[String] = {
var result: List[FTPFile] = Nil
using {
ftp =>
connect(ftp)
login(ftp)
val dirs = ftp.listDirectories(remote)
ftp.noop() // check that control connection is working OK
ftp.logout()
result = dirs.toList
}
result.map(file => file.getName).toArray
}
def listFiles(remote: String): Array[String] = {
var result: List[FTPFile] = Nil
using {
ftp =>
connect(ftp)
login(ftp)
val files = ftp.listFiles(remote)
ftp.noop()
ftp.logout()
result = files.toList
}
result.map(file => file.getName).toArray
}
  // Depth-first delete of everything under `remoteWithoutPoint`, then the
  // directory itself. Returns the FTP reply code of the final RMD command.
  private def deleteDirectory(ftp: FTPClient, remoteWithoutPoint: String): Int = {
    ftp.listFiles(s"./$remoteWithoutPoint").foreach(f => {
      if (f.isDirectory) {
        val fileName = f.getName
        val filePath = s"$remoteWithoutPoint/$fileName"
        // Empty the sub-directory first, then remove it.
        deleteDirectory(ftp, filePath)
        ftp.rmd(s"./$filePath")
        // NOTE(review): this changes into a path that was just removed, with
        // the directory name appended twice — looks wrong; confirm whether
        // this call is needed at all.
        ftp.changeWorkingDirectory(s"./$filePath/$fileName")
      }
      else if (f.isFile) {
        val fileName = f.getName
        ftp.deleteFile(s"./$remoteWithoutPoint/$fileName")
      }
    })
    val isDeleteDirectorySuccess = ftp.rmd(s"./$remoteWithoutPoint")
    isDeleteDirectorySuccess
  }
}
/** Signals that the FTP server refused or failed the connection attempt. */
private class ConnectionException(message: String) extends Exception(message)
/** Signals that the FTP server rejected the supplied credentials. */
private class LoginException(message: String) extends Exception(message)
/** Factory and shared constants for [[FtpManager]] instances. */
object FtpManager {
  // Default data-connection timeout in milliseconds (30 seconds).
  val FTP_DATA_TIMEOUT_DEFAULT = 1000 * 30
  //time out 30 sec
  // NOTE(review): always constructs an SFTPManager regardless of the server
  // info — confirm this is the intended default implementation.
  def apply(ftpInfo: FtpServerInfo): FtpManager = new SFTPManager(ftpInfo.ip, ftpInfo.port, ftpInfo.user, ftpInfo.password)
}
/**
 * Connection settings for an FTP server.
 *
 * @param user     login user name
 * @param password login password
 * @param ip       server host name or IP address
 * @param port     server port as a string; empty means the protocol default
 */
case class FtpServerInfo(user: String,
                         password: String,
                         ip: String,
                         port: String)
/**
 * FTP job configuration: server connection settings plus the remote file path
 * the job operates on.
 */
case class FtpConfigInfo(ftpServerInfo: FtpServerInfo,
                         filePathInfo: String)
| black-ox/simple | src/main/scala/com/ox/bigdata/util/ftp/FtpManager.scala | Scala | apache-2.0 | 10,177 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.util.Locale
import scala.collection.mutable
import org.apache.spark.sql.{AnalysisException, SaveMode}
import org.apache.spark.sql.catalog.v2.{CatalogPlugin, Identifier, LookupCatalog, TableCatalog}
import org.apache.spark.sql.catalog.v2.expressions.Transform
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.CastSupport
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogTable, CatalogTableType, CatalogUtils}
import org.apache.spark.sql.catalyst.plans.logical.{CreateTableAsSelect, CreateV2Table, DropTable, LogicalPlan}
import org.apache.spark.sql.catalyst.plans.logical.sql.{AlterTableAddColumnsStatement, AlterTableSetLocationStatement, AlterTableSetPropertiesStatement, AlterTableUnsetPropertiesStatement, AlterViewSetPropertiesStatement, AlterViewUnsetPropertiesStatement, CreateTableAsSelectStatement, CreateTableStatement, DropTableStatement, DropViewStatement, QualifiedColType}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsCommand, AlterTableSetLocationCommand, AlterTableSetPropertiesCommand, AlterTableUnsetPropertiesCommand, DropTableCommand}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.v2.TableProvider
import org.apache.spark.sql.types.{HIVE_TYPE_STRING, HiveStringType, MetadataBuilder, StructField, StructType}
/**
 * Analyzer rule that converts parsed SQL "statement" plans into executable
 * logical plans: statements whose provider is a v1 source become legacy v1
 * commands, while the rest are routed to a v2 [[TableCatalog]] plan (using
 * the catalog named in the identifier, or the session default catalog).
 *
 * @param conf        active SQL configuration
 * @param findCatalog resolves a catalog name to its [[CatalogPlugin]]
 */
case class DataSourceResolution(
    conf: SQLConf,
    findCatalog: String => CatalogPlugin)
  extends Rule[LogicalPlan] with CastSupport with LookupCatalog {
  import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
  override protected def lookupCatalog(name: String): CatalogPlugin = findCatalog(name)
  // The configured default v2 catalog, if any; consulted when the identifier
  // does not name a catalog explicitly.
  def defaultCatalog: Option[CatalogPlugin] = conf.defaultV2Catalog.map(findCatalog)
  override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    // CREATE TABLE with a v1 write provider: build a v1 CatalogTable command.
    case CreateTableStatement(
        AsTableIdentifier(table), schema, partitionCols, bucketSpec, properties,
        V1WriteProvider(provider), options, location, comment, ifNotExists) =>
      val tableDesc = buildCatalogTable(table, schema, partitionCols, bucketSpec, properties,
        provider, options, location, comment, ifNotExists)
      val mode = if (ifNotExists) SaveMode.Ignore else SaveMode.ErrorIfExists
      CreateTable(tableDesc, mode, None)
    case create: CreateTableStatement =>
      // the provider was not a v1 source, convert to a v2 plan
      val CatalogObjectIdentifier(maybeCatalog, identifier) = create.tableName
      val catalog = maybeCatalog.orElse(defaultCatalog)
        .getOrElse(throw new AnalysisException(
          s"No catalog specified for table ${identifier.quoted} and no default catalog is set"))
        .asTableCatalog
      convertCreateTable(catalog, identifier, create)
    // CTAS with a v1 write provider: the schema comes from the query, so an
    // empty StructType is passed to the catalog table.
    case CreateTableAsSelectStatement(
        AsTableIdentifier(table), query, partitionCols, bucketSpec, properties,
        V1WriteProvider(provider), options, location, comment, ifNotExists) =>
      val tableDesc = buildCatalogTable(table, new StructType, partitionCols, bucketSpec,
        properties, provider, options, location, comment, ifNotExists)
      val mode = if (ifNotExists) SaveMode.Ignore else SaveMode.ErrorIfExists
      CreateTable(tableDesc, mode, Some(query))
    case create: CreateTableAsSelectStatement =>
      // the provider was not a v1 source, convert to a v2 plan
      val CatalogObjectIdentifier(maybeCatalog, identifier) = create.tableName
      val catalog = maybeCatalog.orElse(defaultCatalog)
        .getOrElse(throw new AnalysisException(
          s"No catalog specified for table ${identifier.quoted} and no default catalog is set"))
        .asTableCatalog
      convertCTAS(catalog, identifier, create)
    // DROP TABLE against an explicit catalog uses the v2 DropTable plan;
    // otherwise fall back to the v1 command.
    case DropTableStatement(CatalogObjectIdentifier(Some(catalog), ident), ifExists, _) =>
      DropTable(catalog.asTableCatalog, ident, ifExists)
    case DropTableStatement(AsTableIdentifier(tableName), ifExists, purge) =>
      DropTableCommand(tableName, ifExists, isView = false, purge)
    case DropViewStatement(CatalogObjectIdentifier(Some(catalog), ident), _) =>
      throw new AnalysisException(
        s"Can not specify catalog `${catalog.name}` for view $ident " +
          s"because view support in catalog has not been implemented yet")
    case DropViewStatement(AsTableIdentifier(tableName), ifExists) =>
      DropTableCommand(tableName, ifExists, isView = true, purge = false)
    case AlterTableSetPropertiesStatement(AsTableIdentifier(table), properties) =>
      AlterTableSetPropertiesCommand(table, properties, isView = false)
    case AlterViewSetPropertiesStatement(AsTableIdentifier(table), properties) =>
      AlterTableSetPropertiesCommand(table, properties, isView = true)
    case AlterTableUnsetPropertiesStatement(AsTableIdentifier(table), propertyKeys, ifExists) =>
      AlterTableUnsetPropertiesCommand(table, propertyKeys, ifExists, isView = false)
    case AlterViewUnsetPropertiesStatement(AsTableIdentifier(table), propertyKeys, ifExists) =>
      AlterTableUnsetPropertiesCommand(table, propertyKeys, ifExists, isView = true)
    case AlterTableSetLocationStatement(AsTableIdentifier(table), newLocation) =>
      AlterTableSetLocationCommand(table, None, newLocation)
    case AlterTableAddColumnsStatement(AsTableIdentifier(table), newColumns)
        if newColumns.forall(_.name.size == 1) =>
      // only top-level adds are supported using AlterTableAddColumnsCommand
      AlterTableAddColumnsCommand(table, newColumns.map(convertToStructField))
  }
  /** Extractor matching provider names that must use the v1 write path. */
  object V1WriteProvider {
    // Providers explicitly pinned to v1 writes via configuration.
    private val v1WriteOverrideSet =
      conf.useV1SourceWriterList.toLowerCase(Locale.ROOT).split(",").toSet
    def unapply(provider: String): Option[String] = {
      if (v1WriteOverrideSet.contains(provider.toLowerCase(Locale.ROOT))) {
        Some(provider)
      } else {
        // A provider implementing TableProvider is a v2 source (no match);
        // everything else falls back to v1.
        lazy val providerClass = DataSource.lookupDataSource(provider, conf)
        provider match {
          case _ if classOf[TableProvider].isAssignableFrom(providerClass) =>
            None
          case _ =>
            Some(provider)
        }
      }
    }
  }
  /**
   * Builds a v1 [[CatalogTable]] descriptor from the parsed statement parts.
   * The table is EXTERNAL when a custom location is given, MANAGED otherwise.
   */
  private def buildCatalogTable(
      table: TableIdentifier,
      schema: StructType,
      partitioning: Seq[Transform],
      bucketSpec: Option[BucketSpec],
      properties: Map[String, String],
      provider: String,
      options: Map[String, String],
      location: Option[String],
      comment: Option[String],
      ifNotExists: Boolean): CatalogTable = {
    val storage = DataSource.buildStorageFormatFromOptions(options)
    if (location.isDefined && storage.locationUri.isDefined) {
      throw new AnalysisException(
        "LOCATION and 'path' in OPTIONS are both used to indicate the custom table path, " +
          "you can only specify one of them.")
    }
    val customLocation = storage.locationUri.orElse(location.map(CatalogUtils.stringToURI))
    val tableType = if (customLocation.isDefined) {
      CatalogTableType.EXTERNAL
    } else {
      CatalogTableType.MANAGED
    }
    CatalogTable(
      identifier = table,
      tableType = tableType,
      storage = storage.copy(locationUri = customLocation),
      schema = schema,
      provider = Some(provider),
      partitionColumnNames = partitioning.asPartitionColumns,
      bucketSpec = bucketSpec,
      properties = properties,
      comment = comment)
  }
  /** Converts a parsed CTAS statement into a v2 [[CreateTableAsSelect]] plan. */
  private def convertCTAS(
      catalog: TableCatalog,
      identifier: Identifier,
      ctas: CreateTableAsSelectStatement): CreateTableAsSelect = {
    // convert the bucket spec and add it as a transform
    val partitioning = ctas.partitioning ++ ctas.bucketSpec.map(_.asTransform)
    val properties = convertTableProperties(
      ctas.properties, ctas.options, ctas.location, ctas.comment, ctas.provider)
    CreateTableAsSelect(
      catalog,
      identifier,
      partitioning,
      ctas.asSelect,
      properties,
      writeOptions = ctas.options.filterKeys(_ != "path"),
      ignoreIfExists = ctas.ifNotExists)
  }
  /** Converts a parsed CREATE TABLE statement into a v2 [[CreateV2Table]] plan. */
  private def convertCreateTable(
      catalog: TableCatalog,
      identifier: Identifier,
      create: CreateTableStatement): CreateV2Table = {
    // convert the bucket spec and add it as a transform
    val partitioning = create.partitioning ++ create.bucketSpec.map(_.asTransform)
    val properties = convertTableProperties(
      create.properties, create.options, create.location, create.comment, create.provider)
    CreateV2Table(
      catalog,
      identifier,
      create.tableSchema,
      partitioning,
      properties,
      ignoreIfExists = create.ifNotExists)
  }
  /**
   * Merges TBLPROPERTIES, OPTIONS, USING, LOCATION and COMMENT clauses into a
   * single property map, rejecting conflicting ways of specifying the same
   * value (path/location, comment, provider).
   */
  private def convertTableProperties(
      properties: Map[String, String],
      options: Map[String, String],
      location: Option[String],
      comment: Option[String],
      provider: String): Map[String, String] = {
    if (options.contains("path") && location.isDefined) {
      throw new AnalysisException(
        "LOCATION and 'path' in OPTIONS are both used to indicate the custom table path, " +
          "you can only specify one of them.")
    }
    if ((options.contains("comment") || properties.contains("comment"))
        && comment.isDefined) {
      throw new AnalysisException(
        "COMMENT and option/property 'comment' are both used to set the table comment, you can " +
          "only specify one of them.")
    }
    if (options.contains("provider") || properties.contains("provider")) {
      throw new AnalysisException(
        "USING and option/property 'provider' are both used to set the provider implementation, " +
          "you can only specify one of them.")
    }
    val filteredOptions = options.filterKeys(_ != "path")
    // create table properties from TBLPROPERTIES and OPTIONS clauses
    val tableProperties = new mutable.HashMap[String, String]()
    tableProperties ++= properties
    tableProperties ++= filteredOptions
    // convert USING, LOCATION, and COMMENT clauses to table properties
    tableProperties += ("provider" -> provider)
    comment.map(text => tableProperties += ("comment" -> text))
    location.orElse(options.get("path")).map(loc => tableProperties += ("location" -> loc))
    tableProperties.toMap
  }
  /**
   * Converts a parsed column into a [[StructField]], preserving the Hive
   * string type (char/varchar) in metadata when it was replaced.
   */
  private def convertToStructField(col: QualifiedColType): StructField = {
    val builder = new MetadataBuilder
    col.comment.foreach(builder.putString("comment", _))
    val cleanedDataType = HiveStringType.replaceCharType(col.dataType)
    if (col.dataType != cleanedDataType) {
      builder.putString(HIVE_TYPE_STRING, col.dataType.catalogString)
    }
    StructField(
      col.name.head,
      cleanedDataType,
      nullable = true,
      builder.build())
  }
}
| aosagie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceResolution.scala | Scala | apache-2.0 | 11,588 |
package im.actor.server.models.llectro
/**
 * Identifies a device for the Llectro integration by the auth session it
 * belongs to and its screen dimensions in pixels.
 * (Fix: removed dataset-extraction junk that was fused onto this line and
 * made it invalid Scala.)
 */
case class LlectroDevice(authId: Long, screenWidth: Int, screenHeight: Int)
package io.getquill.context.jdbc.h2
import io.getquill.{ H2Dialect, JdbcContext, Literal, TestEntities }
import io.getquill.context.jdbc.TestEncoders
import io.getquill.context.sql.TestDecoders
// Shared H2 test context: a JDBC Quill context combining the H2 dialect with
// the Literal (as-written) naming strategy against the "testH2DB" datasource,
// mixing in the common test entities, encoders and decoders.
object testContext extends JdbcContext[H2Dialect, Literal]("testH2DB") with TestEntities with TestEncoders with TestDecoders
| jcranky/quill | quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/TestContext.scala | Scala | apache-2.0 | 321 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import api.base.types.ScTypeElement
import api.expr.ScExpression
import api.statements.ScFunction
import com.intellij.psi.stubs.NamedStub
/**
 * Stub for [[ScFunction]] PSI elements: exposes the data cached in the index
 * for a function without building its full syntax tree.
 * (Fix: removed dataset-extraction junk fused onto the closing brace.)
 *
 * User: Alexander Podkhalyuzin
 * Date: 14.10.2008
 */
trait ScFunctionStub extends NamedStub[ScFunction] with ScMemberOrLocal {
  /** Whether the function is marked `implicit`. */
  def isImplicit: Boolean
  /** Whether this is a declaration (no body), as opposed to a definition. */
  def isDeclaration: Boolean
  /** Names of the annotations attached to the function. */
  def getAnnotations: Array[String]
  /** Source text of the declared return type, if any. */
  def getReturnTypeText: String
  /** Parsed return type element, when present. */
  def getReturnTypeElement: Option[ScTypeElement]
  /** Body expression, for definitions that have one. */
  def getBodyExpression: Option[ScExpression]
  /** Source text of the function body. */
  def getBodyText: String
  /** Whether the definition uses `=` before its body. */
  def hasAssign: Boolean
}
package org.elasticsearch.spark.integration
import java.io.File
import java.nio.file.Files
import java.sql.Timestamp
import java.util.concurrent.TimeUnit
import java.{lang => jl}
import java.{util => ju}
import javax.xml.bind.DatatypeConverter
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode
import org.apache.spark.sql.types.Decimal
import org.elasticsearch.hadoop.EsHadoopIllegalArgumentException
import org.elasticsearch.hadoop.EsHadoopIllegalStateException
import org.elasticsearch.hadoop.cfg.ConfigurationOptions
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_INDEX_AUTO_CREATE
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_MAPPING_EXCLUDE
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_MAPPING_ID
import org.elasticsearch.hadoop.cfg.ConfigurationOptions.ES_SPARK_DATAFRAME_WRITE_NULL_VALUES
import org.elasticsearch.hadoop.mr.RestUtils
import org.elasticsearch.hadoop.mr.{EsAssume => EsAssume}
import org.elasticsearch.hadoop.serialization.EsHadoopSerializationException
import org.elasticsearch.hadoop.util.EsMajorVersion
import org.elasticsearch.hadoop.util.StringUtils
import org.elasticsearch.hadoop.util.TestSettings
import org.elasticsearch.hadoop.util.TestUtils
import org.elasticsearch.spark.sql.streaming.SparkSqlStreamingConfigs
import org.elasticsearch.spark.sql.streaming.StreamingQueryTestHarness
import org.hamcrest.Matchers.containsString
import org.hamcrest.Matchers.is
import org.hamcrest.Matchers.not
import org.junit.AfterClass
import org.junit.Assert
import org.junit.Assert.assertThat
import org.junit.Assert.assertTrue
import org.junit.BeforeClass
import org.junit.FixMethodOrder
import org.junit.Rule
import org.junit.Test
import org.junit.rules.TemporaryFolder
import org.junit.runner.RunWith
import org.junit.runners.MethodSorters
import org.junit.runners.Parameterized
import org.junit.runners.Parameterized.Parameters
import scala.collection.JavaConversions.propertiesAsScalaMap
import scala.io.Codec
import scala.io.Source
/**
 * Companion holding the shared SparkSession, configuration and commit-log
 * directory for the parameterized streaming test suite, plus the JUnit
 * lifecycle hooks that create and tear down the session.
 */
object AbstractScalaEsSparkStructuredStreaming {
  @transient val appName: String = "es-spark-sql-streaming-test"
  @transient var spark: Option[SparkSession] = None
  @transient val commitLogDir: String = commitLogDirectory()
  @transient val sparkConf: SparkConf = new SparkConf()
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    .setMaster("local")
    .setAppName(appName)
    .set("spark.executor.extraJavaOptions", "-XX:MaxPermSize=256m")
    .setJars(SparkUtils.ES_SPARK_TESTING_JAR)
  @BeforeClass
  def setup(): Unit = {
    sparkConf.setAll(TestSettings.TESTING_PROPS)
    spark = Some(
      SparkSession.builder()
        .config(sparkConf)
        .getOrCreate()
    )
  }
  /**
   * Creates a unique temporary directory holding a "logs" sub-directory and
   * returns its absolute path.
   *
   * Fix: uses Files.createTempDirectory, which creates the directory
   * atomically, instead of the previous createTempFile/delete/mkdir sequence
   * that was racy and ignored mkdir failures.
   */
  def commitLogDirectory(): String = {
    val tempDir = Files.createTempDirectory("es-spark-structured-streaming")
    val logDir = new File(tempDir.toFile, "logs")
    logDir.mkdir()
    logDir.getAbsolutePath
  }
  @AfterClass
  def cleanup(): Unit = {
    spark.foreach((s: SparkSession) => {
      s.close()
      // Give executors a moment to shut down before the next suite starts.
      Thread.sleep(TimeUnit.SECONDS.toMillis(3))
    })
  }
  // JUnit parameters: (index prefix, unused flag) per run.
  @Parameters
  def testParams(): ju.Collection[Array[jl.Object]] = {
    val list = new ju.ArrayList[Array[jl.Object]]()
    list.add(Array("default", jl.Boolean.FALSE))
    list
  }
}
/** Typed payload case classes used by the structured streaming tests. */
object Products extends Serializable {
  // For sending straight strings
  case class Text(data: String)
  // Basic tuple pair
  case class Record(id: Int, name: String)
  // Meant to model the sampleArtistsDatUri
  case class WrappingRichData(id: Int, name: String, url: String, pictures: String, time: Timestamp, nested: RichData)
  case class RichData(id: Int, name: String, url: String, pictures: String, time: Timestamp)
  // Decimal data holder
  case class DecimalData(decimal: Decimal)
}
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
@RunWith(classOf[Parameterized])
class AbstractScalaEsSparkStructuredStreaming(prefix: String, something: Boolean) {
private val tempFolderRule = new TemporaryFolder
@Rule
def tempFolder: TemporaryFolder = tempFolderRule
val spark: SparkSession = AbstractScalaEsSparkStructuredStreaming.spark
.getOrElse(throw new EsHadoopIllegalStateException("Spark not started..."))
import org.elasticsearch.spark.integration.Products._
import spark.implicits._
def wrapIndex(name: String): String = {
prefix + "-spark-struct-stream-" + name
}
def checkpoint(target: String): String = {
s"${AbstractScalaEsSparkStructuredStreaming.commitLogDir}/$target"
}
def checkpointDir(target: String): String = {
checkpoint(target)+"/sinks/elasticsearch"
}
@Test
def test0Framework(): Unit = {
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.runTest {
test.stream
.map(_.name)
.flatMap(_.split(" "))
.writeStream
.format("console")
.start()
}
}
@Test
def test0FrameworkFailure(): Unit = {
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.expectingToThrow(classOf[StringIndexOutOfBoundsException])
.runTest {
test.stream
.map(_.name)
.flatMap(_.split(" "))
.map(_.charAt(-4).toString)
.writeStream
.format("console")
.start()
}
}
@Test(expected = classOf[EsHadoopIllegalArgumentException])
def test1FailOnIncorrectSaveCall(): Unit = {
import org.elasticsearch.spark.sql._
val target = wrapIndex("failed-on-save-call/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.stream.saveToEs(target)
Assert.fail("Should not launch job with saveToEs() method")
}
@Test(expected = classOf[EsHadoopIllegalArgumentException])
def test1FailOnCompleteMode(): Unit = {
val target = wrapIndex("failed-on-complete-mode/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.runTest {
test.stream
.select("name").groupBy("name").count()
.writeStream
.outputMode(OutputMode.Complete())
.option("checkpointLocation", checkpoint(target))
.format("es")
.start(target)
}
Assert.fail("Should not launch job with Complete mode specified")
}
@Test(expected = classOf[EsHadoopIllegalArgumentException])
def test1FailOnPartitions(): Unit = {
val target = wrapIndex("failed-on-partitions/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.runTest {
test.stream
.writeStream
.partitionBy("name")
.option("checkpointLocation", checkpoint(target))
.format("es")
.start(target)
}
Assert.fail("Should not launch job with column partition")
}
@Test
def test2BasicWriteWithoutCommitLog(): Unit = {
val target = wrapIndex("test-basic-write-no-commit/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option(SparkSqlStreamingConfigs.ES_SINK_LOG_ENABLE, "false")
.option("checkpointLocation", checkpoint(target))
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
assertThat(new File(s"${checkpointDir(target)}/0").exists(), not(true))
}
@Test
def test2BasicWrite(): Unit = {
val target = wrapIndex("test-basic-write/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
Source.fromFile(s"${checkpointDir(target)}/0").getLines().foreach(println)
}
@Test
def test2BasicWriteUsingSessionCommitLog(): Unit = {
try {
val check = s"${AbstractScalaEsSparkStructuredStreaming.commitLogDir}/session1"
spark.conf.set(SQLConf.CHECKPOINT_LOCATION.key, check)
val target = wrapIndex("test-basic-write/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.queryName("test-basic-write-session-commit")
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
Source.fromFile(s"${checkpointDir(target)}/0").getLines().foreach(println)
assertThat(Files.exists(new File(s"$check/test-basic-write-session-commit/sinks/elasticsearch/0").toPath), is(true))
} finally {
spark.conf.unset(SQLConf.CHECKPOINT_LOCATION.key)
}
}
@Test
def test2BasicWriteUsingSessionCommitLogNoQueryName(): Unit = {
try {
val check = s"${AbstractScalaEsSparkStructuredStreaming.commitLogDir}/session2"
spark.conf.set(SQLConf.CHECKPOINT_LOCATION.key, check)
val target = wrapIndex("test-basic-write/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
Source.fromFile(s"${checkpointDir(target)}/0").getLines().foreach(println)
assertThat(Files.exists(new File(check).toPath), is(true))
assertThat(Files.list(new File(check).toPath).count(), is(2L)) // A UUID for general checkpoint, and one for ES.
} finally {
spark.conf.unset(SQLConf.CHECKPOINT_LOCATION.key)
}
}
@Test(expected = classOf[EsHadoopIllegalArgumentException])
def test1FailOnIndexCreationDisabled(): Unit = {
val target = wrapIndex("test-write-index-create-disabled/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ES_INDEX_AUTO_CREATE, "no")
.format("es")
.start(target)
}
assertTrue("Index already exists! Index should not exist prior to this test!", !RestUtils.exists(target))
Assert.fail("Should not be able to write to index if not already created.")
}
@Test
def test2WriteWithMappingId(): Unit = {
val target = wrapIndex("test-write-with-id/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ES_MAPPING_ID, "id")
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
assertTrue(RestUtils.exists(target + "/1"))
assertTrue(RestUtils.exists(target + "/2"))
assertTrue(RestUtils.exists(target + "/3"))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
}
@Test
def test2WriteWithMappingExclude(): Unit = {
val target = wrapIndex("test-write-with-exclude/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ES_MAPPING_EXCLUDE, "id")
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
assertThat(searchResult, not(containsString(""""id":1""")))
}
@Test
def test2WriteToIngestPipeline(): Unit = {
EsAssume.versionOnOrAfter(EsMajorVersion.V_5_X, "Ingest Supported in 5.x and above only")
val pipelineName: String = prefix + "-pipeline"
val pipeline: String = """{"description":"Test Pipeline","processors":[{"set":{"field":"pipeTEST","value":true,"override":true}}]}"""
RestUtils.put("/_ingest/pipeline/" + pipelineName, StringUtils.toUTF(pipeline))
val target = wrapIndex("test-write-ingest/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, "Hadoop"))
.withInput(Record(3, "YARN"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ConfigurationOptions.ES_INGEST_PIPELINE, pipelineName)
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
val searchResult = RestUtils.get(target + "/_search?")
assertThat(searchResult, containsString("Spark"))
assertThat(searchResult, containsString("Hadoop"))
assertThat(searchResult, containsString("YARN"))
assertThat(searchResult, containsString(""""pipeTEST":true"""))
}
@Test
def test2MultiIndexWrite(): Unit = {
val target = wrapIndex("test-tech-{name}/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "spark"))
.withInput(Record(2, "hadoop"))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(wrapIndex("test-tech-spark/data")))
assertTrue(RestUtils.exists(wrapIndex("test-tech-hadoop/data")))
assertThat(wrapIndex("test-tech-spark/data/_search?"), containsString("spark"))
assertThat(wrapIndex("test-tech-hadoop/data/_search?"), containsString("hadoop"))
}
@Test
def test2NullValueIgnored() {
val target = wrapIndex("test-null-data-absent/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, null))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ES_MAPPING_ID, "id")
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
assertThat(RestUtils.get(target + "/1"), containsString("name"))
assertThat(RestUtils.get(target + "/2"), not(containsString("name")))
}
@Test
def test2NullValueWritten() {
val target = wrapIndex("test-null-data-null/data")
val test = new StreamingQueryTestHarness[Record](spark)
test.withInput(Record(1, "Spark"))
.withInput(Record(2, null))
.runTest {
test.stream
.writeStream
.option("checkpointLocation", checkpoint(target))
.option(ES_MAPPING_ID, "id")
.option(ES_SPARK_DATAFRAME_WRITE_NULL_VALUES, "true")
.format("es")
.start(target)
}
assertTrue(RestUtils.exists(target))
assertThat(RestUtils.get(target + "/1"), containsString("name"))
assertThat(RestUtils.get(target + "/2"), containsString("name"))
}
  @Test
  def test2WriteWithRichMapping() {
    // End-to-end write of nested (rich) objects parsed from the sample artists
    // dataset, using the record's own "id" field as the document id.
    val target = wrapIndex("test-basic-write-rich-mapping-id/data")
    val test = new StreamingQueryTestHarness[Text](spark)

    // Feed every line of the tab-separated sample file into the stream as raw text.
    Source.fromURI(TestUtils.sampleArtistsDatUri())(Codec.ISO8859).getLines().foreach(s => test.withInput(Text(s)))

    test
      .runTest {
        test.stream
          .map(_.data.split("\\t"))
          .map(a => {
            // Columns: id, name, url, pictures, ISO-8601 timestamp.
            val id = a(0).toInt
            val name = a(1)
            val url = a(2)
            val pictures = a(3)
            val time = new Timestamp(DatatypeConverter.parseDateTime(a(4)).getTimeInMillis)
            // Wrap the flat fields plus a nested RichData copy to exercise
            // serialization of nested structures.
            WrappingRichData(id, name, url, pictures, time, RichData(id, name, url, pictures, time))
          })
          .writeStream
          .option("checkpointLocation", checkpoint(target))
          .option(ES_MAPPING_ID, "id")
          .format("es")
          .start(target)
      }
    assertTrue(RestUtils.exists(target))
    assertThat(RestUtils.get(target + "/_search?"), containsString("345"))
    // ES_MAPPING_ID took effect: document addressable by its "id" value.
    assertThat(RestUtils.exists(target+"/1"), is(true))
  }
  @Test
  def test1FailOnDecimalType() {
    // Spark's DecimalType has no Elasticsearch equivalent, so attempting to
    // stream such a column must surface an EsHadoopSerializationException.
    val target = wrapIndex("test-decimal-exception/data")
    val test = new StreamingQueryTestHarness[DecimalData](spark)
    test.withInput(DecimalData(Decimal(10)))
      .expectingToThrow(classOf[EsHadoopSerializationException])
      .runTest {
        test.stream
          .writeStream
          .option("checkpointLocation", checkpoint(target))
          .format("es")
          .start(target)
      }
  }
}
| takezoe/elasticsearch-hadoop | spark/sql-20/src/itest/scala/org/elasticsearch/spark/integration/AbstractScalaEsSparkStructuredStreaming.scala | Scala | apache-2.0 | 18,558 |
package com.nullpointerbay
/**
* Created by charafau on 11/29/14.
*/
import org.scalatest._
/** Base class for unit tests: FlatSpec style with the commonly used ScalaTest mixins. */
abstract class UnitSpec extends FlatSpec with Matchers with
OptionValues with Inside with Inspectors | charafau/postal-code-parser | src/test/scala/com/nullpointerbay/UnitSpec.scala | Scala | mit | 196 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.Locale
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.language.existentials
import org.apache.spark.TestUtils.{assertNotSpilled, assertSpilled}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
import org.apache.spark.sql.catalyst.expressions.{Ascending, SortOrder}
import org.apache.spark.sql.execution.{BinaryExecNode, SortExec}
import org.apache.spark.sql.execution.joins._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types.StructType
class JoinSuite extends QueryTest with SharedSQLContext {
import testImplicits._
setupTestData()
def statisticSizeInByte(df: DataFrame): BigInt = {
df.queryExecution.optimizedPlan.stats.sizeInBytes
}
  test("equi-join is hash-join") {
    // A simple equality predicate between the two sides should be planned by
    // JoinSelection as exactly one join operator.
    val x = testData2.as("x")
    val y = testData2.as("y")
    val join = x.join(y, $"x.a" === $"y.a", "inner").queryExecution.optimizedPlan
    val planned = spark.sessionState.planner.JoinSelection(join)
    assert(planned.size === 1)
  }
  /**
   * Runs the given SQL string and asserts that its physical plan contains exactly
   * one join operator, of exactly the expected class.
   *
   * @param pair (SQL query, expected physical join operator class)
   */
  def assertJoin(pair: (String, Class[_])): Any = {
    val (sqlString, c) = pair
    val df = sql(sqlString)
    val physical = df.queryExecution.sparkPlan
    // Collect every kind of join operator that can appear in the plan.
    val operators = physical.collect {
      case j: BroadcastHashJoinExec => j
      case j: ShuffledHashJoinExec => j
      case j: CartesianProductExec => j
      case j: BroadcastNestedLoopJoinExec => j
      case j: SortMergeJoinExec => j
    }
    assert(operators.size === 1)
    if (operators.head.getClass != c) {
      fail(s"$sqlString expected operator: $c, but got ${operators.head}\\n physical: \\n$physical")
    }
  }
test("join operator selection") {
spark.sharedState.cacheManager.clearCache()
withSQLConf("spark.sql.autoBroadcastJoinThreshold" -> "0",
SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
Seq(
("SELECT * FROM testData LEFT SEMI JOIN testData2 ON key = a",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData LEFT SEMI JOIN testData2", classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData JOIN testData2", classOf[CartesianProductExec]),
("SELECT * FROM testData JOIN testData2 WHERE key = 2", classOf[CartesianProductExec]),
("SELECT * FROM testData LEFT JOIN testData2", classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2", classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2", classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData LEFT JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2 WHERE key = 2",
classOf[CartesianProductExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData JOIN testData2 WHERE key > a", classOf[CartesianProductExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2 WHERE key > a",
classOf[CartesianProductExec]),
("SELECT * FROM testData JOIN testData2 ON key = a", classOf[SortMergeJoinExec]),
("SELECT * FROM testData JOIN testData2 ON key = a and key = 2",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData JOIN testData2 ON key = a where key = 2",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData LEFT JOIN testData2 ON key = a", classOf[SortMergeJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2 ON key = a where key = 2",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData right join testData2 ON key = a and key = 2",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData full outer join testData2 ON key = a",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData left JOIN testData2 ON (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData right JOIN testData2 ON (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData full JOIN testData2 ON (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData ANTI JOIN testData2 ON key = a", classOf[SortMergeJoinExec]),
("SELECT * FROM testData LEFT ANTI JOIN testData2", classOf[BroadcastNestedLoopJoinExec])
).foreach(assertJoin)
}
}
// ignore("SortMergeJoin shouldn't work on unsortable columns") {
// Seq(
// ("SELECT * FROM arrayData JOIN complexData ON data = a", classOf[ShuffledHashJoin])
// ).foreach { case (query, joinClass) => assertJoin(query, joinClass) }
// }
test("broadcasted hash join operator selection") {
spark.sharedState.cacheManager.clearCache()
sql("CACHE TABLE testData")
Seq(
("SELECT * FROM testData join testData2 ON key = a",
classOf[BroadcastHashJoinExec]),
("SELECT * FROM testData join testData2 ON key = a and key = 2",
classOf[BroadcastHashJoinExec]),
("SELECT * FROM testData join testData2 ON key = a where key = 2",
classOf[BroadcastHashJoinExec])
).foreach(assertJoin)
}
test("broadcasted hash outer join operator selection") {
spark.sharedState.cacheManager.clearCache()
sql("CACHE TABLE testData")
sql("CACHE TABLE testData2")
Seq(
("SELECT * FROM testData LEFT JOIN testData2 ON key = a",
classOf[BroadcastHashJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2 ON key = a where key = 2",
classOf[BroadcastHashJoinExec]),
("SELECT * FROM testData right join testData2 ON key = a and key = 2",
classOf[BroadcastHashJoinExec])
).foreach(assertJoin)
}
test("multiple-key equi-join is hash-join") {
val x = testData2.as("x")
val y = testData2.as("y")
val join = x.join(y, ($"x.a" === $"y.a") && ($"x.b" === $"y.b")).queryExecution.optimizedPlan
val planned = spark.sessionState.planner.JoinSelection(join)
assert(planned.size === 1)
}
test("inner join where, one match per row") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
upperCaseData.join(lowerCaseData).where('n === 'N),
Seq(
Row(1, "A", 1, "a"),
Row(2, "B", 2, "b"),
Row(3, "C", 3, "c"),
Row(4, "D", 4, "d")
))
}
}
test("inner join ON, one match per row") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
upperCaseData.join(lowerCaseData, $"n" === $"N"),
Seq(
Row(1, "A", 1, "a"),
Row(2, "B", 2, "b"),
Row(3, "C", 3, "c"),
Row(4, "D", 4, "d")
))
}
}
test("inner join, where, multiple matches") {
val x = testData2.where($"a" === 1).as("x")
val y = testData2.where($"a" === 1).as("y")
checkAnswer(
x.join(y).where($"x.a" === $"y.a"),
Row(1, 1, 1, 1) ::
Row(1, 1, 1, 2) ::
Row(1, 2, 1, 1) ::
Row(1, 2, 1, 2) :: Nil
)
}
test("inner join, no matches") {
val x = testData2.where($"a" === 1).as("x")
val y = testData2.where($"a" === 2).as("y")
checkAnswer(
x.join(y).where($"x.a" === $"y.a"),
Nil)
}
test("SPARK-22141: Propagate empty relation before checking Cartesian products") {
Seq("inner", "left", "right", "left_outer", "right_outer", "full_outer").foreach { joinType =>
val x = testData2.where($"a" === 2 && !($"a" === 2)).as("x")
val y = testData2.where($"a" === 1 && !($"a" === 1)).as("y")
checkAnswer(x.join(y, Seq.empty, joinType), Nil)
}
}
test("big inner join, 4 matches per row") {
val bigData = testData.union(testData).union(testData).union(testData)
val bigDataX = bigData.as("x")
val bigDataY = bigData.as("y")
checkAnswer(
bigDataX.join(bigDataY).where($"x.key" === $"y.key"),
testData.rdd.flatMap(row => Seq.fill(16)(Row.merge(row, row))).collect().toSeq)
}
test("cartesian product join") {
withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
checkAnswer(
testData3.join(testData3),
Row(1, null, 1, null) ::
Row(1, null, 2, 2) ::
Row(2, 2, 1, null) ::
Row(2, 2, 2, 2) :: Nil)
checkAnswer(
testData3.as("x").join(testData3.as("y"), $"x.a" > $"y.a"),
Row(2, 2, 1, null) :: Nil)
}
withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") {
val e = intercept[Exception] {
checkAnswer(
testData3.join(testData3),
Row(1, null, 1, null) ::
Row(1, null, 2, 2) ::
Row(2, 2, 1, null) ::
Row(2, 2, 2, 2) :: Nil)
}
assert(e.getMessage.contains("Detected implicit cartesian product for INNER join " +
"between logical plans"))
}
}
test("left outer join") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
upperCaseData.join(lowerCaseData, $"n" === $"N", "left"),
Row(1, "A", 1, "a") ::
Row(2, "B", 2, "b") ::
Row(3, "C", 3, "c") ::
Row(4, "D", 4, "d") ::
Row(5, "E", null, null) ::
Row(6, "F", null, null) :: Nil)
checkAnswer(
upperCaseData.join(lowerCaseData, $"n" === $"N" && $"n" > 1, "left"),
Row(1, "A", null, null) ::
Row(2, "B", 2, "b") ::
Row(3, "C", 3, "c") ::
Row(4, "D", 4, "d") ::
Row(5, "E", null, null) ::
Row(6, "F", null, null) :: Nil)
checkAnswer(
upperCaseData.join(lowerCaseData, $"n" === $"N" && $"N" > 1, "left"),
Row(1, "A", null, null) ::
Row(2, "B", 2, "b") ::
Row(3, "C", 3, "c") ::
Row(4, "D", 4, "d") ::
Row(5, "E", null, null) ::
Row(6, "F", null, null) :: Nil)
checkAnswer(
upperCaseData.join(lowerCaseData, $"n" === $"N" && $"l" > $"L", "left"),
Row(1, "A", 1, "a") ::
Row(2, "B", 2, "b") ::
Row(3, "C", 3, "c") ::
Row(4, "D", 4, "d") ::
Row(5, "E", null, null) ::
Row(6, "F", null, null) :: Nil)
// Make sure we are choosing left.outputPartitioning as the
// outputPartitioning for the outer join operator.
checkAnswer(
sql(
"""
|SELECT l.N, count(*)
|FROM uppercasedata l LEFT OUTER JOIN allnulls r ON (l.N = r.a)
|GROUP BY l.N
""".stripMargin),
Row(
1, 1) ::
Row(2, 1) ::
Row(3, 1) ::
Row(4, 1) ::
Row(5, 1) ::
Row(6, 1) :: Nil)
checkAnswer(
sql(
"""
|SELECT r.a, count(*)
|FROM uppercasedata l LEFT OUTER JOIN allnulls r ON (l.N = r.a)
|GROUP BY r.a
""".stripMargin),
Row(null, 6) :: Nil)
}
}
test("right outer join") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(
lowerCaseData.join(upperCaseData, $"n" === $"N", "right"),
Row(1, "a", 1, "A") ::
Row(2, "b", 2, "B") ::
Row(3, "c", 3, "C") ::
Row(4, "d", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
checkAnswer(
lowerCaseData.join(upperCaseData, $"n" === $"N" && $"n" > 1, "right"),
Row(null, null, 1, "A") ::
Row(2, "b", 2, "B") ::
Row(3, "c", 3, "C") ::
Row(4, "d", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
checkAnswer(
lowerCaseData.join(upperCaseData, $"n" === $"N" && $"N" > 1, "right"),
Row(null, null, 1, "A") ::
Row(2, "b", 2, "B") ::
Row(3, "c", 3, "C") ::
Row(4, "d", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
checkAnswer(
lowerCaseData.join(upperCaseData, $"n" === $"N" && $"l" > $"L", "right"),
Row(1, "a", 1, "A") ::
Row(2, "b", 2, "B") ::
Row(3, "c", 3, "C") ::
Row(4, "d", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
// Make sure we are choosing right.outputPartitioning as the
// outputPartitioning for the outer join operator.
checkAnswer(
sql(
"""
|SELECT l.a, count(*)
|FROM allnulls l RIGHT OUTER JOIN uppercasedata r ON (l.a = r.N)
|GROUP BY l.a
""".stripMargin),
Row(null,
6))
checkAnswer(
sql(
"""
|SELECT r.N, count(*)
|FROM allnulls l RIGHT OUTER JOIN uppercasedata r ON (l.a = r.N)
|GROUP BY r.N
""".stripMargin),
Row(1
, 1) ::
Row(2, 1) ::
Row(3, 1) ::
Row(4, 1) ::
Row(5, 1) ::
Row(6, 1) :: Nil)
}
}
test("full outer join") {
upperCaseData.where('N <= 4).createOrReplaceTempView("`left`")
upperCaseData.where('N >= 3).createOrReplaceTempView("`right`")
val left = UnresolvedRelation(TableIdentifier("left"))
val right = UnresolvedRelation(TableIdentifier("right"))
checkAnswer(
left.join(right, $"left.N" === $"right.N", "full"),
Row(1, "A", null, null) ::
Row(2, "B", null, null) ::
Row(3, "C", 3, "C") ::
Row(4, "D", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
checkAnswer(
left.join(right, ($"left.N" === $"right.N") && ($"left.N" =!= 3), "full"),
Row(1, "A", null, null) ::
Row(2, "B", null, null) ::
Row(3, "C", null, null) ::
Row(null, null, 3, "C") ::
Row(4, "D", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
checkAnswer(
left.join(right, ($"left.N" === $"right.N") && ($"right.N" =!= 3), "full"),
Row(1, "A", null, null) ::
Row(2, "B", null, null) ::
Row(3, "C", null, null) ::
Row(null, null, 3, "C") ::
Row(4, "D", 4, "D") ::
Row(null, null, 5, "E") ::
Row(null, null, 6, "F") :: Nil)
// Make sure we are UnknownPartitioning as the outputPartitioning for the outer join
// operator.
checkAnswer(
sql(
"""
|SELECT l.a, count(*)
|FROM allNulls l FULL OUTER JOIN upperCaseData r ON (l.a = r.N)
|GROUP BY l.a
""".
stripMargin),
Row(null, 10))
checkAnswer(
sql(
"""
|SELECT r.N, count(*)
|FROM allNulls l FULL OUTER JOIN upperCaseData r ON (l.a = r.N)
|GROUP BY r.N
""".stripMargin),
Row
(1, 1) ::
Row(2, 1) ::
Row(3, 1) ::
Row(4, 1) ::
Row(5, 1) ::
Row(6, 1) ::
Row(null, 4) :: Nil)
checkAnswer(
sql(
"""
|SELECT l.N, count(*)
|FROM upperCaseData l FULL OUTER JOIN allNulls r ON (l.N = r.a)
|GROUP BY l.N
""".stripMargin),
Row(1
, 1) ::
Row(2, 1) ::
Row(3, 1) ::
Row(4, 1) ::
Row(5, 1) ::
Row(6, 1) ::
Row(null, 4) :: Nil)
checkAnswer(
sql(
"""
|SELECT r.a, count(*)
|FROM upperCaseData l FULL OUTER JOIN allNulls r ON (l.N = r.a)
|GROUP BY r.a
""".
stripMargin),
Row(null, 10))
}
test("broadcasted existence join operator selection") {
spark.sharedState.cacheManager.clearCache()
sql("CACHE TABLE testData")
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> Long.MaxValue.toString) {
Seq(
("SELECT * FROM testData LEFT SEMI JOIN testData2 ON key = a",
classOf[BroadcastHashJoinExec]),
("SELECT * FROM testData ANT JOIN testData2 ON key = a", classOf[BroadcastHashJoinExec])
).foreach(assertJoin)
}
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {
Seq(
("SELECT * FROM testData LEFT SEMI JOIN testData2 ON key = a",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData LEFT ANTI JOIN testData2 ON key = a",
classOf[SortMergeJoinExec])
).foreach(assertJoin)
}
}
test("cross join with broadcast") {
sql("CACHE TABLE testData")
val sizeInByteOfTestData = statisticSizeInByte(spark.table("testData"))
// we set the threshold is greater than statistic of the cached table testData
withSQLConf(
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> (sizeInByteOfTestData + 1).toString(),
SQLConf.CROSS_JOINS_ENABLED.key -> "true") {
assert(statisticSizeInByte(spark.table("testData2")) >
spark.conf.get(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD))
assert(statisticSizeInByte(spark.table("testData")) <
spark.conf.get(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD))
Seq(
("SELECT * FROM testData LEFT SEMI JOIN testData2 ON key = a",
classOf[SortMergeJoinExec]),
("SELECT * FROM testData LEFT SEMI JOIN testData2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData JOIN testData2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData LEFT JOIN testData2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData LEFT JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData RIGHT JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2 WHERE key = 2",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData JOIN testData2 WHERE key > a",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData FULL OUTER JOIN testData2 WHERE key > a",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData left JOIN testData2 WHERE (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData right JOIN testData2 WHERE (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec]),
("SELECT * FROM testData full JOIN testData2 WHERE (key * a != key + a)",
classOf[BroadcastNestedLoopJoinExec])
).foreach(assertJoin)
checkAnswer(
sql(
"""
SELECT x.value, y.a, y.b FROM testData x JOIN testData2 y WHERE x.key = 2
""".stripMargin),
Row("2", 1, 1) ::
Row("2", 1, 2) ::
Row("2", 2, 1) ::
Row("2", 2, 2) ::
Row("2", 3, 1) ::
Row("2", 3, 2) :: Nil)
checkAnswer(
sql(
"""
SELECT x.value, y.a, y.b FROM testData x JOIN testData2 y WHERE x.key < y.a
""".stripMargin),
Row("1", 2, 1) ::
Row("1", 2, 2) ::
Row("1", 3, 1) ::
Row("1", 3, 2) ::
Row("2", 3, 1) ::
Row("2", 3, 2) :: Nil)
checkAnswer(
sql(
"""
SELECT x.value, y.a, y.b FROM testData x JOIN testData2 y ON x.key < y.a
""".stripMargin),
Row("1", 2, 1) ::
Row("1", 2, 2) ::
Row("1", 3, 1) ::
Row("1", 3, 2) ::
Row("2", 3, 1) ::
Row("2", 3, 2) :: Nil)
}
}
  test("left semi join") {
    // LEFT SEMI returns only the left side's columns, once per left row that
    // has at least one match on the right.
    val df = sql("SELECT * FROM testData2 LEFT SEMI JOIN testData ON key = a")
    checkAnswer(df,
      Row(1, 1) ::
      Row(1, 2) ::
      Row(2, 1) ::
      Row(2, 2) ::
      Row(3, 1) ::
      Row(3, 2) :: Nil)
  }
test("cross join detection") {
testData.createOrReplaceTempView("A")
testData.createOrReplaceTempView("B")
testData2.createOrReplaceTempView("C")
testData3.createOrReplaceTempView("D")
upperCaseData.where('N >= 3).createOrReplaceTempView("`right`")
val cartesianQueries = Seq(
/** The following should error out since there is no explicit cross join */
"SELECT * FROM testData inner join testData2",
"SELECT * FROM testData left outer join testData2",
"SELECT * FROM testData right outer join testData2",
"SELECT * FROM testData full outer join testData2",
"SELECT * FROM testData, testData2",
"SELECT * FROM testData, testData2 where testData.key = 1 and testData2.a = 22",
/** The following should fail because after reordering there are cartesian products */
"select * from (A join B on (A.key = B.key)) join D on (A.key=D.a) join C",
"select * from ((A join B on (A.key = B.key)) join C) join D on (A.key = D.a)",
/** Cartesian product involving C, which is not involved in a CROSS join */
"select * from ((A join B on (A.key = B.key)) cross join D) join C on (A.key = D.a)");
def checkCartesianDetection(query: String): Unit = {
val e = intercept[Exception] {
checkAnswer(sql(query), Nil);
}
assert(e.getMessage.contains("Detected implicit cartesian product"))
}
cartesianQueries.foreach(checkCartesianDetection)
// Check that left_semi, left_anti, existence joins without conditions do not throw
// an exception if cross joins are disabled
withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") {
checkAnswer(
sql("SELECT * FROM testData3 LEFT SEMI JOIN testData2"),
Row(1, null) :: Row (2, 2) :: Nil)
checkAnswer(
sql("SELECT * FROM testData3 LEFT ANTI JOIN testData2"),
Nil)
checkAnswer(
sql(
"""
|SELECT a FROM testData3
|WHERE
| EXISTS (SELECT * FROM testData)
|OR
| EXISTS (SELECT * FROM testData2)""".stripMargin),
Row(1) :: Row(2) :: Nil)
checkAnswer(
sql(
"""
|SELECT key FROM testData
|WHERE
| key IN (SELECT a FROM testData2)
|OR
| key IN (SELECT a FROM testData3)""".stripMargin),
Row(1) :: Row(2) :: Row(3) :: Nil)
}
}
test("test SortMergeJoin (without spill)") {
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "1",
"spark.sql.sortMergeJoinExec.buffer.spill.threshold" -> Int.MaxValue.toString) {
assertNotSpilled(sparkContext, "inner join") {
checkAnswer(
sql("SELECT * FROM testData JOIN testData2 ON key = a where key = 2"),
Row(2, "2", 2, 1) :: Row(2, "2", 2, 2) :: Nil
)
}
val expected = new ListBuffer[Row]()
expected.append(
Row(1, "1", 1, 1), Row(1, "1", 1, 2),
Row(2, "2", 2, 1), Row(2, "2", 2, 2),
Row(3, "3", 3, 1), Row(3, "3", 3, 2)
)
for (i <- 4 to 100) {
expected.append(Row(i, i.toString, null, null))
}
assertNotSpilled(sparkContext, "left outer join") {
checkAnswer(
sql(
"""
|SELECT
| big.key, big.value, small.a, small.b
|FROM
| testData big
|LEFT OUTER JOIN
| testData2 small
|ON
| big.key = small.a
""".stripMargin),
expected
)
}
assertNotSpilled(sparkContext, "right outer join") {
checkAnswer(
sql(
"""
|SELECT
| big.key, big.value, small.a, small.b
|FROM
| testData2 small
|RIGHT OUTER JOIN
| testData big
|ON
| big.key = small.a
""".stripMargin),
expected
)
}
}
}
test("test SortMergeJoin (with spill)") {
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "1",
"spark.sql.sortMergeJoinExec.buffer.in.memory.threshold" -> "0",
"spark.sql.sortMergeJoinExec.buffer.spill.threshold" -> "1") {
assertSpilled(sparkContext, "inner join") {
checkAnswer(
sql("SELECT * FROM testData JOIN testData2 ON key = a where key = 2"),
Row(2, "2", 2, 1) :: Row(2, "2", 2, 2) :: Nil
)
}
val expected = new ListBuffer[Row]()
expected.append(
Row(1, "1", 1, 1), Row(1, "1", 1, 2),
Row(2, "2", 2, 1), Row(2, "2", 2, 2),
Row(3, "3", 3, 1), Row(3, "3", 3, 2)
)
for (i <- 4 to 100) {
expected.append(Row(i, i.toString, null, null))
}
assertSpilled(sparkContext, "left outer join") {
checkAnswer(
sql(
"""
|SELECT
| big.key, big.value, small.a, small.b
|FROM
| testData big
|LEFT OUTER JOIN
| testData2 small
|ON
| big.key = small.a
""".stripMargin),
expected
)
}
assertSpilled(sparkContext, "right outer join") {
checkAnswer(
sql(
"""
|SELECT
| big.key, big.value, small.a, small.b
|FROM
| testData2 small
|RIGHT OUTER JOIN
| testData big
|ON
| big.key = small.a
""".stripMargin),
expected
)
}
// FULL OUTER JOIN still does not use [[ExternalAppendOnlyUnsafeRowArray]]
// so should not cause any spill
assertNotSpilled(sparkContext, "full outer join") {
checkAnswer(
sql(
"""
|SELECT
| big.key, big.value, small.a, small.b
|FROM
| testData2 small
|FULL OUTER JOIN
| testData big
|ON
| big.key = small.a
""".stripMargin),
expected
)
}
}
}
test("outer broadcast hash join should not throw NPE") {
withTempView("v1", "v2") {
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true") {
Seq(2 -> 2).toDF("x", "y").createTempView("v1")
spark.createDataFrame(
Seq(Row(1, "a")).asJava,
new StructType().add("i", "int", nullable = false).add("j", "string", nullable = false)
).createTempView("v2")
checkAnswer(
sql("select x, y, i, j from v1 left join v2 on x = i and y < length(j)"),
Row(2, 2, null, null)
)
}
}
}
test("test SortMergeJoin output ordering") {
val joinQueries = Seq(
"SELECT * FROM testData JOIN testData2 ON key = a",
"SELECT * FROM testData t1 JOIN " +
"testData2 t2 ON t1.key = t2.a JOIN testData3 t3 ON t2.a = t3.a",
"SELECT * FROM testData t1 JOIN " +
"testData2 t2 ON t1.key = t2.a JOIN " +
"testData3 t3 ON t2.a = t3.a JOIN " +
"testData t4 ON t1.key = t4.key")
def assertJoinOrdering(sqlString: String): Unit = {
val df = sql(sqlString)
val physical = df.queryExecution.sparkPlan
val physicalJoins = physical.collect {
case j: SortMergeJoinExec => j
}
val executed = df.queryExecution.executedPlan
val executedJoins = executed.collect {
case j: SortMergeJoinExec => j
}
// This only applies to the above tested queries, in which a child SortMergeJoin always
// contains the SortOrder required by its parent SortMergeJoin. Thus, SortExec should never
// appear as parent of SortMergeJoin.
executed.foreach {
case s: SortExec => s.foreach {
case j: SortMergeJoinExec => fail(
s"No extra sort should be added since $j already satisfies the required ordering"
)
case _ =>
}
case _ =>
}
val joinPairs = physicalJoins.zip(executedJoins)
val numOfJoins = sqlString.split(" ").count(_.toUpperCase(Locale.ROOT) == "JOIN")
assert(joinPairs.size == numOfJoins)
joinPairs.foreach {
case(join1, join2) =>
val leftKeys = join1.leftKeys
val rightKeys = join1.rightKeys
val outputOrderingPhysical = join1.outputOrdering
val outputOrderingExecuted = join2.outputOrdering
// outputOrdering should always contain join keys
assert(
SortOrder.orderingSatisfies(
outputOrderingPhysical, leftKeys.map(SortOrder(_, Ascending))))
assert(
SortOrder.orderingSatisfies(
outputOrderingPhysical, rightKeys.map(SortOrder(_, Ascending))))
// outputOrdering should be consistent between physical plan and executed plan
assert(outputOrderingPhysical == outputOrderingExecuted,
s"Operator $join1 did not have the same output ordering in the physical plan as in " +
s"the executed plan.")
}
}
joinQueries.foreach(assertJoinOrdering)
}
test("SPARK-22445 Respect stream-side child's needCopyResult in BroadcastHashJoin") {
val df1 = Seq((2, 3), (2, 5), (2, 2), (3, 8), (2, 1)).toDF("k", "v1")
val df2 = Seq((2, 8), (3, 7), (3, 4), (1, 2)).toDF("k", "v2")
val df3 = Seq((1, 1), (3, 2), (4, 3), (5, 1)).toDF("k", "v3")
withSQLConf(
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1",
SQLConf.JOIN_REORDER_ENABLED.key -> "false") {
val df = df1.join(df2, "k").join(functions.broadcast(df3), "k")
val plan = df.queryExecution.sparkPlan
// Check if `needCopyResult` in `BroadcastHashJoin` is correct when smj->bhj
val joins = new collection.mutable.ArrayBuffer[BinaryExecNode]()
plan.foreachUp {
case j: BroadcastHashJoinExec => joins += j
case j: SortMergeJoinExec => joins += j
case _ =>
}
assert(joins.size == 2)
assert(joins(0).isInstanceOf[SortMergeJoinExec])
assert(joins(1).isInstanceOf[BroadcastHashJoinExec])
checkAnswer(df, Row(3, 8, 7, 2) :: Row(3, 8, 4, 2) :: Nil)
}
}
test("SPARK-24495: Join may return wrong result when having duplicated equal-join keys") {
withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1",
SQLConf.CONSTRAINT_PROPAGATION_ENABLED.key -> "false",
SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") {
val df1 = spark.range(0, 100, 1, 2)
val df2 = spark.range(100).select($"id".as("b1"), (- $"id").as("b2"))
val res = df1.join(df2, $"id" === $"b1" && $"id" === $"b2").select($"b1", $"b2", $"id")
checkAnswer(res, Row(0, 0, 0))
}
}
test("SPARK-26352: join reordering should not change the order of columns") {
withTable("tab1", "tab2", "tab3") {
spark.sql("select 1 as x, 100 as y").write.saveAsTable("tab1")
spark.sql("select 42 as i, 200 as j").write.saveAsTable("tab2")
spark.sql("select 1 as a, 42 as b").write.saveAsTable("tab3")
val df = spark.sql("""
with tmp as (select * from tab1 cross join tab2)
select * from tmp join tab3 on a = x and b = i
""")
checkAnswer(df, Row(1, 100, 42, 200, 1, 42))
}
}
test("NaN and -0.0 in join keys") {
withTempView("v1", "v2", "v3", "v4") {
Seq(Float.NaN -> Double.NaN, 0.0f -> 0.0, -0.0f -> -0.0).toDF("f", "d").createTempView("v1")
Seq(Float.NaN -> Double.NaN, 0.0f -> 0.0, -0.0f -> -0.0).toDF("f", "d").createTempView("v2")
checkAnswer(
sql(
"""
|SELECT v1.f, v1.d, v2.f, v2.d
|FROM v1 JOIN v2
|ON v1.f = v2.f AND v1.d = v2.d
""".stripMargin),
Seq(
Row(Float.NaN, Double.NaN, Float.NaN, Double.NaN),
Row(0.0f, 0.0, 0.0f, 0.0),
Row(0.0f, 0.0, -0.0f, -0.0),
Row(-0.0f, -0.0, 0.0f, 0.0),
Row(-0.0f, -0.0, -0.0f, -0.0)))
// test with complicated join keys.
checkAnswer(
sql(
"""
|SELECT v1.f, v1.d, v2.f, v2.d
|FROM v1 JOIN v2
|ON
| array(v1.f) = array(v2.f) AND
| struct(v1.d) = struct(v2.d) AND
| array(struct(v1.f, v1.d)) = array(struct(v2.f, v2.d)) AND
| struct(array(v1.f), array(v1.d)) = struct(array(v2.f), array(v2.d))
""".stripMargin),
Seq(
Row(Float.NaN, Double.NaN, Float.NaN, Double.NaN),
Row(0.0f, 0.0, 0.0f, 0.0),
Row(0.0f, 0.0, -0.0f, -0.0),
Row(-0.0f, -0.0, 0.0f, 0.0),
Row(-0.0f, -0.0, -0.0f, -0.0)))
// test with tables with complicated-type columns.
Seq((Array(-0.0f, 0.0f), Tuple2(-0.0d, Double.NaN), Seq(Tuple2(-0.0d, Double.NaN))))
.toDF("arr", "stru", "arrOfStru").createTempView("v3")
Seq((Array(0.0f, -0.0f), Tuple2(0.0d, 0.0/0.0), Seq(Tuple2(0.0d, 0.0/0.0))))
.toDF("arr", "stru", "arrOfStru").createTempView("v4")
checkAnswer(
sql(
"""
|SELECT v3.arr, v3.stru, v3.arrOfStru, v4.arr, v4.stru, v4.arrOfStru
|FROM v3 JOIN v4
|ON v3.arr = v4.arr AND v3.stru = v4.stru AND v3.arrOfStru = v4.arrOfStru
""".stripMargin),
Seq(Row(
Seq(-0.0f, 0.0f),
Row(-0.0d, Double.NaN),
Seq(Row(-0.0d, Double.NaN)),
Seq(0.0f, -0.0f),
Row(0.0d, 0.0/0.0),
Seq(Row(0.0d, 0.0/0.0)))))
}
}
}
| WindCanDie/spark | sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala | Scala | apache-2.0 | 34,475 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.submit
import java.io.StringWriter
import java.util.{Collections, UUID}
import java.util.Properties
import io.fabric8.kubernetes.api.model._
import io.fabric8.kubernetes.client.KubernetesClient
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkApplication
import org.apache.spark.deploy.k8s._
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils
/**
* Encapsulates arguments to the submission client.
*
* @param mainAppResource the main application resource if any
* @param mainClass the main class of the application to run
* @param driverArgs arguments to the driver
*/
private[spark] case class ClientArguments(
mainAppResource: MainAppResource,
mainClass: String,
driverArgs: Array[String])
private[spark] object ClientArguments {

  /**
   * Parses the raw submission command line into a [[ClientArguments]].
   *
   * Arguments are consumed as (flag, value) pairs; an unrecognized flag, or a
   * trailing flag with no value, results in a RuntimeException.
   */
  def fromCommandLineArgs(args: Array[String]): ClientArguments = {
    // Defaults to a Java application with no primary resource.
    var resource: MainAppResource = JavaMainAppResource(None)
    var mainClass: Option[String] = None
    val appArgs = mutable.ArrayBuffer.empty[String]

    args.grouped(2).toList.foreach {
      case Array("--primary-java-resource", primaryJavaResource: String) =>
        resource = JavaMainAppResource(Some(primaryJavaResource))
      case Array("--primary-py-file", primaryPythonResource: String) =>
        resource = PythonMainAppResource(primaryPythonResource)
      case Array("--primary-r-file", primaryRFile: String) =>
        resource = RMainAppResource(primaryRFile)
      case Array("--main-class", clazz: String) =>
        mainClass = Some(clazz)
      case Array("--arg", arg: String) =>
        appArgs += arg
      case unknown =>
        throw new RuntimeException(s"Unknown arguments: ${unknown.mkString(" ")}")
    }

    require(mainClass.isDefined, "Main class must be specified via --main-class")

    ClientArguments(resource, mainClass.get, appArgs.toArray)
  }
}
/**
* Submits a Spark application to run on Kubernetes by creating the driver pod and starting a
* watcher that monitors and logs the application status. Waits for the application to terminate if
* spark.kubernetes.submission.waitAppCompletion is true.
*
* @param conf The kubernetes driver config.
* @param builder Responsible for building the base driver pod based on a composition of
* implemented features.
* @param kubernetesClient the client to talk to the Kubernetes API server
* @param waitForAppCompletion a flag indicating whether the client should wait for the application
* to complete
* @param watcher a watcher that monitors and logs the application status
*/
private[spark] class Client(
    conf: KubernetesDriverConf,
    builder: KubernetesDriverBuilder,
    kubernetesClient: KubernetesClient,
    waitForAppCompletion: Boolean,
    watcher: LoggingPodStatusWatcher) extends Logging {
  /**
   * Builds the driver pod and its auxiliary Kubernetes resources, creates them on the
   * cluster, and (optionally) blocks until the application finishes.
   */
  def run(): Unit = {
    val resolvedDriverSpec = builder.buildFromFeatures(conf, kubernetesClient)
    val configMapName = s"${conf.resourceNamePrefix}-driver-conf-map"
    // All driver system properties are shipped to the pod through a single ConfigMap.
    val configMap = buildConfigMap(configMapName, resolvedDriverSpec.systemProperties)
    // The include of the ENV_VAR for "SPARK_CONF_DIR" is to allow for the
    // Spark command builder to pickup on the Java Options present in the ConfigMap
    val resolvedDriverContainer = new ContainerBuilder(resolvedDriverSpec.pod.container)
      .addNewEnv()
        .withName(ENV_SPARK_CONF_DIR)
        .withValue(SPARK_CONF_DIR_INTERNAL)
        .endEnv()
      .addNewVolumeMount()
        .withName(SPARK_CONF_VOLUME)
        .withMountPath(SPARK_CONF_DIR_INTERNAL)
        .endVolumeMount()
      .build()
    // Mount the ConfigMap volume into the driver pod at the internal conf dir.
    val resolvedDriverPod = new PodBuilder(resolvedDriverSpec.pod.pod)
      .editSpec()
        .addToContainers(resolvedDriverContainer)
        .addNewVolume()
          .withName(SPARK_CONF_VOLUME)
          .withNewConfigMap()
            .withName(configMapName)
            .endConfigMap()
          .endVolume()
        .endSpec()
      .build()
    // Register the watcher before creating the pod so no status events are missed;
    // tryWithResource closes the watch when this block exits.
    Utils.tryWithResource(
      kubernetesClient
        .pods()
        .withName(resolvedDriverPod.getMetadata.getName)
        .watch(watcher)) { _ =>
      val createdDriverPod = kubernetesClient.pods().create(resolvedDriverPod)
      try {
        val otherKubernetesResources =
          resolvedDriverSpec.driverKubernetesResources ++ Seq(configMap)
        addDriverOwnerReference(createdDriverPod, otherKubernetesResources)
        kubernetesClient.resourceList(otherKubernetesResources: _*).createOrReplace()
      } catch {
        case NonFatal(e) =>
          // If any auxiliary resource fails, delete the just-created driver pod
          // so we do not leave an orphaned driver behind, then rethrow.
          kubernetesClient.pods().delete(createdDriverPod)
          throw e
      }
      // Submission ID is "<namespace>:<pod name>" (namespace omitted when null).
      val sId = s"${Option(conf.namespace).map(_ + ":").getOrElse("")}" +
        s"${resolvedDriverPod.getMetadata.getName}"
      if (waitForAppCompletion) {
        logInfo(s"Waiting for application ${conf.appName} with submission ID ${sId} to finish...")
        watcher.awaitCompletion()
        logInfo(s"Application ${conf.appName} with submission ID ${sId} finished.")
      } else {
        logInfo(s"Deployed Spark application ${conf.appName} with " +
          s"submission ID ${sId} into Kubernetes.")
      }
    }
  }
  // Add a OwnerReference to the given resources making the driver pod an owner of them so when
  // the driver pod is deleted, the resources are garbage collected.
  private def addDriverOwnerReference(driverPod: Pod, resources: Seq[HasMetadata]): Unit = {
    val driverPodOwnerReference = new OwnerReferenceBuilder()
      .withName(driverPod.getMetadata.getName)
      .withApiVersion(driverPod.getApiVersion)
      .withUid(driverPod.getMetadata.getUid)
      .withKind(driverPod.getKind)
      .withController(true)
      .build()
    resources.foreach { resource =>
      // Mutates each resource's metadata in place, replacing any existing owner references.
      val originalMetadata = resource.getMetadata
      originalMetadata.setOwnerReferences(Collections.singletonList(driverPodOwnerReference))
    }
  }
  // Build a Config Map that will house spark conf properties in a single file for spark-submit
  private def buildConfigMap(configMapName: String, conf: Map[String, String]): ConfigMap = {
    val properties = new Properties()
    conf.foreach { case (k, v) =>
      properties.setProperty(k, v)
    }
    // Serialize to Java properties format; this becomes the single conf file in the pod.
    val propertiesWriter = new StringWriter()
    properties.store(propertiesWriter,
      s"Java properties built from Kubernetes config map with name: $configMapName")
    new ConfigMapBuilder()
      .withNewMetadata()
        .withName(configMapName)
        .endMetadata()
      .addToData(SPARK_CONF_FILE_NAME, propertiesWriter.toString)
      .build()
  }
}
/**
* Main class and entry point of application submission in KUBERNETES mode.
*/
private[spark] class KubernetesClientApplication extends SparkApplication {

  override def start(args: Array[String], conf: SparkConf): Unit = {
    val parsedArguments = ClientArguments.fromCommandLineArgs(args)
    run(parsedArguments, conf)
  }

  /**
   * Creates the Kubernetes client, builds the driver configuration and submits the
   * application via [[Client]], optionally waiting for it to complete.
   *
   * Note: an unused local (`appName`, read from "spark.app.name") was removed; the
   * application name used for logging is read from the driver conf inside [[Client]].
   */
  private def run(clientArguments: ClientArguments, sparkConf: SparkConf): Unit = {
    // For constructing the app ID, we can't use the Spark application name, as the app ID is going
    // to be added as a label to group resources belonging to the same application. Label values are
    // considerably restrictive, e.g. must be no longer than 63 characters in length. So we generate
    // a unique app ID (captured by spark.app.id) in the format below.
    val kubernetesAppId = s"spark-${UUID.randomUUID().toString.replaceAll("-", "")}"
    val waitForAppCompletion = sparkConf.get(WAIT_FOR_APP_COMPLETION)
    val kubernetesConf = KubernetesConf.createDriverConf(
      sparkConf,
      kubernetesAppId,
      clientArguments.mainAppResource,
      clientArguments.mainClass,
      clientArguments.driverArgs)
    // The master URL has been checked for validity already in SparkSubmit.
    // We just need to get rid of the "k8s://" prefix here.
    val master = KubernetesUtils.parseMasterUrl(sparkConf.get("spark.master"))
    // Only poll/log the driver pod status while we are actually waiting for completion.
    val loggingInterval = if (waitForAppCompletion) Some(sparkConf.get(REPORT_INTERVAL)) else None
    val watcher = new LoggingPodStatusWatcherImpl(kubernetesAppId, loggingInterval)
    // tryWithResource closes the Kubernetes client once submission is done.
    Utils.tryWithResource(SparkKubernetesClientFactory.createKubernetesClient(
      master,
      Some(kubernetesConf.namespace),
      KUBERNETES_AUTH_SUBMISSION_CONF_PREFIX,
      SparkKubernetesClientFactory.ClientType.Submission,
      sparkConf,
      None,
      None)) { kubernetesClient =>
        val client = new Client(
          kubernetesConf,
          new KubernetesDriverBuilder(),
          kubernetesClient,
          waitForAppCompletion,
          watcher)
        client.run()
    }
  }
}
| pgandhi999/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/KubernetesClientApplication.scala | Scala | apache-2.0 | 9,758 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.convert.text
import java.io.{PipedReader, PipedWriter}
import java.util.concurrent.Executors
import com.google.common.collect.Queues
import com.typesafe.config.Config
import org.apache.commons.csv.{CSVFormat, QuoteMode}
import org.locationtech.geomesa.convert.Transformers.Expr
import org.locationtech.geomesa.convert.{Field, SimpleFeatureConverterFactory, ToSimpleFeatureConverter}
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
class DelimitedTextConverterFactory extends SimpleFeatureConverterFactory[String] {

  override def canProcess(conf: Config): Boolean = canProcessType(conf, "delimited-text")

  // Additional CSV variants on top of the stock commons-csv formats.
  val QUOTED = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
  val QUOTE_ESCAPE = CSVFormat.DEFAULT.withEscape('"')
  val QUOTED_WITH_QUOTE_ESCAPE = QUOTE_ESCAPE.withQuoteMode(QuoteMode.ALL)

  /**
   * Builds a [[DelimitedTextConverter]] for the given feature type from the
   * typesafe config: resolves the CSV format by name, the field definitions,
   * the feature-id expression, and an optional pipe buffer size.
   */
  def buildConverter(targetSFT: SimpleFeatureType, conf: Config): DelimitedTextConverter = {
    val formatName = conf.getString("format")
    val csvFormat = formatName match {
      case "DEFAULT"                  => CSVFormat.DEFAULT
      case "EXCEL"                    => CSVFormat.EXCEL
      case "MYSQL"                    => CSVFormat.MYSQL
      case "TDF"                      => CSVFormat.TDF
      case "RFC4180"                  => CSVFormat.RFC4180
      case "QUOTED"                   => QUOTED
      case "QUOTE_ESCAPE"             => QUOTE_ESCAPE
      case "QUOTED_WITH_QUOTE_ESCAPE" => QUOTED_WITH_QUOTE_ESCAPE
      case _                          => throw new IllegalArgumentException("Unknown delimited text format")
    }
    val fieldDefs = buildFields(conf.getConfigList("fields"))
    val idExpr = buildIdBuilder(conf.getString("id-field"))
    // Default pipe buffer of 16 KiB unless overridden by "pipe-size".
    val pipeSize = if (conf.hasPath("pipe-size")) conf.getInt("pipe-size") else 16 * 1024
    new DelimitedTextConverter(csvFormat, targetSFT, idExpr, fieldDefs, pipeSize)
  }
}
class DelimitedTextConverter(format: CSVFormat,
                             val targetSFT: SimpleFeatureType,
                             val idBuilder: Expr,
                             val inputFields: IndexedSeq[Field],
                             val inputSize: Int = 16*1024)
  extends ToSimpleFeatureConverter[String] {
  // NOTE(review): not referenced inside this class; presumably used by the
  // parent trait or callers - confirm before removing.
  var curString: String = null
  // Bounded hand-off queue between fromInputType callers and the writer thread.
  val q = Queues.newArrayBlockingQueue[String](32)
  // if the record to write is bigger than the buffer size of the PipedReader
  // then the writer will block until the reader reads data off of the pipe.
  // For this reason, we have to separate the reading and writing into two
  // threads
  val writer = new PipedWriter()
  val reader = new PipedReader(writer, inputSize) // 16k records
  val parser = format.parse(reader).iterator()
  val separator = format.getRecordSeparator
  val es = Executors.newSingleThreadExecutor()
  // Background thread: drains the queue and feeds lines (with the record
  // separator appended) into the pipe the CSV parser reads from.
  es.submit(new Runnable {
    override def run(): Unit = {
      while (true) {
        val s = q.take()
        // make sure the input is not null and is nonempty...if it is empty the threads will deadlock
        if (s != null && s.nonEmpty) {
          writer.write(s)
          writer.write(separator)
          writer.flush()
        }
      }
    }
  })
  // Parses one delimited line; the result holds the raw input line at index 0
  // followed by each parsed column value.
  def fromInputType(string: String): Array[Any] = {
    import spire.syntax.cfor._
    // empty strings cause deadlock
    if (string == null || string.isEmpty) throw new IllegalArgumentException("Invalid input (empty)")
    q.put(string)
    val rec = parser.next()
    val len = rec.size()
    val ret = Array.ofDim[Any](len + 1)
    ret(0) = string
    cfor(0)(_ < len, _ + 1) { i =>
      ret(i+1) = rec.get(i)
    }
    ret
  }
  // Stops the background writer thread and releases both ends of the pipe.
  override def close(): Unit = {
    es.shutdownNow()
    writer.close()
    reader.close()
  }
}
| giserh/geomesa | geomesa-convert/geomesa-convert-text/src/main/scala/org/locationtech/geomesa/convert/text/DelimitedTextConverter.scala | Scala | apache-2.0 | 4,156 |
package test
// See: https://github.com/milessabin/si2712fix-demo/issues/3
object Test {
  // Compiler regression test for higher-kinded type inference (SI-2712 partial
  // unification); the ascriptions below pin the types the compiler must infer.
  trait A[T1, T2] { }
  trait B[T1, T2] { }
  class C[T] extends A[T, Long] with B[T, Double]
  class CB extends A[Boolean, Long] with B[Boolean, Double]
  trait A2[T]
  trait B2[T]
  class C2[T] extends A2[T] with B2[T]
  class CB2 extends A2[Boolean] with B2[Boolean]
  // The compiler must pick a type constructor M[_] for each call site.
  def meh[M[_], A](x: M[A]): M[A] = x
  val m0 = meh(new C[Boolean])
  m0: C[Boolean]
  val m1 = meh(new CB)
  m1: B[Boolean, Double] // note: different order in which parents are visited for hk type inference. Dotty picks linearization order.
  val m2 = meh(new C2[Boolean])
  m2: C2[Boolean]
  val m3 = meh(new CB2)
  m3: B2[Boolean] // note: different order in which parents are visited for hk type inference. Dotty picks linearization order.
}
| som-snytt/dotty | tests/pos/t2712-2.scala | Scala | apache-2.0 | 811 |
package edison.yaml.project
import edison.model.domain
import edison.yaml.project.DocumentStructure.Project
import edison.yaml.project.DocumentStructure.Project.SearchDomain.ParamDef
import edison.yaml.project.DocumentStructure.Project.SearchDomain.ParamDef.ParamDomain.ParamDomainEnum.Values
import edison.yaml.project.DocumentStructure.Project.SearchDomain.ParamDef.ParamDomain.Type
import edison.yaml.project.DocumentStructure.Project.SearchDomain.ParamDef.ParamDomain.ParamDomainInteger.{ End, Start, Step }
import edison.yaml.project.DocumentStructure.Project.SearchDomain.ParamDef.{ ParamDomain, ParamName }
import edison.yaml.project.DocumentStructure.Project.{ ProjectName, SearchDomain }
import edison.yaml.project.ParserHelpers._
import edison.yaml.project.ProjectDefinitionParser.ParseResult
import scala.util.Try
object ProjectParser {
  /** Parses the YAML root node into a [[domain.Project]] (name + search domain). */
  def parse(root: Any): ParseResult[domain.Project] =
    Project.parse(root).flatMap { mapping =>
      ProjectName.parse(mapping).flatMap { projectName =>
        SearchDomainParser.parse(mapping).map(domain.Project(projectName, _))
      }
    }
}
object SearchDomainParser {
  // Parses the search-domain section of a project mapping.
  // The .get calls deliberately run inside Try, so any underlying parse
  // failure is captured as a failed ParseResult instead of escaping as an
  // uncaught exception.
  def parse(project: Mapping): ParseResult[domain.SearchDomain] = Try {
    val paramDefs = SearchDomain.parse(project).get.map(ParamDefParser.parse) map { _.get }
    domain.SearchDomain(domain.ParamDefs(paramDefs: _*))
  }
}
object ParamDefParser {
  /** Parses one parameter-definition node into a [[domain.ParamDef]] (name + domain). */
  def parse(paramDef: Any): ParseResult[domain.ParamDef] =
    ParamDef.parse(paramDef).flatMap { mapping =>
      ParamName.parse(mapping).flatMap { name =>
        ParamDomain.parse(mapping).flatMap(ParamDomainParser.parse).map { paramDomain =>
          domain.ParamDef(name, paramDomain)
        }
      }
    }
}
object ParamDomainParser {
  // Dispatches on the declared "type" of the parameter domain; any value other
  // than "Integer" or "Enum" is rejected as a parse error.
  def parse(paramDomain: Mapping): ParseResult[domain.ParamDomain] = Type.parse(paramDomain) flatMap {
    case "Integer" => ParamDomainIntegerParser.parse(paramDomain)
    case "Enum" => ParamDomainEnumParser.parse(paramDomain)
    case _ => parseError(ParamDomain.Type.Invalid)
  }
}
object ParamDomainIntegerParser {
  // Builds an inclusive integer range from the start/end/step fields; any
  // extraction failure is captured by the surrounding Try.
  def parse(paramDomain: Mapping): ParseResult[domain.ParamDomainInteger] =
    Try { domain.ParamDomainInteger(Range.inclusive(Start(paramDomain), End(paramDomain), Step(paramDomain))) }
}
object ParamDomainEnumParser {
  // Parses the list of enumeration values into a domain.ParamDomainEnum.
  def parse(paramDomain: Mapping): ParseResult[domain.ParamDomainEnum[_]] =
    Values.parse(paramDomain) map { seq => domain.ParamDomainEnum(parseEnumeration(seq)) }
}
| pawel-wiejacha/edison | service/src/main/scala/edison/yaml/project/ElementParsers.scala | Scala | mit | 2,392 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spot.netflow.model
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
import org.apache.spot.SuspiciousConnectsArgumentParser.SuspiciousConnectsConfig
import org.apache.spot.netflow.FlowSchema._
import org.apache.spot.testutils.TestingSparkContextFlatSpec
import org.scalatest.Matchers
class FlowSuspiciousConnectsModelTest extends TestingSparkContextFlatSpec with Matchers {
  // Fixed suspicious-connects configuration shared by tests in this suite.
  val testConfig = SuspiciousConnectsConfig(analysis = "flow",
    inputPath = "",
    feedbackFile = "",
    duplicationFactor = 1,
    topicCount = 20,
    hdfsScoredConnect = "",
    threshold = 1.0d,
    maxResults = 1000,
    outputDelimiter = "\\t",
    ldaPRGSeed = None,
    ldaMaxiterations = 20,
    ldaAlpha = 1.02,
    ldaBeta = 1.001)
  // Fixture: raw and pre-scored netflow records. Several input rows carry a
  // null in exactly one column each, apparently to exercise invalid-record
  // handling. NOTE(review): the first row has hour = 24 (out of the 0-23
  // range) - presumably an intentional invalid record; confirm.
  def testFlowRecords = new {
    val inputFlowRecordsRDD = sparkSession.sparkContext.parallelize(wrapRefArray(Array(
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 24, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 59, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 59, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq(null, 2016, 5, 5, 13, 54, 58, 0.972, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l, 12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, null, "10.0.2.202", 1024, 80, "TCP", 39l, 12522l, 0l,
        0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", null, 1024, 80, "TCP", 39l, 12522l,
        0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", null, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, null, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", null,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        null, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, null, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, null),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l))
      .map(row => Row.fromSeq(row))))
    val inputFlowRecordsSchema = StructType(
      Array(TimeReceivedField,
        YearField,
        MonthField,
        DayField,
        HourField,
        MinuteField,
        SecondField,
        DurationField,
        SourceIPField,
        DestinationIPField,
        SourcePortField,
        DestinationPortField,
        ProtocolField,
        IpktField,
        IbytField,
        OpktField,
        ObytField))
    val inputFlowRecordsDF = sparkSession.createDataFrame(inputFlowRecordsRDD, inputFlowRecordsSchema)
    // Same record shape plus a trailing score column (values span the extremes
    // -1 and 1 plus several small positives).
    val scoredFlowRecordsRDD = sparkSession.sparkContext.parallelize(wrapRefArray(Array(
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l, -1d),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l, 1d),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l, 0.0000005d),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l, 0.05d),
      Seq("2016-05-05 13:54:58", 2016, 5, 5, 13, 54, 58, 0.972d, "172.16.0.129", "10.0.2.202", 1024, 80, "TCP", 39l,
        12522l, 0l, 0l, 0.0001d))
      .map(row => Row.fromSeq(row))))
    val scoredFlowRecordsSchema = StructType(
      Array(TimeReceivedField,
        YearField,
        MonthField,
        DayField,
        HourField,
        MinuteField,
        SecondField,
        DurationField,
        SourceIPField,
        DestinationIPField,
        SourcePortField,
        DestinationPortField,
        ProtocolField,
        IpktField,
        IbytField,
        OpktField,
        ObytField,
        ScoreField))
    val scoredFlowRecordsDF = sparkSession.createDataFrame(scoredFlowRecordsRDD, scoredFlowRecordsSchema)
  }
}
| brandon-edwards/incubator-spot | spot-ml/src/test/scala/org/apache/spot/netflow/model/FlowSuspiciousConnectsModelTest.scala | Scala | apache-2.0 | 5,783 |
package spark
import java.io.File
import java.net.InetAddress
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.server.handler.DefaultHandler
import org.eclipse.jetty.server.handler.HandlerList
import org.eclipse.jetty.server.handler.ResourceHandler
import org.eclipse.jetty.util.thread.QueuedThreadPool
/**
 * Exception type thrown by HttpServer when an operation is attempted in the
 * wrong lifecycle state (e.g. starting an already-started server).
 */
private[spark] class ServerStateException(message: String) extends Exception(message)
/**
* An HTTP server for static content used to allow worker nodes to access JARs added to SparkContext
* as well as classes created by the interpreter when the user types in code. This is just a wrapper
* around a Jetty server.
*/
private[spark] class HttpServer(resourceBase: File) extends Logging {
  private var server: Server = null
  private var port: Int = -1

  /** Starts the Jetty server on an ephemeral port, serving files from resourceBase. */
  def start(): Unit = {
    if (server != null) {
      throw new ServerStateException("Server is already started")
    }
    server = new Server(0)
    // Daemon worker threads so the embedded server never blocks JVM shutdown.
    val pool = new QueuedThreadPool
    pool.setDaemon(true)
    server.setThreadPool(pool)
    // Serve static content from resourceBase, falling back to Jetty's default handler.
    val staticFiles = new ResourceHandler
    staticFiles.setResourceBase(resourceBase.getAbsolutePath)
    val handlers = new HandlerList
    handlers.setHandlers(Array(staticFiles, new DefaultHandler))
    server.setHandler(handlers)
    server.start()
    port = server.getConnectors()(0).getLocalPort()
  }

  /** Stops the server and resets the recorded port. */
  def stop(): Unit = {
    if (server == null) {
      throw new ServerStateException("Server is already stopped")
    }
    server.stop()
    port = -1
    server = null
  }

  /**
   * Get the URI of this HTTP server (http://host:port)
   */
  def uri: String = {
    if (server == null) {
      throw new ServerStateException("Server is not started")
    }
    "http://" + Utils.localIpAddress + ":" + port
  }
}
| joeywen/spark_cpp_api | core/src/main/scala/spark/HttpServer.scala | Scala | bsd-3-clause | 1,928 |
def extend[W[_], A, B]: (W[A] => B) => W[A] => W[B] | hmemcpy/milewski-ctfp-pdf | src/content/3.7/code/scala/snippet12.scala | Scala | gpl-3.0 | 51 |
package com.github.norwae.ignifera
import akka.actor.{Actor, ActorSystem, DeadLetter, Props, UnhandledMessage}
import io.prometheus.client.Counter
/**
* Optional additional statistics describing the status of the actor
* system.
*/
class AkkaStats private extends Actor {
  // Subscribe to the system event stream so every published DeadLetter and
  // UnhandledMessage event is delivered to this actor.
  context.system.eventStream.subscribe(self, classOf[DeadLetter])
  context.system.eventStream.subscribe(self, classOf[UnhandledMessage])
  // Increment the matching Prometheus counter for each observed event.
  def receive: Receive = {
    case _: DeadLetter =>
      AkkaStats.deadLetterCount.inc()
    case _: UnhandledMessage =>
      AkkaStats.unhandledCount.inc()
  }
}
object AkkaStats {
  // Counters are created eagerly here but only attached to the default
  // Prometheus registry when register() is called.
  private val deadLetterCount =
    Counter.build("app_akka_dead_letters", "Nr of dead letters encountered").create()
  private val unhandledCount =
    Counter.build("app_akka_unhandled_msg", "Nr of unhandled messages in the system").create()
  /**
   * Register the additional stats. The default implementation will register two additional
   * stats, counting the dead letters and unhandled messages.
   *
   * NOTE(review): not idempotent - calling this twice re-registers the same
   * collectors, which presumably fails on duplicate registration; confirm
   * callers invoke it at most once per registry.
   *
   * @param system actor system to observe.
   */
  def register()(implicit system: ActorSystem): Unit = {
    system.actorOf(Props(new AkkaStats))
    deadLetterCount.register()
    unhandledCount.register()
  }
}
| Norwae/ignifera | src/main/scala/com/github/norwae/ignifera/AkkaStats.scala | Scala | bsd-2-clause | 1,238 |
package com.github.agourlay.cornichon.json
import io.circe.{ Json, JsonObject }
import com.github.agourlay.cornichon.json.JsonPath._
import munit.FunSuite
class CornichonJsonSpec extends FunSuite with CornichonJson {
  // Reference parser: parses with circe directly and throws on invalid JSON
  // (test helper only; failures should never occur on the fixtures here).
  def refParser(input: String): Json =
    io.circe.parser.parse(input).fold(e => throw e, identity)
  // Wraps a Map of named fields into a circe JSON object value.
  def mapToJsonObject(m: Map[String, Json]): Json =
    Json.fromJsonObject(JsonObject.fromMap(m))
  // Parses a JsonPath expression, throwing on failure (test helper only).
  def parseUnsafe(path: String): JsonPath =
    parse(path).valueUnsafe
  // --- parseDslJson: JSON objects, arrays and markdown-style data tables ---
  test("parseJson object String") {
    val expected = mapToJsonObject(Map("name" -> Json.fromString("cornichon")))
    assert(parseDslJson("""{"name":"cornichon"}""") == Right(expected))
  }
  test("parseJson object String with spaces") {
    val expected = mapToJsonObject(Map("name" -> Json.fromString("cornichon")))
    assert(parseDslJson(""" {"name":"cornichon"}""") == Right(expected))
  }
  test("parseJson JSON Array string") {
    val expected = Json.fromValues(Seq(
      mapToJsonObject(Map("name" -> Json.fromString("cornichon"))),
      mapToJsonObject(Map("name" -> Json.fromString("scala")))
    ))
    assert(parseDslJson("""[ {"name":"cornichon"}, {"name":"scala"} ]""") == Right(expected))
  }
  // A data table parses to an array of one object per row.
  test("parseJson data-table") {
    val expected =
      """
        |[
        |{
        |"2LettersName" : false,
        | "Age": 50,
        | "Name": "John"
        |},
        |{
        |"2LettersName" : true,
        | "Age": 11,
        | "Name": "Bob"
        |}
        |]
      """.stripMargin
    assert(parseDslJson("""
        | Name | Age | 2LettersName |
        | "John" | 50 | false |
        | "Bob" | 11 | true |
      """) == Right(refParser(expected)))
  }
  // Empty cells must simply be omitted from the resulting JSON objects.
  test("parseJson data table with empty cell values") {
    val parsed = parseDataTable(
      """
        | Name | Age | 2LettersName |
        | | | false |
        | "Bob" | 11 | |
      """
    )
    assert(parsed == Right(List(
      """
      {
        "2LettersName" : false
      }
      """,
      """
      {
        "Age": 11,
        "Name": "Bob"
      }
      """) map (refParser(_).asObject.get)))
  }
  test("parseJson parse data table as a map of raw string values") {
    assert(parseDataTableRaw(
      """
        | Name | Age | 2LettersName |
        | | | false |
        | Bob | 11 | |
      """
    ) == Right(List(
      Map("2LettersName" -> "false"),
      Map("Age" -> "11", "Name" -> "Bob"))))
  }
  // --- isJsonString: true only for bare strings, not empty/objects/arrays ---
  test("isJsonString detects invalid empty string") {
    assert(!isJsonString(""))
  }
  test("isJsonString detects a string") {
    assert(isJsonString("a"))
  }
  test("isJsonString detects an object") {
    assert(!isJsonString(""" { "a" : "v"} """))
  }
  test("isJsonString detects an array") {
    assert(!isJsonString(""" [ "a", "v"] """))
  }
  // --- removeFieldsByPath: deleting fields addressed by JsonPath ---
  test("removeFieldsByPath removes everything if root path") {
    val input =
      """
        |{
        |"2LettersName" : false,
        | "Age": 50,
        | "Name": "John"
        |}
      """.stripMargin
    assert(removeFieldsByPath(refParser(input), Seq(rootPath)) == Json.Null)
  }
  test("removeFieldsByPath removes nothing if path does not exist") {
    val input =
      """
        |{
        |"2LettersName" : false,
        | "Age": 50,
        | "Name": "John"
        |}
      """.stripMargin
    assert(removeFieldsByPath(refParser(input), parseUnsafe("blah") :: Nil) == refParser(input))
  }
  test("removeFieldsByPath removes root keys") {
    val input =
      """
        |{
        |"2LettersName" : false,
        | "Age": 50,
        | "Name": "John"
        |}
      """.stripMargin
    val expected =
      """
        |{
        | "Age": 50
        |}
      """.stripMargin
    val paths = Seq("2LettersName", "Name").map(parseUnsafe)
    assert(removeFieldsByPath(refParser(input), paths) == refParser(expected))
  }
  // A root-level key must not be removed from nested objects sharing its name.
  test("removeFieldsByPath removes only root keys") {
    val input =
      """
        |{
        |"name" : "bob",
        |"age": 50,
        |"brothers":[
        |  {
        |    "name" : "john",
        |    "age": 40
        |  }
        |]
        |} """.stripMargin
    val expected = """
        |{
        |"age": 50,
        |"brothers":[
        |  {
        |    "name" : "john",
        |    "age": 40
        |  }
        |]
        |} """.stripMargin
    val paths = Seq("name").map(parseUnsafe)
    assert(removeFieldsByPath(refParser(input), paths) == refParser(expected))
  }
  // Indexed paths target one array element only; siblings are untouched.
  test("removeFieldsByPath removes keys inside specific indexed element") {
    val input =
      """
        |{
        |"name" : "bob",
        |"age": 50,
        |"brothers":[
        |  {
        |    "name" : "john",
        |    "age": 40
        |  },
        |  {
        |    "name" : "jim",
        |    "age": 30
        |  }
        |]
        |}
      """.stripMargin
    val expected = """
        |{
        |"name" : "bob",
        |"age": 50,
        |"brothers":[
        |  {
        |    "age": 40
        |  },
        |  {
        |    "name" : "jim",
        |    "age": 30
        |  }
        |]
        |} """.stripMargin
    val paths = Seq("brothers[0].name").map(parseUnsafe)
    assert(removeFieldsByPath(refParser(input), paths) == refParser(expected))
  }
//FIXME - done manually in BodyArrayAssertion for now
// test("removeFieldsByPath removes field in each element of a root array") {
// val input =
// """
// |[
// |{
// | "name" : "bob",
// | "age": 50
// |},
// |{
// | "name" : "jim",
// | "age": 40
// |},
// |{
// | "name" : "john",
// | "age": 30
// |}
// |]
// """.stripMargin
//
// val expected =
// """
// |[
// |{
// | "name" : "bob"
// |},
// |{
// | "name" : "jim"
// |},
// |{
// | "name" : "john"
// |}
// |]
// """.stripMargin
//
// val paths = Seq("age").map(parseUnsafe)
// assert(removeFieldsByPath(refParser(input), paths) == Right(refParser(expected)))
// }
//FIXME - done manually in BodyArrayAssertion for now
// test("removeFieldsByPath removes field in each element of a nested array") {
// val input =
// """
// |{
// |"people":[
// |{
// | "name" : "bob",
// | "age": 50
// |},
// |{
// | "name" : "jim",
// | "age": 40
// |},
// |{
// | "name" : "john",
// | "age": 30
// |}
// |]
// |}
// """.stripMargin
//
// val expected =
// """
// |{
// |"people":[
// |{
// | "name" : "bob"
// |},
// |{
// | "name" : "jim"
// |},
// |{
// | "name" : "john"
// |}
// |]
// |}
// """.stripMargin
//
// val paths = Seq("people[*].age").map(parseUnsafe)
// assert(removeFieldsByPath(refParser(input), paths) == Right(refParser(expected)))
// }
  // Only the path under "brother" is removed; the identical field under "friend" stays.
  test("removeFieldsByPath is correct even with duplicate Fields") {
    val input =
      """
        |{
        |"name" : "bob",
        |"age": 50,
        |"brother":[
        |  {
        |    "name" : "john",
        |    "age": 40
        |  }
        |],
        |"friend":[
        |  {
        |    "name" : "john",
        |    "age": 30
        |  }
        |]
        |}
      """.stripMargin
    val expected =
      """
        |{
        |"name" : "bob",
        |"age": 50,
        |"brother":[
        |  {
        |    "age": 40
        |  }
        |],
        |"friend":[
        |  {
        |    "name" : "john",
        |    "age": 30
        |  }
        |]
        |}
      """.stripMargin
    val paths = Seq("brother[0].name").map(parseUnsafe)
    assert(removeFieldsByPath(refParser(input), paths) == refParser(expected))
  }
  // GraphQL-style JSON: unquoted keys, bare enum values and '#' comments are accepted.
  test("parseGraphQLJson nominal case") {
    val in = """
      {
        id: 1
        name: "door"
        items: [
          # pretty broken door
          {state: Open, durability: 0.1465645654675762354763254763343243242}
          null
          {state: Open, durability: 0.5, foo: null}
        ]
      }
      """
    val expected = """
      {
        "id": 1,
        "name": "door",
        "items": [
          {"state": "Open", "durability": 0.1465645654675762354763254763343243242},
          null,
          {"state": "Open", "durability": 0.5, "foo": null}
        ]
      }
      """
    val out = parseGraphQLJson(in)
    assert(out == Right(refParser(expected)))
  }
  // --- findAllPathWithValue: locating every JsonPath holding a given value ---
  test("findAllContainingValue handles empty values array") {
    val input = "target value"
    assert(findAllPathWithValue(Nil, parseDslJsonUnsafe(input)) == Nil)
  }
  test("findAllContainingValue find root value") {
    val input = "target value"
    assert(findAllPathWithValue("target value" :: Nil, parseDslJsonUnsafe(input)) == List(rootPath))
  }
  // Match is exact, not substring: "target value" is not found in "target values".
  test("findAllContainingValue not find root value") {
    val input = "target values"
    assert(findAllPathWithValue("target value" :: Nil, parseDslJsonUnsafe(input)) == Nil)
  }
  test("findAllContainingValue find root key") {
    val input =
      """
        |{
        |"2LettersName" : false,
        | "Age": 50,
        | "Name": "John"
        |}
      """.stripMargin
    assert(findAllPathWithValue("John" :: Nil, parseDslJsonUnsafe(input)) == List(parseUnsafe("$.Name")))
  }
  test("findAllContainingValue finds nested key") {
    val input =
      """
        |{
        | "2LettersName" : false,
        | "Age": 50,
        | "Name": "John",
        | "Brother": {
        |   "Name" : "Paul",
        |   "Age": 50
        | }
        |}
      """.stripMargin
    assert(findAllPathWithValue("Paul" :: Nil, parseDslJsonUnsafe(input)) == List(parseUnsafe("$.Brother.Name")))
  }
  test("findAllContainingValue finds key in array") {
    val input =
      """
        |{
        | "2LettersName": false,
        | "Age": 50,
        | "Name": "John",
        | "Brothers": [
        |   {
        |     "Name" : "Paul",
        |     "Age": 50
        |   },
        |   {
        |     "Name": "Bob",
        |     "Age" : 30
        |   }
        | ]
        |}
      """.stripMargin
    assert(findAllPathWithValue("Bob" :: Nil, parseDslJsonUnsafe(input)) == List(parseUnsafe("$.Brothers[1].Name")))
  }
  test("findAllContainingValue finds key in array of strings") {
    val input =
      """
        |{
        | "2LettersName" : false,
        | "Age": 50,
        | "Name": "John",
        | "Hobbies": [ "Basketball", "Climbing", "Coding"]
        |}
      """.stripMargin
    assert(findAllPathWithValue("Coding" :: Nil, parseDslJsonUnsafe(input)) == List(parseUnsafe("$.Hobbies[2]")))
  }
}
| agourlay/cornichon | cornichon-core/src/test/scala/com/github/agourlay/cornichon/json/CornichonJsonSpec.scala | Scala | apache-2.0 | 11,934 |
package unfiltered
/** Module defining Cookie-related constants.
 * Attribute names follow RFC 2965; see also [[http://tools.ietf.org/html/rfc2965#page-5]] */
object CookieKeys {
  val Path = "Path"
  val Expires = "Expires"
  val MaxAge = "Max-Age"
  val Domain = "Domain"
  val Secure = "Secure"
  val HTTPOnly = "HTTPOnly"
  val Comment = "Comment"
  val CommentURL = "CommentURL"
  val Discard = "Discard"
  val Port = "Port"
  val Version = "Version"
  // Precomputed lowercase forms of the names above, for case-insensitive parsing.
  val LCPath = Path.toLowerCase
  val LCExpires = Expires.toLowerCase
  val LCMaxAge = MaxAge.toLowerCase
  val LCDomain = Domain.toLowerCase
  val LCSecure = Secure.toLowerCase
  val LCHTTPOnly = HTTPOnly.toLowerCase
  val LCComment = Comment.toLowerCase
  val LCCommentURL = CommentURL.toLowerCase
  val LCDiscard = Discard.toLowerCase
  val LCPort = Port.toLowerCase
  val LCVersion = Version.toLowerCase
  /** Named properties that do not have an associated value */
  val KeyOnly = Seq(Discard, Secure, HTTPOnly)
}
/**
 * An immutable HTTP cookie. All attributes beyond name/value are optional and
 * default to absent, so a bare pair can be built with `Cookie(name, value)`.
 */
case class Cookie(name: String, value: String, domain: Option[String] = None,
                  path: Option[String] = None, maxAge: Option[Int] = None,
                  secure: Option[Boolean] = None, httpOnly: Boolean = false,
                  version: Int = 0)
| omarkilani/unfiltered | library/src/main/scala/cookies.scala | Scala | mit | 1,213 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.message
/**
 * Pairs a [[Message]] with the offset of the next message, allowing a
 * message-set iterator to know where to resume after consuming this entry.
 *
 * @param message the message payload
 * @param offset  the offset of the message that follows this one
 */
case class MessageAndOffset(message: Message, offset: Long)
| tnachen/kafka | core/src/main/scala/kafka/message/MessageAndOffset.scala | Scala | apache-2.0 | 1,005 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server.checkpoints
import kafka.server.LogDirFailureChannel
import kafka.utils.{Logging, TestUtils}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.errors.KafkaStorageException
import org.junit.Assert._
import org.junit.Test
import org.scalatest.junit.JUnitSuite
import scala.collection.Map
/** Unit tests for [[OffsetCheckpointFile]] persistence behaviour. */
class OffsetCheckpointFileTest extends JUnitSuite with Logging {

  @Test
  def shouldPersistAndOverwriteAndReloadFile(): Unit = {
    val checkpoint = new OffsetCheckpointFile(TestUtils.tempFile())

    // First write round-trips through the file unchanged.
    val firstOffsets = Map(new TopicPartition("foo", 1) -> 5L, new TopicPartition("bar", 2) -> 10L)
    checkpoint.write(firstOffsets)
    assertEquals(firstOffsets, checkpoint.read())

    // A second write replaces (does not merge with) the previous contents.
    val secondOffsets = Map(new TopicPartition("foo", 2) -> 15L, new TopicPartition("bar", 3) -> 20L)
    checkpoint.write(secondOffsets)
    assertEquals(secondOffsets, checkpoint.read())
  }

  @Test
  def shouldHandleMultipleLines(): Unit = {
    val checkpoint = new OffsetCheckpointFile(TestUtils.tempFile())

    // Ten partitions: "foo" partitions 1-5 at offset 5, "bar" partitions 6-10 at offset 10.
    val offsets: Map[TopicPartition, Long] =
      ((1 to 5).map(p => new TopicPartition("foo", p) -> 5L) ++
        (6 to 10).map(p => new TopicPartition("bar", p) -> 10L)).toMap

    checkpoint.write(offsets)
    assertEquals(offsets, checkpoint.read())
  }

  @Test
  def shouldReturnEmptyMapForEmptyFile(): Unit = {
    val checkpoint = new OffsetCheckpointFile(TestUtils.tempFile())

    // A freshly created checkpoint file reads back as empty...
    assertEquals(Map(), checkpoint.read())

    // ...and explicitly writing an empty map keeps it that way.
    checkpoint.write(Map())
    assertEquals(Map(), checkpoint.read())
  }

  @Test(expected = classOf[KafkaStorageException])
  def shouldThrowIfVersionIsNotRecognised(): Unit = {
    val file = TestUtils.tempFile()
    val failureChannel = new LogDirFailureChannel(10)
    // Write a checkpoint with an unsupported (future) version number...
    val rawCheckpoint = new CheckpointFile(file, OffsetCheckpointFile.CurrentVersion + 1,
      OffsetCheckpointFile.Formatter, failureChannel, file.getParent)
    rawCheckpoint.write(Seq(new TopicPartition("foo", 5) -> 10L))
    // ...then reading it back through OffsetCheckpointFile must fail.
    new OffsetCheckpointFile(rawCheckpoint.file, failureChannel).read()
  }
}
| ollie314/kafka | core/src/test/scala/unit/kafka/server/checkpoints/OffsetCheckpointFileTest.scala | Scala | apache-2.0 | 3,261 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.util.control
/**
 * A marker trait indicating that the `Throwable` it is mixed into is
 * intended for flow control.
 *
 * Note that `Throwable` subclasses which extend this trait may extend any
 * other `Throwable` subclass (e.g. `RuntimeException`) and are not required
 * to extend `Throwable` directly.
 *
 * Instances of `Throwable` subclasses marked in this way should not
 * normally be caught. Where catch-all behaviour is required,
 * `ControlThrowable`s should be propagated, for example:
 *
 * {{{
 * import scala.util.control.ControlThrowable
 *
 * try {
 *   // Body might throw arbitrarily
 * } catch {
 *   case ce : ControlThrowable => throw ce // propagate
 *   case t : Exception => log(t)           // log and suppress
 * }
 * }}}
 *
 * @author Miles Sabin
 */
trait ControlThrowable extends Throwable with NoStackTrace
| cran/rkafkajars | java/scala/util/control/ControlThrowable.scala | Scala | apache-2.0 | 1,480 |
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah, Eugene Yokota
*
* Copied from sbt 0.12 source code
*/
package net.virtualvoid.sbt.graph
import sbt.SbtDependencyGraphCompat
/** Renders a tree of nodes as an ASCII-art hierarchy, e.g.
  *
  * {{{
  * foo
  *   +-bar
  *   | +-baz
  *   |
  *   +-quux
  * }}}
  */
object Graph
{
  /** Produces the ASCII rendering of the tree rooted at `top`.
    *
    * @param top       root node
    * @param children  function yielding the ordered children of a node
    * @param display   function rendering a node's label
    * @param maxColumn maximum rendered line width; longer lines are truncated with ".."
    */
  def toAscii[A](top: A,
                 children: A => Seq[A],
                 display: A => String,
                 maxColumn: Int = defaultColumnSize): String = {
    // Two separate single-space literals so editors can't silently fold them into a tab.
    val indentUnit = " " + " "

    // Truncate over-long lines, keeping room for the ".." ellipsis.
    def truncated(line: String): String =
      if (line.length <= maxColumn) line
      else line.slice(0, maxColumn - 2) + ".."

    // Overwrite the character at `column` with "|" when it is a blank,
    // producing the vertical connector between siblings.
    def withBarAt(line: String, column: Int): String =
      if (column >= line.length) line
      else {
        val replacement = line(column).toString match {
          case " " => "|"
          case other => other
        }
        line.slice(0, column) + replacement + line.slice(column + 1, line.length)
      }

    // Recursively render a node and its subtree, one string per output line.
    def render(node: A, depth: Int): Vector[String] = {
      val prefix = if (depth == 0) "" else "+-"
      val header = truncated((indentUnit * depth) + prefix + display(node))
      val subtrees = Vector(children(node): _*)
      val renderedSubtrees = subtrees map (render(_, depth + 1))
      val joined = renderedSubtrees.zipWithIndex flatMap {
        // All but the last child get a "|" connector threaded through their lines.
        case (lines, index) if index < (subtrees.size - 1) =>
          lines map (withBarAt(_, 2 * (depth + 1)))
        // The last child is followed by a spacer line unless one is already there.
        case (lines, _) =>
          if (lines.last.trim != "") lines ++ Vector(indentUnit * (depth + 1))
          else lines
      }
      header +: joined
    }

    render(top, 0).mkString("\\n")
  }

  /** Width used when the caller does not supply one: terminal width minus a margin. */
  def defaultColumnSize: Int = {
    val termWidth = SbtDependencyGraphCompat.getTerminalWidth
    if (termWidth > 20) termWidth - 8
    else 80 // fall back to a fixed width when the terminal is unusably narrow
  }
}
| rintcius/sbt-dependency-graph | src/main/scala/net/virtualvoid/sbt/graph/Graph.scala | Scala | apache-2.0 | 1,631 |
package lila.simul
import chess.variant.Variant
import lila.user.User
import org.joda.time.{ DateTime, Duration }
import ornicar.scalalib.Random
/**
 * A simultaneous exhibition: one host playing several applicants at once.
 *
 * The simul moves through three phases: created (gathering applicants),
 * started (applicants become pairings), and finished.
 */
case class Simul(
    _id: Simul.ID,
    name: String,
    status: SimulStatus,
    clock: SimulClock,
    applicants: List[SimulApplicant],
    pairings: List[SimulPairing],
    variants: List[Variant],
    createdAt: DateTime,
    hostId: String,
    hostRating: Int,
    hostGameId: Option[String], // game the host is focusing on
    startedAt: Option[DateTime],
    finishedAt: Option[DateTime],
    hostSeenAt: Option[DateTime],
    color: Option[String]) {

  def id = _id

  def fullName = s"$name simul"

  // Status predicates.
  def isCreated = !isStarted

  def isStarted = startedAt.isDefined

  def isFinished = status == SimulStatus.Finished

  def isRunning = status == SimulStatus.Started

  // Membership checks for applicants and active pairings.
  def hasApplicant(userId: String) = applicants.exists(_ is userId)

  def hasPairing(userId: String) = pairings.exists(_ is userId)

  def hasUser(userId: String) = hasApplicant(userId) || hasPairing(userId)

  // Adds an applicant, ignoring duplicates and unsupported variants.
  // Only allowed while the simul has not started (see Created below).
  def addApplicant(applicant: SimulApplicant) = Created {
    if (!hasApplicant(applicant.player.user) && variants.contains(applicant.player.variant))
      copy(applicants = applicants :+ applicant)
    else this
  }

  def removeApplicant(userId: String) = Created {
    copy(applicants = applicants filterNot (_ is userId))
  }

  // Host accepting/rejecting a single applicant.
  def accept(userId: String, v: Boolean) = Created {
    copy(applicants = applicants map {
      case a if a is userId => a.copy(accepted = v)
      case a => a
    })
  }

  // Removing a pairing may leave every remaining pairing finished,
  // in which case the whole simul finishes.
  def removePairing(userId: String) =
    copy(pairings = pairings filterNot (_ is userId)).finishIfDone

  // A simul can start once more than one applicant has been accepted.
  def startable = isCreated && applicants.count(_.accepted) > 1

  // Transition created -> started: accepted applicants become pairings.
  def start = startable option copy(
    status = SimulStatus.Started,
    startedAt = DateTime.now.some,
    applicants = Nil,
    pairings = applicants collect {
      case a if a.accepted => SimulPairing(a.player)
    },
    hostSeenAt = none)

  def updatePairing(gameId: String, f: SimulPairing => SimulPairing) = copy(
    pairings = pairings collect {
      case p if p.gameId == gameId => f(p)
      case p => p
    }).finishIfDone

  // Drops a cheating user entirely (both as applicant and as pairing).
  def ejectCheater(userId: String): Option[Simul] =
    hasUser(userId) option removeApplicant(userId).removePairing(userId)

  // Finishes the simul when no unfinished pairing remains.
  private def finishIfDone =
    if (pairings.forall(_.finished))
      copy(
        status = SimulStatus.Finished,
        finishedAt = DateTime.now.some,
        hostGameId = none)
    else this

  def gameIds = pairings.map(_.gameId)

  // Rating categories touched by this simul, derived from its variants.
  def perfTypes: List[lila.rating.PerfType] = variants.flatMap { variant =>
    lila.game.PerfPicker.perfType(
      speed = chess.Speed.Classical,
      variant = variant,
      daysPerTurn = none)
  }

  def applicantRatio = s"${applicants.count(_.accepted)}/${applicants.size}"

  def variantRich = variants.size > 3

  def isHost(userOption: Option[User]) = userOption ?? (_.id == hostId)

  def playingPairings = pairings filterNot (_.finished)

  // Host color: the configured one, or a random color when unset/invalid.
  def hostColor = (color flatMap chess.Color.apply) | {
    if (scala.util.Random.nextBoolean) chess.White else chess.Black
  }

  // Guard: apply the transformation only while the simul has not started.
  private def Created(s: => Simul): Simul = if (isCreated) s else this
}
}
object Simul {

  // Simul identifier, stored as an 8-character uppercase random string.
  type ID = String

  /** Creates a fresh simul in the Created state for the given host. */
  def make(
    host: User,
    clock: SimulClock,
    variants: List[Variant],
    color: String): Simul = Simul(
    _id = Random nextStringUppercase 8,
    name = RandomName(),
    status = SimulStatus.Created,
    clock = clock,
    hostId = host.id,
    // Host rating: the best of the host's ratings across the simul's variants.
    hostRating = host.perfs.bestRatingIn {
      variants flatMap { variant =>
        lila.game.PerfPicker.perfType(
          speed = chess.Speed(clock.chessClock.some),
          variant = variant,
          daysPerTurn = none)
      }
    },
    hostGameId = none,
    createdAt = DateTime.now,
    variants = variants,
    applicants = Nil,
    pairings = Nil,
    startedAt = none,
    finishedAt = none,
    hostSeenAt = DateTime.now.some,
    color = color.some)
}
| Happy0/lila | modules/simul/src/main/Simul.scala | Scala | mit | 3,978 |
/*
* The MIT License (MIT)
*
* Copyright (C) 2012 47 Degrees, LLC http://47deg.com hello@47deg.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*/
package com.fortysevendeg
package object mvessel {

  // Explicit alias for the `null` values the JDBC API requires; makes
  // intentional nulls stand out at call sites.
  val javaNull = null

  // Textual form of SQL NULL.
  val nullString = "NULL"

  /** Converts a 1-based JDBC column index into a 0-based sequence index. */
  implicit class IndexOps(columnIndex: Int) {
    def index: Int = columnIndex - 1
  }

  /** SQL string escaping: every embedded single quote is doubled. */
  implicit class SQLStringOps(string: String) {
    def escape: String = string flatMap {
      case '\'' => "''"
      case other => other.toString
    }
  }
}
| 47deg/mvessel | core/src/main/scala/com/fortysevendeg/mvessel/package.scala | Scala | mit | 994 |
package notebook.front.widgets
import notebook.JsonCodec._
import notebook._
import notebook.front._
import org.apache.spark.sql.{DataFrame, SQLContext}
import play.api.Logger
import play.api.libs.json._
import scala.util._
/**
 * A widget that runs a (possibly parameterized) SQL statement against the
 * given `SQLContext` and exposes the resulting `DataFrame` reactively.
 *
 * Parameters are embedded in the statement as `{Type: name}` placeholders;
 * an input widget is generated for each one and the statement is re-executed
 * whenever any input value changes.
 *
 * @param sqlContext the Spark SQL context used to execute the statement
 * @param call       the SQL text, optionally containing typed placeholders
 */
class Sql(sqlContext: SQLContext, call: String) extends Widget {
  // Matches an entire `{...}` placeholder.
  private[this] val sqlInputRegex = "(\\\\{[^\\\\}]+\\\\})".r
  // Splits a placeholder into its type token and name.
  private[this] val sqlTypedInputRegex = "^\\\\{([^:]+):(.*)\\\\}$".r

  // SQL fragment preceding a placeholder, paired with the typed input for it.
  private type Item = (String, TypedInput[_])

  // `parts`: one (prefix, input) pair per placeholder, in order.
  // `after`: the SQL fragment following the last placeholder ("" when none).
  val (parts: List[Item], after: String) = {
    val inputs = sqlInputRegex.findAllMatchIn(call).toList
    val r = inputs match {
      case Nil => Nil
      case x :: Nil =>
        val b = x.before.toString
        val sqlTypedInputRegex(tpe, name) = x.matched
        val r = (b, TypedInput(tpe, name.trim))
        r :: List.empty[Item]
      case x :: xs =>
        val b = x.before.toString
        val sqlTypedInputRegex(tpe, name) = x.matched
        val h = (b, TypedInput(tpe, name.trim))
        // For each consecutive pair of matches, recover the SQL text between them.
        val t = inputs.sliding(2).toList.map {
          case i :: j :: Nil =>
            val b = j.before.toString.substring(i.before.toString.length + i.matched.length)
            val sqlTypedInputRegex(tpe, name) = j.matched
            (b, TypedInput(tpe, name.trim))
        }
        h :: t
    }
    (r, Try(inputs.last.after.toString).toOption.getOrElse(""))
  }

  import rx.lang.scala.{Observable => RxObservable, Observer => RxObserver, _}

  // One stream of (parameter name, new value) pairs, merged over all inputs.
  val mergedObservables: RxObservable[(String, Any)] = {
    val l: List[RxObservable[(String, Any)]] = parts.map { p =>
      val ob = p._2.widget.currentData.observable.inner //.doOnEach(x => Logger.debug("########:"+x.toString))
      val o: RxObservable[(String, Any)] = ob.map((d: Any) => (p._2.name, d))
      // NOTE: doOnError/doOnCompleted return decorated observables; the
      // decorated instance must be the one kept, otherwise the logging
      // callbacks are silently dropped (the previous code discarded them).
      o.doOnError { t =>
        Logger.warn(s"${p._1} is errored with ${t.getMessage}")
        //t.printStackTrace()
      }.doOnCompleted(
        Logger.warn(s"${p._1} is completed")
      )
    }
    RxObservable.from(l).flatten
  }

  // Widget displaying the textual outcome of the last execution:
  // the DataFrame's toString on success, the error message on failure.
  val sql = new SingleConnector[Option[Try[DataFrame]]] with Widget {
    implicit val codec = new Codec[JsValue, Option[Try[DataFrame]]] {
      // Client -> server direction carries no data.
      def encode(x: JsValue): Option[Try[DataFrame]] = None

      def decode(x: Option[Try[DataFrame]]): JsValue = JsString {
        x.flatMap {
          case Success(s) => Some(s.toString())
          case Failure(ex) => Some(ex.getMessage)
        }.getOrElse("<no enough info>")
      }
    }

    // Knockout binding showing the current value of the connected observable.
    lazy val toHtml = <p data-bind="text: value">
      {scopedScript(
        """ req(
              ['observable', 'knockout'],
              function (O, ko) {
                ko.applyBindings({
                    value: O.makeObservable(valueId)
                  },
                  this
                );
              }
            );
        """,
        Json.obj("valueId" -> dataConnection.id)
      )}
    </p>
  }

  // Replays the latest execution result to late subscribers.
  val subject: Subject[Option[Try[DataFrame]]] = subjects.ReplaySubject(1)
  // Replays the latest reacted value (see `react`).
  var result: Subject[Any] = subjects.ReplaySubject(1)

  /** Executes the given SQL text, publishing the outcome to `subject` and the display widget. */
  def updateValue(c: String) = {
    val tried: Option[Try[DataFrame]] = Some(Try {
      sqlContext.sql(c)
    })
    Logger.info(" Tried => " + tried.toString)
    subject.onNext(tried)
    sql(tried)
    tried
  }

  // Run immediately when the statement has no placeholders; otherwise wait
  // for the first input values to arrive.
  sql {
    parts match {
      case Nil => updateValue(call)
      case xs => None
    }
  }

  /** Applies `f` to every successful DataFrame and feeds the result into widget `w`. */
  def react[A](f: DataFrame => A, w: SingleConnectedWidget[A]) = {
    result.subscribe(x => w(x.asInstanceOf[A])) //argl → asInstanceOf
    val sub = (o: Option[Try[DataFrame]]) => {
      o match {
        case Some(Success(s)) =>
          val r = f(s)
          result.onNext(r)
        case x =>
          Logger.error("ARRrrggllll → " + x.toString)
      }
    }
    subject.subscribe(sub)
    //subject.orElse(None).subscribe(sub)
    w
  }

  // Re-executes the statement each time any parameter value changes,
  // substituting the latest known value for every placeholder.
  mergedObservables.subscribe(new RxObserver[(String, Any)]() {
    // Last seen value per parameter name; "" until a value arrives.
    val values: collection.mutable.Map[String, Any] = collection.mutable.HashMap[String, Any]().withDefaultValue("")

    override def onNext(value: (String, Any)): Unit = {
      values += value
      val s = parts.map { case (before, input) =>
        val vv = values(input.name)
        before + vv.toString
      }.mkString("")
      val c = s + after
      updateValue(c)
    }

    override def onError(error: Throwable): Unit = {
      Logger.warn(s"Merged errored with ${error.getMessage}")
      //error.printStackTrace()
      super.onError(error)
    }

    override def onCompleted(): Unit = {
      Logger.warn(s"Merged completed!")
      super.onCompleted()
    }
  })

  // Composite widget: all parameter inputs followed by the result display.
  val ws: Widget = {
    val ps = parts.map(_._2.widget) match {
      case Nil => out
      case xs => xs.reduce((x: Widget, y: Widget) => x ++ y)
    }
    ps ++ sql
  }

  lazy val toHtml = ws.toHtml
}
/** Companion providing implicit widening of a [[Sql]] to its composite widget. */
object Sql {
  implicit def toWidget(sql: Sql): Widget = sql.ws
}
import notebook.front.widgets.types._
/**
 * A typed SQL placeholder: pairs the placeholder's name with the input
 * widget used to collect a value of the corresponding Scala type.
 */
sealed trait TypedInput[T] {
  // Parameter name as written inside the `{Type: name}` placeholder.
  def name: String
  // Widget used to capture the value for this placeholder.
  def widget: Widget with SingleConnector[T]
}
// One TypedInput implementation per supported placeholder type; each supplies
// an InputBox seeded with a neutral default value.

case class BooleanInput(name: String) extends TypedInput[Boolean] {
  val widget = new InputBox[Boolean](false, name)
}

case class CharInput(name: String) extends TypedInput[Char] {
  val widget = new InputBox[Char](' ', name)
}

case class StringInput(name: String) extends TypedInput[String] {
  val widget = new InputBox[String]("", name)
}

case class DateInput(name: String) extends TypedInput[java.util.Date] {
  // Dates are rendered/parsed with the ISO-style `yyyy-MM-dd` pattern.
  implicit val d: java.text.DateFormat = new java.text.SimpleDateFormat("yyyy-MM-dd")
  val widget = new InputBox[java.util.Date](new java.util.Date(), name)
}

case class IntInput(name: String) extends TypedInput[Int] {
  // Custom codec: accepts both JSON numbers and numeric strings from the
  // client, and always emits the value back as a string.
  implicit val codec: Codec[JsValue, Int] = JsonCodec.formatToCodec {
    val r = Reads.of[Int] orElse Reads.of[String].map(_.toInt)
    val w = Writes.of[Int].transform { x =>
      val JsNumber(n) = x
      JsString(n.toString())
    }
    Format(r, w)
  }
  val widget = new InputBox[Int](0, name)(implicitly[InputType[Int]], codec)
}

case class LongInput(name: String) extends TypedInput[Long] {
  val widget = new InputBox[Long](0, name)
}

case class FloatInput(name: String) extends TypedInput[Float] {
  val widget = new InputBox[Float](0, name)
}

case class DoubleInput(name: String) extends TypedInput[Double] {
  val widget = new InputBox[Double](0, name)
}
/**
 * Factory for [[TypedInput]]s, mapping the type token used in a SQL
 * placeholder (e.g. `{Int: limit}`) to the matching input implementation.
 */
object TypedInput {
  def apply(tpe: String, name: String): TypedInput[_] = tpe match {
    case "Boolean" => BooleanInput(name)
    case "Char" => CharInput(name)
    case "String" => StringInput(name)
    case "Date" => DateInput(name)
    case "Int" => IntInput(name)
    case "Long" => LongInput(name)
    case "Float" => FloatInput(name)
    case "Double" => DoubleInput(name)
    // Fail with a descriptive message instead of an opaque MatchError when
    // the SQL text declares a placeholder type that has no implementation.
    case other => throw new IllegalArgumentException(
      s"Unsupported input type '$other' for SQL parameter '$name'")
  }
}
package com.cloudray.scalapress.plugin.sitemap
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation.{ResponseBody, RequestMapping}
import org.springframework.beans.factory.annotation.Autowired
import javax.servlet.http.HttpServletResponse
import org.apache.commons.io.IOUtils
import com.cloudray.scalapress.framework.ScalapressContext
/** @author Stephen Samuel */
@Controller
@Autowired
class SitemapController(context: ScalapressContext) {

  /** Builds the site's URL list and streams it to the client as XML. */
  @ResponseBody
  @RequestMapping(value = Array("sitemap"), produces = Array("text/xml"))
  def sitemap(resp: HttpServletResponse): Unit = {
    val urls = UrlBuilder.build(context)
    val sitemap = SitemapWriter.write(urls)
    // Write with an explicit charset: the charset-less IOUtils overload uses
    // the JVM default encoding, which can corrupt non-ASCII URLs on some hosts.
    IOUtils.write(sitemap, resp.getOutputStream, "UTF-8")
  }
}
| vidyacraghav/scalapress | src/main/scala/com/cloudray/scalapress/plugin/sitemap/SitemapController.scala | Scala | apache-2.0 | 761 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
package xml
/**
 * Marker PSI element for a CDATA section (`<![CDATA[ ... ]]>`) occurring
 * inside a Scala XML literal.
 *
 * @author Alexander Podkhalyuzin
 * Date: 21.04.2008
 */
trait ScXmlCDSect extends ScalaPsiElement {
}
package com.eigengo.lift.exercise.classifiers.workflows
import akka.stream.scaladsl.FlexiMerge
/**
 * Flowgraph merge node that expects a message on each of its inputs. The collection of input messages is then outputted.
 * This merge node can be thought of as a generalised Zip or ZipWith node.
 *
 * Emission only happens once a message is available on every input
 * (`ReadAll`), and the node closes eagerly when any input completes.
 *
 * @param size number of inputs that may be joined to this merge node
 */
abstract class ZipBundle[A] private[workflows] (size: Int) extends FlexiMerge[Set[A]] {
  require(size > 1, s"ZipBundle must have at least 2 connected inputs ($size were given)")
  import FlexiMerge._
  // One typed input port per joined upstream.
  protected val inPorts = (0 until size).map { _ => createInputPort[A]() }.toVector
  def createMergeLogic() = new MergeLogic[Set[A]] {
    // Wait for a value on every port, then emit them all as a single Set.
    def initialState = State[ReadAllInputs](ReadAll(inPorts)) { (ctx, _, inputs) =>
      ctx.emit(inPorts.flatMap(port => inputs.get[A](port)).toSet)
      SameState[A]
    }
    def inputHandles(inputCount: Int) = {
      require(inputCount == size, s"ZipBundle must have $size connected inputs, was $inputCount")
      inPorts
    }
    // Close the whole merge as soon as any single input completes or fails.
    override def initialCompletionHandling = eagerClose
  }
}
/**
 * Utility classes and factories for using ZipBundle. They differ in how we reference the input ports.
 *
 * ZipN - input ports are referenced using indexes from 0 until size
 * ZipSet - input ports are referenced using a set of locations or addresses
 */
class ZipN[A] private[workflows] (size: Int) extends ZipBundle[A](size) {
  // Input ports addressed positionally: in(0) .. in(size - 1).
  val in = inPorts
}
object ZipN {
  /** Creates a ZipN joining `size` inputs. */
  def apply[A](size: Int) = new ZipN[A](size)
}
/** ZipBundle variant whose input ports are addressed by a key of type `L`. */
class ZipSet[A, L] private[workflows] (keys: Set[L]) extends ZipBundle[A](keys.size) {
  // Input ports addressed by key: in(key) for each key in `keys`.
  val in = keys.zipWithIndex.map { case (key, index) => (key, inPorts(index)) }.toMap
}
object ZipSet {
  /** Creates a ZipSet with one input port per key. */
  def apply[A, L](keys: Set[L]) = new ZipSet[A, L](keys)
}
| lachatak/lift | server/exercise/src/main/scala/com/eigengo/lift/exercise/classifiers/workflows/ZipNodes.scala | Scala | apache-2.0 | 1,787 |
package org.ocular
import android.app.Activity
import android.content.Context
import android.support.annotation.UiThread
import android.view.View
import org.ocular.ui._
import org.ocular.utils.MainThreadExecutor
/** Renders `UiComponent` definitions into the activity's content view on the UI thread. */
class UiProcessor(activity: Activity) {

  // Last definition that was rendered.
  private var _currentDef: UiComponent[_] = _
  // Content view currently installed in the activity, if any.
  private var _previousContentView: Option[View] = None

  /** Schedules rendering of `uiDef` on the main thread. */
  def apply(uiDef: UiComponent[_ <: View]) =
    MainThreadExecutor.execute(renderTask(uiDef))

  /** Builds the Runnable that materialises `uiDef` into a view and installs it. */
  @UiThread private def renderTask(uiDef: UiComponent[_ <: View]) = new Runnable {
    override def run() = {
      val rendered = UiProcessor.processComponentToView(uiDef, _previousContentView.orNull, activity)
      installContentView(rendered)
      _currentDef = uiDef
    }
  }

  /** Swaps the activity's content view, unless `view` is already the installed instance. */
  @UiThread private def installContentView(view: View) = {
    val alreadyInstalled = _previousContentView.exists(_ eq view)
    if (!alreadyInstalled) {
      _previousContentView = Option(view)
      activity.setContentView(view)
    }
  }
}
object UiProcessor {
  // Process-wide cache used to recycle previously created views.
  private val _viewsCache = new ViewCache
  /** Renders `ui` to a view, reusing `previousView` from the cache when its type matches. */
  @UiThread def processComponentToView[T <: View](ui: UiComponent[T], previousView: View, context: Context): T = {
    val componentView = _viewsCache.reuseAs(previousView)(ui.classTag).getOrElse(ui.createView(context))
    // Binding may release child views for reuse; put them back into the cache.
    ui.bindView(componentView, context).foreach(_viewsCache.cacheView)
    componentView
  }
}
| dant3/ocular | core/src/main/scala/org/ocular/UiProcessor.scala | Scala | apache-2.0 | 1,295 |
package org.mentha.tools.archimate.model.view
import java.util
import scala.util._
package object dsl {
import org.mentha.tools.archimate.model._
import org.mentha.tools.archimate.model.edges._
import org.mentha.tools.archimate.model.edges.impl._
  /** Pimp allowing a node concept to be attached to an implicit view via `.attach`. */
  implicit class NodeConceptToView[T <: NodeConcept](val concept: T) {
    @inline def attach(implicit view: View): ViewNodeConcept[T] = view.attach_node(concept)
  }
  /** Pimp allowing a relationship to be attached to an implicit view via `.attach`. */
  implicit class EdgeConceptToView[T <: Relationship](val concept: T) {
    @inline def attach(implicit view: View): ViewRelationship[T] = view.attach_edge(concept)
  }
  /** Compass directions used by the relative-placement DSL (`place`, `move`, `routeLoop`). */
  object directions extends Enumeration {
    type Type = Value
    val Left, Right, Up, Down = Value
  }
  /** Linear interpolation helpers between two view objects.
    * `shift = 0.0` yields v1's value, `1.0` yields v2's, `0.5` (default) the midpoint. */
  object geometry {
    @inline def x(v1: ViewObject, v2: ViewObject, shift: Double = 0.5) = {
      (1.0 - shift) * v1.position.x + (shift) * v2.position.x
    }
    @inline def y(v1: ViewObject, v2: ViewObject, shift: Double = 0.5) = {
      (1.0 - shift) * v1.position.y + (shift) * v2.position.y
    }
    @inline def w(v1: ViewObject, v2: ViewObject, shift: Double = 0.5) = {
      (1.0 - shift) * v1.size.width + (shift) * v2.size.width
    }
    @inline def h(v1: ViewObject, v2: ViewObject, shift: Double = 0.5) = {
      (1.0 - shift) * v1.size.height + (shift) * v2.size.height
    }
  }
  /** Placement and sizing DSL for view nodes. The implicit `space` parameter
    * is the gap kept between neighbouring elements. */
  implicit class ImplicitViewNode[+T <: ViewNode](node: T) {
    /** Multiplies the node's width by `scale`, keeping the height. */
    def scaleWidth(scale: Double): T = node withSize {
      Size(scale * node.size.width, node.size.height)
    }
    /** Multiplies the node's height by `scale`, keeping the width. */
    def scaleHeight(scale: Double): T = node withSize {
      Size(node.size.width, scale * node.size.height)
    }
    def doubleWidth(): T = scaleWidth(2.0)
    def doubleHeight(): T = scaleHeight(2.0)
    @inline def place(dir: directions.Type, v: ViewObject)(implicit space: Size): T = place(dir, v, v)(space)
    /** places the element between the given element + shifted to the given direction */
    def place(dir: directions.Type, v1: ViewObject, v2: ViewObject)(implicit space: Size): T = node.withPosition(
      dir match {
        case directions.Up => Vector(
          x = geometry.x(v1, v2),
          y = geometry.y(v1, v2) - (geometry.h(v1, v2) + node.size.height) / 2 - space.height
        )
        case directions.Down => Vector(
          x = geometry.x(v1, v2),
          y = geometry.y(v1, v2) + (geometry.h(v1, v2) + node.size.height) / 2 + space.height
        )
        case directions.Left => Vector(
          x = geometry.x(v1, v2) - (geometry.w(v1, v2) + node.size.width) / 2 - space.width,
          y = geometry.y(v1, v2)
        )
        case directions.Right => Vector(
          x = geometry.x(v1, v2) + (geometry.w(v1, v2) + node.size.width) / 2 + space.width,
          y = geometry.y(v1, v2)
        )
      }
    )
    /** places the element in the intersection of vertical and horizontal position of the given elements */
    def place(vx: ViewObject, vy: ViewObject)(implicit space: Size): T = node.withPosition(
      Vector(
        x = vx.position.x,
        y = vy.position.y
      )
    )
    /** Shifts the node by `amount` element-plus-gap units in the given direction. */
    def move(dir: directions.Type, amount: Double = 1.0)(implicit space: Size): T = node.withPosition(
      dir match {
        case directions.Up => Vector(
          x = node.position.x,
          y = node.position.y - (node.size.height + space.height) * amount
        )
        case directions.Down => Vector(
          x = node.position.x,
          y = node.position.y + (node.size.height + space.height) * amount
        )
        case directions.Left => Vector(
          x = node.position.x - (node.size.width + space.width) * amount,
          y = node.position.y
        )
        case directions.Right => Vector(
          x = node.position.x + (node.size.width + space.width) * amount,
          y = node.position.y
        )
      }
    )
    /** wraps the current element over the given group */
    // Invokes `callback(node, el)` for every wrapped element (e.g. to create
    // connections), then resizes/repositions the node to the elements'
    // bounding box plus a margin derived from `space`.
    def wrap(callback: (ViewNode, ViewNode) => Unit, elements: ViewNode*)(implicit space: Size): T = {
      if (elements.isEmpty) {
        throw new IllegalStateException("")
      }
      var x0 = Double.MaxValue
      var y0 = Double.MaxValue
      var x1 = Double.MinValue
      var y1 = Double.MinValue
      for { el <- elements } {
        callback(node, el)
        x0 = Math.min(x0, el.position.x - el.size.width / 2)
        x1 = Math.max(x1, el.position.x + el.size.width / 2)
        y0 = Math.min(y0, el.position.y - el.size.height / 2)
        y1 = Math.max(y1, el.position.y + el.size.height / 2)
      }
      node withPosition { Vector( (x0+x1)/2, (y0+y1)/2 - 0.15*space.height ) } withSize { Size((x1-x0) + 0.5*space.width, (y1-y0) + 0.75*space.height) }
    }
  }
  /** `wrap` callback that links the wrapper to each wrapped node.
    *
    * For a plain group the link is a simple view connection. For concept
    * nodes it reuses an existing structural relationship between the two
    * concepts (preferring composition, then assignment, realization,
    * aggregation), or creates the first structural relationship type that
    * validates between them; fails if none is possible.
    */
  def wrapWithComposition(implicit model: Model, view: View): (ViewNode, ViewNode) => Unit = {
    case (src: ViewGroup, dst) => view.add { new ViewConnection(src, dst) }
    case (src: ViewNodeConcept[_], dst: ViewNodeConcept[_]) => {
      view.add {
        new ViewRelationship[Relationship](src, dst)(
          model
            .edges
            .collect { case e: StructuralRelationship if e.source == src.concept && e.target == dst.concept => e }
            .toSeq
            .sortBy {
              case _: CompositionRelationship => 1
              case _: AssignmentRelationship => 2
              case _: RealizationRelationship => 3
              case _: AggregationRelationship => 4
            }
            .headOption
            .getOrElse {
              model.add {
                Seq(
                  StructuralRelationships.compositionRelationship,
                  StructuralRelationships.assignmentRelationship,
                  StructuralRelationships.realizationRelationship,
                  StructuralRelationships.aggregationRelationship
                )
                  .iterator // make it lazy (we want to try each type of relation one by one)
                  .map { meta => Try[Relationship] { meta.newInstance(src.concept, dst.concept).validate } }
                  .collectFirst { case Success(r) => r }
                  .getOrElse { throw new IllegalArgumentException(s"There is no possible relationship between ${src} and ${dst}") }
              }
            }
        )
      }
    }
    case (src, dst) => throw new IllegalArgumentException(s"There is no possible relationship between ${src} and ${dst}")
  }
  /** Routing DSL for view edges: bend points, relative routes and self-loops. */
  implicit class ImplicitViewEdge[+T <: ViewEdge](edge: T) {
    /** Sets the edge's bend points to the given absolute coordinates. */
    def points(route: (Double, Double)*): T = edge.withPoints {
      route.map { case (x, y) => Vector(x, y) }
    }
    /** Sets bend points from deltas, each relative to the previous point
      * (starting at the edge's source position). */
    def route(route: (Double, Double)*): T = edge.withPoints {
      route.foldLeft((edge.source.position, List[Vector]())) {
        case ((sp, seq), (x, y)) => {
          val p = Vector(sp.x + x, sp.y + y)
          (p, p :: seq)
        }
      } match {
        case (_, b) => b.reverse
      }
    }
    /** Bends a non-loop edge sideways: one bend point per level value, spread
      * evenly along the edge and offset perpendicular to it. */
    def flex(levels: Double*): T = {
      if (edge.source eq edge.target) {
        throw new IllegalStateException(s"Edge: ${edge} is a loop. Use the `routeLoop` method instead.")
      } else {
        // Perpendicular offset vector, proportional to the edge's length.
        val vx = edge.target.position.x - edge.source.position.x
        val vy = edge.target.position.y - edge.source.position.y
        val dA = 0.1
        val rx = -vy*dA
        val ry = vx*dA
        val l = 1.0 + levels.length
        edge.withPoints(
          levels.zipWithIndex.map {
            case (lvl, idx) => Vector(
              geometry.x(edge.source, edge.target, (1.0 + idx) / l) + rx*lvl,
              geometry.y(edge.source, edge.target, (1.0 + idx) / l) + ry*lvl
            )
          }
        )
      }
    }
    /** Routes a self-loop around the node on the given side; `level` widens
      * the loop so multiple loops on the same side do not overlap. */
    def routeLoop(dir: directions.Type, level: Int = 1): T = {
      if (edge.source eq edge.target) {
        val x = edge.source.position.x
        val y = edge.source.position.y
        val h = edge.source.size.height
        val w = edge.source.size.width
        // Growth factors for successive loop levels.
        val d10 = 1.0 + 0.25*level
        val d20 = Math.pow(d10, 2.0)
        val d05 = Math.pow(d10, 0.5)
        val dA = 0.45*d20
        val dB = 0.75*d05
        val dC = 0.95*d10
        dir match {
          case directions.Up => points(
            (x - dA*h, y - dB*h), (x, y - dC*h), (x + dA*h, y - dB*h)
          )
          case directions.Down => points(
            (x - dA*h, y + dB*h), (x, y + dC*h), (x + dA*h, y + dB*h)
          )
          case directions.Left => points(
            (x - dB*h, y - dA*h), (x - dC*h, y), (x - dB*h, y + dA*h),
          )
          case directions.Right => points(
            (x + dB*h, y - dA*h), (x + dC*h, y), (x + dB*h, y + dA*h),
          )
        }
      } else {
        throw new IllegalStateException(s"Edge: ${edge} is not a loop. Use the `flex` method instead.")
      }
    }
  }
private val LAYOUT_ITERATIONS: Int = 1000
def in(view: View) = new {
// direct calls
@inline def apply[T <: Concept](r: => T): T = r match {
case n: NodeConcept => node(n).concept.asInstanceOf[T]
case e: Relationship => edge(e).concept.asInstanceOf[T]
}
// Nouns for objects
def node[T <: NodeConcept](r: => NodeConcept with T): ViewNodeConcept[NodeConcept with T] = r.attach(view)
def edge[T <: Relationship](r: => Relationship with T): ViewRelationship[Relationship with T] = r.attach(view)
def notes(text: String): ViewNotes = view.add { new ViewNotes withText { text } }
def connection(left: ViewObject, right: ViewObject): ViewConnection = view.add { new ViewConnection(left, right) }
@inline def connection(left: ViewObject, right: Concept): ViewConnection = connection(left, view.attach(right))
@inline def connection(left: Concept, right: ViewObject): ViewConnection = connection(view.attach(left), right)
//
def notes(vo: ViewObject)(text: String): ViewNotes = {
val n = view.add { new ViewNotes withText { text } }
connection(vo, n)
n
}
@inline def notes(concept: Concept)(text: String): ViewNotes = {
notes( view.attach(concept) )(text)
}
// Verbs for flows
def add(v: View): this.type = {
v._objects.values.foreach {
case e: ViewConcept[_] => add(e.concept)
case _ =>
}
this
}
def del(concept: Concept): this.type = {
view._objects.values
.collect {
case n: ViewNodeConcept[_] if (concept eq n.concept) => n
case r: ViewRelationship[_] if (concept eq r.concept) || (concept eq r.concept.source) || (concept eq r.concept.target) => r
}
.foreach { _.markAsDeleted() }
this
}
@inline def add(concept: Concept): this.type = { view.attach(concept); this }
@inline def addNotes(concept: Concept)(text: String): this.type = { connection(concept, notes(text)); this }
@inline def connect(left: ViewObject, right: ViewObject): this.type = { connection(left, right); this }
@inline def connect(left: ViewObject, right: Concept): this.type = { connection(left, right); this }
@inline def connect(left: Concept, right: ViewObject): this.type = { connection(left, right); this }
@inline def add(vo: ViewObject): this.type = {
vo match {
case v: ViewNodeConcept[_] => view.attach_node(v.concept) withSize { v.size } withPosition { v.position }
case v: ViewRelationship[_] => view.attach_edge(v.concept) withPoints { v.points }
case _ => throw new UnsupportedOperationException(vo.getClass.getName)
}
this
}
def remove(concept: Concept): this.type = { view.attach(concept).markAsDeleted(marker = true); this }
def remove(vo: ViewObject): this.type = {
vo match {
case v: ViewNodeConcept[_] => view.attach_node(v.concept).markAsDeleted(marker = true)
case v: ViewRelationship[_] => view.attach_edge(v.concept).markAsDeleted(marker = true)
case _ => throw new UnsupportedOperationException(vo.getClass.getName)
}
this
}
def borrowEdges(edgesSource: Iterable[EdgeConcept]): this.type = {
edgesSource.foreach { add(_) }
this
}
def borrowEdges()(implicit model: Model): this.type = {
borrowEdges(model.edges)
}
def borrowEdges(v: View): this.type = {
borrowEdges {
v.edges.collect {
case e: ViewRelationship[_] if view.locate(e.concept.source).isDefined && view.locate(e.concept.target).isDefined => e.concept
}
}
}
/** Experimental API */
def placeLikeBefore()(implicit model: Model): this.type = {
val nodes = view.nodes.collect { case n: ViewNodeConcept[_] => n }.toVector
val conceptIds = nodes.map { _.concept.id }.toSet
val head = model
.views.zipWithIndex
.collect {
case (v, idx) if v ne view =>
val count = v.nodes.count {
case n: ViewNodeConcept[_] => conceptIds.contains(n.concept.id)
case _ => false
}
(count, idx, v)
}
.toVector
.sortBy { case (count, idx, _) => (-count, idx) }
.headOption
for {
(_, _, v) <- head
node <- nodes
l <- v.nodes.collectFirst { case n: ViewNodeConcept[_] if n.concept.id == node.concept.id => n }
} {
node withPosition { l.position } withSize { l.size }
}
this
}
/** Experimental API */
// Scatters all nodes with Gaussian-distributed coordinates around the origin.
// NOTE(review): the RNG is seeded with the constant 0, so the "random" layout is
// identical on every call — presumably intentional for reproducibility; confirm.
def placeRandomly(): this.type = {
    val rnd = new util.Random(0)
    for { node <- view.nodes } {
      node.withPosition( Vector(x = rnd.nextGaussian(), y = rnd.nextGaussian()) )
    }
    this
  }
/**
 * Resizes every node in the view so it fits its title text:
 *  - nodes without a usable title get a minimal 10x10 box;
 *  - otherwise width/height are estimated from the text (digits and uppercase
 *    letters count as wider glyphs) and rounded up to the next multiple of a
 *    20px layout grid.
 */
def resizeNodesToTitle(): this.type = {
    for (node <- view.nodes) {
      // Title source depends on the node kind; unknown kinds fall back to "".
      val text = node match {
        case n: ViewNodeConcept[_] => n.concept match {
          case e: Element => e.name.trim
          case _ => ""
        }
        case n: ViewNotes => n.text.trim
        case _ => ""
      }
      node.withSize {
        if (text.isEmpty) {
          Size( width = 10, height = 10 )
        } else {
          val strings = text.split('\\n')
          val height: Int = 20 + strings.length * 12
          // Rough text metrics: 6px per "wide" glyph (digit/uppercase), 5px otherwise;
          // use short-circuit || — both operands are pure Boolean tests.
          val width: Int = 40 + strings.map {
            _.trim.collect {
              case c if c.isDigit || c.isUpper => 6
              case _ => 5
            }.sum
          }.max
          // Snap both dimensions *up* to the next multiple of the layout grid.
          // width % BOX is never negative here, so test equality with 0 explicitly.
          val BOX: Int = 20
          Size(
            width = BOX*Math.floor(width/BOX) + { if (width % BOX == 0) 0 else BOX },
            height = BOX*Math.floor(height/BOX) + { if (height % BOX == 0) 0 else BOX }
          )
        }
      }
    }
    this
  }
/** Default layout strategy: the layered spring layout. */
def layout(): this.type = this.layoutLayered()
/** Runs the layered spring layout over this view (mutates node positions in place). */
def layoutLayered(maxIterations: Int = LAYOUT_ITERATIONS): this.type = {
    new org.mentha.tools.archimate.model.view.layout.LayeredSpringLayoutF(view).layout(maxIterations)
    this
  }
/** Runs the simple (non-layered) spring layout over this view (mutates node positions in place). */
def layoutSimple(maxIterations: Int = LAYOUT_ITERATIONS): this.type = {
    new org.mentha.tools.archimate.model.view.layout.SimpleSpringLayoutF(view).layout(maxIterations)
    this
  }
}
/** Unwraps the underlying model concept from a typed view object. */
@inline def $[T <: Concept](t: ViewObject with ViewConcept[T]): T = t.concept
/** DSL entry point bound to the implicit view; delegates to `in` (defined elsewhere in this file). */
@inline def <<(implicit view: View) = in(view)
/** Evaluates the given concept-producing expression in the context of the implicit view and returns its result. */
@inline def <<[T <: Concept](r: => T)(implicit view: View): T = <<.apply { r }
}
| zhuj/mentha-web-archimate | archimate-model/src/main/scala/org/mentha/tools/archimate/model/view/dsl/package.scala | Scala | mit | 15,123 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.nisp.helpers
import org.joda.time.DateTime
import play.api.mvc.{Request, Result}
import uk.gov.hmrc.auth.core.ConfidenceLevel
import uk.gov.hmrc.auth.core.retrieve.LoginTimes
import uk.gov.hmrc.nisp.controllers.auth.{AuthDetails, AuthenticatedRequest, VerifyAuthActionImpl}
import uk.gov.hmrc.nisp.fixtures.NispAuthedUserFixture
import scala.concurrent.Future
/**
 * Test-only auth action that bypasses real verification: every request is handed
 * straight to the block, wrapped in an AuthenticatedRequest for the fixture user
 * (regular NINO) with ConfidenceLevel L500, provider "IDA" and the current login time.
 * The five constructor dependencies of VerifyAuthActionImpl are nulled out — they
 * are never touched because invokeBlock is overridden.
 */
class FakeVerifyAuthAction extends VerifyAuthActionImpl(null, null, null, null, null) {
  override def invokeBlock[A](request: Request[A], block: AuthenticatedRequest[A] => Future[Result]): Future[Result] =
    block(
      AuthenticatedRequest(
        request,
        NispAuthedUserFixture.user(TestAccountBuilder.regularNino),
        AuthDetails(ConfidenceLevel.L500, Some("IDA"), LoginTimes(DateTime.now, None))
      )
    )
}
| hmrc/nisp-frontend | test/uk/gov/hmrc/nisp/helpers/FakeVerifyAuthAction.scala | Scala | apache-2.0 | 1,422 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.catalyst.analysis
import org.apache.spark.sql.crossdata.test.SharedXDContextTest
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
 * Integration tests for Crossdata reference resolution: verifies that partially
 * and fully qualified column identifiers (including dotted table names such as
 * "test.t1" and struct subfields such as col.test) resolve correctly in
 * projections, join conditions, WHERE filters and GROUP BY clauses, and that
 * genuinely ambiguous identifiers fail with an exception.
 */
@RunWith(classOf[JUnitRunner])
class XDResolveReferencesIT extends SharedXDContextTest{
  //TABLE test.t1 COLUMNS id value
  //TABLE test.t2 COLUMNS id value
  //TABLE test2.t1 COLUMNS id value
  //TABLE test COLUMNS test.id test.test
  //TABLE test.test COLUMNS col.id col.test
  // Registers the five fixture tables above as temp tables; the last two use a
  // nested struct column so that dotted identifiers can mean either a table
  // qualifier or a struct field access.
  override def beforeAll(): Unit = {
    super.beforeAll()
    val t1: DataFrame = xdContext.createDataFrame(xdContext.sparkContext.parallelize((1 to 5)
      .map(i => Row(s"val_$i", i))), StructType(Array(StructField("id", StringType), StructField("value", IntegerType))))
    t1.registerTempTable("test.t1")
    val t2: DataFrame = xdContext.createDataFrame(xdContext.sparkContext.parallelize((4 to 8)
      .map(i => Row(s"val_$i", i))), StructType(Array(StructField("id", StringType), StructField("value", IntegerType))))
    t2.registerTempTable("test.t2")
    t1.registerTempTable("test2.t1")
    //columns test.id and test.test
    val rows = xdContext.sparkContext.parallelize(2 to 5).map(i => Row(Row(s"val_$i", i)))
    val strType = StructType(Array(StructField("test", StructType(Array(StructField("id", StringType), StructField("test", IntegerType))))))
    xdContext.createDataFrame(rows, strType).registerTempTable("test")
    val rows2= xdContext.sparkContext.parallelize(4 to 8).map(i => Row(Row(s"val_$i", i)))
    val strType2 = StructType(Array(StructField("col", StructType(Array(StructField("id", StringType), StructField("test", IntegerType))))))
    xdContext.createDataFrame(rows2, strType2).registerTempTable("test.test")
  }
  it must "resolve partially qualified identifiers" in {
    val rows = xdContext.sql("SELECT t1.id, id FROM test.t1").collect()
    rows(0)(0) shouldBe rows(0)(1)
  }
  it must "resolve fully qualified identifiers" in {
    val rows = xdContext.sql("SELECT test.t1.id, id FROM test.t1").collect()
    rows(0)(0) shouldBe rows(0)(1)
  }
  it must "fail when using non-existing qualifiers in the first part of the identifier" in {
    an [Exception] shouldBe thrownBy (xdContext.sql("SELECT fake.test.t1.id FROM test.t1").show)
  }
  it must "fail when querying ambiguous columns" in {
    an [Exception] shouldBe thrownBy (xdContext.sql("SELECT id FROM test.t1 INNER JOIN test.t2").show)
  }
  it must "keep supporting qualified table aliases" in {
    val rows = xdContext.sql("SELECT t1.id, als.id FROM test.t1 INNER JOIN test.t1 als").collect()
    rows(0)(0) shouldBe rows(0)(1)
  }
  it must "fail when using fully qualified identifiers after aliasing the table" in {
    an [Exception] shouldBe thrownBy (xdContext.sql("SELECT t1.id, test.t2.id FROM test.t1 INNER JOIN test.t2 als").show)
    an [Exception] shouldBe thrownBy (xdContext.sql("SELECT * FROM test.t1 INNER JOIN test.t2 otra ON t1.id = t2.id").show)
  }
  it must "resolve qualified identifiers when joining tables" in {
    val rows = xdContext.sql("SELECT t1.id, test.t1.id, t2.id, test.t2.id FROM test.t1 INNER JOIN test.t2").collect()
    rows(0)(0) shouldBe rows(0)(1)
    rows(0)(2) shouldBe rows(0)(3)
  }
  it must "resolve partially qualified identifiers in the join condition" in {
    val dataFrame = xdContext.sql("SELECT * FROM test.t1 INNER JOIN test.t2 ON t1.id = t2.id")
    dataFrame.count() shouldBe 2
  }
  it must "resolve fully qualified identifiers in the join condition" in {
    val dataFrame = xdContext.sql("SELECT * FROM test.t1 INNER JOIN test.t2 ON test.t1.id = test.t2.id")
    dataFrame.count() shouldBe 2
  }
  it must "resolve partially and fully qualified identifiers in the same query" in {
    val dataFrame = xdContext.sql("SELECT test.t1.id, t2.id FROM test.t1 INNER JOIN test.t2 ON t1.id = test.t2.id")
    dataFrame.count() shouldBe 2
    val rows = dataFrame.collect()
    rows(0)(0) shouldBe rows(0)(1)
  }
  it must "resolve qualified identifiers in the group by" in {
    val dataFrame = xdContext.sql("SELECT test.t1.id FROM test.t1 INNER JOIN test.t2 ON t1.id = test.t2.id GROUP BY t1.id")
    val dataFrame2 = xdContext.sql("SELECT test.t1.id FROM test.t1 INNER JOIN test.t2 ON t1.id = test.t2.id GROUP BY test.t1.id")
    dataFrame.count() shouldBe 2
    dataFrame2.count() shouldBe 2
  }
  //test.t1 and test2.t1 have a column "id"
  it must "fail when using ambiguous identifiers in the join condition" in {
    an [Exception] shouldBe thrownBy (xdContext.sql("SELECT * FROM test.t1 INNER JOIN test2.t1 ON t1.id = t1.id").show)
  }
  it must "allow to fully qualify identifiers in order to resolve ambiguous columns" in {
    val dataFrame = xdContext.sql("SELECT * FROM test.t1 INNER JOIN test2.t1 ON test.t1.id = test2.t1.id")
    dataFrame.count shouldBe 5
  }
  //TABLE test COLUMNS test.id test.test
  //TABLE test.test COLUMNS col.id col.test
  it must "resolve qualified identifiers associated to subfields" in {
    xdContext.sql("SELECT * FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT col FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT col.id FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT col.test FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT test.col.test FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT test.test.col.test FROM test.test").count() shouldBe 5
    xdContext.sql("SELECT * FROM test").count() shouldBe 4
    xdContext.sql("SELECT test FROM test").count() shouldBe 4
    xdContext.sql("SELECT test.test FROM test").count() shouldBe 4
    xdContext.sql("SELECT test.test.test FROM test").count() shouldBe 4
  }
  it must "resolve fully qualified identifiers in where conditions" in {
    xdContext.sql("SELECT test FROM test WHERE test.test.test = 4").count() shouldBe 1
  }
  it must "resolve qualified identifiers associated to subfields when joining tables" in {
    xdContext.sql("SELECT * FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT col FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT col.test FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT test FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT test.test FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT test.test.test FROM test INNER JOIN test.test").count() shouldBe 20
    xdContext.sql("SELECT * FROM test INNER JOIN test.test ON test.test.test = col.test").count() shouldBe 2
  }
  it must "resolve qualified count distinct queries with qualified filters" in {
    xdContext.sql("SELECT count(distinct test) FROM test WHERE test.test.test = 4").count() shouldBe 1
  }
}
| darroyocazorla/crossdata | core/src/test/scala/org/apache/spark/sql/crossdata/catalyst/analysis/XDResolveReferencesIT.scala | Scala | apache-2.0 | 7,513 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2019 Helge Holzmann (Internet Archive) <helge@archive.org>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.archive.archivespark.model
import io.circe.Json
import org.archive.archivespark.util.Json.{json, mapToJson}
import scala.collection.immutable.ListMap
/**
 * An enrichment root whose metadata payload is statically typed as Meta;
 * combines the untyped EnrichRoot serialization API with typed record access.
 */
trait TypedEnrichRoot[+Meta] extends EnrichRoot with TypedEnrichable[Meta] {
  // Re-views this root under a different meta type parameter (unchecked cast).
  override def root[A]: TypedEnrichRoot[A] = this.asInstanceOf[TypedEnrichRoot[A]]
  def companion: EnrichRootCompanion[_]
}
/**
 * Root of an enrichment tree: serializes the record metadata plus all attached
 * enrichments to JSON. Self-typed to TypedEnrichRoot so every concrete root is typed.
 */
trait EnrichRoot extends Enrichable { this: TypedEnrichRoot[_] =>
  // JSON key under which the record metadata is emitted.
  def metaKey: String = "record"
  def companion: EnrichRootCompanion[_]
  def metaToJson: Json = json(get)
  // Metadata entry first, then one entry per enrichment; entries whose
  // serialization produced null are dropped from the result.
  def toJson: Map[String, Json] = ListMap(
    metaKey -> metaToJson
  ) ++ enrichments.map{e => (e, mapToJson(enrichment(e).get.toJson))}.filter{ case (_, field) => field != null }
}
object EnrichRoot {
  // Lets an EnrichRoot be used where its companion is expected (unchecked cast on the companion's type parameter).
  implicit def companion[A <: EnrichRoot](root: A): EnrichRootCompanion[A] = root.companion.asInstanceOf[EnrichRootCompanion[A]]
} | helgeho/ArchiveSpark | src/main/scala/org/archive/archivespark/model/EnrichRoot.scala | Scala | mit | 2,073 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker
import java.io._
import akka.actor.ActorRef
import com.google.common.base.Charsets
import com.google.common.io.Files
import org.apache.spark.Logging
import org.apache.spark.deploy.{ApplicationDescription, Command, ExecutorState}
import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
/**
 * Manages the execution of one executor process.
 *
 * Builds the executor launch command from the application description, starts it
 * as a child process inside a per-executor working directory, redirects its
 * stdout/stderr to files there, and reports state changes back to the Worker
 * actor. `kill()` tears the process down; a JVM shutdown hook does the same on
 * abrupt termination.
 */
private[spark] class ExecutorRunner(
    val appId: String,
    val execId: Int,
    val appDesc: ApplicationDescription,
    val cores: Int,
    val memory: Int,
    val worker: ActorRef,
    val workerId: String,
    val host: String,
    val sparkHome: File,
    val workDir: File,
    val workerUrl: String,
    var state: ExecutorState.Value)
  extends Logging {
  // "appId/execId" — used for thread naming and log messages.
  val fullId = appId + "/" + execId
  var workerThread: Thread = null
  var process: Process = null
  // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It might
  // make sense to remove this in the future.
  var shutdownHook: Thread = null
  // Launches the fetch-and-run loop on a dedicated thread and registers the
  // shutdown hook that kills the child process if this JVM dies.
  def start() {
    workerThread = new Thread("ExecutorRunner for " + fullId) {
      override def run() { fetchAndRunExecutor() }
    }
    workerThread.start()
    // Shutdown hook that kills actors on shutdown.
    shutdownHook = new Thread() {
      override def run() {
        if (process != null) {
          logInfo("Shutdown hook killing child process.")
          process.destroy()
          process.waitFor()
        }
      }
    }
    Runtime.getRuntime.addShutdownHook(shutdownHook)
  }
  /** Stop this executor runner, including killing the process it launched */
  def kill() {
    if (workerThread != null) {
      workerThread.interrupt()
      workerThread = null
      if (process != null) {
        logInfo("Killing process!")
        process.destroy()
        process.waitFor()
      }
      state = ExecutorState.KILLED
      worker ! ExecutorStateChanged(appId, execId, state, None, None)
      Runtime.getRuntime.removeShutdownHook(shutdownHook)
    }
  }
  /** Replace variables such as {{EXECUTOR_ID}} and {{CORES}} in a command argument passed to us */
  def substituteVariables(argument: String): String = argument match {
    case "{{WORKER_URL}}" => workerUrl
    case "{{EXECUTOR_ID}}" => execId.toString
    case "{{HOSTNAME}}" => host
    case "{{CORES}}" => cores.toString
    case other => other
  }
  // Full command line for the executor: main class, substituted arguments
  // (with the appId appended), and the application's environment.
  def getCommandSeq = {
    val command = Command(appDesc.command.mainClass,
      appDesc.command.arguments.map(substituteVariables) ++ Seq(appId), appDesc.command.environment)
    CommandUtils.buildCommandSeq(command, memory, sparkHome.getAbsolutePath)
  }
  /**
   * Download and run the executor described in our ApplicationDescription
   */
  def fetchAndRunExecutor() {
    try {
      // Create the executor's working directory
      val executorDir = new File(workDir, appId + "/" + execId)
      if (!executorDir.mkdirs()) {
        throw new IOException("Failed to create directory " + executorDir)
      }
      // Launch the process
      val command = getCommandSeq
      logInfo("Launch command: " + command.mkString("\\"", "\\" \\"", "\\""))
      val builder = new ProcessBuilder(command: _*).directory(executorDir)
      val env = builder.environment()
      for ((key, value) <- appDesc.command.environment) {
        env.put(key, value)
      }
      // In case we are running this from within the Spark Shell, avoid creating a "scala"
      // parent process for the executor command
      env.put("SPARK_LAUNCH_WITH_SCALA", "0")
      process = builder.start()
      val header = "Spark Executor Command: %s\\n%s\\n\\n".format(
        command.mkString("\\"", "\\" \\"", "\\""), "=" * 40)
      // Redirect its stdout and stderr to files
      val stdout = new File(executorDir, "stdout")
      CommandUtils.redirectStream(process.getInputStream, stdout)
      val stderr = new File(executorDir, "stderr")
      Files.write(header, stderr, Charsets.UTF_8)
      CommandUtils.redirectStream(process.getErrorStream, stderr)
      // Wait for it to exit; this is actually a bad thing if it happens, because we expect to run
      // long-lived processes only. However, in the future, we might restart the executor a few
      // times on the same machine.
      val exitCode = process.waitFor()
      state = ExecutorState.FAILED
      val message = "Command exited with code " + exitCode
      worker ! ExecutorStateChanged(appId, execId, state, Some(message), Some(exitCode))
    } catch {
      case interrupted: InterruptedException =>
        // Thrown by kill(): the KILLED state change is reported there, not here.
        logInfo("Runner thread for executor " + fullId + " interrupted")
      case e: Exception => {
        logError("Error running executor", e)
        if (process != null) {
          process.destroy()
        }
        state = ExecutorState.FAILED
        val message = e.getClass + ": " + e.getMessage
        worker ! ExecutorStateChanged(appId, execId, state, Some(message), None)
      }
    }
  }
}
| sryza/spark | core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala | Scala | apache-2.0 | 5,751 |
package org.jetbrains.plugins.scala.lang.formatter.tests
import org.jetbrains.plugins.scala.lang.formatter.AbstractScalaFormatterTestBase
import com.intellij.psi.codeStyle.CommonCodeStyleSettings
/**
 * Formatter tests for Scala wrapping-and-braces settings: binary-operation wrap
 * modes (as-needed / always / on-every-item / none), infix pattern and type
 * wrapping, parenthesized-expression wrapping, call-parameter wrap and paren
 * placement, method-call chain wrapping/alignment, and brace-style variants.
 * Each test sets code-style options, then asserts that formatting `before`
 * yields exactly `after` (whitespace-exact).
 *
 * @author Alexander Podkhalyuzin
 */
class ScalaWrappingAndBracesTest extends AbstractScalaFormatterTestBase {
  /* stub:
  def test {
    val before =
"""
""".replace("\\r", "")
    val after =
"""
""".replace("\\r", "")
    doTextTest(before, after)
  }
   */
  def testInfixExpressionWrapAsNeeded {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2 + 2
2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: 2 :: Nil
2 + 2 + 2 + 2 + 22 * 66 + 2
""".replace("\\r", "")
    val after =
"""
2 + 2 + 2 + 2 + 2 +
  2 + 2 + 2 + 2 +
  2 + 2 + 2 + 2 +
  2 + 2
2 :: 2 :: 2 :: 2 ::
  2 :: 2 :: 2 ::
  2 :: 2 :: 2 ::
  2 :: 2 :: Nil
2 + 2 + 2 + 2 +
  22 * 66 + 2
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testInfixPatternWrapAsNeeded {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
List(1, 2) match {
  case x :: y :: z :: Nil =>
}
""".replace("\\r", "")
    val after =
"""
List(1, 2) match {
  case x :: y ::
    z :: Nil =>
}
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testInfixTypeWrapAsNeeded {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
val x: T + T + T + T + T
""".replace("\\r", "")
    val after =
"""
val x: T + T + T +
  T + T
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testInfixExprWrapAlways {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
2 + 3 + 4 * 6 + (7 + 9 * 10) - 8 - 4
""".replace("\\r", "")
    val after =
"""
2 +
  3 +
  4 *
    6 +
  (7 +
    9 *
      10) -
  8 -
  4
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testInfixExprWrapAllIfLong {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_ON_EVERY_ITEM
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
2 + 2 + 2 + 2 + 2 + 2
2 + 2 + 2 + 2 + 2
""".replace("\\r", "")
    val after =
"""
2 +
  2 +
  2 +
  2 +
  2 +
  2
2 + 2 + 2 + 2 + 2
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testInfixExprDoNotWrap {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.DO_NOT_WRAP
    getSettings.RIGHT_MARGIN = 20
    getIndentOptions.CONTINUATION_INDENT_SIZE = 2
    val before =
"""
2 + 2 + 2 + 2 + 2 + 2
2 + 2 + 2 + 2 + 2
""".replace("\\r", "")
    val after =
"""
2 + 2 + 2 + 2 + 2 + 2
2 + 2 + 2 + 2 + 2
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testAlignBinary {
    getCommonSettings.ALIGN_MULTILINE_BINARY_OPERATION = true
    val before =
"""
val i = 2 + 2 +
        3 + 5 +
        6 + 7 *
        8
""".replace("\\r", "")
    val after =
"""
val i = 2 + 2 +
        3 + 5 +
        6 + 7 *
        8
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testBinaryParentExpressionWrap {
    getCommonSettings.BINARY_OPERATION_WRAP = CommonCodeStyleSettings.WRAP_AS_NEEDED
    getCommonSettings.PARENTHESES_EXPRESSION_LPAREN_WRAP = true
    getCommonSettings.PARENTHESES_EXPRESSION_RPAREN_WRAP = true
    getSettings.RIGHT_MARGIN = 20
    val before =
"""
(2333333333333333 + 2)
(2 +
  2)
""".replace("\\r", "")
    val after =
"""
(
  2333333333333333 +
    2
  )
(
  2 +
    2
  )
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testCallParametersWrap {
    getCommonSettings.CALL_PARAMETERS_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS
    val before =
"""
foo(1, 2, 3)
""".replace("\\r", "")
    val after =
"""
foo(1,
  2,
  3)
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testAlignMultilineParametersCalls {
    getCommonSettings.ALIGN_MULTILINE_PARAMETERS_IN_CALLS = true
    val before =
"""
foo(1,
2,
3)
""".replace("\\r", "")
    val after =
"""
foo(1,
    2,
    3)
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testCallParametersParen {
    getCommonSettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE = true
    getCommonSettings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE = true
    val before =
"""
foo(1,
2,
3)
""".replace("\\r", "")
    val after =
"""
foo(
  1,
  2,
  3
)
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testMethodCallChainWrap {
    getCommonSettings.METHOD_CALL_CHAIN_WRAP = CommonCodeStyleSettings.WRAP_ALWAYS
    val before =
"""
foo(1, 2).foo(1, 2).foo(1, 2)
""".replace("\\r", "")
    val after =
"""
foo(1, 2)
  .foo(1, 2)
  .foo(1, 2)
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testMethodCallChainAlign {
    getCommonSettings.ALIGN_MULTILINE_CHAINED_METHODS = true
    val before =
"""
val x = foo.
  foo.goo.
  foo(1, 2, 3).
  foo.
  foo
  .foo
""".replace("\\r", "")
    val after =
"""
val x = foo.
        foo.goo.
        foo(1, 2, 3).
        foo.
        foo
  .foo
""".replace("\\r", "")
    doTextTest(before, after)
  }
  def testBraceStyle {
    getCommonSettings.CLASS_BRACE_STYLE = CommonCodeStyleSettings.NEXT_LINE
    getCommonSettings.METHOD_BRACE_STYLE = CommonCodeStyleSettings.NEXT_LINE_SHIFTED
    getCommonSettings.BRACE_STYLE = CommonCodeStyleSettings.NEXT_LINE_IF_WRAPPED
    val before =
"""
class A {
  def foo = {
    val z =
    {
      3
    }
  }
}
class B extends A {
}
""".replace("\\r", "")
    val after =
"""
class A
{
  def foo =
    {
      val z =
      {
        3
      }
    }
}
class B extends A
{
}
""".replace("\\r", "")
    doTextTest(before, after)
  }
} | consulo/consulo-scala | test/org/jetbrains/plugins/scala/lang/formatter/tests/ScalaWrappingAndBracesTest.scala | Scala | apache-2.0 | 5,982 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.mvc.akkahttp
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse }
import play.api.mvc.Handler
import play.mvc.Http.RequestHeader
import scala.concurrent.Future
/** A Play Handler that serves requests directly at the Akka HTTP level, as a function HttpRequest => Future[HttpResponse]. */
trait AkkaHttpHandler extends (HttpRequest => Future[HttpResponse]) with Handler
object AkkaHttpHandler {
  /** Wraps a plain request-handling function into an AkkaHttpHandler. */
  def apply(handler: HttpRequest => Future[HttpResponse]): AkkaHttpHandler = new AkkaHttpHandler {
    def apply(request: HttpRequest): Future[HttpResponse] = handler(request)
  }
}
| Shenker93/playframework | framework/src/play-akka-http-server/src/main/scala/play/api/mvc/akkahttp/AkkaHttpHandler.scala | Scala | apache-2.0 | 556 |
package cromwell.engine.backend
import java.nio.file._
import better.files._
import ch.qos.logback.classic.encoder.PatternLayoutEncoder
import ch.qos.logback.classic.spi.ILoggingEvent
import ch.qos.logback.classic.{Level, LoggerContext}
import ch.qos.logback.core.FileAppender
import cromwell.core.{WorkflowId, WorkflowOptions}
import cromwell.engine.{WorkflowFailureMode, WorkflowSourceFiles}
import cromwell.engine.backend.io._
import cromwell.logging.WorkflowLogger
import cromwell.util.{SimpleExponentialBackoff, TryUtil}
import cromwell.webservice.WorkflowMetadataResponse
import org.slf4j.helpers.NOPLogger
import org.slf4j.{Logger, LoggerFactory}
import spray.json._
import wdl4s._
import wdl4s.values.{WdlFile, WdlSingleFile, WdlValue, _}
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.language.postfixOps
import scala.util.{Failure, Success, Try}
/** Where per-workflow logs are written; when `temporary` is set the log file is deleted once the workflow finishes (see maybeDeleteWorkflowLog). */
case class WorkflowLogOptions(dir: Path, temporary: Boolean)
/**
 * Immutable description of a single workflow execution: identity, sources,
 * options, coerced inputs/declarations, the backend it runs on, call-caching
 * flags, failure mode and filesystem context. Also hosts the helpers that copy
 * outputs/call logs to their final destinations and the per-workflow file logger.
 */
case class WorkflowDescriptor(id: WorkflowId,
                              sourceFiles: WorkflowSourceFiles,
                              workflowOptions: WorkflowOptions,
                              workflowLogOptions: Option[WorkflowLogOptions],
                              rawInputs: Map[String, JsValue],
                              namespace: NamespaceWithWorkflow,
                              coercedInputs: WorkflowCoercedInputs,
                              declarations: WorkflowCoercedInputs,
                              backend: Backend,
                              configCallCaching: Boolean,
                              lookupDockerHash: Boolean,
                              workflowFailureMode: WorkflowFailureMode,
                              wfContext: WorkflowContext,
                              fileSystems: List[FileSystem]) {
  import WorkflowDescriptor._
  // First segment of the workflow UUID — used in short log prefixes.
  val shortId = id.toString.split("-")(0)
  val name = namespace.workflow.unqualifiedName
  val actualInputs: WorkflowCoercedInputs = coercedInputs ++ declarations
  // "<workflowName>/<id>" — appended under output/log destination directories.
  private val relativeWorkflowRootPath = s"$name/$id"
  private val log = WorkflowLogger("WorkflowDescriptor", this)
  // Workflow-option lookups below log and rethrow if the option has a non-String type.
  val workflowLogDir = workflowOptions.get(WorkflowLogDirOptionKey) recover { case e: IllegalArgumentException =>
    log.warn(s"$WorkflowLogDirOptionKey expected to be of type String", e)
    throw e
  }
  val workflowOutputsPath = workflowOptions.get(WorkflowOutputsOptionKey) recover { case e: IllegalArgumentException =>
    log.warn(s"$WorkflowOutputsOptionKey expected to be of type String", e)
    throw e
  }
  val callLogsDir = workflowOptions.get(CallLogsDirOptionKey) recover { case e: IllegalArgumentException =>
    log.warn(s"$CallLogsDirOptionKey expected to be of type String", e)
    throw e
  }
  // Content hash of a WdlFile, used for call caching; failures are logged then rethrown.
  lazy val fileHasher: FileHasher = { wdlFile: WdlFile =>
    try {
      SymbolHash(wdlFile.value.toPath(fileSystems).hash)
    } catch {
      case e: Exception =>
        log.error(s"Cannot compute hash for file $wdlFile")
        throw e
    }
  }
  // Workflow options override the configuration default for cache read/write.
  private lazy val optionCacheWriting = workflowOptions getBoolean "write_to_cache" getOrElse configCallCaching
  private lazy val optionCacheReading = workflowOptions getBoolean "read_from_cache" getOrElse configCallCaching
  // Warn when options ask for caching that the instance configuration disables.
  if (!configCallCaching) {
    if (optionCacheWriting) logWriteDisabled()
    if (optionCacheReading) logReadDisabled()
  }
  // Caching is effective only when enabled both in config and in the options.
  lazy val writeToCache = configCallCaching && optionCacheWriting
  lazy val readFromCache = configCallCaching && optionCacheReading
  lazy val workflowLogName = s"workflow.$id.log"
  lazy val workflowLogPath = workflowLogOptions.map(_.dir.createDirectories() / workflowLogName).map(_.path)
  // File-backed logger when a log path is configured; otherwise a no-op logger.
  lazy val workflowLogger = workflowLogPath match {
    case Some(path) => makeFileLogger(path, workflowLogName, Level.toLevel(sys.props.getOrElse("LOG_LEVEL", "debug")))
    case None => NOPLogger.NOP_LOGGER
  }
  // Deletes the workflow log, but only when it was configured as temporary.
  def maybeDeleteWorkflowLog(): Unit = {
    for {
      opt <- workflowLogOptions if opt.temporary
      log <- workflowLogPath
    } yield log.delete(ignoreIOExceptions = true)
  }
  lazy val workflowRootPath = wfContext.root.toPath(fileSystems)
  def workflowRootPathWithBaseRoot(rootPath: String): Path = {
    backend.buildWorkflowRootPath(rootPath, name, id).toPath(fileSystems)
  }
  // Copies workflow outputs to the configured destination, if any (no-op otherwise).
  def copyWorkflowOutputs(workflowMetadataResponse: WorkflowMetadataResponse)
                         (implicit executionContext: ExecutionContext): Future[Unit] = {
    // Try to copy outputs to final destination
    workflowOutputsPath map copyWorkflowOutputsTo(workflowMetadataResponse) getOrElse Future.successful(())
  }
  private def copyWorkflowOutputsTo(workflowMetadataResponse: WorkflowMetadataResponse)(destDirectory: String)
                                   (implicit executionContext: ExecutionContext): Future[Unit] = {
    Future(copyWdlFilesTo(destDirectory, workflowMetadataResponse.outputs.toSeq.flatMap(_.values)))
  }
  // Copies call stdout/stderr/backend logs to the configured destination, if any.
  def copyCallLogs(workflowMetadataResponse: WorkflowMetadataResponse)
                  (implicit executionContext: ExecutionContext): Future[Unit] = {
    callLogsDir map copyCallLogsTo(workflowMetadataResponse) getOrElse Future.successful(())
  }
  private def copyCallLogsTo(workflowMetadataResponse: WorkflowMetadataResponse)(destDirectory: String)
                            (implicit executionContext: ExecutionContext): Future[Unit] = {
    Future {
      val callLogs = for {
        callMetadatas <- workflowMetadataResponse.calls.values
        callMetadata <- callMetadatas
        callLog <- callMetadata.stdout.toSeq ++ callMetadata.stderr.toSeq ++
          callMetadata.backendLogs.toSeq.flatMap(_.values)
      } yield callLog
      copyWdlFilesTo(destDirectory, callLogs)
    }
  }
  // Copies the workflow log to the directory named by the workflow option, when both exist.
  def copyWorkflowLog()(implicit executionContext: ExecutionContext): Future[Unit] = {
    (workflowLogDir, workflowLogPath) match {
      case (Success(dir), Some(path)) =>
        val logger = backend.workflowLogger(this)
        val dest = dir.toPath(fileSystems).resolve(workflowLogName)
        Future.fromTry(copyFile(logger, dest, path))
      case _ => Future.successful(())
    }
  }
  // Copies every WdlSingleFile contained in the given values; throws if any copy fails.
  private def copyWdlFilesTo(destDirectory: String, wdlValues: Traversable[WdlValue]): Unit = {
    val logger = backend.workflowLogger(this)
    // All outputs should have wdl values at this point, if they don't there's nothing we can do here
    val copies = for {
      wdlValue <- wdlValues
      wdlFile <- wdlValue collectAsSeq { case f: WdlSingleFile => f }
    } yield copyWdlFile(logger, destDirectory, wdlFile)
    // Throw an exception if one or more of the copies failed.
    TryUtil.sequence(copies.toSeq) match {
      case Success(_) => ()
      case Failure(e) => throw new Exception(s"Copy failed for the following files:\\n ${e.getMessage}", e)
    }
  }
  // Copies one file, preserving its path relative to the workflow root under
  // destDirectory/<name>/<id>/.
  def copyWdlFile(logger: WorkflowLogger, destDirectory: String, file: WdlFile): Try[Unit] = {
    val src = file.valueString.toPath(fileSystems)
    val wfPath = wfContext.root.toPath(fileSystems).toAbsolutePath
    val srcSubPath = src.subpath(wfPath.getNameCount, src.getNameCount)
    val dest = destDirectory
      .toPath(fileSystems)
      .toAbsolutePath
      .resolve(relativeWorkflowRootPath)
      // UnixPath.resolve(NioGcsPath) seems to be returning a null pointer. TODO: Add a test to confirm
      .resolve(srcSubPath.toString)
    copyFile(logger, dest, src)
  }
  // Copies src to dest with up to 5 retries and exponential backoff; a
  // pre-existing destination is treated as success (another attempt already copied it).
  def copyFile(logger: WorkflowLogger, dest: Path, src: Path): Try[Unit] = {
    def copy(): Unit = {
      logger.info(s"Trying to copy output file $src to $dest")
      Files.createDirectories(dest.getParent)
      Files.copy(src, dest)
    }
    TryUtil.retryBlock(
      fn = (retries: Option[Unit]) => copy(),
      retryLimit = Option(5),
      backoff = SimpleExponentialBackoff(5 seconds, 10 seconds, 1.1D),
      logger = logger,
      failMessage = Option(s"Failed to copy file $src to $dest"),
      isFatal = (t: Throwable) => t.isInstanceOf[FileAlreadyExistsException]
    ) recover {
      case _: FileAlreadyExistsException =>
        logger.info(s"Tried to copy the same file multiple times. Skipping subsequent copies for $src")
    }
  }
  // Builds (or reuses) a logback file logger writing to the given path.
  private def makeFileLogger(path: Path, name: String, level: Level): Logger = {
    val ctx = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
    /*
      WorkflowDescriptor.copy() is currently invoked by WorkflowActor.startActor().
      This causes this block of code to be executed twice.
     */
    Option(ctx.exists(name)) match {
      case Some(existingLogger) => existingLogger
      case None =>
        val encoder = new PatternLayoutEncoder()
        encoder.setPattern("%date %-5level - %msg%n")
        encoder.setContext(ctx)
        encoder.start()
        val appender = new FileAppender[ILoggingEvent]()
        appender.setFile(path.fullPath)
        appender.setEncoder(encoder)
        appender.setName(name)
        appender.setContext(ctx)
        appender.start()
        val fileLogger = ctx.getLogger(name)
        fileLogger.addAppender(appender)
        fileLogger.setAdditive(false)
        fileLogger.setLevel(level)
        fileLogger
    }
  }
  private def logWriteDisabled() = workflowLogger.warn(writeDisabled)
  private def logReadDisabled() = workflowLogger.warn(readDisabled)
  override def toString = s"WorkflowDescriptor(${id.id.toString})"
  // Hash is only computed when cache-writing is on (hashing can be expensive).
  def hash(wdlValue: WdlValue): Option[SymbolHash] = {
    if (writeToCache) Option(wdlValue.computeHash(fileHasher)) else None
  }
}
/**
 * Companion object holding the workflow-option keys recognized by the
 * descriptor and the warning messages emitted when call caching is disabled.
 */
object WorkflowDescriptor {
  // Recognized workflow-option keys.
  val WorkflowLogDirOptionKey = "workflow_log_dir"
  val WorkflowOutputsOptionKey = "outputs_path"
  val CallLogsDirOptionKey = "call_logs_dir"
  val WorkflowFailureModeKey = "workflowFailureMode"
  // The full set of recognized option keys.
  val OptionKeys: Set[String] = Set(WorkflowLogDirOptionKey, WorkflowOutputsOptionKey, CallLogsDirOptionKey, WorkflowFailureModeKey)

  // Builds the "<feature> requested but call caching is disabled" warning text.
  private def disabledMessage(readWrite: String, consequence: String) =
    s"""$readWrite is enabled in the workflow options but Call Caching is disabled in this Cromwell instance.
       |As a result the calls in this workflow $consequence
     """.stripMargin

  private val writeDisabled = disabledMessage("Write to Cache", "WILL NOT be cached")
  private val readDisabled = disabledMessage("Read from Cache", "WILL ALL be executed")
}
| cowmoo/cromwell | engine/src/main/scala/cromwell/engine/backend/WorkflowDescriptor.scala | Scala | bsd-3-clause | 10,255 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst
import java.sql.{Date, Timestamp}
import scala.language.implicitConversions
import org.apache.spark.api.java.function.FilterFunction
import org.apache.spark.sql.Encoder
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.objects.Invoke
import org.apache.spark.sql.catalyst.plans.{Inner, JoinType}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.types._
/**
* A collection of implicit conversions that create a DSL for constructing catalyst data structures.
*
* {{{
* scala> import org.apache.spark.sql.catalyst.dsl.expressions._
*
* // Standard operators are added to expressions.
* scala> import org.apache.spark.sql.catalyst.expressions.Literal
* scala> Literal(1) + Literal(1)
* res0: org.apache.spark.sql.catalyst.expressions.Add = (1 + 1)
*
* // There is a conversion from 'symbols to unresolved attributes.
* scala> 'a.attr
* res1: org.apache.spark.sql.catalyst.analysis.UnresolvedAttribute = 'a
*
* // These unresolved attributes can be used to create more complicated expressions.
* scala> 'a === 'b
* res2: org.apache.spark.sql.catalyst.expressions.EqualTo = ('a = 'b)
*
* // SQL verbs can be used to construct logical query plans.
* scala> import org.apache.spark.sql.catalyst.plans.logical._
* scala> import org.apache.spark.sql.catalyst.dsl.plans._
* scala> LocalRelation('key.int, 'value.string).where('key === 1).select('value).analyze
* res3: org.apache.spark.sql.catalyst.plans.logical.LogicalPlan =
* Project [value#3]
* Filter (key#2 = 1)
* LocalRelation [key#2,value#3], []
* }}}
*/
package object dsl {
  /**
   * Operators and helpers available on every DSL expression.
   *
   * Each method just wraps `expr` (and any operands) in the corresponding
   * catalyst expression node; no evaluation happens here.
   */
  trait ImplicitOperators {
    /** The underlying expression the operators below apply to. */
    def expr: Expression

    // Unary operators.
    def unary_- : Expression = UnaryMinus(expr)
    def unary_! : Predicate = Not(expr)
    def unary_~ : Expression = BitwiseNot(expr)

    // Arithmetic and bitwise operators.
    def + (other: Expression): Expression = Add(expr, other)
    def - (other: Expression): Expression = Subtract(expr, other)
    def * (other: Expression): Expression = Multiply(expr, other)
    def / (other: Expression): Expression = Divide(expr, other)
    def div (other: Expression): Expression = IntegralDivide(expr, other)
    def % (other: Expression): Expression = Remainder(expr, other)
    def & (other: Expression): Expression = BitwiseAnd(expr, other)
    def | (other: Expression): Expression = BitwiseOr(expr, other)
    def ^ (other: Expression): Expression = BitwiseXor(expr, other)

    // Logical connectives and comparisons.
    def && (other: Expression): Predicate = And(expr, other)
    def || (other: Expression): Predicate = Or(expr, other)
    def < (other: Expression): Predicate = LessThan(expr, other)
    def <= (other: Expression): Predicate = LessThanOrEqual(expr, other)
    def > (other: Expression): Predicate = GreaterThan(expr, other)
    def >= (other: Expression): Predicate = GreaterThanOrEqual(expr, other)
    def === (other: Expression): Predicate = EqualTo(expr, other)
    def <=> (other: Expression): Predicate = EqualNullSafe(expr, other)
    def =!= (other: Expression): Predicate = Not(EqualTo(expr, other))

    // IN: a lone ListQuery operand becomes an IN-subquery, anything else a plain In.
    def in(list: Expression*): Expression = list match {
      case Seq(l: ListQuery) => expr match {
        case c: CreateNamedStruct => InSubquery(c.valExprs, l)
        case other => InSubquery(Seq(other), l)
      }
      case _ => In(expr, list)
    }

    // String predicates and substring helpers.
    def like(other: Expression): Expression = Like(expr, other)
    def rlike(other: Expression): Expression = RLike(expr, other)
    def contains(other: Expression): Expression = Contains(expr, other)
    def startsWith(other: Expression): Expression = StartsWith(expr, other)
    def endsWith(other: Expression): Expression = EndsWith(expr, other)
    def substr(pos: Expression, len: Expression = Literal(Int.MaxValue)): Expression =
      Substring(expr, pos, len)
    def substring(pos: Expression, len: Expression = Literal(Int.MaxValue)): Expression =
      Substring(expr, pos, len)

    // Null checks and field/element extraction.
    def isNull: Predicate = IsNull(expr)
    def isNotNull: Predicate = IsNotNull(expr)
    def getItem(ordinal: Expression): UnresolvedExtractValue = UnresolvedExtractValue(expr, ordinal)
    def getField(fieldName: String): UnresolvedExtractValue =
      UnresolvedExtractValue(expr, Literal(fieldName))

    def cast(to: DataType): Expression = Cast(expr, to)

    // Sort-order constructors.
    def asc: SortOrder = SortOrder(expr, Ascending)
    def asc_nullsLast: SortOrder = SortOrder(expr, Ascending, NullsLast, Set.empty)
    def desc: SortOrder = SortOrder(expr, Descending)
    def desc_nullsFirst: SortOrder = SortOrder(expr, Descending, NullsFirst, Set.empty)

    // Aliasing.
    def as(alias: String): NamedExpression = Alias(expr, alias)()
    def as(alias: Symbol): NamedExpression = Alias(expr, alias.name)()
  }
  /**
   * Implicit conversions and helper constructors for building catalyst
   * expressions: literal conversions from Scala/Java primitives, symbol and
   * string to attribute conversions, aggregate-function shorthands, and
   * typed [[AttributeReference]] builders.
   */
  trait ExpressionConversions {
    /** Makes every [[Expression]] carry the DSL operators. */
    implicit class DslExpression(e: Expression) extends ImplicitOperators {
      def expr: Expression = e
    }

    // Literal conversions from Scala/Java primitive and time/decimal types.
    implicit def booleanToLiteral(b: Boolean): Literal = Literal(b)
    implicit def byteToLiteral(b: Byte): Literal = Literal(b)
    implicit def shortToLiteral(s: Short): Literal = Literal(s)
    implicit def intToLiteral(i: Int): Literal = Literal(i)
    implicit def longToLiteral(l: Long): Literal = Literal(l)
    implicit def floatToLiteral(f: Float): Literal = Literal(f)
    implicit def doubleToLiteral(d: Double): Literal = Literal(d)
    implicit def stringToLiteral(s: String): Literal = Literal.create(s, StringType)
    implicit def dateToLiteral(d: Date): Literal = Literal(d)
    implicit def bigDecimalToLiteral(d: BigDecimal): Literal = Literal(d.underlying())
    implicit def bigDecimalToLiteral(d: java.math.BigDecimal): Literal = Literal(d)
    implicit def decimalToLiteral(d: Decimal): Literal = Literal(d)
    implicit def timestampToLiteral(t: Timestamp): Literal = Literal(t)
    implicit def binaryToLiteral(a: Array[Byte]): Literal = Literal(a)

    // 'symbol becomes an unresolved attribute named after the symbol.
    implicit def symbolToUnresolvedAttribute(s: Symbol): analysis.UnresolvedAttribute =
      analysis.UnresolvedAttribute(s.name)

    /** Converts $"col name" into an [[analysis.UnresolvedAttribute]]. */
    implicit class StringToAttributeConversionHelper(val sc: StringContext) {
      // Note that if we make ExpressionConversions an object rather than a trait, we can
      // then make this a value class to avoid the small penalty of runtime instantiation.
      def $(args: Any*): analysis.UnresolvedAttribute = {
        analysis.UnresolvedAttribute(sc.s(args : _*))
      }
    }

    // Shorthands for common (aggregate) functions; each wraps the aggregate
    // function in an AggregateExpression via toAggregateExpression.
    def rand(e: Long): Expression = Rand(e)
    def sum(e: Expression): Expression = Sum(e).toAggregateExpression()
    def sumDistinct(e: Expression): Expression = Sum(e).toAggregateExpression(isDistinct = true)
    def count(e: Expression): Expression = Count(e).toAggregateExpression()
    def countDistinct(e: Expression*): Expression =
      Count(e).toAggregateExpression(isDistinct = true)
    def approxCountDistinct(e: Expression, rsd: Double = 0.05): Expression =
      HyperLogLogPlusPlus(e, rsd).toAggregateExpression()
    def avg(e: Expression): Expression = Average(e).toAggregateExpression()
    def first(e: Expression): Expression = new First(e).toAggregateExpression()
    def last(e: Expression): Expression = new Last(e).toAggregateExpression()
    def min(e: Expression): Expression = Min(e).toAggregateExpression()
    def minDistinct(e: Expression): Expression = Min(e).toAggregateExpression(isDistinct = true)
    def max(e: Expression): Expression = Max(e).toAggregateExpression()
    def maxDistinct(e: Expression): Expression = Max(e).toAggregateExpression(isDistinct = true)
    def upper(e: Expression): Expression = Upper(e)
    def lower(e: Expression): Expression = Lower(e)
    def coalesce(args: Expression*): Expression = Coalesce(args)
    def greatest(args: Expression*): Expression = Greatest(args)
    def least(args: Expression*): Expression = Least(args)
    def sqrt(e: Expression): Expression = Sqrt(e)
    def abs(e: Expression): Expression = Abs(e)
    // star() is SELECT *; star("a", "b") targets qualified names.
    def star(names: String*): Expression = names match {
      case Seq() => UnresolvedStar(None)
      case target => UnresolvedStar(Option(target))
    }
    def namedStruct(e: Expression*): Expression = CreateNamedStruct(e)

    // Wraps a Scala function as a literal object and invokes its apply method.
    def callFunction[T, U](
        func: T => U,
        returnType: DataType,
        argument: Expression): Expression = {
      val function = Literal.create(func, ObjectType(classOf[T => U]))
      Invoke(function, "apply", returnType, argument :: Nil)
    }

    def windowSpec(
        partitionSpec: Seq[Expression],
        orderSpec: Seq[SortOrder],
        frame: WindowFrame): WindowSpecDefinition =
      WindowSpecDefinition(partitionSpec, orderSpec, frame)

    def windowExpr(windowFunc: Expression, windowSpec: WindowSpecDefinition): WindowExpression =
      WindowExpression(windowFunc, windowSpec)

    implicit class DslSymbol(sym: Symbol) extends ImplicitAttribute { def s: String = sym.name }
    // TODO more implicit class for literal?
    implicit class DslString(val s: String) extends ImplicitOperators {
      override def expr: Expression = Literal(s)
      def attr: UnresolvedAttribute = analysis.UnresolvedAttribute(s)
    }

    /**
     * Base for symbol/string attribute DSL: `s` is the attribute name, and the
     * typed methods below build an [[AttributeReference]] of the given type.
     */
    abstract class ImplicitAttribute extends ImplicitOperators {
      def s: String
      def expr: UnresolvedAttribute = attr
      def attr: UnresolvedAttribute = analysis.UnresolvedAttribute(s)

      /** Creates a new AttributeReference of type boolean */
      def boolean: AttributeReference = AttributeReference(s, BooleanType, nullable = true)()

      /** Creates a new AttributeReference of type byte */
      def byte: AttributeReference = AttributeReference(s, ByteType, nullable = true)()

      /** Creates a new AttributeReference of type short */
      def short: AttributeReference = AttributeReference(s, ShortType, nullable = true)()

      /** Creates a new AttributeReference of type int */
      def int: AttributeReference = AttributeReference(s, IntegerType, nullable = true)()

      /** Creates a new AttributeReference of type long */
      def long: AttributeReference = AttributeReference(s, LongType, nullable = true)()

      /** Creates a new AttributeReference of type float */
      def float: AttributeReference = AttributeReference(s, FloatType, nullable = true)()

      /** Creates a new AttributeReference of type double */
      def double: AttributeReference = AttributeReference(s, DoubleType, nullable = true)()

      /** Creates a new AttributeReference of type string */
      def string: AttributeReference = AttributeReference(s, StringType, nullable = true)()

      /** Creates a new AttributeReference of type date */
      def date: AttributeReference = AttributeReference(s, DateType, nullable = true)()

      /** Creates a new AttributeReference of type decimal */
      def decimal: AttributeReference =
        AttributeReference(s, DecimalType.SYSTEM_DEFAULT, nullable = true)()

      /** Creates a new AttributeReference of type decimal */
      def decimal(precision: Int, scale: Int): AttributeReference =
        AttributeReference(s, DecimalType(precision, scale), nullable = true)()

      /** Creates a new AttributeReference of type timestamp */
      def timestamp: AttributeReference = AttributeReference(s, TimestampType, nullable = true)()

      /** Creates a new AttributeReference of type binary */
      def binary: AttributeReference = AttributeReference(s, BinaryType, nullable = true)()

      /** Creates a new AttributeReference of type array */
      def array(dataType: DataType): AttributeReference =
        AttributeReference(s, ArrayType(dataType), nullable = true)()

      def array(arrayType: ArrayType): AttributeReference =
        AttributeReference(s, arrayType)()

      /** Creates a new AttributeReference of type map */
      def map(keyType: DataType, valueType: DataType): AttributeReference =
        map(MapType(keyType, valueType))

      def map(mapType: MapType): AttributeReference =
        AttributeReference(s, mapType, nullable = true)()

      /** Creates a new AttributeReference of type struct */
      def struct(structType: StructType): AttributeReference =
        AttributeReference(s, structType, nullable = true)()

      def struct(attrs: AttributeReference*): AttributeReference =
        struct(StructType.fromAttributes(attrs))

      /** Creates a new AttributeReference of object type */
      def obj(cls: Class[_]): AttributeReference =
        AttributeReference(s, ObjectType(cls), nullable = true)()

      /** Create a function. */
      def function(exprs: Expression*): UnresolvedFunction =
        UnresolvedFunction(s, exprs, isDistinct = false)

      def distinctFunction(exprs: Expression*): UnresolvedFunction =
        UnresolvedFunction(s, exprs, isDistinct = true)
    }

    /** Nullability/binding helpers on an existing [[AttributeReference]]. */
    implicit class DslAttribute(a: AttributeReference) {
      def notNull: AttributeReference = a.withNullability(false)
      def canBeNull: AttributeReference = a.withNullability(true)
      def at(ordinal: Int): BoundReference = BoundReference(ordinal, a.dataType, a.nullable)
    }
  }
  /** Single importable entry point for the expression DSL conversions. */
  object expressions extends ExpressionConversions  // scalastyle:ignore
  /**
   * SQL-verb style constructors for building [[LogicalPlan]]s: `table`,
   * `select`, `where`, `join`, `groupBy`, etc. Each method wraps the receiver
   * plan in the corresponding logical operator node.
   */
  object plans { // scalastyle:ignore
    def table(ref: String): LogicalPlan = UnresolvedRelation(TableIdentifier(ref))

    def table(db: String, ref: String): LogicalPlan =
      UnresolvedRelation(TableIdentifier(ref, Option(db)))

    implicit class DslLogicalPlan(val logicalPlan: LogicalPlan) {
      // Non-named expressions are wrapped in UnresolvedAlias so the analyzer names them.
      def select(exprs: Expression*): LogicalPlan = {
        val namedExpressions = exprs.map {
          case e: NamedExpression => e
          case e => UnresolvedAlias(e)
        }
        Project(namedExpressions, logicalPlan)
      }

      def where(condition: Expression): LogicalPlan = Filter(condition, logicalPlan)

      def filter[T : Encoder](func: T => Boolean): LogicalPlan = TypedFilter(func, logicalPlan)

      def filter[T : Encoder](func: FilterFunction[T]): LogicalPlan = TypedFilter(func, logicalPlan)

      def serialize[T : Encoder]: LogicalPlan = CatalystSerde.serialize[T](logicalPlan)

      def deserialize[T : Encoder]: LogicalPlan = CatalystSerde.deserialize[T](logicalPlan)

      def limit(limitExpr: Expression): LogicalPlan = Limit(limitExpr, logicalPlan)

      def join(
        otherPlan: LogicalPlan,
        joinType: JoinType = Inner,
        condition: Option[Expression] = None): LogicalPlan =
        Join(logicalPlan, otherPlan, joinType, condition, JoinHint.NONE)

      def cogroup[Key: Encoder, Left: Encoder, Right: Encoder, Result: Encoder](
          otherPlan: LogicalPlan,
          func: (Key, Iterator[Left], Iterator[Right]) => TraversableOnce[Result],
          leftGroup: Seq[Attribute],
          rightGroup: Seq[Attribute],
          leftAttr: Seq[Attribute],
          rightAttr: Seq[Attribute]
        ): LogicalPlan = {
        CoGroup.apply[Key, Left, Right, Result](
          func,
          leftGroup,
          rightGroup,
          leftAttr,
          rightAttr,
          logicalPlan,
          otherPlan)
      }

      // orderBy is a global sort; sortBy sorts within partitions only.
      def orderBy(sortExprs: SortOrder*): LogicalPlan = Sort(sortExprs, true, logicalPlan)

      def sortBy(sortExprs: SortOrder*): LogicalPlan = Sort(sortExprs, false, logicalPlan)

      // Non-named aggregate expressions get an alias from their string form.
      def groupBy(groupingExprs: Expression*)(aggregateExprs: Expression*): LogicalPlan = {
        val aliasedExprs = aggregateExprs.map {
          case ne: NamedExpression => ne
          case e => Alias(e, e.toString)()
        }
        Aggregate(groupingExprs, aliasedExprs, logicalPlan)
      }

      def window(
          windowExpressions: Seq[NamedExpression],
          partitionSpec: Seq[Expression],
          orderSpec: Seq[SortOrder]): LogicalPlan =
        Window(windowExpressions, partitionSpec, orderSpec, logicalPlan)

      def subquery(alias: Symbol): LogicalPlan = SubqueryAlias(alias.name, logicalPlan)

      def except(otherPlan: LogicalPlan, isAll: Boolean): LogicalPlan =
        Except(logicalPlan, otherPlan, isAll)

      def intersect(otherPlan: LogicalPlan, isAll: Boolean): LogicalPlan =
        Intersect(logicalPlan, otherPlan, isAll)

      def union(otherPlan: LogicalPlan): LogicalPlan = Union(logicalPlan, otherPlan)

      def generate(
        generator: Generator,
        unrequiredChildIndex: Seq[Int] = Nil,
        outer: Boolean = false,
        alias: Option[String] = None,
        outputNames: Seq[String] = Nil): LogicalPlan =
        Generate(generator, unrequiredChildIndex, outer,
          alias, outputNames.map(UnresolvedAttribute(_)), logicalPlan)

      def insertInto(tableName: String, overwrite: Boolean = false): LogicalPlan =
        InsertIntoTable(
          analysis.UnresolvedRelation(TableIdentifier(tableName)),
          Map.empty, logicalPlan, overwrite, ifPartitionNotExists = false)

      def as(alias: String): LogicalPlan = SubqueryAlias(alias, logicalPlan)

      // coalesce reduces partitions without a shuffle; repartition shuffles.
      def coalesce(num: Integer): LogicalPlan =
        Repartition(num, shuffle = false, logicalPlan)

      def repartition(num: Integer): LogicalPlan =
        Repartition(num, shuffle = true, logicalPlan)

      def distribute(exprs: Expression*)(n: Int): LogicalPlan =
        RepartitionByExpression(exprs, logicalPlan, numPartitions = n)

      // Runs the test analyzer and strips subquery aliases, for easy plan comparison.
      def analyze: LogicalPlan =
        EliminateSubqueryAliases(analysis.SimpleAnalyzer.execute(logicalPlan))

      def hint(name: String, parameters: Any*): LogicalPlan =
        UnresolvedHint(name, parameters, logicalPlan)
    }
  }
}
| WindCanDie/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/dsl/package.scala | Scala | apache-2.0 | 18,327 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.conf
import org.locationtech.geomesa.utils.conf.GeoMesaSystemProperties.SystemProperty
/**
 * System properties controlling schema-related checks.
 */
object SchemaProperties {
  // Defaults to "true"; presumably toggles validation of the distributed
  // classpath — exact semantics live at the usage sites, confirm there.
  val ValidateDistributedClasspath = SystemProperty("geomesa.validate.distributed.classpath", "true")
  // Defaults to "false"; presumably enables a distributed version check — TODO confirm at usage sites.
  val CheckDistributedVersion = SystemProperty("geomesa.distributed.version.check", "false")
}
| aheyne/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/conf/SchemaProperties.scala | Scala | apache-2.0 | 816 |
/*******************************************************************************
* Copyright (c) 2014 Łukasz Szpakowski.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
******************************************************************************/
package pl.luckboy.purfuncor.frontend.typer
import scala.util.parsing.input.NoPosition
import scalaz._
import scalaz.Scalaz._
import pl.luckboy.purfuncor.common._
import pl.luckboy.purfuncor.frontend._
import pl.luckboy.purfuncor.frontend
import pl.luckboy.purfuncor.common.Evaluator._
/**
 * Evaluation functions for the built-in type functions.
 */
object TypeBuiltinFunctions
{
  /** The value returned when a type function is applied with the wrong number of arguments. */
  def illegalAppNoTypeValue[T, U, V, W] = NoTypeValue.fromError[T, U, V, W](FatalError("illegal type application", none, NoPosition))

  /**
   * Builds a nullary [[TypeFunction]] that evaluates to the built-in type `fun`
   * and rejects any application with arguments. All parameterless built-in
   * types (Any, Nothing, the numeric types, Empty, NonEmpty, ...) share this
   * exact shape, so they are generated from this single helper instead of
   * fourteen hand-written anonymous classes.
   */
  private def nullaryTypeFunction(fun: frontend.TypeBuiltinFunction.Value) = new TypeFunction(0) {
    override def applyS[T, U, V, W, E](argValues: Seq[TypeValue[T, U, V, W]])(env: E)(implicit eval: Evaluator[TypeSimpleTerm[U, V], E, TypeValue[T, U, V, W]]) =
      argValues match {
        case Seq() => (env, EvaluatedTypeValue(BuiltinType(fun, Nil)))
        case _     => (env, illegalAppNoTypeValue)
      }
  }

  /** The evaluation functions of all built-in type functions, keyed by identifier. */
  val typeBuiltinFunctions = Map[frontend.TypeBuiltinFunction.Value, TypeFunction](
    frontend.TypeBuiltinFunction.Any -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Any),
    frontend.TypeBuiltinFunction.Nothing -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Nothing),
    frontend.TypeBuiltinFunction.Zero -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Zero),
    frontend.TypeBuiltinFunction.NonZero -> nullaryTypeFunction(frontend.TypeBuiltinFunction.NonZero),
    frontend.TypeBuiltinFunction.Boolean -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Boolean),
    frontend.TypeBuiltinFunction.Char -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Char),
    frontend.TypeBuiltinFunction.Byte -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Byte),
    frontend.TypeBuiltinFunction.Short -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Short),
    frontend.TypeBuiltinFunction.Int -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Int),
    frontend.TypeBuiltinFunction.Long -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Long),
    frontend.TypeBuiltinFunction.Float -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Float),
    frontend.TypeBuiltinFunction.Double -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Double),
    frontend.TypeBuiltinFunction.Empty -> nullaryTypeFunction(frontend.TypeBuiltinFunction.Empty),
    frontend.TypeBuiltinFunction.NonEmpty -> nullaryTypeFunction(frontend.TypeBuiltinFunction.NonEmpty),
    // Array is unary: wraps the evaluated element type term.
    frontend.TypeBuiltinFunction.Array -> new TypeFunction(1) {
      override def applyS[T, U, V, W, E](argValues: Seq[TypeValue[T, U, V, W]])(env: E)(implicit eval: Evaluator[TypeSimpleTerm[U, V], E, TypeValue[T, U, V, W]]) =
        argValues match {
          case Seq(value) =>
            val (env2, res) = value.typeValueTermS(env)
            (env2, res.map { tvt => EvaluatedTypeValue(BuiltinType(TypeBuiltinFunction.Array, Seq(tvt))) }.valueOr(identity))
          case _ =>
            (env, illegalAppNoTypeValue)
        }
    },
    // Fun is binary: builds the function type from the two evaluated terms.
    frontend.TypeBuiltinFunction.Fun -> new TypeFunction(2) {
      override def applyS[T, U, V, W, E](argValues: Seq[TypeValue[T, U, V, W]])(env: E)(implicit eval: Evaluator[TypeSimpleTerm[U, V], E, TypeValue[T, U, V, W]]) =
        argValues match {
          case Seq(value1, value2) =>
            val (env2, res1) = value1.typeValueTermS(env)
            val (env3, res2) = value2.typeValueTermS(env2)
            val retValue = (for {
              t1 <- res1
              t2 <- res2
            } yield EvaluatedTypeValue(BuiltinType(TypeBuiltinFunction.Fun, Seq(t1, t2)))).valueOr(identity)
            (env3, retValue)
          case _ =>
            (env, illegalAppNoTypeValue)
        }
    },
    // Conj is binary: the conjunction (&) of the two evaluated terms.
    frontend.TypeBuiltinFunction.Conj -> new TypeFunction(2) {
      override def applyS[T, U, V, W, E](argValues: Seq[TypeValue[T, U, V, W]])(env: E)(implicit eval: Evaluator[TypeSimpleTerm[U, V], E, TypeValue[T, U, V, W]]) =
        argValues match {
          case Seq(value1, value2) =>
            val (env2, res1) = value1.typeValueTermS(env)
            val (env3, res2) = value2.typeValueTermS(env2)
            val retValue = (for {
              t1 <- res1
              t2 <- res2
            } yield EvaluatedTypeValue(t1 & t2)).valueOr(identity)
            (env3, retValue)
          case _ =>
            (env, illegalAppNoTypeValue)
        }
    },
    // Disj is binary: the disjunction (|) of the two evaluated terms.
    frontend.TypeBuiltinFunction.Disj -> new TypeFunction(2) {
      override def applyS[T, U, V, W, E](argValues: Seq[TypeValue[T, U, V, W]])(env: E)(implicit eval: Evaluator[TypeSimpleTerm[U, V], E, TypeValue[T, U, V, W]]) =
        argValues match {
          case Seq(value1, value2) =>
            val (env2, res1) = value1.typeValueTermS(env)
            val (env3, res2) = value2.typeValueTermS(env2)
            val retValue = (for {
              t1 <- res1
              t2 <- res2
            } yield EvaluatedTypeValue(t1 | t2)).valueOr(identity)
            (env3, retValue)
          case _ =>
            (env, illegalAppNoTypeValue)
        }
    })
}
| luckboy/Purfuncor | src/main/scala/pl/luckboy/purfuncor/frontend/typer/TypeBuiltinFunctions.scala | Scala | mpl-2.0 | 9,724 |
/*
* Copyright 2015 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shopScala.util.examples
import java.util.concurrent.{CountDownLatch, TimeUnit}
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success, Try}
import org.mongodb.scala.Completed
import org.reactivestreams.{Subscriber, Subscription}
/**
 * Factory for [[TestSubscriber]] instances.
 */
object TestSubscriber {
  /** Creates a [[TestSubscriber]] whose delegate silently ignores every event. */
  def apply[T](): TestSubscriber[T] = {
    TestSubscriber(new Subscriber[T]() {
      def onSubscribe(subscription: Subscription) {}
      def onNext(result: T) {}
      def onComplete() {}
      def onError(e: Throwable) {}
    })
  }
}
case class TestSubscriber[T](delegate: Subscriber[T]) extends Subscriber[T] {
private final val latch: CountDownLatch = new CountDownLatch(1)
private final val onNextEvents: ArrayBuffer[T] = new ArrayBuffer[T]
private final val onErrorEvents: ArrayBuffer[Throwable] = new ArrayBuffer[Throwable]
private final val onCompleteEvents: ArrayBuffer[Completed] = new ArrayBuffer[Completed]
private var subscription: Option[Subscription] = None
def onSubscribe(subscription: Subscription) {
this.subscription = Some(subscription)
}
/**
* Provides the Subscriber with a new item to observe.
*
* The `Publisher` may call this method 0 or more times.
*
* The `Publisher` will not call this method again after it calls either `onComplete` or `onError`.
*
* @param result the item emitted by the obserable
*/
def onNext(result: T): Unit = {
onNextEvents += result
delegate.onNext(result)
}
/**
* Notifies the Subscriber that the obserable has experienced an error condition.
*
* If the obserable calls this method, it will not thereafter call `onNext` or
* `onComplete`.
*
* @param e the exception encountered by the obserable
*/
def onError(e: Throwable): Unit = {
try {
onErrorEvents += e
delegate.onError(e)
} finally {
latch.countDown()
}
}
/**
* Notifies the Subscriber that the obserable has finished sending push-based notifications.
*
* The obserable will not call this method if it calls `onError`.
*
*/
def onComplete(): Unit = {
try {
onCompleteEvents += Completed()
delegate.onComplete()
} finally {
latch.countDown()
}
}
/**
* Allow calling the protected `Subscription.request(long)` from unit tests.
*
* @param n the maximum number of items you want the obserable to emit to the Subscriber at this time, or
* `Long.MaxValue` if you want the obserable to emit items at its own pace
*/
def requestMore(n: Long): Unit = {
subscription match {
case Some(sub) => sub.request(n)
case None =>
}
}
/**
* Get the `Throwable`s this `Subscriber` was notified of via `onError` as a `Seq`.
*
* @return a list of the Throwables that were passed to this Subscriber's { @link #onError} method
*/
def getOnErrorEvents: Seq[Throwable] = onErrorEvents.toSeq
/**
* Get the sequence of items observed by this `Subscriber`, as an ordered `List`.
*
* @return a list of items observed by this Subscriber, in the order in which they were observed
*/
def getOnNextEvents: Seq[T] = onNextEvents.toSeq
/**
* Returns the subscription to the this `Subscriber`.
*
* @return the subscription or null if not subscribed to
*/
def getSubscription: Option[Subscription] = subscription
/**
* Assert that a particular sequence of items was received by this `Subscriber` in order.
*
* @param items the sequence of items expected to have been observed
* @throws AssertionError if the sequence of items observed does not exactly match `items`
*/
def assertReceivedOnNext(items: Seq[T]) {
if (getOnNextEvents.size != items.size) {
throw new AssertionError(s"Number of items does not match. Provided: ${items.size} Actual: ${getOnNextEvents.size}")
}
items.indices.foreach(i => {
items(i) == onNextEvents(i) match {
case false => throw new AssertionError(s"Value at index: $i expected to be [${items(i)}] but was: [${onNextEvents(i)}]")
case true =>
}
})
}
/**
* Assert that a single terminal event occurred, either `onComplete` or `onError`.
*
* @throws AssertionError if not exactly one terminal event notification was received
*/
def assertTerminalEvent(): Unit = {
if (onErrorEvents.size > 1) {
throw new AssertionError("Too many onError events: " + onErrorEvents.size)
}
if (onCompleteEvents.size > 1) {
throw new AssertionError("Too many onCompleted events: " + onCompleteEvents.size)
}
if (onCompleteEvents.size == 1 && onErrorEvents.size == 1) {
throw new AssertionError("Received both an onError and onCompleted. Should be one or the other.")
}
if (onCompleteEvents.isEmpty && onErrorEvents.isEmpty) {
throw new AssertionError("No terminal events received.")
}
}
/**
 * Assert that no terminal event occurred, either `onComplete` or `onError`.
 *
 * @throws AssertionError if a terminal event notification was received
 */
def assertNoTerminalEvent(): Unit = {
  // Bug fix: the original used `&&`, which only failed when BOTH an onCompleted
  // and an onError had been seen. A single terminal event of either kind must
  // already trip this assertion (mirrors rx.observers.TestSubscriber and the
  // sibling assertTerminalEvent above).
  if (onCompleteEvents.nonEmpty || onErrorEvents.nonEmpty) {
    throw new AssertionError("Terminal events received.")
  }
}
/**
 * Assert that this `Subscriber` has received no `onError` notifications.
 *
 * @throws RuntimeException if this `Subscriber` has received one or more `onError`
 *                          notifications; the first observed error is attached as the cause
 */
// NOTE(review): the original scaladoc promised an AssertionError, but the code
// throws RuntimeException. The doc is corrected to match the code; changing the
// exception type itself would alter the public contract, so the code is untouched.
def assertNoErrors(): Unit = {
  if (onErrorEvents.nonEmpty) {
    throw new RuntimeException("Unexpected onError events: " + onErrorEvents.size, getOnErrorEvents.head)
  }
}
/**
 * Assert that this `Subscriber` has received at least one `onError` notification.
 *
 * @throws RuntimeException if this `Subscriber` did not receive any `onError` notification
 */
// NOTE(review): as with assertNoErrors, the original doc claimed AssertionError
// while the code throws RuntimeException; the doc is fixed, the behavior kept.
def assertErrored(): Unit = {
  if (onErrorEvents.isEmpty) {
    throw new RuntimeException("No onError events")
  }
}
/**
 * Blocks until this `Subscriber` receives a notification that the `Observable` is complete (either an `onCompleted` or
 * `onError` notification).
 *
 * @throws RuntimeException if the Subscriber is interrupted before the Observable is able to complete
 */
def awaitTerminalEvent(): Unit = {
  // Bug fix: `Try` only catches throwables matched by `scala.util.control.NonFatal`,
  // which deliberately classifies InterruptedException as fatal. The original
  // `Try(latch.await())` therefore let the interruption escape unwrapped and its
  // Failure branch was unreachable. An explicit catch restores the documented
  // "wrap as RuntimeException" behavior.
  try latch.await()
  catch {
    case ex: InterruptedException => throw new RuntimeException("Interrupted", ex)
  }
}
/**
 * Blocks until this `Subscriber` receives a notification that the `Observable` is complete (either an `onCompleted` or
 * `onError` notification), or until the given timeout elapses.
 *
 * @param duration the duration of the timeout
 * @throws RuntimeException if the Subscriber is interrupted before the Observable is able to complete,
 *                          or if the await does not return within the given duration
 */
def awaitTerminalEvent(duration: Duration): Unit = {
  // Bug fix: as in the no-arg overload, `Try` does not catch InterruptedException
  // (it is fatal per `NonFatal`), so the original Failure branch could never
  // observe the interruption. Also replaces the deprecated procedure syntax
  // with an explicit `: Unit =`.
  val completedInTime =
    try latch.await(duration.toMillis, TimeUnit.MILLISECONDS)
    catch {
      case ex: InterruptedException => throw new RuntimeException("Interrupted", ex)
    }
  if (!completedInTime) throw new RuntimeException("Failed to return in time")
}
}
| hermannhueck/reactive-mongo-access | src/main/scala/shopScala/util/examples/TestSubscriber.scala | Scala | apache-2.0 | 7,752 |
import _root_.io.gatling.core.scenario.Simulation
import ch.qos.logback.classic.{Level, LoggerContext}
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
/**
* Performance test for the ClaimCase entity.
*/
class ClaimCaseGatlingTest extends Simulation {

  // Logback context, used to tune Gatling's HTTP wire logging (see the two
  // commented-out lines below).
  val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext]
  // Log all HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE"))
  // Log failed HTTP requests
  //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG"))

  // Base URL of the system under test; override with -DbaseURL=<url>.
  val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080"""

  // HTTP protocol configuration shared by every request in the scenario.
  val httpConf = http
    .baseURL(baseURL)
    .inferHtmlResources()
    .acceptHeader("*/*")
    .acceptEncodingHeader("gzip, deflate")
    .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3")
    .connection("keep-alive")
    .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0")

  // Headers for unauthenticated JSON requests.
  val headers_http = Map(
    "Accept" -> """application/json"""
  )

  // Headers for authenticated requests; "${csrf_token}" is a Gatling session
  // attribute captured from the Set-Cookie header during authentication.
  val headers_http_authenticated = Map(
    "Accept" -> """application/json""",
    "X-CSRF-TOKEN" -> "${csrf_token}"
  )

  // Scenario: obtain a CSRF token, authenticate, then repeatedly list, create,
  // read and delete ClaimCase entities through the REST API.
  // NOTE(review): the regex character class [P,p] also matches a literal comma;
  // presumably [Pp] was intended — harmless here, but worth confirming.
  val scn = scenario("Test the ClaimCase entity")
    .exec(http("First unauthenticated request")
    .get("/api/account")
    .headers(headers_http)
    .check(status.is(401))
    .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
    .pause(10)
    .exec(http("Authentication")
    .post("/api/authentication")
    .headers(headers_http_authenticated)
    .formParam("j_username", "admin")
    .formParam("j_password", "admin")
    .formParam("_spring_security_remember_me", "true")
    .formParam("submit", "Login"))
    .pause(1)
    .exec(http("Authenticated request")
    .get("/api/account")
    .headers(headers_http_authenticated)
    .check(status.is(200))
    .check(headerRegex("Set-Cookie", "CSRF-TOKEN=(.*); [P,p]ath=/").saveAs("csrf_token")))
    .pause(10)
    // CRUD cycle over the claimCase resource, executed twice per user.
    .repeat(2) {
      exec(http("Get all claimCases")
      .get("/api/claimCases")
      .headers(headers_http_authenticated)
      .check(status.is(200)))
      .pause(10 seconds, 20 seconds)
      .exec(http("Create new claimCase")
      .put("/api/claimCases")
      .headers(headers_http_authenticated)
      .body(StringBody("""{"id":null, "legacyCaseNumber":"SAMPLE_TEXT"}""")).asJSON
      .check(status.is(201))
      // The Location header of the 201 response points at the created entity.
      .check(headerRegex("Location", "(.*)").saveAs("new_claimCase_url")))
      .pause(10)
      .repeat(5) {
        exec(http("Get created claimCase")
        .get("${new_claimCase_url}")
        .headers(headers_http_authenticated))
        .pause(10)
      }
      .exec(http("Delete created claimCase")
      .delete("${new_claimCase_url}")
      .headers(headers_http_authenticated))
      .pause(10)
    }

  val users = scenario("Users").exec(scn)

  // Ramp 100 virtual users over one minute against the shared HTTP config.
  setUp(
    users.inject(rampUsers(100) over (1 minutes))
  ).protocols(httpConf)
}
| onpointtech/stack-reference | src/test/gatling/simulations/ClaimCaseGatlingTest.scala | Scala | apache-2.0 | 3,338 |
package dotty.tools.scaladoc.util
/**
* This is trivial html renderer using api inspired by ScalaTags
* It probably could be more efficient but for now on it should be good enough.
*/
object HTML:
  /** Argument accepted in attribute position: a single rendered attribute or a
   *  sequence of them (sequences support optional/conditional attribute lists). */
  type AttrArg = AppliedAttr | Seq[AppliedAttr]
  /** Argument accepted in child position: rendered tags, plain strings (which
   *  will be escaped), or sequences thereof. */
  type TagArg = AppliedTag | Seq[AppliedTag] | String | Seq[String]

  /** An HTML element factory; applying it renders `<name ...>...</name>`
   *  directly into a StringBuilder. */
  case class Tag(name: String):
    /** Children only, no attributes. */
    def apply(tags: TagArg*): AppliedTag = apply()(tags:_*)
    /** Attributes only, no children. */
    def apply(first: AttrArg, rest: AttrArg*): AppliedTag = apply((first +: rest):_*)()
    /** Attributes and children. */
    def apply(attrs: AttrArg*)(tags: TagArg*): AppliedTag =
      // Recursively appends children: already-applied tags go in verbatim,
      // plain strings are escaped first.
      // NOTE(review): the Seq arm below relies on type erasure — at runtime it
      // matches ANY Seq; the element type annotation is not checked.
      def unpackTags(tags: TagArg*)(using sb: StringBuilder): StringBuilder =
        tags.foreach {
          case t: AppliedTag =>
            sb.append(t)
          case s: String =>
            sb.append(s.escapeReservedTokens)
          case s: Seq[AppliedTag | String] =>
            unpackTags(s:_*)
        }
        sb
      val sb = StringBuilder()
      sb.append(s"<$name")
      // Empty attribute collections (Nil) are skipped; sequences are flattened.
      attrs.filter(_ != Nil).foreach{
        case s: Seq[AppliedAttr] =>
          s.foreach(sb.append(" ").append)
        case e: AppliedAttr =>
          sb.append(" ").append(e)
      }
      sb.append(">")
      unpackTags(tags:_*)(using sb)
      sb.append(s"</$name>")
      sb

  /** Escape the characters reserved in HTML text content.
   *  `&` must be replaced first so already-produced entities are not re-escaped. */
  extension (s: String) def escapeReservedTokens: String =
    s.replace("&", "&amp;")
      .replace("<", "&lt;")
      .replace(">", "&gt;")
      .replace("\\"", "&quot;")
      .replace("'", "&apos;")

  /** A named attribute; `:=` binds a value, producing `name="value"`.
   *  Note: the value is interpolated verbatim — it is not escaped here. */
  case class Attr(name: String):
    def :=(value: String): AppliedAttr = new AppliedAttr(s"""$name="$value"""")

  /** A fully rendered tag; backed by a StringBuilder, opaque to callers. */
  opaque type AppliedTag = StringBuilder
  /** A fully rendered attribute (`name="value"`); opaque to callers. */
  opaque type AppliedAttr = String

  // Predefined tag factories.
  val div = Tag("div")
  val span = Tag("span")
  val a = Tag("a")
  val p = Tag("p")
  val h1 = Tag("h1")
  val h2 = Tag("h2")
  val h3 = Tag("h3")
  val h4 = Tag("h4")
  val h5 = Tag("h5")
  val h6 = Tag("h6")
  val dl = Tag("dl")
  val dd = Tag("dd")
  val dt = Tag("dt")
  val svg = Tag("svg")
  val button = Tag("button")
  val input = Tag("input")
  val label = Tag("label")
  val script = Tag("script")
  val link = Tag("link")
  val footer = Tag("footer")
  val html = Tag("html")
  val head = Tag("head")
  val meta = Tag("meta")
  val main = Tag("main")
  val title = Tag("title")
  val body = Tag("body")
  val nav = Tag("nav")
  val img = Tag("img")
  val ul = Tag("ul")
  val ol = Tag("ol")
  val li = Tag("li")
  val code = Tag("code")
  val pre = Tag("pre")
  val table = Tag("table")
  val thead = Tag("thead")
  val tbody = Tag("tbody")
  val th = Tag("th")
  val tr = Tag("tr")
  val td = Tag("td")

  // Predefined attribute factories.
  val cls = Attr("class")
  val href = Attr("href")
  val style = Attr("style")
  val id = Attr("id")
  val `type` = Attr("type")
  val placeholder = Attr("placeholder")
  val defer = Attr("defer")
  val src = Attr("src")
  val rel = Attr("rel")
  val charset = Attr("charset")
  val name = Attr("name")
  val content = Attr("content")
  val testId = Attr("data-test-id")
  val alt = Attr("alt")
  val value = Attr("value")
  val onclick=Attr("onclick")
  val titleAttr =Attr("title")
  val onkeyup = Attr("onkeyup")

  /** Wrap content as a tag WITHOUT escaping — caller guarantees it is safe HTML. */
  def raw(content: String): AppliedTag = new AppliedTag(content)
  /** Wrap an existing builder as a tag without copying or escaping. */
  def raw(content: StringBuilder): AppliedTag = content
  /** Escape content for safe inclusion as text. */
  def text(content: String) = content.escapeReservedTokens

  val hr = raw("<hr/>")
| dotty-staging/dotty | scaladoc/src/dotty/tools/scaladoc/util/html.scala | Scala | apache-2.0 | 3,270 |
package com.github.pheymann.rrt.util
import com.github.pheymann.rrt.TestConfig
import com.github.pheymann.rrt.util.ResponseComparator.{ComparisonFailure, FailureWithDiffs, FailureWithValues}
import scala.util.Try
import scala.util.control.NonFatal
object BodyAsStringComparison {

  /**
   * Compare two response bodies as plain strings.
   *
   * Both bodies are cleaned first: the configured JSON keys are removed
   * (`config.jsonIgnoreKeys`), then every match of the configured regular
   * expressions is stripped (`config.bodyRemovals`). When `config.showDiffs`
   * is set the cleaned bodies are compared as JSON documents, otherwise as
   * raw strings.
   *
   * @param actual   body returned by the service under test
   * @param expected body returned by the reference service
   * @param config   test configuration holding cleanup rules and flags
   * @return `None` when the cleaned bodies match, otherwise a comparison failure
   */
  def stringComparison(actual: String,
                       expected: String,
                       config: TestConfig): Option[ComparisonFailure] = {
    // Cleaning order is significant and preserved: keys first, then regexes.
    val actualCleaned   = cleanBodyByRegex(cleanBodyByKey(actual, config.jsonIgnoreKeys), config.bodyRemovals)
    val expectedCleaned = cleanBodyByRegex(cleanBodyByKey(expected, config.jsonIgnoreKeys), config.bodyRemovals)

    if (config.showDiffs)
      jsonComparison(actualCleaned, expectedCleaned)
    else if (actualCleaned != expectedCleaned)
      Some(FailureWithValues("body", actualCleaned, expectedCleaned))
    else
      None
  }

  /**
   * Compare two bodies as JSON documents and report their structural diff.
   *
   * @param actual   body returned by the service under test
   * @param expected body returned by the reference service
   * @return `None` when the documents are equal, a [[FailureWithDiffs]] with the
   *         JSON patch otherwise, or a [[FailureWithValues]] when a body cannot
   *         be parsed as JSON
   */
  def jsonComparison(actual: String,
                     expected: String): Option[ComparisonFailure] = {
    import gnieh.diffson.playJson._

    try {
      val diffs = JsonDiff.diff(actual, expected, false).toString
      // An empty JSON patch renders as "[ ]"; anything else is a real difference.
      if (diffs != "[ ]")
        Some(FailureWithDiffs("body", diffs))
      else
        None
    } catch {
      // Parse failures (non-JSON bodies) degrade to a plain value comparison
      // failure; fatal throwables still propagate.
      case NonFatal(_) => Some(FailureWithValues("body", actual, expected))
    }
  }

  private final val EmptyReplacement = ""

  /** Remove the given JSON keys together with their values from the body.
   *  Rewritten from a `var` accumulation loop to an idiomatic `foldLeft`. */
  private[util] def cleanBodyByKey(body: String, keys: List[String]): String = {
    // Matches "key": <value>, in the middle of a JSON object.
    def innerKeyRegex(key: String): String = "\\"" + key + "\\"\\\\s*:\\\\s*(\\"[^,]*\\")?([0-9]*)?(null)?(\\\\[.*\\\\])?(\\\\{.*\\\\})?\\\\s*,"
    // Matches [,]"key": <value>} as the last entry of a JSON object.
    def lastKeyRegex(key: String): String = "(,)?\\"" + key + "\\"\\\\s*:\\\\s*(\\".*\\")?([0-9]*)?(null)?(\\\\[.*\\\\])?(\\\\{.*\\\\})?\\\\s*\\\\}"

    keys.foldLeft(body) { (cleaned, key) =>
      cleaned
        .replaceAll(innerKeyRegex(key), EmptyReplacement)
        .replaceAll(lastKeyRegex(key), "}")
    }
  }

  /** Remove every match of the given regular expressions from the body.
   *  Rewritten from a `var` accumulation loop to an idiomatic `foldLeft`. */
  private[util] def cleanBodyByRegex(body: String, regexes: List[String]): String =
    regexes.foldLeft(body)((cleaned, regex) => cleaned.replaceAll(regex, EmptyReplacement))
}
| pheymann/rest-refactoring-test | core/src/main/scala/com/github/pheymann/rrt/util/BodyAsStringComparison.scala | Scala | mit | 2,265 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without deeper analysis.