code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package troy
package cql.parser.dml
import troy.cql.ast.CqlParser._
import troy.cql.ast.InsertStatement
import troy.cql.ast.dml.Insert
trait InsertStatementParser {

  /**
   * Parses a full CQL `INSERT` statement:
   *
   *   INSERT INTO <table>
   *     ( (<column-names>) VALUES (<tuple>) | JSON '<json>' [DEFAULT (NULL | UNSET)] )
   *     [IF NOT EXISTS] [<using clause>]
   */
  def insertStatement: Parser[InsertStatement] = {
    import Insert._

    // Target table: INTO <table-name>.
    // NOTE(review): most keywords here use `.i` (presumably case-insensitive matching),
    // but "INTO" here and "JSON" below are matched as plain strings -- confirm whether an
    // implicit conversion already makes bare string parsers case-insensitive.
    def into = "INTO" ~> tableName

    def insertClause: Parser[InsertClause] = {
      // Parenthesised, comma-separated, non-empty list of column identifiers.
      def names = parenthesis(rep1sep(identifier, ","))

      // (<names>) VALUES (<tuple-literal>)
      // `^^^^` appears to be a project-defined combinator that applies the case-class
      // constructor to the flattened `~` results -- TODO confirm in CqlParser.
      def namesValues: Parser[NamesValues] = names ~ ("VALUES".i ~> tupleLiteral) ^^^^ NamesValues

      // JSON '<json-string>' [DEFAULT (NULL | UNSET)]
      def jsonClause: Parser[JsonClause] = {
        def default: Parser[Default] = {
          def nullValue = "NULL".i ^^^ NullValue
          def unset = "UNSET".i ^^^ Unset
          nullValue | unset
        }
        "JSON" ~> Constants.string ~ ("DEFAULT".i ~> default).? ^^^^ JsonClause
      }

      // A statement supplies either explicit names/values or a JSON payload.
      namesValues | jsonClause
    }

    // Optional IF NOT EXISTS marker, parsed to a Boolean flag.
    def ifNotExists = "IF NOT EXISTS".flag

    "INSERT".i ~>
      into ~
      insertClause ~
      ifNotExists ~
      using ^^^^ InsertStatement.apply
  }
}
| schemasafe/troy | cql-parser/src/main/scala/troy/cql/parser/dml/InsertStatementParser.scala | Scala | apache-2.0 | 984 |
package eu.timepit.refined.scalacheck
import eu.timepit.refined.W
import eu.timepit.refined.api.Refined
import eu.timepit.refined.collection.NonEmpty
import eu.timepit.refined.scalacheck.any._
import eu.timepit.refined.string.MatchesRegex
import org.scalacheck.Properties
/** Checks that `Arbitrary` instances for refined types can be derived via `any._`. */
class AnyArbitrarySpec extends Properties("AnyArbitrary") {

  // Regex refinement: strings matching ".{2,}", i.e. at least two characters.
  property("MatchesRegex[S]") =
    checkArbitraryRefinedType[String Refined MatchesRegex[W.`".{2,}"`.T]]

  // Collection refinement: non-empty integer lists.
  property("NonEmpty") = checkArbitraryRefinedType[List[Int] Refined NonEmpty]
}
| fthomas/refined | modules/scalacheck/shared/src/test/scala-3.0-/eu/timepit/refined/scalacheck/AnyArbitrarySpec.scala | Scala | mit | 523 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import scala.collection.immutable.HashSet
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans.DslLogicalPlan
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.IntegralLiteralTestUtils._
import org.apache.spark.sql.catalyst.optimizer.UnwrapCastInBinaryComparison._
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.types._
/**
 * Tests for the `UnwrapCastInBinaryComparison` optimizer rule: comparisons of the form
 * `cast(attr as widerType) <op> literal` should be rewritten into comparisons on the
 * narrower attribute type whenever that is semantically safe.
 *
 * Each test asserts both plan equivalence (after optimization) and, via
 * `assertEquivalent`, runtime equivalence on a fixed set of sample rows.
 */
class UnwrapCastInBinaryComparisonSuite extends PlanTest with ExpressionEvalHelper {

  // Runs NullPropagation + UnwrapCastInBinaryComparison to a fixed point (max 10 passes).
  object Optimize extends RuleExecutor[LogicalPlan] {
    val batches: List[Batch] =
      Batch("Unwrap casts in binary comparison", FixedPoint(10),
        NullPropagation, UnwrapCastInBinaryComparison) :: Nil
  }

  // One nullable attribute per source type exercised below:
  // f: short, f2: float, f3: decimal(5,2), f4: boolean.
  val testRelation: LocalRelation = LocalRelation('a.short, 'b.float, 'c.decimal(5, 2), 'd.boolean)
  val f: BoundReference = 'a.short.canBeNull.at(0)
  val f2: BoundReference = 'b.float.canBeNull.at(1)
  val f3: BoundReference = 'c.decimal(5, 2).canBeNull.at(2)
  val f4: BoundReference = 'd.boolean.canBeNull.at(3)

  test("unwrap casts when literal == max") {
    // Literal equals the max of the narrow type: strict > is never true, <= always is.
    val v = Short.MaxValue
    assertEquivalent(castInt(f) > v.toInt, falseIfNotNull(f))
    assertEquivalent(castInt(f) >= v.toInt, f === v)
    assertEquivalent(castInt(f) === v.toInt, f === v)
    assertEquivalent(castInt(f) <=> v.toInt, f <=> v)
    assertEquivalent(castInt(f) <= v.toInt, trueIfNotNull(f))
    assertEquivalent(castInt(f) < v.toInt, f =!= v)

    // For floats, NaN is the "max" per Spark's ordering (see getRange() test below).
    val d = Float.NaN
    assertEquivalent(castDouble(f2) > d.toDouble, falseIfNotNull(f2))
    assertEquivalent(castDouble(f2) >= d.toDouble, f2 === d)
    assertEquivalent(castDouble(f2) === d.toDouble, f2 === d)
    assertEquivalent(castDouble(f2) <=> d.toDouble, f2 <=> d)
    assertEquivalent(castDouble(f2) <= d.toDouble, trueIfNotNull(f2))
    assertEquivalent(castDouble(f2) < d.toDouble, f2 =!= d)
  }

  test("unwrap casts when literal > max") {
    // Literal is above the narrow type's range: comparisons collapse to constants
    // (modulo null-ness of the attribute).
    val v: Int = positiveInt
    assertEquivalent(castInt(f) > v, falseIfNotNull(f))
    assertEquivalent(castInt(f) >= v, falseIfNotNull(f))
    assertEquivalent(castInt(f) === v, falseIfNotNull(f))
    assertEquivalent(castInt(f) <=> v, false)
    assertEquivalent(castInt(f) <= v, trueIfNotNull(f))
    assertEquivalent(castInt(f) < v, trueIfNotNull(f))
  }

  test("unwrap casts when literal == min") {
    // Mirror of the "== max" case at the bottom of the range.
    val v = Short.MinValue
    assertEquivalent(castInt(f) > v.toInt, f =!= v)
    assertEquivalent(castInt(f) >= v.toInt, trueIfNotNull(f))
    assertEquivalent(castInt(f) === v.toInt, f === v)
    assertEquivalent(castInt(f) <=> v.toInt, f <=> v)
    assertEquivalent(castInt(f) <= v.toInt, f === v)
    assertEquivalent(castInt(f) < v.toInt, falseIfNotNull(f))

    val d = Float.NegativeInfinity
    assertEquivalent(castDouble(f2) > d.toDouble, f2 =!= d)
    assertEquivalent(castDouble(f2) >= d.toDouble, trueIfNotNull(f2))
    assertEquivalent(castDouble(f2) === d.toDouble, f2 === d)
    assertEquivalent(castDouble(f2) <=> d.toDouble, f2 <=> d)
    assertEquivalent(castDouble(f2) <= d.toDouble, f2 === d)
    assertEquivalent(castDouble(f2) < d.toDouble, falseIfNotNull(f2))

    // Double.NegativeInfinity == Float.NegativeInfinity
    val d2 = Double.NegativeInfinity
    assertEquivalent(castDouble(f2) > d2, f2 =!= d)
    assertEquivalent(castDouble(f2) >= d2, trueIfNotNull(f2))
    assertEquivalent(castDouble(f2) === d2, f2 === d)
    assertEquivalent(castDouble(f2) <=> d2, f2 <=> d)
    assertEquivalent(castDouble(f2) <= d2, f2 === d)
    assertEquivalent(castDouble(f2) < d2, falseIfNotNull(f2))
  }

  test("unwrap casts when literal < min") {
    // Literal is below the narrow type's range: comparisons collapse to constants.
    val v: Int = negativeInt
    assertEquivalent(castInt(f) > v, trueIfNotNull(f))
    assertEquivalent(castInt(f) >= v, trueIfNotNull(f))
    assertEquivalent(castInt(f) === v, falseIfNotNull(f))
    assertEquivalent(castInt(f) <=> v, false)
    assertEquivalent(castInt(f) <= v, falseIfNotNull(f))
    assertEquivalent(castInt(f) < v, falseIfNotNull(f))
  }

  test("unwrap casts when literal is within range (min, max) or fromType has no range") {
    // In-range literals that survive the downcast exactly: the cast is simply dropped
    // and the literal narrowed.
    Seq(300, 500, 32766, -6000, -32767).foreach(v => {
      assertEquivalent(castInt(f) > v, f > v.toShort)
      assertEquivalent(castInt(f) >= v, f >= v.toShort)
      assertEquivalent(castInt(f) === v, f === v.toShort)
      assertEquivalent(castInt(f) <=> v, f <=> v.toShort)
      assertEquivalent(castInt(f) <= v, f <= v.toShort)
      assertEquivalent(castInt(f) < v, f < v.toShort)
    })

    Seq(3.14.toFloat.toDouble, -1000.0.toFloat.toDouble,
      20.0.toFloat.toDouble, -2.414.toFloat.toDouble,
      Float.MinValue.toDouble, Float.MaxValue.toDouble, Float.PositiveInfinity.toDouble
    ).foreach(v => {
      assertEquivalent(castDouble(f2) > v, f2 > v.toFloat)
      assertEquivalent(castDouble(f2) >= v, f2 >= v.toFloat)
      assertEquivalent(castDouble(f2) === v, f2 === v.toFloat)
      assertEquivalent(castDouble(f2) <=> v, f2 <=> v.toFloat)
      assertEquivalent(castDouble(f2) <= v, f2 <= v.toFloat)
      assertEquivalent(castDouble(f2) < v, f2 < v.toFloat)
    })

    Seq(decimal2(100.20), decimal2(-200.50)).foreach(v => {
      assertEquivalent(castDecimal2(f3) > v, f3 > decimal(v))
      assertEquivalent(castDecimal2(f3) >= v, f3 >= decimal(v))
      assertEquivalent(castDecimal2(f3) === v, f3 === decimal(v))
      assertEquivalent(castDecimal2(f3) <=> v, f3 <=> decimal(v))
      assertEquivalent(castDecimal2(f3) <= v, f3 <= decimal(v))
      assertEquivalent(castDecimal2(f3) < v, f3 < decimal(v))
    })
  }

  test("unwrap cast when literal is within range (min, max) AND has round up or down") {
    // Literal is in range but not exactly representable in the narrow type, so the
    // comparison operator has to be adjusted for the rounding direction.

    // Cases for rounding down
    var doubleValue = 100.6
    assertEquivalent(castDouble(f) > doubleValue, f > doubleValue.toShort)
    assertEquivalent(castDouble(f) >= doubleValue, f > doubleValue.toShort)
    assertEquivalent(castDouble(f) === doubleValue, falseIfNotNull(f))
    assertEquivalent(castDouble(f) <=> doubleValue, false)
    assertEquivalent(castDouble(f) <= doubleValue, f <= doubleValue.toShort)
    assertEquivalent(castDouble(f) < doubleValue, f <= doubleValue.toShort)

    // Cases for rounding up: 3.14 will be rounded to 3.14000010... after casting to float
    doubleValue = 3.14
    assertEquivalent(castDouble(f2) > doubleValue, f2 >= doubleValue.toFloat)
    assertEquivalent(castDouble(f2) >= doubleValue, f2 >= doubleValue.toFloat)
    assertEquivalent(castDouble(f2) === doubleValue, falseIfNotNull(f2))
    assertEquivalent(castDouble(f2) <=> doubleValue, false)
    assertEquivalent(castDouble(f2) <= doubleValue, f2 < doubleValue.toFloat)
    assertEquivalent(castDouble(f2) < doubleValue, f2 < doubleValue.toFloat)

    // Another case: 400.5678 is rounded up to 400.57
    val decimalValue = decimal2(400.5678)
    assertEquivalent(castDecimal2(f3) > decimalValue, f3 >= decimal(decimalValue))
    assertEquivalent(castDecimal2(f3) >= decimalValue, f3 >= decimal(decimalValue))
    assertEquivalent(castDecimal2(f3) === decimalValue, falseIfNotNull(f3))
    assertEquivalent(castDecimal2(f3) <=> decimalValue, false)
    assertEquivalent(castDecimal2(f3) <= decimalValue, f3 < decimal(decimalValue))
    assertEquivalent(castDecimal2(f3) < decimalValue, f3 < decimal(decimalValue))
  }

  test("unwrap casts when cast is on rhs") {
    // Same rewrites must apply when the cast sits on the right-hand side.
    val v = Short.MaxValue
    assertEquivalent(Literal(v.toInt) < castInt(f), falseIfNotNull(f))
    assertEquivalent(Literal(v.toInt) <= castInt(f), Literal(v) === f)
    assertEquivalent(Literal(v.toInt) === castInt(f), Literal(v) === f)
    assertEquivalent(Literal(v.toInt) <=> castInt(f), Literal(v) <=> f)
    assertEquivalent(Literal(v.toInt) >= castInt(f), trueIfNotNull(f))
    assertEquivalent(Literal(v.toInt) > castInt(f), f =!= v)

    assertEquivalent(Literal(30) <= castInt(f), Literal(30.toShort, ShortType) <= f)
  }

  test("unwrap cast should skip when expression is non-deterministic or foldable") {
    Seq(positiveLong, negativeLong).foreach (v => {
      // Rand is non-deterministic; Cast(Literal(...)) is foldable -- both must be left alone.
      val e = Cast(Rand(0), LongType) <=> v
      assertEquivalent(e, e, evaluate = false)
      val e2 = Cast(Literal(30), LongType) >= v
      assertEquivalent(e2, e2, evaluate = false)
    })
  }

  test("unwrap casts when literal is null") {
    // Comparisons against a null literal fold to null, except null-safe equality.
    val intLit = Literal.create(null, IntegerType)
    val nullLit = Literal.create(null, BooleanType)
    assertEquivalent(castInt(f) > intLit, nullLit)
    assertEquivalent(castInt(f) >= intLit, nullLit)
    assertEquivalent(castInt(f) === intLit, nullLit)
    assertEquivalent(castInt(f) <=> intLit, IsNull(castInt(f)))
    assertEquivalent(castInt(f) <= intLit, nullLit)
    assertEquivalent(castInt(f) < intLit, nullLit)
  }

  test("unwrap casts should skip if downcast failed") {
    // 123456.1234 does not fit in decimal(5, 2), so the rewrite must not fire.
    val decimalValue = decimal2(123456.1234)
    assertEquivalent(castDecimal2(f3) === decimalValue, castDecimal2(f3) === decimalValue)
  }

  test("unwrap cast should skip if cannot coerce type") {
    assertEquivalent(Cast(f, ByteType) > 100.toByte, Cast(f, ByteType) > 100.toByte)
  }

  test("test getRange()") {
    assert(Some((Byte.MinValue, Byte.MaxValue)) === getRange(ByteType))
    assert(Some((Short.MinValue, Short.MaxValue)) === getRange(ShortType))
    assert(Some((Int.MinValue, Int.MaxValue)) === getRange(IntegerType))
    assert(Some((Long.MinValue, Long.MaxValue)) === getRange(LongType))

    // Floating-point ranges are (-Infinity, NaN): NaN sorts greatest in Spark's ordering.
    val floatRange = getRange(FloatType)
    assert(floatRange.isDefined)
    val (floatMin, floatMax) = floatRange.get
    assert(floatMin.isInstanceOf[Float])
    assert(floatMin.asInstanceOf[Float].isNegInfinity)
    assert(floatMax.isInstanceOf[Float])
    assert(floatMax.asInstanceOf[Float].isNaN)

    val doubleRange = getRange(DoubleType)
    assert(doubleRange.isDefined)
    val (doubleMin, doubleMax) = doubleRange.get
    assert(doubleMin.isInstanceOf[Double])
    assert(doubleMin.asInstanceOf[Double].isNegInfinity)
    assert(doubleMax.isInstanceOf[Double])
    assert(doubleMax.asInstanceOf[Double].isNaN)

    // Decimals have no finite range here.
    assert(getRange(DecimalType(5, 2)).isEmpty)
  }

  test("SPARK-35316: unwrap should support In/InSet predicate.") {
    val longLit = Literal.create(null, LongType)
    val intLit = Literal.create(null, IntegerType)
    val shortLit = Literal.create(null, ShortType)

    // Checks the rewrite on both the In predicate and its InSet equivalent.
    def checkInAndInSet(in: In, expected: Expression): Unit = {
      assertEquivalent(in, expected)
      val toInSet = (in: In) => InSet(in.value, HashSet() ++ in.list.map(_.eval()))
      val expectedInSet = expected match {
        case expectedIn: In =>
          toInSet(expectedIn)
        case Or(falseIfNotNull: And, expectedIn: In) =>
          Or(falseIfNotNull, toInSet(expectedIn))
      }
      assertEquivalent(toInSet(in), expectedInSet)
    }

    checkInAndInSet(
      In(Cast(f, LongType), Seq(1.toLong, 2.toLong, 3.toLong)),
      f.in(1.toShort, 2.toShort, 3.toShort))

    // in.list contains the value which out of `fromType` range
    checkInAndInSet(
      In(Cast(f, LongType), Seq(1.toLong, Int.MaxValue.toLong, Long.MaxValue)),
      Or(falseIfNotNull(f), f.in(1.toShort)))

    // in.list only contains the value which out of `fromType` range
    checkInAndInSet(
      In(Cast(f, LongType), Seq(Int.MaxValue.toLong, Long.MaxValue)),
      Or(falseIfNotNull(f), f.in()))

    // in.list is empty
    checkInAndInSet(
      In(Cast(f, IntegerType), Seq.empty), Cast(f, IntegerType).in())

    // in.list contains null value
    checkInAndInSet(
      In(Cast(f, IntegerType), Seq(intLit)), In(Cast(f, IntegerType), Seq(intLit)))
    checkInAndInSet(
      In(Cast(f, IntegerType), Seq(intLit, intLit)), In(Cast(f, IntegerType), Seq(intLit, intLit)))
    checkInAndInSet(
      In(Cast(f, IntegerType), Seq(intLit, 1)), f.in(shortLit, 1.toShort))
    checkInAndInSet(
      In(Cast(f, LongType), Seq(longLit, 1.toLong, Long.MaxValue)),
      Or(falseIfNotNull(f), f.in(shortLit, 1.toShort))
    )
  }

  test("SPARK-36130: unwrap In should skip when in.list contains an expression that " +
    "is not literal") {
    // Non-literal list elements (here an arithmetic expression) must block the rewrite.
    val add = Cast(f2, DoubleType) + 1.0d
    val doubleLit = Literal.create(null, DoubleType)
    assertEquivalent(In(Cast(f2, DoubleType), Seq(add)), In(Cast(f2, DoubleType), Seq(add)))
    assertEquivalent(
      In(Cast(f2, DoubleType), Seq(doubleLit, add)),
      In(Cast(f2, DoubleType), Seq(doubleLit, add)))
    assertEquivalent(
      In(Cast(f2, DoubleType), Seq(doubleLit, 1.0d, add)),
      In(Cast(f2, DoubleType), Seq(doubleLit, 1.0d, add)))
    assertEquivalent(
      In(Cast(f2, DoubleType), Seq(1.0d, add)),
      In(Cast(f2, DoubleType), Seq(1.0d, add)))
    assertEquivalent(
      In(Cast(f2, DoubleType), Seq(0.0d, 1.0d, add)),
      In(Cast(f2, DoubleType), Seq(0.0d, 1.0d, add)))
  }

  test("SPARK-36607: Support BooleanType in UnwrapCastInBinaryComparison") {
    // Booleans have the range (false, true); int literals below/at/above that range
    // produce the same family of rewrites as the numeric cases above.
    assert(Some((false, true)) === getRange(BooleanType))

    val n = -1
    assertEquivalent(castInt(f4) > n, trueIfNotNull(f4))
    assertEquivalent(castInt(f4) >= n, trueIfNotNull(f4))
    assertEquivalent(castInt(f4) === n, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) <=> n, false)
    assertEquivalent(castInt(f4) <= n, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) < n, falseIfNotNull(f4))

    val z = 0
    assertEquivalent(castInt(f4) > z, f4 =!= false)
    assertEquivalent(castInt(f4) >= z, trueIfNotNull(f4))
    assertEquivalent(castInt(f4) === z, f4 === false)
    assertEquivalent(castInt(f4) <=> z, f4 <=> false)
    assertEquivalent(castInt(f4) <= z, f4 === false)
    assertEquivalent(castInt(f4) < z, falseIfNotNull(f4))

    val o = 1
    assertEquivalent(castInt(f4) > o, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) >= o, f4 === true)
    assertEquivalent(castInt(f4) === o, f4 === true)
    assertEquivalent(castInt(f4) <=> o, f4 <=> true)
    assertEquivalent(castInt(f4) <= o, trueIfNotNull(f4))
    assertEquivalent(castInt(f4) < o, f4 =!= true)

    val t = 2
    assertEquivalent(castInt(f4) > t, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) >= t, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) === t, falseIfNotNull(f4))
    assertEquivalent(castInt(f4) <=> t, false)
    assertEquivalent(castInt(f4) <= t, trueIfNotNull(f4))
    assertEquivalent(castInt(f4) < t, trueIfNotNull(f4))
  }

  // Helpers that build the cast expressions under test.
  private def castInt(e: Expression): Expression = Cast(e, IntegerType)
  private def castDouble(e: Expression): Expression = Cast(e, DoubleType)
  private def castDecimal2(e: Expression): Expression = Cast(e, DecimalType(10, 4))

  // decimal: narrow decimal(5, 2); decimal2: wide decimal(10, 4).
  private def decimal(v: Decimal): Decimal = Decimal(v.toJavaBigDecimal, 5, 2)
  private def decimal2(v: BigDecimal): Decimal = Decimal(v, 10, 4)

  /**
   * Asserts that optimizing a filter on `e1` yields a filter on `e2`, and (when
   * `evaluate` is true) that both expressions evaluate identically on a fixed set of
   * sample rows covering nulls, extremes, and ordinary values.
   */
  private def assertEquivalent(e1: Expression, e2: Expression, evaluate: Boolean = true): Unit = {
    val plan = testRelation.where(e1).analyze
    val actual = Optimize.execute(plan)
    val expected = testRelation.where(e2).analyze
    comparePlans(actual, expected)

    if (evaluate) {
      Seq(
        (100.toShort, 3.14.toFloat, decimal2(100), true),
        (-300.toShort, 3.1415927.toFloat, decimal2(-3000.50), false),
        (null, Float.NaN, decimal2(12345.6789), null),
        (null, null, null, null),
        (Short.MaxValue, Float.PositiveInfinity, decimal2(Short.MaxValue), true),
        (Short.MinValue, Float.NegativeInfinity, decimal2(Short.MinValue), false),
        (0.toShort, Float.MaxValue, decimal2(0), null),
        (0.toShort, Float.MinValue, decimal2(0.01), null)
      ).foreach(v => {
        val row = create_row(v._1, v._2, v._3, v._4)
        checkEvaluation(e1, e2.eval(row), row)
      })
    }
  }
}
| nchammas/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/UnwrapCastInBinaryComparisonSuite.scala | Scala | apache-2.0 | 16,448 |
package play.api.libs.json
import concurrent.{Await, Future, ExecutionContext}
import java.util.concurrent.Executors
import concurrent.duration.Duration
/**
* Performance test for JsValue serialization and deserialization.
*
* Very crude, but does the job. Easiest way to run this is in SBT:
*
* test:run-main play.api.libs.json.JsonPerformanceTest
*/
object JsonPerformanceTest extends App {

  println("Running serialization test...")
  println("Serialization run 1: " + testSerialization() + "ms")
  println("Serialization run 2: " + testSerialization() + "ms")
  println("Serialization run 3: " + testSerialization() + "ms")
  println("Deserialization run 1: " + testDeserialization() + "ms")
  println("Deserialization run 2: " + testDeserialization() + "ms")
  println("Deserialization run 3: " + testDeserialization() + "ms")
  println("Large Array Deserialization run 1: " + testLargeArrayDeserialization() + "ms")
  println("Large Array Deserialization run 2: " + testLargeArrayDeserialization() + "ms")
  println("Large Array Deserialization run 3: " + testLargeArrayDeserialization() + "ms")
  println("Large Object Deserialization run 1: " + testLargeObjectDeserialization() + "ms")
  println("Large Object Deserialization run 2: " + testLargeObjectDeserialization() + "ms")
  println("Large Object Deserialization run 3: " + testLargeObjectDeserialization() + "ms")

  // Moderately nested document used by the serialization/deserialization benchmarks.
  lazy val jsvalue = Json.obj(
    "f1" -> Json.obj(
      "f1" -> "string",
      "f2" -> "string",
      "f3" -> "string",
      "f4" -> 10,
      "f5" -> Json.arr(
        "string",
        "string",
        "string",
        "string",
        "string"
      ),
      "f6" -> Json.obj(
        "f1" -> 10,
        "f2" -> 20,
        "f3" -> 30,
        "f4" -> "string"
      )
    ),
    "f2" -> "string",
    "f3" -> "string",
    "f4" -> 10,
    "f5" -> true,
    "f6" -> false,
    "f7" -> Json.arr(1, 2, 3, 4, 5, 6)
  )

  lazy val json = Json.stringify(jsvalue)

  // A single object holding one very large array (64K ints).
  lazy val largeArrayJsValue = Json.obj(
    "f1" -> Json.toJson((1 to 65536))
  )

  lazy val largeArrayJson = Json.stringify(largeArrayJsValue)

  // An object with very many (8192) distinct fields.
  lazy val largeObjectJsValue = (1 to 8192).map(i => Json.obj("f" + i -> "obj")).reduce(_++_)

  lazy val largeObjectJson = Json.stringify(largeObjectJsValue)

  /** Measures stringifying the nested document `times` times over `threads` threads. */
  def testSerialization(times: Int = 10000000, threads: Int = 100): Long = {
    runTest(times, threads) {
      Json.stringify(jsvalue)
    }
  }

  /** Measures parsing the nested document. */
  def testDeserialization(times: Int = 1000000, threads: Int = 100): Long = {
    runTest(times, threads) {
      Json.parse(json)
    }
  }

  /** Measures parsing the large-array document. */
  def testLargeArrayDeserialization(times: Int = 100, threads: Int = 10): Long = {
    runTest(times, threads) {
      Json.parse(largeArrayJson)
    }
  }

  /** Measures parsing the large-object document. */
  def testLargeObjectDeserialization(times: Int = 100, threads: Int = 100): Long = {
    runTest(times, threads) {
      Json.parse(largeObjectJson)
    }
  }

  /**
   * Runs `test` roughly `times` times spread evenly over `threads` threads and returns
   * the elapsed wall-clock time in milliseconds.
   *
   * @param times   total number of invocations (rounded down to a multiple of `threads`)
   * @param threads number of worker threads in the dedicated pool
   * @param test    the (side-effecting) operation to benchmark, evaluated by name
   */
  def runTest(times: Int, threads: Int)(test: => Unit): Long = {
    val timesPerThread = times / threads
    val executor = Executors.newFixedThreadPool(threads)
    try {
      // Use the dedicated pool for BOTH the worker futures and the Future.sequence
      // combinators: previously Future.sequence implicitly ran on the global
      // ExecutionContext, mixing two pools into the measurement.
      implicit val context: ExecutionContext = ExecutionContext.fromExecutor(executor)
      val start = System.currentTimeMillis()
      Await.ready(Future.sequence(List.range(0, threads).map { _ =>
        Future {
          // `until`, not `to`: run exactly timesPerThread iterations per thread
          // (the old `0 to timesPerThread` ran one extra iteration each).
          for (_ <- 0 until timesPerThread) {
            test
          }
        }
      }), Duration.Inf)
      System.currentTimeMillis() - start
    } finally {
      executor.shutdownNow()
    }
  }
}
| michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/play-json/src/test/scala/play/api/libs/json/JsonPerformanceTest.scala | Scala | mit | 3,514 |
package io.github.hamsters
import org.scalacheck.Prop.forAll
import org.scalacheck.{Arbitrary, Properties}
import scala.reflect._
// Law suites for the concrete Monoid instances shipped with the library.
class BooleanMonoidLaws extends MonoidLaws[Boolean](Monoid.booleanMonoid)
class IntMonoidLaws extends MonoidLaws[Int](Monoid.intMonoid)
class LongMonoidLaws extends MonoidLaws[Long](Monoid.longMonoid)
class BigDecimalMonoidLaws extends MonoidLaws[BigDecimal](Monoid.bigDecimalMonoid)

// The float and double monoids break the laws (floating-point addition is not
// associative): https://github.com/scalaz/scalaz/issues/334
//class FloatMonoidLaws extends io.github.hamsters.MonoidLaws[Float](Monoid.floatMonoid)
//class DoubleMonoidLaws extends io.github.hamsters.MonoidLaws[Double](Monoid.doubleMonoid)

class StringMonoidLaws extends MonoidLaws[String](Monoid.stringMonoid)
class ListMonoidLaws extends MonoidLaws[List[String]](Monoid.listMonoid)
class SeqMonoidLaws extends MonoidLaws[Seq[String]](Monoid.seqMonoid)
class OptionMonoidLaws extends MonoidLaws[Option[List[Int]]](Monoid.optionMonoid)
/**
 * Property-based checks of the monoid laws (identity, closure, associativity) for a
 * given `Monoid[T]`, over ScalaCheck-generated values of `T`.
 *
 * In the law comments below, `o` denotes the monoid operation `monoid.compose`.
 */
abstract class MonoidLaws[T: ClassTag](monoid: Monoid[T])(implicit arbitrary: Arbitrary[T]) extends Properties(s"Monoid for ${classTag[T]}") {

  // The identity element of the monoid.
  val id: T = monoid.empty

  // n o id == id o n == n
  property("identity") = forAll { n: T =>
    monoid.compose(n, id) == n && monoid.compose(id, n) == n
  }

  // closure: for all l, r the result l o r is still a T
  // NOTE(review): because of type erasure, `isInstanceOf[T]` is a weak check -- it mostly
  // verifies that `compose` does not throw. Confirm whether a stronger check is wanted.
  property("composition") = forAll { (l: T, r: T) =>
    monoid.compose(l, r).isInstanceOf[T]
  }

  // l o (m o r) == (l o m) o r
  property("associativity") = forAll { (l: T, m: T, r: T) =>
    val lm = monoid.compose(l,m)
    val mr = monoid.compose(m,r)
    monoid.compose(lm, r) == monoid.compose(l, mr)
  }
}
| dgouyette/hamsters | shared/src/test/scala/io/github/hamsters/MonoidLaws.scala | Scala | apache-2.0 | 1,651 |
package com.sksamuel.scapegoat.inspections
import com.sksamuel.scapegoat.{Levels, Inspection, Reporter}
import scala.reflect.runtime._
/** @author Stephen Samuel */
/**
 * Inspection that warns when a method takes no parameter lists yet returns `Unit`:
 * by Scala convention a parameterless `def` should be a pure accessor, while
 * side-effecting (Unit-returning) methods should declare an empty parameter list `()`.
 */
class ParameterlessMethodReturnsUnit extends Inspection {

  override def traverser(reporter: Reporter) = new universe.Traverser {
    import scala.reflect.runtime.universe._

    override def traverse(tree: scala.reflect.runtime.universe.Tree): Unit = {
      tree match {
        // A def with no parameter lists whose declared return type prints as "Unit".
        // NOTE(review): the `tpt.tpe.toString == "Unit"` comparison is string-based --
        // confirm it also covers aliased or fully-qualified spellings of Unit.
        case d@DefDef(_, name, _, vparamss, tpt, _) if tpt.tpe.toString == "Unit" && vparamss.isEmpty =>
          // Method name is truncated to 100 chars for the report snippet.
          reporter.warn("Parameterless methods returns unit", tree, Levels.Warning, name.toString.take(100))
        case _ => super.traverse(tree)
      }
    }
  }
}
| RichardBradley/scapegoat | src/main/scala/com/sksamuel/scapegoat/inspections/ParameterlessMethodReturnsUnit.scala | Scala | apache-2.0 | 715 |
package org.atnos.eff.concurrent
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import scala.scalajs.js.timers._
trait Schedulers {

  /**
   * Default Scheduler for JavaScript, backed by the JS `setTimeout`/`clearTimeout`
   * timer functions.
   */
  def default: Scheduler = new Scheduler {

    // Runs `timedout` once after `duration`; the returned thunk cancels the timer.
    def schedule(timedout: =>Unit, duration: FiniteDuration): () => Unit = {
      val handle = setTimeout(duration)(timedout)
      () => clearTimeout(handle)
    }

    // Not supported on this JS implementation: always throws via sys.error.
    def delay(duration: FiniteDuration): Future[Unit] =
      sys.error("delay(duration) not implemented")

    override def toString = "Scheduler"
  }
}

object Schedulers extends Schedulers
| etorreborre/eff-cats | js/src/main/scala/org/atnos/eff/concurrent/Schedulers.scala | Scala | mit | 621 |
package mypipe.snapshotter
import akka.util.Timeout
import com.github.mauricio.async.db.Connection
import mypipe.{ Queries, ActorSystemSpec, DatabaseSpec, UnitSpec }
import org.scalatest.BeforeAndAfterAll
import org.slf4j.LoggerFactory
import scala.concurrent.duration._
import scala.concurrent.Await
/**
 * Integration-style test: inserts two rows into the test database, snapshots the
 * `mypipe.user` table via MySQLSnapshotter, and verifies both the "show master status"
 * result and the selected row data.
 */
class SnapshotterSpec extends UnitSpec with DatabaseSpec with ActorSystemSpec with BeforeAndAfterAll {

  val log = LoggerFactory.getLogger(getClass)
  implicit val timeout = Timeout(1.second)

  "Snapshotter" should "be able to fetch table information" in {

    implicit lazy val c: Connection = db.connection

    val future = {
      // make inserts
      Await.result(db.connection.sendQuery(Queries.INSERT.statement(id = "123")), 2000.millis)
      Await.result(db.connection.sendQuery(Queries.INSERT.statement(id = "124")), 2000.millis)

      val tables = Seq("mypipe.user")

      // Snapshot returns (name -> result-set) pairs; extract the raw cell values so
      // each entry becomes name -> rows-of-cells.
      val colData = MySQLSnapshotter.snapshot(tables) map { results ⇒
        results.map { result ⇒
          val colData = result._2.rows.map(identity) map { rows ⇒
            val colCount = rows.columnNames.length
            rows.map { row ⇒
              (0 until colCount) map { i ⇒
                row(i)
              }
            }
          }

          result._1 -> colData.getOrElse(Seq.empty)
        }
      }

      // Validate each result: the master-status entry must contain at least a binlog
      // file and position; the mypipe.user select must contain exactly the two rows
      // inserted above. Anything else passes vacuously.
      val ret = colData
        .map { colData ⇒
          colData.map {
            case (dbAndTable, rows) ⇒
              if (dbAndTable.equals("showMasterStatus")) {
                log.info(s"found show master status data: length:${rows.length} data:${rows.map(_.toArray.map(c ⇒ c.getClass.getName + ":" + c.toString).mkString(",")).mkString("\\n")}")
                rows.length == 1 &&
                  rows(0).length >= 2 // file and position at least
              } else if (dbAndTable.startsWith("mypipe.") && rows.length > 0) {
                log.info(s"found select data: length:${rows.length} data:${rows.map(_.toArray.map(c ⇒ c.getClass.getName + ":" + c.toString).mkString(",")).mkString("\\n")}")
                rows.length == 2 &&
                  rows(0).toArray.deep.equals(Array(123, "username", "password", 0).deep) &&
                  rows(1).toArray.deep.equals(Array(124, "username", "password", 0).deep)
              } else {
                true
              }
          }
        }

      // Overall success: no individual check returned false.
      ret.map { results ⇒
        !results.contains(false)
      }
    }

    try {
      val ret = Await.result(future, 10.seconds)
      assert(ret)
    } catch {
      case e: Exception ⇒
        // NOTE(review): `getStackTraceString` is deprecated in newer Scala versions.
        log.error(s"Caught exception: ${e.getMessage} at ${e.getStackTraceString}")
        assert(false)
    }
  }
}
| tramchamploo/mypipe | mypipe-snapshotter/src/test/scala/mypipe/snapshotter/SnapshotterSpec.scala | Scala | apache-2.0 | 2,664 |
package shapeless.contrib.spire
import spire.math._
import spire.algebra._
import shapeless._
import shapeless.contrib._
trait Empty {

  // Base-case instance for HNil: on the empty product every operation is trivial, so a
  // single anonymous object can implement Order plus all three group structures at once.
  def emptyProduct = new Order[HNil] with AbGroup[HNil] with AdditiveAbGroup[HNil] with MultiplicativeAbGroup[HNil] {
    // All HNil values are equal.
    override def eqv(x: HNil, y: HNil) = true
    override def neqv(x: HNil, y: HNil) = false
    def compare(x: HNil, y: HNil) = 0

    // Group: operation, identity and inverse all yield HNil.
    def op(x: HNil, y: HNil) = HNil
    def id = HNil
    def inverse(a: HNil) = HNil

    // Additive structure.
    def plus(x: HNil, y: HNil) = HNil
    def zero = HNil
    def negate(x: HNil) = HNil
    override def minus(x: HNil, y: HNil) = HNil
    override def additive: AbGroup[HNil] = this

    // Multiplicative structure.
    def times(x: HNil, y: HNil) = HNil
    def one = HNil
    override def reciprocal(x: HNil) = HNil
    def div(x: HNil, y: HNil) = HNil
    override def multiplicative: AbGroup[HNil] = this
  }
}
// Products
//
// Inductive-step instances: given an instance for the head type F and one for the tail
// HList T, derive the instance for F :: T by applying the operation component-wise.
// NOTE(review): `λ` appears to be a type member for `F :: T` and `F`/`T` the member
// instances, all presumably provided by the `Product` base trait -- confirm there.

trait ProductEq[F, T <: HList]
  extends Eq[F :: T]
  with Product[Eq, F, T] {

  def eqv(x: λ, y: λ) =
    F.eqv(x.head, y.head) && T.eqv(x.tail, y.tail)

  override def neqv(x: λ, y: λ) =
    F.neqv(x.head, y.head) || T.neqv(x.tail, y.tail)
}

trait ProductOrder[F, T <: HList]
  extends ProductEq[F, T]
  with Order[F :: T]
  with Product[Order, F, T] {

  override def eqv(x: λ, y: λ) =
    super[ProductEq].eqv(x, y)

  // Lexicographic: compare heads first, fall back to tails on a tie.
  // NOTE(review): ties continue to the tail only when the head comparison is >= 0;
  // a negative head result short-circuits -- this matches lexicographic order.
  def compare(x: λ, y: λ) = {
    val headOrder = F.compare(x.head, y.head)
    if (headOrder < 0)
      headOrder
    else
      T.compare(x.tail, y.tail)
  }
}

trait ProductSemigroup[F, T <: HList]
  extends Semigroup[F :: T]
  with Product[Semigroup, F, T] {

  def op(x: λ, y: λ) =
    F.op(x.head, y.head) :: T.op(x.tail, y.tail)
}

trait ProductMonoid[F, T <: HList]
  extends ProductSemigroup[F, T]
  with Monoid[F :: T]
  with Product[Monoid, F, T] {

  def id = F.id :: T.id
}

trait ProductGroup[F, T <: HList]
  extends ProductMonoid[F, T]
  with Group[F :: T]
  with Product[Group, F, T] {

  def inverse(a: λ) =
    F.inverse(a.head) :: T.inverse(a.tail)
}

trait ProductAbGroup[F, T <: HList]
  extends ProductGroup[F, T]
  with AbGroup[F :: T]

trait ProductAdditiveSemigroup[F, T <: HList]
  extends AdditiveSemigroup[F :: T]
  with Product[AdditiveSemigroup, F, T] {

  def plus(x: λ, y: λ) =
    F.plus(x.head, y.head) :: T.plus(x.tail, y.tail)
}

trait ProductAdditiveMonoid[F, T <: HList]
  extends ProductAdditiveSemigroup[F, T]
  with AdditiveMonoid[F :: T]
  with Product[AdditiveMonoid, F, T] {

  def zero = F.zero :: T.zero
}

trait ProductAdditiveGroup[F, T <: HList]
  extends ProductAdditiveMonoid[F, T]
  with AdditiveGroup[F :: T]
  with Product[AdditiveGroup, F, T] {

  def negate(a: λ) =
    F.negate(a.head) :: T.negate(a.tail)

  override def minus(x: λ, y: λ) =
    F.minus(x.head, y.head) :: T.minus(x.tail, y.tail)
}

trait ProductAdditiveAbGroup[F, T <: HList]
  extends ProductAdditiveGroup[F, T]
  with AdditiveAbGroup[F :: T]

trait ProductMultiplicativeSemigroup[F, T <: HList]
  extends MultiplicativeSemigroup[F :: T]
  with Product[MultiplicativeSemigroup, F, T] {

  def times(x: λ, y: λ) =
    F.times(x.head, y.head) :: T.times(x.tail, y.tail)
}

trait ProductMultiplicativeMonoid[F, T <: HList]
  extends ProductMultiplicativeSemigroup[F, T]
  with MultiplicativeMonoid[F :: T]
  with Product[MultiplicativeMonoid, F, T] {

  def one = F.one :: T.one
}

trait ProductMultiplicativeGroup[F, T <: HList]
  extends ProductMultiplicativeMonoid[F, T]
  with MultiplicativeGroup[F :: T]
  with Product[MultiplicativeGroup, F, T] {

  override def reciprocal(a: λ) =
    F.reciprocal(a.head) :: T.reciprocal(a.tail)

  def div(x: λ, y: λ) =
    F.div(x.head, y.head) :: T.div(x.tail, y.tail)
}

trait ProductMultiplicativeAbGroup[F, T <: HList]
  extends ProductMultiplicativeGroup[F, T]
  with MultiplicativeAbGroup[F :: T]
// Isos
//
// Derive an instance for A from an instance for an isomorphic type B: map the inputs
// through `to`, apply B's operation, and map the result back through `from`.
// NOTE(review): `to`, `from` and the member instance `B` are presumably provided by the
// `Isomorphic` base trait -- confirm there.

trait IsomorphicSemigroup[A, B]
  extends Semigroup[A]
  with Isomorphic[Semigroup, A, B] {

  def op(x: A, y: A) =
    from(B.op(to(x), to(y)))
}

trait IsomorphicMonoid[A, B]
  extends IsomorphicSemigroup[A, B]
  with Monoid[A]
  with Isomorphic[Monoid, A, B] {

  def id = from(B.id)
}

trait IsomorphicGroup[A, B]
  extends IsomorphicMonoid[A, B]
  with Group[A]
  with Isomorphic[Group, A, B] {

  def inverse(a: A) =
    from(B.inverse(to(a)))
}

trait IsomorphicAbGroup[A, B]
  extends IsomorphicGroup[A, B]
  with AbGroup[A]

trait IsomorphicAdditiveSemigroup[A, B]
  extends AdditiveSemigroup[A]
  with Isomorphic[AdditiveSemigroup, A, B] {

  def plus(x: A, y: A) =
    from(B.plus(to(x), to(y)))
}

trait IsomorphicAdditiveMonoid[A, B]
  extends IsomorphicAdditiveSemigroup[A, B]
  with AdditiveMonoid[A]
  with Isomorphic[AdditiveMonoid, A, B] {

  def zero = from(B.zero)
}

trait IsomorphicAdditiveGroup[A, B]
  extends IsomorphicAdditiveMonoid[A, B]
  with AdditiveGroup[A]
  with Isomorphic[AdditiveGroup, A, B] {

  def negate(a: A) =
    from(B.negate(to(a)))

  override def minus(x: A, y: A) =
    from(B.minus(to(x), to(y)))
}

trait IsomorphicAdditiveAbGroup[A, B]
  extends IsomorphicAdditiveGroup[A, B]
  with AdditiveAbGroup[A]

trait IsomorphicMultiplicativeSemigroup[A, B]
  extends MultiplicativeSemigroup[A]
  with Isomorphic[MultiplicativeSemigroup, A, B] {

  def times(x: A, y: A) =
    from(B.times(to(x), to(y)))
}

trait IsomorphicMultiplicativeMonoid[A, B]
  extends IsomorphicMultiplicativeSemigroup[A, B]
  with MultiplicativeMonoid[A]
  with Isomorphic[MultiplicativeMonoid, A, B] {

  def one = from(B.one)
}

trait IsomorphicMultiplicativeGroup[A, B]
  extends IsomorphicMultiplicativeMonoid[A, B]
  with MultiplicativeGroup[A]
  with Isomorphic[MultiplicativeGroup, A, B] {

  override def reciprocal(a: A) =
    from(B.reciprocal(to(a)))

  def div(x: A, y: A) =
    from(B.div(to(x), to(y)))
}

trait IsomorphicMultiplicativeAbGroup[A, B]
  extends IsomorphicMultiplicativeGroup[A, B]
  with MultiplicativeAbGroup[A]

// vim: expandtab:ts=2:sw=2
| typelevel/shapeless-contrib | spire/src/main/scala/typeclass.scala | Scala | mit | 5,910 |
/**
* Created by alix on 5/17/17.
*/
/**
 * Domain exceptions for Ripple network interaction.
 *
 * Case-class exceptions: each carries a fixed human-readable message, and some carry
 * extra detail fields (case-class constructor parameters are already `val`s, so the
 * previously-explicit `val` modifiers were redundant and have been dropped).
 */
package object exceptions {
  /** The Ripple network did not answer at all. */
  case class RippleException() extends Exception("The Ripple Network is not responding")
  /** An established connection to the network was lost. */
  case class DisconnectedException() extends Exception("Connection lost to the Ripple Network")
  /** The recipient account requires a destination tag that was not supplied. */
  case class MissingTagException() extends Exception("The recipient requires a destination tag")
  /** The fee-query endpoint of the Ripple API did not respond. */
  case class FeesException() extends Exception("The Ripple API does not respond")
  /** The requested payment exceeds the available balance. */
  case class UnfundedException() extends Exception("The payment exceeds the balance capacity")
  /** Catch-all for unrecognized failures; `name` identifies the raw error. */
  // Fixed typo in the message: "occured" -> "occurred".
  case class UnknownException(name: String) extends Exception("Unknown exception occurred")
  /** A payment addressed to the sender's own account. */
  case class SelfSendException() extends Exception("Sending funds to self")
  /** Transaction submission failed; `name` and `message` carry the server's detail. */
  case class SubmitException(name: String, message: String) extends Exception("Submit failed")
}
| LedgerHQ/ledger-wallet-ripple | src/main/scala/exceptions/package.scala | Scala | mit | 812 |
package controllers.circs.report_changes
import utils.WithApplication
import controllers.mappings.Mappings
import org.specs2.mutable._
class GOtherChangeInfoFormSpec extends Specification {
  // Unit tests for the "other change" circumstances form: happy-path binding,
  // required-field validation, and restricted-character validation.
  // Sample free-text answer used by the happy-path binding test.
  val otherInfo = "This is my other info"
  section("unit", models.domain.CircumstancesReportChanges.id)
  "Change of circumstances - Other Change Info Form" should {
    // Valid input must bind into the form's case class with the text preserved.
    "map data into case class" in new WithApplication {
      GOtherChangeInfo.form.bind(
        Map("changeInCircs" -> otherInfo)
      ).fold(
        formWithErrors => "This mapping should not happen." must equalTo("Error"),
        f => {
          f.change must equalTo(otherInfo)
        }
      )
    }
    // An empty value must produce the standard "required" validation error.
    "fail if no data into case class" in new WithApplication {
      GOtherChangeInfo.form.bind(
        Map("changeInCircs" -> "")
      ).fold(
        formWithErrors => {
          formWithErrors.errors(0).message must equalTo(Mappings.errorRequired)
        },
        f => "This mapping should not happen." must equalTo("Valid")
      )
    }
    // Angle brackets are disallowed; exactly one restricted-characters error is expected.
    "reject special characters in text fields" in new WithApplication {
      GOtherChangeInfo.form.bind(
        Map("changeInCircs" -> "<>")
      ).fold(
        formWithErrors => {
          formWithErrors.errors.length must equalTo(1)
          formWithErrors.errors(0).message must equalTo(Mappings.errorRestrictedCharacters)
        },
        f => "This mapping should not happen." must equalTo("Valid"))
    }
  }
  section("unit", models.domain.CircumstancesReportChanges.id)
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/controllers/circs/report_changes/GOtherChangeInfoFormSpec.scala | Scala | mit | 1,510 |
package com.larry.da.test
/**
* Created by larry on 17/2/16.
*/
import com.cloudera.spark.hbase.HBaseContext
import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}
object HBaseBulkPutExample {
  // SparkContext may be injected externally (e.g. from a shell session); when it is not,
  // main() creates one so the example can also run standalone.
  var sc: SparkContext = _

  /**
   * Writes three sample rows into the `userdigestuid_test` HBase table via
   * HBaseContext.bulkPut.
   */
  def main(args: Array[String]) {
    val tableName = "userdigestuid_test"
    val columnFamily = "cf1"

    // Bug fix: the original code dereferenced `sc` without ever initializing it, which
    // threw a NullPointerException when run as a standalone program.
    if (sc == null) {
      sc = new SparkContext(new SparkConf().setAppName("HBaseBulkPutExample"))
    }

    /********************************************************
     * Row layout required by bulkPut:
     * (rowKey, Array((columnFamily, qualifier, value), (columnFamily, qualifier, value)))
     * ******************************************************/
    val rdd = sc.parallelize(Array(
      (Bytes.toBytes("1"), Array((Bytes.toBytes(columnFamily), Bytes.toBytes("1"), Bytes.toBytes("1")), (Bytes.toBytes(columnFamily), Bytes.toBytes("11"), Bytes.toBytes("11")))),
      (Bytes.toBytes("2"), Array((Bytes.toBytes(columnFamily), Bytes.toBytes("1"), Bytes.toBytes("2")))),
      (Bytes.toBytes("3"), Array((Bytes.toBytes(columnFamily), Bytes.toBytes("1"), Bytes.toBytes("3"))))
    ))

    // Pick up the cluster's HBase client configuration.
    val conf = HBaseConfiguration.create()
    conf.addResource(new Path("/etc/hbase/conf/core-site.xml"))
    conf.addResource(new Path("/etc/hbase/conf/hbase-site.xml"))

    val hbaseContext = new HBaseContext(sc, conf)
    hbaseContext.bulkPut[(Array[Byte], Array[(Array[Byte], Array[Byte], Array[Byte])])](
      rdd,
      tableName,
      (putRecord) => {
        // Build one Put per row key, adding every (family, qualifier, value) cell.
        val put = new Put(putRecord._1)
        putRecord._2.foreach((putValue) => put.add(putValue._1, putValue._2, putValue._3))
        put
      },
      true)
  }
}
| larry88/spark_da | src/main/scala/com/larry/da/test/HBaseBulkPutExample.scala | Scala | gpl-2.0 | 1,855 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalog
import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset}
import org.apache.spark.sql.types.StructType
/**
 * Catalog interface for Spark. To access this, use `SparkSession.catalog`.
 *
 * @since 2.0.0
 */
@InterfaceStability.Stable
abstract class Catalog {
  /**
   * Returns the current default database in this session.
   *
   * @since 2.0.0
   */
  def currentDatabase: String
  /**
   * Sets the current default database in this session.
   *
   * @since 2.0.0
   */
  def setCurrentDatabase(dbName: String): Unit
  /**
   * Returns a list of databases available across all sessions.
   *
   * @since 2.0.0
   */
  def listDatabases(): Dataset[Database]
  /**
   * Returns a list of tables in the current database.
   * This includes all temporary tables.
   *
   * @since 2.0.0
   */
  def listTables(): Dataset[Table]
  /**
   * Returns a list of tables in the specified database.
   * This includes all temporary tables.
   *
   * @since 2.0.0
   */
  @throws[AnalysisException]("database does not exist")
  def listTables(dbName: String): Dataset[Table]
  /**
   * Returns a list of functions registered in the current database.
   * This includes all temporary functions.
   *
   * @since 2.0.0
   */
  def listFunctions(): Dataset[Function]
  /**
   * Returns a list of functions registered in the specified database.
   * This includes all temporary functions.
   *
   * @since 2.0.0
   */
  @throws[AnalysisException]("database does not exist")
  def listFunctions(dbName: String): Dataset[Function]
  /**
   * Returns a list of columns for the given table in the current database or
   * the given temporary table.
   *
   * @since 2.0.0
   */
  @throws[AnalysisException]("table does not exist")
  def listColumns(tableName: String): Dataset[Column]
  /**
   * Returns a list of columns for the given table in the specified database.
   *
   * @since 2.0.0
   */
  @throws[AnalysisException]("database or table does not exist")
  def listColumns(dbName: String, tableName: String): Dataset[Column]
  /**
   * Get the database with the specified name. This throws an AnalysisException when the database
   * cannot be found.
   *
   * @since 2.1.0
   */
  @throws[AnalysisException]("database does not exist")
  def getDatabase(dbName: String): Database
  /**
   * Get the table or view with the specified name. This table can be a temporary view or a
   * table/view in the current database. This throws an AnalysisException when no Table
   * can be found.
   *
   * @since 2.1.0
   */
  @throws[AnalysisException]("table does not exist")
  def getTable(tableName: String): Table
  /**
   * Get the table or view with the specified name in the specified database. This throws an
   * AnalysisException when no Table can be found.
   *
   * @since 2.1.0
   */
  @throws[AnalysisException]("database or table does not exist")
  def getTable(dbName: String, tableName: String): Table
  /**
   * Get the function with the specified name. This function can be a temporary function or a
   * function in the current database. This throws an AnalysisException when the function cannot
   * be found.
   *
   * @since 2.1.0
   */
  @throws[AnalysisException]("function does not exist")
  def getFunction(functionName: String): Function
  /**
   * Get the function with the specified name. This throws an AnalysisException when the function
   * cannot be found.
   *
   * @since 2.1.0
   */
  @throws[AnalysisException]("database or function does not exist")
  def getFunction(dbName: String, functionName: String): Function
  /**
   * Check if the database with the specified name exists.
   *
   * @since 2.1.0
   */
  def databaseExists(dbName: String): Boolean
  /**
   * Check if the table or view with the specified name exists. This can either be a temporary
   * view or a table/view in the current database.
   *
   * @since 2.1.0
   */
  def tableExists(tableName: String): Boolean
  /**
   * Check if the table or view with the specified name exists in the specified database.
   *
   * @since 2.1.0
   */
  def tableExists(dbName: String, tableName: String): Boolean
  /**
   * Check if the function with the specified name exists. This can either be a temporary function
   * or a function in the current database.
   *
   * @since 2.1.0
   */
  def functionExists(functionName: String): Boolean
  /**
   * Check if the function with the specified name exists in the specified database.
   *
   * @since 2.1.0
   */
  def functionExists(dbName: String, functionName: String): Boolean
  /**
   * :: Experimental ::
   * Creates an external table from the given path and returns the corresponding DataFrame.
   * It will use the default data source configured by spark.sql.sources.default.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(tableName: String, path: String): DataFrame
  /**
   * :: Experimental ::
   * Creates an external table from the given path based on a data source
   * and returns the corresponding DataFrame.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(tableName: String, path: String, source: String): DataFrame
  /**
   * :: Experimental ::
   * Creates an external table from the given path based on a data source and a set of options.
   * Then, returns the corresponding DataFrame.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(
      tableName: String,
      source: String,
      options: java.util.Map[String, String]): DataFrame
  /**
   * :: Experimental ::
   * (Scala-specific)
   * Creates an external table from the given path based on a data source and a set of options.
   * Then, returns the corresponding DataFrame.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(
      tableName: String,
      source: String,
      options: Map[String, String]): DataFrame
  /**
   * :: Experimental ::
   * Create an external table from the given path based on a data source, a schema and
   * a set of options. Then, returns the corresponding DataFrame.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(
      tableName: String,
      source: String,
      schema: StructType,
      options: java.util.Map[String, String]): DataFrame
  /**
   * :: Experimental ::
   * (Scala-specific)
   * Create an external table from the given path based on a data source, a schema and
   * a set of options. Then, returns the corresponding DataFrame.
   *
   * @since 2.0.0
   */
  @Experimental
  @InterfaceStability.Evolving
  def createExternalTable(
      tableName: String,
      source: String,
      schema: StructType,
      options: Map[String, String]): DataFrame
  /**
   * Drops the local temporary view with the given view name in the catalog.
   * If the view has been cached before, then it will also be uncached.
   *
   * Local temporary view is session-scoped. Its lifetime is the lifetime of the session that
   * created it, i.e. it will be automatically dropped when the session terminates. It's not
   * tied to any databases, i.e. we can't use `db1.view1` to reference a local temporary view.
   *
   * Note that, the return type of this method was Unit in Spark 2.0, but changed to Boolean
   * in Spark 2.1.
   *
   * @param viewName the name of the view to be dropped.
   * @return true if the view is dropped successfully, false otherwise.
   * @since 2.0.0
   */
  def dropTempView(viewName: String): Boolean
  /**
   * Drops the global temporary view with the given view name in the catalog.
   * If the view has been cached before, then it will also be uncached.
   *
   * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark application,
   * i.e. it will be automatically dropped when the application terminates. It's tied to a system
   * preserved database `_global_temp`, and we must use the qualified name to refer a global temp
   * view, e.g. `SELECT * FROM _global_temp.view1`.
   *
   * @param viewName the name of the view to be dropped.
   * @return true if the view is dropped successfully, false otherwise.
   * @since 2.1.0
   */
  def dropGlobalTempView(viewName: String): Boolean
  /**
   * Recover all the partitions in the directory of a table and update the catalog.
   *
   * @since 2.1.1
   */
  def recoverPartitions(tableName: String): Unit
  /**
   * Returns true if the table is currently cached in-memory.
   *
   * @since 2.0.0
   */
  def isCached(tableName: String): Boolean
  /**
   * Caches the specified table in-memory.
   *
   * @since 2.0.0
   */
  def cacheTable(tableName: String): Unit
  /**
   * Removes the specified table from the in-memory cache.
   *
   * @since 2.0.0
   */
  def uncacheTable(tableName: String): Unit
  /**
   * Removes all cached tables from the in-memory cache.
   *
   * @since 2.0.0
   */
  def clearCache(): Unit
  /**
   * Invalidate and refresh all the cached metadata of the given table. For performance reasons,
   * Spark SQL or the external data source library it uses might cache certain metadata about a
   * table, such as the location of blocks. When those change outside of Spark SQL, users should
   * call this function to invalidate the cache.
   *
   * If this table is cached as an InMemoryRelation, drop the original cached version and make the
   * new version cached lazily.
   *
   * @since 2.0.0
   */
  def refreshTable(tableName: String): Unit
  /**
   * Invalidate and refresh all the cached data (and the associated metadata) for any DataFrame
   * that contains the given data source path. Path matching is by prefix, i.e. "/" would
   * invalidate everything that is cached.
   *
   * @since 2.0.0
   */
  def refreshByPath(path: String): Unit
}
| u2009cf/spark-radar | sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala | Scala | apache-2.0 | 10,765 |
package com.splicemachine.spark2.splicemachine
object ThisVersionSpecificItems {
  // Aliases binding the Spark-version-specific expectations used by shared tests.
  val schema = SparkVersionSpecificItems.schemaWithoutMetadata
  // NOTE(review): the name suggests an exception value, but it is wired to `checkTheUrl` —
  // confirm against SparkVersionSpecificItems that this is the intended binding.
  val jdbcBadDriverNameException = SparkVersionSpecificItems.checkTheUrl
}
| splicemachine/spliceengine | splice_spark2/src/test/spark3.1/com/splicemachine/spark2/splicemachine/ThisVersionSpecificItems.scala | Scala | agpl-3.0 | 220 |
/*
* Copyright 2013 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.sharry.lib
import java.io.OutputStream
import java.nio.file.Path
/**
* @author Eike Kettner eike.kettner@gmail.com
* @since 11.02.13 22:42
*/
trait Sharry {
  /**
   * Adds the given files by zipping them into one archive and encrypting this
   * archive. The file name is returned that can be used to look up the file.
   *
   * @param files the files to add
   * @param owner identifier of the user the archive is recorded for
   * @param password the password used to encrypt the archive
   * @param timeout how long the file is valid; after this timeout, the file
   * will be subject to deletion
   * @return the generated file name on success, or the exception that occurred
   */
  def addFiles(files: Iterable[Entry], owner: String, password: String, timeout: Option[Timeout]): Either[Exception, FileName]
  /**
   * Looks up the encrypted archive.
   *
   * @param name the name of the archive file to find
   * @return the path to the encrypted archive, if it exists
   */
  def lookupFile(name: FileName): Option[Path]
  /**
   * Looks up the encrypted archive and decrypts it into the given
   * output stream.
   *
   * @param name the name of the archive file to decrypt
   * @param password the password the archive was encrypted with
   * @param out the stream receiving the decrypted archive contents
   */
  def decryptFile(name: FileName, password: String, out: OutputStream)
  /**
   * Convenience method that decrypts the archive and stores it
   * into a file at the given path. The `target` file must not
   * exist and it will contain the unencrypted archive. Client
   * code must take care of access/deletion.
   *
   * @param name the name of the archive file to decrypt
   * @param password the password the archive was encrypted with
   * @param target the (non-existing) file the decrypted archive is written to
   */
  def decryptFile(name: FileName, password: String, target: Path)
  /**
   * Removes all files that match the given filter.
   *
   * @param filter predicate selecting the file names to delete
   * @return the number of files deleted
   */
  def removeFiles(filter: FileName => Boolean): Int
  /**
   * Lists all files currently available.
   *
   * @return the names of all stored archives
   */
  def listFiles: Iterable[FileName]
  /**
   * Returns the size the folder currently occupies.
   *
   * @return the current folder size
   */
  def folderSize: Long
  /** The upper limit for `folderSize`. */
  def folderSizeLimit: Long
}
| eikek/publet-sharry | src/main/scala/org/eknet/publet/sharry/lib/Sharry.scala | Scala | apache-2.0 | 2,493 |
package ca.uqam.euler.nicolas
/** What is the largest prime factor of the number 600851475143 ? */
/* Answer: 6857 */
object Problem3 {
  /** Infinite stream of primes (2, 3, 5, ...), kept for API compatibility. */
  def primes = {
    def primesFrom(lastPrime: Int, knownPrimes: Seq[Int]): Stream[Int] = {
      val nextPrime = Stream.from(lastPrime + 1).find(n ⇒ !knownPrimes.exists(n % _ == 0)).get
      Stream.cons(lastPrime, primesFrom(nextPrime, knownPrimes :+ lastPrime))
    }
    primesFrom(2, List())
  }
  /**
   * Returns the largest prime factor of `n`, or None when `n < 2`.
   *
   * Bug fix: the previous implementation only scanned `sqrt(n) down to 2`, so it
   * missed prime factors greater than sqrt(n) (e.g. it returned Some(2) for 10
   * instead of Some(5)) and returned None for prime inputs. Trial division that
   * divides out each found factor is correct: once `d * d > remaining`, the
   * remaining value is itself the largest prime factor.
   */
  def largestPrimeFactor(n: Long): Option[Long] = {
    @scala.annotation.tailrec
    def loop(remaining: Long, d: Long): Long =
      if (d * d > remaining) remaining
      else if (remaining % d == 0) loop(remaining / d, d)
      else loop(remaining, d + 1)
    if (n < 2) None else Some(loop(n, 2L))
  }
  /** Unbounded iterator 1, 2, 3, ... used by the naive helpers below. */
  def longs = Iterator.iterate(1L)(_ + 1)
  /** Inclusive range [start, end] as a Long iterator. */
  def r(start: Long, end: Long) = longs.dropWhile(_ < start).takeWhile(_ <= end)
  /** All divisors of n, including 1 and n (O(n) scan). */
  def factors(n: Long) = r(1, n).filter(n % _ == 0L)
  /** Divisors of n in (1, sqrt(n)]. */
  def candidateFactors(n: Long) = factors(n).drop(1).takeWhile(_ <= math.sqrt(n).toLong)
  /** Naive primality test: n is prime iff it has no divisor in (1, sqrt(n)]. */
  def isPrime(n: Long) = n > 1 && candidateFactors(n).size == 0
  /** Prime divisors of n that are at most sqrt(n). */
  def primeFactors(n: Long) = candidateFactors(n).filter(isPrime)
  def main(args: Array[String]): Unit = Answer {
    largestPrimeFactor(600851475143L)
  }
} | nicolaspayette/project-euler | src/main/scala/ca/uqam/euler/nicolas/Problem003.scala | Scala | mit | 1,049 |
package jk_5.nailed.map
import scala.collection.immutable
import java.util.concurrent.atomic.AtomicInteger
import jk_5.nailed.map.gameloop.instructions._
import jk_5.nailed.map.gameloop.IInstruction
/**
* No description given
*
* @author jk-5
*/
object InstructionManager {
  // Registry of every game-loop instruction, keyed by its textual name.
  private final val instructionMap = immutable.HashMap[String, Class[_]](
    "trigger" -> classOf[InstructionTrigger],
    "sleep" -> classOf[InstructionSleep],
    "watchunready" -> classOf[InstructionWatchUnready],
    "unwatchunready" -> classOf[InstructionUnwatchUnready],
    "countdown" -> classOf[InstructionCountdown],
    "setwinner" -> classOf[InstructionSetWinner],
    "startwinnerinterrupt" -> classOf[InstructionStartWinnerInterrupt],
    "enable" -> classOf[InstructionEnableStat],
    "disable" -> classOf[InstructionDisableStat],
    "setspawn" -> classOf[InstructionSetSpawnpoint],
    "resetspawn" -> classOf[InstructionResetSpawnpoint],
    "clearinventory" -> classOf[InstructionClearInventory],
    "setgamemode" -> classOf[InstructionSetGamemode],
    "moveteamspeak" -> classOf[InstructionMoveTeamspeak],
    "settime" -> classOf[InstructionSetTime],
    "setdifficulty" -> classOf[InstructionSetDifficulty],
    "clearexperience" -> classOf[InstructionResetExperience],
    "sethealth" -> classOf[InstructionSetHealth],
    "setfoodlevel" -> classOf[InstructionSetFoodLevel],
    "countup" -> classOf[InstructionCountUp]
  )
  // Instantiates a fresh instruction for `name` via its no-arg constructor.
  // NOTE(review): unknown names throw NoSuchElementException ("None.get") via Option.get —
  // a descriptive error would help if names come from user-editable configuration.
  def getInstruction(name: String): IInstruction = this.instructionMap.get(name).get.newInstance().asInstanceOf[IInstruction]
}
| nailed/nailed-legacy | src/main/scala/jk_5/nailed/map/InstructionManager.scala | Scala | unlicense | 1,559 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.net.{Inet4Address, InetAddress, NetworkInterface}
import scala.collection.JavaConverters._
import scala.sys.process._
import scala.util.Try
private[spark] object DockerUtils {

  /**
   * Returns an IP address at which Docker containers can reach the host, trying in order:
   * the DOCKER_IP environment variable, docker-machine, boot2docker, and finally the host's
   * own network interfaces (falling back to `InetAddress.getLocalHost`).
   */
  def getDockerIp(): String = {
    /** If docker-machine is setup on this box, attempts to find the ip from it. */
    def findFromDockerMachine(): Option[String] = {
      sys.env.get("DOCKER_MACHINE_NAME").flatMap { name =>
        Try(Seq("/bin/bash", "-c", s"docker-machine ip $name 2>/dev/null").!!.trim).toOption
      }
    }

    /**
     * Scans local network interfaces for a usable address. This is based on
     * Utils.findLocalInetAddress(), but is modified to blacklist certain interfaces.
     */
    def findFromNetworkInterfaces(): Option[String] = {
      val blackListedIFs = Seq(
        "vboxnet0", // Mac
        "docker0" // Linux
      )
      // getNetworkInterfaces returns ifs in reverse order compared to ifconfig output order
      // on unix-like system. On windows, it returns in index order.
      // It's more proper to pick ip address following system output order.
      val orderedNetworkIFs = NetworkInterface.getNetworkInterfaces.asScala.toSeq
        .filterNot(i => blackListedIFs.contains(i.getName))
        .reverse

      // First usable address of a single interface, preferring IPv4.
      def addressOf(ni: NetworkInterface): Option[String] = {
        val addresses = ni.getInetAddresses.asScala
          .filterNot(addr => addr.isLinkLocalAddress || addr.isLoopbackAddress).toSeq
        addresses.headOption.map { _ =>
          val addr = addresses.find(_.isInstanceOf[Inet4Address]).getOrElse(addresses.head)
          // because of Inet6Address.toHostName may add interface at the end if it knows about it,
          // rebuild the address from its raw bytes before asking for the host string.
          InetAddress.getByAddress(addr.getAddress).getHostAddress
        }
      }

      // Lazily evaluate interfaces so we stop at the first usable address. This replaces the
      // original non-local `return` from inside a for-comprehension, which is implemented by
      // throwing NonLocalReturnControl and is a well-known Scala anti-pattern.
      orderedNetworkIFs.toStream.flatMap(addressOf).headOption
    }

    sys.env.get("DOCKER_IP")
      .orElse(findFromDockerMachine())
      .orElse(Try(Seq("/bin/bash", "-c", "boot2docker ip 2>/dev/null").!!.trim).toOption)
      .orElse(findFromNetworkInterfaces())
      .getOrElse {
        // Last resort; may resolve to something like 127.0.1.1 (happens on Debian).
        InetAddress.getLocalHost.getHostAddress
      }
  }
}
| akopich/spark | external/docker-integration-tests/src/test/scala/org/apache/spark/util/DockerUtils.scala | Scala | apache-2.0 | 3,040 |
import java.net.InetSocketAddress
import play.sbt.PlayRunHook
import sbt._
object Webpack {
  /**
   * Builds a PlayRunHook that runs a one-shot webpack build before the dev server
   * starts, then switches to `webpack --watch` once it is up, and kills the
   * tracked process when the server stops.
   */
  def apply(base: File): PlayRunHook = {
    object WebpackHook extends PlayRunHook {
      // Handle to the most recently started webpack process, if any.
      var process: Option[Process] = None
      override def beforeStarted() = {
        // One-shot build before the app starts.
        process = Option(
          Process("sh -c \'./node_modules/.bin/webpack\'", base).run()
        )
      }
      override def afterStarted(addr: InetSocketAddress) = {
        // NOTE(review): the handle from beforeStarted() is overwritten here without being
        // destroyed; if the initial build is still running, afterStopped() can no longer
        // stop it — confirm whether letting it run to completion is intended.
        process = Option(
          Process("sh -c ./node_modules/.bin/webpack --watch", base).run()
        )
      }
      override def afterStopped() = {
        // Kill the watcher (or whichever process is currently tracked).
        process.foreach(_.destroy())
        process = None
      }
    }
    WebpackHook
  }
} | bminderh/play-react-webpack | project/Webpack.scala | Scala | apache-2.0 | 692 |
/**
* Copyright (C) 2020 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.xforms
import org.orbeon.oxf.util.CoreUtils.BooleanOps
import org.orbeon.xforms.facade.{Controls, Utils}
import org.scalajs.dom
import org.scalajs.dom.ext._
import org.scalajs.dom.html
import org.scalajs.dom.raw.{Element, HTMLElement}
import scalatags.JsDom
import org.scalajs.macrotaskexecutor.MacrotaskExecutor.Implicits._
import scala.concurrent.duration._
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSExport, JSExportTopLevel}
import scala.scalajs.js.timers
import scala.scalajs.js.timers.SetTimeoutHandle
import scalatags.JsDom.all.{value, _}
// Progressively migrate contents of xforms.js here
@JSExportTopLevel("OrbeonXFormsUi")
object XFormsUI {
  // True while the modal progress overlay is visible; guards against showing it twice.
  @JSExport // 2020-04-27: 6 JavaScript usages from xforms.js
  var modalProgressPanelShown: Boolean = false
  @JSExport // 2020-04-27: 1 JavaScript usage
  def displayModalProgressPanel(): Unit =
    if (! modalProgressPanelShown) {
      modalProgressPanelShown = true
      // Take out the focus from the current control
      // See https://github.com/orbeon/orbeon-forms/issues/4511
      val focusControlIdOpt =
        Option(Globals.currentFocusControlId) map { focusControlId =>
          Controls.removeFocus(focusControlId)
          focusControlId
        }
      // On iOS with a non-default zoom, reset the zoom first and show the panel after a
      // short delay; the timer handle is kept so it can be cancelled when hiding.
      val timerIdOpt =
        if (Utils.isIOS() && Utils.getZoomLevel() != 1.0) {
          Utils.resetIOSZoom()
          Some(
            timers.setTimeout(200.milliseconds) {
              Private.showModalProgressPanelRaw()
            }
          )
        } else {
          Private.showModalProgressPanelRaw()
          None
        }
      AjaxClient.ajaxResponseReceivedForCurrentEventQueueF("modal panel") foreach { details =>
        // Hide the modal progress panel, unless the server tells us to do a submission or load, so we don't want
        // to remove it otherwise users could start interacting with a page which is going to be replaced shortly.
        //
        // We remove the modal progress panel before handling DOM response, as script actions may dispatch
        // events and we don't want them to be filtered. If there are server events, we don't remove the
        // panel until they have been processed, i.e. the request sending the server events returns.
        val mustHideProgressDialog =
          ! (
            // `exists((//xxf:submission, //xxf:load)[empty(@target) and empty(@show-progress)])`
            details.responseXML.getElementsByTagNameNS(Namespaces.XXF, "submission").iterator ++
              details.responseXML.getElementsByTagNameNS(Namespaces.XXF, "load").iterator exists
              (e => ! e.hasAttribute("target") && e.getAttribute("show-progress") != "false")
          )
        if (mustHideProgressDialog)
          Private.hideModalProgressPanel(timerIdOpt, focusControlIdOpt)
      }
    }
  def showModalProgressPanelImmediate(): Unit =
    Private.showModalProgressPanelRaw()
  def hideModalProgressPanelImmediate(): Unit =
    Private.hideModalProgressPanelRaw()
  // Facade for itemset tree nodes received from the server.
  @js.native trait ItemsetItem extends js.Object {
    def attributes: js.UndefOr[js.Dictionary[String]] = js.native
    def children: js.UndefOr[js.Array[ItemsetItem]] = js.native
    def label: String = js.native
    def value: String = js.native
  }
  // TODO:
  // - Resolve nested `optgroup`s.
  // - use direct serialization/deserialization instead of custom JSON.
  //   See `XFormsSelect1Control.outputAjaxDiffUseClientValue()`.
  // Rebuilds the `<select>` options under `documentElement` from the given itemset tree,
  // preserving the currently selected values.
  @JSExport
  def updateSelectItemset(documentElement: html.Element, itemsetTree: js.Array[ItemsetItem]): Unit =
    ((documentElement.getElementsByTagName("select")(0): js.UndefOr[Element]): Any) match {
      case select: html.Select =>
        val selectedValues =
          select.options.filter(_.selected).map(_.value)
        // Renders a leaf as `<option>` and a node with children as `<optgroup>`.
        def generateItem(itemElement: ItemsetItem): JsDom.TypedTag[HTMLElement] = {
          val classOpt = itemElement.attributes.toOption.flatMap(_.get("class"))
          itemElement.children.toOption match {
            case None =>
              option(
                (value := itemElement.value) ::
                  selectedValues.contains(itemElement.value).list(selected) :::
                  classOpt.toList.map(`class` := _)
              )(
                itemElement.label
              )
            case Some(children) =>
              optgroup(
                (attr("label") := itemElement.label) :: classOpt.toList.map(`class` := _)
              ) {
                children.map(generateItem): _*
              }
          }
        }
        // IE 11 doesn't support `replaceChildren()`
        select.innerHTML = ""
        itemsetTree.toList.map(generateItem).map(_.render).foreach(select.appendChild)
      case _ =>
        // This should not happen but if it does we'd like to know about it without entirely stopping the
        // update process so we give the user a chance to recover the form. This should be generalized
        // once we have migrated `AjaxServer.js` entirely to Scala.
        scribe.error(s"`<select>` element not found when attempting to update itemset")
    }
  private object Private {
    // The loader element lives directly under `<body>` with class `orbeon-loader`.
    private def findLoaderElem: Option[Element] =
      Option(dom.document.querySelector("body > .orbeon-loader"))
    private def createLoaderElem: Element = {
      val newDiv = dom.document.createElement("div")
      newDiv.classList.add("orbeon-loader")
      dom.document.body.appendChild(newDiv)
      newDiv
    }
    // Makes the loader visible, creating it on first use.
    def showModalProgressPanelRaw(): Unit = {
      val elem = findLoaderElem getOrElse createLoaderElem
      val cl = elem.classList
      cl.add("loader") // TODO: `add()` can take several arguments
      cl.add("loader-default")
      cl.add("is-active")
    }
    def hideModalProgressPanelRaw(): Unit =
      findLoaderElem foreach { elem =>
        val cl = elem.classList
        cl.remove("is-active")
      }
    // Cancels a pending delayed show (iOS case), hides the panel, and restores focus.
    def hideModalProgressPanel(
      timerIdOpt        : Option[SetTimeoutHandle],
      focusControlIdOpt : Option[String]
    ): Unit =
      if (modalProgressPanelShown) {
        modalProgressPanelShown = false
        // So that the modal progress panel doesn't show just after we try to hide it
        timerIdOpt foreach timers.clearTimeout
        hideModalProgressPanelRaw()
        // Restore focus
        // See https://github.com/orbeon/orbeon-forms/issues/4511
        focusControlIdOpt foreach Controls.setFocus
      }
  }
}
| orbeon/orbeon-forms | xforms-web/src/main/scala/org/orbeon/xforms/XFormsUI.scala | Scala | lgpl-2.1 | 7,167 |
package edu.uwm.cs.pir.misc
import edu.uwm.cs.pir.spark.SparkObject._
import edu.uwm.cs.pir.strategy.Strategy._
object Constants {
  // Root folders, all derived from the configured input data root.
  val DATA_ROOT = inputDataRoot
  val SAMPLES_ROOT = DATA_ROOT + "samples/"
  val SAMPLE_SIFT_FEATURE_ROOT = SAMPLES_ROOT + "sift/"
  val SAMPLE_IMAGES_ROOT = SAMPLES_ROOT + "images/"
  val SAMPLE_TEXT_ROOT = SAMPLES_ROOT + "text/"
  // Wikipedia early-fusion experiment dataset locations.
  val WIKIPEDIA_ROOT = DATA_ROOT + "experiments/early_fusion/wikipedia_dataset/"
  val WIKIPEDIA_IMAGES_ROOT = WIKIPEDIA_ROOT + "images/"
  val WIKIPEDIA_TEXT_ROOT = WIKIPEDIA_ROOT + "texts/"
  val STOPWORDS_ROOT = DATA_ROOT + "stoplists/"
  // Serialized feature/model artifact locations.
  val IMAGE_SERILIZED_FILE_ROOT = DATA_ROOT + "image_features/"
  val EXTRACTED_TEXT_FILE_ROOT = DATA_ROOT + "text_features/"
  val IMAGE_FEATURE_ROOT = DATA_ROOT + "image_features/"
  // NOTE(review): "FIlE" capitalization is a typo, but the name is public API — not renamed.
  val CLUSTER_FIlE = DATA_ROOT + "image_cluster/clusters.ser"
  val LDA_MODEL_FIlE = DATA_ROOT + "text_model/lda_model.ser"
  // Index locations for image and text features.
  val INDEX_ROOT = DATA_ROOT + "index/"
  val INDEX_IMAGE_FEATURE_ROOT = INDEX_ROOT + "images/"
  val INDEX_TEXT_FEATURE_ROOT = INDEX_ROOT + "text/"
  // Ground-truth mapping files for training/test splits and categories.
  val GROUND_TRUTH_ROOT = DATA_ROOT + "ground_truth/"
  val GROUND_TRUTH_ALL_TEXT_IMAGE_MAP = GROUND_TRUTH_ROOT + "all_txt_img_cat.list"
  val GROUND_TRUTH_TRAINING_TEXT_IMAGE_MAP = GROUND_TRUTH_ROOT + "trainset_txt_img_cat.list"
  val GROUND_TRUTH_TEST_TEXT_IMAGE_MAP = GROUND_TRUTH_ROOT + "testset_txt_img_cat.list"
  val GROUND_TRUTH_CATEGORY_LIST = GROUND_TRUTH_ROOT + "categories.list"
  val DEFAULT_ENCODING = "ISO-8859-1"
  // Clustering / topic-model hyperparameters.
  val NUM_OF_CLUSTERS = 256
  val NUM_OF_FEATURES = 300
  val NUM_OF_TOPICS = 10
  val ALPHA_SUM = NUM_OF_TOPICS * 0.01
  val BETA_W = 0.01
  // Image preprocessing target dimensions.
  val SCALE_WIDTH = 320
  val SCALE_HEIGHT = 240
  val NUMBER_ITERATION = 1000
  val NUMBER_SAMPLER = 4
  // Use Gibbs sampling to infer a topic distribution. Topics are initialized
  // to the (or a) most probable topic for each token. Using zero iterations
  // returns exactly this initial topic distribution. This code does not
  // adjust type-topic counts: P(w|t) is clamped.
  // GIBBS_SAMPLING_ITERATION - The total number of iterations of sampling per
  // document
  // GIBBS_SAMPLING_THINNING - The number of iterations between saved samples
  // GIBBS_SAMPLING_BURNIN - The number of iterations before the first saved
  // sample
  val GIBBS_SAMPLING_ITERATION = 10
  val GIBBS_SAMPLING_THINNING = 1
  val GIBBS_SAMPLING_BURNIN = 5
  // NOTE(review): mutable global initialized to null; callers must assign it before use —
  // an Option[RunStrategy] (or a required constructor parameter) would be safer.
  var GLOBAL_STRATEGY : RunStrategy = null
  val NUM_OF_TOP_RESULT = 10
} | pir-dsl/pir | core/src/main/scala/edu/uwm/cs/pir/misc/Constants.scala | Scala | gpl-2.0 | 2,570 |
package keystoneml.pipelines.images.mnist
import breeze.linalg.DenseVector
import breeze.stats.distributions.{RandBasis, ThreadLocalRandomGenerator}
import keystoneml.evaluation.MulticlassClassifierEvaluator
import keystoneml.loaders.{CsvDataLoader, LabeledData}
import keystoneml.nodes.learning.BlockLeastSquaresEstimator
import keystoneml.nodes.stats.{LinearRectifier, PaddedFFT, RandomSignNode}
import keystoneml.nodes.util._
import org.apache.commons.math3.random.MersenneTwister
import org.apache.spark.{SparkConf, SparkContext}
import keystoneml.pipelines._
import scopt.OptionParser
import keystoneml.utils.Image
import keystoneml.workflow.Pipeline
object MnistRandomFFT extends Serializable with Logging {
  // Spark application name; also used as the scopt program name in `parse`.
  val appName = "MnistRandomFFT"
  /**
   * Trains and evaluates a random-FFT MNIST classifier: each of `conf.numFFTs` branches
   * applies a random sign flip, a padded FFT, and a ReLU; branch outputs are concatenated,
   * fed to a block least-squares solver, and the predicted class is the argmax.
   * Logs train/test error and returns the fitted pipeline.
   */
  def run(sc: SparkContext, conf: MnistRandomFFTConfig): Pipeline[DenseVector[Double], Int] = {
    // This is a property of the MNIST Dataset (digits 0 - 9)
    val numClasses = 10
    // Seeded RNG so random sign vectors are reproducible across runs.
    val randomSignSource = new RandBasis(new ThreadLocalRandomGenerator(new MersenneTwister(conf.seed)))
    // The number of pixels in an MNIST image (28 x 28 = 784)
    // Because the mnistImageSize is 784, we get 512 PaddedFFT features per FFT.
    val mnistImageSize = 784
    val startTime = System.nanoTime()
    val train = LabeledData(
      CsvDataLoader(sc, conf.trainLocation, conf.numPartitions)
        // The pipeline expects 0-indexed class labels, but the labels in the file are 1-indexed
        .map(x => (x(0).toInt - 1, x(1 until x.length)))
        .cache())
    val labels = ClassLabelIndicatorsFromIntLabels(numClasses).apply(train.labels)
    // One random-sign + FFT + ReLU branch per FFT; results are concatenated into one vector.
    val featurizer = Pipeline.gather {
      Seq.fill(conf.numFFTs) {
        RandomSignNode(mnistImageSize, randomSignSource) andThen PaddedFFT() andThen LinearRectifier(0.0)
      }
    } andThen VectorCombiner()
    // Solve block least squares on the featurized training data, then take argmax as the class.
    val pipeline = featurizer andThen
      (new BlockLeastSquaresEstimator(conf.blockSize, 1, conf.lambda.getOrElse(0)),
        train.data, labels) andThen
      MaxClassifier
    val test = LabeledData(
      CsvDataLoader(sc, conf.testLocation, conf.numPartitions)
        // The pipeline expects 0-indexed class labels, but the labels in the file are 1-indexed
        .map(x => (x(0).toInt - 1, x(1 until x.length)))
        .cache())
    // Calculate train error
    val evaluator = new MulticlassClassifierEvaluator(numClasses)
    val trainEval = evaluator.evaluate(pipeline(train.data), train.labels)
    logInfo("TRAIN Error is " + (100 * trainEval.totalError) + "%")
    // Calculate test error
    val testEval = evaluator.evaluate(pipeline(test.data), test.labels)
    logInfo("TEST Error is " + (100 * testEval.totalError) + "%")
    val endTime = System.nanoTime()
    logInfo(s"Pipeline took ${(endTime - startTime)/1e9} s")
    pipeline
  }
case class MnistRandomFFTConfig(
trainLocation: String = "",
testLocation: String = "",
numFFTs: Int = 200,
blockSize: Int = 2048,
numPartitions: Int = 10,
lambda: Option[Double] = None,
seed: Long = 0)
def parse(args: Array[String]): MnistRandomFFTConfig = new OptionParser[MnistRandomFFTConfig](appName) {
head(appName, "0.1")
help("help") text("prints this usage text")
opt[String]("trainLocation") required() action { (x,c) => c.copy(trainLocation=x) }
opt[String]("testLocation") required() action { (x,c) => c.copy(testLocation=x) }
opt[Int]("numFFTs") action { (x,c) => c.copy(numFFTs=x) }
opt[Int]("blockSize") validate { x =>
// Bitwise trick to test if x is a power of 2
if (x % 512 == 0) {
success
} else {
failure("Option --blockSize must be divisible by 512")
}
} action { (x,c) => c.copy(blockSize=x) }
opt[Int]("numPartitions") action { (x,c) => c.copy(numPartitions=x) }
opt[Double]("lambda") action { (x,c) => c.copy(lambda=Some(x)) }
opt[Long]("seed") action { (x,c) => c.copy(seed=x) }
}.parse(args, MnistRandomFFTConfig()).get
/**
* The actual driver receives its configuration parameters from spark-submit usually.
*
* @param args
*/
def main(args: Array[String]) = {
val appConfig = parse(args)
val conf = new SparkConf().setAppName(appName)
conf.setIfMissing("spark.master", "local[2]")
val sc = new SparkContext(conf)
run(sc, appConfig)
sc.stop()
}
}
| amplab/keystone | src/main/scala/keystoneml/pipelines/images/mnist/MnistRandomFFT.scala | Scala | apache-2.0 | 4,338 |
package com.larry.da.parse
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Graph, Edge}
import org.apache.spark.rdd.RDD
import com.google.common.hash.Hashing.md5
import scala.collection.mutable.ArrayBuffer
/**
* Created by larry on 16/11/15.
*/
/**
 * Spark job that groups web sites into classes based on co-visitation:
 * sites frequently visited by the same users end up in the same connected
 * component of a similarity graph.
 *
 * Created by larry on 16/11/15.
 */
object SiteClass {

  // Spark context; must be assigned by the caller before invoking any method
  // below, otherwise they will NPE. NOTE(review): consider passing it as a
  // parameter instead of a mutable global.
  var sc : SparkContext = _

  /**
   * For one day of "show" logs (channels baidu/adx/tanx), extracts distinct
   * (guid, site) visits, keeps users with 2..24 distinct sites, counts each
   * lexicographically-ordered site pair co-visited by a user, and saves
   * tab-separated "site1 site2 count" rows under show/<day>.
   */
  def binaryRelation(day:String): Unit ={
    import scala.collection.mutable.ArrayBuffer
    import com.larry.da.util.{LogParseUtil => U}
    val log = sc.union(
      day.split(",").flatMap(hour=>{
        "show".split(",").flatMap(logType => {
          "baidu,adx,tanx".split(",").map(ch=> U.dspRdd(sc,logType,ch,hour))
        })
      })
    )
    val fields = "guid,site".split(",")
    // Normalize missing/empty field values to the sentinel "null", then drop them.
    val data1 = log.map(x=>{
      val d = U.showLog(x)
      val Array(guid,site) = fields.map(f => {val fValue = d.getOrElse(f,"null");if(fValue == "") "null" else fValue})
      (guid,site)
    }).filter(x=>x._1 != "null" && x._2 != "null").distinct(sc.defaultParallelism * 3)

    // All distinct sites seen per user.
    val uidContent = data1.map(x=>{
      val(guid,site) = x;
      (guid,ArrayBuffer(site))
    }).reduceByKey(_++=_)

    // Users with 2..24 sites contribute one count per ordered site pair
    // (p._1 < p._2 keeps each unordered pair once and drops self-pairs).
    val data2 = uidContent.filter(x=>{val len = x._2.length; len > 1 && len < 25}).flatMap(x=>{
      val(guid,list) = x;
      val res = for(s1<-list;s2<-list) yield(s1,s2)
      res.filter(p=>p._1 < p._2).map(p=>(p,1))
    }).reduceByKey(_+_)

    data2.map(x=>{
      val((s1,s2),num) = x;
      Array(s1,s2,num).mkString("\\t")
    }).saveAsTextFile(s"show/$day")
  }

  /** Driver: runs [[binaryRelation]] for each day 2015-11-07 .. 2015-11-14. */
  def run1(): Unit ={
    7 to 14 foreach(d =>{
      val day = if(d < 10) "0" + d else d.toString
      val date = "2015-11-" + day
      println (date)
      binaryRelation(date)
    })
  }

  //========== vertices and edges =================

  /**
   * Splits weighted site-pair scores into GraphX-ready parts: distinct
   * (md5-hash id, site name) vertices and (srcId, dstId, weight) edges.
   * NOTE(review): the "Gragh" typo is kept to avoid breaking callers.
   */
  def prepareGragh(data: RDD[((String, String), Double)]) = {
    // Tag 1 = vertex record, tag 2 = edge record, in a single pass.
    val res = data.flatMap(x => {
      val ((s1, s2), num) = x;
      val (s1L, s2L) = (md5.hashString(s1).asLong(), md5.hashString(s2).asLong())
      Array(
        (1, (s1L,s1,0)),
        (1, (s2L,s2,0)),
        (2, (s1L,s2L.toString,num))
      )
    })
    val vertices = res.filter(_._1 == 1).map(x => {
      val(xxk,(siteL,site,xx3)) = x;
      (siteL,site)
    }).distinct()
    val edges = res.filter(_._1 == 2).map(x => {
      val(xxk,(s1L,s2,num)) = x;
      (s1L,s2.toLong,num)
    })
    (vertices, edges)
  }

  /**
   * Main clustering pass: re-reads all daily pair counts, keeps pairs with
   * total count > 30 and sites with > 4 neighbours, scores each neighbour by a
   * TF-IDF-like weight, links every site to its single best neighbour (weight
   * > 1), and writes the connected components (one comma-separated site group
   * per line) to siteClass/res.
   */
  def processGraph(): Unit ={
    val log = sc.textFile(s"show/*")
      .repartition(sc.defaultParallelism * 3)
      .map(x=>x.split("\\\\t")).filter(x=>x.length == 3).map(x=>{val Array(s1,s2,num) = x;((s1,s2),num.toInt)})

    // Symmetric adjacency lists, restricted to well-connected sites.
    val data = log.reduceByKey(_+_).filter(_._2 > 30).flatMap(x=>{
      val((s1,s2),num) = x;
      Array(
        (s1,(s2,num)),
        (s2,(s1,num))
      )
    }).aggregateByKey(new ArrayBuffer[(String,Int)])(
      (a,v) => a += v,
      (a,b) => a ++= b
    ).filter(_._2.length > 4).cache()

    // Per-site totals broadcast for the weighting below.
    val verticelDic = sc.broadcast( data.map(x=>{
      val(s1,list) =x;
      var totalWeight = 0;
      val edgeCount = list.length
      list.foreach( p=> totalWeight += p._2 )
      (s1,(totalWeight,edgeCount))
    }).collectAsMap()
    )

    // Weight = (pair count / neighbour's total weight) * log(MaxInt / neighbour degree);
    // neighbours sorted best-first.
    val sortedRdd = data.map(x=>{
      val(site,list) =x;
      val sorted = list.map(p=>{
        val(s,num) = p;
        val (totalWeight,edgeCount) = verticelDic.value.getOrElse(s,(Int.MaxValue,Int.MaxValue))
        val w = (num.toDouble / totalWeight) * math.log( Int.MaxValue.toDouble / edgeCount )
        (s,w)
      }).sortWith((a,b) => a._2 > b._2)
      (site,sorted)
    })

    // Keep only each site's single best neighbour, and only if its weight > 1.
    val resData = sortedRdd.flatMap(x=>{
      val(site,list) = x;
      list.take(1).filter(p=>p._2 > 1D).map(p=>{
        ((site,p._1),p._2)
      })
    })

    val(vertices,edges) = prepareGragh( resData )

    val edgeRdd = edges.map(x=>{
      val (s1,s2,num) =x ;
      Edge(s1,s2,null)
    }).repartition(sc.defaultParallelism)

    val res = Graph.fromEdges(edgeRdd,null).connectedComponents().vertices
    //    res.map(_._2).distinct().count()
    //    siteDelDic.value.count(x=>true)

    // Debug: print the 50 largest components as (size, componentId).
    res.map(x=>(x._2,ArrayBuffer(x._1))).reduceByKey(_++=_).map(x=>(x._2.length,x._1)).sortByKey(false).take(50).foreach(println)

    res.join(vertices).map(x=>{
      val (sL,(cL,site)) =x;
      (cL,ArrayBuffer(site))
    }).reduceByKey(_++=_).map(x=>x._2.mkString(",")).saveAsTextFile("siteClass/res")

    /*
    Sample component sizes from a previous run, as (size, componentId):
    (72,-8925578992189951322)
    (66,-9171405986732911112)
    (54,-9104471626576158611)
    (48,-8863894869090580160)
    (47,-9125154861068097076)
    (41,-8903496469600426327)
    (38,-9087393912466866681)
    (32,-8203124030491385269)
    (32,-8841686522496213865)
    (30,-9147281699495493982)
    (25,-8722281529839030491)
    (25,-9059556491083049789)
    (25,-9024733272072633538)
    (24,-7233829474210095194)
    (21,-8203113167436391916)
    (21,-6643023523241620483)
    (20,-8995308617529765229)
    (19,-8958346478855326031)
    (18,-8926357782926982346)
    (17,-8626248322765934824)
    (17,-7508195520869170376)
    (16,-9135162308908727993)
    (15,-8649697749656082988)
    (15,-8551153477254068310)
    (15,-8817568393341188252)
     */

    // Debug: inspect the members of two specific components.
    res.filter(_._2 == -8925578992189951322L).join(vertices).take(100).foreach(println)
    res.filter(_._2 == -5027747965319986579L).join(vertices).take(100).foreach(println)
  }

  /**
   * Exploratory pass: dumps each site's full neighbour list to siteClass/res2
   * and prints a histogram of neighbour-count buckets (0-9, 10-19, ... , else).
   */
  def test1(): Unit ={
    val log = sc.textFile(s"show/*")
      .repartition(sc.defaultParallelism * 3)
      .filter(! _.contains("www.anonymouswebsite.com"))  //delete special website
      .map(x=>x.split("\\\\t")).filter(x=>x.length == 3).map(x=>{val Array(s1,s2,num) = x;((s1,s2),num.toInt)})

    val t1 = log.reduceByKey(_+_).flatMap(x=>{
      val((s1,s2),num) = x;
      Array(
        (s1,ArrayBuffer((s2,num))),
        (s2,ArrayBuffer((s1,num)))
      )
    }).reduceByKey(_++=_)

    t1.map(x=>{
      val(site,list)=x;
      site + "," +list.map(p=>Array(p._1,p._2).mkString(":")).mkString(",")
    }).saveAsTextFile("siteClass/res2")

    // Bucket upper bounds paired with their labels; Int.MaxValue catches the rest.
    val keyNum = Array(10,20,50,100,200,500,1000,2000,Int.MaxValue)
    val kn = keyNum zip Array(" 0-9"," 10-19"," 20-49"," 50-99"," 100-199"," 200-499"," 500-999","1000-1999","else")

    // len / bound == 0 selects the first bucket whose bound exceeds len.
    t1.map(x=>{
      val(site,list) =x;
      val len = list.length
      val k = kn.map(n=>(len/n._1,n._2)).filter(_._1 == 0).head._2
      (k,1)
    }).reduceByKey(_+_).sortByKey().map(x=>{
      val(span,num) =x;
      Array(span,num).mkString(",")
    }).take(200).foreach(println)
  }
}
| larry88/spark_da | src/main/scala/com/larry/da/parse/SiteClass.scala | Scala | gpl-2.0 | 6,284 |
package ru.freefry.furniture_factory.core
/**
 * Order for producing a single furniture unit.
 *
 * @author freefry
 */
trait UnitOrder {

  /** Unique id of the order. */
  def id: String

}
/** A furniture unit that has been produced against an order. */
trait ProducedUnit {

  /** The order this unit was produced for. */
  def order: UnitOrder

  /** Price of producing this unit. */
  def price: BigDecimal

}
/** Final assembled furniture product */
trait AssembledUnit {

  /** Price of the assembling operation itself. */
  def price: BigDecimal

  /** Units included in the final product. */
  def units: Seq[ProducedUnit]

  /**
   * Full cost of the product: the assembly price plus the price of every
   * included unit (equivalent to `price + units.map(_.price).sum`).
   *
   * @return full unit cost
   */
  def fullCost = units.foldLeft(price)((total, unit) => total + unit.price)
} | freefry/furniture-factory | src/main/scala/ru/freefry/furniture_factory/core/model.scala | Scala | apache-2.0 | 738 |
package org.coursera.courier.data
import javax.annotation.Generated
import com.linkedin.data.ByteString
import com.linkedin.data.DataMap
import com.linkedin.data.DataList
import com.linkedin.data.schema.MapDataSchema
import com.linkedin.data.schema.DataSchema
import com.linkedin.data.template.DataTemplate
import com.linkedin.data.template.DataTemplateUtil
import org.coursera.courier.companions.MapCompanion
import org.coursera.courier.templates.DataTemplates
import org.coursera.courier.templates.DataTemplates.DataConversion
import scala.collection.generic.CanBuildFrom
import scala.collection.immutable
import scala.collection.mutable
import scala.collection.JavaConverters._
import com.linkedin.data.template.Custom
import org.coursera.courier.codecs.InlineStringCodec
// Courier-generated data template (see @Generated): an immutable Scala Map view
// over a Pegasus DataMap with Boolean keys and String values. Regenerate from
// the schema rather than hand-editing.
@Generated(value = Array("BooleanToStringMap"), comments = "Courier Data Template.", date = "Fri Aug 14 14:51:38 PDT 2015")
final class BooleanToStringMap(private val dataMap: DataMap)
  extends immutable.Iterable[(Boolean, String)]
  with Map[Boolean, String]
  with immutable.MapLike[Boolean, String, immutable.Map[Boolean, String]]
  with DataTemplate[DataMap] {
  import BooleanToStringMap._

  // Lazily-decoded Scala view of the underlying DataMap; keys/values are
  // coerced from their wire representation on first access.
  private[this] lazy val map = dataMap.asScala.map { case (k, v) => coerceKeyInput(k) -> coerceInput(v) }.toMap

  // Decode a raw DataMap value into a String.
  private[this] def coerceInput(any: AnyRef): String = {
    DataTemplateUtil.coerceOutput(any, classOf[java.lang.String])
  }

  // Decode a wire-encoded key string back into a Boolean via the key schema.
  private[this] def coerceKeyInput(key: String): Boolean = {
    def coerceKeyDataInput(any: AnyRef): Boolean = {
      DataTemplateUtil.coerceOutput(any, classOf[java.lang.Boolean])
    }
    coerceKeyDataInput(InlineStringCodec.stringToData(key, KEY_SCHEMA))
  }

  override def get(key: Boolean): Option[String] = map.get(key)

  override def iterator: Iterator[(Boolean, String)] = map.iterator

  // Adding a String value returns another BooleanToStringMap (a fresh read-only
  // DataMap copy); any other value type falls back to a plain immutable Map.
  override def +[F >: String](kv: (Boolean, F)): Map[Boolean, F] = {
    val (key, value) = kv
    value match {
      case v: String =>
        val copy = dataMap.copy()
        copy.put(coerceKeyOutput(key), coerceOutput(v))
        copy.makeReadOnly()
        new BooleanToStringMap(copy)
      case _: Any =>
        (iterator ++ Iterator.single(kv)).toMap
    }
  }

  // Removal also copies the underlying DataMap; this instance is never mutated.
  override def -(key: Boolean): BooleanToStringMap = {
    val copy = dataMap.copy()
    copy.remove(coerceKeyOutput(key))
    copy.makeReadOnly()
    new BooleanToStringMap(copy)
  }

  override def schema(): DataSchema = BooleanToStringMap.SCHEMA

  override def data(): DataMap = dataMap

  // Safe to return `this`: the template is immutable.
  override def copy(): DataTemplate[DataMap] = this
}
// Companion for the generated BooleanToStringMap template: schemas, factories,
// a mutable Builder, and the key/value wire-encoding helpers.
object BooleanToStringMap extends MapCompanion[BooleanToStringMap] {
  // Pegasus schema of the whole map and of its (boolean) key type.
  val SCHEMA = DataTemplateUtil.parseSchema("""{"type":"map","values":"string","keys":"boolean"}""").asInstanceOf[MapDataSchema]
  val KEY_SCHEMA = DataTemplateUtil.parseSchema(""""boolean"""")

  val empty = BooleanToStringMap()

  def apply(elems: (Boolean, String)*): BooleanToStringMap = {
    BooleanToStringMap(elems.toMap)
  }

  def apply(map: Map[Boolean, String]): BooleanToStringMap = {
    new BooleanToStringMap(new DataMap(map.map { case (k, v) => coerceKeyOutput(k) -> coerceOutput(v) }.asJava))
  }

  /** Wraps an existing DataMap, making it immutable per `conversion`. */
  def build(dataMap: DataMap, conversion: DataConversion): BooleanToStringMap = {
    new BooleanToStringMap(DataTemplates.makeImmutable(dataMap, conversion))
  }

  def newBuilder = new DataBuilder()

  // Lets standard collection operations (map/filter/...) rebuild a
  // BooleanToStringMap instead of a generic Map.
  implicit val canBuildFrom = new CanBuildFrom[BooleanToStringMap, (Boolean, String), BooleanToStringMap] {
    def apply(from: BooleanToStringMap) = new DataBuilder(from)
    def apply() = newBuilder
  }

  // Mutable builder accumulating entries in a DataMap; made read-only in result().
  class DataBuilder(initial: BooleanToStringMap) extends mutable.Builder[(Boolean, String), BooleanToStringMap] {
    def this() = this(new BooleanToStringMap(new DataMap()))

    val entries = new DataMap(initial.data())

    def +=(kv: (Boolean, String)): this.type = {
      val (key, value) = kv
      entries.put(coerceKeyOutput(key), coerceOutput(value))
      this
    }

    def clear() = {
      entries.clear()
    }

    def result() = {
      entries.makeReadOnly()
      new BooleanToStringMap(entries)
    }
  }

  // Encode a String value into its wire representation.
  private def coerceOutput(value: String): AnyRef = {
    DataTemplateUtil.coerceInput(value, classOf[java.lang.String], classOf[java.lang.String])
  }

  // Encode a Boolean key into the inline-string wire form used by DataMap keys.
  private def coerceKeyOutput(key: Boolean): String = {
    def coerceKeyDataOutput(value: Boolean): AnyRef = {
      DataTemplateUtil.coerceInput(Boolean.box(value), classOf[java.lang.Boolean], classOf[java.lang.Boolean])
    }
    InlineStringCodec.dataToString(coerceKeyDataOutput(key))
  }

  implicit def wrap(map: Map[Boolean, String]): BooleanToStringMap = {
    BooleanToStringMap(map)
  }
}
| coursera/courier | scala/runtime/src/main/scala/org/coursera/courier/data/BooleanToStringMap.scala | Scala | apache-2.0 | 4,622 |
import org.scalatest._
// A ScalaTest FunSuite with no test cases defined.
class ScalaDays extends FunSuite
| xeno-by/dottyhost | tests/src/test/scala/macros/ScalaDays.scala | Scala | bsd-3-clause | 57 |
package com.github.mdr.mash.ns.os
import com.github.mdr.mash.classes.MashClass
import com.github.mdr.mash.ns.core.AnyClass
import jnr.constants.platform.linux.Signal
/** Mash class representing an operating-system signal. */
object SignalClass extends MashClass("os.Signal") {

  // Drop the first three characters of each jnr signal name
  // (e.g. SIGHUP -> HUP — assumes every name carries the 3-char "SIG" prefix).
  val Signals: Seq[String] = Signal.values.toSeq.map(sig => sig.name.drop(3))

  override def enumerationValues: Option[Seq[String]] = Some(Signals)

  override def summaryOpt = Some("A signal")

  override def parentOpt = Some(AnyClass)
} | mdr/mash | src/main/scala/com/github/mdr/mash/ns/os/SignalClass.scala | Scala | mit | 439 |
package algorithms.GraphAlgorithms
/**
* problem description: http://rosalind.info/problems/bip/
*/
object TestingBipartiteness {
sealed trait Color
case object Red extends Color
case object Blue extends Color
object SampleData {
val sample: List[String] =
List(
"2",
"",
"3 3",
"1 2",
"3 2",
"3 1",
"",
"4 3",
"1 4",
"3 1",
"1 2"
)
}
import scala.annotation.tailrec
import collection.mutable.{Map => MutableMap, Set => MutableSet}
import algorithms.Datastructures.Graph
import SampleData.sample
import utils.UtilityFunctions.{readInputData, writeListAsStringToFile}
import utils.GraphUtilityFunctions.{Node, readListOfGraphs}
val inputFileName: String = "/algorithms/datasets/rosalind_bip.txt"
def getData(isPractice: Boolean): List[Graph] = {
val data: List[String] = if (isPractice) sample else readInputData(inputFileName)
readListOfGraphs(data, isDirected = false)
}
def getNeighboursOfNodes(nodes: Set[Node],
adjacencyList: Map[Node, List[Node]],
coloring: MutableMap[Node, Color]): (Set[Node], Set[Node]) = {
val unColoredNeighbours: MutableSet[Node] = MutableSet()
val coloredNeighbours: MutableSet[Node] = MutableSet()
for {
node <- nodes
neighbours: List[Node] = adjacencyList.getOrElse(node, Nil)
neighbour <- neighbours
} {
if (coloring.contains(neighbour)) coloredNeighbours += neighbour
else unColoredNeighbours += neighbour
}
(unColoredNeighbours.toSet, coloredNeighbours.toSet)
}
def changeColor(color: Color): Color = color match {
case Red => Blue
case Blue => Red
}
def updateColoredNodes(uncoloredNodes: Set[Node], color: Color, coloring: MutableMap[Node, Color]): Unit =
for { node <- uncoloredNodes} coloring += (node -> color)
def isColoringConsistent(color: Color, coloredNeighbours: Set[Node], coloring: MutableMap[Node, Color]): Boolean =
coloredNeighbours.forall(coloredNode => coloring(coloredNode) == color)
def colorComponent(startingNode: Node,
adjacencyList: Map[Node, List[Node]],
coloring: MutableMap[Node, Color]): Boolean = {
val currentColor: Color = Red
coloring += (startingNode -> currentColor)
val (neighbours, _): (Set[Node], Set[Node]) = getNeighboursOfNodes(Set(startingNode), adjacencyList, coloring)
@tailrec
def loop(unColoredNeighbours: Set[Node], previousColor: Color): Boolean = {
if (unColoredNeighbours.isEmpty) true
else {
val currentColor: Color = changeColor(previousColor)
updateColoredNodes(unColoredNeighbours, currentColor, coloring)
val (unvisitedNeighbours, coloredNeighbours): (Set[Node], Set[Node]) =
getNeighboursOfNodes(unColoredNeighbours, adjacencyList, coloring)
if (!isColoringConsistent(changeColor(currentColor), coloredNeighbours, coloring)) false
else loop(unvisitedNeighbours, currentColor)
}
}
loop(neighbours, currentColor)
}
def isBipartite(graph: Graph): Boolean = {
val coloring: MutableMap[Node, Color] = MutableMap()
def findUncoloredNode(): Option[Node] = (1 to graph.numberOfNodes).find(node => !coloring.contains(node))
@tailrec
def loop(): Boolean =
findUncoloredNode() match {
case None => true
case Some(startingNode) =>
val isComponentConsistentlyColored: Boolean = colorComponent(startingNode, graph.adjacencyList, coloring)
if (!isComponentConsistentlyColored) false
else loop()
}
loop()
}
def testForBipartiteness(graphs: List[Graph]): List[Boolean] = graphs.map(isBipartite)
def main(args: Array[String]): Unit = {
val graphs: List[Graph] = getData(isPractice = false)
val result: List[Int] = testForBipartiteness(graphs).map(if (_) 1 else -1)
writeListAsStringToFile(result)
}
}
| ghostrider77/Bioinformatics | Bioinformatics/src/main/scala-2.11/algorithms/GraphAlgorithms/TestingBipartiteness.scala | Scala | mit | 4,137 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.shell.util
final class PlusFourChild
/**
* A program to be run in a child process.
* This parses its argument as an integer, adds four to the value,
* and returns this sum as its exit status code.
*/
object PlusFourChild {
def main(argv: Array[String]) {
val input: Int = argv(0).toInt
val sum: Int = input + 4
System.out.println("Input is: " + input)
System.out.println("Output is: " + sum)
// Return the sum as an exit status.
// Note that if this were to run in the same process as unit tests,
// a non-zero exit status would cause maven-surefire-plugin to fail.
// The unit tests can only succeed if this is run a forked JVM.
System.exit(sum)
}
}
| kijiproject/kiji-schema-shell | src/test/scala/org/kiji/schema/shell/util/PlusFourChild.scala | Scala | apache-2.0 | 1,441 |
package com.mesosphere.universe.v3.model
import com.mesosphere.universe
import java.nio.ByteBuffer
import org.scalatest.FreeSpec
import org.scalatest.Matchers
import scala.util.Random
/**
 * Checks that PackageDefinition and its V2/V3 variants sort deterministically.
 * `input` is written in the expected sort order (release version ascends
 * within each package name — TODO confirm the exact Ordering semantics);
 * each test shuffles it and asserts that sorting restores the original order.
 */
class V3PackageSpec extends FreeSpec with Matchers {

  // Expected sorted order of (name, version, releaseVersion) triples.
  val input = List(
    // scalastyle:off magic.number
    ("pkg1", Version("1.0-1"), ReleaseVersion(1)),
    ("pkg1", Version("1.0-2"), ReleaseVersion(2)),
    ("pkg1", Version("1.0-3"), ReleaseVersion(3)),
    ("pkg2", Version("1.0"), ReleaseVersion(1)),
    ("pkg2", Version("2.0"), ReleaseVersion(2)),
    ("pkg3", Version("1.0"), ReleaseVersion(3)),
    ("pkg4", Version("1.0"), ReleaseVersion(4)),
    ("pkg5", Version("1.0-1"), ReleaseVersion(1)),
    ("pkg5", Version("2.0-1"), ReleaseVersion(2)),
    ("pkg5", Version("1.1-1"), ReleaseVersion(3)),
    ("pkg6", Version("0.0.0.1"), ReleaseVersion(1)),
    ("pkg6", Version("0.0.0.5"), ReleaseVersion(2)),
    ("pkg6", Version("0.0.0.2"), ReleaseVersion(3)),
    ("pkg7", Version("0.0.1"), ReleaseVersion(1)),
    ("pkg7", Version("0.0.4.2"), ReleaseVersion(10))
    // scalastyle:on magic.number
  )

  "V3Package" - {
    "Ordering should work" in {
      val expected = input.map(v3Package(_))
      val actual = Random.shuffle(expected).sorted
      actual shouldBe expected
    }
  }

  "V2Package" - {
    "Ordering should work" in {
      val expected = input.map(v2Package(_))
      val actual = Random.shuffle(expected).sorted
      actual shouldBe expected
    }
  }

  "PackageDefinition" - {
    "Ordering should work" in {
      val expected = input.map(packageDefinition(_))
      val actual = Random.shuffle(expected).sorted
      actual shouldBe expected
    }
  }

  // Builds a V3Package from a triple; maintainer/description are placeholders.
  def v3Package(tuple: (String, Version, ReleaseVersion)): V3Package = {
    val (name, version, relVer) = tuple
    V3Package(
      V3PackagingVersion,
      name,
      version,
      relVer,
      "does@not.matter",
      "doesn't matter"
    )
  }

  // Builds a V2Package from a triple; the Marathon template is an empty buffer.
  def v2Package(tuple: (String, Version, ReleaseVersion)): V2Package = {
    val (name, version, relVer) = tuple
    V2Package(
      V2PackagingVersion,
      name,
      version,
      relVer,
      "does@not.matter",
      "doesn't matter",
      Marathon(ByteBuffer.allocate(0))
    )
  }

  // Randomly picks the V2 or V3 representation so the mixed ordering is exercised.
  def packageDefinition(tuple: (String, Version, ReleaseVersion)): universe.v4.model.PackageDefinition = {
    if (Random.nextBoolean) {
      v2Package(tuple)
    } else {
      v3Package(tuple)
    }
  }
}
| dcos/cosmos | cosmos-test-common/src/test/scala/com/mesosphere/universe/v3/model/V3PackageSpec.scala | Scala | apache-2.0 | 2,442 |
package edu.gemini.ui.workspace.scala
import edu.gemini.ui.workspace._
/**
 * Scala-friendly wrapper around an [[IShellContext]], exposing its Java-style
 * accessors as properties.
 *
 * @param context the underlying shell context being wrapped
 * @tparam A the model type threaded through to [[RichShell]]
 */
class RichShellContext[A](val context:IShellContext) {

  // The IShell -> RichShell[A] conversion is implicit — presumably supplied by
  // the enclosing package; TODO confirm.
  def shell:RichShell[A] = context.getShell

  // Write-only property: reading the title is deliberately unsupported and
  // always throws; setting delegates to the underlying context.
  def title:String = sys.error("Not implemented")

  def title_=(s:String) = context.setTitle(s)

  def actionManager = context.getActionManager // for now

  def workspace = context.getWorkspace()
} | arturog8m/ocs | bundle/edu.gemini.ui.workspace/src/main/scala/edu/gemini/ui/workspace/scala/RichShellContext.scala | Scala | bsd-3-clause | 378 |
import scala.reflect.macros.blackbox.Context
object Impls {
  // Macro implementation: prints three compiler-generated fresh names (no
  // prefix, a String prefix, and a TypeName prefix) and then aborts the
  // expansion with the message "blargh", so any use of the macro fails to compile.
  def impl(c: Context) = {
    import c.universe._
    println(c.fresh())
    println(c.fresh("qwe"))
    println(c.fresh(TypeName("qwe")))
    c.abort(NoPosition, "blargh")
  }
}
/** Macro definition whose expansion is provided by [[Impls.impl]]. */
object Macros {
  def foo = macro Impls.impl
} | yusuke2255/dotty | tests/disabled/macro/run/macro-abort-fresh/Macros_1.scala | Scala | bsd-3-clause | 288 |
package org.sofa.math
import scala.language.implicitConversions
//===================================================
/** Factory methods for arbitrary-size [[Vector]] instances. */
object Vector {
  /** New vector containing the given `values`, in order. */
  def apply(values:Double*) = {
    val result = new Vector(values.size)
    result.copy(values)
    result
  }
  /** New vector with the same size and components as `other`. */
  def apply(other:NumberSeq) = {
    val result = new Vector(other.size)
    result.copy(other)
    result
  }
  /** New vector of `size` components (zero-initialised by the JVM array). */
  def apply(size:Int) = new Vector(size)
}
/** An arbitrary-size vector of doubles backed by an Array[Double]. */
class Vector(size:Int) extends NumberSeq {
  type ReturnType = Vector

  // Component storage; new Array[Double] is zero-filled.
  protected[math] final val data = new Array[Double](size)

  /** Copy constructor: same size and components as `other`. */
  def this(other:NumberSeq) = { this(other.size); copy(other) }

  def newInstance = new Vector(size)
}
//===================================================
/** Factory methods and implicit conversions for [[Vector2]]. */
object Vector2 {
    /** Implicitly view a vector as its (x, y) pair. */
    implicit def vector2ToTuple(v:Vector2):(Double, Double) = (v.x, v.y)

    /** Vector with the given components. */
    def apply(x:Double, y:Double) = new Vector2(x, y)

    /** Null vector. */
    def apply() = new Vector2()

    /** Vector going from point `from` to point `to`. */
    def apply(from:Point2, to:Point2) = new Vector2(to.x - from.x, to.y - from.y)

    /** Vector from the first two components of `other`; missing components default to zero. */
    def apply(other:NumberSeq) = other.size match {
        case n if n < 1 => new Vector2()   // degenerate sequence
        case 1          => new Vector2(other.data(0), 0)
        case _          => new Vector2(other.data(0), other.data(1))
    }

    /** Vector from an (x, y) pair. */
    def apply(xy:(Double, Double)) = new Vector2(xy._1, xy._2)

    /** Vector with both components set to `fill`. */
    def apply(fill:Double) = new Vector2(fill, fill)
}
/** A 2-component vector of doubles. */
class Vector2(xInit:Double, yInit:Double) extends NumberSeq2 {
    type ReturnType = Vector2

    // Component storage: (x, y).
    protected[math] final val data = Array[Double](xInit, yInit)

    /** Copy constructor. */
    def this(other:Vector2) = this(other.x, other.y)

    /** Null vector. */
    def this() = this(0, 0)

    def newInstance = new Vector2

    override final def size:Int = 2
}
//===================================================
/** Factory methods and implicit conversions for [[Vector3]]. */
object Vector3 {
    /** Implicitly view a vector as its (x, y, z) triple. */
    implicit def vector3ToTuple(v:Vector3):(Double, Double, Double) = (v.x, v.y, v.z)

    /** Vector with the given components. */
    def apply(x:Double, y:Double, z:Double) = new Vector3(x, y, z)

    /** Null vector. */
    def apply() = new Vector3()

    /** Vector going from point `from` to point `to`. */
    def apply(from:Point3, to:Point3) = new Vector3(to.data(0) - from.data(0), to.data(1) - from.data(1), to.data(2) - from.data(2))

    /** Vector from the first three components of `other`; missing components default to zero. */
    def apply(other:NumberSeq) = other.size match {
        case n if n < 1 => new Vector3()   // degenerate sequence
        case 1          => new Vector3(other.data(0), 0, 0)
        case 2          => new Vector3(other.data(0), other.data(1), 0)
        case _          => new Vector3(other.data(0), other.data(1), other.data(2))
    }

    /** Copy of `other`. */
    def apply(other:Vector3) = new Vector3(other.data(0), other.data(1), other.data(2))

    /** Vector from an (x, y, z) triple. */
    def apply(xyz:(Double, Double, Double)) = new Vector3(xyz._1, xyz._2, xyz._3)

    /** Vector from an (x, y) pair plus a z component. */
    def apply(xy:(Double, Double), z:Double) = new Vector3(xy._1, xy._2, z)

    /** Vector from an x component plus a (y, z) pair. */
    def apply(x:Double, yz:(Double, Double)) = new Vector3(x, yz._1, yz._2)

    /** Vector with all three components set to `fill`. */
    def apply(fill:Double) = new Vector3(fill, fill, fill)
}
/**
 * A 3-component vector of doubles with both in-place mutating operations
 * (cross, normalize, ...) and copying operators (+, -, *, /, X).
 */
class Vector3(xInit:Double, yInit:Double, zInit:Double) extends NumberSeq3 {
    /** Set this to the cross product of this and vector (`x`, `y`, `z`).
      *
      * This operation works in place, modifying this vector.
      */
    def cross(x:Double, y:Double, z:Double) {
        var xx = 0.0
        var yy = 0.0

        // Temporaries keep the original components alive until all three are computed.
        xx = (data(1) * z) - (data(2) * y);
        yy = (data(2) * x) - (data(0) * z);
        data(2) = (data(0) * y) - (data(1) * x);
        data(0) = xx
        data(1) = yy
    }

    /** Set this as the vector between points `from` and `to`. */
    def set(from:Point3, to:Point3) {
        val f = from.data
        val t = to.data

        data(0) = t(0)-f(0)
        data(1) = t(1)-f(1)
        data(2) = t(2)-f(2)
    }

    /** Set this to the cross product of this and `other`.
      *
      * This operation works in place, modifying this vector.
      */
    def cross(other:Vector3):ReturnType = {
        var xx = 0.0
        var yy = 0.0
        val o = other.data

        xx = (data(1) * o(2)) - (data(2) * o(1));
        yy = (data(2) * o(0)) - (data(0) * o(2));
        data(2) = (data(0) * o(1)) - (data(1) * o(0));
        data(0) = xx
        data(1) = yy

        this
    }

    /** Result of the cross product between this and an `other` vector.
      *
      * @return A new vector result of the cross product.
      */
    def X(other:Vector3):ReturnType = newClone.cross(other).asInstanceOf[ReturnType]

    /** Copy the three components of `other` into this, in place. */
    def copy(other:Vector3) {
        // Much faster than original on n elements.
        val o = other.data

        data(0) = o(0)
        data(1) = o(1)
        data(2) = o(2)
    }

    /** Euclidean length of this vector. */
    override def norm:Double = math.sqrt(data(0)*data(0) + data(1)*data(1) + data(2)*data(2))

    /** Scale this vector in place to unit length; returns the previous length. */
    override def normalize():Double = {
        // Much faster than original on n elements.
        val len = norm

        data(0) /= len
        data(1) /= len
        data(2) /= len

        len
    }

    /** Switch axes from the blender convention. The x of remains the one of blender.
      * However the y becomes z of blender and the z become the -y of blender. In
      * other terms:
      *
      *   x = xb
      *   y = zb
      *   z = -yb
      *
      * When x, y, z are our coordinates and xb, yb, zb the ones of blender. */
    def fromBlender() {
        val z = -data(1)
        data(1) = data(2)
        data(2) = z
    }

    // Copying arithmetic operators; each allocates a new Vector3 and mutates it.
    def +(other:Vector3):ReturnType = (new Vector3(data(0), data(1), data(2))).addBy(other)	// Faster than using apply
    override def +(value:Double):ReturnType = (new Vector3(data(0), data(1), data(2))).addBy(value)	// Faster than using apply
    def -(other:Vector3):ReturnType = (new Vector3(data(0), data(1), data(2))).subBy(other)	// Faster than using apply
    override def -(value:Double):ReturnType = (new Vector3(data(0), data(1), data(2))).subBy(value)	// Faster than using apply
    def *(other:Vector3):ReturnType = (new Vector3(data(0), data(1), data(2))).multBy(other)	// Faster than using apply
    override def *(value:Double):ReturnType = (new Vector3(data(0), data(1), data(2))).multBy(value)	// Faster than using apply
    def /(other:Vector3):ReturnType = (new Vector3(data(0), data(1), data(2))).divBy(other)	// Faster than using apply
    override def /(value:Double):ReturnType = (new Vector3(data(0), data(1), data(2))).divBy(value)	// Faster than using apply

    /** Dot product of this and `other`. */
    def dot(other:Vector3):Double = {
        // Much faster than original on n elements.
        val o = other.data
        data(0)*o(0) + data(1)*o(1) + data(2)*o(2)
    }

    /** Compute the angle in radians between the two vectors. */
    def angle(other:Vector3):Double = {
        // Works on normalised copies so neither operand is modified.
        val v0 = Vector3(other)
        val v1 = Vector3(this)

        v0.normalize
        v1.normalize

        math.acos(v0.dot(v1))
    }

    type ReturnType = Vector3

    // Component storage: (x, y, z).
    protected[math] final val data = Array[Double](xInit, yInit, zInit)

    /** Copy constructor. */
    def this(other:Vector3) = this(other.x, other.y, other.z)

    /** Null vector. */
    def this() = this(0, 0, 0)

    def newInstance = new Vector3

    override final def size:Int = 3
}
//===================================================
/** Factory methods and implicit conversions for [[Vector4]]. */
object Vector4 {
    /** Implicitly view a vector as its (x, y, z, w) quadruple. */
    implicit def vector4ToTuple(v:Vector4):(Double, Double, Double, Double) = (v.x, v.y, v.z, v.w)

    /** Vector with the given components. */
    def apply(x:Double, y:Double, z:Double, w:Double) = new Vector4(x, y, z, w)

    /** Null vector. */
    def apply() = new Vector4()

    /** Vector from the first four components of `other`; missing components default to zero. */
    def apply(other:NumberSeq) = other.size match {
        case n if n < 1 => new Vector4()   // degenerate sequence
        case 1          => new Vector4(other.data(0), 0, 0, 0)
        case 2          => new Vector4(other.data(0), other.data(1), 0, 0)
        case 3          => new Vector4(other.data(0), other.data(1), other.data(2), 0)
        case _          => new Vector4(other.data(0), other.data(1), other.data(2), other.data(3))
    }

    /** Vector from an (x, y, z, w) quadruple. */
    def apply(xyzw:(Double, Double, Double, Double)) = new Vector4(xyzw._1, xyzw._2, xyzw._3, xyzw._4)

    /** Vector from an (x, y, z) triple plus a w component. */
    def apply(xyz:(Double, Double, Double), w:Double) = new Vector4(xyz._1, xyz._2, xyz._3, w)

    /** Vector from an (x, y) pair plus a (z, w) pair. */
    def apply(xy:(Double, Double), zw:(Double,Double)) = new Vector4(xy._1, xy._2, zw._1, zw._2)

    /** Vector from an x component, a (y, z) pair and a w component. */
    def apply(x:Double, yz:(Double, Double), w:Double) = new Vector4(x, yz._1, yz._2, w)

    /** Vector from an x component plus a (y, z, w) triple. */
    def apply(x:Double, yzw:(Double, Double, Double)) = new Vector4(x, yzw._1, yzw._2, yzw._3)

    /** Vector with all four components set to `fill`. */
    def apply(fill:Double) = new Vector4(fill, fill, fill, fill)
}
/** A 4-component vector of doubles. */
class Vector4(xInit:Double, yInit:Double, zInit:Double, wInit:Double) extends NumberSeq4 {
    type ReturnType = Vector4

    // Component storage: (x, y, z, w).
    protected[math] final val data = Array[Double](xInit, yInit, zInit, wInit)

    /** Copy constructor. */
    def this(other:Vector4) = this(other.x, other.y, other.z, other.w)

    /** Null vector. */
    def this() = this(0, 0, 0, 0)

    def newInstance = new Vector4

    override final def size:Int = 4
} | Ant01n3/ReliefExtruder | src/main/scala/org/sofa/math/Vector.scala | Scala | gpl-2.0 | 8,531 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.booleantype
import java.io.File
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
/**
 * Exercises boolean columns under the ENABLE_AUTO_LOAD_MERGE (auto-compaction)
 * property. With merging disabled, every insert/load keeps its own segment and
 * no compacted segment id ("0.1") appears; with merging enabled, a compacted
 * segment "0.1" shows up and the segment listing grows accordingly.
 */
class BooleanDataTypesParameterTest extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
  val filePath: String = s"$resourcesPath/globalsort"
  val file1: String = resourcesPath + "/globalsort/sample1.csv"
  val file2: String = resourcesPath + "/globalsort/sample2.csv"
  val file3: String = resourcesPath + "/globalsort/sample3.csv"

  // Recreate a fresh single-boolean-column table before every test.
  override def beforeEach(): Unit = {
    sql("drop table if exists boolean_one_column")
    sql("drop table if exists boolean_table")
    sql(
      s"""CREATE TABLE if not exists boolean_one_column(
         |booleanField BOOLEAN)
         |STORED AS carbondata
         |""".stripMargin)
  }

  // Project root, used to locate the CSV fixtures under integration/spark.
  val rootPath = new File(this.getClass.getResource("/").getPath
    + "../../../..").getCanonicalPath

  // Pin the compaction threshold to its default for the whole suite.
  override def beforeAll(): Unit = {
    CarbonProperties.getInstance().
      addProperty(CarbonCommonConstants.COMPACTION_SEGMENT_LEVEL_THRESHOLD,
        CarbonCommonConstants.DEFAULT_SEGMENT_LEVEL_THRESHOLD)
  }

  override def afterAll(): Unit = {
    sql("drop table if exists boolean_one_column")
    sql("drop table if exists boolean_table")
  }

  // 14 single-row inserts mixing valid and invalid boolean spellings. The
  // expected answer contains 5 true, 5 false and 4 null rows; merging is off,
  // so all 14 segments survive and no compacted segment "0.1" exists.
  test("ENABLE_AUTO_LOAD_MERGE: false, and Inserting and selecting table: one column boolean and many rows, should support") {
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "false")
    sql("insert into boolean_one_column values(true)")
    sql("insert into boolean_one_column values(True)")
    sql("insert into boolean_one_column values(TRUE)")
    sql("insert into boolean_one_column values('true')")
    sql("insert into boolean_one_column values(False)")
    sql("insert into boolean_one_column values(false)")
    sql("insert into boolean_one_column values(FALSE)")
    sql("insert into boolean_one_column values('false')")
    sql("insert into boolean_one_column values('tr')")
    sql("insert into boolean_one_column values(null)")
    sql("insert into boolean_one_column values('truEe')")
    sql("insert into boolean_one_column values('falsEe')")
    sql("insert into boolean_one_column values('t')")
    sql("insert into boolean_one_column values('f')")

    checkAnswer(
      sql("select * from boolean_one_column"),
      Seq(Row(true), Row(true), Row(true), Row(true),
        Row(false), Row(false), Row(false), Row(false),
        Row(true), Row(false), Row(null), Row(null), Row(null), Row(null))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_one_column")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(!SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 14)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }

  // Same 14 inserts with merging on: query results are identical, but the
  // segment listing now includes compacted segment "0.1" and grows to 18 rows.
  test("ENABLE_AUTO_LOAD_MERGE: true, and Inserting and selecting table: one column boolean and many rows, should support") {
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true")
    sql("insert into boolean_one_column values(true)")
    sql("insert into boolean_one_column values(True)")
    sql("insert into boolean_one_column values(TRUE)")
    sql("insert into boolean_one_column values('true')")
    sql("insert into boolean_one_column values(False)")
    sql("insert into boolean_one_column values(false)")
    sql("insert into boolean_one_column values(FALSE)")
    sql("insert into boolean_one_column values('false')")
    sql("insert into boolean_one_column values('tr')")
    sql("insert into boolean_one_column values(null)")
    sql("insert into boolean_one_column values('truEe')")
    sql("insert into boolean_one_column values('falsEe')")
    sql("insert into boolean_one_column values('t')")
    sql("insert into boolean_one_column values('f')")

    checkAnswer(
      sql("select * from boolean_one_column"),
      Seq(Row(true), Row(true), Row(true), Row(true),
        Row(false), Row(false), Row(false), Row(false),
        Row(true), Row(false), Row(null), Row(null), Row(null), Row(null))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_one_column")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 18)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }

  // CSV load of a wide schema (two boolean columns) four times with merging
  // off: 4 x 10 rows and 4 separate segments, no compacted "0.1" segment.
  test("ENABLE_AUTO_LOAD_MERGE: false, and Loading table: support boolean and other data type") {
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "false")
    sql(
      s"""
         | CREATE TABLE boolean_table(
         | shortField SHORT,
         | booleanField BOOLEAN,
         | intField INT,
         | bigintField LONG,
         | doubleField DOUBLE,
         | stringField STRING,
         | timestampField TIMESTAMP,
         | decimalField DECIMAL(18,2),
         | dateField DATE,
         | charField CHAR(5),
         | floatField FLOAT,
         | complexData ARRAY<STRING>,
         | booleanField2 BOOLEAN
         | )
         | STORED AS carbondata
         | TBLPROPERTIES('sort_columns'='')
       """.stripMargin)

    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBoolean.csv"
    for (i <- 0 until 4) {
      sql(
        s"""
           | LOAD DATA LOCAL INPATH '${storeLocation}'
           | INTO TABLE boolean_table
           | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
         """.stripMargin)
    }

    checkAnswer(
      sql("select count(*) from boolean_table"),
      Seq(Row(40))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_table")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(!SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 4)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }

  // Same load repeated with merging on: a compacted segment "0.1" is expected,
  // so SHOW SEGMENTS returns 5 entries for 4 loads.
  test("ENABLE_AUTO_LOAD_MERGE: true, and Loading table: support boolean and other data type") {
    // TODO: marked unfinished by the author — note the CREATE TABLE below omits
    // complexData while the FILEHEADER option still lists it; verify intended.
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true")
    sql(
      s"""
         | CREATE TABLE boolean_table(
         | shortField SHORT,
         | booleanField BOOLEAN,
         | intField INT,
         | bigintField LONG,
         | doubleField DOUBLE,
         | stringField STRING,
         | timestampField TIMESTAMP,
         | decimalField DECIMAL(18,2),
         | dateField DATE,
         | charField CHAR(5),
         | floatField FLOAT,
         | booleanField2 BOOLEAN
         | )
         | STORED AS carbondata
         | TBLPROPERTIES('sort_columns'='')
       """.stripMargin)

    val storeLocation = s"$rootPath/integration/spark/src/test/resources/bool/supportBooleanTwoBooleanColumns.csv"
    for (i <- 0 until 4) {
      sql(
        s"""
           | LOAD DATA LOCAL INPATH '${storeLocation}'
           | INTO TABLE boolean_table
           | options('FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2')
         """.stripMargin)
    }

    checkAnswer(
      sql("select count(*) from boolean_table"),
      Seq(Row(40))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_table")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 5)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }

  // Same scenario as the merge-off insert test, but with the boolean column as
  // a GLOBAL_SORT sort column: results and segment counts must be unchanged.
  test("ENABLE_AUTO_LOAD_MERGE: false, and sort_columns is boolean") {
    sql("drop table if exists boolean_one_column")
    sql(
      s"""CREATE TABLE if not exists boolean_one_column(
         |booleanField BOOLEAN)
         |STORED AS carbondata
         |TBLPROPERTIES('sort_columns'='booleanField','SORT_SCOPE'='GLOBAL_SORT')
         |""".stripMargin)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "false")
    sql("insert into boolean_one_column values(true)")
    sql("insert into boolean_one_column values(True)")
    sql("insert into boolean_one_column values(TRUE)")
    sql("insert into boolean_one_column values('true')")
    sql("insert into boolean_one_column values(False)")
    sql("insert into boolean_one_column values(false)")
    sql("insert into boolean_one_column values(FALSE)")
    sql("insert into boolean_one_column values('false')")
    sql("insert into boolean_one_column values('tr')")
    sql("insert into boolean_one_column values(null)")
    sql("insert into boolean_one_column values('truEe')")
    sql("insert into boolean_one_column values('falsEe')")
    sql("insert into boolean_one_column values('t')")
    sql("insert into boolean_one_column values('f')")

    checkAnswer(
      sql("select * from boolean_one_column"),
      Seq(Row(true), Row(true), Row(true), Row(true),
        Row(false), Row(false), Row(false), Row(false),
        Row(true), Row(false), Row(null), Row(null), Row(null), Row(null))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_one_column")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(!SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 14)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }

  // Merge-on variant of the GLOBAL_SORT boolean sort-column test: compacted
  // segment "0.1" appears and the listing grows to 18 entries.
  test("ENABLE_AUTO_LOAD_MERGE: true, and sort_columns is boolean") {
    sql("drop table if exists boolean_one_column")
    sql(
      s"""CREATE TABLE if not exists boolean_one_column(
         |booleanField BOOLEAN)
         |STORED AS carbondata
         |TBLPROPERTIES('sort_columns'='booleanField','SORT_SCOPE'='GLOBAL_SORT')
         |""".stripMargin)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE, "true")
    sql("insert into boolean_one_column values(true)")
    sql("insert into boolean_one_column values(True)")
    sql("insert into boolean_one_column values(TRUE)")
    sql("insert into boolean_one_column values('true')")
    sql("insert into boolean_one_column values(False)")
    sql("insert into boolean_one_column values(false)")
    sql("insert into boolean_one_column values(FALSE)")
    sql("insert into boolean_one_column values('false')")
    sql("insert into boolean_one_column values('tr')")
    sql("insert into boolean_one_column values(null)")
    sql("insert into boolean_one_column values('truEe')")
    sql("insert into boolean_one_column values('falsEe')")
    sql("insert into boolean_one_column values('t')")
    sql("insert into boolean_one_column values('f')")

    checkAnswer(
      sql("select * from boolean_one_column"),
      Seq(Row(true), Row(true), Row(true), Row(true),
        Row(false), Row(false), Row(false), Row(false),
        Row(true), Row(false), Row(null), Row(null), Row(null), Row(null))
    )

    val segments = sql("SHOW SEGMENTS FOR TABLE boolean_one_column")
    val SegmentSequenceIds = segments.collect().map { each => (each.toSeq) (0) }
    assert(SegmentSequenceIds.contains("0.1"))
    assert(SegmentSequenceIds.length == 18)
    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.ENABLE_AUTO_LOAD_MERGE,
      CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
  }
}
| jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesParameterTest.scala | Scala | apache-2.0 | 12,852 |
object InfixApply {
  // IDE type-inference test fixture: the /*start*/.../*end*/ markers delimit
  // the expression under test, and the expected result type appears in the
  // trailing comment at the end of the file. Do not alter the marked code.
  class Query[T] {
    // Identity apply, so `q(x)` has the element type T.
    def apply(x: T): T = x
  }
  // Body returns null; only the declared type Query[A] matters for inference.
  def foo[A] : Query[A] = null
  /*start*/InfixApply foo 1/*end*/
}
//Int | ilinum/intellij-scala | testdata/typeInference/bugs5/InfixApply.scala | Scala | apache-2.0 | 145 |
package ui.shader.builder
import ui.shader.builder.types.GlType
import ui.shader.builder.value.GlValue
/**
 * A GLSL value that renders its wrapped value surrounded by parentheses,
 * e.g. to force evaluation order inside a larger generated expression.
 */
class GlBraces[T <: GlType](val glValue: GlValue[T]) extends GlValue[T]{
  /** GLSL source for the wrapped value, enclosed in "(" and ")". */
  def toGlsl: String = "(" + glValue.toGlsl + ")"
}
/** Factory mirroring the [[GlBraces]] constructor. */
object GlBraces {
  def apply[T <: GlType](glValue: GlValue[T]): GlBraces[T] = new GlBraces[T](glValue)
}
| gvatn/play-scalajs-webgl-spark | client/src/main/scala/ui/shader/builder/GlBraces.scala | Scala | mit | 368 |
package spark.scheduler
import spark._
import java.io._
import util.{MetadataCleaner, TimeStampedHashMap}
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
private[spark] object ResultTask {

  // A simple map between the stage id and the serialized byte array of its
  // (rdd, func) pair. Serves as a cache because serialization can be
  // expensive on the master node when it launches thousands of tasks
  // belonging to the same stage.
  val serializedInfoCache = new TimeStampedHashMap[Int, Array[Byte]]

  // Periodically evicts stale entries from the cache.
  val metadataCleaner = new MetadataCleaner("ResultTask", serializedInfoCache.clearOldValues)

  /**
   * Serializes the (rdd, func) pair for a stage, gzip-compressed, caching the
   * bytes so every task of the same stage reuses the same array.
   *
   * @param stageId cache key; all tasks of a stage share one entry
   * @param rdd     the final RDD the result function runs on
   * @param func    the user result function applied to each partition
   * @return the cached or freshly produced serialized bytes
   */
  def serializeInfo(stageId: Int, rdd: RDD[_], func: (TaskContext, Iterator[_]) => _): Array[Byte] = {
    synchronized {
      serializedInfoCache.get(stageId) match {
        case Some(cached) =>
          cached
        case None =>
          val out = new ByteArrayOutputStream
          val ser = SparkEnv.get.closureSerializer.newInstance
          val objOut = ser.serializeStream(new GZIPOutputStream(out))
          try {
            objOut.writeObject(rdd)
            objOut.writeObject(func)
          } finally {
            // Close even on serialization failure; also finishes the gzip stream.
            objOut.close()
          }
          val bytes = out.toByteArray
          serializedInfoCache.put(stageId, bytes)
          bytes
      }
    }
  }

  /**
   * Deserializes bytes produced by [[serializeInfo]] back into the
   * (rdd, func) pair.
   */
  def deserializeInfo(stageId: Int, bytes: Array[Byte]): (RDD[_], (TaskContext, Iterator[_]) => _) = {
    synchronized {
      val in = new GZIPInputStream(new ByteArrayInputStream(bytes))
      val ser = SparkEnv.get.closureSerializer.newInstance
      val objIn = ser.deserializeStream(in)
      try {
        val rdd = objIn.readObject().asInstanceOf[RDD[_]]
        val func = objIn.readObject().asInstanceOf[(TaskContext, Iterator[_]) => _]
        (rdd, func)
      } finally {
        objIn.close()
      }
    }
  }

  /** Drops every cached serialized stage info entry. */
  def clearCache() {
    synchronized {
      serializedInfoCache.clear()
    }
  }
}
/**
 * A task that applies the user's result function `func` to one partition of
 * the final RDD and returns the value. Serialized via [[java.io.Externalizable]]
 * using the shared per-stage byte cache in the companion object.
 */
private[spark] class ResultTask[T, U](
    stageId: Int,
    var rdd: RDD[T],
    var func: (TaskContext, Iterator[T]) => U,
    var partition: Int,
    @transient locs: Seq[String],
    val outputId: Int)
  extends Task[U](stageId) with Externalizable {

  // No-arg constructor required by the Externalizable contract; real state is
  // filled in by readExternal.
  def this() = this(0, null, null, 0, null, 0)

  // null when built via the no-arg constructor; restored in readExternal.
  var split = if (rdd == null) {
    null
  } else {
    rdd.partitions(partition)
  }

  // Deduplicated preferred locations; empty when none were supplied.
  private val preferredLocs: Seq[String] = if (locs == null) Nil else locs.toSet.toSeq

  {
    // DEBUG code
    preferredLocs.foreach (hostPort => Utils.checkHost(Utils.parseHostPort(hostPort)._1, "preferredLocs : " + preferredLocs))
  }

  // Runs func over the partition's iterator, always firing the task's
  // on-complete callbacks, even if func throws.
  override def run(attemptId: Long): U = {
    val context = new TaskContext(stageId, partition, attemptId)
    metrics = Some(context.taskMetrics)
    try {
      func(context, rdd.iterator(split, context))
    } finally {
      context.executeOnCompleteCallbacks()
    }
  }

  override def preferredLocations: Seq[String] = preferredLocs

  override def toString = "ResultTask(" + stageId + ", " + partition + ")"

  // Writes fields in a fixed order; readExternal must read them back in
  // exactly the same order. The (rdd, func) pair goes through the companion's
  // per-stage serialization cache.
  override def writeExternal(out: ObjectOutput) {
    RDDCheckpointData.synchronized {
      split = rdd.partitions(partition)
      out.writeInt(stageId)
      val bytes = ResultTask.serializeInfo(
        stageId, rdd, func.asInstanceOf[(TaskContext, Iterator[_]) => _])
      out.writeInt(bytes.length)
      out.write(bytes)
      out.writeInt(partition)
      out.writeInt(outputId)
      out.writeObject(split)
    }
  }

  // Mirror of writeExternal. NOTE(review): `stageId` and `outputId` here are
  // locals that shadow the constructor fields — the task's own stageId/outputId
  // (0 from the no-arg constructor) are never reassigned; the reads appear to
  // exist only to keep the stream position in sync with writeExternal. Confirm
  // this is intentional before relying on those fields after deserialization.
  override def readExternal(in: ObjectInput) {
    val stageId = in.readInt()
    val numBytes = in.readInt()
    val bytes = new Array[Byte](numBytes)
    in.readFully(bytes)
    val (rdd_, func_) = ResultTask.deserializeInfo(stageId, bytes)
    rdd = rdd_.asInstanceOf[RDD[T]]
    func = func_.asInstanceOf[(TaskContext, Iterator[T]) => U]
    partition = in.readInt()
    val outputId = in.readInt()
    split = in.readObject().asInstanceOf[Partition]
  }
}
| baeeq/incubator-spark | core/src/main/scala/spark/scheduler/ResultTask.scala | Scala | bsd-3-clause | 3,775 |
package ch.ethz.dalab.dissolve.optimization
import breeze.linalg.Vector
import ch.ethz.dalab.dissolve.classification.StructSVMModel
/**
 * The user-supplied functions that define a structured prediction problem:
 * the joint feature map, the structured loss, the oracle and the prediction
 * function.
 *
 * Implementors must override at least one of `oracleFn` /
 * `oracleCandidateStream`: each default delegates to the other, so leaving
 * both unimplemented would recurse forever.
 */
trait DissolveFunctions[X, Y] extends Serializable {

  /** Joint feature map phi(x, y) as a breeze vector. */
  def featureFn(x: X, y: Y): Vector[Double]

  /** Structured loss between a predicted label and the ground truth. */
  def lossFn(yPredicted: Y, yTruth: Y): Double

  // Override either `oracleFn` or `oracleCandidateStream`
  /** Single oracle label; defaults to the first candidate from the stream. */
  def oracleFn(model: StructSVMModel[X, Y], x: X, y: Y): Y =
    oracleCandidateStream(model, x, y).head

  /** Lazy stream of oracle candidates; defaults to `oracleFn`'s result alone. */
  def oracleCandidateStream(model: StructSVMModel[X, Y], x: X, y: Y, initLevel: Int = 0): Stream[Y] =
    oracleFn(model, x, y) #:: Stream.empty

  /** Predicts a label for `x` under the given model. */
  def predictFn(model: StructSVMModel[X, Y], x: X): Y

  /** Weight applied to the example with label `y`; defaults to uniform 1.0. */
  def classWeights(y:Y): Double = 1.0
} | dalab/dissolve-struct | dissolve-struct-lib/src/main/scala/ch/ethz/dalab/dissolve/optimization/DissolveFunctions.scala | Scala | apache-2.0 | 689 |
package org.bitcoins.core.crypto.words
object EnglishWordsBip39 extends MnemonicWords {
override lazy val getWords: Vector[String] = Vector(
"abandon",
"ability",
"able",
"about",
"above",
"absent",
"absorb",
"abstract",
"absurd",
"abuse",
"access",
"accident",
"account",
"accuse",
"achieve",
"acid",
"acoustic",
"acquire",
"across",
"act",
"action",
"actor",
"actress",
"actual",
"adapt",
"add",
"addict",
"address",
"adjust",
"admit",
"adult",
"advance",
"advice",
"aerobic",
"affair",
"afford",
"afraid",
"again",
"age",
"agent",
"agree",
"ahead",
"aim",
"air",
"airport",
"aisle",
"alarm",
"album",
"alcohol",
"alert",
"alien",
"all",
"alley",
"allow",
"almost",
"alone",
"alpha",
"already",
"also",
"alter",
"always",
"amateur",
"amazing",
"among",
"amount",
"amused",
"analyst",
"anchor",
"ancient",
"anger",
"angle",
"angry",
"animal",
"ankle",
"announce",
"annual",
"another",
"answer",
"antenna",
"antique",
"anxiety",
"any",
"apart",
"apology",
"appear",
"apple",
"approve",
"april",
"arch",
"arctic",
"area",
"arena",
"argue",
"arm",
"armed",
"armor",
"army",
"around",
"arrange",
"arrest",
"arrive",
"arrow",
"art",
"artefact",
"artist",
"artwork",
"ask",
"aspect",
"assault",
"asset",
"assist",
"assume",
"asthma",
"athlete",
"atom",
"attack",
"attend",
"attitude",
"attract",
"auction",
"audit",
"august",
"aunt",
"author",
"auto",
"autumn",
"average",
"avocado",
"avoid",
"awake",
"aware",
"away",
"awesome",
"awful",
"awkward",
"axis",
"baby",
"bachelor",
"bacon",
"badge",
"bag",
"balance",
"balcony",
"ball",
"bamboo",
"banana",
"banner",
"bar",
"barely",
"bargain",
"barrel",
"base",
"basic",
"basket",
"battle",
"beach",
"bean",
"beauty",
"because",
"become",
"beef",
"before",
"begin",
"behave",
"behind",
"believe",
"below",
"belt",
"bench",
"benefit",
"best",
"betray",
"better",
"between",
"beyond",
"bicycle",
"bid",
"bike",
"bind",
"biology",
"bird",
"birth",
"bitter",
"black",
"blade",
"blame",
"blanket",
"blast",
"bleak",
"bless",
"blind",
"blood",
"blossom",
"blouse",
"blue",
"blur",
"blush",
"board",
"boat",
"body",
"boil",
"bomb",
"bone",
"bonus",
"book",
"boost",
"border",
"boring",
"borrow",
"boss",
"bottom",
"bounce",
"box",
"boy",
"bracket",
"brain",
"brand",
"brass",
"brave",
"bread",
"breeze",
"brick",
"bridge",
"brief",
"bright",
"bring",
"brisk",
"broccoli",
"broken",
"bronze",
"broom",
"brother",
"brown",
"brush",
"bubble",
"buddy",
"budget",
"buffalo",
"build",
"bulb",
"bulk",
"bullet",
"bundle",
"bunker",
"burden",
"burger",
"burst",
"bus",
"business",
"busy",
"butter",
"buyer",
"buzz",
"cabbage",
"cabin",
"cable",
"cactus",
"cage",
"cake",
"call",
"calm",
"camera",
"camp",
"can",
"canal",
"cancel",
"candy",
"cannon",
"canoe",
"canvas",
"canyon",
"capable",
"capital",
"captain",
"car",
"carbon",
"card",
"cargo",
"carpet",
"carry",
"cart",
"case",
"cash",
"casino",
"castle",
"casual",
"cat",
"catalog",
"catch",
"category",
"cattle",
"caught",
"cause",
"caution",
"cave",
"ceiling",
"celery",
"cement",
"census",
"century",
"cereal",
"certain",
"chair",
"chalk",
"champion",
"change",
"chaos",
"chapter",
"charge",
"chase",
"chat",
"cheap",
"check",
"cheese",
"chef",
"cherry",
"chest",
"chicken",
"chief",
"child",
"chimney",
"choice",
"choose",
"chronic",
"chuckle",
"chunk",
"churn",
"cigar",
"cinnamon",
"circle",
"citizen",
"city",
"civil",
"claim",
"clap",
"clarify",
"claw",
"clay",
"clean",
"clerk",
"clever",
"click",
"client",
"cliff",
"climb",
"clinic",
"clip",
"clock",
"clog",
"close",
"cloth",
"cloud",
"clown",
"club",
"clump",
"cluster",
"clutch",
"coach",
"coast",
"coconut",
"code",
"coffee",
"coil",
"coin",
"collect",
"color",
"column",
"combine",
"come",
"comfort",
"comic",
"common",
"company",
"concert",
"conduct",
"confirm",
"congress",
"connect",
"consider",
"control",
"convince",
"cook",
"cool",
"copper",
"copy",
"coral",
"core",
"corn",
"correct",
"cost",
"cotton",
"couch",
"country",
"couple",
"course",
"cousin",
"cover",
"coyote",
"crack",
"cradle",
"craft",
"cram",
"crane",
"crash",
"crater",
"crawl",
"crazy",
"cream",
"credit",
"creek",
"crew",
"cricket",
"crime",
"crisp",
"critic",
"crop",
"cross",
"crouch",
"crowd",
"crucial",
"cruel",
"cruise",
"crumble",
"crunch",
"crush",
"cry",
"crystal",
"cube",
"culture",
"cup",
"cupboard",
"curious",
"current",
"curtain",
"curve",
"cushion",
"custom",
"cute",
"cycle",
"dad",
"damage",
"damp",
"dance",
"danger",
"daring",
"dash",
"daughter",
"dawn",
"day",
"deal",
"debate",
"debris",
"decade",
"december",
"decide",
"decline",
"decorate",
"decrease",
"deer",
"defense",
"define",
"defy",
"degree",
"delay",
"deliver",
"demand",
"demise",
"denial",
"dentist",
"deny",
"depart",
"depend",
"deposit",
"depth",
"deputy",
"derive",
"describe",
"desert",
"design",
"desk",
"despair",
"destroy",
"detail",
"detect",
"develop",
"device",
"devote",
"diagram",
"dial",
"diamond",
"diary",
"dice",
"diesel",
"diet",
"differ",
"digital",
"dignity",
"dilemma",
"dinner",
"dinosaur",
"direct",
"dirt",
"disagree",
"discover",
"disease",
"dish",
"dismiss",
"disorder",
"display",
"distance",
"divert",
"divide",
"divorce",
"dizzy",
"doctor",
"document",
"dog",
"doll",
"dolphin",
"domain",
"donate",
"donkey",
"donor",
"door",
"dose",
"double",
"dove",
"draft",
"dragon",
"drama",
"drastic",
"draw",
"dream",
"dress",
"drift",
"drill",
"drink",
"drip",
"drive",
"drop",
"drum",
"dry",
"duck",
"dumb",
"dune",
"during",
"dust",
"dutch",
"duty",
"dwarf",
"dynamic",
"eager",
"eagle",
"early",
"earn",
"earth",
"easily",
"east",
"easy",
"echo",
"ecology",
"economy",
"edge",
"edit",
"educate",
"effort",
"egg",
"eight",
"either",
"elbow",
"elder",
"electric",
"elegant",
"element",
"elephant",
"elevator",
"elite",
"else",
"embark",
"embody",
"embrace",
"emerge",
"emotion",
"employ",
"empower",
"empty",
"enable",
"enact",
"end",
"endless",
"endorse",
"enemy",
"energy",
"enforce",
"engage",
"engine",
"enhance",
"enjoy",
"enlist",
"enough",
"enrich",
"enroll",
"ensure",
"enter",
"entire",
"entry",
"envelope",
"episode",
"equal",
"equip",
"era",
"erase",
"erode",
"erosion",
"error",
"erupt",
"escape",
"essay",
"essence",
"estate",
"eternal",
"ethics",
"evidence",
"evil",
"evoke",
"evolve",
"exact",
"example",
"excess",
"exchange",
"excite",
"exclude",
"excuse",
"execute",
"exercise",
"exhaust",
"exhibit",
"exile",
"exist",
"exit",
"exotic",
"expand",
"expect",
"expire",
"explain",
"expose",
"express",
"extend",
"extra",
"eye",
"eyebrow",
"fabric",
"face",
"faculty",
"fade",
"faint",
"faith",
"fall",
"false",
"fame",
"family",
"famous",
"fan",
"fancy",
"fantasy",
"farm",
"fashion",
"fat",
"fatal",
"father",
"fatigue",
"fault",
"favorite",
"feature",
"february",
"federal",
"fee",
"feed",
"feel",
"female",
"fence",
"festival",
"fetch",
"fever",
"few",
"fiber",
"fiction",
"field",
"figure",
"file",
"film",
"filter",
"final",
"find",
"fine",
"finger",
"finish",
"fire",
"firm",
"first",
"fiscal",
"fish",
"fit",
"fitness",
"fix",
"flag",
"flame",
"flash",
"flat",
"flavor",
"flee",
"flight",
"flip",
"float",
"flock",
"floor",
"flower",
"fluid",
"flush",
"fly",
"foam",
"focus",
"fog",
"foil",
"fold",
"follow",
"food",
"foot",
"force",
"forest",
"forget",
"fork",
"fortune",
"forum",
"forward",
"fossil",
"foster",
"found",
"fox",
"fragile",
"frame",
"frequent",
"fresh",
"friend",
"fringe",
"frog",
"front",
"frost",
"frown",
"frozen",
"fruit",
"fuel",
"fun",
"funny",
"furnace",
"fury",
"future",
"gadget",
"gain",
"galaxy",
"gallery",
"game",
"gap",
"garage",
"garbage",
"garden",
"garlic",
"garment",
"gas",
"gasp",
"gate",
"gather",
"gauge",
"gaze",
"general",
"genius",
"genre",
"gentle",
"genuine",
"gesture",
"ghost",
"giant",
"gift",
"giggle",
"ginger",
"giraffe",
"girl",
"give",
"glad",
"glance",
"glare",
"glass",
"glide",
"glimpse",
"globe",
"gloom",
"glory",
"glove",
"glow",
"glue",
"goat",
"goddess",
"gold",
"good",
"goose",
"gorilla",
"gospel",
"gossip",
"govern",
"gown",
"grab",
"grace",
"grain",
"grant",
"grape",
"grass",
"gravity",
"great",
"green",
"grid",
"grief",
"grit",
"grocery",
"group",
"grow",
"grunt",
"guard",
"guess",
"guide",
"guilt",
"guitar",
"gun",
"gym",
"habit",
"hair",
"half",
"hammer",
"hamster",
"hand",
"happy",
"harbor",
"hard",
"harsh",
"harvest",
"hat",
"have",
"hawk",
"hazard",
"head",
"health",
"heart",
"heavy",
"hedgehog",
"height",
"hello",
"helmet",
"help",
"hen",
"hero",
"hidden",
"high",
"hill",
"hint",
"hip",
"hire",
"history",
"hobby",
"hockey",
"hold",
"hole",
"holiday",
"hollow",
"home",
"honey",
"hood",
"hope",
"horn",
"horror",
"horse",
"hospital",
"host",
"hotel",
"hour",
"hover",
"hub",
"huge",
"human",
"humble",
"humor",
"hundred",
"hungry",
"hunt",
"hurdle",
"hurry",
"hurt",
"husband",
"hybrid",
"ice",
"icon",
"idea",
"identify",
"idle",
"ignore",
"ill",
"illegal",
"illness",
"image",
"imitate",
"immense",
"immune",
"impact",
"impose",
"improve",
"impulse",
"inch",
"include",
"income",
"increase",
"index",
"indicate",
"indoor",
"industry",
"infant",
"inflict",
"inform",
"inhale",
"inherit",
"initial",
"inject",
"injury",
"inmate",
"inner",
"innocent",
"input",
"inquiry",
"insane",
"insect",
"inside",
"inspire",
"install",
"intact",
"interest",
"into",
"invest",
"invite",
"involve",
"iron",
"island",
"isolate",
"issue",
"item",
"ivory",
"jacket",
"jaguar",
"jar",
"jazz",
"jealous",
"jeans",
"jelly",
"jewel",
"job",
"join",
"joke",
"journey",
"joy",
"judge",
"juice",
"jump",
"jungle",
"junior",
"junk",
"just",
"kangaroo",
"keen",
"keep",
"ketchup",
"key",
"kick",
"kid",
"kidney",
"kind",
"kingdom",
"kiss",
"kit",
"kitchen",
"kite",
"kitten",
"kiwi",
"knee",
"knife",
"knock",
"know",
"lab",
"label",
"labor",
"ladder",
"lady",
"lake",
"lamp",
"language",
"laptop",
"large",
"later",
"latin",
"laugh",
"laundry",
"lava",
"law",
"lawn",
"lawsuit",
"layer",
"lazy",
"leader",
"leaf",
"learn",
"leave",
"lecture",
"left",
"leg",
"legal",
"legend",
"leisure",
"lemon",
"lend",
"length",
"lens",
"leopard",
"lesson",
"letter",
"level",
"liar",
"liberty",
"library",
"license",
"life",
"lift",
"light",
"like",
"limb",
"limit",
"link",
"lion",
"liquid",
"list",
"little",
"live",
"lizard",
"load",
"loan",
"lobster",
"local",
"lock",
"logic",
"lonely",
"long",
"loop",
"lottery",
"loud",
"lounge",
"love",
"loyal",
"lucky",
"luggage",
"lumber",
"lunar",
"lunch",
"luxury",
"lyrics",
"machine",
"mad",
"magic",
"magnet",
"maid",
"mail",
"main",
"major",
"make",
"mammal",
"man",
"manage",
"mandate",
"mango",
"mansion",
"manual",
"maple",
"marble",
"march",
"margin",
"marine",
"market",
"marriage",
"mask",
"mass",
"master",
"match",
"material",
"math",
"matrix",
"matter",
"maximum",
"maze",
"meadow",
"mean",
"measure",
"meat",
"mechanic",
"medal",
"media",
"melody",
"melt",
"member",
"memory",
"mention",
"menu",
"mercy",
"merge",
"merit",
"merry",
"mesh",
"message",
"metal",
"method",
"middle",
"midnight",
"milk",
"million",
"mimic",
"mind",
"minimum",
"minor",
"minute",
"miracle",
"mirror",
"misery",
"miss",
"mistake",
"mix",
"mixed",
"mixture",
"mobile",
"model",
"modify",
"mom",
"moment",
"monitor",
"monkey",
"monster",
"month",
"moon",
"moral",
"more",
"morning",
"mosquito",
"mother",
"motion",
"motor",
"mountain",
"mouse",
"move",
"movie",
"much",
"muffin",
"mule",
"multiply",
"muscle",
"museum",
"mushroom",
"music",
"must",
"mutual",
"myself",
"mystery",
"myth",
"naive",
"name",
"napkin",
"narrow",
"nasty",
"nation",
"nature",
"near",
"neck",
"need",
"negative",
"neglect",
"neither",
"nephew",
"nerve",
"nest",
"net",
"network",
"neutral",
"never",
"news",
"next",
"nice",
"night",
"noble",
"noise",
"nominee",
"noodle",
"normal",
"north",
"nose",
"notable",
"note",
"nothing",
"notice",
"novel",
"now",
"nuclear",
"number",
"nurse",
"nut",
"oak",
"obey",
"object",
"oblige",
"obscure",
"observe",
"obtain",
"obvious",
"occur",
"ocean",
"october",
"odor",
"off",
"offer",
"office",
"often",
"oil",
"okay",
"old",
"olive",
"olympic",
"omit",
"once",
"one",
"onion",
"online",
"only",
"open",
"opera",
"opinion",
"oppose",
"option",
"orange",
"orbit",
"orchard",
"order",
"ordinary",
"organ",
"orient",
"original",
"orphan",
"ostrich",
"other",
"outdoor",
"outer",
"output",
"outside",
"oval",
"oven",
"over",
"own",
"owner",
"oxygen",
"oyster",
"ozone",
"pact",
"paddle",
"page",
"pair",
"palace",
"palm",
"panda",
"panel",
"panic",
"panther",
"paper",
"parade",
"parent",
"park",
"parrot",
"party",
"pass",
"patch",
"path",
"patient",
"patrol",
"pattern",
"pause",
"pave",
"payment",
"peace",
"peanut",
"pear",
"peasant",
"pelican",
"pen",
"penalty",
"pencil",
"people",
"pepper",
"perfect",
"permit",
"person",
"pet",
"phone",
"photo",
"phrase",
"physical",
"piano",
"picnic",
"picture",
"piece",
"pig",
"pigeon",
"pill",
"pilot",
"pink",
"pioneer",
"pipe",
"pistol",
"pitch",
"pizza",
"place",
"planet",
"plastic",
"plate",
"play",
"please",
"pledge",
"pluck",
"plug",
"plunge",
"poem",
"poet",
"point",
"polar",
"pole",
"police",
"pond",
"pony",
"pool",
"popular",
"portion",
"position",
"possible",
"post",
"potato",
"pottery",
"poverty",
"powder",
"power",
"practice",
"praise",
"predict",
"prefer",
"prepare",
"present",
"pretty",
"prevent",
"price",
"pride",
"primary",
"print",
"priority",
"prison",
"private",
"prize",
"problem",
"process",
"produce",
"profit",
"program",
"project",
"promote",
"proof",
"property",
"prosper",
"protect",
"proud",
"provide",
"public",
"pudding",
"pull",
"pulp",
"pulse",
"pumpkin",
"punch",
"pupil",
"puppy",
"purchase",
"purity",
"purpose",
"purse",
"push",
"put",
"puzzle",
"pyramid",
"quality",
"quantum",
"quarter",
"question",
"quick",
"quit",
"quiz",
"quote",
"rabbit",
"raccoon",
"race",
"rack",
"radar",
"radio",
"rail",
"rain",
"raise",
"rally",
"ramp",
"ranch",
"random",
"range",
"rapid",
"rare",
"rate",
"rather",
"raven",
"raw",
"razor",
"ready",
"real",
"reason",
"rebel",
"rebuild",
"recall",
"receive",
"recipe",
"record",
"recycle",
"reduce",
"reflect",
"reform",
"refuse",
"region",
"regret",
"regular",
"reject",
"relax",
"release",
"relief",
"rely",
"remain",
"remember",
"remind",
"remove",
"render",
"renew",
"rent",
"reopen",
"repair",
"repeat",
"replace",
"report",
"require",
"rescue",
"resemble",
"resist",
"resource",
"response",
"result",
"retire",
"retreat",
"return",
"reunion",
"reveal",
"review",
"reward",
"rhythm",
"rib",
"ribbon",
"rice",
"rich",
"ride",
"ridge",
"rifle",
"right",
"rigid",
"ring",
"riot",
"ripple",
"risk",
"ritual",
"rival",
"river",
"road",
"roast",
"robot",
"robust",
"rocket",
"romance",
"roof",
"rookie",
"room",
"rose",
"rotate",
"rough",
"round",
"route",
"royal",
"rubber",
"rude",
"rug",
"rule",
"run",
"runway",
"rural",
"sad",
"saddle",
"sadness",
"safe",
"sail",
"salad",
"salmon",
"salon",
"salt",
"salute",
"same",
"sample",
"sand",
"satisfy",
"satoshi",
"sauce",
"sausage",
"save",
"say",
"scale",
"scan",
"scare",
"scatter",
"scene",
"scheme",
"school",
"science",
"scissors",
"scorpion",
"scout",
"scrap",
"screen",
"script",
"scrub",
"sea",
"search",
"season",
"seat",
"second",
"secret",
"section",
"security",
"seed",
"seek",
"segment",
"select",
"sell",
"seminar",
"senior",
"sense",
"sentence",
"series",
"service",
"session",
"settle",
"setup",
"seven",
"shadow",
"shaft",
"shallow",
"share",
"shed",
"shell",
"sheriff",
"shield",
"shift",
"shine",
"ship",
"shiver",
"shock",
"shoe",
"shoot",
"shop",
"short",
"shoulder",
"shove",
"shrimp",
"shrug",
"shuffle",
"shy",
"sibling",
"sick",
"side",
"siege",
"sight",
"sign",
"silent",
"silk",
"silly",
"silver",
"similar",
"simple",
"since",
"sing",
"siren",
"sister",
"situate",
"six",
"size",
"skate",
"sketch",
"ski",
"skill",
"skin",
"skirt",
"skull",
"slab",
"slam",
"sleep",
"slender",
"slice",
"slide",
"slight",
"slim",
"slogan",
"slot",
"slow",
"slush",
"small",
"smart",
"smile",
"smoke",
"smooth",
"snack",
"snake",
"snap",
"sniff",
"snow",
"soap",
"soccer",
"social",
"sock",
"soda",
"soft",
"solar",
"soldier",
"solid",
"solution",
"solve",
"someone",
"song",
"soon",
"sorry",
"sort",
"soul",
"sound",
"soup",
"source",
"south",
"space",
"spare",
"spatial",
"spawn",
"speak",
"special",
"speed",
"spell",
"spend",
"sphere",
"spice",
"spider",
"spike",
"spin",
"spirit",
"split",
"spoil",
"sponsor",
"spoon",
"sport",
"spot",
"spray",
"spread",
"spring",
"spy",
"square",
"squeeze",
"squirrel",
"stable",
"stadium",
"staff",
"stage",
"stairs",
"stamp",
"stand",
"start",
"state",
"stay",
"steak",
"steel",
"stem",
"step",
"stereo",
"stick",
"still",
"sting",
"stock",
"stomach",
"stone",
"stool",
"story",
"stove",
"strategy",
"street",
"strike",
"strong",
"struggle",
"student",
"stuff",
"stumble",
"style",
"subject",
"submit",
"subway",
"success",
"such",
"sudden",
"suffer",
"sugar",
"suggest",
"suit",
"summer",
"sun",
"sunny",
"sunset",
"super",
"supply",
"supreme",
"sure",
"surface",
"surge",
"surprise",
"surround",
"survey",
"suspect",
"sustain",
"swallow",
"swamp",
"swap",
"swarm",
"swear",
"sweet",
"swift",
"swim",
"swing",
"switch",
"sword",
"symbol",
"symptom",
"syrup",
"system",
"table",
"tackle",
"tag",
"tail",
"talent",
"talk",
"tank",
"tape",
"target",
"task",
"taste",
"tattoo",
"taxi",
"teach",
"team",
"tell",
"ten",
"tenant",
"tennis",
"tent",
"term",
"test",
"text",
"thank",
"that",
"theme",
"then",
"theory",
"there",
"they",
"thing",
"this",
"thought",
"three",
"thrive",
"throw",
"thumb",
"thunder",
"ticket",
"tide",
"tiger",
"tilt",
"timber",
"time",
"tiny",
"tip",
"tired",
"tissue",
"title",
"toast",
"tobacco",
"today",
"toddler",
"toe",
"together",
"toilet",
"token",
"tomato",
"tomorrow",
"tone",
"tongue",
"tonight",
"tool",
"tooth",
"top",
"topic",
"topple",
"torch",
"tornado",
"tortoise",
"toss",
"total",
"tourist",
"toward",
"tower",
"town",
"toy",
"track",
"trade",
"traffic",
"tragic",
"train",
"transfer",
"trap",
"trash",
"travel",
"tray",
"treat",
"tree",
"trend",
"trial",
"tribe",
"trick",
"trigger",
"trim",
"trip",
"trophy",
"trouble",
"truck",
"true",
"truly",
"trumpet",
"trust",
"truth",
"try",
"tube",
"tuition",
"tumble",
"tuna",
"tunnel",
"turkey",
"turn",
"turtle",
"twelve",
"twenty",
"twice",
"twin",
"twist",
"two",
"type",
"typical",
"ugly",
"umbrella",
"unable",
"unaware",
"uncle",
"uncover",
"under",
"undo",
"unfair",
"unfold",
"unhappy",
"uniform",
"unique",
"unit",
"universe",
"unknown",
"unlock",
"until",
"unusual",
"unveil",
"update",
"upgrade",
"uphold",
"upon",
"upper",
"upset",
"urban",
"urge",
"usage",
"use",
"used",
"useful",
"useless",
"usual",
"utility",
"vacant",
"vacuum",
"vague",
"valid",
"valley",
"valve",
"van",
"vanish",
"vapor",
"various",
"vast",
"vault",
"vehicle",
"velvet",
"vendor",
"venture",
"venue",
"verb",
"verify",
"version",
"very",
"vessel",
"veteran",
"viable",
"vibrant",
"vicious",
"victory",
"video",
"view",
"village",
"vintage",
"violin",
"virtual",
"virus",
"visa",
"visit",
"visual",
"vital",
"vivid",
"vocal",
"voice",
"void",
"volcano",
"volume",
"vote",
"voyage",
"wage",
"wagon",
"wait",
"walk",
"wall",
"walnut",
"want",
"warfare",
"warm",
"warrior",
"wash",
"wasp",
"waste",
"water",
"wave",
"way",
"wealth",
"weapon",
"wear",
"weasel",
"weather",
"web",
"wedding",
"weekend",
"weird",
"welcome",
"west",
"wet",
"whale",
"what",
"wheat",
"wheel",
"when",
"where",
"whip",
"whisper",
"wide",
"width",
"wife",
"wild",
"will",
"win",
"window",
"wine",
"wing",
"wink",
"winner",
"winter",
"wire",
"wisdom",
"wise",
"wish",
"witness",
"wolf",
"woman",
"wonder",
"wood",
"wool",
"word",
"work",
"world",
"worry",
"worth",
"wrap",
"wreck",
"wrestle",
"wrist",
"write",
"wrong",
"yard",
"year",
"yellow",
"you",
"young",
"youth",
"zebra",
"zero",
"zone",
"zoo"
)
}
| bitcoin-s/bitcoin-s | core/src/main/scala/org/bitcoins/core/crypto/words/EnglishWordsBip39.scala | Scala | mit | 27,602 |
package controllers
import java.nio.file.Files
import akka.stream.scaladsl.FileIO
import com.google.inject.{Inject, Singleton}
import models.document.{ArchiveAddContext, ArchiveContext, _}
import net.scalytica.symbiotic.json.Implicits.{PathFormatters, lockFormat}
import no.uio.musit.MusitResults._
import no.uio.musit.functional.Implicits.futureMonad
import no.uio.musit.functional.MonadTransformers.MusitResultT
import no.uio.musit.security.Permissions.{Admin, Read, Write}
import no.uio.musit.security.{Authenticator, DocumentArchive}
import no.uio.musit.service.MusitController
import play.api.Logger
import play.api.libs.json.{JsError, JsSuccess, Json}
import play.api.mvc.ControllerComponents
import services.DocumentArchiveService
import scala.concurrent.Future.{successful => evaluated}
import scala.util.Try
@Singleton
/**
 * HTTP endpoints for the museum document archive: root/folder tree navigation,
 * folder CRUD plus open/close/move/rename, and file upload/download/metadata
 * and locking. Every action is wrapped in `MusitSecureAction` with the
 * `DocumentArchive` module and a per-endpoint permission (Read/Write/Admin);
 * `mid` is the museum id the permission check is scoped to.
 */
class DocumentArchiveController @Inject()(
    val controllerComponents: ControllerComponents,
    val authService: Authenticator,
    val docService: DocumentArchiveService
) extends MusitController {
  // NOTE(review): `log` is not referenced anywhere in this class — kept for
  // future diagnostics; consider removing if it stays unused.
  private val log = Logger(classOf[DocumentArchiveController])
  // ---------------------------------------------------------------------------
  // Folder specific endpoints
  // ---------------------------------------------------------------------------
  // Full tree from the archive root (optionally including files).
  // Responds 204 No Content when the tree is empty.
  def getRootTree(mid: Int, includeFiles: Boolean) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getArchiveRootTreeFor(includeFiles).map { r =>
        respond(r) { tree =>
          if (tree.isEmpty) NoContent
          else Ok(Json.toJson[Seq[ArchiveItem]](tree))
        }
      }
    }
  // Creates a folder under `destFolderId` from a JSON ArchiveFolderItem body.
  // 201 Created on success, 400 on invalid JSON, 403 when the caller lacks
  // access to the (optional) collection.
  def addFolder(mid: Int, destFolderId: String, collectionId: Option[String]) =
    MusitSecureAction(mid, DocumentArchive, Write).async(parse.json) { implicit request =>
      // Verify that the user has access to collectionId
      parseMaybeCollectionIdParam(collectionId) match {
        case Right(maybeColId) =>
          if (request.user.canAccess(mid, DocumentArchive, maybeColId)) {
            implicit val ctx = ArchiveAddContext(request.user, mid, maybeColId)
            request.body.validate[ArchiveFolderItem] match {
              case JsSuccess(afi, _) =>
                docService.addArchiveFolderItem(destFolderId, afi).map { r =>
                  respond(r)(added => Created(Json.toJson[ArchiveFolderItem](added)))
                }
              case err: JsError =>
                evaluated(BadRequest(JsError.toJson(err)))
            }
          } else {
            evaluated(Forbidden(Json.obj("message" -> s"Unauthorized access")))
          }
        case Left(err) => evaluated(err)
      }
    }
  // Fetches a single folder's metadata as JSON.
  def getFolder(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getArchiveFolderItem(folderId).map { r =>
        respond(r) { afi =>
          Ok(Json.toJson[ArchiveFolderItem](afi))
        }
      }
    }
  // Replaces a folder's metadata from a JSON ArchiveFolderItem body.
  def updateFolder(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async(parse.json) { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      request.body.validate[ArchiveFolderItem] match {
        case JsSuccess(afi, _) =>
          docService.updateArchiveFolderItem(folderId, afi).map { r =>
            respond(r)(d => Ok(Json.toJson[ArchiveFolderItem](d)))
          }
        case err: JsError =>
          evaluated(BadRequest(JsError.toJson(err)))
      }
    }
  // Renames a folder; returns the modified paths, or 304 when nothing changed.
  def renameFolder(mid: Int, folderId: String, name: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.renameArchiveFolderItem(folderId, name).map { r =>
        respond(r) { modPaths =>
          if (modPaths.nonEmpty) Ok(Json.toJson(modPaths))
          else NotModified
        }
      }
    }
  // Lists the direct children of a folder; 204 when it has none.
  def getDirectChildrenForId(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getChildrenFor(folderId).map { r =>
        respond(r) { tree =>
          if (tree.nonEmpty) Ok(Json.toJson[Seq[ArchiveItem]](tree))
          else NoContent
        }
      }
    }
  // Reports whether a folder is closed; note the response key is "isLocked".
  def isClosedFolder(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.isArchiveFolderItemClosed(folderId).map { r =>
        respond(r) { locked =>
          Ok(Json.obj("isLocked" -> locked))
        }
      }
    }
  // Closes a folder. Requires Admin permission.
  def closeFolder(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Admin).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.closeArchiveFolderItem(folderId).map { r =>
        respond(r)(l => Ok(Json.toJson(l)))
      }
    }
  // Re-opens a folder. Requires Admin permission; 304 when already open.
  def openFolder(mid: Int, folderId: String) =
    MusitSecureAction(mid, DocumentArchive, Admin).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.openArchiveFolderItem(folderId).map { r =>
        respond(r)(opened => if (opened) Ok else NotModified)
      }
    }
  // Moves a folder under the folder `to`; returns modified paths or 304.
  def moveFolderTo(mid: Int, folderId: String, to: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.moveArchiveFolderItem(folderId, to).map { r =>
        respond(r) { modPaths =>
          if (modPaths.nonEmpty) Ok(Json.toJson(modPaths))
          else NotModified
        }
      }
    }
  // Subtree rooted at `folderId` (optionally including files); 204 when empty.
  def getFolderTreeFrom(mid: Int, folderId: String, includeFiles: Boolean) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getTreeFrom(folderId, includeFiles).map { r =>
        respond(r) { tree =>
          if (tree.nonEmpty) Ok(Json.toJson[Seq[ArchiveItem]](tree))
          else NoContent
        }
      }
    }
  // ---------------------------------------------------------------------------
  // File specific endpoints
  // ---------------------------------------------------------------------------
  // Multipart upload of a single file into `folderId`. Only the first file
  // part is used; its size is read best-effort (Try(...).toOption) and the
  // content is streamed from the temporary file. After saving, the document
  // is re-read so the response reflects the stored state.
  def uploadToFolder(mid: Int, folderId: String, collectionId: Option[String]) =
    MusitSecureAction(mid, DocumentArchive, Write).async(parse.multipartFormData) {
      implicit request =>
        // Verify that the user has access to collectionId
        parseMaybeCollectionIdParam(collectionId) match {
          case Right(maybeColId) =>
            if (request.user.canAccess(mid, DocumentArchive, maybeColId)) {
              implicit val ctx = ArchiveAddContext(request.user, mid, maybeColId)
              request.body.files.headOption.map { tmp =>
                ArchiveDocument(
                  title = tmp.filename,
                  fileType = tmp.contentType,
                  fileSize = Try(Files.size(tmp.ref.path)).toOption.map(_.toString),
                  stream = Option(FileIO.fromPath(tmp.ref.path))
                )
              }.map { ad =>
                val res = for {
                  a <- MusitResultT(docService.saveArchiveDocument(folderId, ad))
                  b <- MusitResultT(docService.getArchiveDocument(a)(ctx))
                } yield b
                res.value.map {
                  case MusitSuccess(added) =>
                    // Using the less specific type ArchiveItem for JSON parsing
                    Created(Json.toJson[ArchiveItem](added))
                  case MusitGeneralError(msg) =>
                    BadRequest(Json.obj("message" -> msg))
                  case err: MusitError =>
                    InternalServerError(Json.obj("message" -> s"${err.message}"))
                }
              }.getOrElse(
                evaluated(BadRequest(Json.obj("message" -> s"No attached file")))
              )
            } else {
              evaluated(Forbidden(Json.obj("message" -> s"Unauthorized access")))
            }
          case Left(err) => evaluated(err)
        }
    }
  // Updates a file's metadata from a JSON ArchiveDocument body.
  def updateFile(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async(parse.json) { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      request.body.validate[ArchiveDocument] match {
        case JsSuccess(ad, _) =>
          docService.updateArchiveDocument(fileId, ad).map { r =>
            respond(r)(d => Ok(Json.toJson[ArchiveItem](d)))
          }
        case err: JsError =>
          evaluated(BadRequest(JsError.toJson(err)))
      }
    }
  // Fetches file metadata (no content) as JSON.
  def getFileMetadataById(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getArchiveDocument(fileId).map { r =>
        respond(r)(d => Ok(Json.toJson[ArchiveItem](d)))
      }
    }
  // Streams the file content as a chunked response with Content-Disposition
  // (and Content-Length when known). 404 when the metadata exists but no
  // physical stream is attached.
  def downloadFile(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.getArchiveDocument(fileId).map { r =>
        respond(r) { doc =>
          doc.stream.map { source =>
            val headers = Seq.newBuilder[(String, String)]
            headers += CONTENT_DISPOSITION -> ContentDisposition(doc.title)
            doc.size.foreach(s => headers += CONTENT_LENGTH -> s)
            Ok.chunked(source).withHeaders(headers.result(): _*)
          }.getOrElse {
            NotFound(Json.obj("message" -> s"Could not find physical file for $fileId"))
          }
        }
      }
    }
  // Reports whether the file is locked.
  def isLockedFile(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Read).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.isArchiveDocumentLocked(fileId).map { r =>
        respond(r)(l => Ok(Json.obj("isLocked" -> l)))
      }
    }
  // Locks a file; responds with the resulting lock as JSON.
  def lockFile(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.lockArchiveDocument(fileId).map { r =>
        respond(r)(l => Ok(Json.toJson(l)))
      }
    }
  // Unlocks a file; 304 when it was not locked.
  def unlockFile(mid: Int, fileId: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.unlockArchiveDocument(fileId).map { r =>
        respond(r)(opened => if (opened) Ok else NotModified)
      }
    }
  // Moves a file into the folder `to` and returns the updated item.
  def moveFileTo(mid: Int, fileId: String, to: String) =
    MusitSecureAction(mid, DocumentArchive, Write).async { implicit request =>
      implicit val ctx = ArchiveContext(request.user, mid)
      docService.moveArchiveDocument(fileId, to).map { r =>
        respond(r)(d => Ok(Json.toJson[ArchiveItem](d)))
      }
    }
}
| MUSIT-Norway/musit | service_document/app/controllers/DocumentArchiveController.scala | Scala | gpl-2.0 | 11,121 |
package com.itszuvalex.itszulib.gui
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
/**
* Created by Christopher Harris (Itszuvalex) on 9/3/15.
*/
/**
 * A [[GuiElement]] that hosts child elements and fans GUI events out to them.
 * Child coordinates are relative to the panel, so mouse positions are shifted
 * by each child's anchor before being forwarded.
 */
trait GuiPanel extends GuiElement {
  /** Panel width in pixels; supplied by concrete implementations. */
  var panelWidth: Int
  /** Panel height in pixels; supplied by concrete implementations. */
  var panelHeight: Int

  override def spaceHorizontal = panelWidth

  override def spaceVertical = panelHeight

  /** Children hosted by this panel, in the order they were added. */
  val subElements = ArrayBuffer.empty[GuiElement]

  /** Adopts every element that accepts this panel as parent; returns the panel for chaining. */
  def add(elements: GuiElement*) = {
    val adopted = elements.filter(_.setParent(this))
    subElements ++= adopted
    this
  }

  /** A click on the panel is simply delegated to the children. */
  override def onMouseClick(mouseX: Int, mouseY: Int, button: Int): Boolean =
    passAlongMouseClick(mouseX, mouseY, button)

  /** Forwards the click (in child-local coordinates) until some child consumes it. */
  def passAlongMouseClick(mouseX: Int, mouseY: Int, button: Int): Boolean =
    subElements.exists { child =>
      child.onMouseClick(mouseX - child.anchorX, mouseY - child.anchorY, button)
    }

  override def addTooltip(mouseX: Int, mouseY: Int, tooltip: ListBuffer[String]): Unit = {
    // Panel's own tooltip first, then the children's.
    super.addTooltip(mouseX, mouseY, tooltip)
    addSubElementTooltips(mouseX, mouseY, tooltip)
  }

  /** Lets every child append its tooltip lines, using child-local coordinates. */
  def addSubElementTooltips(mouseX: Int, mouseY: Int, tooltip: ListBuffer[String]): Unit =
    for (child <- subElements)
      child.addTooltip(mouseX - child.anchorX, mouseY - child.anchorY, tooltip)

  /** True when the (panel-local) point falls within the panel's bounds. */
  override def isLocationInside(mouseX: Int, mouseY: Int): Boolean =
    mouseX >= 0 && mouseY >= 0 && mouseX < panelWidth && mouseY < panelHeight

  /** Ticks every child. */
  override def update(): Unit = {
    for (child <- subElements) child.update()
  }

  override def renderUpdate(screenX: Int, screenY: Int, mouseX: Int, mouseY: Int, partialTicks: Float) = {
    super.renderUpdate(screenX, screenY, mouseX, mouseY, partialTicks)
    // Children render at their anchored screen position with local mouse coords.
    for (child <- subElements) {
      child.renderUpdate(screenX + child.anchorX, screenY + child.anchorY,
                         mouseX - child.anchorX, mouseY - child.anchorY, partialTicks)
    }
  }
}
| BlockWorker/ItszuLib | src/main/scala/com/itszuvalex/itszulib/gui/GuiPanel.scala | Scala | gpl-2.0 | 1,967 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package com.google.protobuf.struct
/** `ListValue` is a wrapper around a repeated field of values.
*
* The JSON representation for `ListValue` is JSON array.
*
* @param values
* Repeated field of dynamically typed values.
*/
// NOTE(review): scalapb-GENERATED code (see file header: "Do not edit!").
// Only review comments are added below — regenerate from struct.proto for any
// real change.
@SerialVersionUID(0L)
final case class ListValue(
    values: _root_.scala.Seq[com.google.protobuf.struct.Value] = _root_.scala.Seq.empty,
    unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
    ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[ListValue] {
    // Cached wire size; 0 means "not computed yet" (recomputed lazily below).
    @transient
    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
    private[this] def __computeSerializedValue(): _root_.scala.Int = {
      var __size = 0
      values.foreach { __item =>
        val __value = __item
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      __size += unknownFields.serializedSize
      __size
    }
    // Lazily computes and caches the serialized size on first access.
    override def serializedSize: _root_.scala.Int = {
      var read = __serializedSizeCachedValue
      if (read == 0) {
        read = __computeSerializedValue()
        __serializedSizeCachedValue = read
      }
      read
    }
    // Writes each value as field #1 (tag 1, wire type 2 = length-delimited),
    // then any unknown fields.
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      values.foreach { __v =>
        val __m = __v
        _output__.writeTag(1, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
      unknownFields.writeTo(_output__)
    }
    // Copy-based mutators for the repeated `values` field.
    def clearValues = copy(values = _root_.scala.Seq.empty)
    def addValues(__vs: com.google.protobuf.struct.Value*): ListValue = addAllValues(__vs)
    def addAllValues(__vs: Iterable[com.google.protobuf.struct.Value]): ListValue = copy(values = values ++ __vs)
    def withValues(__v: _root_.scala.Seq[com.google.protobuf.struct.Value]): ListValue = copy(values = __v)
    def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
    def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
    // Reflection-style accessor by proto field number (only field 1 exists).
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => values
      }
    }
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PRepeated(values.iterator.map(_.toPMessage).toVector)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion = com.google.protobuf.struct.ListValue
}
// NOTE(review): scalapb-GENERATED companion (see file header: "Do not edit!").
// Only review comments are added below — regenerate from struct.proto for any
// real change.
object ListValue extends scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.ListValue] {
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.ListValue] = this
  // Merges fields parsed from `_input__` on top of `_message__`:
  // tag 0 ends the stream, tag 10 (field 1, length-delimited) appends a Value,
  // anything else is preserved in the unknown-field set.
  def merge(`_message__`: com.google.protobuf.struct.ListValue, `_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.struct.ListValue = {
    val __values = (_root_.scala.collection.immutable.Vector.newBuilder[com.google.protobuf.struct.Value] ++= `_message__`.values)
    var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
    var _done__ = false
    while (!_done__) {
      val _tag__ = _input__.readTag()
      _tag__ match {
        case 0 => _done__ = true
        case 10 =>
          __values += _root_.scalapb.LiteParser.readMessage(_input__, com.google.protobuf.struct.Value.defaultInstance)
        case tag =>
          if (_unknownFields__ == null) {
            _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder(_message__.unknownFields)
          }
          _unknownFields__.parseField(tag, _input__)
      }
    }
    com.google.protobuf.struct.ListValue(
      values = __values.result(),
      unknownFields = if (_unknownFields__ == null) _message__.unknownFields else _unknownFields__.result()
    )
  }
  // Builds a ListValue from scalapb's descriptor-based generic representation.
  implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.struct.ListValue] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
      com.google.protobuf.struct.ListValue(
        values = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[com.google.protobuf.struct.Value]]).getOrElse(_root_.scala.Seq.empty)
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  // ListValue is message index 2 inside struct.proto.
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = StructProto.javaDescriptor.getMessageTypes().get(2)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = StructProto.scalaDescriptor.messages(2)
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
    var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
    (__number: @_root_.scala.unchecked) match {
      case 1 => __out = com.google.protobuf.struct.Value
    }
    __out
  }
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
  // No enum fields exist on this message, so this is unreachable by number.
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  lazy val defaultInstance = com.google.protobuf.struct.ListValue(
    values = _root_.scala.Seq.empty
  )
  // Lens support for composing updates on nested ListValue fields.
  implicit class ListValueLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.struct.ListValue]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.struct.ListValue](_l) {
    def values: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[com.google.protobuf.struct.Value]] = field(_.values)((c_, f_) => c_.copy(values = f_))
  }
  final val VALUES_FIELD_NUMBER = 1
  def of(
    values: _root_.scala.Seq[com.google.protobuf.struct.Value]
  ): _root_.com.google.protobuf.struct.ListValue = _root_.com.google.protobuf.struct.ListValue(
    values
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[google.protobuf.ListValue])
}
| trueaccord/ScalaPB | scalapb-runtime/src/main/scalajs/com/google/protobuf/struct/ListValue.scala | Scala | apache-2.0 | 6,462 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.Graph.ModuleNode
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Table
import com.intel.analytics.bigdl.utils.serializer.{ContainerSerializable, DeserializeContext, ModuleData, SerializeContext}
import com.intel.analytics.bigdl.serialization.Bigdl.BigDLModule
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
/**
* This class is a container for a single module which will be applied
* to all input elements. The member module is cloned as necessary to
* process all input elements.
*
* @param module
*/
@SerialVersionUID( 4403280698280280268L)
class MapTable[T: ClassTag](
  var module: AbstractModule[_ <: Activity, _ <: Activity, T] = null)
  (implicit ev: TensorNumeric[T]) extends DynamicContainer[Table, Table, T] {

  if ( module != null) {
    this.add(module)
  }

  /**
   * Ensures one member module per input element by cloning `module` until
   * `modules` holds at least `n` entries. Clones keep the original naming
   * scheme: the clone stored at 0-based position `p` is named
   * `module.getName() + (p + 1)`.
   *
   * Bug fix: the previous implementation started a counter at 2 AND required
   * `modules.size <= i` to keep looping, so a forward pass with a larger
   * table than an earlier one (e.g. first length 3, later length 5) exited
   * the loop immediately, leaving `modules` too short and making
   * `updateOutput` fail with IndexOutOfBoundsException. Growing until
   * `modules.size == n` handles any sequence of input lengths and never
   * over-allocates.
   */
  private def extend(n: Int): Unit = {
    while (modules.size < n) {
      val i = modules.size + 1
      modules.append(module
        .cloneModule().setName(module.getName() + i)
        .asInstanceOf[AbstractModule[Activity, Activity, T]])
    }
  }

  /**
   * Sets `module` as the template for this container. Slot 0 holds the module
   * itself; any already-existing extra slots are refreshed with new clones so
   * each input element keeps independent state.
   */
  override def add(module: AbstractModule[_ <: Activity, _ <: Activity, T]): this.type = {
    require(module != null, "Single module required")
    this.module = module
    if (modules.nonEmpty) {
      modules.update(0, module.asInstanceOf[AbstractModule[Activity, Activity, T]])
      for (i <- 1 until modules.size) {
        modules.update(i, module.cloneModule().asInstanceOf[AbstractModule[Activity, Activity, T]])
      }
    } else {
      modules.append(module.asInstanceOf[AbstractModule[Activity, Activity, T]])
    }
    this
  }

  /** Applies the i-th member module to the i-th input element (1-based Table keys). */
  override def updateOutput(input: Table): Table = {
    require(module != null, "Single module required")
    extend(input.length())
    var i = 0
    while (i < input.length()) {
      output.update(i + 1, modules(i).forward(input(i + 1)))
      i += 1
    }
    output
  }

  /** Back-propagates each gradOutput element through its corresponding member module. */
  override def updateGradInput(input: Table, gradOutput: Table): Table = {
    require(module != null, "Single module required")
    extend(input.length())
    var i = 0
    while (i < input.length()) {
      gradInput.update(i + 1, modules(i).updateGradInput(input(i + 1), gradOutput(i + 1)))
      i += 1
    }
    gradInput
  }

  /** Accumulates parameter gradients in every member module. */
  override def accGradParameters(input: Table, gradOutput: Table): Unit = {
    require(module != null, "Single module required")
    extend(input.length())
    var i = 0
    while (i < input.length()) {
      modules(i).accGradParameters(input(i + 1), gradOutput(i + 1))
      i += 1
    }
  }

  /** MapTable cannot be converted to a static graph; the member count depends on input size. */
  override def getEndNodes(startNodes: Array[ModuleNode[T]]): Array[ModuleNode[T]] = {
    throw new IllegalArgumentException("Can not transform Container MapTable to graph")
  }

  override def toString(): String = {
    val tab = " "
    val line = "\\n"
    var str = s"${getPrintName}"
    if (module != null) {
      str += s"{$line$tab$module$line}"
    } else {
      str += " { }"
    }
    str
  }

  /** Drops all member modules, then re-seeds slot 0 from the template module. */
  override def clearState(): this.type = {
    modules.clear()
    if ( module != null) {
      this.add(module)
    }
    this
  }
}
/** Factory and serialization support for [[MapTable]]. */
object MapTable extends ContainerSerializable {
  /** Creates a MapTable, optionally seeded with the template `module`. */
  def apply[@specialized(Float, Double) T: ClassTag](
      module: AbstractModule[_ <: Activity, _ <: Activity, T] = null
  )(implicit ev: TensorNumeric[T]) : MapTable[T] = {
    new MapTable[T](module)
  }
  // Deserialization: the serialized form keeps exactly one member module (see
  // doSerializeModule), so re-register it via add() to restore the template.
  override def doLoadModule[T: ClassTag](context: DeserializeContext)
    (implicit ev: TensorNumeric[T]) : AbstractModule[Activity, Activity, T] = {
    val mapTable = super.doLoadModule(context).asInstanceOf[MapTable[T]]
    require(mapTable.modules.size >=1, "sub module should not be empty")
    mapTable.add(mapTable.modules(0))
    mapTable
  }
  // Serialization: trims `modules` down to the single template clone before
  // delegating, dropping the per-element clones created by forward().
  // NOTE(review): this mutates the live module as a side effect of saving.
  override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
                                              mapBuilder : BigDLModule.Builder)
                                             (implicit ev: TensorNumeric[T]) : Unit = {
    val mapTable = context.moduleData.module.asInstanceOf[MapTable[T]]
    val subModules = mapTable.modules
    require(subModules.size >=1, "sub module should not be empty")
    // `modules` are created during forward() by 'n' times of the same module depends on input size,
    // store the first one to save the storage cost just in case large input size
    val singleModule = subModules(0)
    mapTable.modules.clear()
    mapTable.modules.append(singleModule)
    super.doSerializeModule(context, mapBuilder)
  }
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/MapTable.scala | Scala | apache-2.0 | 5,321 |
package com.socrata.datacoordinator.common.collocation
import com.socrata.thirdparty.typesafeconfig.ConfigClass
import com.typesafe.config.Config
/** Typed view over the collocation section of the service configuration. */
class CollocationConfig(config: Config, root: String) extends ConfigClass(config, root) {
  // Absolute config path for a field nested under this section's root.
  private def k(field: String) = s"$root.$field"

  /** Weights used when scoring candidate collocation moves. */
  val cost = new CollocationCostConfig(config, k("cost"))
  /** Members of the collocation group, as a set of names. */
  val group = getStringList("group").toSet
  /** Path used for the collocation lock. */
  val lockPath = getString("lock-path")
  /** How long to wait when acquiring the collocation lock. */
  val lockTimeout = getDuration("lock-timeout")
}
/** Weight parameters for the collocation cost function. */
class CollocationCostConfig(config: Config, root: String) extends ConfigClass(config, root) {
  // Weights are stored as strings in the config file; parse each to a Double.
  private def weightOf(field: String): Double = getString(field).toDouble

  val movesWeight = weightOf("moves-weight")
  val totalSizeBytesWeight = weightOf("total-size-bytes-weight")
  val moveSizeMaxBytesWeight = weightOf("move-size-max-bytes-weight")
}
| socrata-platform/data-coordinator | coordinatorlib/src/main/scala/com/socrata/datacoordinator/common/collocation/CollocationConfig.scala | Scala | apache-2.0 | 788 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package porter.app.akka.api
import scala.util.{Failure, Success, Try}
import scala.concurrent.Future
import akka.actor._
import akka.util.Timeout
import porter.store.MutableStore
import porter.client.messages._
import porter.model.Ident
/**
 * Routes mutable-store messages to short-lived worker actors. Each incoming
 * `MutableStoreMessage` spawns a fresh watched worker bound to the store
 * responsible for the message's realm; the worker handles exactly that one
 * message and is then poisoned.
 */
class MutableStoreActor(stores: List[(Set[Ident], MutableStore)]) extends Actor with ActorLogging {
  import MutableStoreActor._
  // workerCreated only ever grows and makes worker actor names unique;
  // workerActive tracks currently-alive workers (decremented on Terminated).
  private var workerCreated = 0
  private var workerActive = 0
  // Store whose realm set contains `realm`; falls back to the first store.
  private def findStore(realm: Ident) =
    for {
      (id, mstore) <- stores.find(_._1 contains realm) orElse stores.headOption
    } yield mstore
  // Runs `f` with a fresh worker for the realm's store, or `g` when no store
  // exists. The PoisonPill is enqueued after the forwarded message, so the
  // worker processes the request before it stops.
  private def withStore(realm: Ident, f: ActorRef => Unit, g: => Unit) {
    findStore(realm) match {
      case Some(s) =>
        val a = context.watch(context.actorOf(workerProps(s), name = s"mstore$workerCreated"))
        workerCreated += 1; workerActive += 1
        f(a)
        a ! PoisonPill
      case None => g
    }
  }
  def receive = {
    case pm: MutableStoreMessage =>
      withStore(pm.realmId, _ forward pm, sender ! OperationFinished.failure(new Exception("No mutable store available")))
    case Terminated(ref) =>
      workerActive -= 1
      log.debug(s"Actor $ref terminated. Active mstore workers: $workerActive")
  }
}
/** Props factories and the per-request worker for [[MutableStoreActor]]. */
object MutableStoreActor {
  import porter.model._
  def apply(stores: List[(Set[Ident], MutableStore)]) = Props(classOf[MutableStoreActor], stores)
  private def workerProps(store: MutableStore) = Props(classOf[WorkerActor], store)
  private def finish(result: Boolean) = OperationFinished(success = result, None)
  /**
   * Executes exactly one mutable-store operation and replies with an
   * OperationFinished. Update messages for accounts/groups are rejected when
   * the entity is flagged immutable via PropertyList.mutableSource. The store
   * is closed when the worker stops or restarts.
   */
  private class WorkerActor(store: MutableStore) extends Actor with ActorLogging {
    import akka.pattern.pipe
    import context.dispatcher
    implicit val timeout = Timeout(3000)
    // Logs and converts any failure into an OperationFinished failure reply.
    private def fail: PartialFunction[Throwable, OperationFinished] = {
      case x =>
        log.error(x, "Mutable store operation failed")
        OperationFinished.failure(x)
    }
    // `Try` guards synchronous throws from the store call itself; the inner
    // Future covers asynchronous failures. Either way the sender gets a reply.
    private def exec(result: Try[Future[Boolean]]) {
      result match {
        case Success(f) => f.map(finish).recover(fail) pipeTo sender
        case Failure(ex) => sender ! fail(ex)
      }
    }
    def receive = {
      case UpdateRealm(realm) =>
        exec(Try(store.updateRealm(realm)))
      case DeleteRealm(realm) =>
        exec(Try(store.deleteRealm(realm)))
      case UpdateAccount(realm, account) =>
        // Entities without the mutable-source property default to mutable.
        if (PropertyList.mutableSource.get(account.props).getOrElse(true)) {
          exec(Try(store.updateAccount(realm, account)))
        } else {
          val msg = s"Account '${account.name.name}' is not mutable"
          log.error(msg)
          sender ! OperationFinished.failure(new Exception(msg))
        }
      case DeleteAccount(realm, account) =>
        exec(Try(store.deleteAccount(realm, account)))
      case UpdateGroup(realm, group) =>
        if (PropertyList.mutableSource.get(group.props).getOrElse(true)) {
          exec(Try(store.updateGroup(realm, group)))
        } else {
          val msg = s"Group '${group.name.name}' is not mutable"
          log.error(msg)
          sender ! OperationFinished.failure(new Exception(msg))
        }
      case DeleteGroup(realm, group) =>
        exec(Try(store.deleteGroup(realm, group)))
    }
    override def preRestart(reason: Throwable, message: Option[Any]) = {
      super.preRestart(reason, message)
      store.close()
    }
    override def postStop() = {
      super.postStop()
      store.close()
    }
  }
} | eikek/porter | app/src/main/scala/porter/app/akka/api/MutableStoreActor.scala | Scala | apache-2.0 | 4,065 |
package com.github.pedrovgs.haveaniceday.smiles
import com.github.pedrovgs.haveaniceday.utils.model.{HaveANiceDayError, QueryResult}
import org.joda.time.DateTime
/**
 * Domain model for the smiles feature: result-type aliases, error ADTs,
 * generator configuration and the persisted entities.
 */
object model {

  /** Outcome of extracting candidate smiles from the configured sources. */
  type SmilesExtractionResult = Either[SmilesExtractionError, Seq[Smile]]
  /** Outcome of generating (publishing) a single smile. */
  type SmilesGenerationResult = Either[SmilesGenerationError, Smile]
  /** Paginated query for stored smiles. */
  type GetSmilesResult = Either[HaveANiceDayError, QueryResult[Smile]]
  /** Lookup of a single stored smile. */
  type GetSmileResult = Either[HaveANiceDayError, Smile]

  /** Error raised during extraction; `toString` yields the message so values log cleanly. */
  sealed trait SmilesExtractionError {
    val message: String
    override def toString: String = message
  }

  /** Error raised during generation; `toString` yields the message so values log cleanly. */
  sealed trait SmilesGenerationError {
    val message: String
    override def toString: String = message
  }

  /** Generation ran, but no previously extracted smiles exist to pick from. */
  case object NoExtractedSmilesFound extends SmilesGenerationError {
    // Fix: previous wording was garbled ("Try to extract generate smiles but
    // there are no smiles generated previously").
    override val message: String =
      "Tried to generate a smile but there are no previously extracted smiles"
  }

  /** Sending the push notification for `smile` failed with `error`. */
  case class ErrorSendingNotification(smile: Smile, error: String) extends SmilesGenerationError {
    // Fix: "Try to send" -> "Tried to send" (past tense; the attempt already happened).
    override val message: String =
      s"Tried to send smile with id ${smile.id} but there was an error sending the push notification. Error: $error"
  }

  /** Catch-all error usable in both extraction and generation contexts. */
  case class UnknownError(message: String) extends SmilesExtractionError with SmilesGenerationError

  /** Configuration for the smiles generator and its scheduled tasks. */
  case class SmilesGeneratorConfig(twitterAccounts: List[String],
                                   scheduleTasks: Boolean,
                                   extractionSchedule: String,
                                   generationSchedule: String,
                                   allowManualSmilesExtraction: Boolean,
                                   allowManualSmilesGeneration: Boolean)

  /** Origin of a smile. Twitter is the only source currently supported. */
  object Source extends Enumeration {
    val Twitter = Value
  }

  /** A stored smile, optionally already sent as a push notification. */
  case class Smile(id: Long,
                   creationDate: DateTime,
                   photo: Option[String],
                   description: Option[String],
                   source: Source.Value,
                   sourceUrl: String,
                   numberOfLikes: Long,
                   sent: Boolean,
                   sentDate: Option[DateTime],
                   number: Option[Int])

  /** Record of one generation run: the chosen smile on success, or the error message. */
  case class SmilesGeneration(id: Long,
                              generationDate: DateTime,
                              smileId: Option[Long] = None,
                              error: Option[String] = None)
}
| pedrovgs/HaveANiceDay | src/main/scala/com/github/pedrovgs/haveaniceday/smiles/model.scala | Scala | gpl-3.0 | 2,330 |
/*
* Copyright (C) 2018 Joan Goyeau.
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.scala.kstream
import org.apache.kafka.streams.scala.ImplicitConversions._
import org.apache.kafka.streams.scala.Serdes._
import org.apache.kafka.streams.scala.utils.TestDriver
import org.apache.kafka.streams.scala.{ByteArrayKeyValueStore, StreamsBuilder}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
@RunWith(classOf[JUnitRunner])
class KTableTest extends FlatSpec with Matchers with TestDriver {

  // Each spec builds a topology with the Scala Streams DSL, feeds it through the
  // TestDriver helpers, and reads the changelog records the KTable emits to the
  // sink topic. Note: a KTable.filter/filterNot emits a null-valued tombstone
  // (not "nothing") when a previously-emitted key stops satisfying the predicate,
  // which is why several assertions expect `(null: java.lang.Long)`.

  "filter a KTable" should "filter records satisfying the predicate" in {
    val builder = new StreamsBuilder()
    val sourceTopic = "source"
    val sinkTopic = "sink"

    // Count occurrences per key; keep only counts strictly greater than 1.
    val table = builder.stream[String, String](sourceTopic).groupBy((key, _) => key).count()
    table.filter((_, value) => value > 1).toStream.to(sinkTopic)

    val testDriver = createTestDriver(builder)

    {
      testDriver.pipeRecord(sourceTopic, ("1", "value1"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "1"
      // count == 1 fails the predicate -> tombstone.
      record.value shouldBe (null: java.lang.Long)
    }
    {
      testDriver.pipeRecord(sourceTopic, ("1", "value2"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "1"
      // second record for key "1" -> count == 2 passes the predicate.
      record.value shouldBe 2
    }
    {
      testDriver.pipeRecord(sourceTopic, ("2", "value1"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "2"
      record.value shouldBe (null: java.lang.Long)
    }
    // No further output expected.
    testDriver.readRecord[String, Long](sinkTopic) shouldBe null

    testDriver.close()
  }

  "filterNot a KTable" should "filter records not satisfying the predicate" in {
    val builder = new StreamsBuilder()
    val sourceTopic = "source"
    val sinkTopic = "sink"

    // Inverse of the previous spec: keep only counts NOT greater than 1.
    val table = builder.stream[String, String](sourceTopic).groupBy((key, _) => key).count()
    table.filterNot((_, value) => value > 1).toStream.to(sinkTopic)

    val testDriver = createTestDriver(builder)

    {
      testDriver.pipeRecord(sourceTopic, ("1", "value1"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "1"
      // count == 1 satisfies filterNot.
      record.value shouldBe 1
    }
    {
      testDriver.pipeRecord(sourceTopic, ("1", "value2"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "1"
      // count == 2 no longer satisfies filterNot -> tombstone.
      record.value shouldBe (null: java.lang.Long)
    }
    {
      testDriver.pipeRecord(sourceTopic, ("2", "value1"))
      val record = testDriver.readRecord[String, Long](sinkTopic)
      record.key shouldBe "2"
      record.value shouldBe 1
    }
    testDriver.readRecord[String, Long](sinkTopic) shouldBe null

    testDriver.close()
  }

  "join 2 KTables" should "join correctly records" in {
    val builder = new StreamsBuilder()
    val sourceTopic1 = "source1"
    val sourceTopic2 = "source2"
    val sinkTopic = "sink"

    // Two per-key count tables joined on key; the join value is the sum of counts.
    val table1 = builder.stream[String, String](sourceTopic1).groupBy((key, _) => key).count()
    val table2 = builder.stream[String, String](sourceTopic2).groupBy((key, _) => key).count()
    table1.join(table2)((a, b) => a + b).toStream.to(sinkTopic)

    val testDriver = createTestDriver(builder)

    testDriver.pipeRecord(sourceTopic1, ("1", "topic1value1"))
    testDriver.pipeRecord(sourceTopic2, ("1", "topic2value1"))
    // One record per side -> 1 + 1 == 2.
    testDriver.readRecord[String, Long](sinkTopic).value shouldBe 2
    testDriver.readRecord[String, Long](sinkTopic) shouldBe null

    testDriver.close()
  }

  "join 2 KTables with a Materialized" should "join correctly records and state store" in {
    val builder = new StreamsBuilder()
    val sourceTopic1 = "source1"
    val sourceTopic2 = "source2"
    val sinkTopic = "sink"
    val stateStore = "store"
    // Materializing the join result makes it queryable through the named store.
    val materialized = Materialized.as[String, Long, ByteArrayKeyValueStore](stateStore)

    val table1 = builder.stream[String, String](sourceTopic1).groupBy((key, _) => key).count()
    val table2 = builder.stream[String, String](sourceTopic2).groupBy((key, _) => key).count()
    table1.join(table2, materialized)((a, b) => a + b).toStream.to(sinkTopic)

    val testDriver = createTestDriver(builder)

    testDriver.pipeRecord(sourceTopic1, ("1", "topic1value1"))
    testDriver.pipeRecord(sourceTopic2, ("1", "topic2value1"))
    testDriver.readRecord[String, Long](sinkTopic).value shouldBe 2
    // The same joined value must be visible in the materialized state store.
    testDriver.getKeyValueStore[String, Long](stateStore).get("1") shouldBe 2
    testDriver.readRecord[String, Long](sinkTopic) shouldBe null

    testDriver.close()
  }
}
| gf53520/kafka | streams/streams-scala/src/test/scala/org/apache/kafka/streams/scala/kstream/KTableTest.scala | Scala | apache-2.0 | 5,342 |
package tests
import abstractfactory._
/**
* AbstractFactory tests
* @author lmignot
*/
/**
 * AbstractFactory tests: the factory producer resolves a parser factory by
 * name, and each factory resolves a parser by message header. Both lookups
 * return Option, and the chain is composed with flatMap so a miss at either
 * level yields None.
 * @author lmignot
 */
class AbstractFactoryTest extends BaseTest {

  describe("An abstract factory") {

    it("abstract factory should return the appropriate factory if available") {
      val opt: Option[AbstractParserFactory] = ParserFactoryProducer.getFactory("NYCFactory")
      opt.get shouldBe a [AbstractParserFactory]
      opt.isDefined should be (true)
    }

    it("abstract factory should return None if no factory is available") {
      // "asdf" is not a registered factory name.
      val opt: Option[AbstractParserFactory] = ParserFactoryProducer.getFactory("asdf")
      opt shouldBe None
      opt.isEmpty should be (true)
    }

    it("if a factory is available, factory should return parser if available") {
      val msg = "Parse this XML data using the correct factory."
      // Expected output produced by XMLParser.parse (defined elsewhere); it looks
      // like the header followed by the input shifted by two code points — confirm
      // against the parser implementation before relying on the exact transform.
      val expected = "LondonFEEDBACK: Rctug\\"vjku\\"ZON\\"fcvc\\"wukpi\\"vjg\\"eqttgev\\"hcevqt{0"
      val opt: Option[XMLParser] = ParserFactoryProducer
        .getFactory("LondonFactory")
        .flatMap(_.getParserInstance("LondonFEEDBACK"))
      opt.get shouldBe a [XMLParser]
      opt.isDefined should be (true)
      opt.get.parse(msg) should be (expected)
    }

    it("if a factory is available, factory should return None if no parser is available") {
      // LondonFactory does not know the NYCORDER header.
      val opt: Option[XMLParser] = ParserFactoryProducer
        .getFactory("LondonFactory")
        .flatMap(_.getParserInstance("NYCORDER"))
      opt shouldBe None
      opt.isDefined should be (false)
    }

    it("if no factory is available, and no parser is available should return None") {
      // Both lookups miss; flatMap short-circuits on the first None.
      val opt: Option[XMLParser] = ParserFactoryProducer
        .getFactory("asdf")
        .flatMap(_.getParserInstance("NYCORDER"))
      opt shouldBe None
      opt.isDefined should be (false)
    }
  }
}
| BBK-PiJ-2015-67/sdp-portfolio | exercises/week08/src/test/scala/tests/AbstractFactoryTest.scala | Scala | unlicense | 1,789 |
package org.ugr.sci2s.mllib.test
import org.apache.spark.rdd.RDD
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
import org.ugr.sci2s.mllib.test.{MLExperimentUtils => MLEU}
import org.apache.spark.mllib.classification.ClassificationModel
import org.apache.spark.mllib.linalg._
import org.apache.spark.mllib.tree.RandomForest
import org.apache.spark.mllib.tree.model.RandomForestModel
object RandomForestAdapter extends ClassifierAdapter {

  /**
   * Builds a human-readable description of the Random Forest configuration,
   * reading the same `cls-*` parameter keys as `classify`. Defaults mirror
   * the ones used for training: 2 classes, gini impurity, "auto"
   * feature-subset strategy, 100 trees, depth 4, 100 bins.
   */
  override def algorithmInfo (parameters: Map[String, String]): String = {
    val numClasses = parameters.getOrElse("cls-numClasses", "2")
    val impurity = parameters.getOrElse("cls-impurity", "gini")
    val featSubSet = parameters.getOrElse("cls-featureSubsetStrategy", "auto")
    val numTrees = MLEU.toInt(parameters.getOrElse("cls-numTrees", "100"), 100)
    val maxDepth = parameters.getOrElse("cls-maxDepth", "4")
    val maxBins = parameters.getOrElse("cls-maxBins", "100")

    s"Algorithm: Random Forest (RF)\n" +
      s"numClasses: $numClasses\n" +
      s"numTrees: $numTrees\n" +
      s"featureSubsetStrategy: $featSubSet\n" +
      s"impurity: $impurity\n" +
      s"maxBins: $maxBins\n" +
      s"maxDepth: $maxDepth\n\n"
  }

  /**
   * Trains a Random Forest classifier treating every feature as continuous.
   *
   * Delegates to the three-argument overload with an empty categorical-feature
   * map — previously both overloads duplicated the whole parameter-parsing and
   * training body; this keeps a single training path.
   *
   * @param train      labeled training data
   * @param parameters `cls-*` configuration map (see `algorithmInfo` for keys/defaults)
   * @return adapter wrapping the trained [[RandomForestModel]]
   */
  override def classify (train: RDD[LabeledPoint], parameters: Map[String, String]): ClassificationModelAdapter =
    classify(train, parameters, Map.empty[Int, Int])

  /**
   * Trains a Random Forest classifier with explicit categorical-feature info.
   *
   * @param train       labeled training data
   * @param parameters  `cls-*` configuration map; unparsable numeric values fall
   *                    back to the documented defaults via `MLEU.toInt`
   * @param nominalInfo map of feature index -> arity for categorical features
   *                    (empty means all features are continuous)
   * @return adapter wrapping the trained [[RandomForestModel]]
   */
  def classify (train: RDD[LabeledPoint], parameters: Map[String, String], nominalInfo: Map[Int, Int]): ClassificationModelAdapter = {
    val numClasses = MLEU.toInt(parameters.getOrElse("cls-numClasses", "2"), 2)
    val impurity = parameters.getOrElse("cls-impurity", "gini")
    val featSubSet = parameters.getOrElse("cls-featureSubsetStrategy", "auto")
    val numTrees = MLEU.toInt(parameters.getOrElse("cls-numTrees", "100"), 100)
    val maxDepth = MLEU.toInt(parameters.getOrElse("cls-maxDepth", "4"), 4)
    val maxBins = MLEU.toInt(parameters.getOrElse("cls-maxBins", "100"), 100)

    val model = RandomForest.trainClassifier(train,
      numClasses, nominalInfo, numTrees, featSubSet,
      impurity, maxDepth, maxBins)

    new RandomForestAdapter(model)
  }
}
/**
 * Thin adapter exposing a trained [[RandomForestModel]] through the
 * [[ClassificationModelAdapter]] interface; both overloads delegate directly
 * to the wrapped model's `predict`.
 */
class RandomForestAdapter(model: RandomForestModel) extends ClassificationModelAdapter {

  // Batch prediction over an RDD of feature vectors.
  override def predict(data: RDD[Vector]): RDD[Double] = {
    model.predict(data)
  }

  // Single-vector prediction.
  override def predict(data: Vector): Double = {
    model.predict(data)
  }
}
| sramirez/spark-experiments | src/main/scala/org/ugr/sci2s/mllib/test/RandomForestAdapter.scala | Scala | apache-2.0 | 3,927 |
package io.circe
import scala.annotation.{ switch, tailrec }
/**
* A pretty-printer for JSON values.
*
* @author Travis Brown
* @author Tony Morris
*
* @param indent The indentation to use if any format strings contain a new line.
* @param lbraceLeft Spaces to insert to left of a left brace.
* @param lbraceRight Spaces to insert to right of a left brace.
* @param rbraceLeft Spaces to insert to left of a right brace.
* @param rbraceRight Spaces to insert to right of a right brace.
* @param lbracketLeft Spaces to insert to left of a left bracket.
* @param lbracketRight Spaces to insert to right of a left bracket.
* @param rbracketLeft Spaces to insert to left of a right bracket.
* @param rbracketRight Spaces to insert to right of a right bracket.
* @param lrbracketsEmpty Spaces to insert for an empty array.
* @param arrayCommaLeft Spaces to insert to left of a comma in an array.
* @param arrayCommaRight Spaces to insert to right of a comma in an array.
* @param objectCommaLeft Spaces to insert to left of a comma in an object.
* @param objectCommaRight Spaces to insert to right of a comma in an object.
* @param colonLeft Spaces to insert to left of a colon.
* @param colonRight Spaces to insert to right of a colon.
* @param preserveOrder Determines if field ordering should be preserved.
* @param dropNullKeys Determines if object fields with values of null are dropped from the output.
*/
final case class Printer(
  preserveOrder: Boolean,
  dropNullKeys: Boolean,
  indent: String,
  lbraceLeft: String = "",
  lbraceRight: String = "",
  rbraceLeft: String = "",
  rbraceRight: String = "",
  lbracketLeft: String = "",
  lbracketRight: String = "",
  rbracketLeft: String = "",
  rbracketRight: String = "",
  lrbracketsEmpty: String = "",
  arrayCommaLeft: String = "",
  arrayCommaRight: String = "",
  objectCommaLeft: String = "",
  objectCommaRight: String = "",
  colonLeft: String = "",
  colonRight: String = ""
) extends Serializable {
  // Fixed structural tokens of the JSON syntax.
  private[this] val openBraceText = "{"
  private[this] val closeBraceText = "}"
  private[this] val openArrayText = "["
  private[this] val closeArrayText = "]"
  private[this] val commaText = ","
  private[this] val colonText = ":"
  private[this] val nullText = "null"
  private[this] val trueText = "true"
  private[this] val falseText = "false"
  private[this] val stringEnclosureText = "\\""

  /**
   * Turns a format string into a depth-dependent string: if `s` contains no
   * newline it is constant; otherwise `indent` repeated `n` times is spliced
   * in right after the last newline. The split is computed once here so the
   * per-depth application is cheap.
   */
  private[this] def addIndentation(s: String): Int => String = {
    val lastNewLineIndex = s.lastIndexOf("\\n")
    if (lastNewLineIndex < 0) {
      _ => s
    } else {
      val afterLastNewLineIndex = lastNewLineIndex + 1
      val start = s.substring(0, afterLastNewLineIndex)
      val end = s.substring(afterLastNewLineIndex)
      n => start + indent * n + end
    }
  }

  // Pre-rendered punctuation strings per nesting depth, memoized so each depth
  // is formatted only once per Printer instance. Left-hand padding is indented
  // at the current depth `i`; right-hand padding at the child depth `i + 1`.
  private[this] val pieces = new Printer.MemoizedPieces {
    def compute(i: Int): Printer.Pieces = Printer.Pieces(
      "%s%s%s".format(
        addIndentation(lbraceLeft)(i),
        openBraceText,
        addIndentation(lbraceRight)(i + 1)
      ),
      "%s%s%s".format(
        addIndentation(rbraceLeft)(i),
        closeBraceText,
        addIndentation(rbraceRight)(i + 1)
      ),
      "%s%s%s".format(
        addIndentation(lbracketLeft)(i),
        openArrayText,
        addIndentation(lbracketRight)(i + 1)
      ),
      "%s%s%s".format(
        addIndentation(rbracketLeft)(i),
        closeArrayText,
        addIndentation(rbracketRight)(i + 1)
      ),
      "%s%s%s".format(
        openArrayText,
        addIndentation(lrbracketsEmpty)(i),
        closeArrayText
      ),
      "%s%s%s".format(
        addIndentation(arrayCommaLeft)(i + 1),
        commaText,
        addIndentation(arrayCommaRight)(i + 1)
      ),
      "%s%s%s".format(
        addIndentation(objectCommaLeft)(i + 1),
        commaText,
        addIndentation(objectCommaRight)(i + 1)
      ),
      "%s%s%s".format(
        addIndentation(colonLeft)(i + 1),
        colonText,
        addIndentation(colonRight)(i + 1)
      )
    )
  }

  /**
   * Returns a string representation of a pretty-printed JSON value.
   */
  def pretty(j: Json): String = {
    val builder = new StringBuilder()

    // Appends `jsonString` with escaping applied. Alternates between copying
    // runs of "normal" characters verbatim and escaping runs of special ones;
    // tail-recursive so long strings don't grow the stack.
    @tailrec
    def appendJsonString(
      jsonString: String,
      normalChars: Boolean = true
    ): Unit = if (normalChars) {
      jsonString.span(Printer.isNormalChar) match {
        case (prefix, suffix) =>
          builder.append(prefix)
          if (suffix.nonEmpty) appendJsonString(suffix, normalChars = false)
      }
    } else {
      jsonString.span(c => !Printer.isNormalChar(c)) match {
        case (prefix, suffix) => {
          prefix.foreach { c => builder.append(Printer.escape(c)) }
          if (suffix.nonEmpty) appendJsonString(suffix, normalChars = true)
        }
      }
    }

    // Appends a JSON string value, escaped and wrapped in double quotes.
    def encloseJsonString(jsonString: String): Unit = {
      builder.append(stringEnclosureText)
      appendJsonString(jsonString)
      builder.append(stringEnclosureText)
    }

    // Recursive traversal of the JSON tree; `depth` selects the memoized
    // punctuation pieces for the current nesting level.
    def trav(depth: Int, k: Json): Unit = {
      val p = pieces(depth)

      import Json._
      k match {
        case JObject(o) =>
          builder.append(p.lBraces)
          // preserveOrder keeps insertion order (toList); otherwise toMap is
          // used and field order becomes map-iteration order.
          val items = if (preserveOrder) o.toList else o.toMap
          var first = true

          items.foreach {
            case (key, value) =>
              // dropNullKeys silently skips fields whose value is JSON null.
              if (!dropNullKeys || !value.isNull) {
                if (!first) {
                  builder.append(p.objectCommas)
                }

                encloseJsonString(key)
                builder.append(p.colons)

                trav(depth + 1, value)
                first = false
              }
          }
          builder.append(p.rBraces)
        case JString(s) => encloseJsonString(s)
        case JNumber(n) => n match {
          case JsonLong(x) => builder.append(x.toString)
          case JsonDouble(x) => builder.append(x.toString)
          // JsonDecimal already carries its textual form.
          case JsonDecimal(x) => builder.append(x)
          case JsonBigDecimal(x) => builder.append(x.toString)
        }
        case JBoolean(b) => builder.append(if (b) trueText else falseText)
        case JArray(arr) =>
          // Empty arrays use the dedicated empty-bracket piece; otherwise an
          // index loop avoids allocating an iterator per element.
          if (arr.length == 0) builder.append(p.lrEmptyBrackets) else {
            builder.append(p.lBrackets)
            trav(depth + 1, arr(0))
            var i = 1

            while (i < arr.length) {
              builder.append(p.arrayCommas)
              trav(depth + 1, arr(i))
              i += 1
            }

            builder.append(p.rBrackets)
          }
        case JNull => builder.append(nullText)
      }
    }

    trav(0, j)
    builder.toString
  }
}
object Printer {
  /**
   * A pretty-printer configuration that inserts no spaces.
   */
  final val noSpaces: Printer = Printer(
    preserveOrder = true,
    dropNullKeys = false,
    indent = ""
  )

  /**
   * A pretty-printer configuration that indents by the given spaces.
   */
  final def indented(indent: String): Printer = Printer(
    preserveOrder = true,
    dropNullKeys = false,
    indent = indent,
    // Newlines after opening / before closing delimiters and after commas give
    // the conventional one-entry-per-line layout; addIndentation splices the
    // depth-scaled indent after each of these newlines.
    lbraceRight = "\\n",
    rbraceLeft = "\\n",
    lbracketRight = "\\n",
    rbracketLeft = "\\n",
    lrbracketsEmpty = "\\n",
    arrayCommaRight = "\\n",
    objectCommaRight = "\\n",
    colonLeft = " ",
    colonRight = " "
  )

  /**
   * A pretty-printer configuration that indents by two spaces.
   */
  final val spaces2: Printer = indented(" ")

  /**
   * A pretty-printer configuration that indents by four spaces.
   */
  final val spaces4: Printer = indented(" ")

  // Escapes a single character for inclusion in a JSON string: the short
  // escapes for the common control/quote characters, \\u escapes for the
  // remaining ISO control characters, and the character itself otherwise.
  // @switch asks the compiler to emit a table switch (and warn if it can't).
  private[circe] final def escape(c: Char): String = (c: @switch) match {
    case '\\\\' => "\\\\\\\\"
    case '"' => "\\\\\\""
    case '\\b' => "\\\\b"
    case '\\f' => "\\\\f"
    case '\\n' => "\\\\n"
    case '\\r' => "\\\\r"
    case '\\t' => "\\\\t"
    case possibleUnicode => if (Character.isISOControl(possibleUnicode)) {
      "\\\\u%04x".format(possibleUnicode.toInt)
    } else possibleUnicode.toString
  }

  // True iff the character can be emitted verbatim inside a JSON string,
  // i.e. it needs no escaping. Must stay the exact complement of the set
  // `escape` handles specially.
  private[circe] final def isNormalChar(c: Char): Boolean = (c: @switch) match {
    case '\\\\' => false
    case '"' => false
    case '\\b' => false
    case '\\f' => false
    case '\\n' => false
    case '\\r' => false
    case '\\t' => false
    case possibleUnicode => !Character.isISOControl(possibleUnicode)
  }

  // Pre-rendered punctuation strings for one nesting depth (see
  // Printer#pieces for how each field is assembled).
  private[circe] final case class Pieces(
    lBraces: String,
    rBraces: String,
    lBrackets: String,
    rBrackets: String,
    lrEmptyBrackets: String,
    arrayCommas: String,
    objectCommas: String,
    colons: String
  ) extends Serializable

  // Depth-indexed cache of Pieces: apply(i) computes and stores entries up to
  // depth i on first request and reuses them afterwards.
  // NOTE(review): the size-check-then-add sequence is not atomic, so two
  // threads racing on the same new depth could conflict despite the
  // CopyOnWriteArrayList — looks tolerated by design, but confirm.
  private[circe] abstract class MemoizedPieces extends Serializable {
    def compute(i: Int): Pieces

    private[this] final val known = new java.util.concurrent.CopyOnWriteArrayList[Pieces]

    def apply(i: Int): Pieces = if (i < known.size) known.get(i) else if (i == known.size) {
      val res = compute(i)
      known.add(i, res)
      res
    } else {
      // Requested depth skips ahead: fill in all missing depths in order.
      var j = known.size
      var res: Pieces = null

      while (j <= i) {
        res = compute(j)
        known.add(j, res)
        j += 1
      }

      res
    }
  }
}
| alexarchambault/circe | core/shared/src/main/scala/io/circe/Printer.scala | Scala | apache-2.0 | 8,928 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sparklinedata.spark.dateTime
import com.github.nscala_time.time.Imports._
import org.apache.spark.sql.catalyst.dsl.expressions._
import TestSQLContext._
import org.apache.spark.sparklinedata.datetime.SparkDateTime
import org.sparklinedata.spark.dateTime.Functions._
import org.sparklinedata.spark.dateTime.dsl.expressions._
import scala.language.postfixOps
/**
 * Regression tests for reported issues: each spec builds a SQL query with the
 * `date"..."` interpolator, runs it through Spark SQL, and checks the
 * SparkDateTime columns against the same transformation applied directly with
 * nscala-time on the raw string column.
 */
class IssuesTest extends BaseTest {

  // Issue 1: date arithmetic (subtracting a period) inside a query.
  test("issue1") {
    val dT = dateTime('dt)
    val dT1 = dateTime('dt) - 8.hour
    val t = sql(date"select dt, $dT, $dT1 from input")

    t.collect.foreach { r =>
      val o = r.getString(0)
      // getAs[SparkDateTime] is compared as a DateTime — relies on an implicit
      // SparkDateTime -> DateTime conversion imported above.
      val d : DateTime = r.getAs[SparkDateTime](1)
      val d1 : DateTime = r.getAs[SparkDateTime](2)
      val oDt = DateTime.parse(o).withZone(DateTimeZone.UTC)
      assert(oDt == d)
      val oDt1 = DateTime.parse(o).withZone(DateTimeZone.UTC) - 8.hour
      assert(oDt1 == d1)
    }
  }

  // Issue 3: withZone must preserve the instant while changing the time zone.
  test("issue3") {
    val dT = dateTime('dt)
    val dT1 = dateTime('dt) withZone("US/Pacific")
    val dT2 = dateTime('dt) withZone("Asia/Calcutta")
    val t = sql(date"select dt, $dT, $dT1, $dT2 from input")

    t.collect.foreach { r =>
      val o = r.getString(0)
      val d : DateTime = r.getAs[SparkDateTime](1)
      val d1 : DateTime = r.getAs[SparkDateTime](2)
      val d2 : DateTime = r.getAs[SparkDateTime](3)
      val oDt = DateTime.parse(o).withZone(DateTimeZone.UTC)
      assert(oDt == d)
      val oDt1 = DateTime.parse(o).withZone(DateTimeZone.forID("US/Pacific"))
      assert(oDt1 == d1)
      val oDt2 = DateTime.parse(o).withZone(DateTimeZone.forID("Asia/Calcutta"))
      assert(oDt2 == d2)
    }
  }

  // Issue 5: parsing with an explicit joda-time pattern (table `input1` holds
  // dates in "yyyy-MM-dd HH:mm:ss" form rather than ISO-8601).
  test("issue5") {
    val dP = "yyyy-MM-dd HH:mm:ss"
    val fmt1 = DateTimeFormat.forPattern(dP)
    val dT = dateTime('dt, Some(dP))
    val t = sql(date"select dt, $dT from input1")

    t.collect.foreach { r =>
      val o = r.getString(0)
      val d : DateTime = r.getAs[SparkDateTime](1)
      val oDt = DateTime.parse(o, fmt1).withZone(DateTimeZone.UTC)
      assert(oDt == d)
    }
  }
}
| SparklineData/spark-datetime | src/test/scala/org/sparklinedata/spark/dateTime/IssuesTest.scala | Scala | apache-2.0 | 2,836 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.AggregateCall
import org.apache.flink.table.calcite.FlinkRelBuilder
import FlinkRelBuilder.NamedWindowProperty
import org.apache.flink.table.runtime.aggregate.AggregateUtil._
import scala.collection.JavaConverters._
/**
 * Shared string-rendering helpers for aggregate plan nodes. The produced
 * strings appear in plan digests / explain output, so their exact format
 * matters and must stay stable.
 */
trait CommonAggregate {

  /** Renders the grouping keys as a comma-separated list of input field names. */
  private[flink] def groupingToString(inputType: RelDataType, grouping: Array[Int]): String = {
    val inFields = inputType.getFieldNames.asScala
    grouping.map( inFields(_) ).mkString(", ")
  }

  /**
   * Renders the full aggregation as a comma-separated list: grouping fields,
   * then aggregate calls (e.g. `COUNT(DISTINCT a)`, `SUM(b)`, or `AGG(*)` when
   * the call has no arguments), then named window properties — each paired
   * positionally with the corresponding output field and written as
   * `expr AS outName` whenever the two names differ.
   */
  private[flink] def aggregationToString(
    inputType: RelDataType,
    grouping: Array[Int],
    rowType: RelDataType,
    namedAggregates: Seq[CalcitePair[AggregateCall, String]],
    namedProperties: Seq[NamedWindowProperty])
  : String = {

    val inFields = inputType.getFieldNames.asScala
    val outFields = rowType.getFieldNames.asScala

    val groupStrings = grouping.map( inFields(_) )

    val aggs = namedAggregates.map(_.getKey)
    val aggStrings = aggs.map( a => s"${a.getAggregation}(${
      val prefix = if (a.isDistinct) "DISTINCT " else ""
      prefix + (if (a.getArgList.size() > 0) {
        // Translate argument ordinals back to input field names.
        a.getArgList.asScala.map(inFields(_)).mkString(", ")
      } else {
        "*"
      })
    })")

    val propStrings = namedProperties.map(_.property.toString)

    // Positional zip: outFields is expected to line up with
    // grouping ++ aggregates ++ properties.
    (groupStrings ++ aggStrings ++ propStrings).zip(outFields).map {
      case (f, o) => if (f == o) {
        f
      } else {
        s"$f AS $o"
      }
    }.mkString(", ")
  }
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/nodes/CommonAggregate.scala | Scala | apache-2.0 | 2,355 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.net.{Authenticator, PasswordAuthentication}
import java.security.KeyStore
import java.security.cert.X509Certificate
import javax.net.ssl._
import com.google.common.io.Files
import org.apache.hadoop.io.Text
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.network.sasl.SecretKeyHolder
import org.apache.spark.util.Utils
/**
* Spark class responsible for security.
*
* In general this class should be instantiated by the SparkEnv and most components
* should access it from that. There are some cases where the SparkEnv hasn't been
* initialized yet and this class must be instantiated directly.
*
* Spark currently supports authentication via a shared secret.
* Authentication can be configured to be on via the 'spark.authenticate' configuration
* parameter. This parameter controls whether the Spark communication protocols do
* authentication using the shared secret. This authentication is a basic handshake to
* make sure both sides have the same shared secret and are allowed to communicate.
* If the shared secret is not identical they will not be allowed to communicate.
*
* The Spark UI can also be secured by using javax servlet filters. A user may want to
* secure the UI if it has data that other users should not be allowed to see. The javax
* servlet filter specified by the user can authenticate the user and then once the user
* is logged in, Spark can compare that user versus the view acls to make sure they are
* authorized to view the UI. The configs 'spark.acls.enable' and 'spark.ui.view.acls'
* control the behavior of the acls. Note that the person who started the application
* always has view access to the UI.
*
* Spark has a set of modify acls (`spark.modify.acls`) that controls which users have permission
* to modify a single application. This would include things like killing the application. By
* default the person who started the application has modify access. For modify access through
* the UI, you must have a filter that does authentication in place for the modify acls to work
* properly.
*
* Spark also has a set of admin acls (`spark.admin.acls`) which is a set of users/administrators
* who always have permission to view or modify the Spark application.
*
* Starting from version 1.3, Spark has partial support for encrypted connections with SSL.
*
* At this point spark has multiple communication protocols that need to be secured and
* different underlying mechanisms are used depending on the protocol:
*
* - Akka -> The only option here is to use the Akka Remote secure-cookie functionality.
* Akka remoting allows you to specify a secure cookie that will be exchanged
* and ensured to be identical in the connection handshake between the client
* and the server. If they are not identical then the client will be refused
* to connect to the server. There is no control of the underlying
* authentication mechanism so its not clear if the password is passed in
* plaintext or uses DIGEST-MD5 or some other mechanism.
*
* Akka also has an option to turn on SSL, this option is currently supported (see
* the details below).
*
* - HTTP for broadcast and file server (via HttpServer) -> Spark currently uses Jetty
* for the HttpServer. Jetty supports multiple authentication mechanisms -
* Basic, Digest, Form, Spengo, etc. It also supports multiple different login
* services - Hash, JAAS, Spnego, JDBC, etc. Spark currently uses the HashLoginService
* to authenticate using DIGEST-MD5 via a single user and the shared secret.
* Since we are using DIGEST-MD5, the shared secret is not passed on the wire
* in plaintext.
*
* We currently support SSL (https) for this communication protocol (see the details
* below).
*
* The Spark HttpServer installs the HashLoginServer and configures it to DIGEST-MD5.
* Any clients must specify the user and password. There is a default
* Authenticator installed in the SecurityManager to how it does the authentication
* and in this case gets the user name and password from the request.
*
* - BlockTransferService -> The Spark BlockTransferServices uses java nio to asynchronously
* exchange messages. For this we use the Java SASL
* (Simple Authentication and Security Layer) API and again use DIGEST-MD5
* as the authentication mechanism. This means the shared secret is not passed
* over the wire in plaintext.
* Note that SASL is pluggable as to what mechanism it uses. We currently use
* DIGEST-MD5 but this could be changed to use Kerberos or other in the future.
* Spark currently supports "auth" for the quality of protection, which means
* the connection does not support integrity or privacy protection (encryption)
* after authentication. SASL also supports "auth-int" and "auth-conf" which
* SPARK could support in the future to allow the user to specify the quality
* of protection they want. If we support those, the messages will also have to
* be wrapped and unwrapped via the SaslServer/SaslClient.wrap/unwrap API's.
*
* Since the NioBlockTransferService does asynchronous messages passing, the SASL
* authentication is a bit more complex. A ConnectionManager can be both a client
* and a Server, so for a particular connection it has to determine what to do.
* A ConnectionId was added to be able to track connections and is used to
* match up incoming messages with connections waiting for authentication.
* The ConnectionManager tracks all the sendingConnections using the ConnectionId,
* waits for the response from the server, and does the handshake before sending
* the real message.
*
* The NettyBlockTransferService ensures that SASL authentication is performed
* synchronously prior to any other communication on a connection. This is done in
* SaslClientBootstrap on the client side and SaslRpcHandler on the server side.
*
* - HTTP for the Spark UI -> the UI was changed to use servlets so that javax servlet filters
* can be used. Yarn requires a specific AmIpFilter be installed for security to work
* properly. For non-Yarn deployments, users can write a filter to go through their
* organization's normal login service. If an authentication filter is in place then the
* SparkUI can be configured to check the logged in user against the list of users who
* have view acls to see if that user is authorized.
* The filters can also be used for many different purposes. For instance filters
* could be used for logging, encryption, or compression.
*
* The exact mechanisms used to generate/distribute the shared secret are deployment-specific.
*
* For Yarn deployments, the secret is automatically generated using the Akka remote
* Crypt.generateSecureCookie() API. The secret is placed in the Hadoop UGI which gets passed
* around via the Hadoop RPC mechanism. Hadoop RPC can be configured to support different levels
* of protection. See the Hadoop documentation for more details. Each Spark application on Yarn
* gets a different shared secret. On Yarn, the Spark UI gets configured to use the Hadoop Yarn
* AmIpFilter which requires the user to go through the ResourceManager Proxy. That Proxy is there
* to reduce the possibility of web based attacks through YARN. Hadoop can be configured to use
* filters to do authentication. That authentication then happens via the ResourceManager Proxy
* and Spark will use that to do authorization against the view acls.
*
* For other Spark deployments, the shared secret must be specified via the
* spark.authenticate.secret config.
* All the nodes (Master and Workers) and the applications need to have the same shared secret.
* This again is not ideal as one user could potentially affect another users application.
* This should be enhanced in the future to provide better protection.
* If the UI needs to be secure, the user needs to install a javax servlet filter to do the
* authentication. Spark will then use that user to compare against the view acls to do
* authorization. If not filter is in place the user is generally null and no authorization
* can take place.
*
* Connection encryption (SSL) configuration is organized hierarchically. The user can configure
* the default SSL settings which will be used for all the supported communication protocols unless
* they are overwritten by protocol specific settings. This way the user can easily provide the
* common settings for all the protocols without disabling the ability to configure each one
* individually.
*
* All the SSL settings like `spark.ssl.xxx` where `xxx` is a particular configuration property,
* denote the global configuration for all the supported protocols. In order to override the global
* configuration for the particular protocol, the properties must be overwritten in the
* protocol-specific namespace. Use `spark.ssl.yyy.xxx` settings to overwrite the global
* configuration for particular protocol denoted by `yyy`. Currently `yyy` can be either `akka` for
* Akka based connections or `fs` for broadcast and file server.
*
* Refer to [[org.apache.spark.SSLOptions]] documentation for the list of
* options that can be specified.
*
* SecurityManager initializes SSLOptions objects for different protocols separately. SSLOptions
* object parses Spark configuration at a given namespace and builds the common representation
* of SSL settings. SSLOptions is then used to provide protocol-specific configuration like
* TypeSafe configuration for Akka or SSLContextFactory for Jetty.
*
* SSL must be configured on each node and configured for each component involved in
* communication using the particular protocol. In YARN clusters, the key-store can be prepared on
* the client side then distributed and used by the executors as the part of the application
* (YARN allows the user to deploy files before the application is started).
* In standalone deployment, the user needs to provide key-stores and configuration
* options for master and workers. In this mode, the user may allow the executors to use the SSL
* settings inherited from the worker which spawned that executor. It can be accomplished by
* setting `spark.ssl.useNodeLocalConf` to `true`.
*/
private[spark] class SecurityManager(sparkConf: SparkConf)
  extends Logging with SecretKeyHolder {

  // key used to store the spark secret in the Hadoop UGI
  private val sparkSecretLookupKey = "sparkCookie"

  // whether shared-secret authentication is required for Spark's communication protocols
  private val authOn = sparkConf.getBoolean("spark.authenticate", false)
  // keep spark.ui.acls.enable for backwards compatibility with 1.0
  private var aclsOn =
    sparkConf.getBoolean("spark.acls.enable", sparkConf.getBoolean("spark.ui.acls.enable", false))

  // admin acls should be set before view or modify acls
  private var adminAcls: Set[String] =
    stringToSet(sparkConf.get("spark.admin.acls", ""))

  // users allowed to view the application UI; populated below via setViewAcls,
  // which always folds in adminAcls and the default users
  private var viewAcls: Set[String] = _

  // list of users who have permission to modify the application. This should
  // apply to both UI and CLI for things like killing the application.
  private var modifyAcls: Set[String] = _

  // always add the current user and SPARK_USER to the viewAcls
  private val defaultAclUsers = Set[String](System.getProperty("user.name", ""),
    Utils.getCurrentUserName())

  // NOTE: initialization order matters -- the setters below read adminAcls and
  // defaultAclUsers, which must already be initialized above.
  setViewAcls(defaultAclUsers, sparkConf.get("spark.ui.view.acls", ""))
  setModifyAcls(defaultAclUsers, sparkConf.get("spark.modify.acls", ""))

  // null when authentication is disabled (see generateSecretKey)
  private val secretKey = generateSecretKey()
  logInfo("SecurityManager: authentication " + (if (authOn) "enabled" else "disabled") +
    "; ui acls " + (if (aclsOn) "enabled" else "disabled") +
    "; users with view permissions: " + viewAcls.toString() +
    "; users with modify permissions: " + modifyAcls.toString())

  // Set our own authenticator to properly negotiate user/password for HTTP connections.
  // This is needed by the HTTP client fetching from the HttpServer. Put here so its
  // only set once.
  if (authOn) {
    Authenticator.setDefault(
      new Authenticator() {
        override def getPasswordAuthentication(): PasswordAuthentication = {
          var passAuth: PasswordAuthentication = null
          // userInfo is the "user:password" part of the requesting URL, if present
          val userInfo = getRequestingURL().getUserInfo()
          if (userInfo != null) {
            val parts = userInfo.split(":", 2)
            passAuth = new PasswordAuthentication(parts(0), parts(1).toCharArray())
          }
          return passAuth
        }
      }
    )
  }

  // the default SSL configuration - it will be used by all communication layers unless overwritten
  private val defaultSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl", defaults = None)

  // SSL configuration for different communication layers - they can override the default
  // configuration at a specified namespace. The namespace *must* start with spark.ssl.
  val fileServerSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl.fs", Some(defaultSSLOptions))
  val akkaSSLOptions = SSLOptions.parse(sparkConf, "spark.ssl.akka", Some(defaultSSLOptions))

  logDebug(s"SSLConfiguration for file server: $fileServerSSLOptions")
  logDebug(s"SSLConfiguration for Akka: $akkaSSLOptions")

  // Socket factory and hostname verifier handed to HTTP clients when fetching
  // from the file server over SSL; both None when file-server SSL is disabled.
  val (sslSocketFactory, hostnameVerifier) = if (fileServerSSLOptions.enabled) {
    // Load trust managers from the configured trust store, if one was given.
    val trustStoreManagers =
      for (trustStore <- fileServerSSLOptions.trustStore) yield {
        val input = Files.asByteSource(fileServerSSLOptions.trustStore.get).openStream()

        try {
          val ks = KeyStore.getInstance(KeyStore.getDefaultType)
          ks.load(input, fileServerSSLOptions.trustStorePassword.get.toCharArray)

          val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
          tmf.init(ks)
          tmf.getTrustManagers
        } finally {
          input.close()
        }
      }

    // Fallback when no trust store is configured: accepts any certificate.
    lazy val credulousTrustStoreManagers = Array({
      logWarning("Using 'accept-all' trust manager for SSL connections.")
      new X509TrustManager {
        override def getAcceptedIssuers: Array[X509Certificate] = null

        override def checkClientTrusted(x509Certificates: Array[X509Certificate], s: String) {}

        override def checkServerTrusted(x509Certificates: Array[X509Certificate], s: String) {}
      }: TrustManager
    })

    val sslContext = SSLContext.getInstance(fileServerSSLOptions.protocol.getOrElse("Default"))
    sslContext.init(null, trustStoreManagers.getOrElse(credulousTrustStoreManagers), null)

    // Hostname verification is effectively disabled here (always returns true).
    val hostVerifier = new HostnameVerifier {
      override def verify(s: String, sslSession: SSLSession): Boolean = true
    }

    (Some(sslContext.getSocketFactory), Some(hostVerifier))
  } else {
    (None, None)
  }

  /**
   * Split a comma separated String, filter out any empty items, and return a Set of strings
   */
  private def stringToSet(list: String): Set[String] = {
    list.split(',').map(_.trim).filter(!_.isEmpty).toSet
  }

  /**
   * Admin acls should be set before the view or modify acls. If you modify the admin
   * acls you should also set the view and modify acls again to pick up the changes.
   */
  def setViewAcls(defaultUsers: Set[String], allowedUsers: String) {
    viewAcls = (adminAcls ++ defaultUsers ++ stringToSet(allowedUsers))
    logInfo("Changing view acls to: " + viewAcls.mkString(","))
  }

  /** Convenience overload of [[setViewAcls]] taking a single default user. */
  def setViewAcls(defaultUser: String, allowedUsers: String) {
    setViewAcls(Set[String](defaultUser), allowedUsers)
  }

  /** Returns the current view acls as a comma separated string. */
  def getViewAcls: String = viewAcls.mkString(",")

  /**
   * Admin acls should be set before the view or modify acls. If you modify the admin
   * acls you should also set the view and modify acls again to pick up the changes.
   */
  def setModifyAcls(defaultUsers: Set[String], allowedUsers: String) {
    modifyAcls = (adminAcls ++ defaultUsers ++ stringToSet(allowedUsers))
    logInfo("Changing modify acls to: " + modifyAcls.mkString(","))
  }

  /** Returns the current modify acls as a comma separated string. */
  def getModifyAcls: String = modifyAcls.mkString(",")

  /**
   * Admin acls should be set before the view or modify acls. If you modify the admin
   * acls you should also set the view and modify acls again to pick up the changes.
   */
  def setAdminAcls(adminUsers: String) {
    adminAcls = stringToSet(adminUsers)
    logInfo("Changing admin acls to: " + adminAcls.mkString(","))
  }

  /** Enables or disables acl checking at runtime. */
  def setAcls(aclSetting: Boolean) {
    aclsOn = aclSetting
    logInfo("Changing acls enabled to: " + aclsOn)
  }

  /**
   * Generates or looks up the secret key.
   *
   * The way the key is stored depends on the Spark deployment mode. Yarn
   * uses the Hadoop UGI.
   *
   * For non-Yarn deployments, If the config variable is not set
   * we throw an exception.
   */
  private def generateSecretKey(): String = {
    if (!isAuthenticationEnabled) return null
    // first check to see if the secret is already set, else generate a new one if on yarn
    val sCookie = if (SparkHadoopUtil.get.isYarnMode) {
      val secretKey = SparkHadoopUtil.get.getSecretKeyFromUserCredentials(sparkSecretLookupKey)
      if (secretKey != null) {
        logDebug("in yarn mode, getting secret from credentials")
        return new Text(secretKey).toString
      } else {
        logDebug("getSecretKey: yarn mode, secret key from credentials is null")
      }
      val cookie = akka.util.Crypt.generateSecureCookie
      // if we generated the secret then we must be the first so lets set it so t
      // gets used by everyone else
      SparkHadoopUtil.get.addSecretKeyToUserCredentials(sparkSecretLookupKey, cookie)
      logInfo("adding secret to credentials in yarn mode")
      cookie
    } else {
      // user must have set spark.authenticate.secret config
      sparkConf.getOption("spark.authenticate.secret") match {
        case Some(value) => value
        case None => throw new Exception("Error: a secret key must be specified via the " +
          "spark.authenticate.secret config")
      }
    }
    sCookie
  }

  /**
   * Check to see if Acls for the UI are enabled
   * @return true if UI authentication is enabled, otherwise false
   */
  def aclsEnabled(): Boolean = aclsOn

  /**
   * Checks the given user against the view acl list to see if they have
   * authorization to view the UI. If the UI acls are disabled
   * via spark.acls.enable, all users have view access. If the user is null
   * it is assumed authentication is off and all users have access.
   *
   * @param user to see if is authorized
   * @return true is the user has permission, otherwise false
   */
  def checkUIViewPermissions(user: String): Boolean = {
    logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " viewAcls=" +
      viewAcls.mkString(","))
    !aclsEnabled || user == null || viewAcls.contains(user)
  }

  /**
   * Checks the given user against the modify acl list to see if they have
   * authorization to modify the application. If the UI acls are disabled
   * via spark.acls.enable, all users have modify access. If the user is null
   * it is assumed authentication isn't turned on and all users have access.
   *
   * @param user to see if is authorized
   * @return true is the user has permission, otherwise false
   */
  def checkModifyPermissions(user: String): Boolean = {
    logDebug("user=" + user + " aclsEnabled=" + aclsEnabled() + " modifyAcls=" +
      modifyAcls.mkString(","))
    !aclsEnabled || user == null || modifyAcls.contains(user)
  }

  /**
   * Check to see if authentication for the Spark communication protocols is enabled
   * @return true if authentication is enabled, otherwise false
   */
  def isAuthenticationEnabled(): Boolean = authOn

  /**
   * Gets the user used for authenticating HTTP connections.
   * For now use a single hardcoded user.
   * @return the HTTP user as a String
   */
  def getHttpUser(): String = "sparkHttpUser"

  /**
   * Gets the user used for authenticating SASL connections.
   * For now use a single hardcoded user.
   * @return the SASL user as a String
   */
  def getSaslUser(): String = "sparkSaslUser"

  /**
   * Gets the secret key.
   * @return the secret key as a String if authentication is enabled, otherwise returns null
   */
  def getSecretKey(): String = secretKey

  // Default SecurityManager only has a single secret key, so ignore appId.
  override def getSaslUser(appId: String): String = getSaslUser()

  override def getSecretKey(appId: String): String = getSecretKey()
}
| Dax1n/spark-core | core/src/main/scala/org/apache/spark/SecurityManager.scala | Scala | apache-2.0 | 21,682 |
package poly.collection.mut
import cats.implicits._
import poly.collection._
import poly.collection.factory._
import poly.collection.impl._
import poly.macroutil._
/**
 * An array-backed segment tree supporting point updates and range aggregation
 * under a `Monoid`, both in O(log n) combines.
 *
 * Layout: `data` has length `2 * n`; the `n` logical elements live in the
 * leaves at indices `[n, 2 * n)` and inner node `k` combines its children
 * `2 * k` and `2 * k + 1` (the root is index 1; slot 0 is not read by queries).
 *
 * NOTE(review): `rangeAggregate` folds both the left and right fringes into a
 * single accumulator, so elements are not combined strictly left-to-right.
 * This is only guaranteed correct for commutative monoids -- confirm against
 * intended usage.
 *
 * @author Tongfei Chen
 */
class SegmentTreeRangeQuery[T] private(private val data: ResizableSeq[T])(implicit val monoid: Monoid[T])
  extends AbstractIndexedSeq[T] with ValueMutableIndexedSeq[T] with RangeMonoidQueryable[T]
{

  // Number of logical elements: half the backing array (the other half holds inner nodes).
  def fastLength = data.len / 2

  // Element i is stored at leaf position length + i.
  def fastApply(i: Int): T = data(length + i)

  /** Point update: overwrites element `i`, then recomputes every ancestor. */
  def update(i: Int, x: T) = {
    var j = length + i
    data(j) = x
    while (j > 0) {
      // Determine the sibling pair (l, r) that node j belongs to, then refresh the parent.
      var l = j
      var r = j
      if (j % 2 == 0) r = j + 1 // left child
      else l = j - 1 // right child
      data(j / 2) = data(l) |+| data(r)
      j /= 2
    }
  }

  /** Aggregate of elements `0..i`, endpoints inclusive. */
  def prefixAggregate(i: Int): T = rangeAggregate(0, i)

  /** Aggregate of elements `i..j`, endpoints inclusive. */
  def rangeAggregate(i: Int, j: Int) = {
    var l = length + i
    var r = length + j
    var s = monoid.empty
    while (l <= r) {
      if (l % 2 == 1) {
        // l is a right child: take it and step past its subtree
        s |+|= data(l)
        l += 1
      }
      if (r % 2 == 0) {
        // r is a left child: take it and step before its subtree
        s |+|= data(r)
        r -= 1
      }
      l /= 2
      r /= 2
    }
    s
  }
}
/** Factory for [[SegmentTreeRangeQuery]]; builds the tree bottom-up from the added elements. */
object SegmentTreeRangeQuery extends Factory1[Id, SegmentTreeRangeQuery, Monoid] {

  def newBuilder[T: Monoid]: Builder[T, SegmentTreeRangeQuery[T]] = new Builder[T, SegmentTreeRangeQuery[T]] {
    // Collects the leaf values in insertion order until result() is called.
    private[this] val array = new ResizableSeq[T]()

    override def sizeHint(n: Int) = array.ensureCapacity(n)

    def add(x: T): Unit = array.append_!(x)

    def result() = {
      val n = array.len
      // Allocate the full tree: leaves at [n, 2n), inner nodes at [1, n).
      val t = new ResizableSeq[T](2 * n)
      t.len = 2 * n
      FastLoop.ascending(0, n, 1) { i =>
        t(n + i) = array(i)
      }
      // Fill parents bottom-up from the leaves.
      // NOTE(review): assumes FastLoop.descending(n - 1, 0, -1) iterates i = n-1 down to 1
      // (exclusive of the lower bound), leaving slot 0 untouched -- confirm against
      // FastLoop's bound semantics.
      FastLoop.descending(n - 1, 0, -1) { i =>
        t(i) = t(2 * i) |+| t(2 * i + 1)
      }
      new SegmentTreeRangeQuery[T](t)
    }
  }
}
| ctongfei/poly-collection | rangequery/src/main/scala/poly/collection/mut/SegmentTreeRangeQuery.scala | Scala | mit | 1,801 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
/**
 * Removes every [[ResolvedHint]] operator from the plan. Hint information is
 * transferred onto the [[Join]] operators it applies to; hints that reach no
 * join are simply dropped.
 */
object EliminateResolvedHint extends Rule[LogicalPlan] {
  // This is also called in the beginning of the optimization phase, and as a result
  // is using transformUp rather than resolveOperators.
  def apply(plan: LogicalPlan): LogicalPlan = {
    // First pass: attach the merged hints found below each join side to the join itself.
    val withJoinHints = plan transformUp {
      case join: Join =>
        join.copy(hint = JoinHint(
          mergeHints(collectHints(join.left)),
          mergeHints(collectHints(join.right))))
    }
    // Second pass: strip the now-redundant hint nodes from the tree.
    withJoinHints transformUp {
      case hint: ResolvedHint => hint.child
    }
  }

  /** Collapses a list of hints into a single one by OR-ing the broadcast flags. */
  private def mergeHints(hints: Seq[HintInfo]): Option[HintInfo] =
    hints.reduceOption { (h1, h2) =>
      HintInfo(broadcast = h1.broadcast || h2.broadcast)
    }

  /** Gathers all hints sitting between the given node and the next join below it. */
  private def collectHints(plan: LogicalPlan): Seq[HintInfo] = plan match {
    case h: ResolvedHint => collectHints(h.child) :+ h.hints
    case u: UnaryNode => collectHints(u.child)
    // TODO revisit this logic:
    // except and intersect are semi/anti-joins which won't return more data then
    // their left argument, so the broadcast hint should be propagated here
    case i: Intersect => collectHints(i.left)
    case e: Except => collectHints(e.left)
    case _ => Seq.empty
  }
}
| WindCanDie/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/EliminateResolvedHint.scala | Scala | apache-2.0 | 2,374 |
/*
* MUSIT is a museum database to archive natural and cultural history data.
* Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License,
* or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package no.uio.musit.models
/**
 * A row in the user/group membership table: ties a user (identified by Feide
 * email) to a group, optionally restricted to a single collection.
 *
 * @param id         database row id, `None` until persisted
 * @param feideEmail the user's Feide email address
 * @param groupId    the group the user is a member of
 * @param collection the collection this membership is scoped to, if any
 */
case class UserGroupMembership(
    id: Option[Int] = None,
    feideEmail: Email,
    groupId: GroupId,
    collection: Option[CollectionUUID]
)
object UserGroupMembership {

  /**
   * Builds one membership row per collection, or a single unscoped row
   * (collection = None) when `maybeCollections` is `None` or holds an empty
   * sequence.
   *
   * @param email            the user's Feide email
   * @param grpId            the group to create memberships for
   * @param maybeCollections the collections to scope the memberships to, if any
   * @return one membership per collection, or exactly one unscoped membership
   */
  def applyMulti(
      email: Email,
      grpId: GroupId,
      maybeCollections: Option[Seq[CollectionUUID]]
  ): Seq[UserGroupMembership] = {
    // The original None and Some(empty) branches were identical; collapse them.
    val collections = maybeCollections.getOrElse(Seq.empty)
    if (collections.nonEmpty) {
      collections.map { cid =>
        UserGroupMembership(
          feideEmail = email,
          groupId = grpId,
          collection = Option(cid)
        )
      }
    } else {
      Seq(
        UserGroupMembership(
          feideEmail = email,
          groupId = grpId,
          collection = None
        )
      )
    }
  }
}
| kpmeen/musit | musit-models/src/main/scala/no/uio/musit/models/UserGroupMembership.scala | Scala | gpl-2.0 | 1,809 |
package org.denigma.kappa
import java.io.{File => JFile}
import java.nio.ByteBuffer
import akka.http.scaladsl.testkit.WSProbe
import better.files.File
import boopickle.DefaultBasic._
import net.ceedubs.ficus.Ficus._
import org.denigma.kappa.messages.FileResponses.UploadStatus
import org.denigma.kappa.messages.KappaMessage.Container
import org.denigma.kappa.messages._
import org.denigma.kappa.notebook.FileManager
import org.denigma.kappa.notebook.communication.WebSocketManager
import org.denigma.kappa.notebook.pages.WebSockets
import scala.List
import org.denigma.kappa.notebook.extensions._
import scala.collection.immutable._
/**
 * Integration tests driving the notebook websocket channel end-to-end:
 * project listing, removal/recreation, and zip download/upload round-trips.
 * Uses the "big" test project that lives under the configured files directory.
 */
class WebSocketProjectsSuite extends BasicWebSocketSuite {

  // Root directory for project files, from app.files config (defaults to "files/").
  val filePath: String = config.as[Option[String]]("app.files").getOrElse("files/")
  val files = File(filePath)
  files.createIfNotExists(asDirectory = true)
  val fileManager = new FileManager(files, log)
  val transport = new WebSocketManager(system, fileManager)
  val routes = new WebSockets(transport.openChannel).routes

  "Via websocket we" should {
    "load projects" in {
      val wsClient = WSProbe()
      WS("/channel/notebook?username=tester3", wsClient.flow) ~> routes ~>
        check {
          checkConnection(wsClient)
          checkTestProjects(wsClient)
        }
      wsClient.sendCompletion()
      //wsClient.expectCompletion()
    }

    "update projects" in {
      val wsClient = WSProbe()
      WS("/channel/notebook?username=tester4", wsClient.flow) ~> routes ~>
        check {
          // check response for WS Upgrade headers
          checkConnection(wsClient)
          val big = KappaProject("big")
          val Container(ProjectResponses.ProjectList(lst) :: (ProjectResponses.LoadedProject(proj)) :: Nil, _) = checkTestProjects(wsClient)
          // remove the project and expect a Done acknowledgement
          val rem: ByteBuffer = Pickle.intoBytes[KappaMessage](ProjectRequests.Remove("big"))
          checkMessage(wsClient, rem) {
            case Done(ProjectRequests.Remove(_), _) =>
          }
          println("removed message went well")
          // loading the removed project must now fail
          checkProject(wsClient, big) {
            case Failed(/*KappaProject("big", _, _)*/ _, _, _) =>
          }
          println("remove is ok")
          // recreate the project from the previously loaded state
          val create: ByteBuffer = Pickle.intoBytes[KappaMessage](ProjectRequests.Create(proj))
          checkMessage(wsClient, create) {
            case Done(ProjectRequests.Create(_, false), _) =>
          }
          checkTestProjects(wsClient)
          println("create is ok")
          wsClient.sendCompletion()
          //wsClient.expectCompletion()
        }
    }

    "download, remove and upload project" in {
      val wsClient = WSProbe()
      WS("/channel/notebook?username=tester5", wsClient.flow) ~> routes ~>
        check {
          // check response for WS Upgrade headers
          checkConnection(wsClient)
          val fl = files / "big"
          fl.exists() shouldEqual true
          val big = KappaProject("big")
          val Container(ProjectResponses.ProjectList(lst) :: (ProjectResponses.LoadedProject(proj)) :: Nil, _) = checkTestProjects(wsClient)
          // downloading a nonexistent project yields a Failed with an explanatory message
          val downloadWrong: ByteBuffer = Pickle.intoBytes[KappaMessage](ProjectRequests.Download("big_wrong"))
          checkMessage(wsClient, downloadWrong) {
            case Failed(_, List("project big_wrong does not exist"), _) =>
          }
          // a valid download must return the zipped project folder byte-for-byte
          val downloadRight: ByteBuffer = Pickle.intoBytes[KappaMessage](ProjectRequests.Download("big"))
          val dat: Array[Byte] = checkMessage(wsClient, downloadRight) {
            case FileResponses.Downloaded("big", data) =>
              val zp = fl.compress().byteArray
              data.sameElements(zp) shouldEqual true
              //data shouldEqual zp
              data
          }
          // remove the project and verify both the ack and the folder deletion
          val rem: ByteBuffer = Pickle.intoBytes[KappaMessage](ProjectRequests.Remove("big"))
          checkMessage(wsClient, rem) {
            case Done(ProjectRequests.Remove(_), _) =>
          }
          fl.exists() shouldEqual false
          checkProject(wsClient, big) {
            case Failed(/*KappaProject("big", _, _)*/ _, _, _) =>
          }
          // re-upload the zip we downloaded earlier and expect the project to reappear
          val ms = KappaBinaryFile("big", ByteBuffer.wrap(dat))
          val upl = FileRequests.ZipUpload(ms, false)
          val upload: ByteBuffer = Pickle.intoBytes[KappaMessage](upl)
          checkMessage(wsClient, upload) {
            case Done(upd: UploadStatus, _) =>
          }
          checkTestProjects(wsClient)
        }
    }

    // Loads the "big" project and asserts it contains the expected three files;
    // returns the full response container for further inspection by callers.
    def checkTestProjects(wsClient: WSProbe): Container = checkProject(wsClient, KappaProject("big")) {
      case l@Container(ProjectResponses.ProjectList(lst) :: (ProjectResponses.LoadedProject(proj)) :: Nil, _) =>
        proj.name shouldEqual "big"
        proj.folder.files.map(_.name) shouldEqual Set("big_0.ka", "big_1.ka", "big_2.ka")
        l
    }
  }
} | antonkulaga/kappa-notebook | app/jvm/src/test/scala/org.denigma.kappa/WebSocketProjectsSuite.scala | Scala | mpl-2.0 | 4,802 |
/**
* KnuthMorrisPratt.scala --- Tail-recursive Knuth-Morris-Pratt search
*
* Copyright (C) 2014 Aaron S. Hawley
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Commentary:
*
* Based on Sedgewick, Robert. Algorithms in C, Parts 1-4:
* Fundamentals, Data Structures, Sorting, Searching, 3rd
* Edition. Addison-Wesley. 1998. Page 277-292.
*/
package org.ninthfloor.users.ashawley.algs.search
import scala.util.Try
/**
* Singleton object providing a binary search method.
*
* == Integers ==
* Example interactive session:
* {{{
* scala> KnuthMorrisPratt.search("ababc", "abc")
* res1: Int = 2
*
* scala> KnuthMorrisPratt.search("ababz", "abc")
* res2: Int = -1
* }}}
*
* == Command-line ==
* Compilation:
* {{{
* $ scalac ./org/ninthfloor/users/ashawley/algs/search/KnuthMorrisPratt.scala
* }}}
*
* Usage:
* {{{
* $ scala org.ninthfloor.users.ashawley.algs.search.KnuthMorrisPratt file.txt < search.txt
* }}}
*/
object KnuthMorrisPratt {

  /**
   * Search string `s` for an occurrence of substring `w`.
   *
   * @return Index in `s` where `w` is first found, or else -1
   */
  def search(s: String, w: String) = new KnuthMorrisPratt(s, w).matchPos
}
/**
 * Knuth-Morris-Pratt string search of pattern `w` in text `s`.
 *
 * Fixes two defects in the previous implementation:
 *  - on a complete match the search returned -1 instead of the match index
 *    (contradicting the documented examples above);
 *  - the failure table was built from `w(i) == w(j)` without backtracking,
 *    producing shifts that could skip over real matches
 *    (e.g. searching "aab" in "aaab" missed the match at index 1).
 */
class KnuthMorrisPratt(val s: String, val w: String) {

  // Standard KMP failure function: failure(j) is the length of the longest
  // proper border (prefix that is also a suffix) of w.substring(0, j), with
  // the conventional sentinel failure(0) == -1.
  private val failure: Vector[Int] = {
    @annotation.tailrec
    def loop(acc: Vector[Int], pos: Int, cnd: Int): Vector[Int] =
      if (pos >= w.length) acc
      else if (w(pos - 1) == w(cnd)) loop(acc :+ (cnd + 1), pos + 1, cnd + 1)
      else if (cnd > 0) loop(acc, pos, acc(cnd)) // fall back to the shorter border
      else loop(acc :+ 0, pos + 1, 0)

    if (w.isEmpty) Vector.empty
    else if (w.length == 1) Vector(-1)
    else loop(Vector(-1, 0), 2, 0)
  }

  /**
   * The failure table from index `i` onwards. Kept for interface compatibility;
   * the `j` argument of the old recursive construction is no longer used.
   */
  def buildTable(i: Int, j: Int): List[Int] = failure.drop(i).toList

  val table: List[Int] = buildTable(0, 0)

  /** Index of the first occurrence of `w` in `s`, or -1 if absent (0 for an empty `w`). */
  val matchPos = search(0, 0)

  /**
   * Tail-recursive KMP scan. `i` is the current alignment of `w` against `s`
   * and `j` the number of pattern characters matched so far.
   */
  @annotation.tailrec
  final def search(i: Int, j: Int): Int = (i, j) match {
    case _ if w.length == 0 => 0
    case _ if s.length <= i + j => -1
    // full match: last pattern character just matched at alignment i
    case _ if w(j) == s(i + j) && w.length - 1 == j => i
    case _ if w(j) == s(i + j) => search(i, j + 1)
    // mismatch after a partial match: realign using the failure table
    case _ if table(j) > -1 => search(i + j - table(j), table(j))
    case _ => search(i + 1, 0)
  }
}
| ashawley/algs | src/main/scala/org/ninthfloor/users/ashawley/algs/search/KnuthMorrisPratt.scala | Scala | gpl-3.0 | 2,676 |
package unfiltered.oauth2
/** Request attribute keys under which the OAuth2 layer publishes authorization results. */
object OAuth2 {
  // Resource owner identity granted by the authorization server.
  val XAuthorizedIdentity = "X-Authorized-Identity"
  // Identity of the client application the resource owner authorized.
  val XAuthorizedClientIdentity = "X-Authorized-Client-Identity"
  // Scopes granted for this request.
  val XAuthorizedScopes = "X-Authorized-Scopes"
}
/** Extractor for a resource owner and the client they authorized, as well as the granted scope. */
object OAuthIdentity {
  import OAuth2._
  import javax.servlet.http.HttpServletRequest
  import unfiltered.request.HttpRequest

  // todo: how can we accomplish this and not tie ourselves to underlying request?
  /**
   * Extracts the authorized resource owner id, client id and granted scopes
   * from the request attributes populated by the OAuth2 machinery.
   *
   * Unlike the previous null-matching version, a non-String attribute value now
   * yields None instead of a MatchError.
   *
   * @return a 3-tuple of (resource-owner-id, client-id, scopes) as an Option, or None if the
   *         owner or client identity is not available in the request
   */
  def unapply[T <: HttpServletRequest](r: HttpRequest[T]): Option[(String, String, Seq[String])] = {
    val attrs = r.underlying
    for {
      id <- Option(attrs.getAttribute(XAuthorizedIdentity)).collect { case s: String => s }
      clientId <- Option(attrs.getAttribute(XAuthorizedClientIdentity)).collect { case s: String => s }
    } yield {
      // Scopes are optional; a missing attribute means no scopes were granted.
      val scopes = Option(attrs.getAttribute(XAuthorizedScopes))
        .map(_.asInstanceOf[Seq[String]])
        .getOrElse(Nil)
      (id, clientId, scopes)
    }
  }
}
| beni55/unfiltered | oauth2/src/main/scala/oauth2.scala | Scala | mit | 1,210 |
/*
* ******************************************************************************
* * Copyright (C) 2013 Christopher Harris (Itszuvalex)
* * Itszuvalex@gmail.com
* *
* * This program is free software; you can redistribute it and/or
* * modify it under the terms of the GNU General Public License
* * as published by the Free Software Foundation; either version 2
* * of the License, or (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program; if not, write to the Free Software
* * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* *****************************************************************************
*/
package com.itszuvalex.femtocraft.industry.containers
import com.itszuvalex.femtocraft.common.gui.OutputSlot
import com.itszuvalex.femtocraft.core.container.ContainerInv
import com.itszuvalex.femtocraft.industry.containers.ContainerFemtoEntangler._
import com.itszuvalex.femtocraft.industry.tiles.TileEntityFemtoEntangler
import cpw.mods.fml.relauncher.{Side, SideOnly}
import net.minecraft.entity.player.{EntityPlayer, InventoryPlayer}
import net.minecraft.inventory.{ICrafting, Slot}
import net.minecraft.item.ItemStack
import scala.collection.JavaConversions._
/** Ids for the progress-bar fields synced between server and client (see updateProgressBar). */
object ContainerFemtoEntangler {
  private val cookTimeID   : Int = 0
  private val cookTimeMaxID: Int = 1
  private val powerID      : Int = 2
}
/**
 * Server/client container for the Femto Entangler tile entity. Lays out the
 * machine's slots and keeps the client's progress/power display in sync by
 * pushing deltas to every watching ICrafting.
 */
class ContainerFemtoEntangler(player: EntityPlayer, par1InventoryPlayer: InventoryPlayer, par2TileEntityFurnace: TileEntityFemtoEntangler)
  extends ContainerInv[TileEntityFemtoEntangler](player, par2TileEntityFurnace, 0, 13) {

  // Last values broadcast to crafters; detectAndSendChanges only sends deltas.
  private var lastCookTime = 0
  private var lastCookMax  = 0
  private var lastPower    = 0

  // Slot layout (coordinates are GUI pixel positions): slot 0 is the center
  // input, slots 1-12 form the surrounding ring, slot 13 is the output.
  addSlotToContainer(new Slot(inventory, 0, 89, 35))
  addSlotToContainer(new Slot(inventory, 1, 62, 8))
  addSlotToContainer(new Slot(inventory, 2, 80, 8))
  addSlotToContainer(new Slot(inventory, 3, 98, 8))
  addSlotToContainer(new Slot(inventory, 4, 116, 8))
  addSlotToContainer(new Slot(inventory, 5, 62, 26))
  addSlotToContainer(new Slot(inventory, 6, 116, 26))
  addSlotToContainer(new Slot(inventory, 7, 62, 44))
  addSlotToContainer(new Slot(inventory, 8, 116, 44))
  addSlotToContainer(new Slot(inventory, 9, 62, 62))
  addSlotToContainer(new Slot(inventory, 10, 80, 62))
  addSlotToContainer(new Slot(inventory, 11, 98, 62))
  addSlotToContainer(new Slot(inventory, 12, 116, 62))
  addSlotToContainer(new OutputSlot(inventory, 13, 147, 35))
  addPlayerInventorySlots(par1InventoryPlayer)

  // Send the full current state to a newly attached listener.
  override def addCraftingToCrafters(par1ICrafting: ICrafting) {
    super.addCraftingToCrafters(par1ICrafting)
    sendUpdateToCrafter(this, par1ICrafting, cookTimeID, inventory.getProgress)
    sendUpdateToCrafter(this, par1ICrafting, cookTimeMaxID, inventory.getProgressMax)
    sendUpdateToCrafter(this, par1ICrafting, powerID, inventory.getCurrentPower)
  }

  /**
   * Looks for changes made in the container, sends them to every listener.
   */
  override def detectAndSendChanges() {
    super.detectAndSendChanges()
    crafters.foreach { case icrafting: ICrafting =>
      if (lastCookTime != inventory.getProgress) {
        sendUpdateToCrafter(this, icrafting, cookTimeID, inventory.getProgress)
      }
      if (lastCookMax != inventory.getProgressMax) {
        sendUpdateToCrafter(this, icrafting, cookTimeMaxID, inventory.getProgressMax)
      }
      if (lastPower != inventory.getCurrentPower) {
        sendUpdateToCrafter(this, icrafting, powerID, inventory.getCurrentPower)
      }
    }
    // Remember what was sent so the next tick only emits deltas.
    lastCookTime = inventory.getProgress
    lastCookMax = inventory.getProgressMax
    lastPower = inventory.getCurrentPower
  }

  // Client side: apply a progress-bar field update received from the server.
  @SideOnly(Side.CLIENT) override def updateProgressBar(par1: Int, par2: Int) {
    par1 match {
      case `cookTimeID` => inventory.setProgress(par2)
      case `cookTimeMaxID` => inventory.setProgressMax(par2)
      case `powerID` => inventory.setCurrentStorage(par2)
    }
  }

  // Any item may be placed in the input slots.
  override def eligibleForInput(item: ItemStack) = true
}
| Itszuvalex/Femtocraft-alpha-1 | src/main/java/com/itszuvalex/femtocraft/industry/containers/ContainerFemtoEntangler.scala | Scala | gpl-2.0 | 4,365 |
// Copyright 2014 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.twofishes.server
import com.vividsolutions.jts.geom.Geometry
import io.fsq.common.scala.Lists.Implicits._
import io.fsq.twofishes.gen.{CellGeometry, GeocodeServingFeature}
import io.fsq.twofishes.util.StoredFeatureId
import org.slf4s.Logging
/**
 * A [[GeocodeStorageReadService]] that layers a hotfix store on top of an
 * underlying store: hotfix deletions hide features, hotfix additions and
 * modifications take precedence, and everything else falls through to the
 * underlying service.
 */
class HotfixableGeocodeStorageService(
  underlying: GeocodeStorageReadService,
  hotfix: HotfixStorageService
) extends GeocodeStorageReadService
  with Logging {

  def getIdsByName(name: String): Seq[StoredFeatureId] = {
    (underlying.getIdsByName(name) ++ hotfix.getIdsToAddByName(name))
      .filterNot(hotfix.getIdsToRemoveByName(name).has)
      .distinct
  }

  def getIdsByNamePrefix(name: String): Seq[StoredFeatureId] = {
    (underlying.getIdsByNamePrefix(name) ++ hotfix.getIdsToAddByNamePrefix(name))
      .filterNot(hotfix.getIdsToRemoveByNamePrefix(name).has)
      .distinct
  }

  def getByName(name: String): Seq[GeocodeServingFeature] = {
    getByFeatureIds(getIdsByName(name)).map(_._2).toSeq
  }

  def getByFeatureIds(ids: Seq[StoredFeatureId]): Map[StoredFeatureId, GeocodeServingFeature] = {
    // filter results rather than incoming list of ids in order to correctly handle concordance
    (underlying
      .getByFeatureIds(ids)
      .filterNot({ case (id, feature) => hotfix.getAddedOrModifiedFeatureLongIds.has(feature.longId) }) ++
      (for {
        id <- ids
        feature <- hotfix.getByFeatureId(id).toList
      } yield {
        (id -> feature)
      }).toMap).filterNot({ case (id, feature) => hotfix.getDeletedFeatureLongIds.has(feature.longId) })
  }

  def getBySlugOrFeatureIds(ids: Seq[String]): Map[String, GeocodeServingFeature] = {
    // first resolve newly created/modified slugs, if any
    val changedSlugFidMap = (for {
      idString <- ids
      longId <- hotfix.resolveNewSlugToLongId(idString)
      fid <- StoredFeatureId.fromLong(longId)
    } yield {
      (idString -> Some(fid))
    }).toMap

    // next, call underlying lookup to resolve any unchanged slugs/ids
    val existingIdFidMap = underlying
      .getBySlugOrFeatureIds(ids.filterNot(changedSlugFidMap.contains))
      .map({ case (idString, feature) => (idString -> StoredFeatureId.fromLong(feature.longId)) })
      .toMap

    // any valid id that is in neither of the above could only be newly created and without a slug
    val newIdFidMap = (for {
      idString <- ids
      id <- StoredFeatureId.fromUserInputString(idString)
      // filter out ids that already exist
      if !(existingIdFidMap.contains(idString) || changedSlugFidMap.contains(idString))
    } yield {
      (idString -> Some(id))
    }).toMap

    // put them all together
    (for {
      (idString, featureIdOpt) <- changedSlugFidMap ++ existingIdFidMap ++ newIdFidMap
      featureId <- featureIdOpt
      (resultFeatureId, feature) <- getByFeatureIds(Seq(featureId)).headOption
    } yield {
      (idString -> feature)
    }).toMap
  }

  def getMinS2Level: Int = underlying.getMinS2Level
  def getMaxS2Level: Int = underlying.getMaxS2Level
  def getLevelMod: Int = underlying.getLevelMod

  def getByS2CellId(id: Long): Seq[CellGeometry] = {
    (underlying
      .getByS2CellId(id)
      .filterNot(cellGeometry => hotfix.getAddedOrModifiedPolygonFeatureLongIds.has(cellGeometry.longId)) ++
      hotfix.getCellGeometriesByS2CellId(id))
      .filterNot(cellGeometry => hotfix.getDeletedPolygonFeatureLongIds.has(cellGeometry.longId))
  }

  /**
   * Shared hotfix-aware lookup for per-feature geometry data (polygons,
   * coverings, interiors): deletions hide the feature entirely, additions and
   * modifications are served from the hotfix store, and everything else falls
   * through to the underlying store.
   */
  private def geometryLookup[A](
    id: StoredFeatureId,
    fromHotfix: StoredFeatureId => Option[A],
    fromUnderlying: StoredFeatureId => Option[A]
  ): Option[A] = {
    if (hotfix.getDeletedPolygonFeatureLongIds.has(id.longId)) {
      None
    } else if (hotfix.getAddedOrModifiedPolygonFeatureLongIds.has(id.longId)) {
      fromHotfix(id)
    } else {
      fromUnderlying(id)
    }
  }

  /** Applies a single-id lookup to every id, keeping only the hits. */
  private def batchLookup[A](
    ids: Seq[StoredFeatureId]
  )(lookup: StoredFeatureId => Option[A]): Map[StoredFeatureId, A] = {
    (for {
      id <- ids
      value <- lookup(id)
    } yield {
      (id -> value)
    }).toMap
  }

  def getPolygonByFeatureId(id: StoredFeatureId): Option[Geometry] =
    geometryLookup(id, hotfix.getPolygonByFeatureId, underlying.getPolygonByFeatureId)

  def getPolygonByFeatureIds(ids: Seq[StoredFeatureId]): Map[StoredFeatureId, Geometry] =
    batchLookup(ids)(getPolygonByFeatureId)

  def getS2CoveringByFeatureId(id: StoredFeatureId): Option[Seq[Long]] =
    geometryLookup(id, hotfix.getS2CoveringByFeatureId, underlying.getS2CoveringByFeatureId)

  def getS2CoveringByFeatureIds(ids: Seq[StoredFeatureId]): Map[StoredFeatureId, Seq[Long]] =
    batchLookup(ids)(getS2CoveringByFeatureId)

  def getS2InteriorByFeatureId(id: StoredFeatureId): Option[Seq[Long]] =
    geometryLookup(id, hotfix.getS2InteriorByFeatureId, underlying.getS2InteriorByFeatureId)

  def getS2InteriorByFeatureIds(ids: Seq[StoredFeatureId]): Map[StoredFeatureId, Seq[Long]] =
    batchLookup(ids)(getS2InteriorByFeatureId)

  def refresh() {
    // refresh underlying first so hotfixes are applied on top of latest data
    underlying.refresh()
    hotfix.refresh()
  }
}
| foursquare/fsqio | src/jvm/io/fsq/twofishes/server/HotfixableGeocodeStorageService.scala | Scala | apache-2.0 | 5,308 |
package cas.analysis.estimation
import cas.analysis.subject.Subject
import cas.analysis.subject.components.Attachments
import cas.utils.StdImplicits.RightBiasedEither
import cas.utils.Regexf
// Configuration for AttachmentsEstimator; `weight` scales this estimator's
// contribution (defaults to 1.0), forwarded to the EstimatorConfigs base.
case class AttachmentsConfigs(override val weight: Double = 1.0
                                  ) extends EstimatorConfigs(weight)
/**
 * Scores a subject by its attachments: -1.0 when any attachment kinds are
 * present, 0.0 otherwise. Yields Left (from getComponent) when the subject
 * carries no Attachments component.
 */
class AttachmentsEstimator(cfg: AttachmentsConfigs) extends ActualityEstimator(cfg) {
  override def estimateActuality(subj: Subject): Either[String, Double] =
    subj.getComponent[Attachments].map { attachs =>
      if (attachs.kinds.isEmpty) 0.0 else -1.0
    }
}
| bk0606/CAS | src/main/scala/cas/analysis/estimation/AttachmentsEstimator.scala | Scala | mit | 563 |
/*
* UGenGraphBuilder.scala
* (ScalaCollider)
*
* Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss
package synth
package impl
import de.sciss.synth.UGenGraph.IndexedUGen
import de.sciss.synth.ugen.{Constant, ControlProxyLike, ControlUGenOutProxy, UGenProxy}
import scala.annotation.elidable
import scala.collection.immutable.{IndexedSeq => Vec, Set => ISet}
import scala.collection.mutable.{Buffer => MBuffer, Map => MMap}
object DefaultUGenGraphBuilderFactory extends UGenGraph.BuilderFactory {
  /** Expands `graph` with a fresh builder installed as the current one and
    * assembles the resulting `UGenGraph`.
    */
  def build(graph: SynthGraph): UGenGraph = {
    val b = new DefaultUGenGraphBuilder
    UGenGraph.use(b) {
      val proxies = buildWith(graph, b)
      b.build(proxies)
    }
  }

  /** Recursively expands the synth graph until no elements are left.
    * The caller should in most cases make sure that the builder is
    * actually installed as the current one, wrapping the call in
    * `UGenGraph.use(builder)`!
    * The method returns the control proxies for further processing
    * in the builder.
    *
    * @param g0       the graph to expand
    * @param builder  the builder that will assembly the ugens
    */
  def buildWith(g0: SynthGraph, builder: UGenGraph.Builder): ISet[ControlProxyLike] = {
    var g = g0
    var controlProxies = ISet.empty[ControlProxyLike]
    // forcing sources may itself create new nested graphs; loop until fix-point
    while (g.nonEmpty) {
      // XXX these two lines could be more efficient eventually -- using a 'clearable' SynthGraph
      controlProxies ++= g.controlProxies
      g = SynthGraph(g.sources.foreach(_.force(builder))) // allow for further graphs being created
    }
    controlProxies
  }
}
/** Concrete builder with the default mutable state from [[BasicUGenGraphBuilder]]. */
final class DefaultUGenGraphBuilder extends BasicUGenGraphBuilder {
  builder =>
  override def toString = s"UGenGraph.Builder@${hashCode.toHexString}"
}
object UGenGraphBuilderLike {
  // ---- IndexedUGen ----
  // Mutable working node used while indexing, pruning and topologically
  // sorting the ugen graph; `effective` marks ugens that must be retained.
  private final class IndexedUGenBuilder(val ugen: UGen, var index: Int, var effective: Boolean) {
    val parents     : MBuffer[IndexedUGenBuilder]   = MBuffer.empty
    var children    : MBuffer[IndexedUGenBuilder]   = MBuffer.empty
    var inputIndices: List[UGenInIndex]             = Nil

    override def toString = s"IndexedUGen($ugen, $index, $effective) : richInputs = $inputIndices"
  }

  // Represents the resolved index of one ugen input (either a constant or
  // another ugen's output).
  private trait UGenInIndex {
    def create: (Int, Int) // XXX TODO --- replace with specialized tuple to avoid boxing?
    def makeEffective(): Int
  }

  // Input referring to the constant pool; (-1, constIdx) by convention.
  private final class ConstantIndex(constIdx: Int) extends UGenInIndex {
    def create: (Int, Int) = (-1, constIdx)
    def makeEffective() = 0

    override def toString = s"ConstantIndex($constIdx)"
  }

  // Input referring to output `outIdx` of another indexed ugen; marking it
  // effective propagates recursively through its own inputs.
  private final class UGenProxyIndex(iu: IndexedUGenBuilder, outIdx: Int) extends UGenInIndex {
    def create: (Int, Int) = (iu.index, outIdx)
    def makeEffective(): Int = {
      if (!iu.effective) {
        iu.effective = true
        var numEff   = 1
        iu.inputIndices.foreach(numEff += _.makeEffective())
        numEff
      } else 0
    }

    override def toString = s"UGenProxyIndex($iu, $outIdx)"
  }
}
/** Supplies the builder's mutable state as plain fields. */
trait BasicUGenGraphBuilder extends UGenGraphBuilderLike {
  protected var ugens        : Vec[UGen]            = Vector.empty
  protected var controlValues: Vec[Float]           = Vector.empty
  protected var controlNames : Vec[(String, Int)]   = Vector.empty
  protected var sourceMap    : Map[AnyRef, Any]     = Map  .empty
}
/** Complete implementation of a ugen graph builder, except for the actual code that
  * calls `force` on the sources of a `SynthGraph`. Implementations should call
  * the `build` method passing in the control proxies for all involved synth graphs.
  */
trait UGenGraphBuilderLike extends UGenGraph.Builder {
  builder =>

  import UGenGraphBuilderLike._

  // ---- abstract ----

  // updated during build
  protected var ugens        : Vec[UGen]
  protected var controlValues: Vec[Float]
  protected var controlNames : Vec[(String, Int)]
  protected var sourceMap    : Map[AnyRef, Any]

  // ---- impl: public ----

  // when true, each builder step is printed via `log`
  var showLog = false

  // Appends a ugen to the graph under construction.
  final def addUGen(ugen: UGen): Unit = {
    ugens :+= ugen
    log(s"addUGen ${ugen.name} @ ${ugen.hashCode.toHexString} ${if (ugen.isIndividual) "indiv" else ""}")
  }

  // Prepends a ugen to the graph under construction.
  final def prependUGen(ugen: UGen): Unit = {
    ugens +:= ugen
    log(s"prependUGen ${ugen.name} @ ${ugen.hashCode.toHexString} ${if (ugen.isIndividual) "indiv" else ""}")
  }

  // Appends control values (optionally named) and returns their special index,
  // i.e. the offset of the first added value in the control-value pool.
  final def addControl(values: Vec[Float], name: Option[String]): Int = {
    val specialIndex = controlValues.size
    controlValues ++= values
    name.foreach(n => controlNames :+= n -> specialIndex)
    log(s"addControl ${name.getOrElse("<unnamed>")} num = ${values.size}, idx = $specialIndex")
    specialIndex
  }

  // Memoizes the expansion of a graph element: expands `init` only the first
  // time `ref` is seen, caching the result in `sourceMap`.
  def visit[U](ref: AnyRef, init: => U): U = {
    log(s"visit  ${ref.hashCode.toHexString}")
    sourceMap.getOrElse(ref, {
      log(s"expand ${ref.hashCode.toHexString}...")
      val exp = init
      log(s"...${ref.hashCode.toHexString} -> ${exp.hashCode.toHexString} ${printSmart(exp)}")
      sourceMap += ref -> exp
      exp
    }).asInstanceOf[U] // not so pretty...
  }

  // ---- impl: protected ----

  // this proxy function is useful because `elem.force` is package private.
  // so other projects implementing `UGenGraphBuilderLike` can use this function
  protected def force(elem: Lazy): Unit = elem.force(this)

  /** Finalizes the build process. It is assumed that the graph elements have been expanded at this
    * stage, having called into `addUGen` and `addControl`. The caller must collect all the control
    * proxies and pass them into this method.
    *
    * @param controlProxies   the control proxies participating in this graph
    *
    * @return  the completed `UGenGraph` build
    */
  def build(controlProxies: Iterable[ControlProxyLike]): UGenGraph = {
    val ctrlProxyMap        = buildControls(controlProxies)
    val (iUGens, constants) = indexUGens(ctrlProxyMap)
    val indexedUGens: Array[IndexedUGenBuilder] = sortUGens(iUGens)
    val richUGensB = Vector.newBuilder[IndexedUGen]
    richUGensB.sizeHint(indexedUGens.length)
    var i = 0
    while (i < indexedUGens.length) {
      val iu = indexedUGens(i)
      richUGensB += new IndexedUGen(iu.ugen, iu.inputIndices.map(_.create))
      i += 1
    }
    val richUGens: Vec[IndexedUGen] = richUGensB.result()
    UGenGraph(constants, controlValues, controlNames, richUGens)
  }

  // ---- impl: private ----

  // Wires up the parent/child links between indexed ugens, interns constants
  // into a shared pool, and prunes ugens that are neither side-effecting nor
  // (transitively) feeding a side-effecting ugen.
  private def indexUGens(ctrlProxyMap: Map[ControlProxyLike, (UGen, Int)]): (Vec[IndexedUGenBuilder], Vec[Float]) = {
    val constantMap     = MMap.empty[Float, ConstantIndex]
    val constants       = Vector.newBuilder[Float]
    var numConstants    = 0
    var numIneffective  = ugens.size
    val indexedUGens: Vec[IndexedUGenBuilder] = ugens.zipWithIndex.map { case (ugen, idx) =>
      val eff = ugen.hasSideEffect
      if (eff) numIneffective -= 1
      new IndexedUGenBuilder(ugen, idx, eff)
    }

    //indexedUGens.foreach( iu => println( iu.ugen.ref ))
    //val a0 = indexedUGens(1).ugen
    //val a1 = indexedUGens(3).ugen
    //val ee = a0.equals(a1)

    val ugenMap: Map[AnyRef, IndexedUGenBuilder] = indexedUGens.iterator.map(iu => (iu.ugen /* .ref */ , iu)).toMap
    indexedUGens.foreach { iu =>
      // XXX Warning: match not exhaustive -- "missing combination UGenOutProxy"
      // this is clearly a nasty scala bug, as UGenProxy does catch UGenOutProxy;
      // might be http://lampsvn.epfl.ch/trac/scala/ticket/4020
      iu.inputIndices = iu.ugen.inputs.iterator.map {
        // don't worry -- the match _is_ exhaustive
        case Constant(value) => constantMap.getOrElse(value, {
          val rc        = new ConstantIndex(numConstants)
          constantMap  += value -> rc
          constants    += value
          numConstants += 1
          rc
        })

        case up: UGenProxy =>
          val iui = ugenMap(up.source)
          iu.parents    += iui
          iui.children  += iu
          new UGenProxyIndex(iui, up.outputIndex)

        case ControlUGenOutProxy(proxy, outputIndex) =>
          val (ugen, off) = ctrlProxyMap(proxy)
          val iui = ugenMap(ugen)
          iu.parents    += iui
          iui.children  += iu
          new UGenProxyIndex(iui, off + outputIndex)
      } .toList
      if (iu.effective) iu.inputIndices.foreach(numIneffective -= _.makeEffective())
    }
    val filtered: Vec[IndexedUGenBuilder] = if (numIneffective == 0) indexedUGens
    else indexedUGens.collect {
      case iu if iu.effective =>
        iu.children = iu.children.filter(_.effective)
        iu
    }
    (filtered, constants.result())
  }

  /*
   * Note that in Scala like probably in most other languages,
   * the UGens _can only_ be added in right topological order,
   * as that is the only way they can refer to their inputs.
   * However, the Synth-Definition-File-Format help documents
   * states that depth-first order is preferable performance-
   * wise. Truth is, performance is probably the same,
   * mNumWireBufs might be different, so it's a space not a
   * time issue.
   */
  // Kahn-style topological sort: repeatedly takes a node with no remaining
  // parents, assigns it the next index, and releases its children.
  private def sortUGens(indexedUGens: Vec[IndexedUGenBuilder]): Array[IndexedUGenBuilder] = {
    indexedUGens.foreach(iu => iu.children = iu.children.sortWith((a, b) => a.index > b.index))
    val sorted  = new Array[IndexedUGenBuilder](indexedUGens.size)
    var avail: List[IndexedUGenBuilder] = indexedUGens.iterator.collect {
      case iu if iu.parents.isEmpty => iu
    } .toList

    var cnt = 0
    while (avail.nonEmpty) {
      val iu  = avail.head
      avail   = avail.tail
      iu.index = cnt
      sorted(cnt) = iu
      cnt    += 1
      iu.children foreach { iuc =>
        iuc.parents.remove(iuc.parents.indexOf(iu))
        if (iuc.parents.isEmpty) avail = iuc :: avail
      }
    }
    sorted
  }

  private def printSmart(x: Any): String = x match {
    case u: UGen  => u.name
    case _        => x.toString
  }

  @elidable(elidable.CONFIG) private def log(what: => String): Unit =
    if (showLog) println(s"ScalaCollider <ugen-graph> $what")

  // Expands the control proxies via their factories, yielding for each proxy
  // the backing control ugen and the offset of the proxy's values within it.
  private def buildControls(p: Iterable[ControlProxyLike]): Map[ControlProxyLike, (UGen, Int)] =
    p.groupBy(_.factory).iterator.flatMap { case (factory, proxies) =>
      factory.build(builder, proxies.toIndexedSeq)
    } .toMap
} | Sciss/ScalaCollider | shared/src/main/scala/de/sciss/synth/impl/UGenGraphBuilder.scala | Scala | lgpl-2.1 | 10,449 |
package net.sansa_stack.inference.abstraction
/**
 * Marker trait for abstraction generators; currently declares no members.
 *
 * @author Lorenz Buehmann
 */
trait AbstractionGenerator {
}
| SANSA-Stack/SANSA-RDF | sansa-inference/sansa-inference-common/src/main/scala/net/sansa_stack/inference/abstraction/AbstractionGenerator.scala | Scala | apache-2.0 | 116 |
package com.fsist.stream
import com.fsist.stream.run.FutureStreamBuilder
import com.fsist.util.concurrent.Func
import org.scalatest.FunSuite
import scala.concurrent.Promise
/** Tests for [[Pipe]] construction, composition and future-based (delayed) pipes. */
class PipeTest extends FunSuite with StreamTester {
  test("Using a pipe manually with a single component") {
    val data = 1 to 10
    val source = Source.from(data)
    val pipe: Pipe[Int, Int] = Transform.map((i: Int) => i + 1)

    val result = source.to(pipe).collect[List].singleResult().futureValue
    val expected = data.map(_ + 1)
    assert(result == expected)
  }

  test("Using a pipe manually with several components") {
    val data = 1 to 10
    val source = Source.from(data)

    // three increment stages connected by hand, wrapped into one Pipe
    val tr1 = Transform.map((i: Int) => i + 1)
    val tr2 = Transform.map((i: Int) => i + 1)
    val tr3 = Transform.map((i: Int) => i + 1)
    tr1.connect(tr2)
    tr2.connect(tr3)
    val pipe = Pipe(tr1, tr3)

    val result = source.to(pipe).collect[List].singleResult().futureValue
    val expected = data.map(_ + 3)
    assert(result == expected)
  }

  test("Building on a pipe") {
    val data = 1 to 10
    def tr = Transform.map((i: Int) => i + 1)
    def pipe = tr.pipe(tr).pipe(tr)

    val result = Source.from(data).to(pipe).collect[List]().singleResult().futureValue
    assert(result == data.map(_ + 3))
  }

  test("DelayedPipe") {
    val data = 1 to 10
    val promise = Promise[Pipe[Int, Int]]()
    val stream = Source.from(data).to(Pipe.flatten(promise.future)).toList.singleResult()
    // stream must stall until the promised pipe materializes
    awaitTimeout(stream, "Stream doesn't complete while waiting for delayed pipe")(impatience)

    val pipe = Transform.map(Func[Int, Int](_ + 1)).pipe(Transform.map(Func[Int, Int](_ - 2)))
    promise.success(pipe)

    assert(stream.futureValue == data.map(_ - 1))
  }

  test("DelayedPipe (when the Future is already completed)") {
    val data = 1 to 10
    val promise = Promise[Pipe[Int, Int]]()
    val pipe = Transform.map(Func[Int, Int](_ + 1)).pipe(Transform.map(Func[Int, Int](_ - 2)))
    promise.success(pipe)

    val stream = Source.from(data).to(Pipe.flatten(promise.future)).toList.singleResult()
    assert(stream.futureValue == data.map(_ - 1))
  }

  test("DelayedPipe (when the Future fails)") {
    val data = 1 to 10
    val promise = Promise[Pipe[Int, Int]]()
    val stream = Source.from(data).to(Pipe.flatten(promise.future)).toList.singleResult()
    // a failed pipe future must fail the whole stream with the same error
    val error = new IllegalArgumentException("test")
    promise.failure(error)
    assert(stream.failed.futureValue == error)
  }

  test("Double-delayed pipe") {
    val data = 1 to 10
    val promise = Promise[Pipe[Int, Int]]()
    val result = Source.from(data).to(Pipe.flatten(promise.future)).toList.singleResult()
    val promise2 = Promise[Pipe[Int, Int]]()
    promise.success(Pipe.flatten(promise2.future))
    promise2.success(Pipe.nop[Int])
    assert(result.futureValue == data)
  }

  test("Pipe of pipe") {
    // Regression test for bug in FutureStreamBuilder
    val data = 1 to 10
    implicit val builder = new FutureStreamBuilder
    val src = Source.from(data)
    val tr1 = Transform.map[Int, Int](Func.pass[Int])
    val pipe1 = Pipe(tr1, tr1.map(_ + 1))
    val tr2 = pipe1.source.map(_ + 1)
    val pipe2 = Pipe(pipe1, tr2) // This is legal

    val result = src.to(pipe2).toList.singleResult
    assert(result.futureValue == data.map(_ + 2))
  }
}
| fsist/future-streams | src/test/scala/com/fsist/stream/PipeTest.scala | Scala | apache-2.0 | 3,347 |
package service
import model.Profile._
import profile.simple._
import model.{PullRequest, Issue}
/** Slick-backed data access for pull requests; mixed into services that also provide issues. */
trait PullRequestService { self: IssuesService =>
  import PullRequestService._

  /** Looks up a pull request together with its backing issue, or None if either is missing. */
  def getPullRequest(owner: String, repository: String, issueId: Int)
                    (implicit s: Session): Option[(Issue, PullRequest)] =
    getIssue(owner, repository, issueId.toString).flatMap{ issue =>
      PullRequests.filter(_.byPrimaryKey(owner, repository, issueId)).firstOption.map{
        pullreq => (issue, pullreq)
      }
    }

  /** Updates the commit range (to/from) recorded for a pull request. */
  def updateCommitId(owner: String, repository: String, issueId: Int, commitIdTo: String, commitIdFrom: String)
                    (implicit s: Session): Unit =
    PullRequests.filter(_.byPrimaryKey(owner, repository, issueId))
                .map(pr => pr.commitIdTo -> pr.commitIdFrom)
                .update((commitIdTo, commitIdFrom))

  /** Counts pull requests per opening user (descending), optionally scoped to a
    * repository; NOTE(review): `owner.get`/`repository.get` are only evaluated when
    * the corresponding Option is defined, guarded by the second tuple element.
    */
  def getPullRequestCountGroupByUser(closed: Boolean, owner: Option[String], repository: Option[String])
                                    (implicit s: Session): List[PullRequestCount] =
    PullRequests
      .innerJoin(Issues).on { (t1, t2) => t1.byPrimaryKey(t2.userName, t2.repositoryName, t2.issueId) }
      .filter { case (t1, t2) =>
        (t2.closed         === closed.bind) &&
        (t1.userName       === owner.get.bind, owner.isDefined) &&
        (t1.repositoryName === repository.get.bind, repository.isDefined)
      }
      .groupBy { case (t1, t2) => t2.openedUserName }
      .map { case (userName, t) => userName -> t.length }
      .sortBy(_._2 desc)
      .list
      .map { x => PullRequestCount(x._1, x._2) }

//  def getAllPullRequestCountGroupByUser(closed: Boolean, userName: String)(implicit s: Session): List[PullRequestCount] =
//    PullRequests
//      .innerJoin(Issues).on { (t1, t2) => t1.byPrimaryKey(t2.userName, t2.repositoryName, t2.issueId) }
//      .innerJoin(Repositories).on { case ((t1, t2), t3) => t2.byRepository(t3.userName, t3.repositoryName) }
//      .filter { case ((t1, t2), t3) =>
//        (t2.closed === closed.bind) &&
//        (
//          (t3.isPrivate === false.bind) ||
//          (t3.userName  === userName.bind) ||
//          (Collaborators.filter { t4 => t4.byRepository(t3.userName, t3.repositoryName) && (t4.collaboratorName === userName.bind)} exists)
//        )
//      }
//      .groupBy { case ((t1, t2), t3) => t2.openedUserName }
//      .map { case (userName, t) => userName -> t.length }
//      .sortBy(_._2 desc)
//      .list
//      .map { x => PullRequestCount(x._1, x._2) }

  /** Inserts the pull-request row for an already-created issue with the given id. */
  def createPullRequest(originUserName: String, originRepositoryName: String, issueId: Int,
        originBranch: String, requestUserName: String, requestRepositoryName: String, requestBranch: String,
        commitIdFrom: String, commitIdTo: String)(implicit s: Session): Unit =
    PullRequests insert PullRequest(
      originUserName,
      originRepositoryName,
      issueId,
      originBranch,
      requestUserName,
      requestRepositoryName,
      requestBranch,
      commitIdFrom,
      commitIdTo)

  /** Lists pull requests whose source (request side) matches the given repo/branch
    * and whose issue has the given closed state.
    */
  def getPullRequestsByRequest(userName: String, repositoryName: String, branch: String, closed: Boolean)
                              (implicit s: Session): List[PullRequest] =
    PullRequests
      .innerJoin(Issues).on { (t1, t2) => t1.byPrimaryKey(t2.userName, t2.repositoryName, t2.issueId) }
      .filter { case (t1, t2) =>
        (t1.requestUserName       === userName.bind) &&
        (t1.requestRepositoryName === repositoryName.bind) &&
        (t1.requestBranch         === branch.bind) &&
        (t2.closed                === closed.bind)
      }
      .map { case (t1, t2) => t1 }
      .list
}
object PullRequestService {
  // page size used when listing pull requests
  val PullRequestLimit = 25

  // aggregation row: number of pull requests opened by a user
  case class PullRequestCount(userName: String, count: Int)
}
| mqshen/gitbucketTest | src/main/scala/service/PullRequestService.scala | Scala | apache-2.0 | 3,798 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flink.batch.connectors.pulsar.example
import org.apache.flink.api.common.serialization.SerializationSchema
import org.apache.flink.api.scala._
import org.apache.flink.batch.connectors.pulsar.PulsarOutputFormat
import org.apache.flink.util.Collector
/**
 * Value type pairing a word with its occurrence count.
 */
case class WordWithCount(word: String, count: Long) {
  /** Rendered form written to the Pulsar topic; format is part of the output contract. */
  override def toString: String = s"WordWithCount { word = $word, count = $count }"
}
/**
 * Implements a batch word-count Scala program on Pulsar topic by writing Flink DataSet.
 */
object FlinkPulsarBatchSinkScalaExample {

  private val EINSTEIN_QUOTE = "Imagination is more important than knowledge. " +
    "Knowledge is limited. Imagination encircles the world."

  // local Pulsar broker endpoint and destination topic for the word counts
  private val SERVICE_URL = "pulsar://127.0.0.1:6650"
  private val TOPIC_NAME = "my-flink-topic"

  def main(args: Array[String]): Unit = {

    // set up the execution environment
    val env = ExecutionEnvironment.getExecutionEnvironment

    // create PulsarOutputFormat instance, serializing each record via its toString
    val pulsarOutputFormat =
      new PulsarOutputFormat[WordWithCount](SERVICE_URL, TOPIC_NAME, new SerializationSchema[WordWithCount] {
        override def serialize(wordWithCount: WordWithCount): Array[Byte] = wordWithCount.toString.getBytes
      })

    // create DataSet
    val textDS = env.fromElements[String](EINSTEIN_QUOTE)

    // convert sentence to words (lower-cased, trailing periods stripped)
    textDS.flatMap((value: String, out: Collector[WordWithCount]) => {
      val words = value.toLowerCase.split(" ")
      for (word <- words) {
        out.collect(new WordWithCount(word.replace(".", ""), 1))
      }
    })

      // filter words which length is bigger than 4
      .filter((wordWithCount: WordWithCount) => wordWithCount.word.length > 4)

      // group the words
      .groupBy((wordWithCount: WordWithCount) => wordWithCount.word)

      // sum the word counts
      .reduce((wordWithCount1: WordWithCount, wordWithCount2: WordWithCount) =>
        new WordWithCount(wordWithCount1.word, wordWithCount1.count + wordWithCount2.count))

      // write batch data to Pulsar
      .output(pulsarOutputFormat)

    // set parallelism to write Pulsar in parallel (optional)
    env.setParallelism(2)

    // execute program
    env.execute("Flink - Pulsar Batch WordCount")
  }
} | ArvinDevel/incubator-pulsar | pulsar-flink/src/test/scala/org/apache/flink/batch/connectors/pulsar/example/FlinkPulsarBatchSinkScalaExample.scala | Scala | apache-2.0 | 3,096 |
// scalac: -Ystop-after:parser
//
object foo {
  val bar = "baz"
  // XML literal exercising comment edge cases in the parser: an empty XML
  // comment and one whose body ends with an extra dash. The braces inside the
  // XML comment are intentionally not interpolated. Parser-only test (see flag above).
  val xml =
    <root>
      <!---->
      <!-- {bar} --->
    </root>
}
| scala/scala | test/files/pos/xml-comments.scala | Scala | apache-2.0 | 139 |
package scalafiddle.router.cache
import scala.concurrent.Future
/** Asynchronous byte-array cache keyed by string id with per-entry expiration.
  * NOTE(review): the unit of `expiration` (seconds vs. millis) is not visible
  * here — confirm against implementations.
  */
trait Cache {
  /** Returns the cached bytes for `id` if present and not expired. */
  def get(id: String, expiration: Int): Future[Option[Array[Byte]]]

  /** Stores `data` under `id` with the given expiration. */
  def put(id: String, data: Array[Byte], expiration: Int): Future[Unit]

  /** Evicts entries according to `expiration`. */
  def clean(expiration: Int): Unit
}
| scalafiddle/scalafiddle-core | router/src/main/scala/scalafiddle/router/cache/Cache.scala | Scala | apache-2.0 | 259 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.busybees.streams.flows.formaters
import java.net.URL
import java.util.Base64
import com.fasterxml.jackson.databind.node.ObjectNode
import com.flipkart.connekt.busybees.encryption.WebPushEncryptionUtils
import com.flipkart.connekt.busybees.streams.errors.ConnektPNStageException
import com.flipkart.connekt.busybees.streams.flows.NIOFlow
import com.flipkart.connekt.commons.entities.MobilePlatform
import com.flipkart.connekt.commons.factories.{ConnektLogger, LogFile, ServiceFactory}
import com.flipkart.connekt.commons.helpers.CallbackRecorder._
import com.flipkart.connekt.commons.helpers.ConnektRequestHelper._
import com.flipkart.connekt.commons.iomodels.MessageStatus.InternalStatus
import com.flipkart.connekt.commons.iomodels._
import com.flipkart.connekt.commons.services.{DeviceDetailsService, KeyChainManager}
import com.flipkart.connekt.commons.utils.NetworkUtils.URLFunctions
import com.flipkart.connekt.commons.utils.StringUtils._
import org.bouncycastle.jce.interfaces.ECPublicKey
import org.jose4j.jws.{AlgorithmIdentifiers, JsonWebSignature}
import org.jose4j.jwt.JwtClaims
import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}
/** Stream stage that turns a [[ConnektRequest]] into zero or more Web-Push
  * (RFC 8030 style) payload envelopes, one per registered device. Devices with
  * missing info, invalid tokens, expired TTLs or encryption failures are
  * reported via callback events instead of producing envelopes.
  */
class OpenWebChannelFormatter(parallelism: Int)(implicit ec: ExecutionContextExecutor) extends NIOFlow[ConnektRequest, OpenWebStandardPayloadEnvelope](parallelism)(ec) {

  private lazy val stencilService = ServiceFactory.getStencilService

  override def map: ConnektRequest => List[OpenWebStandardPayloadEnvelope] = message => {

    try {
      ConnektLogger(LogFile.PROCESSORS).debug(s"OpenWebChannelFormatter received message: ${message.id}")

      val pnInfo = message.channelInfo.asInstanceOf[PNRequestInfo]

      // record devices we have no registration for, then continue with the rest
      val devicesInfo = DeviceDetailsService.get(pnInfo.appName, pnInfo.deviceIds).get
      val validDeviceIds = devicesInfo.map(_.deviceId)
      val invalidDeviceIds = pnInfo.deviceIds.diff(validDeviceIds.toSet)
      invalidDeviceIds.map(PNCallbackEvent(message.id, message.clientId, _, InternalStatus.MissingDeviceInfo, MobilePlatform.OPENWEB, pnInfo.appName, message.contextId.orEmpty)).persist
      ServiceFactory.getReportingService.recordPushStatsDelta(message.clientId, message.contextId, message.meta.get("stencilId").map(_.toString), Option(message.platform), message.appName, InternalStatus.MissingDeviceInfo, invalidDeviceIds.size)

      // remaining time-to-live in seconds; falls back to 6 hours when no expiry is set
      val ttl = message.expiryTs.map(expiry => (expiry - System.currentTimeMillis) / 1000).getOrElse(6.hour.toSeconds)
      val openWebStencil = stencilService.getStencilsByName(s"ckt-${pnInfo.appName.toLowerCase}-openweb").head
      if (ttl > 0) {
        if (!message.isTestRequest) {
          devicesInfo.flatMap(device => {
            //standard open-web (firefox, chrome, etc)
            if (device.token != null && device.token.nonEmpty && device.token.isValidUrl) {
              // legacy GCM endpoints are rewritten to the HTTP-capable host
              val token = device.token.replace("https://android.googleapis.com/gcm/send", "https://gcm-http.googleapis.com/gcm")
              val headers = scala.collection.mutable.Map("TTL" -> ttl.toString)
              // materialized stencil payload enriched with routing identifiers
              val appDataWithId = stencilService.materialize(openWebStencil, message.channelData.asInstanceOf[PNRequestData].data).asInstanceOf[String].getObj[ObjectNode]
                .put("messageId", message.id)
                .put("contextId", message.contextId.orEmpty)
                .put("deviceId", device.deviceId).getJson
              val vapIdKeyPair = KeyChainManager.getKeyPairCredential(message.appName).get
              if (device.keys != null && device.keys.nonEmpty) {
                // encrypt payload with the subscription's p256dh/auth keys (aesgcm)
                Try(WebPushEncryptionUtils.encrypt(device.keys("p256dh"), device.keys("auth"), appDataWithId)) match {
                  case Success(data) =>
                    headers += (
                      "Encryption" -> WebPushEncryptionUtils.createEncryptionHeader(data.salt),
                      "Content-Encoding" -> "aesgcm"
                      )

                    // build the VAPID JWT for the push service audience
                    val claims = new JwtClaims()
                    claims.setAudience(new URL(token).origin)
                    claims.setExpirationTimeMinutesInTheFuture(12 * 60)
                    claims.setSubject("mailto:connekt-dev@flipkart.com")

                    val jws = new JsonWebSignature()
                    jws.setHeader("typ", "JWT")
                    jws.setPayload(claims.toJson)
                    jws.setKey(WebPushEncryptionUtils.loadPrivateKey(vapIdKeyPair.privateKey))
                    jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.ECDSA_USING_P256_CURVE_AND_SHA256)
                    val compactJws = jws.getCompactSerialization.stripSuffix("=")

                    val publicKey = WebPushEncryptionUtils.loadPublicKey(vapIdKeyPair.publicKey).asInstanceOf[ECPublicKey].getQ.getEncoded(false)
                    headers +=(
                      "Crypto-Key" -> s"${WebPushEncryptionUtils.createCryptoKeyHeader(data.serverPublicKey)};p256ecdsa=${Base64.getUrlEncoder.encodeToString(publicKey).stripSuffix("=")}",
                      "Authorization" -> s"WebPush $compactJws"
                      )

                    if (token.startsWith("https://gcm-http.googleapis.com/gcm")) //pre-chrome-52.
                      headers += ("Authorization" -> s"key=${KeyChainManager.getGoogleCredential(message.appName).get.apiKey}")

                    List(OpenWebStandardPayloadEnvelope(message.id, message.clientId, device.deviceId, pnInfo.appName,
                      message.contextId.orEmpty, token, OpenWebStandardPayload(data.encodedData), headers.toMap, message.meta))
                  case Failure(e) =>
                    ConnektLogger(LogFile.PROCESSORS).error(s"OpenWebChannelFormatter error for ${message.id}", e)
                    PNCallbackEvent(message.id, message.clientId, device.deviceId, InternalStatus.EncryptionError, MobilePlatform.OPENWEB, device.appName, message.contextId.orEmpty, e.getMessage).enqueue
                    List.empty[OpenWebStandardPayloadEnvelope]
                }
              } else {
                // subscription without encryption keys: send an empty payload envelope
                List(OpenWebStandardPayloadEnvelope(message.id, message.clientId, device.deviceId, pnInfo.appName, message.contextId.orEmpty,
                  token, OpenWebStandardPayload(Array.empty), headers.toMap, message.meta))
              }
            } else {
              PNCallbackEvent(message.id, message.clientId, device.deviceId, InternalStatus.InvalidToken, MobilePlatform.OPENWEB, device.appName, message.contextId.orEmpty, "OpenWeb without or empty/invalid token not allowed").enqueue
              List.empty[OpenWebStandardPayloadEnvelope]
            }
          })
        } else {
          ConnektLogger(LogFile.PROCESSORS).info(s"OpenWebChannelFormatter dropping dry-run message: ${message.id}")
          List.empty[OpenWebStandardPayloadEnvelope]
        }
      } else {
        ConnektLogger(LogFile.PROCESSORS).warn(s"OpenWebChannelFormatter dropping ttl-expired message: ${message.id}")
        ServiceFactory.getReportingService.recordPushStatsDelta(message.clientId, message.contextId, message.meta.get("stencilId").map(_.toString), Option(message.platform), message.appName, InternalStatus.TTLExpired, devicesInfo.size)
        devicesInfo.map(d => PNCallbackEvent(message.id, message.clientId, d.deviceId, InternalStatus.TTLExpired, MobilePlatform.OPENWEB, d.appName, message.contextId.orEmpty)).enqueue
        List.empty[OpenWebStandardPayloadEnvelope]
      }
    }
    catch {
      case e: Exception =>
        ConnektLogger(LogFile.PROCESSORS).error(s"OpenWebChannelFormatter error for ${message.id}", e)
        throw ConnektPNStageException(message.id, message.clientId, message.destinations, InternalStatus.StageError, message.appName, message.platform, message.contextId.orEmpty, message.meta, "OpenWebChannelFormatter::".concat(e.getMessage), e)
    }
  }
}
| Flipkart/connekt | busybees/src/main/scala/com/flipkart/connekt/busybees/streams/flows/formaters/OpenWebChannelFormatter.scala | Scala | mit | 8,457 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package models.navigation
import play.api.mvc.Call
/**
* @author hiral
*/
/** Central lookup tables mapping menu labels to Play routes, plus string
  * "pimp" classes for building menus and breadcrumbs from those labels.
  */
object QuickRoutes {
  import models.navigation.BreadCrumbs._

  // routes that need no parameters
  val baseRoutes : Map[String, Call] = Map(
    "Clusters" -> controllers.routes.Application.index(),
    "List" -> controllers.routes.Application.index()
  )
  // routes parameterized by cluster name
  val clusterRoutes : Map[String, String => Call] = Map(
    "Update Cluster" -> controllers.routes.Cluster.updateCluster,
    "Summary" -> controllers.routes.Application.cluster,
    "Brokers" -> controllers.routes.Application.brokers,
    "Topics" -> controllers.routes.Topic.topics,
    "List" -> controllers.routes.Topic.topics,
    "Create" -> controllers.routes.Topic.createTopic,
    "Preferred Replica Election" -> controllers.routes.PreferredReplicaElection.preferredReplicaElection,
    "Reassign Partitions" -> controllers.routes.ReassignPartitions.reassignPartitions
  )
  // routes parameterized by (cluster name, topic name)
  val topicRoutes : Map[String, (String, String) => Call] = Map(
    "Topic View" -> controllers.routes.Topic.topic
  )

  implicit class BaseRoute(s: String) {
    def baseRouteMenuItem : (String, Call) = {
      s -> baseRoutes(s)
    }
    def baseRoute : Call = {
      baseRoutes(s)
    }
    // NOTE(review): parameter `c` is unused here — confirm whether that is intended.
    def baseMenu(c: String): Menu = {
      Menu(s,IndexedSeq.empty,Some(baseRoute))
    }
    def baseRouteBreadCrumb : BCStaticLink = {
      BCStaticLink(s, baseRoutes(s))
    }
  }
  implicit class ClusterRoute(s: String) {
    def clusterRouteMenuItem(c: String): (String, Call) = {
      s -> clusterRoutes(s)(c)
    }
    def clusterRoute(c: String): Call = {
      clusterRoutes(s)(c)
    }
    def clusterMenu(c: String): Menu = {
      Menu(s,IndexedSeq.empty,Some(clusterRoute(c)))
    }
    def clusterRouteBreadCrumb : BCDynamicLink = {
      BCDynamicLink( s, clusterRoutes(s))
    }
  }
  implicit class TopicRoute(s: String) {
    def topicRoute(c: String, t: String): Call = {
      topicRoutes(s)(c,t)
    }
  }
}
| vvutharkar/kafka-manager | app/models/navigation/QuickRoutes.scala | Scala | apache-2.0 | 2,030 |
package code.api
import net.liftweb.http.rest.RestHelper
import net.liftweb.json.JsonAST.JObject
import net.liftweb.util.Helpers.AsLong
import net.liftweb.json.JsonAST.JField
import net.liftweb.json.JsonAST.JString
import code.service.TaskService
import net.liftweb.json._
import net.liftweb.json.Extraction._
import code.service.Color
import code.service.TaskItemService
import code.service.TaskItemService.IntervalQuery
import code.model.User
import net.liftweb.mapper.By
import org.joda.time.LocalDate
import org.joda.time.Interval
import com.github.nscala_time.time.Imports._
import net.liftweb.common.Box
import code.model.ExtSession
/**
* A basic REST API to provide access to Timeadmin functions.
*
* By default, th API is not enabled. It can enabled with the following configuration property:
* <code>api.enable=true</code>
*
* @see https://github.com/dodie/time-admin/blob/master/api-reference.md
*/
object Endpoints extends RestHelper with ClientsOnly with HandlingErrors {
  // Wire representation of a task exposed over the API.
  case class TaskDto(id: Long, taskName: String, projectName: String, fullName: String, color: Color)
  // Wire representation of a task item; duration is in millis (via getMillis),
  // `start` presumably epoch millis — TODO confirm against TaskItem model.
  case class TaskItemDto(id: Long, taskId: Long, start: Long, duration: Long, user: Long)
  // Extracts field `name` from the request JSON as a Long.
  // NOTE(review): assumes the field exists and is a JString — throws otherwise.
  def getLong(jsonData: JValue, name: String) = {
    (jsonData \\ name).asInstanceOf[JString].values.toLong
  }

  // Extracts field `name` from the request JSON as a String (same caveat as getLong).
  def getString(jsonData: JValue, name: String) = {
    (jsonData \\ name).asInstanceOf[JString].values
  }
  // Canned JSON responses: {"status":"OK"}, optionally with a created-entity id,
  // and {"status":"ERROR"}.
  val OK_RESPONSE = Some(JObject(JField("status", JString("OK"))))
  def OK_RESPONSE_WITH_ID(id: Long) = Some(JObject(JField("status", JString("OK")), JField("id", JString(id.toString()))))
  val ERROR_RESPONSE = Some(JObject(JField("status", JString("ERROR"))))

  // Matches yyyyMMdd-yyyyMMdd date-range path segments, capturing each component.
  val dateRange = """(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)-(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)""".r

  def date(year: String, monthOfYear: String, dayOfMonth: String): LocalDate = new LocalDate(year.toInt, monthOfYear.toInt, dayOfMonth.toInt)

  // Full-day interval spanning from the start of `s` to the end of `e`.
  def interval(s : LocalDate, e:LocalDate) = new Interval(s.toInterval.start, e.toInterval.end)

  def user(): Box[User] = User.currentUser
serve {
handlingErrors {
case "api" :: "login" :: Nil JsonPost ((jsonData, req)) => {
val email = getString(jsonData, "email")
val password = getString(jsonData, "password")
if (User.canLogin(email, password)) {
val userIdAsString = User.find(By(User.email, email)).openOrThrowException("Current user must be defined!").userIdAsString
val extSession = ExtSession.create.userId(userIdAsString).tokentype(ExtSession.TOKEN_TYPE_CLIENT_API).saveMe
Some(JObject(JField("token", JString(extSession.cookieId.get))))
} else {
ERROR_RESPONSE
}
}
case "api" :: "logout" :: Nil JsonPost ((jsonData, req)) => {
val token = getString(jsonData, "token")
ExtSession.find(By(ExtSession.cookieId, token)).foreach(_.delete_!)
OK_RESPONSE
}
}
}
serve {
handlingErrors {
clientsOnly {
case "api" :: "tasks" :: Nil JsonGet req => {
decompose(
TaskService.getAllActiveTasks
.map(task => TaskDto(task.task.id.get, task.taskName, task.projectName, task.fullName, task.color)))
}
case "api" :: "taskitems" :: dateRange(startYear, startMonth, startDay, endYear, endMonth, endDay) :: Nil JsonGet req => {
val start = date(startYear, startMonth, startDay)
val end = date(endYear, endMonth, endDay)
val intervalQuery = IntervalQuery(interval(start, end))
decompose(
TaskItemService.getTaskItems(intervalQuery, user)
.map(taskItem => TaskItemDto(taskItem.taskItem.id.get, taskItem.taskItem.task.get, taskItem.taskItem.start.get, taskItem.duration.getMillis, taskItem.taskItem.user.get)))
}
case "api" :: "taskitems" :: Nil JsonPost ((jsonData, req)) => {
val taskId = getLong(jsonData, "taskId")
val time = getLong(jsonData, "time")
val taskItem = TaskItemService.insertTaskItem(taskId, time, user)
OK_RESPONSE_WITH_ID(taskItem.id.get)
}
case "api" :: "taskitems" :: AsLong(taskItemId) :: Nil JsonPut ((jsonData, req)) => {
val taskId = getLong(jsonData, "taskId")
val time = getLong(jsonData, "time")
TaskItemService.editTaskItem(taskItemId, taskId, time, false, user)
OK_RESPONSE
}
case "api" :: "taskitems" :: AsLong(taskItemId) :: Nil JsonDelete req => {
TaskItemService.deleteTaskItem(taskItemId, user)
OK_RESPONSE
}
}
}
}
}
| dodie/time-admin | src/main/scala/code/api/Endpoints.scala | Scala | apache-2.0 | 4,699 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import org.apache.spark.annotation.DeveloperApi
/**
 * :: DeveloperApi ::
 * Stores information about a Miscellaneous Process to pass from the scheduler to SparkListeners.
 *
 * @param hostPort   host and port of the process
 * @param cores      number of cores available to the process
 * @param logUrlInfo map of log name to its URL
 */
@DeveloperApi
class MiscellaneousProcessDetails(
    val hostPort: String,
    val cores: Int,
    val logUrlInfo: Map[String, String]) extends Serializable
| maropu/spark | core/src/main/scala/org/apache/spark/scheduler/MiscellaneousProcessDetails.scala | Scala | apache-2.0 | 1,173 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.common.si
import java.io.File
import java.util.Date
import com.bwsw.common.file.utils.MongoFileStorage
import com.bwsw.sj.common.dal.model.instance.InstanceDomain
import com.bwsw.sj.common.dal.model.module.{FileMetadataDomain, IOstream, SpecificationDomain}
import com.bwsw.sj.common.dal.repository.{ConnectionRepository, GenericMongoRepository}
import com.bwsw.sj.common.si.model.FileMetadataLiterals
import com.bwsw.sj.common.si.model.module.{ModuleMetadata, ModuleMetadataCreator, Specification}
import com.bwsw.sj.common.si.result._
import com.bwsw.sj.common.utils.EngineLiterals.{batchStreamingType, inputStreamingType, outputStreamingType, regularStreamingType}
import com.bwsw.sj.common.utils.{MessageResourceUtils, MessageResourceUtilsMock}
import org.bson.types.ObjectId
import org.mockito.ArgumentMatchers.{any, anyString, argThat, eq => argEq}
import org.mockito.Mockito.{never, reset, verify, when}
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import scaldi.{Injector, Module}
import scala.collection.mutable.ArrayBuffer
/**
 * Unit tests for ModuleSI: create / get / getAll / getByType / getRelatedInstances /
 * delete / exists against mocked metadata, instance and file repositories.
 *
 * The fixture builds a small "universe" of modules:
 *  - three modules whose jar files are present in the mocked file storage,
 *  - one module whose metadata exists but whose jar is missing,
 *  - one module that is not stored at all (used to test creation).
 */
class ModuleSiTests extends FlatSpec with Matchers with MockitoSugar with BeforeAndAfterEach {

  val tmpDirectory = "/tmp/"
  val createModuleMetadata = mock[ModuleMetadataCreator]

  // Modules participating in the tests; see createModule for the per-module mock wiring.
  val notStoredModule = createModule("not-stored-module", "v1", regularStreamingType)
  val inputModuleV1 = createModule("input-module", "v1", inputStreamingType)
  val inputModuleV2 = createModule("input-module", "v2", inputStreamingType)
  val inputModules = Seq(inputModuleV1, inputModuleV2)
  val outputModule = createModule("output-module", "v1", outputStreamingType)
  val storedModulesWithJars = Seq(inputModuleV1, inputModuleV2, outputModule)
  // Simulates a metadata entry whose jar file is absent from the file storage.
  val moduleWithoutJar = createModule("module-without-jar", "v3", batchStreamingType, withFile = false)
  val storedModules = storedModulesWithJars :+ moduleWithoutJar

  // File metadata repository: answers lookups by file type and by the
  // (name, module-type, version) triple for every stored module.
  val fileMetadataRepository = mock[GenericMongoRepository[FileMetadataDomain]]
  when(fileMetadataRepository.getByParameters(any[Map[String, Any]])).thenReturn(Seq.empty)
  when(fileMetadataRepository.getByParameters(Map("filetype" -> FileMetadataLiterals.moduleType)))
    .thenReturn(storedModules.map(_.metadataDomain))
  storedModules.foreach { module =>
    when(fileMetadataRepository.getByParameters(
      Map(
        "filetype" -> FileMetadataLiterals.moduleType,
        "specification.name" -> module.name,
        "specification.module-type" -> module.moduleType,
        "specification.version" -> module.version))).thenReturn(Seq(module.metadataDomain))
  }

  // Expected result of getByType for every module type.
  val modulesToType: Map[String, Seq[ModuleInfo]] = Map(
    inputStreamingType -> inputModules,
    regularStreamingType -> Seq.empty,
    batchStreamingType -> Seq(moduleWithoutJar),
    outputStreamingType -> Seq(outputModule))
  modulesToType.foreach {
    case (moduleType, modules) =>
      when(fileMetadataRepository.getByParameters(
        Map(
          "filetype" -> FileMetadataLiterals.moduleType,
          "specification.module-type" -> moduleType)))
        .thenReturn(modules.map(_.metadataDomain))
  }

  // Instances related to modules: inputModuleV2 and outputModule have running
  // instances (and thus cannot be deleted), inputModuleV1 has none.
  val moduleWithoutInstances = inputModuleV1
  val inputInstanceNames = Seq("input-instance-1", "input-instance-2")
  val outputInstanceNames = Seq("output-instance-1", "output-instance-2")
  val modulesWithInstances = Map(
    inputModuleV2 -> inputInstanceNames,
    outputModule -> outputInstanceNames)
  val moduleToInstanceNames = modulesWithInstances + (moduleWithoutInstances -> Seq.empty)

  val instanceRepository = mock[GenericMongoRepository[InstanceDomain]]
  when(instanceRepository.getByParameters(any[Map[String, Any]])).thenReturn(Seq.empty)
  modulesWithInstances.foreach {
    case (module, names) =>
      val instances = names.map { name =>
        val instance = mock[InstanceDomain]
        when(instance.name).thenReturn(name)
        instance
      }
      when(instanceRepository.getByParameters(Map(
        "module-name" -> module.name,
        "module-type" -> module.moduleType,
        "module-version" -> module.version)))
        .thenReturn(instances)
  }

  // Connection repository tying the mocked storages together.
  val fileStorage = mock[MongoFileStorage]
  val connectionRepository = mock[ConnectionRepository]
  when(connectionRepository.getFileStorage).thenReturn(fileStorage)
  when(connectionRepository.getFileMetadataRepository).thenReturn(fileMetadataRepository)
  when(connectionRepository.getInstanceRepository).thenReturn(instanceRepository)

  // Scaldi injector providing the mocks to the service under test.
  val injector = new Module {
    bind[ConnectionRepository] to connectionRepository
    bind[MessageResourceUtils] to MessageResourceUtilsMock.messageResourceUtils
    bind[FileBuffer] to mock[FileBuffer]
    bind[ModuleMetadataCreator] to createModuleMetadata
  }.injector

  val moduleSI = new ModuleSI()(injector)

  // fileStorage is the only mock mutated by the tests (delete verification),
  // so it is reset and re-stubbed before each test.
  override def beforeEach(): Unit = {
    reset(fileStorage)
    when(fileStorage.exists(anyString())).thenReturn(false)
    when(fileStorage.delete(anyString())).thenReturn(false)
    storedModulesWithJars.foreach { module =>
      when(fileStorage.get(module.filename, tmpDirectory + module.filename)).thenReturn(module.file.get)
      when(fileStorage.exists(module.filename)).thenReturn(true)
      when(fileStorage.delete(module.filename)).thenReturn(true)
    }
  }

  // create
  "ModuleSI" should "create correct module" in {
    when(notStoredModule.metadata.validate()).thenReturn(ArrayBuffer[String]())
    moduleSI.create(notStoredModule.metadata) shouldBe Created
    verify(fileStorage).put(
      argThat[File](file => file.getName == notStoredModule.filename),
      argEq(notStoredModule.filename),
      argEq(notStoredModule.specificationDomain),
      argEq(FileMetadataLiterals.moduleType))
  }

  it should "not create incorrect module" in {
    val errors = ArrayBuffer("Not valid")
    when(notStoredModule.metadata.validate()).thenReturn(errors)
    moduleSI.create(notStoredModule.metadata) shouldBe NotCreated(errors)
    verify(fileStorage, never()).put(
      any[File](),
      anyString(),
      any[SpecificationDomain](),
      anyString())
  }

  // get
  it should "give module if it exists" in {
    storedModulesWithJars.foreach {
      case ModuleInfo(name, version, moduleType, _, _, _, _, metadata, _, _) =>
        moduleSI.get(moduleType, name, version) shouldBe Right(metadata)
    }
  }

  it should "not give module with incorrect module type" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = "incorrect type"
    val error = "rest.modules.type.unknown:" + moduleType
    moduleSI.get(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module if it does not exists" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = regularStreamingType
    val error = s"rest.modules.module.notfound:$moduleType-$name-$version"
    moduleSI.get(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module if it does not have jar file" in {
    val error = "rest.modules.module.jar.notfound:" + moduleWithoutJar.signature
    moduleSI.get(moduleWithoutJar.moduleType, moduleWithoutJar.name, moduleWithoutJar.version) shouldBe Left(error)
  }

  // getAll
  it should "give all modules from storage" in {
    moduleSI.getAll.toSet shouldBe storedModules.map(_.metadataWithoutFile).toSet
  }

  // getMetadataWithoutFile
  it should "give module metadata without file if it exists" in {
    storedModulesWithJars.foreach {
      case ModuleInfo(name, version, moduleType, _, _, _, _, _, metadataWithoutFile, _) =>
        moduleSI.getMetadataWithoutFile(moduleType, name, version) shouldBe Right(metadataWithoutFile)
    }
  }

  it should "not give module metadata without file with incorrect module type" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = "incorrect type"
    val error = "rest.modules.type.unknown:" + moduleType
    moduleSI.getMetadataWithoutFile(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module metadata without file if it does not exists" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = regularStreamingType
    val error = s"rest.modules.module.notfound:$moduleType-$name-$version"
    moduleSI.getMetadataWithoutFile(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module metadata without file if it does not have jar file" in {
    val error = "rest.modules.module.jar.notfound:" + moduleWithoutJar.signature
    val gotten = moduleSI.getMetadataWithoutFile(
      moduleWithoutJar.moduleType,
      moduleWithoutJar.name,
      moduleWithoutJar.version)
    gotten shouldBe Left(error)
  }

  // getByType
  it should "give all modules with specific type" in {
    modulesToType.foreach {
      case (moduleType, modules) =>
        moduleSI.getByType(moduleType).map(_.toSet) shouldBe Right(modules.map(_.metadataWithoutFile).toSet)
    }
  }

  it should "tell error if module type is incorrect" in {
    val moduleType = "incorrect type"
    val error = "rest.modules.type.unknown:" + moduleType
    moduleSI.getByType(moduleType) shouldBe Left(error)
  }

  // getRelatedInstances
  it should "give instances related to module" in {
    moduleToInstanceNames.foreach {
      case (ModuleInfo(_, _, _, _, _, _, _, metadata, _, _), instanceNames) =>
        moduleSI.getRelatedInstances(metadata).toSet shouldBe instanceNames.toSet
    }
  }

  // delete
  it should "delete module without related instances" in {
    moduleSI.delete(moduleWithoutInstances.metadata) shouldBe Deleted
  }

  it should "not delete module with related instances" in {
    modulesWithInstances.keySet.foreach {
      case ModuleInfo(_, _, _, signature, _, _, _, metadata, _, _) =>
        val error = "rest.modules.module.cannot.delete:" + signature
        moduleSI.delete(metadata) shouldBe DeletionError(error)
    }
  }

  it should "tell that storage can't delete module file" in {
    // fileStorage.delete returns false for moduleWithoutJar (no jar stored).
    val error = "rest.cannot.delete.file:" + moduleWithoutJar.filename
    moduleSI.delete(moduleWithoutJar.metadata) shouldBe DeletionError(error)
  }

  // exists
  it should "give module metadata if module exists and has jar file" in {
    storedModulesWithJars.foreach {
      case ModuleInfo(name, version, moduleType, _, _, _, _, _, _, metadataDomain) =>
        moduleSI.exists(moduleType, name, version) shouldBe Right(metadataDomain)
    }
  }

  it should "not give module metadata if module does not exists" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = regularStreamingType
    val error = s"rest.modules.module.notfound:$moduleType-$name-$version"
    moduleSI.exists(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module metadata if module type is incorrect" in {
    val name = "some-name"
    val version = "some-version"
    val moduleType = "incorrect type"
    val error = "rest.modules.type.unknown:" + moduleType
    moduleSI.exists(moduleType, name, version) shouldBe Left(error)
  }

  it should "not give module metadata if it does not have jar file" in {
    val error = "rest.modules.module.jar.notfound:" + moduleWithoutJar.signature
    val gotten = moduleSI.exists(
      moduleWithoutJar.moduleType,
      moduleWithoutJar.name,
      moduleWithoutJar.version)
    gotten shouldBe Left(error)
  }

  /**
   * Builds a ModuleInfo fixture: mocked specification, metadata (with and
   * without the jar file) and the corresponding FileMetadataDomain, and wires
   * createModuleMetadata to return the right variant depending on whether a
   * file is passed.
   *
   * @param withFile when false, the module has metadata but no jar file
   *                 (metadata.file is None and the storage reports it missing)
   */
  private def createModule(name: String, version: String, moduleType: String, withFile: Boolean = true): ModuleInfo = {
    val signature = s"$moduleType-$name-$version"
    val filename = s"$name-$version.jar"
    val file = {
      if (withFile) Some(new File(getClass.getResource(filename).toURI))
      else None
    }

    val specificationDomain = new SpecificationDomain(
      name, "", version, "", "", mock[IOstream], mock[IOstream], moduleType, "", "", "", "")

    val specification = mock[Specification]
    when(specification.to).thenReturn(specificationDomain)
    when(specification.name).thenReturn(name)
    when(specification.version).thenReturn(version)
    when(specification.moduleType).thenReturn(moduleType)

    val metadata = mock[ModuleMetadata]
    when(metadata.filename).thenReturn(filename)
    when(metadata.specification).thenReturn(specification)
    when(metadata.file).thenReturn(file)
    when(metadata.name).thenReturn(Some(name))
    when(metadata.version).thenReturn(Some(version))
    when(metadata.signature).thenReturn(signature)

    val metadataWithoutFile = mock[ModuleMetadata]
    when(metadataWithoutFile.filename).thenReturn(filename)
    when(metadataWithoutFile.specification).thenReturn(specification)
    when(metadataWithoutFile.file).thenReturn(None)
    when(metadataWithoutFile.name).thenReturn(Some(name))
    when(metadataWithoutFile.version).thenReturn(Some(version))
    when(metadataWithoutFile.signature).thenReturn(signature)

    val metadataDomain = FileMetadataDomain(
      new ObjectId(),
      name,
      filename,
      FileMetadataLiterals.moduleType,
      new Date(),
      0,
      specificationDomain)

    when(createModuleMetadata.from(argEq(metadataDomain), any[Option[File]]())(any[Injector]()))
      .thenReturn(metadataWithoutFile)
    when(createModuleMetadata.from(argEq(metadataDomain), argEq(file))(any[Injector]()))
      .thenReturn(metadata)

    ModuleInfo(
      name,
      version,
      moduleType,
      signature,
      filename,
      file,
      specificationDomain,
      metadata,
      metadataWithoutFile,
      metadataDomain)
  }

  // Bundle of everything the tests need to know about one fixture module.
  case class ModuleInfo(name: String,
                        version: String,
                        moduleType: String,
                        signature: String,
                        filename: String,
                        file: Option[File],
                        specificationDomain: SpecificationDomain,
                        metadata: ModuleMetadata,
                        metadataWithoutFile: ModuleMetadata,
                        metadataDomain: FileMetadataDomain)
}
| bwsw/sj-platform | core/sj-common/src/test/scala-2.12/com/bwsw/sj/common/si/ModuleSiTests.scala | Scala | apache-2.0 | 14,800 |
package jobs
import akka.actor.Actor
import play.api.Play
import oauth.signpost.commonshttp.CommonsHttpOAuthConsumer
import akka.actor.ActorRef
import org.apache.http.client.methods.HttpGet
import org.apache.http.client.methods.HttpPost
import models._
import akka.actor.Cancellable
import akka.actor.ActorSystem
import akka.actor.Props
import TweetManager._
import scala.concurrent.duration.Duration
import java.util.concurrent.TimeUnit
import play.api.libs.json.JsNumber
import play.api.libs.concurrent.Execution.Implicits._
import utils.AkkaImplicits._
import scala.language.postfixOps
/**
 * Filters tweets so that each downstream listener only receives a tweet once
 * per keyword list.
 *
 * NB: the per-keyword id histories are periodically truncated, so the checker
 * can have some small true negatives (a duplicate arriving long after the
 * original may slip through).
 *
 * @param nbQueries      total number of search queries being run
 * @param keywords       keyword lists used by the searchers; one id history is
 *                       kept per list. Must be non-empty (used as a divisor).
 * @param searcherPeriod period in seconds between two searches of one searcher
 */
class TweetDuplicateChecker(nbQueries: Int, keywords: Set[List[String]], searcherPeriod: Int) extends Actor {
  var running = true /* If true, the actor will be scheduled */
  var scheduled: Option[Cancellable] = None /* cancellable corresponding to the self-schedule of the thread */

  /* A map keeping track, for each keyword list used in the research, of the ids of recently seen tweets */
  var ids: Map[List[String], List[Long]] = keywords.map(k => (k, List.empty[Long])).toMap

  /* Number of tweet ids to keep per keyword's list.
     NOTE(review): divides by keywords.size — an empty keyword set would throw here; confirm callers never pass one. */
  val keep = nbQueries * 100 / keywords.size
  val period = searcherPeriod * nbQueries /* Period between each flush of the lists of ids */

  def receive = {
    /* Check for duplicates. If none, forward the tweet to the proper listener and remember its id. */
    case (tw @ Tweet(value, query), listener: ActorRef) =>
      val id = (value \\ "id").as[JsNumber].value.toLong
      if (!ids(query.keywords).contains(id)) {
        listener ! tw
        ids += (query.keywords -> (ids(query.keywords) :+ id))
      }

    /* Cleanup the lists of ids: only the most recent `keep` ids per keyword
       list are necessary to detect duplicates. Re-arms itself while running. */
    case Cleanup =>
      if (running) scheduled = Some(self.scheduleOnce(period, TimeUnit.SECONDS, Cleanup))
      ids = ids.map { case (kw, seen) => kw -> seen.takeRight(keep) }

    /* Stop the self-scheduling of the checker. */
    case Stop =>
      running = false
      scheduled.foreach(_.cancel()) // idiomatic replacement for isDefined/get
      scheduled = None

    /* Resume the self-scheduling. Cancel any pending schedule first so that
       repeated Resume messages cannot leave a second Cleanup timer armed. */
    case Resume =>
      running = true
      scheduled.foreach(_.cancel())
      scheduled = Some(self.scheduleOnce(period, TimeUnit.SECONDS, Cleanup))

    case _ => sys.error("TweetDuplicateChecker: wrong message.")
  }
}
| TweetAggregator/CrossTalk | app/jobs/TweetDuplicateChecker.scala | Scala | gpl-2.0 | 2,322 |
package bad.robot.radiate.config
import org.specs2.mutable.Specification
/**
 * Verifies that Template renders the HOCON configuration file text for fully
 * populated, partially populated and empty ConfigFile instances. Missing
 * optional values are rendered as commented-out placeholder lines.
 */
class TemplateTest extends Specification {

  // All optional values present: every line is rendered uncommented.
  "Fully populated config file template" >> {
    Template(new ConfigFile {
      def url: Option[String] = Some("http://teamcity.com:8111")
      def username: Option[String] = Some("bob")
      def password: Option[String] = Some("secret")
      def authorisation: Option[String] = Some("basic")
      def projects: List[String] = List("projectA", "projectB")
      def ecoMode: (Option[String], Option[String]) = (Some("10:00"), Some("12:00"))
    }) must_==
      """
        |server {
        | url = "http://teamcity.com:8111"
        | username = "bob"
        | password = "secret"
        | authorisation = "basic" # guest | basic
        |}
        |
        |projects = ["projectA", "projectB"]
        |
        |ui {
        | eco-mode {
        | start = "10:00"
        | end = "12:00"
        | }
        |}
        |""".stripMargin
  }

  // Some optionals missing: those lines become commented placeholders, and
  // eco-mode requires both start and end to be enabled.
  "Partially populated (but valid) config file template" >> {
    Template(new ConfigFile {
      def url: Option[String] = Some("http://teamcity.com:8111")
      def username: Option[String] = None
      def password: Option[String] = None
      def authorisation: Option[String] = Some("basic")
      def projects: List[String] = List()
      def ecoMode: (Option[String], Option[String]) = (Some("10:00"), None)
    }) must_==
      """
        |server {
        | url = "http://teamcity.com:8111"
        | # username = "???"
        | # password = "???"
        | authorisation = "basic" # guest | basic
        |}
        |
        |projects = [ ]
        |
        |ui {
        | eco-mode {
        | # supply *both* start and end time to enable eco mode
        | # start = "18:00" # Uncomment to enable eco mode start time (hh:mm)
        | # end = "07:00" # Uncomment to enable eco mode end time (hh:mm)
        | }
        |}
        |""".stripMargin
  }

  // Nothing populated: every configurable line is a commented-out example.
  "Empty config file template" >> {
    Template(new ConfigFile {
      def url: Option[String] = None
      def username: Option[String] = None
      def password: Option[String] = None
      def authorisation: Option[String] = None
      def projects: List[String] = List()
      def ecoMode: (Option[String], Option[String]) = (None, None)
    }) must_==
      """
        |server {
        | # url = "http://example.com:8111"
        | # username = "???"
        | # password = "???"
        | # authorisation = "guest | basic"
        |}
        |
        |projects = [ ]
        |
        |ui {
        | eco-mode {
        | # supply *both* start and end time to enable eco mode
        | # start = "18:00" # Uncomment to enable eco mode start time (hh:mm)
        | # end = "07:00" # Uncomment to enable eco mode end time (hh:mm)
        | }
        |}
        |""".stripMargin
  }
}
| tobyweston/radiate | src/test/scala/bad/robot/radiate/config/TemplateTest.scala | Scala | apache-2.0 | 2,956 |
// NOTE(review): IntelliJ Scala plugin type-inference test fixture (SCL-5738).
// The /*start*/ ... /*end*/ markers delimit the expression under test and the
// trailing //-comment on the file's last line is the expected inferred type;
// neither must be edited. Confirm the test harness tolerates this header note.
object SCL5738 extends App {
  val key = "a"
  val foo = Some(args) collect {
    case Array("1") => Map(key -> 1)
    case _ => Map(key -> "unknown")
  } getOrElse Map.empty
  println(/*start*/foo/*end*/)
}
//Map[String, Any] | ilinum/intellij-scala | testdata/typeInference/bugs5/SCL5738.scala | Scala | apache-2.0 | 237 |
package nl.dekkr.pagefetcher.services
import akka.actor.ActorRef
import nl.dekkr.pagefetcher.model.{BackendResult, PageUrl}
/**
 * Backend abstraction for fetching page content.
 */
trait BackendSystem {
  // Actor handling persistence; supplied by the concrete implementation.
  implicit val persistence: ActorRef

  /**
   * Fetches the content of the given page.
   *
   * @param request   the page to fetch
   * @param charSet   optional character set — presumably used when decoding the page; confirm in implementations
   * @param userAgent optional User-Agent value to use for the request
   */
  def getContent(request: PageUrl, charSet: Option[String], userAgent: Option[String]): BackendResult
}
| dekkr/pagefetcher | src/main/scala/nl/dekkr/pagefetcher/services/BackendSystem.scala | Scala | mit | 291 |
package com.wavesplatform.lang
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.Base58
import com.wavesplatform.lang.v1.parser.BinaryOperation._
import com.wavesplatform.lang.v1.parser.Expressions.Pos.AnyPos
import com.wavesplatform.lang.v1.parser.Expressions._
import com.wavesplatform.lang.v1.parser.{BinaryOperation, Expressions, Parser}
import com.wavesplatform.lang.v1.testing.ScriptGenParser
import com.wavesplatform.test._
import fastparse.Parsed.{Failure, Success}
import org.scalacheck.Gen
import org.scalatest.exceptions.TestFailedException
class ScriptParserTest extends PropSpec with ScriptGenParser {
/** Parses `x` as a script expression; fails the enclosing test when parsing does not succeed. */
private def parse(x: String): EXPR =
  Parser.parseExpr(x) match {
    case Success(result, _) => result
    case failure: Failure   => throw new TestFailedException(failure.msg, 0)
  }
// Normalises all position information inside a LET binding so that parsed
// ASTs can be compared structurally, ignoring source offsets.
private def cleanOffsets(l: LET): LET =
  l.copy(Pos(0, 0), name = cleanOffsets(l.name), value = cleanOffsets(l.value)) // , types = l.types.map(cleanOffsets(_))
/** Replaces the position of a PART (valid or invalid) with the wildcard AnyPos. */
private def cleanOffsets[T](part: PART[T]): PART[T] =
  part match {
    case PART.VALID(_, content)   => PART.VALID(AnyPos, content)
    case PART.INVALID(_, message) => PART.INVALID(AnyPos, message)
  }
// Recursively replaces every position in the expression tree with Pos(0, 0) so
// that parsed and generated ASTs compare equal regardless of source offsets.
// Deliberately non-exhaustive: node types these tests never generate fail fast.
private def cleanOffsets(expr: EXPR): EXPR = expr match {
  case x: CONST_LONG => x.copy(position = Pos(0, 0))
  case x: REF => x.copy(position = Pos(0, 0), key = cleanOffsets(x.key))
  case x: CONST_STRING => x.copy(position = Pos(0, 0), value = cleanOffsets(x.value))
  case x: CONST_BYTESTR => x.copy(position = Pos(0, 0), value = cleanOffsets(x.value))
  case x: TRUE => x.copy(position = Pos(0, 0))
  case x: FALSE => x.copy(position = Pos(0, 0))
  case x: BINARY_OP => x.copy(position = Pos(0, 0), a = cleanOffsets(x.a), b = cleanOffsets(x.b))
  case x: IF => x.copy(position = Pos(0, 0), cond = cleanOffsets(x.cond), ifTrue = cleanOffsets(x.ifTrue), ifFalse = cleanOffsets(x.ifFalse))
  case x @ BLOCK(_, l: Expressions.LET, _, _, _) => x.copy(position = Pos(0, 0), let = cleanOffsets(l), body = cleanOffsets(x.body))
  case x: FUNCTION_CALL => x.copy(position = Pos(0, 0), name = cleanOffsets(x.name), args = x.args.map(cleanOffsets(_)))
  case _ => throw new NotImplementedError(s"toString for ${expr.getClass.getSimpleName}")
}
// Round-trip property: generates expressions from `gen`, renders each to
// source text, re-parses it and checks the result equals the original
// expression (with source offsets normalised away).
private def genElementCheck(gen: Gen[EXPR]): Unit = {
  val testGen: Gen[(EXPR, String)] = for {
    expr <- gen
    str <- toString(expr)
  } yield (expr, str)

  forAll(testGen) {
    case (expr, str) =>
      withClue(str) {
        cleanOffsets(parse(str)) shouldBe expr
      }
  }
}
/** Registers one round-trip property (via genElementCheck) per labelled generator. */
private def multiLineExprTests(tests: (String, Gen[EXPR])*): Unit =
  for ((label, gen) <- tests)
    property(s"multiline expressions: $label") {
      genElementCheck(gen)
    }
// Size budget handed to the recursive generators (bounds expression depth).
private val gas = 50

// Register round-trip properties for each expression shape the parser supports.
multiLineExprTests(
  "CONST_LONG" -> CONST_LONGgen.map(_._1),
  "STR" -> STRgen,
  "REF" -> REFgen,
  "BOOL" -> BOOLgen(gas).map(_._1),
  "SUM" -> SUMgen(gas).map(_._1),
  "EQ" -> EQ_INTgen(gas).map(_._1),
  "INT" -> INTGen(gas).map(_._1),
  "GE" -> GEgen(gas).map(_._1),
  "GT" -> GTgen(gas).map(_._1),
  "AND" -> ANDgen(gas).map(_._1),
  "OR" -> ORgen(gas).map(_._1),
  "BLOCK" -> BLOCKgen(gas)
)
// Comparison operators bind tighter than ||, and arithmetic binds tighter
// than comparisons, so each expression parses into the expected tree shape.
property("priority in binary expressions") {
  parse("1 == 0 || 3 == 2") shouldBe BINARY_OP(
    AnyPos,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 1), EQ_OP, CONST_LONG(AnyPos, 0)),
    OR_OP,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 3), EQ_OP, CONST_LONG(AnyPos, 2))
  )
  parse("3 + 2 > 2 + 1") shouldBe BINARY_OP(
    AnyPos,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 3), SUM_OP, CONST_LONG(AnyPos, 2)),
    GT_OP,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 2), SUM_OP, CONST_LONG(AnyPos, 1))
  )
  parse("1 >= 0 || 3 > 2") shouldBe BINARY_OP(
    AnyPos,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 1), GE_OP, CONST_LONG(AnyPos, 0)),
    OR_OP,
    BINARY_OP(AnyPos, CONST_LONG(AnyPos, 3), GT_OP, CONST_LONG(AnyPos, 2))
  )
}
// base58 byte-vector literals inside a function call parse into CONST_BYTESTR args.
property("bytestr expressions") {
  parse("false || sigVerify(base58'333', base58'222', base58'111')") shouldBe BINARY_OP(
    AnyPos,
    FALSE(AnyPos),
    OR_OP,
    FUNCTION_CALL(
      AnyPos,
      PART.VALID(AnyPos, "sigVerify"),
      List(
        CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr(Base58.decode("333")))),
        CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr(Base58.decode("222")))),
        CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr(Base58.decode("111"))))
      )
    )
  )
}

// "bQbp" is the Base58 encoding of the bytes "foo".
property("valid non-empty base58 definition") {
  parse("base58'bQbp'") shouldBe CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr("foo".getBytes("UTF-8"))))
}

property("valid empty base58 definition") {
  parse("base58''") shouldBe CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr.empty))
}

// A leading space is not valid Base58 — the literal becomes an INVALID part.
property("invalid base58 definition") {
  parse("base58' bQbp'") shouldBe CONST_BYTESTR(AnyPos, PART.INVALID(AnyPos, "can't parse Base58 string"))
}
// "TElLRQ==" is the Base64 encoding of the bytes "LIKE".
property("valid non-empty base64 definition") {
  parse("base64'TElLRQ=='") shouldBe CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr("LIKE".getBytes("UTF-8"))))
}

property("valid empty base64 definition") {
  parse("base64''") shouldBe CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr.empty))
}

property("invalid base64 definition") {
  parse("base64'mid-size'") shouldBe CONST_BYTESTR(AnyPos, PART.INVALID(AnyPos, "can't parse Base64 string"))
}

property("valid empty base16 definition") {
  parse("base16''") shouldBe CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr.empty))
}

// Hex digits decode pairwise into bytes; mixed case is accepted.
property("valid non-empty base16 definition") {
  parse("base16'0123456789abcdef123456789ABCDEF0ABCDEFfabcde'") shouldBe
    CONST_BYTESTR(
      AnyPos,
      PART.VALID(
        AnyPos,
        ByteStr(
          Array[Short](0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef, 0x12, 0x34, 0x56, 0x78, 0x9A, 0xBC, 0xDE, 0xF0, 0xAB, 0xCD, 0xEF, 0xfa, 0xbc,
            0xde).map(_.toByte)
        )
      )
    )
}

// Non-hex characters and odd lengths are rejected with precise error positions.
property("invalid base16 definition") {
  parse("base16'mid-size'") shouldBe CONST_BYTESTR(Pos(0, 16), PART.INVALID(Pos(8, 15), "Unrecognized character: m"), None)
  parse("base16'123'") shouldBe CONST_BYTESTR(Pos(0, 11), PART.INVALID(Pos(8, 10), "Invalid input length 3"))
}
// Byte-vector literals longer than Global.MaxLiteralLength are rejected.
property("literal too long") {
  import Global.MaxLiteralLength
  val longLiteral = "A" * (MaxLiteralLength + 1)
  val to = 8 + MaxLiteralLength
  parse(s"base58'$longLiteral'") shouldBe
    CONST_BYTESTR(Pos(0, to + 1), PART.INVALID(Pos(8, to), s"base58Decode input exceeds $MaxLiteralLength"))
}

// Surrounding whitespace is not part of the string literal's value.
property("string is consumed fully") {
  parse(""" " fooo bar" """) shouldBe CONST_STRING(Pos(1, 17), PART.VALID(Pos(2, 16), " fooo bar"))
}
property("string literal with unicode chars") {
val stringWithUnicodeChars = "❤✓☀★☂♞☯☭☢€☎∞❄♫\\u20BD"
parse(
s"""
|
| "$stringWithUnicodeChars"
|
""".stripMargin
) shouldBe CONST_STRING(Pos(3, 20), PART.VALID(Pos(4, 19), stringWithUnicodeChars))
}
property("string literal with unicode chars in language") {
parse("\\"\\\\u1234\\"") shouldBe CONST_STRING(Pos(0, 8), PART.VALID(Pos(1, 7), "ሴ"))
}
property("should parse invalid unicode symbols") {
parse("\\"\\\\uqwer\\"") shouldBe CONST_STRING(
AnyPos,
PART.INVALID(AnyPos, "can't parse 'qwer' as HEX string in '\\\\uqwer'")
)
}
property("should parse incomplete unicode symbol definition") {
parse("\\"\\\\u12 test\\"") shouldBe CONST_STRING(AnyPos, PART.INVALID(AnyPos, "incomplete UTF-8 symbol definition: '\\\\u12'"))
parse("\\"\\\\u\\"") shouldBe CONST_STRING(AnyPos, PART.INVALID(AnyPos, "incomplete UTF-8 symbol definition: '\\\\u'"))
}
property("string literal with special symbols") {
parse("\\"\\\\t\\\\n\\\\r\\\\\\\\\\\\\\"\\"") shouldBe CONST_STRING(AnyPos, PART.VALID(AnyPos, "\\t\\n\\r\\\\\\""))
}
property("should parse invalid special symbols") {
parse("\\"\\\\ test\\"") shouldBe CONST_STRING(AnyPos, PART.INVALID(AnyPos, "unknown escaped symbol: '\\\\ '. The valid are \\b, \\f, \\n, \\r, \\t"))
}
property("block: multiline without ;") {
val s =
"""let q = 1
|c""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "q"), CONST_LONG(AnyPos, 1)),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: func") {
val s =
"""func q(x: Int, y: Boolean) = { 42 }
|c""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
FUNC(
AnyPos,
CONST_LONG(AnyPos, 42),
PART.VALID(AnyPos, "q"),
Seq((PART.VALID(AnyPos, "x"), Single(PART.VALID(AnyPos, "Int"), None)), (PART.VALID(AnyPos, "y"), Single(PART.VALID(AnyPos, "Boolean"), None)))
),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: func with union") {
val s =
"""func q(x: Int | String) = { 42 }
|c""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
FUNC(
AnyPos,
CONST_LONG(AnyPos, 42),
PART.VALID(AnyPos, "q"),
Seq((PART.VALID(AnyPos, "x"), Union(Seq(Single(PART.VALID(AnyPos, "Int"), None), Single(PART.VALID(AnyPos, "String"), None)))))
),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: multiline with ; at end of let") {
val s =
"""let q = 1;
|c""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "q"), CONST_LONG(AnyPos, 1)),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: multiline with ; at start of body") {
val s =
"""let q = 1
|; c""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "q"), CONST_LONG(AnyPos, 1)),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: oneline") {
val s = "let q = 1; c"
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "q"), CONST_LONG(AnyPos, 1)),
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
}
property("block: invalid") {
val s = "let q = 1 c"
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "q"), CONST_LONG(AnyPos, 1)),
INVALID(AnyPos, "expected ';'")
)
}
property("should parse a binary operation with block operand") {
val script =
"""let x = a &&
|let y = 1
|true
|true""".stripMargin
parse(script) shouldBe BLOCK(
AnyPos,
LET(
AnyPos,
PART.VALID(AnyPos, "x"),
BINARY_OP(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "a")),
AND_OP,
BLOCK(AnyPos, LET(AnyPos, PART.VALID(AnyPos, "y"), CONST_LONG(AnyPos, 1)), TRUE(AnyPos))
)
),
TRUE(AnyPos)
)
}
property("reserved keywords are invalid variable names in block: if") {
val script =
s"""let if = 1
|true""".stripMargin
parse(script) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.INVALID(AnyPos, "keywords are restricted: if"), CONST_LONG(AnyPos, 1)),
TRUE(AnyPos)
)
}
property("reserved keywords are invalid variable names in block: let") {
val script =
s"""let let = 1
|true""".stripMargin
parse(script) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.INVALID(AnyPos, "keywords are restricted: let"), CONST_LONG(AnyPos, 1)),
TRUE(AnyPos)
)
}
List("then", "else", "true").foreach { keyword =>
property(s"reserved keywords are invalid variable names in block: $keyword") {
val script =
s"""let ${keyword.padTo(4, " ").mkString} = 1
|true""".stripMargin
parse(script) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.INVALID(AnyPos, s"keywords are restricted: $keyword"), CONST_LONG(AnyPos, 1)),
TRUE(AnyPos)
)
}
}
property("reserved keywords are invalid variable names in block: false") {
val script =
s"""let false = 1
|true""".stripMargin
parse(script) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.INVALID(AnyPos, "keywords are restricted: false"), CONST_LONG(AnyPos, 1)),
TRUE(AnyPos)
)
}
property("reserved keywords are invalid variable names in expr: let") {
val script = "let + 1"
parse(script) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.INVALID(AnyPos, "expected a variable's name"), INVALID(AnyPos, "expected a value")),
INVALID(AnyPos, "expected ';'")
)
}
property("reserved keywords are invalid variable names in expr: if") {
val script = "if + 1"
parse(script) shouldBe BINARY_OP(
AnyPos,
IF(AnyPos, INVALID(AnyPos, "expected a condition"), INVALID(AnyPos, "expected a true branch"), INVALID(AnyPos, "expected a false branch")),
BinaryOperation.SUM_OP,
CONST_LONG(AnyPos, 1)
)
}
property("reserved keywords are invalid variable names in expr: then") {
val script = "then + 1"
parse(script) shouldBe BINARY_OP(
AnyPos,
IF(
AnyPos,
INVALID(AnyPos, "expected a condition"),
INVALID(AnyPos, "expected a true branch's expression"),
INVALID(AnyPos, "expected a false branch")
),
BinaryOperation.SUM_OP,
CONST_LONG(AnyPos, 1)
)
}
property("reserved keywords are invalid variable names in expr: else") {
val script = "else + 1"
parse(script) shouldBe BINARY_OP(
AnyPos,
IF(
AnyPos,
INVALID(AnyPos, "expected a condition"),
INVALID(AnyPos, "expected a true branch"),
INVALID(AnyPos, "expected a false branch's expression")
),
BinaryOperation.SUM_OP,
CONST_LONG(AnyPos, 1)
)
}
property("multisig sample") {
val script =
"""
|
|let A = base58'PK1PK1PK1PK1PK1'
|let B = base58'PK2PK2PK2PK2PK2'
|let C = base58'PK3PK3PK3PK3PK3'
|
|let W = tx.bodyBytes
|let P = tx.PROOF
|let V = sigVerify(W,P,A)
|
|let AC = if(V) then 1 else 0
|let BC = if(sigVerify(tx.bodyBytes,tx.PROOF,B)) then 1 else 0
|let CC = if(sigVerify(tx.bodyBytes,tx.PROOF,C)) then 1 else 0
|
| AC + BC+ CC >= 2
|
""".stripMargin
parse(script) // gets parsed, but later will fail on type check!
}
property("function call") {
parse("FOO(1,2)".stripMargin) shouldBe FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "FOO"), List(CONST_LONG(AnyPos, 1), CONST_LONG(AnyPos, 2)))
parse("FOO(X)".stripMargin) shouldBe FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "FOO"), List(REF(AnyPos, PART.VALID(AnyPos, "X"))))
}
property("isDefined") {
parse("isDefined(X)") shouldBe FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "isDefined"), List(REF(AnyPos, PART.VALID(AnyPos, "X"))))
}
property("extract") {
parse("if(isDefined(X)) then extract(X) else Y") shouldBe IF(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "isDefined"), List(REF(AnyPos, PART.VALID(AnyPos, "X")))),
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "extract"), List(REF(AnyPos, PART.VALID(AnyPos, "X")))),
REF(AnyPos, PART.VALID(AnyPos, "Y"))
)
}
property("getter: spaces from left") {
parse("xxx .yyy") shouldBe GETTER(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "xxx")), PART.VALID(AnyPos, "yyy"))
}
property("getter: spaces from right") {
parse("xxx. yyy") shouldBe GETTER(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "xxx")), PART.VALID(AnyPos, "yyy"))
}
property("getter: no spaces") {
parse("xxx.yyy") shouldBe GETTER(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "xxx")), PART.VALID(AnyPos, "yyy"))
}
property("getter on function result") {
parse("xxx(yyy).zzz") shouldBe GETTER(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "xxx"), List(REF(AnyPos, PART.VALID(AnyPos, "yyy")))),
PART.VALID(AnyPos, "zzz")
)
}
property("getter on round braces") {
parse("(xxx(yyy)).zzz") shouldBe GETTER(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "xxx"), List(REF(AnyPos, PART.VALID(AnyPos, "yyy")))),
PART.VALID(AnyPos, "zzz")
)
}
property("getter on curly braces") {
parse("{xxx(yyy)}.zzz") shouldBe GETTER(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "xxx"), List(REF(AnyPos, PART.VALID(AnyPos, "yyy")))),
PART.VALID(AnyPos, "zzz")
)
}
property("getter on block") {
parse(
"""{
| let yyy = aaa(bbb)
| xxx(yyy)
|}.zzz""".stripMargin
) shouldBe GETTER(
AnyPos,
BLOCK(
AnyPos,
LET(
AnyPos,
PART.VALID(AnyPos, "yyy"),
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "aaa"), List(REF(AnyPos, PART.VALID(AnyPos, "bbb"))))
),
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "xxx"), List(REF(AnyPos, PART.VALID(AnyPos, "yyy"))))
),
PART.VALID(AnyPos, "zzz")
)
}
property("multiple getters") {
parse("x.y.z") shouldBe GETTER(AnyPos, GETTER(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "x")), PART.VALID(AnyPos, "y")), PART.VALID(AnyPos, "z"))
}
property("array accessor") {
parse("x[0]") shouldBe FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "getElement"), List(REF(AnyPos, PART.VALID(AnyPos, "x")), CONST_LONG(AnyPos, 0)))
}
property("multiple array accessors") {
parse("x[0][1]") shouldBe FUNCTION_CALL(
AnyPos,
PART.VALID(AnyPos, "getElement"),
List(
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "getElement"), List(REF(AnyPos, PART.VALID(AnyPos, "x")), CONST_LONG(AnyPos, 0))),
CONST_LONG(AnyPos, 1)
)
)
}
property("accessor and getter") {
parse("x[0].y") shouldBe GETTER(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "getElement"), List(REF(AnyPos, PART.VALID(AnyPos, "x")), CONST_LONG(AnyPos, 0))),
PART.VALID(AnyPos, "y")
)
}
property("getter and accessor") {
parse("x.y[0]") shouldBe FUNCTION_CALL(
AnyPos,
PART.VALID(AnyPos, "getElement"),
List(
GETTER(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "x")), PART.VALID(AnyPos, "y")),
CONST_LONG(AnyPos, 0)
)
)
}
property("function call and accessor") {
parse("x(y)[0]") shouldBe FUNCTION_CALL(
AnyPos,
PART.VALID(AnyPos, "getElement"),
List(
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "x"), List(REF(AnyPos, PART.VALID(AnyPos, "y")))),
CONST_LONG(AnyPos, 0)
)
)
}
property("braces in block's let and body") {
val text =
"""let a = (foo)
|(bar)""".stripMargin
parse(text) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "a"), REF(AnyPos, PART.VALID(AnyPos, "foo"))),
REF(AnyPos, PART.VALID(AnyPos, "bar"))
)
}
property("crypto functions: sha256") {
val text = "❤✓☀★☂♞☯☭☢€☎∞❄♫\\u20BD=test message"
val encodedText = Base58.encode(text.getBytes("UTF-8"))
parse(s"sha256(base58'$encodedText')".stripMargin) shouldBe
FUNCTION_CALL(
Pos(0, 96),
PART.VALID(Pos(0, 6), "sha256"),
List(CONST_BYTESTR(Pos(7, 95), PART.VALID(Pos(15, 94), ByteStr(text.getBytes("UTF-8")))))
)
}
property("crypto functions: blake2b256") {
val text = "❤✓☀★☂♞☯☭☢€☎∞❄♫\\u20BD=test message"
val encodedText = Base58.encode(text.getBytes("UTF-8"))
parse(s"blake2b256(base58'$encodedText')".stripMargin) shouldBe
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "blake2b256"), List(CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr(text.getBytes("UTF-8"))))))
}
property("crypto functions: keccak256") {
val text = "❤✓☀★☂♞☯☭☢€☎∞❄♫\\u20BD=test message"
val encodedText = Base58.encode(text.getBytes("UTF-8"))
parse(s"keccak256(base58'$encodedText')".stripMargin) shouldBe
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "keccak256"), List(CONST_BYTESTR(AnyPos, PART.VALID(AnyPos, ByteStr(text.getBytes("UTF-8"))))))
}
property("should parse a binary operation without a second operand") {
val script = "a &&"
parse(script) shouldBe BINARY_OP(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "a")),
AND_OP,
INVALID(AnyPos, "expected a second operator")
)
}
property("simple matching") {
val code =
"""match tx {
| case a: TypeA => 0
| case b: TypeB => 1
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "a")), List(PART.VALID(AnyPos, "TypeA")), CONST_LONG(AnyPos, 0)),
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "b")), List(PART.VALID(AnyPos, "TypeB")), CONST_LONG(AnyPos, 1))
)
)
}
property("multiple union type matching") {
val code =
"""match tx {
| case txa: TypeA => 0
| case underscore : TypeB | TypeC => 1
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "txa")), List(PART.VALID(AnyPos, "TypeA")), CONST_LONG(AnyPos, 0)),
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "underscore")),
List(PART.VALID(AnyPos, "TypeB"), PART.VALID(AnyPos, "TypeC")),
CONST_LONG(AnyPos, 1)
)
)
)
}
property("matching expression") {
val code =
"""match foo(x) + bar {
| case x:TypeA => 0
| case y:TypeB | TypeC => 1
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
BINARY_OP(
AnyPos,
FUNCTION_CALL(AnyPos, PART.VALID(AnyPos, "foo"), List(REF(AnyPos, PART.VALID(AnyPos, "x")))),
BinaryOperation.SUM_OP,
REF(AnyPos, PART.VALID(AnyPos, "bar"))
),
List(
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "x")), List(PART.VALID(AnyPos, "TypeA")), CONST_LONG(AnyPos, 0)),
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "y")), List(PART.VALID(AnyPos, "TypeB"), PART.VALID(AnyPos, "TypeC")), CONST_LONG(AnyPos, 1))
)
)
}
property("pattern matching - allow shadowing") {
val code =
"""match p {
| case p: PointA | PointB => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("pattern matching with valid case, but no type is defined") {
parse("match tx { case x => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "x")),
List.empty,
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching with valid case, placeholder instead of variable name") {
parse("match tx { case _:TypeA => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
None,
List(PART.VALID(AnyPos, "TypeA")),
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching with no cases") {
parse("match tx { } ") shouldBe INVALID(AnyPos, "pattern matching requires case branches")
}
property("pattern matching with invalid case - no variable, type and expr are defined") {
parse("match tx { case => } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
Some(PART.INVALID(AnyPos, "invalid syntax, should be: `case varName: Type => expr` or `case _ => expr`")),
List.empty,
INVALID(AnyPos, "expected expression")
)
)
)
}
property("pattern matching with invalid case - no variable and type are defined") {
parse("match tx { case => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
Some(PART.INVALID(AnyPos, "invalid syntax, should be: `case varName: Type => expr` or `case _ => expr`")),
List.empty,
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching with invalid case - no expr is defined") {
parse("match tx { case TypeA => } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "TypeA")), Seq.empty, INVALID(AnyPos, "expected expression"))
)
)
}
property("pattern matching with invalid case - no var is defined") {
parse("match tx { case :TypeA => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
Some(PART.INVALID(AnyPos, "invalid syntax, should be: `case varName: Type => expr` or `case _ => expr`")),
Seq.empty,
CONST_LONG(AnyPos, 1)
)
)
)
}
ignore("pattern matching with invalid case - expression in variable definition") {
parse("match tx { case 1 + 1 => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
Some(PART.INVALID(AnyPos, "invalid syntax, should be: `case varName: Type => expr` or `case _ => expr`")),
List.empty,
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching with default case - no type is defined, one separator") {
parse("match tx { case _: | => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
TypedVar(None, Single(PART.INVALID(AnyPos, "the type for variable should be specified: `case varName: Type => expr`"))),
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching with default case - no type is defined, multiple separators") {
parse("match tx { case _: |||| => 1 } ") shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
TypedVar(None, Single(PART.INVALID(AnyPos, "the type for variable should be specified: `case varName: Type => expr`"))),
CONST_LONG(AnyPos, 1)
)
)
)
}
property("pattern matching - incomplete binary operation") {
val script =
"""match tx {
| case a => true &&
| case b => 1
|}""".stripMargin
parse(script) shouldBe
MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
TypedVar(Some(PART.VALID(AnyPos, "a")), Union(List())),
BINARY_OP(AnyPos, TRUE(AnyPos), AND_OP, INVALID(AnyPos, "expected a second operator"))
),
MATCH_CASE(AnyPos, Some(PART.VALID(AnyPos, "b")), List(), CONST_LONG(AnyPos, 1))
)
)
}
property("pattern matching - incomplete binary operation with block") {
val script =
"""match tx {
| case a =>
| let x = true
| x &&
| case b => 1
|}""".stripMargin
parse(script) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "tx")),
List(
MATCH_CASE(
AnyPos,
TypedVar(Some(PART.VALID(AnyPos, "a")), Union(List())),
BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "x"), TRUE(AnyPos)),
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "x")), AND_OP, INVALID(AnyPos, "expected a second operator"))
)
),
MATCH_CASE(AnyPos, TypedVar(Some(PART.VALID(AnyPos, "b")), Union(List.empty)), CONST_LONG(AnyPos, 1))
)
)
}
property("if expressions") {
parse("if (10 < 15) then true else false") shouldBe IF(
AnyPos,
BINARY_OP(AnyPos, CONST_LONG(AnyPos, 15), LT_OP, CONST_LONG(AnyPos, 10)),
TRUE(AnyPos),
FALSE(AnyPos)
)
parse("if 10 < 15 then true else false") shouldBe IF(
AnyPos,
BINARY_OP(AnyPos, CONST_LONG(AnyPos, 15), LT_OP, CONST_LONG(AnyPos, 10)),
TRUE(AnyPos),
FALSE(AnyPos)
)
parse(s"""if (10 < 15)
|then true
|else false""".stripMargin) shouldBe IF(
AnyPos,
BINARY_OP(AnyPos, CONST_LONG(AnyPos, 15), LT_OP, CONST_LONG(AnyPos, 10)),
TRUE(AnyPos),
FALSE(AnyPos)
)
parse(s"""if 10 < 15
|then true
|else false""".stripMargin) shouldBe IF(
AnyPos,
BINARY_OP(AnyPos, CONST_LONG(AnyPos, 15), LT_OP, CONST_LONG(AnyPos, 10)),
TRUE(AnyPos),
FALSE(AnyPos)
)
}
property("underscore in numbers") {
parse("100_000_000") shouldBe CONST_LONG(AnyPos, 100000000)
}
property("comments - the whole line at start") {
val code =
"""# foo
|true""".stripMargin
parse(code) shouldBe TRUE(AnyPos)
}
property("comments - the whole line at end") {
val code =
"""true
|# foo""".stripMargin
parse(code) shouldBe TRUE(AnyPos)
}
property("comments - block - after let") {
val s =
"""let # foo
| x = true
|x""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "x"), TRUE(AnyPos)),
REF(AnyPos, PART.VALID(AnyPos, "x"))
)
}
property("comments - block - before assignment") {
val s =
"""let x # foo
| = true
|x""".stripMargin
parse(s) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "x"), TRUE(AnyPos)),
REF(AnyPos, PART.VALID(AnyPos, "x"))
)
}
property("comments - block - between LET and BODY (full line)") {
val code =
"""let x = true
|# foo
|x""".stripMargin
parse(code) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "x"), TRUE(AnyPos)),
REF(AnyPos, PART.VALID(AnyPos, "x"))
)
}
property("comments - block - between LET and BODY (at end of a line)") {
val code =
"""let x = true # foo
|x""".stripMargin
parse(code) shouldBe BLOCK(
AnyPos,
LET(AnyPos, PART.VALID(AnyPos, "x"), TRUE(AnyPos)),
REF(AnyPos, PART.VALID(AnyPos, "x"))
)
}
property("comments - if - after condition") {
val code =
"""if 10 < 15 # test
|then true else false""".stripMargin
parse(code) shouldBe IF(
AnyPos,
BINARY_OP(AnyPos, CONST_LONG(AnyPos, 15), LT_OP, CONST_LONG(AnyPos, 10)),
TRUE(AnyPos),
FALSE(AnyPos)
)
}
property("comments - pattern matching - after case") {
val code =
"""match p {
| case # test
| p: PointA | PointB => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - after variable") {
val code =
"""match p {
| case p # test
| : PointA
| | PointB => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - before types") {
val code =
"""match p {
| case p: # test
| PointA | PointB => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - before a value's block") {
val code =
"""match p {
| case p: PointA | PointB # test
| => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - in a type definition - 1") {
val code =
"""match p {
| case p : PointA # foo
| | PointB # bar
| => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - in a type definition - 2") {
val code =
"""match p {
| case p: PointA | # foo
| PointB # bar
| => true
| case _ => false
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - pattern matching - between cases") {
val code =
"""match p {
| # foo
| case p: PointA | PointB => true
| # bar
| case _ => false
| # baz
|}""".stripMargin
parse(code) shouldBe MATCH(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "p")),
List(
MATCH_CASE(
AnyPos,
Some(PART.VALID(AnyPos, "p")),
List(PART.VALID(AnyPos, "PointA"), PART.VALID(AnyPos, "PointB")),
TRUE(AnyPos)
),
MATCH_CASE(
AnyPos,
None,
List.empty,
FALSE(AnyPos)
)
)
)
}
property("comments - getter - before dot") {
val code =
"""x # foo
|.y""".stripMargin
parse(code) shouldBe GETTER(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "x")),
PART.VALID(AnyPos, "y")
)
}
property("comments - getter - after dot") {
val code =
"""x. # foo
|y""".stripMargin
parse(code) shouldBe GETTER(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "x")),
PART.VALID(AnyPos, "y")
)
}
property("comments - function call") {
val code =
"""f(
| # foo
| 1 # bar
| # baz
| , 2
| # quux
|)""".stripMargin
parse(code) shouldBe FUNCTION_CALL(
AnyPos,
PART.VALID(AnyPos, "f"),
List(CONST_LONG(AnyPos, 1), CONST_LONG(AnyPos, 2))
)
}
property("comments - array") {
val code =
"""xs[
| # foo
| 1
| # bar
|]""".stripMargin
parse(code) shouldBe FUNCTION_CALL(
AnyPos,
PART.VALID(AnyPos, "getElement"),
List(REF(AnyPos, PART.VALID(AnyPos, "xs")), CONST_LONG(AnyPos, 1))
)
}
property("comments - in func and around") {
val code =
"""
|
| # comment 1
| func foo() = # comment 2
| { # more comments
| throw()
| } # comment 3
|
| foo()
|
""".stripMargin
parse(code)
}
property("operations priority") {
parse("a-b+c") shouldBe BINARY_OP(
AnyPos,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "a")), SUB_OP, REF(AnyPos, PART.VALID(AnyPos, "b"))),
SUM_OP,
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
parse("a+b-c") shouldBe BINARY_OP(
AnyPos,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "a")), SUM_OP, REF(AnyPos, PART.VALID(AnyPos, "b"))),
SUB_OP,
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
parse("a+b*c") shouldBe BINARY_OP(
AnyPos,
REF(AnyPos, PART.VALID(AnyPos, "a")),
SUM_OP,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "b")), MUL_OP, REF(AnyPos, PART.VALID(AnyPos, "c")))
)
parse("a*b-c") shouldBe BINARY_OP(
AnyPos,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "a")), MUL_OP, REF(AnyPos, PART.VALID(AnyPos, "b"))),
SUB_OP,
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
parse("a/b*c") shouldBe BINARY_OP(
AnyPos,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "a")), DIV_OP, REF(AnyPos, PART.VALID(AnyPos, "b"))),
MUL_OP,
REF(AnyPos, PART.VALID(AnyPos, "c"))
)
parse("a<b==c>=d") shouldBe BINARY_OP(
AnyPos,
BINARY_OP(
AnyPos,
BINARY_OP(AnyPos, REF(AnyPos, PART.VALID(AnyPos, "b")), EQ_OP, REF(AnyPos, PART.VALID(AnyPos, "c"))),
LT_OP,
REF(AnyPos, PART.VALID(AnyPos, "a"))
),
GE_OP,
REF(AnyPos, PART.VALID(AnyPos, "d"))
)
}
property("allow name starts with kerword") {
parse("ifx") shouldBe REF(AnyPos, PART.VALID(AnyPos, "ifx"))
parse("thenx") shouldBe REF(AnyPos, PART.VALID(AnyPos, "thenx"))
parse("elsex") shouldBe REF(AnyPos, PART.VALID(AnyPos, "elsex"))
parse("matchx") shouldBe REF(AnyPos, PART.VALID(AnyPos, "matchx"))
parse("truex") shouldBe REF(AnyPos, PART.VALID(AnyPos, "truex"))
parse("falsex") shouldBe REF(AnyPos, PART.VALID(AnyPos, "falsex"))
}
property("parser StackOverflow check") {
val depth = 10000
val lastStmt = (1 to depth).foldLeft("i0") { (acc, i) =>
s"$acc + i$i"
}
val manyLets = (1 to depth).foldLeft("let i0 = 1") { (acc, i) =>
s"$acc\\nlet i$i = 1"
}
val script = s"$manyLets\\n$lastStmt"
Parser.parseExpr(script) shouldBe an[Success[_]]
}
}
| wavesplatform/Waves | lang/tests/src/test/scala/com/wavesplatform/lang/ScriptParserTest.scala | Scala | mit | 39,704 |
package me.axiometry.blocknet.nbt
import java.io._
class NBTInputStream(in: InputStream) extends DataInputStream(in) {
private def invalid() = throw new IOException("invalid NBT tag")
def readNBTTag(): NBT.Tag = readNBTTag(true) match {
case Some(tag) => tag
case None => invalid
}
private def readNBTTag(withName: Boolean): Option[NBT.Tag] = {
readByte match {
case 0 => None
case id => NBT.Tag.Type.byId get id match {
case Some(t) => Some(readNBTTag(t, if(withName) Some(readUTF) else None))
case None => invalid
}
}
}
private def readNBTTag(t: NBT.Tag.Type, name: Option[String]): NBT.Tag = {
import NBT.Tag; import NBT.Tag.Type
t match {
case Type.Byte => new Tag.Byte(name, readByte)
case Type.Short => new Tag.Short(name, readShort)
case Type.Int => new Tag.Int(name, readInt)
case Type.Long => new Tag.Long(name, readLong)
case Type.Float => new Tag.Float(name, readFloat)
case Type.Double => new Tag.Double(name, readDouble)
case Type.ByteArray => new Tag.ByteArray(name, Seq.fill(readInt)(readByte))
case Type.IntArray => new Tag.IntArray(name, Seq.fill(readInt)(readInt))
case Type.String => new Tag.String(name, readUTF)
case Type.List =>
val listType = Type.byId get (readByte) match {
case Some(t) => t
case None => invalid
}
val tags = Seq.fill(readInt)(readNBTTag(listType, None))
new Tag.List(name, listType, tags: _*)
case Type.Compound =>
val tags = scala.collection.mutable.ListBuffer[Tag]()
while(readNBTTag(true) match {
case Some(tag) => tags += tag; true
case None => false
}) {}
new Tag.Compound(name, tags.toList: _*)
}
}
}
| Axiometry/Blocknet | blocknet-minecraft/src/main/scala/me/axiometry/blocknet/minecraft/nbt/NBTInputStream.scala | Scala | bsd-2-clause | 1,833 |
package com.cloudwick.generator.odvs
import scala.util.Random
/**
* Description goes here
* @author ashrith
*/
class ODVSGenerator(customersMap: Map[Long, String], movieGenerator: MovieGenerator) {
private val customersSize = customersMap.size
def eventGenerate = {
val custID = Random.nextInt(customersSize) + 1
val custName = customersMap(custID)
val movieInfo: Array[String] = movieGenerator.gen
val customer = new Customers(custID, custName, movieInfo(3).toInt)
new ODVSEvent(
custID,
custName,
customer.userActiveOrNot.toInt,
customer.timeWatched,
customer.pausedTime,
customer.rating,
movieInfo(0), //movieId
movieInfo(1).replace("'", ""), //movieName
movieInfo(2), //movieReleaseDate
movieInfo(3).toInt, //movieRunTime
movieInfo(4) //movieGenre
)
}
}
| vaagrawa/generator | src/main/scala/com/cloudwick/generator/odvs/ODVSGenerator.scala | Scala | apache-2.0 | 862 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.scaladsl.broker.kafka
import akka.actor.ActorSystem
import akka.stream.Materializer
import com.lightbend.lagom.internal.broker.kafka.KafkaConfig
import com.lightbend.lagom.internal.scaladsl.api.broker.TopicFactory
import com.lightbend.lagom.scaladsl.api.Descriptor.TopicCall
import com.lightbend.lagom.scaladsl.api.ServiceInfo
import com.lightbend.lagom.scaladsl.api.ServiceLocator
import com.lightbend.lagom.scaladsl.api.broker.Topic
import com.typesafe.config.Config
import scala.concurrent.ExecutionContext
/**
* Factory for creating topics instances.
*/
private[lagom] class KafkaTopicFactory(
serviceInfo: ServiceInfo,
system: ActorSystem,
serviceLocator: ServiceLocator,
config: Config
)(implicit materializer: Materializer, executionContext: ExecutionContext)
extends TopicFactory {
@deprecated("Use constructor that accepts a Config", "2.0.0")
def this(serviceInfo: ServiceInfo, system: ActorSystem, serviceLocator: ServiceLocator)(
implicit materializer: Materializer,
executionContext: ExecutionContext
) = {
this(serviceInfo, system, serviceLocator, system.settings.config)
}
private val kafkaConfig = KafkaConfig(config)
def create[Message](topicCall: TopicCall[Message]): Topic[Message] = {
new ScaladslKafkaTopic(kafkaConfig, topicCall, serviceInfo, system, serviceLocator)
}
}
| ignasi35/lagom | service/scaladsl/kafka/client/src/main/scala/com/lightbend/lagom/internal/scaladsl/broker/kafka/KafkaTopicFactory.scala | Scala | apache-2.0 | 1,468 |
/*
* Copyright 2011, 2012 Johannes Rudolph
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.virtualvoid.sbt.graph
import xml.parsing.ConstructingParser
import java.io.File
import collection.mutable.HashMap
import collection.mutable.MultiMap
import collection.mutable.{Set => MSet}
import sbt.ConsoleLogger
import xml.{NodeSeq, Document, XML, Node}
import com.github.mdr.ascii.layout
import layout._
import sbinary.{Format, DefaultProtocol}
/**
 * Parses an Ivy resolution report (XML) into a module dependency graph and
 * renders it in several formats: ASCII tree/graph, graphML and dot.
 *
 * Run as an application it expects two arguments: the path to the Ivy report
 * file and the graphML output file.
 */
object IvyGraphMLDependencies extends App {
  /** Coordinates uniquely identifying a resolved module. */
  case class ModuleId(organisation: String,
                      name: String,
                      version: String) {
    def idString: String = organisation+":"+name+":"+version
  }
  /**
   * A resolved module together with its resolution metadata.
   *
   * @param evictedByVersion if set, the version that evicted this module
   * @param error            if set, the resolution error reported by Ivy
   */
  case class Module(id: ModuleId,
                    license: Option[String] = None,
                    extraInfo: String = "",
                    evictedByVersion: Option[String] = None,
                    error: Option[String] = None) {
    def hadError: Boolean = error.isDefined
    def isUsed: Boolean = !evictedByVersion.isDefined
  }
  /** A directed dependency edge: (dependent module, dependency). */
  type Edge = (ModuleId, ModuleId)
  case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
    lazy val modules: Map[ModuleId, Module] =
      nodes.map(n => (n.id, n)).toMap
    def module(id: ModuleId): Module = modules(id)
    /** Modules each module depends on. */
    lazy val dependencyMap: Map[ModuleId, Seq[Module]] =
      createMap(identity)
    /** Modules depending on each module. */
    lazy val reverseDependencyMap: Map[ModuleId, Seq[Module]] =
      createMap { case (a, b) => (b, a) }
    /** Builds an adjacency map after applying `bindingFor` to each edge. */
    def createMap(bindingFor: ((ModuleId, ModuleId)) => (ModuleId, ModuleId)): Map[ModuleId, Seq[Module]] = {
      val m = new HashMap[ModuleId, MSet[Module]] with MultiMap[ModuleId, Module]
      edges.foreach { entry =>
        val (f, t) = bindingFor(entry)
        m.addBinding(f, module(t))
      }
      m.toMap.mapValues(_.toSeq.sortBy(_.id.idString)).withDefaultValue(Nil)
    }
  }
  def graph(ivyReportFile: String): ModuleGraph =
    buildGraph(buildDoc(ivyReportFile))
  /** Builds the graph from a parsed Ivy report document. */
  def buildGraph(doc: Document): ModuleGraph = {
    def edgesForModule(id: ModuleId, revision: NodeSeq): Seq[Edge] =
      for {
        caller <- revision \\ "caller"
        // Reuse the parsed caller id instead of parsing the element a second
        // time in the yield (previously the val was computed but unused).
        callerModule = moduleIdFromElement(caller, caller.attribute("callerrev").get.text)
      } yield (callerModule, id)
    val moduleEdges: Seq[(Module, Seq[Edge])] = for {
      mod <- doc \\ "dependencies" \\ "module"
      revision <- mod \\ "revision"
      rev = revision.attribute("name").get.text
      moduleId = moduleIdFromElement(mod, rev)
      module = Module(moduleId,
        (revision \\ "license").headOption.flatMap(_.attribute("name")).map(_.text),
        evictedByVersion = (revision \\ "evicted-by").headOption.flatMap(_.attribute("rev").map(_.text)),
        error = revision.attribute("error").map(_.text))
    } yield (module, edgesForModule(moduleId, revision))
    val (nodes, edges) = moduleEdges.unzip
    // The root module (the project itself) only appears in the <info> element.
    val info = (doc \\ "info").head
    def infoAttr(name: String): String =
      info.attribute(name).getOrElse(throw new IllegalArgumentException("Missing attribute "+name)).text
    val rootModule = Module(ModuleId(infoAttr("organisation"), infoAttr("module"), infoAttr("revision")))
    ModuleGraph(rootModule +: nodes, edges.flatten)
  }
  /** Graph of everything that (transitively) depends on `root`, edges reversed. */
  def reverseGraphStartingAt(graph: ModuleGraph, root: ModuleId): ModuleGraph = {
    val deps = graph.reverseDependencyMap
    // NOTE(review): `visited` only accumulates along a single path, so shared
    // sub-graphs may be traversed more than once — confirm this is intended.
    def visit(module: ModuleId, visited: Set[ModuleId]): Seq[(ModuleId, ModuleId)] =
      if (visited(module))
        Nil
      else
        deps.get(module) match {
          case Some(deps) =>
            deps.flatMap { to =>
              (module, to.id) +: visit(to.id, visited + module)
            }
          case None => Nil
        }
    val edges = visit(root, Set.empty)
    val nodes = edges.foldLeft(Set.empty[ModuleId])((set, edge) => set + edge._1 + edge._2).map(graph.module)
    ModuleGraph(nodes.toSeq, edges)
  }
  /**
   * Removes scala-library nodes/edges and annotates modules that depended on
   * it with " [S]" instead.
   */
  def ignoreScalaLibrary(scalaVersion: String, graph: ModuleGraph): ModuleGraph = {
    def isScalaLibrary(m: Module) = isScalaLibraryId(m.id)
    def isScalaLibraryId(id: ModuleId) = id.organisation == "org.scala-lang" && id.name == "scala-library"
    def dependsOnScalaLibrary(m: Module): Boolean =
      graph.dependencyMap(m.id).exists(isScalaLibrary)
    def addScalaLibraryAnnotation(m: Module): Module = {
      if (dependsOnScalaLibrary(m))
        m.copy(extraInfo = m.extraInfo + " [S]")
      else
        m
    }
    val newNodes = graph.nodes.map(addScalaLibraryAnnotation).filterNot(isScalaLibrary)
    val newEdges = graph.edges.filterNot(e => isScalaLibraryId(e._2))
    ModuleGraph(newNodes, newEdges)
  }
  def asciiGraph(graph: ModuleGraph): String =
    Layouter.renderGraph(buildAsciiGraph(graph))
  def asciiTree(graph: ModuleGraph): String = {
    val deps = graph.dependencyMap
    // there should only be one root node (the project itself)
    val roots = graph.nodes.filter(n => !graph.edges.exists(_._2 == n.id)).sortBy(_.id.idString)
    roots.map { root =>
      Graph.toAscii[Module](root, node => deps.getOrElse(node.id, Seq.empty[Module]), displayModule)
    }.mkString("\\n")
  }
  /** One-line rendering of a module; rendered red when resolution failed. */
  def displayModule(module: Module): String =
    red(module.id.idString +
      module.extraInfo +
      module.error.map(" (error: "+_+")").getOrElse("") +
      module.evictedByVersion.map(_ formatted " (evicted by: %s)").getOrElse(""), module.hadError)
  private def buildAsciiGraph(moduleGraph: ModuleGraph): layout.Graph[String] = {
    def renderVertex(module: Module): String =
      module.id.name + module.extraInfo + "\\n" +
        module.id.organisation + "\\n" +
        module.id.version +
        module.error.map("\\nerror: "+_).getOrElse("") +
        module.evictedByVersion.map(_ formatted "\\nevicted by: %s").getOrElse("")
    val vertices = moduleGraph.nodes.map(renderVertex).toList
    val edges = moduleGraph.edges.toList.map { case (from, to) ⇒ (renderVertex(moduleGraph.module(from)), renderVertex(moduleGraph.module(to))) }
    layout.Graph(vertices, edges)
  }
  /** Writes the graph as yFiles-flavoured graphML. */
  def saveAsGraphML(graph: ModuleGraph, outputFile: String) {
    val nodesXml =
      for (n <- graph.nodes)
        yield
          <node id={n.id.idString}><data key="d0">
            <y:ShapeNode>
              <y:NodeLabel>{n.id.idString}</y:NodeLabel>
            </y:ShapeNode>
          </data></node>
    val edgesXml =
      for (e <- graph.edges)
        yield <edge source={e._1.idString} target={e._2.idString} />
    val xml =
      <graphml xmlns="http://graphml.graphdrawing.org/xmlns"
               xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
               xmlns:y="http://www.yworks.com/xml/graphml"
               xsi:schemaLocation="http://graphml.graphdrawing.org/xmlns http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd">
        <key for="node" id="d0" yfiles.type="nodegraphics"/>
        <graph id="Graph" edgedefault="undirected">
          {nodesXml}
          {edgesXml}
        </graph>
      </graphml>
    XML.save(outputFile, xml)
  }
  /** Writes the graph in dot format; node labels come from `nodeFormation`. */
  def saveAsDot(graph: ModuleGraph,
                dotHead: String,
                nodeFormation: (String, String, String) => String,
                outputFile: File): File = {
    val nodes = {
      for (n <- graph.nodes)
        yield
          """ "%s"[label=%s]""".format(n.id.idString,
            nodeFormation(n.id.organisation, n.id.name, n.id.version))
    }.mkString("\\n")
    val edges = {
      for ( e <- graph.edges)
        yield
          """ "%s" -> "%s"""".format(e._1.idString, e._2.idString)
    }.mkString("\\n")
    val dot = "%s\\n%s\\n%s\\n}".format(dotHead, nodes, edges)
    sbt.IO.write(outputFile, dot)
    outputFile
  }
  def moduleIdFromElement(element: Node, version: String): ModuleId =
    ModuleId(element.attribute("organisation").get.text, element.attribute("name").get.text, version)
  private def buildDoc(ivyReportFile: String) = ConstructingParser.fromSource(io.Source.fromFile(ivyReportFile), false).document
  /** Wraps `str` in ANSI red when `doRed` is set and the console supports it. */
  def red(str: String, doRed: Boolean): String =
    if (ConsoleLogger.formatEnabled && doRed)
      Console.RED + str + Console.RESET
    else
      str
  def die(msg: String): Nothing = {
    println(msg)
    sys.exit(1)
  }
  def usage: String =
    "Usage: <ivy-report-file> <output-file>"
  // Application entry point: validate the arguments and write the graphML file.
  val reportFile = args.lift(0).filter(f => new File(f).exists).getOrElse(die(usage))
  val outputFile = args.lift(1).getOrElse(die(usage))
  saveAsGraphML(graph(reportFile), outputFile)
}
/** sbinary serialization formats for caching/persisting the dependency graph. */
object ModuleGraphProtocol extends DefaultProtocol {
  import IvyGraphMLDependencies._
  // Any Seq is serialized via its List representation.
  implicit def seqFormat[T: Format]: Format[Seq[T]] = wrap[Seq[T], List[T]](_.toList, _.toSeq)
  implicit val ModuleIdFormat: Format[ModuleId] = asProduct3(ModuleId)(ModuleId.unapply(_).get)
  implicit val ModuleFormat: Format[Module] = asProduct5(Module)(Module.unapply(_).get)
  implicit val ModuleGraphFormat: Format[ModuleGraph] = asProduct2(ModuleGraph)(ModuleGraph.unapply(_).get)
}
| rintcius/sbt-dependency-graph | src/main/scala/net/virtualvoid/sbt/graph/IvyGraphMLDependencies.scala | Scala | apache-2.0 | 9,539 |
package com.arcusys.valamis.lesson.scorm.storage.sequencing
import com.arcusys.valamis.lesson.scorm.model.manifest.SequencingTracking
/** Persistence operations for SCORM sequencing-tracking data. */
trait SequencingTrackingStorage {
  /** Stores `entity` as the tracking data of the sequencing with the given id. */
  def create(sequencingId: Long, entity: SequencingTracking)
  /** Returns the tracking data for the sequencing id, if any exists. */
  def get(sequencingId: Long): Option[SequencingTracking]
}
| igor-borisov/valamis | valamis-scorm-lesson/src/main/scala/com/arcusys/valamis/lesson/scorm/storage/sequencing/SequencingTrackingStorage.scala | Scala | gpl-3.0 | 291 |
/*
* MUSIT is a museum database to archive natural and cultural history data.
* Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License,
* or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package services
import com.google.inject.Inject
import models.{MusitObject, ObjectSearchResult}
import no.uio.musit.MusitResults._
import no.uio.musit.models._
import no.uio.musit.security.AuthenticatedUser
import play.api.Logger
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import repositories.dao.{ObjectDao, StorageNodeDao}
import scala.concurrent.Future
import scala.util.control.NonFatal
/**
 * Application service for looking up and searching museum objects, augmenting
 * search/barcode results with each object's current storage-node location.
 */
class ObjectService @Inject()(
    val objDao: ObjectDao,
    val nodeDao: StorageNodeDao
) {
  private val logger = Logger(classOf[ObjectService])
  /**
   * Service that looks up objects using the old primary key for the old DB
   * schema name. Implementation is specific to the Delphi client integration.
   *
   * @param oldSchema The old DB schema name
   * @param oldObjectIds The local primary key for the given schema name.
   * @return A list containing the _new_ ObjectIds for the objects.
   */
  def findByOldObjectIds(
      oldSchema: String,
      oldObjectIds: Seq[Long]
  ): Future[MusitResult[Seq[ObjectId]]] = {
    objDao.findObjectIdsForOld(oldSchema, oldObjectIds)
  }
  /**
   * Looks up a single object by its UUID, restricted to the given museum and
   * collections. Note: the result is NOT augmented with the current location.
   */
  def findByUUID(
      mid: MuseumId,
      objectUUID: ObjectUUID,
      cids: Seq[MuseumCollection]
  )(implicit currUsr: AuthenticatedUser): Future[MusitResult[Option[MusitObject]]] = {
    objDao.findByUUID(mid, objectUUID, cids)
  }
  /**
   * A helper method for getting the current location of an object
   *
   * @param mid The MuseumId to look in
   * @param obj The MusitObject to look for
   * @return The augmented object with path, pathNames and currentLocationId
   */
  private def getCurrentLocation(
      mid: MuseumId,
      obj: MusitObject
  ): Future[MusitObject] =
    nodeDao.currentLocation(mid, obj.id).flatMap {
      case Some(nodeIdAndPath) =>
        // Resolve human-readable names for the node path before copying them in.
        nodeDao.namesForPath(nodeIdAndPath._2).map { pathNames =>
          obj.copy(
            currentLocationId = Some(nodeIdAndPath._1),
            path = Some(nodeIdAndPath._2),
            pathNames = Some(pathNames)
          )
        }
      case None =>
        // No known location: return the object unchanged.
        Future.successful(obj)
    }
  /**
   * Locate object(s) based on museum, old barcode and collection(s).
   *
   * @param mid The MuseumId to look for objects in.
   * @param oldBarcode The bar code to look for.
   * @param collections Which collections to look in.
   * @param currUsr The currently authenticated user.
   * @return A list of objects that share tha same bare code
   */
  def findByOldBarcode(
      mid: MuseumId,
      oldBarcode: Long,
      collections: Seq[MuseumCollection]
  )(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[MusitObject]]] = {
    objDao.findByOldBarcode(mid, oldBarcode, collections).flatMap {
      case MusitSuccess(objs) =>
        // Augment every match with its current location; fail the whole result
        // if any of those lookups blows up.
        Future
          .sequence(objs.map(getCurrentLocation(mid, _)))
          .map(MusitSuccess.apply)
          .recover {
            case NonFatal(ex) =>
              val msg = s"An error occured when executing object search by old barcode"
              logger.error(msg, ex)
              MusitInternalError(msg)
          }
      case err: MusitError =>
        Future.successful(err)
    }
  }
  /**
   * Locate objects that share the same main object ID.
   *
   * @param mid The MuseumId to look for objects in.
   * @param mainObjectId The main object ID to look for.
   * @param collectionIds Which collections to look in.
   * @param currUsr The currently authenticated user.
   * @return A list of objects that share the same main object ID.
   */
  def findMainObjectChildren(
      mid: MuseumId,
      mainObjectId: ObjectId,
      collectionIds: Seq[MuseumCollection]
  )(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[MusitObject]]] = {
    objDao.findMainObjectChildren(mid, mainObjectId, collectionIds)
  }
  /**
   * Locate objects in the specified museum, node and collection(s).
   *
   * @param mid The MuseumId to look for objects in.
   * @param nodeId The specific StorageNodeDatabaseId to look for objects in.
   * @param collectionIds Specifies collections to fetch objects for.
   * @param page The page number to retrieve.
   * @param limit The number of results per page.
   * @param currUsr The currently authenticated user.
   * @return A list of objects matching the given criteria.
   */
  def findObjects(
      mid: MuseumId,
      nodeId: StorageNodeDatabaseId,
      collectionIds: Seq[MuseumCollection],
      page: Int,
      limit: Int
  )(implicit currUsr: AuthenticatedUser): Future[MusitResult[PagedResult[MusitObject]]] = {
    objDao.pagedObjects(mid, nodeId, collectionIds, page, limit)
  }
  /**
   * Search for objects based on the given criteria.
   *
   * @param mid The MuseumId to search for objects in
   * @param collectionIds The collections to search for objects in.
   * @param page The page number to retrieve.
   * @param limit The number of results per page.
   * @param museumNo The MuseumNo to find matches for.
   * @param subNo The SubNo to find matches for.
   * @param term The object term to find matches for.
   * @param currUsr The currently authenticated user.
   * @return A list of search results matching the given criteria.
   */
  def search(
      mid: MuseumId,
      collectionIds: Seq[MuseumCollection],
      page: Int,
      limit: Int,
      museumNo: Option[MuseumNo],
      subNo: Option[SubNo],
      term: Option[String]
  )(implicit currUsr: AuthenticatedUser): Future[MusitResult[ObjectSearchResult]] = {
    objDao.search(mid, page, limit, museumNo, subNo, term, collectionIds).flatMap {
      case MusitSuccess(searchResult) =>
        // We found some objects...now we need to find the current location for each.
        Future
          .sequence(searchResult.matches.map(getCurrentLocation(mid, _)))
          .map { objects =>
            MusitSuccess(searchResult.copy(matches = objects))
          }
          .recover {
            case NonFatal(ex) =>
              val msg = s"An error occured when executing object search"
              logger.error(msg, ex)
              MusitInternalError(msg)
          }
      case err: MusitError =>
        Future.successful(err)
    }
  }
}
| kpmeen/musit | service_thing_aggregate/app/services/ObjectService.scala | Scala | gpl-2.0 | 7,170 |
package ch.uzh.dyndco.algorithms.maxsum
import collection.mutable.Map
import collection.mutable.Set
import com.signalcollect.AbstractVertex
import ch.uzh.dyndco.problems.MeetingConstraints
import scala.collection.mutable.MutableList
import com.signalcollect.Graph
import com.signalcollect.StateForwarderEdge
import ch.uzh.dyndco.problems.MeetingSchedulingProblem
import com.signalcollect.GraphBuilder
import ch.uzh.dyndco.problems.Problem
import ch.uzh.dyndco.util.Tabulator
import ch.uzh.dyndco.stack.graph.DynamicGraph
import ch.uzh.dyndco.stack.vertex.DynamicVertex
import ch.uzh.dyndco.stack.graph.GraphFactory
import ch.uzh.dyndco.stack.vertex.MeetingSchedulingVertex
/**
 * Factor graph for the Max-Sum DCOP algorithm: variable vertices, function
 * (factor) vertices, per-neighbourhood variable-to-function assignments and
 * the underlying signal/collect graph.
 */
class MaxSumGraph (
    varVertices_ : Set[VariableVertex],
    funcVertices_ : Set[FunctionVertex],
    neighbourhoods_ : Map[Int, Map[VariableVertex,FunctionVertex]],
    agentIndices_ : Map[Int, Map[Any,Int]],
    meetingIndices_ : Map[Int, Map[Any,Int]],
    graph_ : Graph[Any,Any]
)
  extends DynamicGraph {
  var varVertices = varVertices_
  var funcVertices = funcVertices_
  var neighbourhoods = neighbourhoods_
  var agentIndices = agentIndices_
  var meetingIndices = meetingIndices_
  var graph = graph_
  // Ids for the next neighbourhood/agent are simply the current count + 1.
  def nextNeighbourhood() : Int = neighbourhoods.size + 1
  def nextAgent : Int = varVertices.size + 1
  def numOfAgents : Int = varVertices.size
  def numOfNeighbourhoods : Int = neighbourhoods.size
  // Agents of a Max-Sum graph are its variable vertices.
  def getAgents : Set[DynamicVertex] = varVertices.asInstanceOf[Set[DynamicVertex]]
  def getFactory : GraphFactory[DynamicGraph, Problem] = MaxSumGraphFactory.asInstanceOf[GraphFactory[DynamicGraph, Problem]]
  // Prints meeting and agent results; assumes the vertices are
  // meeting-scheduling vertices (unchecked casts).
  def show {
    showMeetingResultsMultiple(neighbourhoods
      .asInstanceOf[Map[Int, Map[MeetingSchedulingVertex,MeetingSchedulingVertex]]])
    showAgentResults(varVertices
      .asInstanceOf[Set[MeetingSchedulingVertex]])
  }
}
| danihegglin/DynDCO | src/main/scala/ch/uzh/dyndco/algorithms/maxsum/MaxSumGraph.scala | Scala | apache-2.0 | 1,875 |
package hu.frankdavid.diss.expression
import hu.frankdavid.diss.DataTable
import hu.frankdavid.diss.types.{MatrixLike, Matrix}
// Expression that counts from 0 up to the value of its first operand and then
// evaluates to 0. NOTE(review): the loop appears to exist purely to burn CPU
// time proportional to the operand (cost simulation) — confirm intent.
case class CountTo(override val parameters: Array[HasValue]) extends Expression {
  // No-arg constructor; presumably required for (de)serialization — verify.
  def this() = this(null)
  def evaluate(implicit table: DataTable) = {
    val to = operands(table)(0).asInstanceOf[Long]
    var i = 0L
    while(i < to)
      i += 1
    0
  }
  // No map-reduce decomposition is available for this expression.
  def mapReduce(maxCost: Int) = None
  // Declared cost equals the count target itself.
  def cost(implicit table: DataTable) = operands(table)(0).toString.toLong
} | frankdavid/diss | src/main/scala/hu/frankdavid/diss/expression/CountTo.scala | Scala | apache-2.0 | 506 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.internal.{UpSampling3D => BigDLUpSampling3D}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.Net
import com.intel.analytics.bigdl.dllib.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
* UpSampling layer for 3D inputs.
* Repeats the 1st, 2nd and 3rd dimensions of the data by size(0), size(1) and size(2) respectively.
* Data format currently supported for this layer is 'CHANNEL_FIRST' (dimOrdering='th').
* The input of this layer should be 5D.
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* @param size Int array of length 3. UpSampling factors for dim1, dim2 and dim3.
* Default is (2, 2, 2).
* @param dimOrdering Format of the input data. Please use "CHANNEL_FIRST" (dimOrdering='th').
* @param inputShape A Single Shape, does not include the batch dimension.
* @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
// Thin Keras-style wrapper: all upsampling behavior is delegated to the BigDL
// UpSampling3D implementation; this class only mixes in Net.
class UpSampling3D[T: ClassTag](
    override val size: Array[Int] = Array(2, 2, 2),
    override val dimOrdering: String = "CHANNEL_FIRST",
    override val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends BigDLUpSampling3D[T](size, dimOrdering, inputShape) with Net {}
object UpSampling3D {
  /**
   * Factory for [[UpSampling3D]] taking the upsampling factors as an int tuple.
   * Throws IllegalArgumentException when `size` is null.
   */
  def apply[@specialized(Float, Double) T: ClassTag](
      size: (Int, Int, Int) = (2, 2, 2),
      dimOrdering: String = "th",
      inputShape: Shape = null)(implicit ev: TensorNumeric[T]): UpSampling3D[T] = {
    // Reject a missing size up front, then unpack the tuple into the array
    // form expected by the class constructor.
    if (size == null) {
      throw new IllegalArgumentException("For UpSampling3D, " +
        "size can not be null, please input int tuple of length 3")
    }
    val upSamplingFactors = Array(size._1, size._2, size._3)
    new UpSampling3D[T](upSamplingFactors, KerasUtils.toBigDLFormat5D(dimOrdering), inputShape)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/layers/UpSampling3D.scala | Scala | apache-2.0 | 2,690 |
package mesosphere.marathon
package core.appinfo.impl
import mesosphere.UnitTest
import mesosphere.marathon.core.appinfo.{AppInfo, GroupInfo, _}
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.state._
import mesosphere.marathon.test.GroupCreation
import scala.concurrent.Future
/**
 * Unit tests for DefaultInfoService: app/group lookups, selectors, embed
 * options and current-location augmentation, with mocked GroupManager and
 * AppInfoBaseData.
 */
class DefaultInfoServiceTest extends UnitTest with GroupCreation {
  "DefaultInfoService" should {
    "queryForAppId" in {
      Given("a group repo with some apps")
      val f = new Fixture
      f.groupManager.app(app1.id) returns Some(app1)
      // The base data stub echoes back an AppInfo for whatever app it is given.
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying for one App")
      val appInfo = f.infoService.selectApp(id = app1.id, embed = Set.empty, selector = Selector.all).futureValue
      Then("we get an appInfo for the app from the appRepo/baseAppData")
      appInfo.map(_.app.id).toSet should be(Set(app1.id))
      verify(f.groupManager, times(1)).app(app1.id)
      for (app <- Set(app1)) {
        verify(f.baseData, times(1)).appInfoFuture(app, Set.empty)
      }
      And("no more interactions")
      f.verifyNoMoreInteractions()
    }
    "queryForAppId passes embed options along" in {
      Given("a group repo with some apps")
      val f = new Fixture
      f.groupManager.app(app1.id) returns Some(app1)
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying for one App")
      val embed: Set[AppInfo.Embed] = Set(AppInfo.Embed.Tasks, AppInfo.Embed.Counts)
      f.infoService.selectApp(id = app1.id, embed = embed, selector = Selector.all).futureValue
      Then("we get the baseData calls with the correct embed info")
      for (app <- Set(app1)) {
        verify(f.baseData, times(1)).appInfoFuture(app, embed)
      }
    }
    "queryAll" in {
      Given("an app repo with some apps")
      val f = new Fixture
      val someGroup = createRootGroup(apps = someApps)
      f.groupManager.rootGroup() returns someGroup
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying all apps")
      val appInfos = f.infoService.selectAppsBy(Selector.all, embed = Set.empty).futureValue
      Then("we get appInfos for each app from the appRepo/baseAppData")
      appInfos.map(_.app.id).toSet should be(someApps.keys)
      verify(f.groupManager, times(1)).rootGroup()
      for (app <- someApps.values) {
        verify(f.baseData, times(1)).appInfoFuture(app, Set.empty)
      }
      And("no more interactions")
      f.verifyNoMoreInteractions()
    }
    "queryAll passes embed options along" in {
      Given("an app repo with some apps")
      val f = new Fixture
      val someGroup = createRootGroup(apps = someApps)
      f.groupManager.rootGroup() returns someGroup
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying all apps")
      val embed: Set[AppInfo.Embed] = Set(AppInfo.Embed.Tasks, AppInfo.Embed.Counts)
      f.infoService.selectAppsBy(Selector.all, embed = embed).futureValue
      Then("we get the base data calls with the correct embed")
      for (app <- someApps.values) {
        verify(f.baseData, times(1)).appInfoFuture(app, embed)
      }
    }
    "queryAll filters" in {
      Given("an app repo with some apps")
      val f = new Fixture
      val someGroup = createRootGroup(apps = someApps)
      f.groupManager.rootGroup() returns someGroup
      When("querying all apps with a filter that filters all apps")
      val appInfos = f.infoService.selectAppsBy(Selector.none, embed = Set.empty).futureValue
      Then("we get appInfos for no app from the appRepo/baseAppData")
      appInfos.map(_.app.id).toSet should be(Set.empty)
      verify(f.groupManager, times(1)).rootGroup()
      And("no more interactions")
      f.verifyNoMoreInteractions()
    }
    "queryForGroupId" in {
      Given("a group repo with some apps below the queried group id")
      val f = new Fixture
      f.groupManager.group(PathId("/nested")) returns someGroupWithNested.group(PathId("/nested"))
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying all apps in that group")
      val appInfos = f.infoService.selectAppsInGroup(PathId("/nested"), Selector.all, Set.empty).futureValue
      Then("we get appInfos for each app from the groupRepo/baseAppData")
      appInfos.map(_.app.id).toSet should be(someNestedApps.keys)
      verify(f.groupManager, times(1)).group(PathId("/nested"))
      for (app <- someNestedApps.values) {
        verify(f.baseData, times(1)).appInfoFuture(app, Set.empty)
      }
      And("no more interactions")
      f.verifyNoMoreInteractions()
    }
    "queryForGroupId passes embed infos along" in {
      Given("a group repo with some apps below the queried group id")
      val f = new Fixture
      f.groupManager.group(PathId("/nested")) returns someGroupWithNested.group(PathId("/nested"))
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      When("querying all apps in that group")
      val embed: Set[AppInfo.Embed] = Set(AppInfo.Embed.Tasks, AppInfo.Embed.Counts)
      f.infoService.selectAppsInGroup(PathId("/nested"), Selector.all, embed).futureValue
      Then("baseData was called with the correct embed options")
      for (app <- someNestedApps.values) {
        verify(f.baseData, times(1)).appInfoFuture(app, embed)
      }
    }
    "query for extended group information" in {
      Given("a group with apps")
      val f = new Fixture
      val rootGroup = someGroupWithNested
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      f.groupManager.group(rootGroup.id) returns Some(rootGroup)
      When("querying extending group information")
      val result = f.infoService.selectGroup(rootGroup.id, GroupInfoService.Selectors.all, Set.empty,
        Set(GroupInfo.Embed.Apps, GroupInfo.Embed.Groups))
      Then("The group info contains apps and groups")
      result.futureValue.get.maybeGroups should be(defined)
      result.futureValue.get.maybeApps should be(defined)
      result.futureValue.get.transitiveApps.get should have size 5
      result.futureValue.get.maybeGroups.get should have size 1
      When("querying extending group information without apps")
      val result2 = f.infoService.selectGroup(rootGroup.id, GroupInfoService.Selectors.all, Set.empty,
        Set(GroupInfo.Embed.Groups))
      Then("The group info contains no apps but groups")
      result2.futureValue.get.maybeGroups should be(defined)
      result2.futureValue.get.maybeApps should be(empty)
      When("querying extending group information without apps and groups")
      val result3 = f.infoService.selectGroup(rootGroup.id, GroupInfoService.Selectors.all, Set.empty, Set.empty)
      Then("The group info contains no apps nor groups")
      result3.futureValue.get.maybeGroups should be(empty)
      result3.futureValue.get.maybeApps should be(empty)
    }
    "Selecting with Group Selector filters the result" in {
      Given("a nested group with apps")
      val f = new Fixture
      val rootGroup = nestedGroup
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      f.groupManager.group(rootGroup.id) returns Some(rootGroup)
      // Only entities under /visible pass the selector.
      val selector = GroupInfoService.Selectors(
        Selector(_.id.toString.startsWith("/visible")),
        Selector(_.id.toString.startsWith("/visible")),
        Selector(_.id.toString.startsWith("/visible"))
      )
      When("querying extending group information with selector")
      val result = f.infoService.selectGroup(rootGroup.id, selector, Set.empty, Set(GroupInfo.Embed.Apps, GroupInfo.Embed.Groups))
      Then("The result is filtered by the selector")
      result.futureValue.get.maybeGroups should be(defined)
      result.futureValue.get.maybeApps should be(defined)
      result.futureValue.get.transitiveApps.get should have size 2
      result.futureValue.get.transitiveGroups.get should have size 2
    }
    "Selecting with App Selector implicitly gives access to parent groups" in {
      Given("a nested group with access to only nested app /group/app1")
      val f = new Fixture
      val rootId = PathId.empty
      val rootApp = AppDefinition(PathId("/app"), cmd = Some("sleep"))
      val nestedApp1 = AppDefinition(PathId("/group/app1"), cmd = Some("sleep"))
      val nestedApp2 = AppDefinition(PathId("/group/app2"), cmd = Some("sleep"))
      val nestedGroup = createGroup(PathId("/group"), Map(nestedApp1.id -> nestedApp1, nestedApp2.id -> nestedApp2))
      val rootGroup = createRootGroup(Map(rootApp.id -> rootApp), groups = Set(nestedGroup))
      f.baseData.appInfoFuture(any, any) answers { args =>
        Future.successful(AppInfo(args.head.asInstanceOf[AppDefinition]))
      }
      f.groupManager.group(rootId) returns Some(rootGroup)
      val selector = GroupInfoService.Selectors(
        Selector(_.id.toString.startsWith("/group/app1")),
        Selector(_ => false), // no pod
        Selector(_ => false) // no group
      )
      When("querying extending group information with selector")
      val result = f.infoService.selectGroup(rootId, selector, Set.empty, Set(GroupInfo.Embed.Apps, GroupInfo.Embed.Groups))
      Then("The result is filtered by the selector")
      result.futureValue.get.transitiveGroups.get should have size 1
      result.futureValue.get.transitiveGroups.get.head.group should be(nestedGroup)
      result.futureValue.get.transitiveApps.get should have size 1
      result.futureValue.get.transitiveApps.get.head.app should be(nestedApp1)
    }
  }
  // Shared mocks plus the service under test.
  class Fixture {
    lazy val groupManager = mock[GroupManager]
    lazy val baseData = mock[AppInfoBaseData]
    import scala.concurrent.ExecutionContext.Implicits.global
    lazy val infoService = new DefaultInfoService(groupManager, { () => baseData })
    def verifyNoMoreInteractions(): Unit = {
      noMoreInteractions(groupManager)
      noMoreInteractions(baseData)
    }
  }
  // Test data: flat apps, nested apps and pre-built group trees.
  private val app1: AppDefinition = AppDefinition(PathId("/test1"), cmd = Some("sleep"))
  val someApps = {
    val app2 = AppDefinition(PathId("/test2"), cmd = Some("sleep"))
    val app3 = AppDefinition(PathId("/test3"), cmd = Some("sleep"))
    Map(
      app1.id -> app1,
      app2.id -> app2,
      app3.id -> app3
    )
  }
  val someNestedApps = {
    val nestedApp1 = AppDefinition(PathId("/nested/test1"), cmd = Some("sleep"))
    val nestedApp2 = AppDefinition(PathId("/nested/test2"), cmd = Some("sleep"))
    Map(
      (nestedApp1.id, nestedApp1),
      (nestedApp2.id, nestedApp2)
    )
  }
  val someGroupWithNested = createRootGroup(
    apps = someApps,
    groups = Set(
      createGroup(
        id = PathId("/nested"),
        apps = someNestedApps
      )
    ))
  val nestedGroup = {
    val app1 = AppDefinition(PathId("/app1"), cmd = Some("sleep"))
    val visibleApp1 = AppDefinition(PathId("/visible/app1"), cmd = Some("sleep"))
    val visibleGroupApp1 = AppDefinition(PathId("/visible/group/app1"), cmd = Some("sleep"))
    val secureApp1 = AppDefinition(PathId("/secure/app1"), cmd = Some("sleep"))
    val secureGroupApp1 = AppDefinition(PathId("/secure/group/app1"), cmd = Some("sleep"))
    val otherApp1 = AppDefinition(PathId("/other/app1"), cmd = Some("sleep"))
    val otherGroupApp1 = AppDefinition(PathId("/other/group/app1"), cmd = Some("sleep"))
    createRootGroup(Map(app1.id -> app1), groups = Set(
      createGroup(PathId("/visible"), Map(visibleApp1.id -> visibleApp1), groups = Set(
        createGroup(PathId("/visible/group"), Map(visibleGroupApp1.id -> visibleGroupApp1))
      )),
      createGroup(PathId("/secure"), Map(secureApp1.id -> secureApp1), groups = Set(
        createGroup(PathId("/secure/group"), Map(secureGroupApp1.id -> secureGroupApp1))
      )),
      createGroup(PathId("/other"), Map(otherApp1.id -> otherApp1), groups = Set(
        createGroup(PathId("/other/group"), Map(otherGroupApp1.id -> otherGroupApp1)
        ))
      )))
  }
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/core/appinfo/impl/DefaultInfoServiceTest.scala | Scala | apache-2.0 | 12,572 |
package cn.hjmao.learning.akka.http.demo.api
/**
* Created by hjmao on 17-5-11.
*/
import akka.http.scaladsl.server.directives.{BasicDirectives, FutureDirectives, HeaderDirectives, RouteDirectives}
import akka.http.scaladsl.server.Directive1
import cn.hjmao.learning.akka.http.demo.model.UserEntity
import cn.hjmao.learning.akka.http.demo.service.AuthService
/** Akka HTTP directives for token-based request authentication. */
trait SecurityDirectives {
  import BasicDirectives._
  import HeaderDirectives._
  import RouteDirectives._
  import FutureDirectives._
  /**
   * Extracts the "Token" request header and resolves it to a user via the
   * auth service. Rejects the request when the header is missing or the
   * token does not map to a user.
   */
  def authenticate: Directive1[UserEntity] = {
    headerValueByName("Token").flatMap { token =>
      onSuccess(authService.authenticate(token)).flatMap {
        case Some(user) => provide(user)
        case None => reject
      }
    }
  }
  // Implementations must supply the authentication service.
  protected val authService: AuthService
}
| huajianmao/learning | framework/akka-http/demo/src/main/scala/cn/hjmao/learning/akka/http/demo/api/SecurityDirectives.scala | Scala | mit | 795 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.msb
import forms.{Form2, InvalidForm, ValidForm}
import jto.validation.{Path, ValidationError}
import models.moneyservicebusiness.BranchesOrAgentsHasCountries
import org.scalatest.MustMatchers
import play.api.i18n.Messages
import utils.{AmlsViewSpec, AutoCompleteServiceMocks}
import views.Fixture
import views.html.msb.branches_or_agents
/** View-level tests for the MSB "branches or agents" page: verifies the back
  * link, page title, headings and error rendering produced by the
  * `branches_or_agents` template. */
class branches_or_agents_has_countriesSpec extends AmlsViewSpec with MustMatchers {
  // Fixture: resolves the view under test from the app injector and provides a
  // tokenised request required by the template.
  trait ViewFixture extends Fixture with AutoCompleteServiceMocks {
    lazy val branches_or_agents = app.injector.instanceOf[branches_or_agents]
    implicit val requestWithToken = addTokenForView()
  }
  "branches_or_agents view" must {
    "have the back link button" in new ViewFixture {
      val form2: ValidForm[BranchesOrAgentsHasCountries] = Form2(BranchesOrAgentsHasCountries(true))
      def view = branches_or_agents(form2, edit = true)
      // The shared layout renders the back link with CSS class "link-back".
      doc.getElementsByAttributeValue("class", "link-back") must not be empty
    }
    "have correct title" in new ViewFixture {
      val form2: ValidForm[BranchesOrAgentsHasCountries] = Form2(BranchesOrAgentsHasCountries(true))
      def view = branches_or_agents(form2, edit = true)
      // Title is "<page> - <section> - <service> - <gov>" joined with " - ".
      doc.title must be(Messages("msb.branchesoragents.title") +
        " - " + Messages("summary.msb") +
        " - " + Messages("title.amls") +
        " - " + Messages("title.gov"))
    }
    "have correct headings" in new ViewFixture {
      val form2: ValidForm[BranchesOrAgentsHasCountries] = Form2(BranchesOrAgentsHasCountries(true))
      def view = branches_or_agents(form2, edit = true)
      heading.html must be(Messages("msb.branchesoragents.title"))
      subHeading.html must include(Messages("summary.msb"))
    }
    "show errors in the correct locations" in new ViewFixture {
      // An invalid form with a validation error attached to the
      // "hasCountries" path; the raw message text is asserted verbatim.
      val form2: InvalidForm = InvalidForm(Map.empty,
        Seq(
          (Path \\ "hasCountries") -> Seq(ValidationError("not a message Key"))
        ))
      def view = branches_or_agents(form2, edit = true)
      // The error must appear both in the summary and inline next to the field.
      errorSummary.html() must include("not a message Key")
      doc.getElementById("hasCountries")
        .getElementsByClass("error-notification").first().html() must include("not a message Key")
    }
  }
}
package recipestore
import com.google.inject.AbstractModule
import com.google.inject.name.Names
import net.codingwell.scalaguice.ScalaModule
/** Guice module providing bindings shared across the application. */
class CommonModule extends AbstractModule with ScalaModule {
  // Resource key under which the graphframe directory is configured.
  private[recipestore] val GRAPHFRAME_DIR: String = "graphframe_dir"
  // file:// URI of the graphframe directory.
  // NOTE(review): `.get` throws NoSuchElementException with no context when the
  // resource is missing — TODO confirm the resource is guaranteed to exist, or
  // fail with a descriptive message.
  final protected val graphDir: String = String.format("file://%s", ResourceLoader.apply(AppResource.GraphResource, GRAPHFRAME_DIR).get)

  /** Binds the graph directory URI as a String named "graphDirectory". */
  // Fixed: deprecated procedure syntax `def configure() {` replaced with an
  // explicit `: Unit =` result type.
  override protected def configure(): Unit = {
    bind(classOf[String]).annotatedWith(Names.named("graphDirectory")).toInstance(graphDir)
  }
}
package io.hydrosphere.mist.worker
import org.apache.spark.SparkContext
/** Helpers for interrogating a running Spark context. */
object SparkUtils {
  /** Address of the Spark web UI, if the UI is enabled for this context. */
  def getSparkUiAddress(sc: SparkContext): Option[String] = sc.uiWebUrl
}
| Hydrospheredata/mist | mist/worker/src/main/scala/io/hydrosphere/mist/worker/SparkUtils.scala | Scala | apache-2.0 | 181 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, BinaryOperator, Expression, IsNotNull, Literal}
import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.test.SQLTestUtils
/** Shared partition-pruning regression tests, parameterized over the table
  * `format` (subclasses supply e.g. a Hive or datasource format) and over how
  * to count scanned partitions (`getScanExecPartitionSize`). */
abstract class PrunePartitionSuiteBase extends QueryTest with SQLTestUtils with TestHiveSingleton {
  // Table provider used in CREATE TABLE ... USING <format>.
  protected def format: String
  test("SPARK-28169: Convert scan predicate condition to CNF") {
    withTempView("temp") {
      withTable("t") {
        // Partitioned table with 4 partitions p=1..4, each holding ids 0..999.
        sql(
          s"""
             |CREATE TABLE t(i INT, p STRING)
             |USING $format
             |PARTITIONED BY (p)""".stripMargin)
        spark.range(0, 1000, 1).selectExpr("id as col")
          .createOrReplaceTempView("temp")
        for (part <- Seq(1, 2, 3, 4)) {
          sql(
            s"""
               |INSERT OVERWRITE TABLE t PARTITION (p='$part')
               |SELECT col FROM temp""".stripMargin)
        }
        // Each case asserts: number of partitions actually scanned, and the
        // partition filter pushed down (after CNF conversion), "" when none.
        assertPrunedPartitions(
          "SELECT * FROM t WHERE p = '1' OR (p = '2' AND i = 1)", 2,
          "((`p` = '1') || (`p` = '2'))")
        assertPrunedPartitions(
          "SELECT * FROM t WHERE (p = '1' AND i = 2) OR (i = 1 OR p = '2')", 4,
          "")
        assertPrunedPartitions(
          "SELECT * FROM t WHERE (p = '1' AND i = 2) OR (p = '3' AND i = 3 )", 2,
          "((`p` = '1') || (`p` = '3'))")
        assertPrunedPartitions(
          "SELECT * FROM t WHERE (p = '1' AND i = 2) OR (p = '2' OR p = '3')", 3,
          "((`p` = '1') || ((`p` = '2') || (`p` = '3')))")
        assertPrunedPartitions(
          "SELECT * FROM t", 4,
          "")
        assertPrunedPartitions(
          "SELECT * FROM t WHERE p = '1' AND i = 2", 1,
          "(`p` = '1')")
        assertPrunedPartitions(
          """
            |SELECT i, COUNT(1) FROM (
            |SELECT * FROM t WHERE p = '1' OR (p = '2' AND i = 1)
            |) tmp GROUP BY i
          """.stripMargin, 2, "((`p` = '1') || (`p` = '2'))")
      }
    }
  }
  // Renders an expression as a stable string for comparison, stripping the
  // catalog/table qualifier from attribute names.
  private def getCleanStringRepresentation(exp: Expression): String = exp match {
    case attr: AttributeReference =>
      attr.sql.replaceAll("spark_catalog.default.t.", "")
    case l: Literal =>
      l.sql
    case e: BinaryOperator =>
      s"(${getCleanStringRepresentation(e.left)} ${e.symbol} " +
        s"${getCleanStringRepresentation(e.right)})"
  }
  // Runs `query` and checks both the scanned-partition count and the string
  // form of the pushed-down partition filters (IsNotNull filters excluded).
  protected def assertPrunedPartitions(
      query: String,
      expectedPartitionCount: Long,
      expectedPushedDownFilters: String): Unit = {
    val qe = sql(query).queryExecution
    val plan = qe.sparkPlan
    assert(getScanExecPartitionSize(plan) == expectedPartitionCount)
    // Collect partition filters from either datasource or Hive scans.
    val pushedDownPartitionFilters = qe.executedPlan.collectFirst {
      case scan: FileSourceScanExec => scan.partitionFilters
      case scan: HiveTableScanExec => scan.partitionPruningPred
    }.map(exps => exps.filterNot(e => e.isInstanceOf[IsNotNull]))
    val pushedFilters = pushedDownPartitionFilters.map(filters => {
      filters.foldLeft("")((currentStr, exp) => {
        if (currentStr == "") {
          s"${getCleanStringRepresentation(exp)}"
        } else {
          s"$currentStr AND ${getCleanStringRepresentation(exp)}"
        }
      })
    })
    assert(pushedFilters == Some(expectedPushedDownFilters))
  }
  // Implemented by subclasses: number of partitions read by the scan in `plan`.
  protected def getScanExecPartitionSize(plan: SparkPlan): Long
}
| shuangshuangwang/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/PrunePartitionSuiteBase.scala | Scala | apache-2.0 | 4,278 |
package nest.sparkle.time.server
import scala.concurrent.duration._
import com.typesafe.config.Config
import akka.util.Timeout
import akka.actor._
import spray.can.Http
import spray.http._
import HttpMethods._
import spray.can.Http.RegisterChunkHandler
import spray.http.HttpHeaders._
import nest.sparkle.store.WriteableStore
//
// We use a separate port and a lower level interface for now, due to limitations in the
// spray routing layer for chunked uploads. Revisit when this is fixed in akka.http.
//
/** A spray.io service that listens on a tcp port for http upload requests to the /file-upload
* port.
*
* Currently allows cross origin requests from any host, so be sure this port is locked down.
*/
/** A spray.io actor handling HTTP uploads on /file-upload.
  * Registers itself for each incoming connection and delegates the actual
  * parsing/storing of uploaded data to a per-request [[FileUploadHandler]].
  * CORS is currently wide open (any origin) — see the class-level warning. */
class FileUploadService(rootConfig: Config, store:WriteableStore) extends Actor with ActorLogging {
  implicit val timeout: Timeout = 1.second // for the actor 'asks'
  /** number of rows to read in a block */
  val batchSize = rootConfig.getInt("sparkle.files-loader.batch-size")
  // Header names advertised in Access-Control-Allow-Headers.
  def corsHeaders: List[String] = {
    val headerStrings = corsBaseHeaders.map(_.name)
    (headerStrings ::: corsAdditionalHeaders)
  }
  private lazy val corsBaseHeaders: List[ModeledCompanion] = List(`Content-Type`, Origin, Accept,
    `Accept-Encoding`, `Accept-Language`, Host, `User-Agent`, `Authorization`, `Cache-Control`,
    `Connection`, `Content-Length`)
  private lazy val corsAdditionalHeaders: List[String] = List("Referer", "Pragma", "X-Requested-With", "DNT", "Keep-Alive") // SPRAY why no Referer?
  private lazy val corsHeaderString = corsHeaders.mkString(", ")
  // Message protocol: connection registration, CORS preflight, non-chunked
  // POST (converted to a part stream), chunked POST, then fallbacks.
  def receive = {
    // when a new connection comes in we register ourselves as the connection handler
    case _: Http.Connected => sender ! Http.Register(self)
    // CORS preflight for the upload endpoint.
    case r@HttpRequest(OPTIONS, Uri.Path("/file-upload"), headers, entity, protocol) =>
      sender ! HttpResponse(
        status = 200,
        headers = List(`Access-Control-Allow-Origin`(AllOrigins), // TODO limit to only this host
          `Access-Control-Allow-Methods`(GET, POST, OPTIONS),
          `Access-Control-Allow-Headers`(corsHeaderString)
        ),
        entity = ""
      )
    case request@HttpRequest(POST, Uri.Path("/file-upload"), headers, entity: HttpEntity.NonEmpty, protocol) =>
      // emulate chunked behavior for POST requests to this path
      val parts = request.asPartStream()
      val client = sender
      // First part is the ChunkedRequestStart; remaining parts are forwarded
      // to the per-upload handler actor.
      val handler = context.actorOf(Props(
        new FileUploadHandler(rootConfig, store, batchSize, client,
          parts.head.asInstanceOf[ChunkedRequestStart])
      ))
      parts.tail.foreach(handler !)
    case request@ChunkedRequestStart(HttpRequest(POST, Uri.Path("/file-upload"), _, _, _)) =>
      // Genuinely chunked upload: let spray route subsequent chunks directly
      // to the handler actor.
      val client = sender
      val handler = context.actorOf(Props(new FileUploadHandler(rootConfig, store, batchSize, client, request)))
      sender ! RegisterChunkHandler(handler)
    case _: HttpRequest => sender ! HttpResponse(status = 404, entity = "Unknown resource!")
    case akka.io.Tcp.PeerClosed => // TODO close FileUploadHandler if necessary here
    case x =>
      log.debug(s"unexpected message: $x")
  }
}
| mighdoll/sparkle | protocol/src/main/scala/nest/sparkle/time/server/FileUploadService.scala | Scala | apache-2.0 | 3,181 |
/*
* Copyright (c) 2010 e.e d3si9n
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package scalaxb.compiler.xsd
import scalashim._
import scalaxb.compiler.{Config, Snippet, CaseClassTooLong, Log}
import scala.collection.mutable
import scala.collection.{Map}
import scala.xml._
abstract class GenSource(val schema: SchemaDecl,
val context: XsdContext) extends Parsers with XMLOutput {
  private val logger = Log.forName("xsd.GenSource")
  // Alias for partial functions, used throughout the generator.
  type =>?[A, B] = PartialFunction[A, B]
  // Top-level element declarations of the schema being generated.
  val topElems = schema.topElems
  val elemList = schema.elemList
  // Parameter name used for the mixed-content Seq in generated case classes.
  val MIXED_PARAM = "mixed"
  // Lens-code generator, active when config.generateLens is set.
  val genLens = new GenScalazLens(config)
  /** Generates all code snippets for this schema: a header comment, an
    * optional lens import, one snippet per top-level complex/simple type
    * (skipping types duplicated in other schemas), then groups and attribute
    * groups, merged into a single [[Snippet]]. */
  def run: Snippet = {
    logger.debug("run")
    // Snippets are accumulated in emission order.
    val snippets = mutable.ListBuffer.empty[Snippet]
    snippets += Snippet(makeSchemaComment, Nil, Nil, Nil)
    if(config.generateLens){
      snippets += Snippet(<source>{genLens.buildImport}</source>, Nil, Nil, Nil)
    }
    schema.typeList map {
      case decl: ComplexTypeDecl if !context.duplicatedTypes.contains((schema, decl)) =>
        // A type with registered subtypes becomes a trait, plus a concrete
        // default implementation unless the type is abstract.
        if (context.baseToSubs.contains(decl)) {
          snippets += makeTrait(decl)
          if (!decl.abstractValue) snippets += makeSuperType(decl)
        }
        else snippets += makeType(decl)
      case decl: SimpleTypeDecl if !context.duplicatedTypes.contains((schema, decl)) =>
        // Simple types only get code when they carry enumeration facets.
        if (containsEnumeration(decl)) snippets += makeEnumType(decl)
      case _ =>
    }
    // Only groups owned by this schema are emitted here.
    for ((sch, group) <- context.groups if sch == this.schema)
      snippets += makeGroup(group)
    for (group <- schema.topAttrGroups.valuesIterator)
      snippets += makeAttributeGroup(group)
    Snippet(snippets: _*)
  }
def makeSuperType(decl: ComplexTypeDecl): Snippet = {
val localName = makeProtectedTypeName(schema.targetNamespace, decl, context)
val fqn = buildFullyQualifiedNameFromNS(schema.targetNamespace, localName)
makeCaseClassWithType(localName, fqn, decl)
}
def makeType(decl: ComplexTypeDecl): Snippet =
makeCaseClassWithType(buildTypeName(decl, true), buildTypeName(decl, false), decl)
def types(namespace: Option[String], name: String) =
(for (schema <- schemas;
if schema.targetNamespace == namespace;
if schema.topTypes.contains(name))
yield schema.topTypes(name)) match {
case x :: xs => x
case Nil => sys.error("Type not found: {" + namespace + "}:" + name)
}
def baseToDescendants(base: ComplexTypeDecl): List[ComplexTypeDecl] =
context.baseToSubs(base) flatMap { child =>
child :: (
if (context.baseToSubs.contains(child)) baseToDescendants(child)
else Nil
)
}
  /** Emits the Scala `trait` for a complex type that has registered subtypes,
    * together with its default XMLFormat: reads dispatch on the element's
    * xsi:type to the matching named descendant, writes dispatch on the runtime
    * class of the subtype. Compositors are only emitted here when the type is
    * abstract (otherwise makeSuperType's case class emits them). */
  def makeTrait(decl: ComplexTypeDecl): Snippet = {
    val localName = buildTypeName(decl, true)
    val fqn = buildTypeName(decl, false)
    val formatterName = buildFormatterName(decl.namespace, localName)
    logger.debug("makeTrait: emitting " + fqn)
    val effectiveMixed = buildEffectiveMixed(decl)
    // Mixed content suppresses individual child-element vals.
    val childElements = if (effectiveMixed) Nil
      else flattenElements(decl)
    val list = List.concat[Decl](childElements, flattenAttributes(decl))
    val paramList = list map { buildParam }
    val defaultType = buildFullyQualifiedNameFromNS(schema.targetNamespace, makeProtectedTypeName(schema.targetNamespace, decl, context))
    val argList = list map {
      case any: AnyAttributeDecl => buildArgForAnyAttribute(decl, false)
      case x => buildArg(x)
    }
    val superNames = buildSuperNames(decl)
    val extendString = if (superNames.isEmpty) ""
      else " extends " + superNames.mkString(" with ")
    // One read-dispatch case per named descendant: matches (namespace, name).
    def makeCaseEntry(decl: ComplexTypeDecl) =
      "case (" + quoteNamespace(decl.namespace) + ", " + quote(Some(decl.family.head)) + ") => " +
        "Right(" + buildFromXML(buildTypeName(decl, false), "node", Some("stack"), None) + ")"
    // One write-dispatch case per direct subtype, keyed on runtime class.
    def makeToXmlCaseEntry(decl: ComplexTypeDecl) =
      "case x: " + buildTypeName(decl, false) + " => " +
        buildToXML(buildTypeName(decl, false), "x, __namespace, __elementLabel, __scope, true")
    val compositors = context.compositorParents.filter(_._2 == decl).keysIterator.toList
    // val extendedSubtypes = context.baseToSubs(decl) filter { sub =>
    //   !schema.typeList.contains(sub) && !dependentSchemas.exists(_.typeList.contains(sub)) }
    // val extendedSchemas = (for (sub <- extendedSubtypes;
    //     sch <- context.schemas; if sch.typeList.contains(sub))
    //   yield sch).toList.distinct
    // val imports = extendedSchemas map { sch =>
    //   val pkg = packageName(sch, context)
    //   val name = context.typeNames(pkg)(sch)
    //   "import " + pkg.map(_ + ".").getOrElse("") + buildDefaultProtocolName(name) + "._"
    // }
    // Template: the trait body declares one abstract val per parameter.
    val traitCode = <source>{ buildComment(decl) }trait {localName}{extendString} {{
  {
    val vals = for (param <- paramList)
      yield "val " + param.toTraitScalaCode
    vals.mkString(newline + indent(1))}
}}</source>
    val compDepth = 1
    // Template: default XMLFormat with xsi:type-based reads and class-based writes.
    val defaultFormats = <source>  trait Default{formatterName} extends scalaxb.XMLFormat[{fqn}] {{
    { // if (imports.isEmpty) ""
      // else imports.mkString(newline + indent(2)) + newline + indent(2)
    }def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, {fqn}] = seq match {{
      case node: scala.xml.Node =>
        scalaxb.Helper.instanceType(node) match {{
          { val cases = for (sub <- baseToDescendants(decl) if sub.isNamed)
              yield makeCaseEntry(sub)
            cases.mkString(newline + indent(4 + compDepth))
          }
          { if (!decl.abstractValue) "case _ => Right(" + buildFromXML(defaultType, "node", Some("stack"), None) + ")"
            else """case x => Left("Unknown type: " + x)""" }
        }}
      case _ => Left("reads failed: seq must be scala.xml.Node")
    }}
    def writes(__obj: {fqn}, __namespace: Option[String], __elementLabel: Option[String],
        __scope: scala.xml.NamespaceBinding, __typeAttribute: Boolean): scala.xml.NodeSeq = __obj match {{
      { val cases = for (sub <- context.baseToSubs(decl))
          yield makeToXmlCaseEntry(sub)
        cases.mkString(newline + indent(2 + compDepth))
      }
      { if (!decl.abstractValue) "case x: " + defaultType + " => " +
          buildToXML(defaultType, "x, __namespace, __elementLabel, __scope, false")
        else """case _ => sys.error("Unknown type: " + __obj)"""
      }
    }}
  }}</source>
    val compositorCodes: Seq[Snippet] = if (decl.abstractValue) compositors map { makeCompositor }
      else Nil
    Snippet(Snippet(traitCode, <source/>, defaultFormats, makeImplicitValue(fqn, formatterName)) +:
      compositorCodes: _*)
  }
  /** Renders the `implicit lazy val` exposing the default XMLFormat for `fqn`. */
  def makeImplicitValue(fqn: String, formatterName: String): Node =
    <source>  implicit lazy val {formatterName}: scalaxb.XMLFormat[{fqn}] = new Default{formatterName} {{}}</source>
  /** Renders the `implicit lazy val` exposing the AttributeGroupFormat for an
    * attribute group. */
  def makeImplicitValue(group: AttributeGroupDecl): Node = {
    val formatterName = buildFormatterName(group)
    val fqn = buildTypeName(group, false)
    <source>  implicit lazy val {formatterName}: scalaxb.AttributeGroupFormat[{fqn}] = new Default{formatterName} {{}}</source>
  }
def isQualifyAsIRIStyle(decl: ComplexTypeDecl): Boolean = {
val primary = decl.content match {
case ComplexContentDecl(CompContRestrictionDecl(_, x, _)) => x
case ComplexContentDecl(CompContExtensionDecl(_, x, _)) => x
case _ => None
}
primary match {
case Some(SequenceDecl(_, _, _, _, _)) =>
val flatParticles = flattenElements(decl)
val attributes = flattenAttributes(decl)
flatParticles.forall(_.typeSymbol match {
case AnyType(symbol) => false
case symbol: BuiltInSimpleTypeSymbol => true
case ReferenceTypeSymbol(decl: SimpleTypeDecl) => true
case _ => false
}) && attributes.isEmpty
case _ => false
}
}
  /** Emits a case class for a complex type plus its default XMLFormat.
    * Handles mixed content, long attribute maps (attributes folded into a
    * single `attributes: ListMap` param when they would overflow the case
    * class), long `all` groups, simple content, varargs single-param classes,
    * accessors, and optional lens generation. Statement order matters: many
    * locals feed the XML templates rendered at the end. */
  def makeCaseClassWithType(localName: String, fqn: String, decl: ComplexTypeDecl): Snippet = {
    logger.debug("makeCaseClassWithType: emitting " + fqn)
    val formatterName = buildFormatterName(decl.namespace, localName)
    val primary = decl.content match {
      case ComplexContentDecl(CompContRestrictionDecl(_, x, _)) => x
      case ComplexContentDecl(CompContExtensionDecl(_, x, _)) => x
      case _ => None
    }
    val effectiveMixed = buildEffectiveMixed(decl)
    val superNames: List[String] =
      if (context.baseToSubs.contains(decl)) List(buildTypeName(decl, true))
      else buildSuperNames(decl)
    val flatParticles = flattenElements(decl)
    // val particles = buildParticles(decl, name)
    val childElements = if (effectiveMixed) flattenMixed(decl)
      else flatParticles
    val attributes = flattenAttributes(decl)
    // longAttribute: fold all attributes into one map parameter to stay under
    // the case-class parameter limit.
    val longAttribute = (!namedAttributes && !attributes.isEmpty) ||
      (attributes.size + childElements.size > contentsSizeLimit &&
      childElements.size + 1 <= contentsSizeLimit)
    val list = if (longAttribute) List.concat[Decl](childElements, List(buildLongAttributeRef))
      else List.concat[Decl](childElements, attributes)
    val paramList = list map { buildParam }
    // val dependents = ((flatParticles flatMap { buildDependentType } collect {
    //   case ReferenceTypeSymbol(d: ComplexTypeDecl) if d != decl => d
    // }).toList ++ (attributes collect {
    //   case group: AttributeGroupDecl => group
    // }).toList).distinct
    val unmixedParserList = flatParticles map { buildParser(_, effectiveMixed, effectiveMixed, false) }
    // Mixed content interleaves text parsers between element parsers.
    val parserList = if (effectiveMixed) buildTextParser :: (unmixedParserList flatMap { List(_, buildTextParser) })
      else unmixedParserList
    val parserVariableList = ( 0 to parserList.size - 1) map { buildSelector }
    val longAll: Boolean = primary match {
      case Some(all: AllDecl) if isLongAll(all, decl.namespace, decl.family) => true
      case _ => false
    }
    val particleArgs = if (effectiveMixed) (0 to parserList.size - 1).toList map { i =>
      if (i % 2 == 1) buildArgForMixed(flatParticles((i - 1) / 2), i, false)
      else buildArgForOptTextRecord(i) }
    else primary match {
      case Some(all: AllDecl) => all.particles map { buildArgForAll(_, longAll) }
      case _ => (0 to flatParticles.size - 1).toList map { i => buildArg(flatParticles(i), i) }
    }
    val accessors = (primary match {
      case Some(all: AllDecl) if longAll => generateAccessors(all)
      case _ => generateAccessors(paramList, splitSequences(decl))
    }) ::: (if (longAttribute) generateAccessors(attributes) else Nil)
    logger.debug("makeCaseClassWithType: generateAccessors " + accessors)
    val compositors = context.compositorParents.filter(
      x => x._2 == decl).keysIterator.toList
    val extendString = if (superNames.isEmpty) ""
      else " extends " + superNames.mkString(" with ")
    // Single repeated param can be rendered as varargs (unless lens generation
    // needs a stable Seq parameter).
    val hasSequenceParam = (paramList.size == 1) && (paramList.head.cardinality == Multiple) &&
      (!paramList.head.attribute) && (!effectiveMixed) && (!longAll) && (config.useVarArg) && (!config.generateLens)
    def paramsString = if (hasSequenceParam) makeParamName(paramList.head.name, false) + ": " +
      paramList.head.singleTypeName + "*"
      else paramList.map(_.toScalaCode).mkString("," + newline + indent(1))
    val defLenses = config.generateLens match {
      case true => paramList.map( param => genLens.buildDefLens(localName, param)).mkString(newline + indent(1))
      case false => ""
    }
    val defComposeLenses = config.generateLens match {
      case true => paramList.map( param => genLens.buildDefComposeLens(localName, param)).mkString(newline + indent(1))
      case false => ""
    }
    // simpleFromXml: reads can be built directly from the node (no parser).
    val simpleFromXml: Boolean = if (flatParticles.isEmpty && !effectiveMixed) true
      else (decl.content, primary) match {
        case (x: SimpleContentDecl, _) => true
        case (_, Some(all: AllDecl)) => true
        case _ => false
      }
    // Renders the constructor-argument list for the generated reads body.
    def argsString = if (hasSequenceParam) particleArgs.head + ": _*"
      else {
        val particleString = if (effectiveMixed) "Seq.concat(" + particleArgs.mkString("," + newline + indent(4)) + ")"
          else if (longAll) "scala.collection.immutable.ListMap(List(" + newline +
            indent(4) + particleArgs.mkString("," + newline + indent(4)) + ").flatten[(String, scalaxb.DataRecord[Any])]: _*)"
          else decl.content match {
            case simp@SimpleContentDecl(SimpContRestrictionDecl(base: XsTypeSymbol, _, _, _)) =>
              buildArg(simp, base)
            case simp@SimpleContentDecl(SimpContExtensionDecl(base: XsTypeSymbol, _)) =>
              buildArg(simp, base)
            case _ => particleArgs.mkString("," + newline + indent(4))
          }
        val attributeString = if (attributes.isEmpty) ""
          else {
            val notAnyAttributes = attributes filter {
              case any: AnyAttributeDecl => false
              case _ => true
            }
            val anyAttributes = attributes filter {
              case any: AnyAttributeDecl => true
              case _ => false
            }
            if (longAttribute) {
              val nonAnyString = if (notAnyAttributes.isEmpty) ""
                else "List(" + newline +
                  indent(4) + (notAnyAttributes map { x =>
                    buildArgForAttribute(x, Some("node"), longAttribute) }).mkString("," + newline + indent(4)) + newline +
                  indent(4) + ").flatten[(String, scalaxb.DataRecord[Any])]"
              val anyString = if (anyAttributes.isEmpty) ""
                else "(" + buildArgForAnyAttribute(decl, longAttribute) + ")"
              "scala.collection.immutable.ListMap(" +
                (if (nonAnyString != "" && anyString != "") nonAnyString + " ::: " + anyString
                 else nonAnyString + anyString ) + ": _*)"
            }
            else (attributes map {
              case any: AnyAttributeDecl => buildArgForAnyAttribute(decl, longAttribute)
              case x => buildArgForAttribute(x, Some("node"), longAttribute)
            }).mkString("," + newline + indent(4))
          } // if-else
        if (!particleString.isEmpty && !attributeString.isEmpty) particleString + "," + newline +
          indent(4) + attributeString
        else particleString + attributeString
      }
    val childElemParams = paramList.filter(!_.attribute)
    // Renders the writesChildNodes override for the generated format.
    def makeWritesChildNodes = {
      def simpleContentString(base: XsTypeSymbol) = base match {
        case AnyType(symbol) =>
          "__obj.value.value match {" + newline +
          indent(4) + "case elem: scala.xml.Elem => elem.child" + newline +
          indent(4) + "case _ => Seq(scala.xml.Text(__obj.value.value.toString))" + newline +
          indent(3) + "}"
        case _ => "Seq(scala.xml.Text(__obj.value.toString))"
      }
      def childString(decl: ComplexTypeDecl): String =
        if (effectiveMixed) "__obj." + makeParamName(MIXED_PARAM, false) +
          ".toSeq flatMap { x => " + buildToXML("scalaxb.DataRecord[Any]", "x, x.namespace, x.key, __scope, false") + " }"
        else decl.content.content match {
          case SimpContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _, _) => childString(base)
          case SimpContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _) => childString(base)
          case SimpContRestrictionDecl(base: XsTypeSymbol, _, _, _) => simpleContentString(base)
          case SimpContExtensionDecl(base: XsTypeSymbol, _) => simpleContentString(base)
          case _ =>
            if (childElemParams.isEmpty) "Nil"
            else if (childElemParams.size == 1) "(" + buildXMLString(childElemParams(0)) + ")"
            else childElemParams.map(x =>
              buildXMLString(x)).mkString("Seq.concat(", "," + newline + indent(4), ")")
        }
      <source>    def writesChildNodes(__obj: {fqn}, __scope: scala.xml.NamespaceBinding): Seq[scala.xml.Node] =
      {childString(decl)}</source>
    }
    val groups = filterGroup(decl).distinct filter { g => primaryCompositor(g).particles.size > 0 }
    val defaultFormatSuperNames: List[String] = "scalaxb.ElemNameParser[" + fqn + "]" :: groups.map(g =>
      buildFormatterName(g.namespace, groupTypeName(g))).distinct
    val caseClassCode = <source>{ buildComment(decl) }case class {localName}({paramsString}){extendString}{ if (accessors.size == 0) ""
      else " {" + newline +
        indent(1) + accessors.mkString(newline + indent(1)) + newline +
        "}" + newline}
{if(config.generateLens){genLens.buildObjectLens(localName, defLenses, defComposeLenses)}}
</source>
    def defaultFormats = if (simpleFromXml) <source>  trait Default{formatterName} extends scalaxb.XMLFormat[{fqn}] with scalaxb.CanWriteChildNodes[{fqn}] {{
    val targetNamespace: Option[String] = { quote(schema.targetNamespace) }
    import scalaxb.ElemName._
    def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, {fqn}] = seq match {{
      case node: scala.xml.Node => Right({fqn}({argsString}))
      case _ => Left("reads failed: seq must be scala.xml.Node")
    }}
{makeWritesAttribute}{makeWritesChildNodes}
  }}</source>
    else <source>  trait Default{formatterName} extends {defaultFormatSuperNames.mkString(" with ")} {{
    val targetNamespace: Option[String] = { quote(schema.targetNamespace) }
    { if (decl.isNamed) "override def typeName: Option[String] = Some(" + quote(decl.name) + ")" + newline + newline + indent(2)
      else ""
    }{ if (effectiveMixed) "override def isMixed: Boolean = true" + newline + newline + indent(2)
      else "" }def parser(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[{fqn}] =
      phrase({ parserList.mkString(" ~ " + newline + indent(3)) } ^^
      {{ case { parserVariableList.mkString(" ~ ") } =>
      {fqn}({argsString}) }})
{makeWritesAttribute}{makeWritesChildNodes}  }}</source>
    // Renders the writesAttribute override; long attribute maps iterate the
    // stored ListMap, otherwise each attribute is emitted individually.
    def makeWritesAttribute = if (attributes.isEmpty) <source></source>
      else if (longAttribute) {
        val cases = attributes collect {
          case attr: AttributeDecl => "case (" + quote(buildNodeName(attr, false)) + ", _) => " + buildAttributeString(attr)
          case ref: AttributeRef =>
            val attr = buildAttribute(ref)
            "case (" + quote(buildNodeName(attr, false)) + ", _) => " + buildAttributeString(attr)
          case group: AttributeGroupDecl => "case (" + quote(buildNodeName(group)) + ", _) => " + buildAttributeString(group)
        }
        val caseString = if (cases.isEmpty) ""
          else cases.mkString(newline + indent(4)) + newline + indent(4)
        <source>    override def writesAttribute(__obj: {fqn}, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {{
      var attr: scala.xml.MetaData = scala.xml.Null
      __obj.{makeParamName(ATTRS_PARAM, false)}.toList map {{
        {caseString}case (key, x) => attr = scala.xml.Attribute((x.namespace map {{ __scope.getPrefix(_) }}).orNull, x.key.orNull, x.value.toString, attr)
      }}
      attr
    }}</source>
      } else <source>    override def writesAttribute(__obj: {fqn}, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {{
      var attr: scala.xml.MetaData = scala.xml.Null
      { attributes.map(x => buildAttributeString(x)).mkString(newline + indent(3)) }
      attr
    }}</source>
    val compositorCodes = compositors map { makeCompositor }
    Snippet(Snippet(caseClassCode, <source/>, defaultFormats, makeImplicitValue(fqn, formatterName)) +:
      compositorCodes: _*)
  }
def buildComment(p: Product) = p match {
case decl: TypeDecl =>
if (schema.typeToAnnotatable.contains(decl))
makeAnnotation(schema.typeToAnnotatable(decl).annotation) + newline
else makeAnnotation(decl.annotation) + newline
case anno: Annotatable =>
makeAnnotation(anno.annotation) + newline
case _ => ""
}
// family is used to split sequences.
def makeCompositor(compositor: HasParticle): Snippet = compositor match {
case seq: SequenceDecl => makeSequence(seq)
case _ =>
val superNames: List[String] = buildOptions(compositor)
val superString = if (superNames.isEmpty) ""
else " extends " + superNames.mkString(" with ")
val localName = makeTypeName(context.compositorNames(compositor))
Snippet(<source>trait {localName}{superString}</source>)
}
  /** Emits a case class for a (already split) sequence compositor, plus a
    * write-only XMLFormat (reads are never invoked for inner sequences) and
    * its implicit value. */
  def makeSequence(seq: SequenceDecl): Snippet = {
    val localName = makeTypeName(context.compositorNames(seq))
    val fqn = buildFullyQualifiedNameFromNS(schema.targetNamespace, localName)
    val formatterName = buildFormatterName(schema.targetNamespace, localName)
    logger.debug("makeSequence: emitting " + fqn)
    // pass in local name for the family.
    // since the sequence is already split at this point, it does not require resplitting.
    val particles = flattenElements(schema.targetNamespace, List(localName), seq, 0, false)
    val paramList = particles map { buildParam }
    val defLenses = config.generateLens match {
      case true => paramList.map( param => genLens.buildDefLens(localName, param)).mkString(newline + indent(1))
      case false => ""
    }
    val defComposeLenses = config.generateLens match {
      case true => paramList.map( param => genLens.buildDefComposeLens(localName, param)).mkString(newline + indent(1))
      case false => ""
    }
    // Single repeated param renders as varargs.
    val hasSequenceParam = (paramList.size == 1) &&
      (paramList.head.cardinality == Multiple) &&
      (!paramList.head.attribute)
    val paramsString = if (hasSequenceParam)
        makeParamName(paramList.head.name, false) + ": " + paramList.head.singleTypeName + "*"
      else paramList.map(_.toScalaCode).mkString("," + newline + indent(1))
    def makeWritesXML = <source>    def writes(__obj: {fqn}, __namespace: Option[String], __elementLabel: Option[String],
        __scope: scala.xml.NamespaceBinding, __typeAttribute: Boolean): scala.xml.NodeSeq =
      {childString}</source>
    def childString = if (paramList.isEmpty) "Nil"
      else if (paramList.size == 1) buildXMLString(paramList(0))
      else paramList.map(x =>
        buildXMLString(x)).mkString("Seq.concat(", "," + newline + indent(4), ")")
    val superNames: List[String] = buildOptions(seq)
    val superString = if (superNames.isEmpty) ""
      else " extends " + superNames.mkString(" with ")
    Snippet(<source>{ buildComment(seq) }case class {localName}({paramsString}){superString}
{if(config.generateLens){genLens.buildObjectLens(localName, defLenses, defComposeLenses)}}</source>,
      <source/>,
      <source>  trait Default{formatterName} extends scalaxb.XMLFormat[{fqn}] {{
    def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, {fqn}] = Left("don't call me.")
{makeWritesXML}
  }}</source>,
      makeImplicitValue(fqn, formatterName))
  }
  /** Emits the parser mixin trait for a named model group: parse / wrapped
    * parse / mixed parse methods for the group's primary compositor, plus any
    * nested compositor classes. No case class is emitted for the group itself. */
  def makeGroup(group: GroupDecl): Snippet = {
    val compositors = context.compositorParents.filter(
      x => x._2 == makeGroupComplexType(group)).keysIterator.toList
    val localName = makeTypeName(context.compositorNames(group))
    val fqn = buildFullyQualifiedNameFromNS(schema.targetNamespace, localName)
    val formatterName = buildFormatterName(group.namespace, localName)
    logger.debug("makeGroup: emitting " + fqn)
    val compositor = primaryCompositor(group)
    val param = buildParam(compositor)
    val o = buildOccurrence(compositor).toSingle
    // Choices already produce a DataRecord; anything else is wrapped in one.
    val wrapperParam = compositor match {
      case choice: ChoiceDecl => param
      case _ => param.copy(typeSymbol = XsDataRecord(param.typeSymbol))
    }
    val mixedParam = param.copy(typeSymbol = XsDataRecord(XsAnyType))
    val parser = buildCompositorParser(compositor, o, false, false, false)
    val wrapperParser = compositor match {
      case choice: ChoiceDecl => parser
      case _ => buildCompositorParser(compositor, o, false, true, false)
    }
    val mixedparser = buildCompositorParser(compositor, o, true, true, false)
    val groups = filterGroup(compositor).distinct
    // Referenced groups become super-traits; otherwise fall back to the base parser.
    val superNames: List[String] =
      if (groups.isEmpty) List("scalaxb.AnyElemNameParser")
      else groups.map { g => buildFormatterName(g.namespace, groupTypeName(g)) }
    val defaultFormats = if (compositor.particles.size == 0) <source></source>
    else <source>{ buildComment(group) }  trait {formatterName} extends {superNames.mkString(" with ")} {{
    def parse{localName}(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[{param.baseTypeName}] =
      {parser}
    def parse{localName}(node: scala.xml.Node, stack: List[scalaxb.ElemName], wrap: Boolean): Parser[{wrapperParam.baseTypeName}] =
      {wrapperParser}
    def parsemixed{localName}(node: scala.xml.Node, stack: List[scalaxb.ElemName]): Parser[Seq[{mixedParam.baseTypeName}]] =
      {mixedparser}
  }}</source>
    val compositorCodes = compositors map { makeCompositor }
    Snippet(Snippet(Nil, Nil, defaultFormats, Nil) +:
      compositorCodes: _*)
  }
  /** Emits the code for an attribute group: a case class holding the flattened
    * attributes and an AttributeGroupFormat that reads them from a node and
    * writes them back as XML metadata. */
  def makeAttributeGroup(group: AttributeGroupDecl): Snippet = {
    val localName = buildTypeName(group, true)
    val fqn = buildTypeName(group, false)
    val formatterName = buildFormatterName(group.namespace, localName)
    logger.debug("makeAttributeGroup: emitting " + fqn)
    // Nested attribute groups are flattened into a single parameter list.
    val attributes = flattenAttributes(group.attributes)
    val paramList = attributes map { buildParam }
    val argList = attributes map {
      case any: AnyAttributeDecl => buildArgForAnyAttribute(group, false)
      case x => buildArg(x)
    }
    val paramsString = paramList.map(
      _.toScalaCode).mkString("," + newline + indent(1))
    val argsString = argList.mkString("," + newline + indent(3))
    val attributeString = attributes.map(x => buildAttributeString(x)).mkString(newline + indent(2))
    val caseClassCode = <source>{ buildComment(group) }case class {localName}({paramsString})</source>
    val defaultFormats = <source> trait Default{formatterName} extends scalaxb.AttributeGroupFormat[{fqn}] {{
    val targetNamespace: Option[String] = { quote(schema.targetNamespace) }
    def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, {fqn}] = seq match {{
      case node: scala.xml.Node => Right({fqn}({argsString}))
      case _ => Left("reads failed: seq must be scala.xml.Node")
    }}
    def toAttribute(__obj: {fqn}, __attr: scala.xml.MetaData, __scope: scala.xml.NamespaceBinding): scala.xml.MetaData = {{
      var attr: scala.xml.MetaData = __attr
      {attributeString}
      attr
    }}
  }}</source>
    Snippet(caseClassCode,
      <source/>,
      defaultFormats,
      makeImplicitValue(group))
  }
  /** Emits the code for an XSD enumeration simple type: a sealed-style trait,
    * one case object per enumeration value, a fromString factory, and the
    * XMLFormat wiring. An empty enumeration degenerates to a unit case class. */
  def makeEnumType(decl: SimpleTypeDecl) = {
    val localName = buildTypeName(decl, true)
    val fqn = buildTypeName(decl, false)
    val formatterName = buildFormatterName(decl.namespace, localName)
    val enums = filterEnumeration(decl).distinct
    // One case object per enumeration value; toString yields the XSD lexical value.
    // NOTE(review): the parameter name `enum` is a hard keyword in Scala 3.
    def makeEnum(enum: EnumerationDecl[_]) =
      "case object " + buildTypeName(localName, enum, true) + " extends " + localName +
      " { override def toString = " + quote(enum.value.toString) + " }"
    def makeCaseEntry(enum: EnumerationDecl[_]) =
      indent(2) + "case " + quote(enum.value.toString) + " => " + buildTypeName(localName, enum, true) + newline
    val enumString = enums.map(makeEnum).mkString(newline)
    // QName-based enumerations must resolve prefixes against the node's scope
    // before matching; everything else matches the raw string value.
    def valueCode: String =
      (decl.content match {
        case SimpTypRestrictionDecl(base, _) => Some(base)
        case _ => None
      }) match {
        case Some(XsQName) => """({ val (ns, localPart) = scalaxb.Helper.splitQName(value, scope)
          new javax.xml.namespace.QName(ns.orNull, localPart).toString })"""
        case _ => "value"
      }
    val traitCode = enums match {
      case Nil =>
        <source>case class {localName}()
object {localName} {{
  def fromString(value: String, scope: scala.xml.NamespaceBinding): {localName} = {localName}()
}}</source>
      case _ =>
        <source>trait {localName}
object {localName} {{
  def fromString(value: String, scope: scala.xml.NamespaceBinding): {localName} = {valueCode} match {{
    { enums.map(e => makeCaseEntry(e)) }
  }}
}}
{ enumString }</source>
    } // match
    Snippet(traitCode,
      Nil,
      <source> def build{formatterName} = new Default{formatterName} {{}}
  trait Default{formatterName} extends scalaxb.XMLFormat[{fqn}] {{
    val targetNamespace: Option[String] = { quote(schema.targetNamespace) }
    def reads(seq: scala.xml.NodeSeq, stack: List[scalaxb.ElemName]): Either[String, {fqn}] = seq match {{
      case elem: scala.xml.Elem => Right({fqn}.fromString(elem.text, elem.scope))
      case _ => Right({fqn}.fromString(seq.text, scala.xml.TopScope))
    }}
    def writes(__obj: {fqn}, __namespace: Option[String], __elementLabel: Option[String],
        __scope: scala.xml.NamespaceBinding, __typeAttribute: Boolean): scala.xml.NodeSeq =
      scala.xml.Elem(scalaxb.Helper.getPrefix(__namespace, __scope).orNull,
        __elementLabel getOrElse {{ sys.error("missing element label.") }},
        scala.xml.Null, __scope, true, scala.xml.Text(__obj.toString))
  }}</source>,
      makeImplicitValue(fqn, formatterName))
  }
def buildEffectiveMixed(decl: ComplexTypeDecl): Boolean =
decl.content match {
case x: ComplexContentDecl if decl.mixed => true
case x: SimpleContentDecl => false
case x: ComplexContentDecl =>
x.content.base match {
case ReferenceTypeSymbol(base: ComplexTypeDecl) => buildEffectiveMixed(base)
case _ => false
}
}
def buildSuperNames(decl: ComplexTypeDecl) =
buildSuperName(decl) ::: buildOptions(decl)
def buildSuperName(decl: ComplexTypeDecl) =
decl.content.content.base match {
case ReferenceTypeSymbol(base: ComplexTypeDecl) => List(buildTypeName(base, true))
case _ => Nil
}
  /** Reverse lookup: names of every choice compositor (across all schemas) that
    * references `decl` as a particle and contains no foreign types, so the
    * generated class for `decl` can extend the choice's trait. */
  def buildOptions(decl: ComplexTypeDecl): List[String] = {
    val set = mutable.ListBuffer.empty[String]
    // Records the choice's generated name when the particle's type resolves to `decl`.
    def addIfMatch(typeSymbol: XsTypeSymbol, choice: ChoiceDecl) = {
      typeSymbol match {
        case ReferenceTypeSymbol(that: ComplexTypeDecl) =>
          if (that.namespace == decl.namespace &&
            that.name == decl.name &&
            !containsForeignType(choice))
            set += makeTypeName(context.compositorNames(choice))
        case _ =>
      }
    }
    for (sch <- context.schemas;
         choice <- sch.choices;
         particle <- choice.particles) particle match {
      case elem: ElemDecl => addIfMatch(elem.typeSymbol, choice)
      case ref: ElemRef => addIfMatch(buildElement(ref).typeSymbol, choice)
      case _ => // do nothing
    }
    set.toList.distinct
  }
// reverse lookup all choices that contains that.
def buildOptions(that: HasParticle): List[String] = {
val set = mutable.ListBuffer.empty[String]
def addIfMatch(comp: HasParticle, choice: ChoiceDecl) {
if (comp == that && !containsForeignType(choice))
set += makeTypeName(context.compositorNames(choice))
}
def addIfContains(choice: ChoiceDecl) {
choice.particles foreach { _ match {
case ch: ChoiceDecl =>
addIfMatch(ch, choice)
addIfContains(ch)
case comp: HasParticle => addIfMatch(comp, choice)
case _ =>
}
}
}
for (sch <- context.schemas;
choice <- sch.choices)
addIfContains(choice)
set.toList.distinct
}
  /** Collects every named group referenced by `decl`'s content model, walking
    * up through base complex types for restrictions and extensions. */
  def filterGroup(decl: ComplexTypeDecl): List[GroupDecl] = decl.content.content match {
    // complex content means 1. has child elements 2. has attributes
    case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
      filterGroup(base)
    case res@CompContRestrictionDecl(XsAnyType, _, _) =>
      filterGroup(res.compositor)
    case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
      filterGroup(base) :::
      filterGroup(ext.compositor)
    case ext@CompContExtensionDecl(XsAnyType, _, _) =>
      filterGroup(ext.compositor)
    case _ => Nil
  }
def filterGroup(compositor: Option[HasParticle]): List[GroupDecl] = compositor match {
case Some(c) => filterGroup(c)
case None => Nil
}
  /** Collects named groups reachable from a compositor: the compositor itself
    * if it is a group (or a reference to one), otherwise a recursive scan of
    * its particles. */
  def filterGroup(compositor: HasParticle): List[GroupDecl] = compositor match {
    case ref: GroupRef => List(buildGroup(ref))
    case group: GroupDecl => List(group)
    case _ =>
      (compositor.particles flatMap {
        case ref: GroupRef => List(buildGroup(ref))
        case group: GroupDecl => List(group)
        case compositor2: HasParticle => filterGroup(compositor2)
        case _ => Nil
      })
  }
  /** Number of constructor arguments contributed by `decl`'s content model,
    * including arguments inherited from a complex base type. */
  def argSize(decl: ComplexTypeDecl): Int = decl.content.content match {
    // complex content means 1. has child elements 2. has attributes
    case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
      argSize(base)
    case res@CompContRestrictionDecl(XsAnyType, _, _) =>
      argSize(res.compositor)
    case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
      argSize(base) + argSize(ext.compositor)
    case ext@CompContExtensionDecl(XsAnyType, _, _) =>
      argSize(ext.compositor)
    case _ => 1
  }
def argSize(compositor: Option[HasParticle]): Int = compositor match {
case Some(c) =>
c match {
case seq: SequenceDecl => c.particles.size
case _ => 1
}
case None => 1
}
  /** Flattens `decl`'s content model into the list of element declarations that
    * become the generated case class parameters, prepending those inherited
    * from a complex base type. */
  def flattenElements(decl: ComplexTypeDecl): List[ElemDecl] = {
    // Starting index for this type's own particles: extensions continue
    // numbering after the base type's arguments.
    val index = decl.content.content match {
      // complex content means 1. has child elements 2. has attributes
      case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) => argSize(base)
      case res@CompContRestrictionDecl(XsAnyType, _, _) => 0
      case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) => argSize(base)
      case ext@CompContExtensionDecl(XsAnyType, _, _) => 0
      case _ => 0
    }
    // Side effect: resets the counter used to number wildcard (xs:any) params.
    anyNumbers.clear()
    val build: ComplexTypeContent =>? List[ElemDecl] = {
      case SimpContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _, _) =>
        flattenElements(base)
      case SimpContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _) =>
        flattenElements(base)
      // complex content means 1. has child elements 2. has attributes
      case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
        flattenElements(base)
      case res@CompContRestrictionDecl(XsAnyType, _, _) =>
        res.compositor map { flattenElements(decl.namespace, decl.family, _, index, true) } getOrElse { Nil }
      case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
        flattenElements(base) :::
          (ext.compositor map { flattenElements(decl.namespace, decl.family, _, index, true) } getOrElse { Nil })
      case ext@CompContExtensionDecl(XsAnyType, _, _) =>
        ext.compositor map { flattenElements(decl.namespace, decl.family, _, index, true) } getOrElse { Nil }
      case _ => Nil
    }
    // Simple-type bases take precedence over the structural cases above.
    val pf = buildSimpleTypeRef orElse build
    pf(decl.content.content)
  }
// sometimes we don't have ComplexTypeDecl because it's a group.
def splitLongSequence(namespace: Option[String], family: List[String], particles: List[Particle]): List[Particle] =
if (particles.size <= contentsSizeLimit && !isWrapped(namespace, family)) particles
else splitLong[SequenceDecl](particles) { SequenceDecl(namespace, _, 1, 1, 0) }
  // used to generate accessors
  /** Finds the synthetic sub-sequences produced by splitting `decl`'s long
    * sequences, including those inherited from a complex base type. */
  def splitSequences(decl: ComplexTypeDecl): List[SequenceDecl] = decl.content.content match {
    case SimpContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _, _) => splitSequences(base)
    case SimpContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _) => splitSequences(base)
    // complex content means 1. has child elements 2. has attributes
    case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) => splitSequences(base)
    case res@CompContRestrictionDecl(XsAnyType, _, _) =>
      res.compositor map { splitSequences(decl.namespace, decl.family, _) } getOrElse { Nil }
    case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
      splitSequences(base) :::
        (ext.compositor map { splitSequences(decl.namespace, decl.family, _) } getOrElse { Nil })
    case ext@CompContExtensionDecl(XsAnyType, _, _) =>
      ext.compositor map { splitSequences(decl.namespace, decl.family, _) } getOrElse { Nil }
    case _ => Nil
  }
def splitSequences(namespace: Option[String], family: List[String],
compositor: HasParticle): List[SequenceDecl] = compositor match {
case seq: SequenceDecl if seq.particles.size > contentsSizeLimit || isWrapped(namespace, family) =>
splitLong[SequenceDecl](seq.particles) { xs => SequenceDecl(namespace, xs, 1, 1, 0) }
case _ => Nil
}
  /** Flattens a compositor into pseudo element declarations for code
    * generation. Groups collapse to a single reference, repeated top-level
    * sequences are wrapped (with single-particle special cases), <xs:all>
    * members become optional, and choices become a single reference. */
  def flattenElements(namespace: Option[String], family: List[String],
      compositor: HasParticle, index: Int, wrapTopSequence: Boolean): List[ElemDecl] = {
    compositor match {
      case ref:GroupRef => flattenElements(namespace, family, buildGroup(ref), index, wrapTopSequence)
      case group:GroupDecl =>
        if (primaryCompositor(group).particles.isEmpty) Nil
        else List(buildCompositorRef(group, index))
      case seq: SequenceDecl =>
        // A repeated top-level sequence is represented as one wrapper element;
        // a single wildcard/choice/group inside it can be merged directly.
        if (wrapTopSequence &&
          (seq.minOccurs != 1 || seq.maxOccurs != 1))
          if (seq.particles.size == 1) compositor.particles(0) match {
            case any: AnyDecl => List(buildAnyRef(any.copy(
              minOccurs = math.min(any.minOccurs, seq.minOccurs),
              maxOccurs = math.max(any.maxOccurs, seq.maxOccurs)) ))
            case choice: ChoiceDecl =>
              val occurence = mergeOccurrence(buildOccurrence(choice).copy(nillable = false),
                buildOccurrence(seq))
              List(buildCompositorRef(choice, occurence, 0))
            case group: GroupDecl => flattenElements(namespace, family, group, index, wrapTopSequence)
            case _ => List(buildCompositorRef(seq, index))
          }
          else List(buildCompositorRef(seq, index))
        else splitLongSequence(
          namespace, family, compositor.particles).zipWithIndex flatMap {
          case (ref: GroupRef, i: Int) => flattenElements(namespace, family, buildGroup(ref), i + index, wrapTopSequence)
          case (compositor2: HasParticle, i: Int) => List(buildCompositorRef(compositor2, i + index))
          case (elem: ElemDecl, i: Int) => List(elem)
          case (ref: ElemRef, i: Int) => List(buildElement(ref))
          case (any: AnyDecl, i: Int) => List(buildAnyRef(any))
        }
      case all: AllDecl =>
        if (isLongAll(all, namespace, family)) List(buildLongAllRef(all))
        else compositor.particles flatMap {
          // by spec, <all> contains only elems.
          case elem: ElemDecl => List(toOptional(elem))
          case ref: ElemRef => List(toOptional(buildElement(ref)))
        }
      case choice: ChoiceDecl =>
        List(buildCompositorRef(choice, index))
    }
  }
def isLongAll(all: AllDecl, namespace: Option[String], family: List[String]): Boolean =
(all.particles.size > contentsSizeLimit || isWrapped(namespace, family))
val buildSimpleTypeRef: ComplexTypeContent =>? List[ElemDecl] = {
case content: ComplexTypeContent
if content.base.isInstanceOf[BuiltInSimpleTypeSymbol] =>
val symbol = content.base.asInstanceOf[BuiltInSimpleTypeSymbol]
List(buildSymbolElement(symbol))
case content: ComplexTypeContent
if content.base.isInstanceOf[ReferenceTypeSymbol] &&
content.base.asInstanceOf[ReferenceTypeSymbol].decl.isInstanceOf[SimpleTypeDecl] =>
val symbol = content.base.asInstanceOf[ReferenceTypeSymbol]
List(buildSymbolElement(symbol))
}
  /** Generates `lazy val` accessor strings for the members of a map-backed
    * <xs:all> wrapper: optional members use `.get(...).map(_.as[...])`,
    * required members index the map directly. */
  def generateAccessors(all: AllDecl): List[String] = {
    val wrapperName = makeParamName("all", false)
    // by spec, there are only elements under <all>
    all.particles collect {
      case elem: ElemDecl => elem
      case ref: ElemRef => buildElement(ref)
    } map { elem => toCardinality(elem.minOccurs, elem.maxOccurs) match {
      case Optional => "lazy val " + makeParamName(elem.name, false) + " = " +
        wrapperName + ".get(" + quote(buildNodeName(elem, true)) + ") map { _.as[" + buildTypeName(elem.typeSymbol) + "] }"
      case _ => "lazy val " + makeParamName(elem.name, false) + " = " +
        wrapperName + "(" + quote(buildNodeName(elem, true)) + ").as[" + buildTypeName(elem.typeSymbol) + "]"
      }
    }
  }
  /** Generates `lazy val` accessor strings for attributes stored in the
    * attributes map parameter.
    * NOTE(review): AttributeGroupDecl pairs produced by the first `collect`
    * have no matching case in the second `collect` and are silently dropped —
    * presumably group members are flattened elsewhere; verify. */
  def generateAccessors(attributes: List[AttributeLike]): List[String] = {
    val wrapperName = makeParamName(ATTRS_PARAM, false)
    attributes collect {
      case attr: AttributeDecl => (attr, toCardinality(attr))
      case ref: AttributeRef =>
        val attr = buildAttribute(ref)
        (attr, toCardinality(attr))
      case group: AttributeGroupDecl => (group, Single)
    } collect {
      case (attr: AttributeDecl, Optional) =>
        "lazy val " + makeParamName(buildParam(attr).name, true) + " = " +
          wrapperName + ".get(" + quote(buildNodeName(attr, false)) + ") map { _.as[" + buildTypeName(attr.typeSymbol, true) + "] }"
      case (attr: AttributeDecl, Single) =>
        "lazy val " + makeParamName(buildParam(attr).name, true) + " = " +
          wrapperName + "(" + quote(buildNodeName(attr, false)) + ").as[" + buildTypeName(attr.typeSymbol, true) + "]"
    }
  }
  /** Generates forwarding `lazy val` accessors for parameters whose type is a
    * synthetic compositor wrapper that was split out of a long sequence, so
    * callers can reach the nested members directly. */
  def generateAccessors(params: List[Param], splits: List[SequenceDecl]) = params flatMap {
    case param@Param(_, _, ReferenceTypeSymbol(decl@ComplexTypeDecl(_, _, _, _, _, _, _, _)), _, _, _, _, _) if
      compositorWrapper.contains(decl) &&
      splits.contains(compositorWrapper(decl)) =>
      val wrapperName = makeParamName(param.name, false)
      // Mirror the flattening performed for code generation to recover the
      // wrapper's member names.
      val particles = compositorWrapper(decl).particles.zipWithIndex flatMap {
        case (ref: GroupRef, i: Int) => List(buildCompositorRef(ref, i))
        case (compositor2: HasParticle, i: Int) => List(buildCompositorRef(compositor2, i))
        case (elem: ElemDecl, i: Int) => List(elem)
        case (ref: ElemRef, i: Int) => List(buildElement(ref))
        case (any: AnyDecl, i: Int) => List(buildAnyRef(any))
      }
      val paramList = particles map { buildParam }
      paramList map { p =>
        "lazy val " + makeParamName(p.name, false) + " = " + wrapperName + "." + makeParamName(p.name, false)
      }
    case _ => Nil
  }
  /** Builds the particle list for `decl`'s content model, recursing into
    * complex base types for restrictions and extensions. */
  def buildParticles(decl: ComplexTypeDecl, name: String): List[ElemDecl] = {
    // Side effect: resets the counter used to number wildcard (xs:any) params.
    anyNumbers.clear()
    val build: ComplexTypeContent =>? List[ElemDecl] = {
      case SimpContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _, _) =>
        buildParticles(base, makeTypeName(base.name))
      case SimpContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _) =>
        buildParticles(base, makeTypeName(base.name))
      // complex content means 1. has child elements 2. has attributes
      case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
        buildParticles(base, makeTypeName(base.name))
      case res@CompContRestrictionDecl(XsAnyType, _, _) =>
        buildParticles(res.compositor, name)
      case ext@CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) =>
        buildParticles(base, makeTypeName(base.name)) :::
        buildParticles(ext.compositor, name)
      case ext@CompContExtensionDecl(XsAnyType, _, _) =>
        buildParticles(ext.compositor, name)
      case _ => Nil
    }
    // Simple-type bases take precedence over the structural cases above.
    val pf = buildSimpleTypeRef orElse build
    pf(decl.content.content)
  }
def flattenMixed(decl: ComplexTypeDecl) = if (buildEffectiveMixed(decl))
List(ElemDecl(Some(INTERNAL_NAMESPACE), MIXED_PARAM, XsMixed,
None, None, 0, Integer.MAX_VALUE))
else Nil
// def buildAttributes(decl: ComplexTypeDecl): List[AttributeLike] = {
// val attributes = mergeAttributes(decl.content.content match {
// case SimpContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _, _) => buildAttributes(base)
// case SimpContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _) => buildAttributes(base)
// case CompContRestrictionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) => buildAttributes(base)
// case CompContExtensionDecl(ReferenceTypeSymbol(base: ComplexTypeDecl), _, _) => buildAttributes(base)
// case _ => Nil
// }, buildAttributes(decl.content.content.attributes))
//
// // rearrange attributes so AnyAttributeDecl comes at the end.
// val notAnyAttributes = attributes filter {
// case any: AnyAttributeDecl => false
// case _ => true
// }
// val anyAttributes = attributes filter {
// case any: AnyAttributeDecl => true
// case _ => false
// }
// if (anyAttributes.isEmpty) notAnyAttributes
// else notAnyAttributes ::: List(anyAttributes.head)
// }
//
// def buildAttributes(attributes: List[AttributeLike]): List[AttributeLike] =
// attributes map(resolveRef)
//
  /** Renders the schema-level annotation (if any) as the generated file's header comment. */
  def makeSchemaComment = <source>{makeAnnotation(schema.annotation)}</source>
def makeAnnotation(anno: Option[AnnotationDecl]) = anno match {
case Some(annotation) =>
newline + "/** " +
(for (doc <- annotation.documentations;
x <- doc.any)
yield x.toString).mkString + newline +
"*/"
case None => ""
}
}
| Banno/scalaxb | cli/src/main/scala/scalaxb/compiler/xsd/GenSource.scala | Scala | mit | 47,196 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.models
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import com.esotericsoftware.kryo.io.{ Input, Output }
import Ordering.Option
import org.apache.spark.Logging
import org.bdgenomics.adam.instrumentation.Timers.CreateReferencePositionPair
import org.bdgenomics.adam.models.ReferenceRegion._
import org.bdgenomics.adam.rich.RichAlignmentRecord
import org.bdgenomics.formats.avro.AlignmentRecord
object ReferencePositionPair extends Logging {
  /**
   * Builds the pair of 5' reference positions for the reads of a fragment.
   *
   * Reads flagged as first/second of the pair are used when present; otherwise
   * any primary-mapped or unmapped read is used as read 1 and read 2 is None.
   * Unmapped reads, which have no coordinate, are keyed by their sequence at
   * position 0.
   */
  def apply(singleReadBucket: SingleReadBucket): ReferencePositionPair = CreateReferencePositionPair.time {
    val firstOfPair = (singleReadBucket.primaryMapped.filter(_.getReadInFragment == 0) ++
      singleReadBucket.unmapped.filter(_.getReadInFragment == 0)).toSeq
    val secondOfPair = (singleReadBucket.primaryMapped.filter(_.getReadInFragment == 1) ++
      singleReadBucket.unmapped.filter(_.getReadInFragment == 1)).toSeq
    def getPos(r: AlignmentRecord): ReferencePosition = {
      if (r.getReadMapped) {
        new RichAlignmentRecord(r).fivePrimeReferencePosition
      } else {
        // Unmapped read: no coordinate, so key on the read sequence instead.
        ReferencePosition(r.getSequence, 0L)
      }
    }
    // nonEmpty instead of summing sizes just to test for emptiness.
    if (firstOfPair.nonEmpty || secondOfPair.nonEmpty) {
      new ReferencePositionPair(
        firstOfPair.headOption.map(getPos),
        secondOfPair.headOption.map(getPos)
      )
    } else {
      // No pairing flags at all: fall back to any available read.
      new ReferencePositionPair(
        (singleReadBucket.primaryMapped ++
          singleReadBucket.unmapped).toSeq.headOption.map(getPos),
        None
      )
    }
  }
}
/**
 * The 5' reference positions of the two reads of a fragment; either side is
 * None when the corresponding read is absent.
 */
case class ReferencePositionPair(
    read1refPos: Option[ReferencePosition],
    read2refPos: Option[ReferencePosition])
/**
 * Kryo serializer for [[ReferencePositionPair]]. Each optional position is
 * encoded as a presence flag followed, when present, by the position itself.
 */
class ReferencePositionPairSerializer extends Serializer[ReferencePositionPair] {
  val rps = new ReferencePositionSerializer()

  def writeOptionalReferencePos(kryo: Kryo, output: Output, optRefPos: Option[ReferencePosition]) = {
    optRefPos.fold(output.writeBoolean(false)) { refPos =>
      output.writeBoolean(true)
      rps.write(kryo, output, refPos)
    }
  }

  def readOptionalReferencePos(kryo: Kryo, input: Input): Option[ReferencePosition] =
    if (input.readBoolean()) Some(rps.read(kryo, input, classOf[ReferencePosition]))
    else None

  def write(kryo: Kryo, output: Output, obj: ReferencePositionPair) = {
    writeOptionalReferencePos(kryo, output, obj.read1refPos)
    writeOptionalReferencePos(kryo, output, obj.read2refPos)
  }

  def read(kryo: Kryo, input: Input, klazz: Class[ReferencePositionPair]): ReferencePositionPair = {
    // Read in the same order as write: read 1 first, then read 2.
    val firstPos = readOptionalReferencePos(kryo, input)
    val secondPos = readOptionalReferencePos(kryo, input)
    new ReferencePositionPair(firstPos, secondPos)
  }
}
| rnpandya/adam | adam-core/src/main/scala/org/bdgenomics/adam/models/ReferencePositionPair.scala | Scala | apache-2.0 | 3,543 |
package com.peterpotts.snake.coercion
import scala.language.implicitConversions
import scala.util.Try
/**
 * Coerces two values of possibly different primitive (or string) types onto a
 * common widened type and combines them with the matching abstract method.
 * Strings are first parsed into the narrowest primitive that accepts them.
 */
trait Coercion[T] {
  // Widening conversions used by `lifted` to promote the narrower operand of a
  // mixed-type pair.
  implicit def booleanToInt(value: Boolean): Int = IntCoercer(value)
  implicit def booleanToLong(value: Boolean): Long = LongCoercer(value)
  implicit def booleanToDouble(value: Boolean): Double = DoubleCoercer(value)
  implicit def booleanToString(value: Boolean): String = StringCoercer(value)
  implicit def intToLong(value: Int): Long = LongCoercer(value)
  implicit def intToDouble(value: Int): Double = DoubleCoercer(value)
  implicit def intToString(value: Int): String = StringCoercer(value)
  implicit def longToDouble(value: Long): Double = DoubleCoercer(value)
  implicit def longToString(value: Long): String = StringCoercer(value)
  implicit def doubleToString(value: Double): String = StringCoercer(value)

  // Narrowest-first parse of strings: Boolean, then Int, Long, Double;
  // otherwise keep the String. A second, unreachable Try(value.toBoolean)
  // attempt after toDouble has been removed (it could never succeed when the
  // first attempt failed).
  private val lift: Any => Any = {
    case value: Boolean => value
    case value: Int => value
    case value: Long => value
    case value: Double => value
    case value: String =>
      Try(value.toBoolean).getOrElse(
        Try(value.toInt).getOrElse(
          Try(value.toLong).getOrElse(
            Try(value.toDouble).getOrElse(
              value))))
    case value: Any => throw new MatchError(value)
  }

  // Dispatches a lifted pair to the combinator for the wider of the two types.
  // Fix: (Boolean, Boolean) previously went to `int` via implicit widening,
  // which left the abstract `boolean` method dead; it now dispatches to it.
  private val lifted: (Any, Any) => T = {
    case (left: Boolean, right: Boolean) => boolean(left, right)
    case (left: Boolean, right: Int) => int(left, right)
    case (left: Boolean, right: Long) => long(left, right)
    case (left: Boolean, right: Double) => double(left, right)
    case (left: Boolean, right: String) => string(left, right)
    case (left: Int, right: Boolean) => int(left, right)
    case (left: Int, right: Int) => int(left, right)
    case (left: Int, right: Long) => long(left, right)
    case (left: Int, right: Double) => double(left, right)
    case (left: Int, right: String) => string(left, right)
    case (left: Long, right: Boolean) => long(left, right)
    case (left: Long, right: Int) => long(left, right)
    case (left: Long, right: Long) => long(left, right)
    case (left: Long, right: Double) => double(left, right)
    case (left: Long, right: String) => string(left, right)
    case (left: Double, right: Boolean) => double(left, right)
    case (left: Double, right: Int) => double(left, right)
    case (left: Double, right: Long) => double(left, right)
    case (left: Double, right: Double) => double(left, right)
    case (left: Double, right: String) => string(left, right)
    case (left: String, right: Boolean) => string(left, right)
    case (left: String, right: Int) => string(left, right)
    case (left: String, right: Long) => string(left, right)
    case (left: String, right: Double) => string(left, right)
    case (left: String, right: String) => string(left, right)
    case value => throw new MatchError(value)
  }

  /** Lifts both operands and combines them at their common widened type. */
  def apply(left: Any, right: Any): T = lifted(lift(left), lift(right))

  def boolean(left: Boolean, right: Boolean): T

  def int(left: Int, right: Int): T

  def long(left: Long, right: Long): T

  def double(left: Double, right: Double): T

  def string(left: String, right: String): T
}
| peterpotts/snake | src/main/scala/com/peterpotts/snake/coercion/Coercion.scala | Scala | mit | 3,203 |
package lila.pool
import org.joda.time.DateTime
import lila.rating.RatingRange
import lila.user.User
/**
 * A player waiting in an automatic-pairing pool.
 * `misses` counts the pairing waves this member has gone unmatched;
 * it resets whenever the rating range changes (see `withRange`).
 */
case class PoolMember(
    userId: User.ID,
    socketId: lila.socket.Socket.Uid,
    rating: Int,
    ratingRange: Option[RatingRange],
    engine: Boolean,
    blocking: PoolMember.BlockedUsers,
    since: DateTime,
    misses: Int = 0 // how many waves they missed
) {

  // Bump the missed-wave counter after an unsuccessful pairing wave.
  def incMisses = copy(misses = misses + 1)

  // Milliseconds spent waiting in the pool so far.
  def waitMillis: Int = (DateTime.now.getMillis - since.getMillis).toInt

  // Absolute rating distance to another member.
  def ratingDiff(other: PoolMember) = Math.abs(rating - other.rating)

  // Replace the rating range; a changed range resets the miss counter.
  def withRange(r: Option[RatingRange]) =
    if (r == ratingRange) this
    else copy(ratingRange = r, misses = 0)

  def hasRange = ratingRange.isDefined
}
object PoolMember {

  // Value class wrapping the ids of users this member refuses to be paired with.
  case class BlockedUsers(ids: Set[User.ID]) extends AnyVal

  /** Builds a pool member from a joiner, reading the rating for the pool's
    * perf type (1500 is used when the joiner has no rating for it). */
  def apply(joiner: PoolApi.Joiner, config: PoolConfig): PoolMember =
    PoolMember(
      userId = joiner.userId,
      socketId = joiner.socketId,
      engine = joiner.engine,
      rating = joiner.ratingMap.getOrElse(config.perfType.key, 1500),
      ratingRange = joiner.ratingRange,
      blocking = BlockedUsers(joiner.blocking),
      since = DateTime.now)
}
| clarkerubber/lila | modules/pool/src/main/PoolMember.scala | Scala | agpl-3.0 | 1,176 |
package org.finra.datagenerator.scaffolding.hierarchy
/**
* Created by dkopel on 31/05/16.
*/
/** A [[Nested]] element constrained to exactly one enclosing parent. */
trait SingleParentNested extends Nested {
    /** Returns the single parent as the requested [[Nested]] subtype `T`.
      * NOTE(review): `T` is caller-chosen and not checked here — presumably
      * implementations cast internally; verify call sites. */
    def getParent[T <: Nested]: T
}
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to a sample of Scala code snippets that meet specific criteria, giving a quick overview of the dataset's contents without deeper analysis.