code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.library
import org.orbeon.saxon.`type`.BuiltInAtomicType._
import org.orbeon.saxon.expr.StaticProperty._
import org.orbeon.oxf.xml.OrbeonFunctionLibrary
import org.orbeon.saxon.`type`.Type
import org.orbeon.oxf.xforms.function._
/**
* XForms functions that depend on the XForms environment.
*/
trait XFormsEnvFunctions extends OrbeonFunctionLibrary {

  // Namespaces under which the functions below are registered. Must be
  // supplied via an early definition in the subclass, because it is read
  // during trait construction by the Namespace(...) call below.
  val XFormsEnvFunctionsNS: Seq[String]

  // Each Fun(...) entry registers one function:
  //   name, implementing class, implementation op code, minimum number of
  //   arguments, result type, result cardinality, followed by one Arg(...)
  //   declaration (type + cardinality) per formal argument.
  Namespace(XFormsEnvFunctionsNS) {

    // index(string) -> exactly one integer
    Fun("index", classOf[Index], op = 0, min = 1, INTEGER, EXACTLY_ONE,
      Arg(STRING, EXACTLY_ONE)
    )

    // property(string) -> exactly one string
    Fun("property", classOf[Property], op = 0, min = 1, STRING, EXACTLY_ONE,
      Arg(STRING, EXACTLY_ONE)
    )

    // instance([string]) -> zero or one node (argument optional: min = 0)
    Fun("instance", classOf[Instance], op = 0, min = 0, Type.NODE_TYPE, ALLOWS_ZERO_OR_ONE,
      Arg(STRING, EXACTLY_ONE)
    )

    // current() / context() -> zero or one item, no arguments
    Fun("current", classOf[Current], op = 0, min = 0, Type.ITEM_TYPE, ALLOWS_ZERO_OR_ONE)
    Fun("context", classOf[Context], op = 0, min = 0, Type.ITEM_TYPE, ALLOWS_ZERO_OR_ONE)

    // event(string) -> zero or more items
    Fun("event", classOf[Event], op = 0, min = 1, Type.ITEM_TYPE, ALLOWS_ZERO_OR_MORE,
      Arg(STRING, EXACTLY_ONE)
    )

    // valid([item()*, boolean, boolean]) -> exactly one boolean
    // (all three arguments optional: min = 0)
    Fun("valid", classOf[XFormsValid], op = 0, min = 0, BOOLEAN, EXACTLY_ONE,
      Arg(Type.ITEM_TYPE, ALLOWS_ZERO_OR_MORE),
      Arg(BOOLEAN, EXACTLY_ONE),
      Arg(BOOLEAN, EXACTLY_ONE)
    )

    // XForms 2.0
    // bind(string) -> zero or more nodes
    Fun("bind", classOf[Bind], op = 0, min = 1, Type.NODE_TYPE, ALLOWS_ZERO_OR_MORE,
      Arg(STRING, EXACTLY_ONE)
    )
  }
} | ajw625/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/library/XFormsEnvFunctions.scala | Scala | lgpl-2.1 | 2,284 |
package com.lendap.spark.lsh
/**
* Created by maytekin on 06.08.2015.
*/
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext
import org.apache.spark.mllib.linalg.SparseVector
import org.apache.spark.rdd.RDD
import scala.collection.mutable.ListBuffer
import org.apache.spark.mllib.util.Saveable
import org.apache.spark.sql.SparkSession
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
/** Create LSH model for maximum m number of elements in each vector.
*
* @param m max number of possible elements in a vector
* @param numHashFunc number of hash functions
* @param numHashTables number of hashTables.
*
* */
/** LSH model for vectors with at most `m` elements.
  *
  * Holds `numHashFunc * numHashTables` randomly generated hash functions;
  * hash function `i` belongs to table `i % numHashTables` (see [[hashValue]]),
  * so each table is keyed by the concatenation of `numHashFunc` hash values.
  *
  * @param m             max number of possible elements in a vector
  * @param numHashFunc   number of hash functions per hash table
  * @param numHashTables number of hash tables
  */
class LSHModel(val m: Int, val numHashFunc : Int, val numHashTables: Int)
  extends Serializable with Saveable {

  /** Generate numHashFunc * numHashTables random hash functions, indexed. */
  private val _hashFunctions = ListBuffer[Hasher]()
  for (i <- 0 until numHashFunc * numHashTables)
    _hashFunctions += Hasher(m)
  final var hashFunctions: List[(Hasher, Int)] = _hashFunctions.toList.zipWithIndex

  /** The hash tables: ((hashTableID, hash key), vector_id).
    * Populated externally (or via [[add]]); null until then. */
  var hashTables: RDD[((Int, String), Long)] = null

  /** Generic filter function for hashTables. */
  def filter(f: (((Int, String), Long)) => Boolean): RDD[((Int, String), Long)] =
    hashTables.map(a => a).filter(f)

  /** Hash a single vector against an existing model and return the candidate buckets.
    * NOTE(review): this concatenates ALL hash functions into a single key while
    * the tables are keyed per-table (see hashValue), and the `model` / `itemID`
    * parameters are unused — confirm the intended semantics before relying on it. */
  def filter(data: SparseVector, model: LSHModel, itemID: Long): RDD[Long] = {
    val hashKey = hashFunctions.map(h => h._1.hash(data)).mkString("")
    hashTables.filter(x => x._1._2 == hashKey).map(a => a._2)
  }

  /** Creates the (hashTableId, concatenated hash key) pair for each hash table. */
  def hashValue(data: SparseVector): List[(Int, String)] =
    hashFunctions.map(a => (a._2 % numHashTables, a._1.hash(data)))
      .groupBy(_._1)
      .map(x => (x._1, x._2.map(_._2).mkString(""))).toList

  /** Returns the candidate set for the given vector id (excluding itself). */
  def getCandidates(vId: Long): RDD[Long] = {
    val buckets = hashTables.filter(x => x._2 == vId).map(x => x._1).distinct().collect()
    hashTables.filter(x => buckets contains x._1).map(x => x._2).filter(x => x != vId)
  }

  /** Returns the candidate set for the given vector. */
  def getCandidates(v: SparseVector): RDD[Long] = {
    val hashVal = hashValue(v)
    hashTables.filter(x => hashVal contains x._1).map(x => x._2)
  }

  /** Adds a new sparse vector with vector id `vId` to the model. */
  def add (vId: Long, v: SparseVector, sc: SparkContext): LSHModel = {
    val newRDD = sc.parallelize(hashValue(v).map(a => (a, vId)))
    // BUG FIX: the previous code computed `hashTables ++ newRDD` and discarded
    // the result (RDDs are immutable, ++ returns a new RDD), so added vectors
    // were silently lost. Also guard against a not-yet-initialized table.
    hashTables = if (hashTables == null) newRDD else hashTables ++ newRDD
    this
  }

  /** Removes the sparse vector with vector id `vId` from the model.
    * `sc` is unused but kept for interface compatibility. */
  def remove (vId: Long, sc: SparkContext): LSHModel = {
    hashTables = hashTables.filter(x => x._2 != vId)
    this
  }

  override def save(sc: SparkContext, path: String): Unit =
    LSHModel.SaveLoadV0_0_1.save(sc, this, path)

  // NOTE(review): "0.0.1" disagrees with SaveLoadV0_0_1.thisFormatVersion
  // ("0.2.0"), which is what is actually written to and checked on disk.
  override protected def formatVersion: String = "0.0.1"
}
object LSHModel {

  /** Loads a previously saved model from `path` (see [[SaveLoadV0_0_1]]). */
  def load(session:SparkSession, path: String): LSHModel = {
    LSHModel.SaveLoadV0_0_1.load(session, path)
  }

  /** Save/load of an LSHModel as three text-file directories under `path`:
    * metadata (JSON), hasher (hash functions), data (hash table entries). */
  private [lsh] object SaveLoadV0_0_1 {

    // NOTE(review): version string "0.2.0" disagrees with both the object
    // name (V0_0_1) and LSHModel.formatVersion ("0.0.1"); save and load are
    // self-consistent, but the naming is misleading.
    private val thisFormatVersion = "0.2.0"
    private val thisClassName = this.getClass.getName()

    def save(sc:SparkContext, model: LSHModel, path: String): Unit = {
      val metadata =
        compact(render(("class" -> thisClassName) ~ ("version" -> thisFormatVersion)))
      //save metadata info
      sc.parallelize(Seq(metadata), 1).saveAsTextFile(Loader.metadataPath(path))
      //save hash functions as CSV lines: hashTableId,randomVector...
      //(the random vector itself is comma-joined, so the tail of each split
      // line below reconstructs it)
      sc.parallelize(model.hashFunctions
        .map(x => (x._2, x._1.r.mkString(",")))
        .map(_.productIterator.mkString(",")))
        .saveAsTextFile(Loader.hasherPath(path))
      //save data as CSV lines: hashTableId,hashValue,vectorId
      model.hashTables
        .map(x => (x._1._1, x._1._2, x._2))
        .map(_.productIterator.mkString(","))
        .saveAsTextFile(Loader.dataPath(path))
    }

    def load(session:SparkSession, path: String): LSHModel = {
      implicit val formats = DefaultFormats
      val (className, formatVersion, metadata) = Loader.loadMetadata(session, path)
      assert(className == thisClassName)
      assert(formatVersion == thisFormatVersion)
      // data lines: hashTableId,hashValue,vectorId
      val hashTables = session.sparkContext.textFile(Loader.dataPath(path))
        .map(x => x.split(","))
        .map(x => ((x(0).toInt, x(1)), x(2).toLong))
      // hasher lines: id,v1,v2,... -> (Hasher(vector), id)
      val hashers = session.sparkContext.textFile(Loader.hasherPath(path))
        .map(a => a.split(","))
        .map(x => (x.head, x.tail))
        .map(x => (new Hasher(x._2.map(_.toDouble)), x._1.toInt)).collect().toList
      // derive the model dimensions from the stored data
      val numBands = hashTables.map(x => x._1._1).distinct.count()
      val numHashFunc = hashers.size / numBands
      //Validate loaded data
      //check size of data
      assert(hashTables.count != 0, s"Loaded hashTable data is empty")
      //check size of hash functions
      assert(hashers.size != 0, s"Loaded hasher data is empty")
      //check hashValue size (string length). Should be equal to numHashFunc
      assert(hashTables.map(x => x._1._2).filter(x => x.size != numHashFunc).collect().size == 0,
        s"hashValues in data does not match with hash functions")
      //create model; m = 0 since the original element count is not persisted
      val model = new LSHModel(0, numHashFunc.toInt, numBands.toInt)
      model.hashFunctions = hashers
      model.hashTables = hashTables
      model
    }
  }
}
/** Helper functions for save/load data from mllib package.
* TODO: Remove and use Loader functions from mllib. */
/** Helper functions for save/load data from mllib package.
  * TODO: Remove and use Loader functions from mllib. */
private[lsh] object Loader {

  /** URI of the `data` subdirectory under `path`, via the Hadoop filesystem. */
  def dataPath(path: String): String = new Path(path, "data").toUri.toString

  /** URI of the `metadata` subdirectory under `path`, via the Hadoop filesystem. */
  def metadataPath(path: String): String = new Path(path, "metadata").toUri.toString

  /** URI of the `hasher` subdirectory under `path`, via the Hadoop filesystem. */
  def hasherPath(path: String): String = new Path(path, "hasher").toUri.toString

  /**
   * Reads the JSON metadata file stored under [[metadataPath]].
   * @return (class name, version, full metadata JSON)
   */
  def loadMetadata(session: SparkSession, path: String): (String, String, JValue) = {
    implicit val formats = DefaultFormats
    val json = parse(session.sparkContext.textFile(metadataPath(path)).first())
    val clazz = (json \\ "class").extract[String]
    val version = (json \\ "version").extract[String]
    (clazz, version, json)
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, QueryTest, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, AttributeSet, In}
import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
import org.apache.spark.sql.columnar.CachedBatch
import org.apache.spark.sql.execution.{FilterExec, InputAdapter, WholeStageCodegenExec}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.test.SQLTestData._
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
import org.apache.spark.storage.StorageLevel._
/** Cached-batch serializer used by this suite: behaves exactly like the
  * default serializer, but with an explicit compression flag and batch size. */
class TestCachedBatchSerializer(
    useCompression: Boolean,
    batchSize: Int) extends DefaultCachedBatchSerializer {

  override def convertInternalRowToCachedBatch(
      input: RDD[InternalRow],
      schema: Seq[Attribute],
      storageLevel: StorageLevel,
      conf: SQLConf): RDD[CachedBatch] =
    convertForCacheInternal(input, schema, batchSize, useCompression)
}
/** End-to-end tests for the in-memory columnar cache (InMemoryRelation /
  * InMemoryTableScanExec): data-type coverage, statistics, predicate pushdown
  * and partition-batch pruning. */
class InMemoryColumnarQuerySuite extends QueryTest with SharedSparkSession {
  import testImplicits._

  setupTestData()

  /** Caches `data` through a fresh InMemoryRelation with a small batch size
    * and verifies the storage level, the cached batch type, and that the
    * cached rows round-trip unchanged. */
  private def cachePrimitiveTest(data: DataFrame, dataType: String): Unit = {
    data.createOrReplaceTempView(s"testData$dataType")
    val storageLevel = MEMORY_ONLY
    val plan = spark.sessionState.executePlan(data.logicalPlan).sparkPlan
    val inMemoryRelation = InMemoryRelation(new TestCachedBatchSerializer(useCompression = true, 5),
      storageLevel, plan, None, data.logicalPlan)

    assert(inMemoryRelation.cacheBuilder.cachedColumnBuffers.getStorageLevel == storageLevel)
    inMemoryRelation.cacheBuilder.cachedColumnBuffers.collect().head match {
      case _: DefaultCachedBatch =>
      case other => fail(s"Unexpected cached batch type: ${other.getClass.getName}")
    }
    checkAnswer(inMemoryRelation, data.collect().toSeq)
  }

  /** Round-trips all primitive-ish types (plus date/timestamp/decimals)
    * through the cache; every third row is null when `nullability` is true. */
  private def testPrimitiveType(nullability: Boolean): Unit = {
    val dataTypes = Seq(BooleanType, ByteType, ShortType, IntegerType, LongType,
      FloatType, DoubleType, DateType, TimestampType, DecimalType(25, 5), DecimalType(6, 5))
    val schema = StructType(dataTypes.zipWithIndex.map { case (dataType, index) =>
      StructField(s"col$index", dataType, nullability)
    })
    val rdd = spark.sparkContext.parallelize((1 to 10).map(i => Row(
      if (nullability && i % 3 == 0) null else if (i % 2 == 0) true else false,
      if (nullability && i % 3 == 0) null else i.toByte,
      if (nullability && i % 3 == 0) null else i.toShort,
      if (nullability && i % 3 == 0) null else i.toInt,
      if (nullability && i % 3 == 0) null else i.toLong,
      if (nullability && i % 3 == 0) null else (i + 0.25).toFloat,
      if (nullability && i % 3 == 0) null else (i + 0.75).toDouble,
      if (nullability && i % 3 == 0) null else new Date(i),
      if (nullability && i % 3 == 0) null else new Timestamp(i * 1000000L),
      if (nullability && i % 3 == 0) null else BigDecimal(Long.MaxValue.toString + ".12345"),
      if (nullability && i % 3 == 0) null
      else new java.math.BigDecimal(s"${i % 9 + 1}" + ".23456")
    )))
    cachePrimitiveTest(spark.createDataFrame(rdd, schema), "primitivesDateTimeStamp")
  }

  /** Round-trips complex types (string/array/map/struct) through the cache.
    * Renamed from the typo'd `tesNonPrimitiveType`. */
  private def testNonPrimitiveType(nullability: Boolean): Unit = {
    val struct = StructType(StructField("f1", FloatType, false) ::
      StructField("f2", ArrayType(BooleanType), true) :: Nil)
    val schema = StructType(Seq(
      StructField("col0", StringType, nullability),
      StructField("col1", ArrayType(IntegerType), nullability),
      StructField("col2", ArrayType(ArrayType(IntegerType)), nullability),
      StructField("col3", MapType(StringType, IntegerType), nullability),
      StructField("col4", struct, nullability)
    ))
    val rdd = spark.sparkContext.parallelize((1 to 10).map(i => Row(
      if (nullability && i % 3 == 0) null else s"str${i}: test cache.",
      if (nullability && i % 3 == 0) null else (i * 100 to i * 100 + i).toArray,
      if (nullability && i % 3 == 0) null
      else Array(Array(i, i + 1), Array(i * 100 + 1, i * 100, i * 100 + 2)),
      if (nullability && i % 3 == 0) null else (i to i + i).map(j => s"key$j" -> j).toMap,
      if (nullability && i % 3 == 0) null else Row((i + 0.25).toFloat, Seq(true, false, null))
    )))
    cachePrimitiveTest(spark.createDataFrame(rdd, schema), "StringArrayMapStruct")
  }

  test("primitive type with nullability:true") {
    testPrimitiveType(true)
  }

  test("primitive type with nullability:false") {
    testPrimitiveType(false)
  }

  test("non-primitive type with nullability:true") {
    val schemaNull = StructType(Seq(StructField("col", NullType, true)))
    val rddNull = spark.sparkContext.parallelize((1 to 10).map(i => Row(null)))
    cachePrimitiveTest(spark.createDataFrame(rddNull, schemaNull), "Null")
    testNonPrimitiveType(true)
  }

  test("non-primitive type with nullability:false") {
    testNonPrimitiveType(false)
  }

  test("simple columnar query") {
    val plan = spark.sessionState.executePlan(testData.logicalPlan).sparkPlan
    val scan = InMemoryRelation(new TestCachedBatchSerializer(useCompression = true, 5),
      MEMORY_ONLY, plan, None, testData.logicalPlan)

    checkAnswer(scan, testData.collect().toSeq)
  }

  test("default size avoids broadcast") {
    withTempView("sizeTst") {
      // TODO: Improve this test when we have better statistics
      sparkContext.parallelize(1 to 10).map(i => TestData(i, i.toString))
        .toDF().createOrReplaceTempView("sizeTst")
      spark.catalog.cacheTable("sizeTst")
      assert(
        spark.table("sizeTst").queryExecution.analyzed.stats.sizeInBytes >
          spark.conf.get(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD))
    }
  }

  test("projection") {
    val logicalPlan = testData.select('value, 'key).logicalPlan
    val plan = spark.sessionState.executePlan(logicalPlan).sparkPlan
    val scan = InMemoryRelation(new TestCachedBatchSerializer(useCompression = true, 5),
      MEMORY_ONLY, plan, None, logicalPlan)

    checkAnswer(scan, testData.collect().map {
      case Row(key: Int, value: String) => value -> key
    }.map(Row.fromTuple))
  }

  test("access only some column of the all of columns") {
    val df = spark.range(1, 100).map(i => (i, (i + 1).toFloat)).toDF("i", "f")
    df.cache()
    df.count() // forced to build cache
    assert(df.filter("f <= 10.0").count() == 9)
  }

  test("SPARK-1436 regression: in-memory columns must be able to be accessed multiple times") {
    val plan = spark.sessionState.executePlan(testData.logicalPlan).sparkPlan
    val scan = InMemoryRelation(new TestCachedBatchSerializer(useCompression = true, 5),
      MEMORY_ONLY, plan, None, testData.logicalPlan)

    checkAnswer(scan, testData.collect().toSeq)
    checkAnswer(scan, testData.collect().toSeq)
  }

  test("SPARK-1678 regression: compression must not lose repeated values") {
    checkAnswer(
      sql("SELECT * FROM repeatedData"),
      repeatedData.collect().toSeq.map(Row.fromTuple))

    spark.catalog.cacheTable("repeatedData")

    checkAnswer(
      sql("SELECT * FROM repeatedData"),
      repeatedData.collect().toSeq.map(Row.fromTuple))
  }

  test("with null values") {
    checkAnswer(
      sql("SELECT * FROM nullableRepeatedData"),
      nullableRepeatedData.collect().toSeq.map(Row.fromTuple))

    spark.catalog.cacheTable("nullableRepeatedData")

    checkAnswer(
      sql("SELECT * FROM nullableRepeatedData"),
      nullableRepeatedData.collect().toSeq.map(Row.fromTuple))
  }

  test("SPARK-2729 regression: timestamp data type") {
    withTempView("timestamps") {
      val timestamps = (0 to 3).map(i => Tuple1(new Timestamp(i))).toDF("time")
      timestamps.createOrReplaceTempView("timestamps")

      checkAnswer(
        sql("SELECT time FROM timestamps"),
        timestamps.collect().toSeq)

      spark.catalog.cacheTable("timestamps")

      checkAnswer(
        sql("SELECT time FROM timestamps"),
        timestamps.collect().toSeq)
    }
  }

  test("SPARK-3320 regression: batched column buffer building should work with empty partitions") {
    checkAnswer(
      sql("SELECT * FROM withEmptyParts"),
      withEmptyParts.collect().toSeq.map(Row.fromTuple))

    spark.catalog.cacheTable("withEmptyParts")

    checkAnswer(
      sql("SELECT * FROM withEmptyParts"),
      withEmptyParts.collect().toSeq.map(Row.fromTuple))
  }

  test("SPARK-4182 Caching complex types") {
    complexData.cache().count()
    // Shouldn't throw
    complexData.count()
    complexData.unpersist()
  }

  test("decimal type") {
    // Casting is required here because ScalaReflection can't capture decimal precision information.
    val df = (1 to 10)
      .map(i => Tuple1(Decimal(i, 15, 10).toJavaBigDecimal))
      .toDF("dec")
      .select($"dec" cast DecimalType(15, 10))
    assert(df.schema.head.dataType === DecimalType(15, 10))

    withTempView("test_fixed_decimal") {
      df.cache().createOrReplaceTempView("test_fixed_decimal")
      checkAnswer(
        sql("SELECT * FROM test_fixed_decimal"),
        (1 to 10).map(i => Row(Decimal(i, 15, 10).toJavaBigDecimal)))
    }
  }

  test("test different data types") {
    // Create the schema.
    val struct =
      StructType(
        StructField("f1", FloatType, true) ::
        StructField("f2", ArrayType(BooleanType), true) :: Nil)
    val dataTypes =
      Seq(StringType, BinaryType, NullType, BooleanType,
        ByteType, ShortType, IntegerType, LongType,
        FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5),
        DateType, TimestampType, ArrayType(IntegerType), struct)
    val fields = dataTypes.zipWithIndex.map { case (dataType, index) =>
      StructField(s"col$index", dataType, true)
    }
    val allColumns = fields.map(_.name).mkString(",")
    val schema = StructType(fields)

    // Create an RDD for the schema
    val rdd =
      sparkContext.parallelize(1 to 10000, 10).map { i =>
        Row(
          s"str$i: test cache.",
          s"binary$i: test cache.".getBytes(StandardCharsets.UTF_8),
          null,
          i % 2 == 0,
          i.toByte,
          i.toShort,
          i,
          Long.MaxValue - i.toLong,
          (i + 0.25).toFloat,
          i + 0.75,
          BigDecimal(Long.MaxValue.toString + ".12345"),
          new java.math.BigDecimal(s"${i % 9 + 1}" + ".23456"),
          new Date(i),
          new Timestamp(i * 1000000L),
          i to i + 10,
          Row((i - 0.25).toFloat, Seq(true, false, null)))
      }
    spark.createDataFrame(rdd, schema).createOrReplaceTempView("InMemoryCache_different_data_types")
    // Cache the table.
    sql("cache table InMemoryCache_different_data_types")
    // Make sure the table is indeed cached.
    spark.table("InMemoryCache_different_data_types").queryExecution.executedPlan
    assert(
      spark.catalog.isCached("InMemoryCache_different_data_types"),
      "InMemoryCache_different_data_types should be cached.")
    // Issue a query and check the results.
    checkAnswer(
      sql(s"SELECT DISTINCT ${allColumns} FROM InMemoryCache_different_data_types"),
      spark.table("InMemoryCache_different_data_types").collect())
    spark.catalog.dropTempView("InMemoryCache_different_data_types")
  }

  test("SPARK-10422: String column in InMemoryColumnarCache needs to override clone method") {
    val df = spark.range(1, 100).selectExpr("id % 10 as id")
      .rdd.map(id => Tuple1(s"str_$id")).toDF("i")
    val cached = df.cache()
    // count triggers the caching action. It should not throw.
    cached.count()

    // Make sure, the DataFrame is indeed cached.
    assert(spark.sharedState.cacheManager.lookupCachedData(cached).nonEmpty)

    // Check result.
    checkAnswer(
      cached,
      spark.range(1, 100).selectExpr("id % 10 as id")
        .rdd.map(id => Tuple1(s"str_$id")).toDF("i")
    )

    // Drop the cache.
    cached.unpersist()
  }

  test("SPARK-10859: Predicates pushed to InMemoryColumnarTableScan are not evaluated correctly") {
    val data = spark.range(10).selectExpr("id", "cast(id as string) as s")
    data.cache()
    assert(data.count() === 10)
    assert(data.filter($"s" === "3").count() === 1)
  }

  test("SPARK-14138: Generated SpecificColumnarIterator can exceed JVM size limit for cached DF") {
    // Generation must not throw; the iterators themselves are not used.
    val length1 = 3999
    val columnTypes1 = List.fill(length1)(IntegerType)
    val columnarIterator1 = GenerateColumnAccessor.generate(columnTypes1)

    // SPARK-16664: the limit of janino is 8117
    val length2 = 8117
    val columnTypes2 = List.fill(length2)(IntegerType)
    val columnarIterator2 = GenerateColumnAccessor.generate(columnTypes2)
  }

  test("SPARK-17549: cached table size should be correctly calculated") {
    val data = spark.sparkContext.parallelize(1 to 10, 5).toDF()
    val plan = spark.sessionState.executePlan(data.logicalPlan).sparkPlan
    val cached = InMemoryRelation(new TestCachedBatchSerializer(true, 5),
      MEMORY_ONLY, plan, None, data.logicalPlan)

    // Materialize the data.
    val expectedAnswer = data.collect()
    checkAnswer(cached, expectedAnswer)

    // Check that the right size was calculated.
    assert(cached.cacheBuilder.sizeInBytesStats.value === expectedAnswer.size * INT.defaultSize)
  }

  test("cached row count should be calculated") {
    val data = spark.range(6).toDF
    val plan = spark.sessionState.executePlan(data.logicalPlan).sparkPlan
    val cached = InMemoryRelation(new TestCachedBatchSerializer(true, 5),
      MEMORY_ONLY, plan, None, data.logicalPlan)

    // Materialize the data.
    val expectedAnswer = data.collect()
    checkAnswer(cached, expectedAnswer)

    // Check that the right row count was calculated.
    assert(cached.cacheBuilder.rowCountStats.value === 6)
  }

  test("access primitive-type columns in CachedBatch without whole stage codegen") {
    // whole stage codegen is not applied to a row with more than WHOLESTAGE_MAX_NUM_FIELDS fields
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      val data = Seq(null, true, 1.toByte, 3.toShort, 7, 15.toLong,
        31.25.toFloat, 63.75, new Date(127), new Timestamp(255000000L), null)
      val dataTypes = Seq(NullType, BooleanType, ByteType, ShortType, IntegerType, LongType,
        FloatType, DoubleType, DateType, TimestampType, IntegerType)
      val schemas = dataTypes.zipWithIndex.map { case (dataType, index) =>
        StructField(s"col$index", dataType, true)
      }
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }

  test("access decimal/string-type columns in CachedBatch without whole stage codegen") {
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      val data = Seq(BigDecimal(Long.MaxValue.toString + ".12345"),
        new java.math.BigDecimal("1234567890.12345"),
        new java.math.BigDecimal("1.23456"),
        "test123"
      )
      val schemas = Seq(
        StructField("col0", DecimalType(25, 5), true),
        StructField("col1", DecimalType(15, 5), true),
        StructField("col2", DecimalType(6, 5), true),
        StructField("col3", StringType, true)
      )
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }

  test("access non-primitive-type columns in CachedBatch without whole stage codegen") {
    withSQLConf(SQLConf.WHOLESTAGE_MAX_NUM_FIELDS.key -> "2") {
      val data = Seq((1 to 10).toArray,
        Array(Array(10, 11), Array(100, 111, 123)),
        Map("key1" -> 111, "key2" -> 222),
        Row(1.25.toFloat, Seq(true, false, null))
      )
      val struct = StructType(StructField("f1", FloatType, false) ::
        StructField("f2", ArrayType(BooleanType), true) :: Nil)
      val schemas = Seq(
        StructField("col0", ArrayType(IntegerType), true),
        StructField("col1", ArrayType(ArrayType(IntegerType)), true),
        StructField("col2", MapType(StringType, IntegerType), true),
        StructField("col3", struct, true)
      )
      val rdd = sparkContext.makeRDD(Seq(Row.fromSeq(data)))
      val df = spark.createDataFrame(rdd, StructType(schemas))
      val row = df.persist.take(1).apply(0)
      checkAnswer(df, row)
    }
  }

  test("InMemoryTableScanExec should return correct output ordering and partitioning") {
    val df1 = Seq((0, 0), (1, 1)).toDF
      .repartition(col("_1")).sortWithinPartitions(col("_1")).persist
    val df2 = Seq((0, 0), (1, 1)).toDF
      .repartition(col("_1")).sortWithinPartitions(col("_1")).persist

    // Because two cached dataframes have the same logical plan, this is a self-join actually.
    // So we force one of in-memory relation to alias its output. Then we can test if original and
    // aliased in-memory relations have correct ordering and partitioning.
    val joined = df1.joinWith(df2, df1("_1") === df2("_1"))

    val inMemoryScans = joined.queryExecution.executedPlan.collect {
      case m: InMemoryTableScanExec => m
    }
    inMemoryScans.foreach { inMemoryScan =>
      val sortedAttrs = AttributeSet(inMemoryScan.outputOrdering.flatMap(_.references))
      assert(sortedAttrs.subsetOf(inMemoryScan.outputSet))

      val partitionedAttrs =
        inMemoryScan.outputPartitioning.asInstanceOf[HashPartitioning].references
      assert(partitionedAttrs.subsetOf(inMemoryScan.outputSet))
    }
  }

  test("SPARK-20356: pruned InMemoryTableScanExec should have correct ordering and partitioning") {
    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "200") {
      val df1 = Seq(("a", 1), ("b", 1), ("c", 2)).toDF("item", "group")
      val df2 = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("item", "id")
      val df3 = df1.join(df2, Seq("item")).select($"id", $"group".as("item")).distinct()

      df3.unpersist(blocking = true)
      val agg_without_cache = df3.groupBy($"item").count()

      df3.cache()
      val agg_with_cache = df3.groupBy($"item").count()
      checkAnswer(agg_without_cache, agg_with_cache)
    }
  }

  test("SPARK-22249: IN should work also with cached DataFrame") {
    val df = spark.range(10).cache()
    // with an empty list
    assert(df.filter($"id".isin()).count() == 0)
    // with a non-empty list
    assert(df.filter($"id".isin(2)).count() == 1)
    assert(df.filter($"id".isin(2, 3)).count() == 2)
    df.unpersist(blocking = true)
    val dfNulls = spark.range(10).selectExpr("null as id").cache()
    // with null as value for the attribute
    assert(dfNulls.filter($"id".isin()).count() == 0)
    assert(dfNulls.filter($"id".isin(2, 3)).count() == 0)
    dfNulls.unpersist()
  }

  test("SPARK-22249: buildFilter should not throw exception when In contains an empty list") {
    val attribute = AttributeReference("a", IntegerType)()
    val testSerializer = new TestCachedBatchSerializer(false, 1)
    testSerializer.buildFilter(Seq(In(attribute, Nil)), Seq(attribute))
  }

  testWithWholeStageCodegenOnAndOff("SPARK-22348: table cache " +
    "should do partition batch pruning") { codegenEnabled =>
    val df1 = Seq((1, 1), (1, 1), (2, 2)).toDF("x", "y")
    df1.unpersist(blocking = true)
    df1.cache()

    // Push predicate to the cached table.
    val df2 = df1.where("y = 3")

    val planBeforeFilter = df2.queryExecution.executedPlan.collect {
      case f: FilterExec => f.child
      case WholeStageCodegenExec(FilterExec(_, i: InputAdapter)) => i.child
    }
    assert(planBeforeFilter.head.isInstanceOf[InMemoryTableScanExec])

    val execPlan = planBeforeFilter.head
    assert(execPlan.executeCollectPublic().length == 0)
  }

  test("SPARK-25727 - otherCopyArgs in InMemoryRelation does not include outputOrdering") {
    val data = Seq(100).toDF("count").cache()
    val json = data.queryExecution.optimizedPlan.toJSON
    assert(json.contains("outputOrdering"))
  }

  test("SPARK-22673: InMemoryRelation should utilize existing stats of the plan to be cached") {
    Seq("orc", "").foreach { useV1SourceReaderList =>
      // This test case depends on the size of ORC in statistics.
      withSQLConf(
        SQLConf.CBO_ENABLED.key -> "true",
        SQLConf.DEFAULT_DATA_SOURCE_NAME.key -> "orc",
        SQLConf.USE_V1_SOURCE_LIST.key -> useV1SourceReaderList) {
        withTempPath { workDir =>
          withTable("table1") {
            val workDirPath = workDir.getAbsolutePath
            val data = Seq(100, 200, 300, 400).toDF("count")
            data.write.orc(workDirPath)
            val dfFromFile = spark.read.orc(workDirPath).cache()
            val inMemoryRelation = dfFromFile.queryExecution.optimizedPlan.collect {
              case plan: InMemoryRelation => plan
            }.head
            // InMemoryRelation's stats is file size before the underlying RDD is materialized
            assert(inMemoryRelation.computeStats().sizeInBytes === getLocalDirSize(workDir))

            // InMemoryRelation's stats is updated after materializing RDD
            dfFromFile.collect()
            assert(inMemoryRelation.computeStats().sizeInBytes === 16)

            // test of catalog table
            val dfFromTable = spark.catalog.createTable("table1", workDirPath).cache()
            val inMemoryRelation2 = dfFromTable.queryExecution.optimizedPlan.
              collect { case plan: InMemoryRelation => plan }.head

            // Even CBO enabled, InMemoryRelation's stats keeps as the file size before table's
            // stats is calculated
            assert(inMemoryRelation2.computeStats().sizeInBytes === getLocalDirSize(workDir))

            // InMemoryRelation's stats should be updated after calculating stats of the table
            // clear cache to simulate a fresh environment
            dfFromTable.unpersist(blocking = true)
            spark.sql("ANALYZE TABLE table1 COMPUTE STATISTICS")
            val inMemoryRelation3 = spark.read.table("table1").cache().queryExecution.optimizedPlan.
              collect { case plan: InMemoryRelation => plan }.head
            assert(inMemoryRelation3.computeStats().sizeInBytes === 48)
          }
        }
      }
    }
  }
}
| shaneknapp/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/columnar/InMemoryColumnarQuerySuite.scala | Scala | apache-2.0 | 23,272 |
package filodb.prometheus.ast
import filodb.query.{Aggregate, AggregationOperator, PeriodicSeriesPlan}
trait Aggregates extends Vectors with TimeUnits with Base {

  /** Grouping clause of an aggregation: keep only ("by") or drop ("without")
    * the given label names. */
  sealed trait AggregateGrouping {
    def labels: Seq[String]
  }
  case class Without(labels: Seq[String]) extends AggregateGrouping
  case class By(labels: Seq[String]) extends AggregateGrouping

  /** A parsed aggregation expression such as `sum(foo)` or `topk(5, foo)`.
    *
    * `params` and `altFunctionParams` are the two syntactic positions the
    * grammar allows for the argument list; at most one of them may be
    * non-empty. All validation runs eagerly at construction time and throws
    * IllegalArgumentException on failure.
    */
  case class AggregateExpression(name: String, params: Seq[Expression],
                                 aggregateGrouping: Option[AggregateGrouping],
                                 altFunctionParams: Seq[Expression]) extends Expression with PeriodicSeries {

    val aggOpOption: Option[AggregationOperator] = AggregationOperator.withNameInsensitiveOption(name)
    if (aggOpOption.isEmpty) {
      throw new IllegalArgumentException(s"Unsupported aggregation operator [$name]")
    }
    if (params.nonEmpty && altFunctionParams.nonEmpty) {
      throw new IllegalArgumentException("Can define function params only once")
    }

    val allParams: Seq[Expression] = if (params.isEmpty) altFunctionParams else params
    if (allParams.isEmpty || allParams.size > 2) {
      // Typo fix: message previously read "utmost 2 parameters".
      throw new IllegalArgumentException("Aggregate functions have at least 1 parameter and at most 2 parameters")
    }

    // Extra (scalar/string) argument for count_values/quantile/topk/bottomk;
    // kept as a public var for interface compatibility.
    var parameter: Seq[Any] = Nil
    private val aggOp = aggOpOption.get
    // These four operators require a leading scalar/string argument.
    private val secondParamNeeded = aggOp.equals(AggregationOperator.BottomK) ||
      aggOp.equals(AggregationOperator.TopK) || aggOp.equals(AggregationOperator.CountValues) ||
      aggOp.equals(AggregationOperator.Quantile)
    if (secondParamNeeded && allParams.size < 2)
      throw new IllegalArgumentException("2 parameters required for count_values, quantile, topk and bottomk")
    if (allParams.size == 2) {
      if (!secondParamNeeded) {
        throw new IllegalArgumentException("parameter is only required for count_values, quantile, topk and bottomk")
      }
      allParams.head match {
        case num: ScalarExpression =>
          parameter = Seq(num.toScalar)
        case s: InstantExpression =>
          // NOTE(review): assumes the instant expression always carries a
          // metric name here; `metricName.get` throws otherwise — confirm the
          // parser guarantees this.
          parameter = Seq(s.metricName.get)
        case _ =>
          throw new IllegalArgumentException("First parameter to aggregate operator can be a string or number")
      }
    }

    // The series being aggregated is the last argument.
    val last: Expression = if (allParams.size == 1) allParams.head else allParams(1)
    val series: PeriodicSeries = last match {
      case s: PeriodicSeries => s
      case _ =>
        throw new IllegalArgumentException(
          s"Second parameter to aggregate operator $name should be a vector, is instead $last"
        )
    }

    /** Lowers this expression into a query-engine Aggregate plan. */
    def toSeriesPlan(timeParams: TimeRangeParams): PeriodicSeriesPlan = {
      val periodicSeriesPlan = series.toSeriesPlan(timeParams)
      // The three grouping cases previously duplicated the full Aggregate(...)
      // call; they differ only in the by/without label lists.
      val (byLabels, withoutLabels) = aggregateGrouping match {
        case Some(b: By)      => (b.labels, Nil)
        case Some(w: Without) => (Nil, w.labels)
        case None             => (Nil, Nil)
      }
      Aggregate(aggOp, periodicSeriesPlan, parameter, byLabels, withoutLabels)
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.mesos
import java.util.concurrent.TimeUnit
import org.apache.spark.internal.config.ConfigBuilder
package object config {

  /* Common app configuration. */

  // How often (in seconds) shuffle-file cleanup runs; defaults to 30s.
  private[spark] val SHUFFLE_CLEANER_INTERVAL_S =
    ConfigBuilder("spark.shuffle.cleaner.interval")
      .timeConf(TimeUnit.SECONDS)
      .createWithDefaultString("30s")

  // Dispatcher recovery mode; "NONE" disables recovery (see ZOOKEEPER_URL below).
  private[spark] val RECOVERY_MODE =
    ConfigBuilder("spark.deploy.recoveryMode")
      .stringConf
      .createWithDefault("NONE")

  private[spark] val DISPATCHER_WEBUI_URL =
    ConfigBuilder("spark.mesos.dispatcher.webui.url")
      .doc("Set the Spark Mesos dispatcher webui_url for interacting with the " +
        "framework. If unset it will point to Spark's internal web UI.")
      .stringConf
      .createOptional

  private[spark] val ZOOKEEPER_URL =
    ConfigBuilder("spark.deploy.zookeeper.url")
      .doc("When `spark.deploy.recoveryMode` is set to ZOOKEEPER, this " +
        "configuration is used to set the zookeeper URL to connect to.")
      .stringConf
      .createOptional

  private[spark] val HISTORY_SERVER_URL =
    ConfigBuilder("spark.mesos.dispatcher.historyServer.url")
      .doc("Set the URL of the history server. The dispatcher will then " +
        "link each driver to its entry in the history server.")
      .stringConf
      .createOptional

  private[spark] val DRIVER_LABELS =
    ConfigBuilder("spark.mesos.driver.labels")
      // Fixed missing space between sentences: the two literals previously
      // concatenated to "...more than one.Ex. key:value...".
      .doc("Mesos labels to add to the driver. Labels are free-form key-value pairs. Key-value " +
        "pairs should be separated by a colon, and commas used to list more than one. " +
        "Ex. key:value,key2:value2")
      .stringConf
      .createOptional

  private[spark] val DRIVER_FAILOVER_TIMEOUT =
    ConfigBuilder("spark.mesos.driver.failoverTimeout")
      .doc("Amount of time in seconds that the master will wait to hear from the driver, " +
        "during a temporary disconnection, before tearing down all the executors.")
      .doubleConf
      .createWithDefault(0.0)

  private[spark] val NETWORK_NAME =
    ConfigBuilder("spark.mesos.network.name")
      .doc("Attach containers to the given named network. If this job is launched " +
        "in cluster mode, also launch the driver in the given named network.")
      .stringConf
      .createOptional

  private[spark] val NETWORK_LABELS =
    ConfigBuilder("spark.mesos.network.labels")
      .doc("Network labels to pass to CNI plugins. This is a comma-separated list " +
        "of key-value pairs, where each key-value pair has the format key:value. " +
        "Example: key1:val1,key2:val2")
      .stringConf
      .createOptional
}
| stanzhai/spark | resource-managers/mesos/src/main/scala/org/apache/spark/deploy/mesos/config.scala | Scala | apache-2.0 | 3,469 |
package com.twitter.finagle.scribe
import com.twitter.finagle.{Service, SimpleFilter}
import com.twitter.finagle.thrift.scribe.thriftscala.Scribe.Log
import com.twitter.util.Future
/**
 * Filter that reports the outcome of every Scribe `Log` call to [[ScribeStats]].
 *
 * The request passes through unchanged; `stats.respond` merely observes the
 * completed response (success or failure) once it resolves.
 */
private[scribe] class ScribeMetricsFilter(stats: ScribeStats)
    extends SimpleFilter[Log.Args, Log.SuccessType] {

  def apply(
    req: Log.Args,
    svc: Service[Log.Args, Log.SuccessType]
  ): Future[Log.SuccessType] = {
    // Forward the call, then record its result as a side effect of resolution.
    val rep = svc(req)
    rep.respond(stats.respond)
  }
}
| twitter/finagle | finagle-scribe/src/main/scala/com/twitter/finagle/scribe/ScribeMetricsFilter.scala | Scala | apache-2.0 | 444 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.dataset
import org.apache.calcite.plan.{RelOptCluster, RelOptCost, RelOptPlanner, RelTraitSet}
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.Calc
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.rel.{RelNode, RelWriter}
import org.apache.calcite.rex._
import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.api.java.DataSet
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.BatchTableEnvironment
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.codegen.FunctionCodeGenerator
import org.apache.flink.table.plan.nodes.CommonCalc
import org.apache.flink.table.plan.schema.RowSchema
import org.apache.flink.table.runtime.FlatMapRunner
import org.apache.flink.types.Row
import scala.collection.JavaConverters._
/**
* Flink RelNode which matches along with LogicalCalc.
*
*/
/**
 * Flink batch RelNode matching a LogicalCalc (projection + optional filter).
 *
 * Code-generates a [[FlatMapFunction]] that applies the calc program's
 * projections and condition to each input [[Row]].
 */
class DataSetCalc(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    input: RelNode,
    rowRelDataType: RelDataType,
    calcProgram: RexProgram,
    ruleDescription: String)
  extends Calc(cluster, traitSet, input, calcProgram)
  with CommonCalc
  with DataSetRel {

  override def deriveRowType(): RelDataType = rowRelDataType

  override def copy(traitSet: RelTraitSet, child: RelNode, program: RexProgram): Calc =
    new DataSetCalc(cluster, traitSet, child, getRowType, program, ruleDescription)

  override def toString: String = calcToString(calcProgram, getExpressionString)

  override def explainTerms(pw: RelWriter): RelWriter =
    pw.input("input", getInput)
      .item("select", selectionToString(calcProgram, getExpressionString))
      .itemIf("where",
        conditionToString(calcProgram, getExpressionString),
        calcProgram.getCondition != null)

  override def computeSelfCost(planner: RelOptPlanner, metadata: RelMetadataQuery): RelOptCost =
    computeSelfCost(calcProgram, planner, metadata.getRowCount(this.getInput))

  override def estimateRowCount(metadata: RelMetadataQuery): Double =
    estimateRowCount(calcProgram, metadata.getRowCount(this.getInput))

  override def translateToPlan(tableEnv: BatchTableEnvironment): DataSet[Row] = {
    val config = tableEnv.getConfig

    val inputDataSet = getInput.asInstanceOf[DataSetRel].translateToPlan(tableEnv)

    val codeGen = new FunctionCodeGenerator(config, false, inputDataSet.getType)

    val rowTypeInfo = FlinkTypeFactory.toInternalRowTypeInfo(getRowType).asInstanceOf[RowTypeInfo]

    // Expand local refs so the generated code works on fully resolved expressions.
    val projection = calcProgram.getProjectList.asScala.map(calcProgram.expandLocalRef)
    // Option(...) maps a null condition to None, matching the old explicit null check.
    val condition = Option(calcProgram.getCondition).map(calcProgram.expandLocalRef)

    val genFunction = generateFunction(
      codeGen,
      ruleDescription,
      new RowSchema(getInput.getRowType),
      new RowSchema(getRowType),
      projection,
      condition,
      config,
      classOf[FlatMapFunction[Row, Row]])

    val mapper = new FlatMapRunner(genFunction.name, genFunction.code, rowTypeInfo)

    inputDataSet.flatMap(mapper).name(calcOpName(calcProgram, getExpressionString))
  }
}
| haohui/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetCalc.scala | Scala | apache-2.0 | 4,162 |
package s99.p14
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import P14._
@RunWith(classOf[JUnitRunner])
class P14Suite extends FunSuite {

  // duplicate(xs) should repeat every element of xs exactly twice, in order.

  test("Duplicate on empty list returns empty list") {
    val xs = List()
    assert(xs === duplicate(xs))
  }

  test("Duplicate on single element list returns list with 2 elements") {
    assert(List(1, 1) === duplicate(List(1)))
  }

  test("Duplicate on multiple element list returns list with duplicated elements") {
    val xs = List('a', 'b', 'c', 'c', 'd')
    val expected = List('a', 'a', 'b', 'b', 'c', 'c', 'c', 'c', 'd', 'd')
    assert(expected === duplicate(xs))
  }

  test("Duplicate on large list does not fail with stack overflow") {
    // Guards against a non-tail-recursive implementation blowing the stack.
    val size = 100000
    val xs = (1 to size).toList
    val doubled = duplicate(xs)
    assert(size * 2 === doubled.length)
    assert(xs === doubled.distinct)
  }
}
package colossus
package controller
import akka.util.ByteString
import colossus.testkit._
import core._
import scala.util.{Try, Failure, Success}
/**
 * Tests for the input (read) side of the controller: decoding length-prefixed
 * stream messages, read-event backpressure when the pipe fills, and pipe
 * termination on disconnect.
 *
 * Wire format used throughout: "<size>\\r\\n<bytes>" — a decimal size prefix
 * followed by exactly that many payload bytes.
 */
class InputControllerSpec extends ColossusSpec with CallbackMatchers{

  import TestController.createController

  "Input Controller" must {

    "decode a stream message" in {
      val expected = ByteString("Hello world!")
      // Frame the payload with its decimal length prefix.
      val request = ByteString(expected.size.toString) ++ ByteString("\\r\\n") ++ expected
      var called = false
      val (endpoint, con) = createController({input =>
        input.source.pullCB().execute{
          case Success(Some(data)) => {
            ByteString(data.takeAll) must equal(expected)
            called = true
          }
          case _ => throw new Exception("wrong result")
        }
      })
      // The pull callback must only fire once data actually arrives.
      called must equal(false)
      con.receivedData(DataBuffer(request))
      called must equal(true)
    }

    "disconnect from read events when pipe fills up" in {
      var source: Option[Source[DataBuffer]] = None
      val (endpoint, con) = createController({input =>
        source = Some(input.source)
      })
      endpoint.readsEnabled must equal(true)
      // Header alone does not produce body data, so reads stay enabled.
      con.receivedData(DataBuffer(ByteString("4\\r\\n")))
      source.isDefined must equal(true)
      endpoint.readsEnabled must equal(true)
      // First body byte fills the (undrained) pipe; reads must be disabled.
      con.receivedData(DataBuffer(ByteString("a")))
      endpoint.readsEnabled must equal(false)
      var executed = false
      // Fold over the stream, counting bytes; draining the pipe re-enables reads.
      source.get.fold(0){(a, b) => b + a.takeAll.length}.execute{
        case Success(4) => {executed = true}
        case other => {
          throw new Exception(s"bad result $other")
        }
      }
      //we have begun execution of the fold, which should drain the pipe, but
      //execution should not yet be complete
      executed must equal(false)
      endpoint.readsEnabled must equal(true)
      con.receivedData(DataBuffer(ByteString("b")))
      con.receivedData(DataBuffer(ByteString("c")))
      con.receivedData(DataBuffer(ByteString("d")))
      //now it should be done
      executed must equal(true)
    }

    "stream is terminated when connection disrupted" in {
      var source: Option[Source[DataBuffer]] = None
      val (endpoint, con) = createController({input =>
        source = Some(input.source)
      })
      con.receivedData(DataBuffer(ByteString("4\\r\\n")))
      source.isDefined must equal(true)
      val s = source.get
      // Abruptly kill the connection mid-message.
      endpoint.disrupt()
      var failed = false
      var wrong = false
      // A pull on the dead stream must fail with PipeTerminatedException.
      s.pullCB().execute {
        case Failure(t: PipeTerminatedException) => failed = true
        case other => {
          wrong = true
          throw new Exception(s"Invalid result $other")
        }
      }
      wrong must equal(false)
      failed must equal(true)
    }

    "input stream allowed to complete during graceful disconnect" in {
      var source: Option[Source[DataBuffer]] = None
      val (endpoint, con) = createController({input =>
        source = Some(input.source)
      })
      endpoint.readsEnabled must equal(true)
      // Half the 4-byte message arrives; the full pipe disables reads.
      con.receivedData(DataBuffer(ByteString("4\\r\\nab")))
      endpoint.readsEnabled must equal(false)
      var total = 0
      // Start draining, which re-enables reads so the rest of the message can arrive.
      source.get.fold(0){(buf, len) => len + buf.size}.execute{
        case Success(l) => total = l
        case Failure(err) => throw err
      }
      endpoint.readsEnabled must equal(true)
      con.testGracefulDisconnect()
      //input stream is not done, so reads should still be enabled
      endpoint.readsEnabled must equal(true)
      con.receivedData(DataBuffer(ByteString("cd")))
      //the stream should have finished, so now reads should be disabled
      endpoint.readsEnabled must equal(false)
      total must equal(4)
    }

  }
}
| zgagnon/colossus | colossus-tests/src/test/scala/colossus/controller/InputControllerSpec.scala | Scala | apache-2.0 | 3,718 |
package com.sksamuel.elastic4s.testkit
import com.sksamuel.elastic4s.http.ElasticClient
/**
 * Supplies the Elasticsearch HTTP client used by test-kit components.
 * Mix in and implement `client` to plug in a concrete [[ElasticClient]].
 */
trait ClientProvider {
  def client: ElasticClient
}
| Tecsisa/elastic4s | elastic4s-testkit/src/main/scala/com/sksamuel/elastic4s/testkit/ClientProvider.scala | Scala | apache-2.0 | 143 |
package com.sfxcode.sapphire.extension.skin
import javafx.scene.control.SkinBase
import com.sfxcode.sapphire.core.control.FXListCellFactory
import com.sfxcode.sapphire.extension.control.DataListView
import javafx.scene.control._
import javafx.scene.layout.{ HBox, VBox }
/**
 * Skin for [[DataListView]]: stacks an optional header box, the list view
 * itself, and an optional footer box (containing the footer label) in a VBox.
 * Rebuilds the layout whenever the view's header/footer/visibility/cell
 * properties change.
 */
class DataListViewSkin[S <: AnyRef](view: DataListView[S]) extends SkinBase[DataListView[S]](view) {

  // Vertical container holding header / list / footer, in that order.
  val contentBox = new VBox()
  contentBox.setSpacing(5)
  contentBox.getStyleClass.add("content-box")

  // Default footer label; exposed back to the view via its footerLabel property.
  val label = new Label("Footer Label")
  label.getStyleClass.add("footer-label")
  view.footerLabel.set(label)

  val headerBox = new HBox
  headerBox.getStyleClass.add("header-box")
  view.header.set(headerBox)

  val footerBox = new HBox
  footerBox.getStyleClass.add("footer-box")
  footerBox.getChildren.add(label)
  view.footer.set(footerBox)

  updateCellFactory()

  // Any change to cell rendering or header/footer configuration triggers a relayout.
  view.cellProperty.addListener((_, _, _) => updateView())
  view.header.addListener((_, _, _) => updateView())
  view.showHeader.addListener((_, _, _) => updateView())
  view.footer.addListener((_, _, _) => updateView())
  view.showFooter.addListener((_, _, _) => updateView())

  getChildren.add(contentBox)
  updateView()

  // Rebuilds contentBox children: header (if shown), list, footer (if shown).
  def updateView(): Unit = {
    contentBox.getChildren.clear()
    if (view.showHeader.get && view.header.get != null)
      contentBox.getChildren.add(view.header.get)
    contentBox.getChildren.add(view.listView)
    if (view.showFooter.get && view.footer.get != null)
      contentBox.getChildren.add(view.footer.get)
  }

  // Installs a cell factory configured from the view's cell property.
  def updateCellFactory(): Unit = {
    val cellFactory = new FXListCellFactory[S]
    cellFactory.setProperty(view.cellProperty.get)
    view.listView.setCellFactory(cellFactory)
  }
}
| sfxcode/sapphire-extension | src/main/scala/com/sfxcode/sapphire/extension/skin/DataListViewSkin.scala | Scala | apache-2.0 | 1,696 |
/**
Copyright (C) 2011-2014 beamly Ltd. http://beamly.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**/
package beamly.core.lang.future
import scala.concurrent.{Future, ExecutionContext}
/**
 * Type class abstracting over "a value that may or may not already be a Future".
 * `Value` is the underlying result type: `A` for both `Future[A]` and plain `A` inputs.
 */
sealed trait MaybeFuture[In] {
  type Value
  // Lift (or pass through) the computation as a Future of the underlying value type.
  def apply(block: => In)(implicit executor: ExecutionContext): Future[Value]
  // Combine `that` with `f`, flattening when `f` already produces a Future.
  def flatMap[T](that: Future[T], f: T => In)(implicit executor: ExecutionContext): Future[Value]
}
object MaybeFuture extends MaybeFutureLowPriority {

  def apply[In](block: => In)(implicit maybeFuture: MaybeFuture[In], executionContext: ExecutionContext) = maybeFuture(block)

  // Higher-priority instance: when In is already Future[A], pass it straight through.
  // The cast refines the abstract `Value` member to A; the shared singleton only ever
  // handles Future values, so the erased instance is reused for every A.
  implicit final def futureMaybeFuture[A] = FutureMaybeFuture.asInstanceOf[MaybeFuture[Future[A]] { type Value = A } ]

  private object FutureMaybeFuture extends MaybeFuture[Future[Any]] {
    type Value = Any
    // Already a Future: no wrapping needed.
    def apply(block: => Future[Any])(implicit executionContext: ExecutionContext) = block
    // f returns a Future, so flatMap flattens the nesting.
    def flatMap[T](that: Future[T], f: T => Future[Any])(implicit executor: ExecutionContext): Future[Any] = that flatMap f
  }
}
// Low-priority fallback (extended by the companion so it loses implicit resolution
// to futureMaybeFuture): treats any plain value as a computation to run in a Future.
trait MaybeFutureLowPriority {

  // Cast refines `Value` to A for the shared erased singleton, mirroring futureMaybeFuture.
  implicit final def identityMaybeFuture[A] = IdentityMaybeFuture.asInstanceOf[MaybeFuture[A] { type Value = A } ]

  private object IdentityMaybeFuture extends MaybeFuture[Any] {
    type Value = Any
    // Plain value: wrap the (lazily-passed) computation in Future(...).
    def apply(block: => Any)(implicit executionContext: ExecutionContext) = Future(block)
    // f returns a plain value, so map suffices.
    def flatMap[T](that: Future[T], f: T => Any)(implicit executor: ExecutionContext): Future[Any] = that map f
  }
}
| beamly/beamly.core.lang | src/main/scala/beamly/core/lang/future/MaybeFuture.scala | Scala | apache-2.0 | 1,980 |
package com.ajjpj.adiagram_.ui.fw
/**
 * A reversible user action. `isNop` actions are ignored by the history
 * (pushing them changes nothing).
 */
trait Command {
  def name: String
  def isNop: Boolean
  def undo(): Unit
  def redo(): Unit
}

/**
 * Classic two-stack undo/redo history. Pushing a real (non-nop) command
 * clears the redo history; undo moves a command to the redo stack and
 * vice versa.
 */
class UndoRedoStack {
  private var undoStack: List[Command] = Nil
  private var redoStack: List[Command] = Nil

  /** Drops the entire history. */
  def clear(): Unit = {
    undoStack = Nil
    redoStack = Nil
  }

  /** The command that `undo()` would execute next, if any. */
  def nextUndo: Option[Command] = undoStack.headOption

  /** The command that `redo()` would execute next, if any. */
  def nextRedo: Option[Command] = redoStack.headOption

  def hasUndo = nextUndo.isDefined
  def hasRedo = nextRedo.isDefined

  /** Records a command; no-ops are discarded, real commands clear the redo history. */
  def push(cmd: Command): Unit = {
    //TODO limit size
    if (!cmd.isNop) {
      undoStack = cmd :: undoStack
      redoStack = Nil
    }
  }

  /** Undoes the most recent command (if any) and makes it redoable. */
  def undo(): Unit = undoStack match {
    case cmd :: rest =>
      cmd.undo()
      undoStack = rest
      redoStack = cmd :: redoStack
    case Nil => // nothing to undo
  }

  /** Redoes the most recently undone command (if any) and makes it undoable again. */
  def redo(): Unit = redoStack match {
    case cmd :: rest =>
      cmd.redo()
      redoStack = rest
      undoStack = cmd :: undoStack
    case Nil => // nothing to redo
  }
}
/*
* Copyright (c) 2017. Yuriy Stul
*/
package com.stulsoft.chart.util
import org.jfree.data.xy.XYSeries
import scala.util.Random
/**
* @author Yuriy Stul
*/
object DataGenerator {
  /**
   * Builds an XYSeries of `length` points where x runs 1..length and each y is
   * drawn uniformly at random from [min, max).
   *
   * @param key    the series's key
   * @param min    minimal value
   * @param max    maximum value
   * @param length the series's length
   * @return the XYSeries
   */
  def generateXYSeries(key: String, min: Double, max: Double, length: Int): XYSeries = {
    val series = new XYSeries(key)
    for (x <- 1 to length) {
      val y = min + Random.nextDouble() * (max - min)
      series.add(x, y)
    }
    series
  }
}
| ysden123/poc | charts/src/main/scala/com/stulsoft/chart/util/DataGenerator.scala | Scala | mit | 691 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import java.util.{List => JList}
import java.util.UUID
import scala.collection.JavaConverters._
import scala.language.existentials
import org.apache.hadoop.fs.Path
import org.json4s.{DefaultFormats, JObject, _}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.SparkContext
import org.apache.spark.annotation.Since
import org.apache.spark.ml._
import org.apache.spark.ml.attribute._
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.param.{Param, ParamMap, ParamPair, Params}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
private[ml] trait ClassifierTypeTrait {
  // scalastyle:off structural.type
  // Existential alias for "any binary Classifier whose feature/estimator/model type
  // parameters are mutually consistent"; lets OneVsRest hold an arbitrary base
  // classifier without threading its type parameters through the API.
  type ClassifierType = Classifier[F, E, M] forSome {
    type F
    type M <: ClassificationModel[F, M]
    type E <: Classifier[F, E, M]
  }
  // scalastyle:on structural.type
}
/**
* Params for [[OneVsRest]].
*/
// Shared params for OneVsRest and OneVsRestModel: the base classifier plus the
// label/features/prediction columns inherited from PredictorParams.
private[ml] trait OneVsRestParams extends PredictorParams with ClassifierTypeTrait {

  /**
   * param for the base binary classifier that we reduce multiclass classification into.
   * The base classifier input and output columns are ignored in favor of
   * the ones specified in [[OneVsRest]].
   * @group param
   */
  val classifier: Param[ClassifierType] = new Param(this, "classifier", "base binary classifier")

  /** @group getParam */
  def getClassifier: ClassifierType = $(classifier)
}
// Persistence helpers shared by OneVsRest and OneVsRestModel. The base classifier
// is not JSON-encodable, so it is stripped from the params JSON and saved/loaded
// as a nested ML instance under <path>/classifier.
private[ml] object OneVsRestParams extends ClassifierTypeTrait {

  /**
   * Fails fast if the instance (or any nested model / the base classifier) is not
   * MLWritable, so save errors surface before anything is written to disk.
   */
  def validateParams(instance: OneVsRestParams): Unit = {
    def checkElement(elem: Params, name: String): Unit = elem match {
      case stage: MLWritable => // good
      case other =>
        throw new UnsupportedOperationException("OneVsRest write will fail " +
          s" because it contains $name which does not implement MLWritable." +
          s" Non-Writable $name: ${other.uid} of type ${other.getClass}")
    }
    instance match {
      case ovrModel: OneVsRestModel => ovrModel.models.foreach(checkElement(_, "model"))
      case _ => // no need to check OneVsRest here
    }
    checkElement(instance.getClassifier, "classifier")
  }

  /**
   * Saves metadata + params, excluding the classifier param (persisted separately below).
   */
  def saveImpl(
      path: String,
      instance: OneVsRestParams,
      sc: SparkContext,
      extraMetadata: Option[JObject] = None): Unit = {
    val params = instance.extractParamMap().toSeq
    // Drop the classifier param: it cannot be JSON-encoded.
    val jsonParams = render(params
      .filter { case ParamPair(p, v) => p.name != "classifier" }
      .map { case ParamPair(p, v) => p.name -> parse(p.jsonEncode(v)) }
      .toList)

    DefaultParamsWriter.saveMetadata(instance, path, sc, extraMetadata, Some(jsonParams))

    val classifierPath = new Path(path, "classifier").toString
    instance.getClassifier.asInstanceOf[MLWritable].save(classifierPath)
  }

  /**
   * Loads metadata plus the separately-persisted base classifier from <path>/classifier.
   */
  def loadImpl(
      path: String,
      sc: SparkContext,
      expectedClassName: String): (DefaultParamsReader.Metadata, ClassifierType) = {
    val metadata = DefaultParamsReader.loadMetadata(path, sc, expectedClassName)
    val classifierPath = new Path(path, "classifier").toString
    val estimator = DefaultParamsReader.loadParamsInstance[ClassifierType](classifierPath, sc)
    (metadata, estimator)
  }
}
/**
* Model produced by [[OneVsRest]].
* This stores the models resulting from training k binary classifiers: one for each class.
* Each example is scored against all k models, and the model with the highest score
* is picked to label the example.
*
* @param labelMetadata Metadata of label column if it exists, or Nominal attribute
* representing the number of classes in training dataset otherwise.
* @param models The binary classification models for the reduction.
* The i-th model is produced by testing the i-th class (taking label 1) vs the rest
* (taking label 0).
*/
@Since("1.4.0")
final class OneVsRestModel private[ml] (
    @Since("1.4.0") override val uid: String,
    private[ml] val labelMetadata: Metadata,
    @Since("1.4.0") val models: Array[_ <: ClassificationModel[_, _]])
  extends Model[OneVsRestModel] with OneVsRestParams with MLWritable {

  /** @group setParam */
  @Since("2.1.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("2.1.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  @Since("1.4.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = false, getClassifier.featuresDataType)
  }

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    // Check schema
    transformSchema(dataset.schema, logging = true)

    // determine the input columns: these need to be passed through
    val origCols = dataset.schema.map(f => col(f.name))

    // add an accumulator column to store predictions of all the models
    // (UUID suffix avoids clashing with any existing user column name)
    val accColName = "mbc$acc" + UUID.randomUUID().toString
    val initUDF = udf { () => Map[Int, Double]() }
    val newDataset = dataset.withColumn(accColName, initUDF())

    // persist if underlying dataset is not persistent.
    val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
    if (handlePersistence) {
      newDataset.persist(StorageLevel.MEMORY_AND_DISK)
    }

    // update the accumulator column with the result of prediction of models:
    // each fold step scores one binary model and records rawPrediction(1)
    // (its class-1 score) in the accumulator map under that model's class index
    val aggregatedDataset = models.zipWithIndex.foldLeft[DataFrame](newDataset) {
      case (df, (model, index)) =>
        val rawPredictionCol = model.getRawPredictionCol
        val columns = origCols ++ List(col(rawPredictionCol), col(accColName))

        // add temporary column to store intermediate scores and update
        val tmpColName = "mbc$tmp" + UUID.randomUUID().toString
        val updateUDF = udf { (predictions: Map[Int, Double], prediction: Vector) =>
          predictions + ((index, prediction(1)))
        }
        model.setFeaturesCol($(featuresCol))
        val transformedDataset = model.transform(df).select(columns: _*)
        val updatedDataset = transformedDataset
          .withColumn(tmpColName, updateUDF(col(accColName), col(rawPredictionCol)))
        val newColumns = origCols ++ List(col(tmpColName))

        // switch out the intermediate column with the accumulator column
        updatedDataset.select(newColumns: _*).withColumnRenamed(tmpColName, accColName)
    }

    if (handlePersistence) {
      newDataset.unpersist()
    }

    // output the index of the classifier with highest confidence as prediction
    val labelUDF = udf { (predictions: Map[Int, Double]) =>
      predictions.maxBy(_._2)._1.toDouble
    }

    // output label and label metadata as prediction
    aggregatedDataset
      .withColumn($(predictionCol), labelUDF(col(accColName)), labelMetadata)
      .drop(accColName)
  }

  @Since("1.4.1")
  override def copy(extra: ParamMap): OneVsRestModel = {
    val copied = new OneVsRestModel(
      uid, labelMetadata, models.map(_.copy(extra).asInstanceOf[ClassificationModel[_, _]]))
    copyValues(copied, extra).setParent(parent)
  }

  @Since("2.0.0")
  override def write: MLWriter = new OneVsRestModel.OneVsRestModelWriter(this)
}
@Since("2.0.0")
object OneVsRestModel extends MLReadable[OneVsRestModel] {

  @Since("2.0.0")
  override def read: MLReader[OneVsRestModel] = new OneVsRestModelReader

  @Since("2.0.0")
  override def load(path: String): OneVsRestModel = super.load(path)

  /** [[MLWriter]] instance for [[OneVsRestModel]] */
  private[OneVsRestModel] class OneVsRestModelWriter(instance: OneVsRestModel) extends MLWriter {

    OneVsRestParams.validateParams(instance)

    override protected def saveImpl(path: String): Unit = {
      // Persist label metadata and the model count so the reader knows how many
      // model_<idx> subdirectories to load back.
      val extraJson = ("labelMetadata" -> instance.labelMetadata.json) ~
        ("numClasses" -> instance.models.length)
      OneVsRestParams.saveImpl(path, instance, sc, Some(extraJson))
      instance.models.zipWithIndex.foreach { case (model: MLWritable, idx) =>
        val modelPath = new Path(path, s"model_$idx").toString
        model.save(modelPath)
      }
    }
  }

  private class OneVsRestModelReader extends MLReader[OneVsRestModel] {

    /** Checked against metadata when loading model */
    private val className = classOf[OneVsRestModel].getName

    override def load(path: String): OneVsRestModel = {
      implicit val format = DefaultFormats
      val (metadata, classifier) = OneVsRestParams.loadImpl(path, sc, className)
      val labelMetadata = Metadata.fromJson((metadata.metadata \\ "labelMetadata").extract[String])
      val numClasses = (metadata.metadata \\ "numClasses").extract[Int]
      // Load each binary sub-model from its model_<idx> subdirectory.
      val models = Range(0, numClasses).toArray.map { idx =>
        val modelPath = new Path(path, s"model_$idx").toString
        DefaultParamsReader.loadParamsInstance[ClassificationModel[_, _]](modelPath, sc)
      }
      val ovrModel = new OneVsRestModel(metadata.uid, labelMetadata, models)
      DefaultParamsReader.getAndSetParams(ovrModel, metadata)
      // The classifier param was excluded from the params JSON at save time; set it explicitly.
      ovrModel.set("classifier", classifier)
      ovrModel
    }
  }
}
/**
* Reduction of Multiclass Classification to Binary Classification.
* Performs reduction using one against all strategy.
* For a multiclass classification with k classes, train k models (one per class).
* Each example is scored against all k models and the model with highest score
* is picked to label the example.
*/
@Since("1.4.0")
final class OneVsRest @Since("1.4.0") (
    @Since("1.4.0") override val uid: String)
  extends Estimator[OneVsRestModel] with OneVsRestParams with MLWritable {

  @Since("1.4.0")
  def this() = this(Identifiable.randomUID("oneVsRest"))

  /** @group setParam */
  @Since("1.4.0")
  def setClassifier(value: Classifier[_, _, _]): this.type = {
    set(classifier, value.asInstanceOf[ClassifierType])
  }

  /** @group setParam */
  @Since("1.5.0")
  def setLabelCol(value: String): this.type = set(labelCol, value)

  /** @group setParam */
  @Since("1.5.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.5.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  @Since("1.4.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = true, getClassifier.featuresDataType)
  }

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): OneVsRestModel = {
    transformSchema(dataset.schema)

    val instr = Instrumentation.create(this, dataset)
    instr.logParams(labelCol, featuresCol, predictionCol)
    instr.logNamedValue("classifier", $(classifier).getClass.getCanonicalName)

    // determine number of classes either from metadata if provided, or via computation.
    val labelSchema = dataset.schema($(labelCol))
    val computeNumClasses: () => Int = () => {
      val Row(maxLabelIndex: Double) = dataset.agg(max(col($(labelCol)).cast(DoubleType))).head()
      // classes are assumed to be numbered from 0,...,maxLabelIndex
      maxLabelIndex.toInt + 1
    }
    val numClasses = MetadataUtils.getNumClasses(labelSchema).fold(computeNumClasses())(identity)
    instr.logNumClasses(numClasses)

    val multiclassLabeled = dataset.select($(labelCol), $(featuresCol))

    // persist if underlying dataset is not persistent.
    val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
    if (handlePersistence) {
      multiclassLabeled.persist(StorageLevel.MEMORY_AND_DISK)
    }

    // create k columns, one for each binary classifier.
    // NOTE: `.par` submits the k binary training jobs concurrently.
    val models = Range(0, numClasses).par.map { index =>
      // generate new label metadata for the binary problem.
      val newLabelMeta = BinaryAttribute.defaultAttr.withName("label").toMetadata()
      val labelColName = "mc2b$" + index
      // one-vs-rest relabeling: 1.0 for the current class, 0.0 for everything else
      val trainingDataset = multiclassLabeled.withColumn(
        labelColName, when(col($(labelCol)) === index.toDouble, 1.0).otherwise(0.0), newLabelMeta)
      val classifier = getClassifier
      val paramMap = new ParamMap()
      paramMap.put(classifier.labelCol -> labelColName)
      paramMap.put(classifier.featuresCol -> getFeaturesCol)
      paramMap.put(classifier.predictionCol -> getPredictionCol)
      classifier.fit(trainingDataset, paramMap)
    }.toArray[ClassificationModel[_, _]]
    instr.logNumFeatures(models.head.numFeatures)

    if (handlePersistence) {
      multiclassLabeled.unpersist()
    }

    // extract label metadata from label column if present, or create a nominal attribute
    // to output the number of labels
    val labelAttribute = Attribute.fromStructField(labelSchema) match {
      case _: NumericAttribute | UnresolvedAttribute =>
        NominalAttribute.defaultAttr.withName("label").withNumValues(numClasses)
      case attr: Attribute => attr
    }
    val model = new OneVsRestModel(uid, labelAttribute.toMetadata(), models).setParent(this)
    instr.logSuccess(model)
    copyValues(model)
  }

  @Since("1.4.1")
  override def copy(extra: ParamMap): OneVsRest = {
    val copied = defaultCopy(extra).asInstanceOf[OneVsRest]
    if (isDefined(classifier)) {
      copied.setClassifier($(classifier).copy(extra))
    }
    copied
  }

  @Since("2.0.0")
  override def write: MLWriter = new OneVsRest.OneVsRestWriter(this)
}
@Since("2.0.0")
object OneVsRest extends MLReadable[OneVsRest] {

  @Since("2.0.0")
  override def read: MLReader[OneVsRest] = new OneVsRestReader

  @Since("2.0.0")
  override def load(path: String): OneVsRest = super.load(path)

  /** [[MLWriter]] implementation delegating persistence to [[OneVsRestParams.saveImpl]]. */
  private[OneVsRest] class OneVsRestWriter(instance: OneVsRest) extends MLWriter {

    OneVsRestParams.validateParams(instance)

    override protected def saveImpl(path: String): Unit =
      OneVsRestParams.saveImpl(path, instance, sc)
  }

  /** [[MLReader]] implementation restoring the estimator and its base classifier. */
  private class OneVsRestReader extends MLReader[OneVsRest] {

    /** Checked against metadata when loading model */
    private val className = classOf[OneVsRest].getName

    override def load(path: String): OneVsRest = {
      val (meta, baseClassifier) = OneVsRestParams.loadImpl(path, sc, className)
      val estimator = new OneVsRest(meta.uid)
      DefaultParamsReader.getAndSetParams(estimator, meta)
      // setClassifier returns this.type, so the configured estimator is the result.
      estimator.setClassifier(baseClassifier)
    }
  }
}
| saturday-shi/spark | mllib/src/main/scala/org/apache/spark/ml/classification/OneVsRest.scala | Scala | apache-2.0 | 15,166 |
package com.ubirch.util.elasticsearch.client.binary.storage
import com.ubirch.util.elasticsearch.client.binary.storage.base.{ESClient, ESBulkStorageBase}
import org.elasticsearch.client.transport.TransportClient
/**
* author: cvandrei
* since: 2017-02-24
*/
trait ESBulkStorage extends ESBulkStorageBase {

  // Wires the shared transport client from ESClient into the bulk storage base.
  override protected val esClient: TransportClient = ESClient.esClient

}
object ESBulkStorage extends ESBulkStorage {}
| ubirch/ubirch-scala-utils | elasticsearch-client-binary/src/main/scala/com/ubirch/util/elasticsearch/client/binary/storage/ESBulkStorage.scala | Scala | apache-2.0 | 437 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system
import java.util
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import org.apache.samza.serializers.SerdeManager
import org.apache.samza.util.{Logging, TimerUtils}
import org.apache.samza.system.chooser.MessageChooser
import org.apache.samza.SamzaException
import java.util.ArrayDeque
import java.util.Collections
import java.util.HashSet
import java.util.HashMap
import java.util.Queue
import java.util.Set
object SystemConsumers {
  /** Default upper bound (ms) between polls of the underlying systems. */
  val DEFAULT_POLL_INTERVAL_MS = 50
  /** Default poll timeout (ms) used when the chooser had no message to offer. */
  val DEFAULT_NO_NEW_MESSAGES_TIMEOUT = 10
  /** By default, deserialization failures fail the container rather than dropping messages. */
  val DEFAULT_DROP_SERIALIZATION_ERROR = false
}
/**
* The SystemConsumers class coordinates between all SystemConsumers, the
* MessageChooser, and the SamzaContainer. Its job is to poll each
* SystemConsumer for messages, update the
* {@link org.apache.samza.system.chooser.MessageChooser} with new incoming
* messages, poll the MessageChooser for the next message to process, and
* return that message to the SamzaContainer.
*/
class SystemConsumers (

  /**
   * The class that determines the order to process incoming messages.
   */
  chooser: MessageChooser,

  /**
   * A map of SystemConsumers that should be polled for new messages.
   */
  consumers: Map[String, SystemConsumer],

  /**
   * The class that handles deserialization of incoming messages.
   */
  serdeManager: SerdeManager = new SerdeManager,

  /**
   * A helper class to hold all of SystemConsumers' metrics.
   */
  metrics: SystemConsumersMetrics = new SystemConsumersMetrics,

  /**
   * If MessageChooser returns null when it's polled, SystemConsumers will
   * poll each SystemConsumer with a timeout next time it tries to poll for
   * messages. Setting the timeout to 0 means that SamzaContainer's main
   * thread will sit in a tight loop polling every SystemConsumer over and
   * over again if no new messages are available.
   */
  noNewMessagesTimeout: Int = SystemConsumers.DEFAULT_NO_NEW_MESSAGES_TIMEOUT,

  /**
   * This parameter is to define how to deal with deserialization failure. If
   * set to true, the task will drop the messages when deserialization fails.
   * If set to false, the task will throw SamzaException and fail the container.
   */
  dropDeserializationError: Boolean = SystemConsumers.DEFAULT_DROP_SERIALIZATION_ERROR,

  /**
   * <p>Defines an upper bound for how long the SystemConsumers will wait
   * before polling systems for more data. The default setting is 50ms, which
   * means that SystemConsumers will poll for new messages for all
   * SystemStreamPartitions with empty buffers every 50ms. SystemConsumers
   * will also poll for new messages any time that there are no available
   * messages to process, or any time the MessageChooser returns a null
   * IncomingMessageEnvelope.</p>
   *
   * <p>This parameter also implicitly defines how much latency is introduced
   * by SystemConsumers. If a message is available for a SystemStreamPartition
   * with no remaining unprocessed messages, the SystemConsumers will poll for
   * it within 50ms of its availability in the stream system.</p>
   */
  val pollIntervalMs: Int = SystemConsumers.DEFAULT_POLL_INTERVAL_MS,

  /**
   * Clock can be used to inject a custom clock when mocking this class in
   * tests. The default implementation returns the current system clock time.
   */
  val clock: () => Long = () => System.nanoTime()) extends Logging with TimerUtils {

  /**
   * A buffer of incoming messages grouped by SystemStreamPartition. These
   * messages are handed out to the MessageChooser as it needs them.
   */
  private val unprocessedMessagesBySSP = new HashMap[SystemStreamPartition, Queue[IncomingMessageEnvelope]]()

  /**
   * Set of SSPs that are currently at end-of-stream.
   */
  private val endOfStreamSSPs = new HashSet[SystemStreamPartition]()

  /**
   * A set of SystemStreamPartitions grouped by systemName. This is used as a
   * cache to figure out which SystemStreamPartitions we need to poll from the
   * underlying system consumer.
   */
  private val emptySystemStreamPartitionsBySystem = new HashMap[String, Set[SystemStreamPartition]]()

  /**
   * Default timeout to noNewMessagesTimeout. Every time SystemConsumers
   * receives incoming messages, it sets timeout to 0. Every time
   * SystemConsumers receives no new incoming messages from the MessageChooser,
   * it sets timeout to noNewMessagesTimeout again.
   */
  var timeout = noNewMessagesTimeout

  /**
   * The last time that systems were polled for new messages.
   */
  var lastPollNs = 0L

  /**
   * Total number of unprocessed messages in unprocessedMessagesBySSP.
   */
  var totalUnprocessedMessages = 0

  debug("Got stream consumers: %s" format consumers)
  debug("Got no new message timeout: %s" format noNewMessagesTimeout)

  // Expose live gauges over the internal mutable state.
  metrics.setTimeout(() => timeout)
  metrics.setNeededByChooser(() => emptySystemStreamPartitionsBySystem.size)
  metrics.setUnprocessedMessages(() => totalUnprocessedMessages)

  /**
   * Starts every system consumer and the chooser, primes the empty-SSP cache
   * from the partitions registered so far, and performs an initial poll.
   */
  def start {
    debug("Starting consumers.")

    emptySystemStreamPartitionsBySystem.asScala ++= unprocessedMessagesBySSP
      .keySet
      .asScala
      .groupBy(_.getSystem)
      .mapValues(systemStreamPartitions => new util.HashSet(systemStreamPartitions.toSeq.asJava))

    consumers
      .keySet
      .foreach(metrics.registerSystem)

    consumers
      .values
      .foreach(_.start)

    chooser.start

    refresh
  }

  /**
   * Stops every system consumer and the chooser.
   */
  def stop {
    debug("Stopping consumers.")
    consumers.values.foreach(_.stop)
    chooser.stop
  }

  /**
   * Registers a SystemStreamPartition (with its starting offset) with the
   * metrics, the chooser, and the owning system consumer. A partition whose
   * offset already marks end-of-stream is only recorded as such.
   */
  def register(systemStreamPartition: SystemStreamPartition, offset: String) {
    debug("Registering stream: %s, %s" format (systemStreamPartition, offset))

    if (IncomingMessageEnvelope.END_OF_STREAM_OFFSET.equals(offset)) {
      info("Stream : %s is already at end of stream" format (systemStreamPartition))
      endOfStreamSSPs.add(systemStreamPartition)
      return
    }

    metrics.registerSystemStreamPartition(systemStreamPartition)
    unprocessedMessagesBySSP.put(systemStreamPartition, new ArrayDeque[IncomingMessageEnvelope]())
    chooser.register(systemStreamPartition, offset)

    try {
      consumers(systemStreamPartition.getSystem).register(systemStreamPartition, offset)
    } catch {
      case e: NoSuchElementException => throw new SystemConsumersException("can't register " + systemStreamPartition.getSystem + "'s consumer.", e)
    }
  }

  /** Returns true if the given SSP has reached end-of-stream. */
  def isEndOfStream(systemStreamPartition: SystemStreamPartition) = {
    endOfStreamSSPs.contains(systemStreamPartition)
  }

  /**
   * Asks the chooser for the next message to process, refeeding the chooser
   * and re-polling the underlying systems as needed. Returns null when the
   * chooser has nothing to offer.
   */
  def choose (updateChooser: Boolean = true): IncomingMessageEnvelope = {
    val envelopeFromChooser = chooser.choose

    // Note: tryUpdate below deserializes the next buffered message (via update),
    // which is why this section is timed as deserialization.
    updateTimer(metrics.deserializationNs) {
      if (envelopeFromChooser == null) {
        trace("Chooser returned null.")

        metrics.choseNull.inc

        // Sleep for a while so we don't poll in a tight loop, but, don't do this when called from the AsyncRunLoop
        // code because in that case the chooser will not get updated with a new message for an SSP until after a
        // message is processed, See how updateChooser variable is used below. The AsyncRunLoop has its own way to
        // block when there is no work to process.
        timeout = if (updateChooser) noNewMessagesTimeout else 0
      } else {
        val systemStreamPartition = envelopeFromChooser.getSystemStreamPartition

        if (envelopeFromChooser.isEndOfStream) {
          info("End of stream reached for partition: %s" format systemStreamPartition)
          endOfStreamSSPs.add(systemStreamPartition)
        }

        trace("Chooser returned an incoming message envelope: %s" format envelopeFromChooser)

        // Ok to give the chooser a new message from this stream.
        timeout = 0
        metrics.choseObject.inc
        metrics.systemStreamMessagesChosen(envelopeFromChooser.getSystemStreamPartition).inc

        if (updateChooser) {
          trace("Update chooser for " + systemStreamPartition.getPartition)
          tryUpdate(systemStreamPartition)
        }
      }
    }

    updateTimer(metrics.pollNs) {
      if (envelopeFromChooser == null || TimeUnit.NANOSECONDS.toMillis(clock() - lastPollNs) > pollIntervalMs) {
        refresh
      }
    }

    envelopeFromChooser
  }

  /**
   * Poll all SystemStreamPartitions for which there are currently no new
   * messages to process.
   */
  private def poll(systemName: String) {
    trace("Polling system consumer: %s" format systemName)

    metrics.systemPolls(systemName).inc

    trace("Getting fetch map for system: %s" format systemName)

    // Only fetch SSPs whose buffers are empty, and never those at end-of-stream.
    val systemFetchSet : util.Set[SystemStreamPartition] =
      if (emptySystemStreamPartitionsBySystem.containsKey(systemName)) {
        val sspToFetch = new util.HashSet(emptySystemStreamPartitionsBySystem.get(systemName))
        sspToFetch.removeAll(endOfStreamSSPs)
        sspToFetch
      } else {
        Collections.emptySet()
      }

    // Poll when at least one SSP in this system needs more messages.
    if (systemFetchSet != null && systemFetchSet.size > 0) {
      val consumer = consumers(systemName)

      trace("Fetching: %s" format systemFetchSet)

      metrics.systemStreamPartitionFetchesPerPoll(systemName).inc(systemFetchSet.size)

      val systemStreamPartitionEnvelopes = consumer.poll(systemFetchSet, timeout)
      trace("Got incoming message envelopes: %s" format systemStreamPartitionEnvelopes)

      metrics.systemMessagesPerPoll(systemName).inc

      val sspAndEnvelopeIterator = systemStreamPartitionEnvelopes.entrySet.iterator

      while (sspAndEnvelopeIterator.hasNext) {
        val sspAndEnvelope = sspAndEnvelopeIterator.next
        val systemStreamPartition = sspAndEnvelope.getKey
        val envelopes = new ArrayDeque(sspAndEnvelope.getValue)
        val numEnvelopes = envelopes.size
        totalUnprocessedMessages += numEnvelopes

        if (numEnvelopes > 0) {
          unprocessedMessagesBySSP.put(systemStreamPartition, envelopes)

          // Update the chooser if it needs a message for this SSP.
          if (emptySystemStreamPartitionsBySystem.get(systemStreamPartition.getSystem).remove(systemStreamPartition)) {
            tryUpdate(systemStreamPartition)
          }
        }
      }
    } else {
      trace("Skipping polling for %s. Already have messages available for all registered SystemStreamPartitions." format systemName)
    }
  }

  /**
   * Attempts to feed the chooser one buffered message for the given SSP; if
   * nothing could be handed over (or deserialization threw), the SSP is
   * re-marked as empty so the next refresh polls it again.
   */
  def tryUpdate(ssp: SystemStreamPartition) {
    var updated = false
    try {
      updated = update(ssp)
    } finally {
      if (!updated) {
        // if failed to update the chooser, add the ssp back into the emptySystemStreamPartitionBySystem map to ensure that we will poll for the next message
        emptySystemStreamPartitionsBySystem.get(ssp.getSystem).add(ssp)
      }
    }
  }

  /** Polls every system for new messages and records the poll time. */
  private def refresh {
    trace("Refreshing chooser with new messages.")

    // Update last poll time so we don't poll too frequently.
    lastPollNs = clock()

    // Poll every system for new messages.
    consumers.keys.map(poll(_))
  }

  /**
   * Tries to update the message chooser with an envelope from the supplied
   * SystemStreamPartition if an envelope is available.
   */
  private def update(systemStreamPartition: SystemStreamPartition) = {
    var updated = false
    val q = unprocessedMessagesBySSP.get(systemStreamPartition)

    while (q.size > 0 && !updated) {
      val rawEnvelope = q.remove
      val deserializedEnvelope = try {
        Some(serdeManager.fromBytes(rawEnvelope))
      } catch {
        // With dropDeserializationError disabled, a bad message fails the container.
        case e: Throwable if !dropDeserializationError =>
          throw new SystemConsumersException(
            "Cannot deserialize an incoming message for %s"
              .format(systemStreamPartition.getSystemStream.toString), e)
        // Otherwise count it and drop it, moving on to the next buffered message.
        case ex: Throwable =>
          debug("Cannot deserialize an incoming message for %s. Dropping the error message."
            .format(systemStreamPartition.getSystemStream.toString), ex)
          metrics.deserializationError.inc
          None
      }

      if (deserializedEnvelope.isDefined) {
        chooser.update(deserializedEnvelope.get)
        updated = true
      }

      totalUnprocessedMessages -= 1
    }

    updated
  }
}
/**
* When SystemConsumer registers consumers, there are situations where system can not recover
* from. Such as a failed consumer is used in task.input and changelogs.
* SystemConsumersException is thrown to indicate a hard failure when the system can not recover from.
*/
class SystemConsumersException(s: String, t: Throwable) extends SamzaException(s, t) {
  /** Convenience constructor for failures without an underlying cause. */
  def this(s: String) = this(s, null)
}
| TiVo/samza | samza-core/src/main/scala/org/apache/samza/system/SystemConsumers.scala | Scala | apache-2.0 | 13,421 |
package playlastik.dslHelper
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.Implicits._
import playlastik.method.Post
object SearchHelper {
val action = "_search"
def getRequestInfo(serviceUrl: String, req: SearchDefinition): RequestInfo = {
val indices = if (req.indices.isEmpty || req.indices.contains("*")) "/_all" else "/" + req.indices.mkString(",")
val types = if (req.typeList.isEmpty || req.typeList.contains("*")) "" else "/" + req.typeList.mkString(",")
val url = serviceUrl + indices + types + "/" + action
// handle parent / operation / routing / version in query parameter
val lOptQueryParams: List[Option[(String, String)]] = (
req.oRouting.map(r => "routing" -> r) ::
Nil)
val queryParams = lOptQueryParams flatMap (_.toList)
RequestInfo(Post, url, req.queryString, queryParams)
}
} | fmasion/playLastik | project-code/app/playlastik/dslHelper/SearchHelper.scala | Scala | apache-2.0 | 878 |
package hr.element.beepo
package Model.postgres
import hr.ngs.patterns._
import org.pgscala.converters._
import org.pgscala.util._
import hr.ngs.patterns._
object SmsIptRequestConverter {
  private val logger = org.slf4j.LoggerFactory.getLogger(getClass)

  // Postgres type names of the two record layouts this converter handles.
  private val entityTypeName = "SmsIptRequest_entity"
  private val extendedTypeName = "-ngs_SmsIptRequest_type-"

  /** Rebuilds a [[Model.SmsIptRequest]] from a packed record of the entity DB type. */
  def fromPGString(record: String, locator: IServiceLocator): Model.SmsIptRequest = {
    val items = PGRecord.unpack(record)
    Model.SmsIptRequest.buildInternal(
      _locator = locator
    , URI = items(URIPos)
    , ID = PGIntConverter.fromPGString(items(IDPos))
    , taskID = PGUUIDConverter.fromPGString(items(taskIDPos))
    , taskURI = items(taskURIPos)
    , task = null
    , phone = items(phonePos)
    , messageText = items(messageTextPos)
    , status = if (items(statusPos) != null && items(statusPos).nonEmpty) Some(Model.postgres.RequestStatusConverter.fromPGString(items(statusPos), locator)) else None
    , messageLogID = if (items(messageLogIDPos) != null && items(messageLogIDPos).nonEmpty) Some(PGLongConverter.fromPGString(items(messageLogIDPos))) else None
    )
  }

  /** Rebuilds a [[Model.SmsIptRequest]] from a packed record of the extended DB type. */
  def fromPGStringExtended(record: String, locator: IServiceLocator): Model.SmsIptRequest = {
    val items = PGRecord.unpack(record)
    Model.SmsIptRequest.buildInternal(
      _locator = locator
    , URI = items(URIPosExtended)
    , ID = PGIntConverter.fromPGString(items(IDPosExtended))
    , taskID = PGUUIDConverter.fromPGString(items(taskIDPosExtended))
    , taskURI = items(taskURIPosExtended)
    , task = null
    , phone = items(phonePosExtended)
    , messageText = items(messageTextPosExtended)
    , status = if (items(statusPosExtended) != null && items(statusPosExtended).nonEmpty) Some(Model.postgres.RequestStatusConverter.fromPGStringExtended(items(statusPosExtended), locator)) else None
    , messageLogID = if (items(messageLogIDPosExtended) != null && items(messageLogIDPosExtended).nonEmpty) Some(PGLongConverter.fromPGString(items(messageLogIDPosExtended))) else None
    )
  }

  /** Serializes `item` into the packed record layout of the entity DB type. */
  def toPGString(item: Model.SmsIptRequest): String = {
    val items = new Array[String](columnCount)
    items(URIPos) = item.URI
    items(IDPos) = PGIntConverter.toPGString(item.ID)
    items(taskIDPos) = PGUUIDConverter.toPGString(item.taskID)
    items(taskURIPos) = item.taskURI
    items(phonePos) = item.phone
    items(messageTextPos) = item.messageText
    items(statusPos) = if(item.status.isDefined) Model.postgres.RequestStatusConverter.toPGString(item.status.get) else null
    items(messageLogIDPos) = if (item.messageLogID.isDefined) PGLongConverter.toPGString(item.messageLogID.get) else null
    PGRecord.pack(items)
  }

  /** Serializes `item` into the packed record layout of the extended DB type. */
  def toPGStringExtended(item: Model.SmsIptRequest): String = {
    val items = new Array[String](extendedColumnCount)
    items(URIPosExtended) = item.URI
    items(IDPosExtended) = PGIntConverter.toPGString(item.ID)
    items(taskIDPosExtended) = PGUUIDConverter.toPGString(item.taskID)
    items(taskURIPosExtended) = item.taskURI
    items(phonePosExtended) = item.phone
    items(messageTextPosExtended) = item.messageText
    items(statusPosExtended) = if(item.status.isDefined) Model.postgres.RequestStatusConverter.toPGString(item.status.get) else null
    items(messageLogIDPosExtended) = if (item.messageLogID.isDefined) PGLongConverter.toPGString(item.messageLogID.get) else null
    PGRecord.pack(items)
  }

  private var columnCount = -1
  private var extendedColumnCount = -1

  /**
   * Looks up the zero-based position of `column` within the given Model type.
   * On a miss it logs the same "out of sync" error as before and returns None,
   * so the caller leaves the previously assigned position (initially -1) untouched.
   * `displayName` is the human-readable type name used in the log message.
   */
  private def columnIndex(pgTypeName: String, displayName: String, column: String): Option[Int] =
    postgresUtils.getIndexes("Model", pgTypeName).get(column) match {
      case Some(index) => Some(index - 1)
      case None =>
        logger.error("Couldn't find column \"" + column + "\" in type Model." + displayName + ". Check if database is out of sync with code!")
        None
    }

  /**
   * Resolves all column positions and column counts from the database catalog.
   * Lookup order (URI positions, then column counts, then the remaining columns)
   * matches the original hand-unrolled implementation.
   */
  def initializeProperties() {
    columnIndex(entityTypeName, "SmsIptRequest_entity", "URI").foreach(i => URIPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "URI").foreach(i => URIPosExtended = i)
    columnCount = postgresUtils.getColumnCount("Model", entityTypeName)
    extendedColumnCount = postgresUtils.getColumnCount("Model", extendedTypeName)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "ID").foreach(i => IDPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "ID").foreach(i => IDPosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "taskID").foreach(i => taskIDPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "taskID").foreach(i => taskIDPosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "taskURI").foreach(i => taskURIPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "taskURI").foreach(i => taskURIPosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "phone").foreach(i => phonePos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "phone").foreach(i => phonePosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "messageText").foreach(i => messageTextPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "messageText").foreach(i => messageTextPosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "status").foreach(i => statusPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "status").foreach(i => statusPosExtended = i)
    columnIndex(entityTypeName, "SmsIptRequest_entity", "messageLogID").foreach(i => messageLogIDPos = i)
    columnIndex(extendedTypeName, "SmsIptRequest", "messageLogID").foreach(i => messageLogIDPosExtended = i)
  }

  // Column positions, resolved by initializeProperties(); -1 means "unresolved".
  private var URIPos = -1
  private var URIPosExtended = -1
  private var IDPos = -1
  private var IDPosExtended = -1

  /** Builds the composite URI for an SmsIptRequest from its primary key. */
  def buildURI(ID: Int) : String = {
    val _uriParts = new Array[String](1)
    _uriParts(0) = PGIntConverter.toPGString(ID)
    postgres.Utils.buildURI(_uriParts)
  }

  private var taskIDPos = -1
  private var taskIDPosExtended = -1
  private var taskURIPos = -1
  private var taskURIPosExtended = -1
  private var phonePos = -1
  private var phonePosExtended = -1
  private var messageTextPos = -1
  private var messageTextPosExtended = -1
  private var statusPos = -1
  private var statusPosExtended = -1
  private var messageLogIDPos = -1
  private var messageLogIDPosExtended = -1
}
| element-doo/beepo | code/scala/model-services-generated/src/main/scala/hr/element/beepo/Model/postgres/SmsIptRequestConverter.scala | Scala | bsd-3-clause | 8,969 |
/* Code Pulse: a real-time code coverage tool, for more information, see <http://code-pulse.com/>
*
* Copyright (C) 2014-2017 Code Dx, Inc. <https://codedx.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codedx.codepulse.hq.monitor
import reactive.EventSource
import reactive.EventStream
/** Describes the health status of a TraceComponent. Instances of this class will generally be
 * created by HealthMonitor implementations, and eventually checked by the UI.
 *
 * @param component the component whose health is being reported
 * @param status the status classification assigned by the monitor
 * @param message optional human-readable detail accompanying the status
 * @param data optional monitor-specific payload attached to the report
 */
case class TraceComponentHealth(component: TraceComponent, status: TraceComponentStatus, message: Option[String] = None, data: Option[TraceComponentMonitorData] = None)
/** Base (marker) trait for trace component monitor data. */
trait TraceComponentMonitorData
/** A runnable task that checks the health of a trace component.
*
* Monitors are Runnables, with the expectation that they will be used by a
* ScheduledExecutorService, scheduled to repeat every once in a while.
*/
trait HealthMonitor extends Runnable {
	/** Run interval for monitor, in milliseconds. Implementations may override
	  * this; it is intended for use when scheduling the monitor to repeat.
	  */
	val runInterval: Int = 5000
def checkHealth: TraceComponentHealth
	/** Event source that gets triggered when this monitor is run
	  * and reports a new health status.
	  */
	private val healthUpdatesSource = new EventSource[TraceComponentHealth]()

	/** Read-only stream of health statuses produced by successive runs. */
	def healthUpdates: EventStream[TraceComponentHealth] = healthUpdatesSource
def run = {
val health = checkHealth
healthUpdatesSource.fire(health)
}
/** Convenience method that creates a "healthy" status for the
* implicitly-provided `cmp` TraceComponent.
*/
def healthy(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Healthy)
/** Convenience method that creates a "healthy" status for the
* implicitly-provided `cmp` TraceComponent with the given `data`.
*/
def healthy(data: TraceComponentMonitorData)(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Healthy, data = Some(data))
/** Convenience method that creates a "concerned" status with the
* given `msg` for the implicitly-provided `cmp` TraceComponent.
*/
def concerned(msg: String)(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Concerned, message = Some(msg))
/** Convenience method that creates a "concerned" status with the
* given `msg` and `data` for the implicitly-provided `cmp` TraceComponent.
*/
def concerned(msg: String, data: TraceComponentMonitorData)(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Concerned, message = Some(msg), data = Some(data))
/** Convenience method that creates an "unhealthy" status with the
* given `msg` for the implicitly-provided `cmp` TraceComponent.
*/
def unhealthy(msg: String)(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Unhealthy, message = Some(msg))
/** Convenience method that creates an "unhealthy" status with the
* given `msg` and `data` for the implicitly-provided `cmp` TraceComponent.
*/
def unhealthy(msg: String, data: TraceComponentMonitorData)(implicit cmp: TraceComponent) =
TraceComponentHealth(cmp, TraceComponentStatus.Unhealthy, message = Some(msg), data = Some(data))
} | secdec/codepulse | hq/src/main/scala/com/secdec/bytefrog/hq/monitor/HealthMonitor.scala | Scala | apache-2.0 | 3,750 |
package com.identityblitz.login
import com.identityblitz.login.FlowAttrName._
import com.identityblitz.login.Platform.Platform
import com.identityblitz.login.transport.{DiscardingCookie, OutboundTransport, Cookie, InboundTransport}
import org.scalatest.{FlatSpec, Matchers}
class LoginFlowTest extends FlatSpec with Matchers {

  behavior of "Login flow"

  it should "load config properly" in {
    val flow = LoginFramework.loginFlow

    // Stub inbound transport recording attributes, login context and forward target.
    val itr = new InboundTransport {
      var attrs = Map(CALLBACK_URI_NAME -> "some_url")
      // Initialized to None (rather than the null default `_`) so getLoginCtx
      // is safe to inspect before the flow has assigned a context.
      var lgnCtx: Option[LoginContext] = None
      val cookies = Map[String, Cookie]()
      var pathToForward: String = _
      override def getCookie(name: String): Option[_ <: Cookie] = cookies.get(name)
      override def getParameter(name: String): Option[String] = ???
      override def platform: Platform = ???
      override def containsParameter(name: String): Boolean = ???
      override def updatedLoginCtx(loginCtx: Option[LoginContext]): Unit = { lgnCtx = loginCtx }
      override def getAttribute(name: String): Option[String] = attrs.get(name)
      override def removeAttribute(name: String): Unit = ???
      override def getLoginCtx: Option[LoginContext] = lgnCtx
      override def unwrap: AnyRef = ???
      override def forward(path: String): Unit = pathToForward = path
      override def setAttribute(name: String, value: String): Unit = attrs += name -> value
    }

    // Stub outbound transport recording added cookies and the redirect location.
    val otr = new OutboundTransport {
      var cookies = Map[String, Cookie]()
      var pathToRedirect: String = _
      override def platform: Platform = ???
      // Bug fix: the original read `cookie + cookie.name -> cookie`, which built a
      // throw-away String/Tuple via any2stringadd and never stored the cookie.
      override def addCookie(cookie: Cookie): Unit = cookies += cookie.name -> cookie
      override def unwrap: AnyRef = ???
      override def redirect(location: String): Unit = pathToRedirect = location
      override def discardCookie(cookie: DiscardingCookie): Unit = ???
    }

    flow.start(itr, otr)

    import scala.language.reflectiveCalls
    itr.attrs.keys should contain ("callback_uri")
    itr.attrs.keys should contain ("command_name")
    itr.attrs.keys should contain ("command")
    itr.attrs.values should contain ("bind")
    itr.pathToForward should be ("/blitz/lgpage")
  }
}
| brainysmith/login-framework | src/test/scala/com/identityblitz/login/LoginFlowTest.scala | Scala | mit | 2,185 |
package unfiltered
package specs2
import java.util.concurrent.{Executors, ThreadFactory}
import okhttp3._
import okio.ByteString
import org.specs2.specification.BeforeAfterAll
import unfiltered.request.Method
import scala.language.implicitConversions
trait Hosted extends BeforeAfterAll {
val port = unfiltered.util.Port.any
val host = HttpUrl.parse(s"http://localhost:$port")
private var dispatcher: Dispatcher = _
override def beforeAll(): Unit = {
dispatcher = new Dispatcher(Executors.newFixedThreadPool(10, new ThreadFactory {
val counter = new java.util.concurrent.atomic.AtomicInteger()
val defaultThreadFactory = Executors.defaultThreadFactory()
override def newThread(r: Runnable) = {
val thread = defaultThreadFactory.newThread(r)
thread.setName("okhttp-dispatcher-" + counter.incrementAndGet())
thread.setDaemon(true)
thread
}
}))
}
override def afterAll(): Unit = {
if (dispatcher != null) {
dispatcher.executorService().shutdown()
}
dispatcher = null
}
def http(req: Request): Response = {
val response = httpx(req)
if (response.code == 200) {
response
} else {
throw StatusCode(response.code)
}
}
def httpx(req: Request): Response = {
requestWithNewClient(req, new OkHttpClient.Builder())
}
def requestWithNewClient(req: Request, builder: OkHttpClient.Builder): Response = {
import collection.JavaConverters._
val client = builder.dispatcher(dispatcher).build()
val res = client.newCall(req).execute()
val headers = res.headers.toMultimap.asScala.mapValues(_.asScala.toList).toMap
val transformed = Response(res.code(), headers, Option(res.body()).map{ body =>
val bytes = body.bytes()
ByteString.of(bytes, 0, bytes.length)
})
res.close()
transformed
}
def req(url: HttpUrl): Request = new Request.Builder().url(url).build()
case class StatusCode(code: Int) extends RuntimeException(code.toString)
implicit class HttpUrlExtensions(url: HttpUrl) {
def /(part: String) = url.newBuilder.addPathSegment(part).build
def <<?(query: Map[String, String]): HttpUrl = {
val b = url.newBuilder()
query.foreach{case (k, v) => b.addQueryParameter(k, v)}
b.build()
}
}
implicit class RequestExtensions(request: Request) {
def <:<(headers: Map[String, String]): Request = {
val builder = request.newBuilder()
headers.foreach{case (k, v) => builder.addHeader(k, v)}
builder.build()
}
def <<?(query: Map[String, String]): Request = {
val b = request.url().newBuilder()
query.foreach{case (k, v) => b.addQueryParameter(k, v)}
req(b.build())
}
def as_!(user: String, password: String) = {
val builder = request.newBuilder()
val basicAuth = Credentials.basic(user, password)
builder.addHeader("Authorization", basicAuth)
builder.build()
}
def <<(data: Map[String, String], method: Method = unfiltered.request.POST): Request = {
val builder = request.newBuilder()
val form = new FormBody.Builder()
data.foreach{case (k,v) => form.add(k, v)}
builder.method(method.method, form.build())
builder.build()
}
def POST[A](data: A, mt: MediaType = MediaType.parse("application/octet-stream"))(implicit c: ByteStringToConverter[A]): Request = {
val builder = request.newBuilder()
builder.post(RequestBody.create(mt, c.toByteString(data))).build()
}
def POST[A](body: RequestBody): Request = {
val builder = request.newBuilder()
builder.post(body).build()
}
def <<*(name: String, file: java.io.File, mt: String) = {
val mp = new MultipartBody.Builder().
setType(MultipartBody.FORM).
addFormDataPart(name, file.getName, RequestBody.create(MediaType.parse(mt), file)).build()
POST(mp)
}
}
case class Response(code: Int, headers: Map[String, List[String]], body: Option[ByteString]) {
def as_string = body.map(_.utf8()).getOrElse("")
def header(name: String): Option[List[String]] = headers.get(name.toLowerCase)
def firstHeader(name: String): Option[String] = header(name).flatMap(_.headOption)
}
  /** Type class converting a value of type `A` into an okio `ByteString` request payload. */
  trait ByteStringToConverter[A] {
    def toByteString(a: A): ByteString
  }
object ByteStringToConverter {
implicit val StringByteStringConverter = new ByteStringToConverter[String] {
override def toByteString(a: String): ByteString = ByteString.encodeUtf8(a)
}
implicit val IdentityStringConverter = new ByteStringToConverter[ByteString] {
override def toByteString(a: ByteString): ByteString = a
}
implicit val bytesStringConverter = new ByteStringToConverter[Array[Byte]] {
override def toByteString(a: Array[Byte]): ByteString = ByteString.of(a, 0, a.length)
}
}
  // Lets a bare HttpUrl be used wherever a Request is expected, as a GET.
  implicit def urlToGetRequest(url: HttpUrl): Request = req(url)
}
| omarkilani/unfiltered | specs2/src/main/scala/Hosted.scala | Scala | mit | 4,928 |
package com.twitter.finagle.filter
import com.twitter.finagle.Stack.{Module1, Role}
import com.twitter.finagle._
import com.twitter.finagle.param.Stats
import com.twitter.finagle.stats.{StatsReceiver, Verbosity}
import com.twitter.finagle.tracing.{Trace, Tracing}
import com.twitter.util.{Future, Return, Try}
/**
* A filter that exports two histograms to a given [[StatsReceiver]].
*
* 1. "request_payload_bytes" - a distribution of request payload sizes in bytes
* 2. "response_payload_bytes" - a distribution of response payload sizes in bytes
*
* The sizes are also traced using the binary annotations
* clnt/request_payload_bytes and clnt/response_payload_bytes on the
* client side, and srv/request_payload_bytes and srv/response_payload_bytes.
* on the server.
*/
class PayloadSizeFilter[Req, Rep](
  statsReceiver: StatsReceiver,
  reqTraceKey: String,
  repTraceKey: String,
  reqSize: Req => Int,
  repSize: Rep => Int)
    extends SimpleFilter[Req, Rep] {

  // Debug-verbosity histograms of payload sizes, in bytes.
  private[this] val requestBytes = statsReceiver.stat(Verbosity.Debug, "request_payload_bytes")
  private[this] val responseBytes = statsReceiver.stat(Verbosity.Debug, "response_payload_bytes")

  // Callback recording the response payload size once the future resolves.
  // Failed responses carry no payload, so nothing is recorded for them.
  private[this] def recordRepSize(trace: Tracing): Try[Rep] => Unit = {
    case Return(rep) =>
      val numBytes = repSize(rep)
      if (trace.isActivelyTracing) trace.recordBinary(repTraceKey, numBytes)
      responseBytes.add(numBytes.toFloat)
    case _ => // failure: no payload to measure
  }

  def apply(req: Req, service: Service[Req, Rep]): Future[Rep] = {
    val numBytes = reqSize(req)
    requestBytes.add(numBytes.toFloat)
    // Capture the trace once so the response callback uses the same context.
    val trace = Trace()
    if (trace.isActivelyTracing) trace.recordBinary(reqTraceKey, numBytes)
    service(req).respond(recordRepSize(trace))
  }
}
object PayloadSizeFilter {
  val Role: Stack.Role = Stack.Role("PayloadSize")

  private[finagle] val Description: String = "Reports request/response payload sizes"

  // Binary trace annotation keys, client side.
  val ClientReqTraceKey: String = "clnt/request_payload_bytes"
  val ClientRepTraceKey: String = "clnt/response_payload_bytes"

  // Binary trace annotation keys, server side.
  val ServerReqTraceKey: String = "srv/request_payload_bytes"
  val ServerRepTraceKey: String = "srv/response_payload_bytes"

  // Shared stack module: installs a PayloadSizeFilter configured with the
  // stack's StatsReceiver and the given trace keys / size functions.
  private def module[Req, Rep](
    reqTraceKey: String,
    repTraceKey: String,
    reqSize: Req => Int,
    repSize: Rep => Int
  ): Stackable[ServiceFactory[Req, Rep]] =
    new Module1[param.Stats, ServiceFactory[Req, Rep]] {
      def role: Role = PayloadSizeFilter.Role
      def description: String = PayloadSizeFilter.Description
      def make(stats: Stats, next: ServiceFactory[Req, Rep]): ServiceFactory[Req, Rep] = {
        val filter: SimpleFilter[Req, Rep] =
          new PayloadSizeFilter(stats.statsReceiver, reqTraceKey, repTraceKey, reqSize, repSize)
        filter.andThen(next)
      }
    }

  private[finagle] def clientModule[Req, Rep](
    reqSize: Req => Int,
    repSize: Rep => Int
  ): Stackable[ServiceFactory[Req, Rep]] =
    module(ClientReqTraceKey, ClientRepTraceKey, reqSize, repSize)

  private[finagle] def serverModule[Req, Rep](
    reqSize: Req => Int,
    repSize: Rep => Int
  ): Stackable[ServiceFactory[Req, Rep]] =
    module(ServerReqTraceKey, ServerRepTraceKey, reqSize, repSize)
}
| twitter/finagle | finagle-core/src/main/scala/com/twitter/finagle/filter/PayloadSizeFilter.scala | Scala | apache-2.0 | 3,174 |
package com.tomogle.fizzbuzz
object FizzBuzz {

  /** Entry point: prints classic FizzBuzz for 1..100 via the extensible variant. */
  def main(args: Array[String]): Unit = { // `: Unit =` — procedure syntax is deprecated
    val fizzBuzzer = new BasicExtensibleFizzBuzzer(
      Seq(Mappings.divisibleBy3And5, Mappings.divisibleBy3, Mappings.divisibleBy5),
      _.toString
    )
    fizzBuzzer.run(1 to 100) foreach println
  }

  /** Maps every element of a range to its FizzBuzz value. */
  trait FizzBuzzer[A,B] {
    def run(range: Seq[A]): Seq[B] = range map assignValue
    /** The value a single element maps to. */
    def assignValue(element: A): B
  }

  /** Hard-coded classic FizzBuzz over Ints. */
  object BasicFizzBuzzer extends FizzBuzzer[Int, String] {
    def assignValue(element: Int): String = {
      val divisibleBy3 = element % 3 == 0
      val divisibleBy5 = element % 5 == 0
      if (divisibleBy3 && divisibleBy5) "FizzBuzz"
      else if (divisibleBy3) "Fizz"
      else if (divisibleBy5) "Buzz"
      else element.toString
    }
  }

  /** A mapping pairs a predicate with the value produced when it matches. */
  type Mapping[A,B] = (Test[A], B)
  type Test[A] = A => Boolean

  /** Standard FizzBuzz mappings; order matters — most specific first. */
  object Mappings {
    val divisibleBy3And5: Mapping[Int, String] = (element => element % 3 == 0 && element % 5 == 0, "FizzBuzz")
    val divisibleBy3: Mapping[Int, String] = (_ % 3 == 0, "Fizz")
    val divisibleBy5: Mapping[Int, String] = (_ % 5 == 0, "Buzz")
  }

  /** FizzBuzzer driven by an ordered sequence of mappings: the first mapping
   *  whose predicate accepts the element wins; unmatched elements fall back
   *  to `baseElementTransform`.
   */
  trait ExtensibleFizzBuzzer[A, B] extends FizzBuzzer[A, B] {
    val mappings: Seq[Mapping[A, B]]
    val baseElementTransform: A => B

    def assignValue(element: A): B =
      // collectFirst replaces the original find+map+getOrElse chain and drops
      // the redundant `test: Test[A]` type pattern, which produced an
      // unchecked-erasure warning without adding any safety.
      mappings.collectFirst {
        case (test, result) if test(element) => result
      } getOrElse baseElementTransform(element)
  }

  class BasicExtensibleFizzBuzzer(val mappings: Seq[Mapping[Int, String]],
                                  val baseElementTransform: Int => String) extends ExtensibleFizzBuzzer[Int, String]
}
| tom-ogle/scala-scratch-code | src/main/scala/com/tomogle/fizzbuzz/FizzBuzz.scala | Scala | mit | 1,709 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.collector.builder
import com.twitter.finagle.stats.{OstrichStatsReceiver, StatsReceiver}
import com.twitter.logging.config._
import com.twitter.logging.{ConsoleHandler, Logger, LoggerFactory}
import com.twitter.ostrich.admin._
import java.net.{InetAddress, InetSocketAddress}
import com.twitter.zipkin.builder.Builder
import scala.util.matching.Regex
/**
* Base builder for a Zipkin service
*/
// Immutable builder collecting the network and admin/stats configuration shared
// by Zipkin services. `apply()` yields the startup action run by Ostrich.
case class ZipkinServerBuilder(
  serverPort : Int,
  adminPort : Int,
  serverAddress : InetAddress = InetAddress.getByAddress(Array[Byte](0,0,0,0)),
  adminStatsNodes : List[StatsFactory] = List(StatsFactory(reporters = List(TimeSeriesCollectorFactory()))),
  adminStatsFilters : List[Regex] = List.empty,
  statsReceiver : StatsReceiver = new OstrichStatsReceiver
) extends Builder[(RuntimeEnvironment) => Unit] {
  // Fluent copy-based setters: each returns a new builder with one field replaced.
  def serverPort(p: Int) : ZipkinServerBuilder = copy(serverPort = p)
  def adminPort(p: Int) : ZipkinServerBuilder = copy(adminPort = p)
  def serverAddress(a: InetAddress) : ZipkinServerBuilder = copy(serverAddress = a)
  def statsReceiver(s: StatsReceiver) : ZipkinServerBuilder = copy(statsReceiver = s)
  def addAdminStatsNode(n: StatsFactory): ZipkinServerBuilder = copy(adminStatsNodes = adminStatsNodes :+ n)
  def addAdminStatsFilter(f: Regex) : ZipkinServerBuilder = copy(adminStatsFilters = adminStatsFilters :+ f)
  // Built lazily so it picks up the final field values of this (immutable) instance.
  private lazy val adminServiceFactory: AdminServiceFactory =
    AdminServiceFactory(
      httpPort = adminPort,
      statsNodes = adminStatsNodes,
      statsFilters = adminStatsFilters
    )
  lazy val socketAddress = new InetSocketAddress(serverAddress, serverPort)
  // Mutable side channel: populated when apply()'s action runs, so callers can
  // reach the started admin HTTP service afterwards. None until startup.
  var adminHttpService: Option[AdminHttpService] = None
  // The startup action: starts the admin service for the given runtime and
  // records the handle in adminHttpService.
  def apply() = (runtime: RuntimeEnvironment) => {
    adminHttpService = Some(adminServiceFactory(runtime))
  }
}
| jfeltesse-mdsol/zipkin | zipkin-collector-service/src/main/scala/com/twitter/zipkin/collector/builder/ZipkinServerBuilder.scala | Scala | apache-2.0 | 2,585 |
package functional
import java.sql.Timestamp
import java.util.UUID
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator
import com.mohiva.play.silhouette.impl.providers.CredentialsProvider
import com.mohiva.play.silhouette.test.{FakeEnvironment, _}
import models._
import models.daos._
import org.joda.time.{DateTime, DateTimeZone}
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import play.api.i18n.MessagesApi
import play.api.libs.json.Json
import play.api.test.Helpers._
import play.api.test.{FakeApplication, FakeRequest, WithApplication}
import play.filters.csrf.CSRF
import controllers.PrescriptionController
import scala.concurrent.{Future, ExecutionContext}
class PrescriptionControllerSpec (implicit ec: ExecutionContext) extends Specification with Mockito{
implicit val app: FakeApplication = FakeApplication(additionalConfiguration = inMemoryDatabase("test"))
implicit val app2: FakeApplication = FakeApplication(additionalConfiguration = inMemoryDatabase("test"))
val email = "bill@thehospital.com"
val hospitalNumber = "123"
val title = "Mrs"
val firstName = "Cruella"
val surname = "DaVille"
val dob = "20-12-1958"
val password = "1000"
val MRDrug = "morphine"
val MRDose = "10.0mg"
val breakthroughDrug = "oramorph"
val breakthroughDose = "2.5mg"
"PrescriptionController.index" should {
"redirect the user to the sign in page if an unauthorized user requests it (with a 303 redirect)" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Administrator(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.prescription(mockPatient)(request)
status(result) must equalTo(303)
}
"return OK status if an authorized prescriber requests it" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Prescriber(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.prescription(mockPatient)(request)
status(result) must equalTo(OK)
}
"return the prescription page to an authorised user" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Prescriber(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.prescription(mockPatient)(request)
contentAsString(result) must contain("PRESCRIBE INITIAL DOSES")
}
"return the prescription page with the correct patients prescription in the view (to an authorised user)" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Prescriber(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.prescription(mockPatient)(request)
contentAsString(result) must contain(mockPatient.hospitalNumber)
}
}
"PrescriptionController.selectPatient" should {
"redirect the user to the sign in page if an unauthorized user requests it (with a 303 redirect)" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Administrator(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.selectPatient(request)
status(result) must equalTo(303)
}
"return OK status if an authorized prescriber requests it" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Prescriber(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.selectPatient(request)
status(result) must equalTo(200)
}
"return the selectPatient page to an authorised user" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val identity = Prescriber(mockUuid, LoginInfo("email", email), "Mr", "Bill", "Smith", email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val request = FakeRequest().withAuthenticator(identity.loginInfo).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.selectPatient(request)
contentAsString(result) must contain("Find a Patient")
}
}
"PrescriptionController.addPrescription" should {
"return http bad request (400) if the form data is of an incorrect format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
status(result) must equalTo(400)
}
"return http redirect (303) if the user is unauthorised to access this page" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Administrator(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
status(result) must equalTo(303)
}
"show the prescriber the current prescription if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain("PRESCRIPTION SUCCESSFUL!")
}
"show the prescriber the current prescription with the correct patient details if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(mockPatient.hospitalNumber)
}
"show the prescriber the current prescription with the correct prescriber displayed if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(identity.firstName)
contentAsString(result) must contain(identity.lastName)
}
"show the prescriber the current prescription with the correct breakthrough drug displayed if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(breakthroughDrug)
}
"show the prescriber the current prescription with the correct breakthrough dose displayed if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(breakthroughDose)
}
"show the prescriber the current prescription with the correct MR drug displayed if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(MRDrug)
}
"show the prescriber the current prescription with the correct MR dose displayed if the data is of the correct format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"MRDrug" -> MRDrug,
"MRDose" -> MRDose,
"breakthroughDrug" -> breakthroughDrug,
"breakthroughDose" -> breakthroughDose
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.addPrescription(mockPatient)(request)
contentAsString(result) must contain(MRDose)
}
}
// NOTE(review): the mock/DAO setup below is copy-pasted across every test in this
// block; a shared fixture/helper would reduce drift between tests.
"PrescriptionController.getLatestPrescriptionWithDoseTitrations" should {
// Malformed body (url-encoded tuple instead of the expected JSON) -> 400.
"return http bad request (400) if the form data is of an incorrect format" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.getLatestPrescriptionWithDoseTitrations(request)
status(result) must equalTo(400)
}
// An Administrator identity (rather than a Prescriber) -> 303 redirect;
// presumably this action is restricted to prescribers — confirm in the controller.
"return http redirect (303) if the user is unauthorised to access this page" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Administrator(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.getLatestPrescriptionWithDoseTitrations(request)
status(result) must equalTo(303)
}
// Unknown patient: the spied PatientDAO returns None, expecting a redirect to patient selection.
"redirect the prescriber to the selectPatient page if the patient doesn't exist" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPatientDAO = new PatientDAOImpl()
val spyPatientDAO = spy(mockPatientDAO)
doReturn(Future(None)).when(spyPatientDAO).findPatient(hospitalNumber)
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"hospitalNumber" -> hospitalNumber
)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, spyPatientDAO, dataFormatter, timeZone)
val result = controller.getLatestPrescriptionWithDoseTitrations(request)
redirectLocation(result) must beSome.which(_ == "/patient/select")
}
// Known patient: expects a redirect carrying the patient's details as query parameters.
"redirect the prescriber to the retrieveCurrentPrescription if the patient exists" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPatientDAO = new PatientDAOImpl()
val spyPatientDAO = spy(mockPatientDAO)
doReturn(Future(Some(mockPatient))).when(spyPatientDAO).findPatient(hospitalNumber)
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = Json.obj(
"hospitalNumber" -> hospitalNumber
)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withJsonBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, spyPatientDAO, dataFormatter, timeZone)
val result = controller.getLatestPrescriptionWithDoseTitrations(request)
// NOTE(review): this redirect URL hard-codes the shared fixture values
// (hospitalNumber=123, Mrs Cruella DaVille, dob 20-12-1958); it will break if those fixtures change.
redirectLocation(result) must beSome.which(_ == "/prescription/current?patient.hospitalNumber=123&patient.title=Mrs&patient.firstName=Cruella&patient.surname=DaVille&patient.dob=20-12-1958")
}
}
// NOTE(review): the five "authorised + current prescription" tests below share an
// identical ~20-line setup and differ only in the final assertion; a shared
// fixture would make the intent of each test clearer.
"PrescriptionController.retrieveCurrentPrescription" should {
// Administrator identity -> 303 redirect (action presumably prescriber-only).
"return http redirect (303) if the user is unauthorised to access this page" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Administrator(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
status(result) must equalTo(303)
}
// No existing prescription (spied DAO returns None) -> redirect to the blank
// prescription ("/script") page, carrying the patient details as query parameters.
"redirects the user to PrescriptionController.prescription if there is no prescription prescribed for the patient" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(None)).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
redirectLocation(result) must beSome.which(_ == "/script?patient.hospitalNumber=123&patient.title=Mrs&patient.firstName=Cruella&patient.surname=DaVille&patient.dob=20-12-1958")
}
// Authorised prescriber + existing prescription -> 200 OK.
"returns a http 200 response if the user is authorised and the patient has a current prescription" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = new PrescriberDAOImpl()
val spyPrescriberDAO = spy(mockPrescriberDAO)
doReturn(Future(Some("Mr Bill Smith"))).when(spyPrescriberDAO).findPrescriberName(mockPrescription.prescriberID)
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = new DoseDAOImpl(timeZone)
val spyDoseDAO = spy(mockDoseDAO)
doReturn(Future(10)).when(spyDoseDAO).countBreakthroughDoses(mockPrescription.ptHospitalNumber, mockPrescription.date)
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(spyPrescriberDAO, spyDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(Some(mockPrescription))).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
// NOTE(review): the controller receives the un-spied mockPrescriberDAO while the
// dataFormatter uses spyPrescriberDAO — confirm this mix is intentional.
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
status(result) must equalTo(200)
}
// Same setup as above; asserts the dose-titrations page heading is rendered.
"displays the doseCalculations page if the user is authorised and the patient has a current prescription" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = new PrescriberDAOImpl()
val spyPrescriberDAO = spy(mockPrescriberDAO)
doReturn(Future(Some("Mr Bill Smith"))).when(spyPrescriberDAO).findPrescriberName(mockPrescription.prescriberID)
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = new DoseDAOImpl(timeZone)
val spyDoseDAO = spy(mockDoseDAO)
doReturn(Future(10)).when(spyDoseDAO).countBreakthroughDoses(mockPrescription.ptHospitalNumber, mockPrescription.date)
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(spyPrescriberDAO, spyDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(Some(mockPrescription))).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
contentAsString(result) must contain("Suggested Dose Titrations")
}
// Same setup; asserts the logged-in prescriber's full name appears on the page.
"displays the doseCalculations page with the correct prescriber details if the user is authorised and the patient has a current prescription" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = new PrescriberDAOImpl()
val spyPrescriberDAO = spy(mockPrescriberDAO)
doReturn(Future(Some("Mr Bill Smith"))).when(spyPrescriberDAO).findPrescriberName(mockPrescription.prescriberID)
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = new DoseDAOImpl(timeZone)
val spyDoseDAO = spy(mockDoseDAO)
doReturn(Future(10)).when(spyDoseDAO).countBreakthroughDoses(mockPrescription.ptHospitalNumber, mockPrescription.date)
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(spyPrescriberDAO, spyDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(Some(mockPrescription))).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
contentAsString(result) must contain(identity.firstName +" "+ identity.lastName)
}
// Same setup; asserts the patient's hospital number appears on the page.
"displays the doseCalculations page with the correct patient if the user is authorised and the patient has a current prescription" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = new PrescriberDAOImpl()
val spyPrescriberDAO = spy(mockPrescriberDAO)
doReturn(Future(Some("Mr Bill Smith"))).when(spyPrescriberDAO).findPrescriberName(mockPrescription.prescriberID)
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = new DoseDAOImpl(timeZone)
val spyDoseDAO = spy(mockDoseDAO)
doReturn(Future(10)).when(spyDoseDAO).countBreakthroughDoses(mockPrescription.ptHospitalNumber, mockPrescription.date)
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(spyPrescriberDAO, spyDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(Some(mockPrescription))).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
contentAsString(result) must contain(mockPatient.hospitalNumber)
}
// Same setup; asserts the breakthrough drug and dose from the mock prescription are shown.
"displays the doseCalculations page with the correct current prescription information if the user is authorised and the patient has a current prescription" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = new PrescriberDAOImpl()
val spyPrescriberDAO = spy(mockPrescriberDAO)
doReturn(Future(Some("Mr Bill Smith"))).when(spyPrescriberDAO).findPrescriberName(mockPrescription.prescriberID)
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = new DoseDAOImpl(timeZone)
val spyDoseDAO = spy(mockDoseDAO)
doReturn(Future(10)).when(spyDoseDAO).countBreakthroughDoses(mockPrescription.ptHospitalNumber, mockPrescription.date)
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(spyPrescriberDAO, spyDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = new PrescriptionDAOImpl(timeZone)
val spyPrescriptionDAO = spy(mockPrescriptionDAO)
doReturn(Future(Some(mockPrescription))).when(spyPrescriptionDAO).getLatestPrescription(hospitalNumber)
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, spyPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.retrieveCurrentPrescription(mockPatient)(request)
contentAsString(result) must contain(mockPrescription.breakthroughDrug)
contentAsString(result) must contain(mockPrescription.breakthroughDose.toString)
}
}
"PrescriptionController.repeatPrescription" should {
// Administrator identity -> 303 redirect (action presumably prescriber-only).
"return http redirect (303) if the user is unauthorised to access this page" in new WithApplication(app) {
val mockUuid = UUID.randomUUID()
val mockPrescriptionDAO = mock[PrescriptionDAO]
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val timeZone = DateTimeZone.forID("Europe/London")
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Administrator(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
// NOTE(review): dose values here lack the "mg" suffix used in the other two tests — confirm intended.
val doseTitrationData = DoseTitrationData("10","2","5","20","25","2.5","5")
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.repeatPrescription(doseTitrationData, mockPatient, MRDrug, breakthroughDrug)(request)
status(result) must equalTo(303)
}
// Authorised prescriber -> 200 OK.
"returns a http 200 response if the user is authorised" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = mock[PrescriptionDAO]
val doseTitrationData = DoseTitrationData("10","2","5","20mg","25mg","2.5mg","5mg")
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.repeatPrescription(doseTitrationData, mockPatient, MRDrug, breakthroughDrug)(request)
status(result) must equalTo(200)
}
// Authorised prescriber -> confirmation page content is rendered.
"directs an authorised user to the currentPrescription page" in new WithApplication(app2) {
val mockUuid = UUID.randomUUID()
val mockPrescription = new Prescription(hospitalNumber, "prescriberID", new Timestamp(new DateTime(2016,3,13,1,58).getMillis), "MRDrug", 5.00, "breakthroughDrug", 10.00)
val mockPrescriberDAO = mock[PrescriberDAO]
val mockPtDAO = mock[PatientDAO]
val timeZone = DateTimeZone.forID("Europe/London")
val mockDoseDAO = mock[DoseDAO]
val mockCredentialsProvider = mock[CredentialsProvider]
val dataFormatter = new PrescriptionDataFormatterImpl(mockPrescriberDAO, mockDoseDAO, timeZone)
val mockLoginInfo = LoginInfo("email", email)
val identity = Prescriber(mockUuid, mockLoginInfo, title, firstName, surname, email)
val messagesApi = play.api.Play.current.injector.instanceOf[MessagesApi]
implicit val env = FakeEnvironment[User, CookieAuthenticator](Seq(identity.loginInfo -> identity))
val formData = (email, password)
val mockPatient = new Patient(hospitalNumber, title, firstName, surname, dob)
val mockPrescriptionDAO = mock[PrescriptionDAO]
val doseTitrationData = DoseTitrationData("10","2","5","20mg","25mg","2.5mg","5mg")
val request = FakeRequest().withAuthenticator(identity.loginInfo).withFormUrlEncodedBody(formData).withSession("csrfToken" -> CSRF.SignedTokenProvider.generateToken)
val controller = new PrescriptionController(messagesApi, env, mockPrescriptionDAO, mockPrescriberDAO, mockPtDAO, dataFormatter, timeZone)
val result = controller.repeatPrescription(doseTitrationData, mockPatient, MRDrug, breakthroughDrug)(request)
contentAsString(result) must contain("PRESCRIPTION SUCCESSFUL!")
}
}
}
| BBK-SDP-2015-jtomli03/Morphidose2 | test/functional/PrescriptionControllerSpec.scala | Scala | apache-2.0 | 51,242 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.counter.loader.core
import org.apache.s2graph.core.{Edge, Graph, GraphUtil}
import org.apache.s2graph.counter.loader.config.StreamingConfig
import org.apache.s2graph.counter.models.CounterModel
import org.apache.s2graph.spark.config.S2ConfigFactory
import org.apache.spark.Logging
import play.api.libs.json._
import scala.collection.mutable.{HashMap => MutableHashMap}
/**
 * ETL helpers that convert raw s2graph edge log lines into [[CounterEtlItem]]s
 * for counter aggregation.
 *
 * Only mutating operations (insert / insertBulk / update / increment) are
 * counted; lines with any other operation are filtered out during parsing.
 */
object CounterEtlFunctions extends Logging {
  // op codes for the countable operations, resolved once via GraphUtil
  lazy val filterOps = Seq("insert", "insertBulk", "update", "increment").map(op => GraphUtil.operations(op))
  // NOTE(review): not referenced in this object; presumably consumed elsewhere — keep.
  lazy val preFetchSize = StreamingConfig.PROFILE_PREFETCH_SIZE
  lazy val config = S2ConfigFactory.config
  lazy val counterModel = new CounterModel(config)

  /**
   * Parses a single log line into an [[Edge]], keeping it only when the parsed
   * element is an edge whose operation is one of `filterOps`.
   */
  def logToEdge(line: String): Option[Edge] =
    Graph.toGraphElement(line).collect {
      case edge: Edge if filterOps.contains(edge.op) => edge
    }

  /**
   * Parses an edge-format log line into a [[CounterEtlItem]].
   *
   * Example line:
   * 1427082276804 insert edge 19073318 52453027_93524145648511699 story_user_ch_doc_view {"doc_type" : "l", "channel_subscribing" : "y", "view_from" : "feed"}
   */
  def parseEdgeFormat(line: String): Option[CounterEtlItem] = {
    // reuse logToEdge so the edge/op filtering logic lives in exactly one place
    logToEdge(line).map { edge =>
      val label = edge.label
      val labelName = label.label
      val tgtService = label.tgtColumn.service.serviceName
      val tgtId = edge.tgtVertex.innerId.toString()
      val srcId = edge.srcVertex.innerId.toString()
      // make empty property if no exist edge property (7th TSV field)
      val dimension = Json.parse(Some(GraphUtil.split(line)).filter(_.length >= 7).map(_(6)).getOrElse("{}"))
      val bucketKeys = Seq("_from")
      val bucketKeyValues = {
        for {
          variable <- bucketKeys
        } yield {
          val jsValue = variable match {
            case "_from" => JsString(srcId)
            case s => dimension \\ s
          }
          s"[[$variable]]" -> jsValue
        }
      }
      // explicit parentheses instead of the original (discouraged) postfix `toMap`
      val property = Json.toJson((bucketKeyValues :+ ("value" -> JsString("1"))).toMap)
      CounterEtlItem(edge.ts, tgtService, labelName, tgtId, dimension, property)
    }
  }

  /** Parses many lines, silently dropping any that fail the parse/filter. */
  def parseEdgeFormat(lines: List[String]): List[CounterEtlItem] =
    lines.flatMap(parseEdgeFormat)

  /**
   * Applies the counter policy for (service, action): when the policy uses a
   * profile with a bucket imp id, merges profile dimensions into the items;
   * when it uses a profile without one, or no policy exists, returns Nil;
   * otherwise returns the items unchanged.
   */
  def checkPolicyAndMergeDimension(service: String, action: String, items: List[CounterEtlItem]): List[CounterEtlItem] = {
    counterModel.findByServiceAction(service, action).map { policy =>
      if (policy.useProfile) {
        policy.bucketImpId match {
          case Some(_) => DimensionProps.mergeDimension(policy, items)
          case None => Nil
        }
      } else {
        items
      }
    }.getOrElse(Nil)
  }
}
| jongwook/incubator-s2graph | s2counter_loader/src/main/scala/org/apache/s2graph/counter/loader/core/CounterEtlFunctions.scala | Scala | apache-2.0 | 3,801 |
package com.sksamuel.elastic4s.http
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.language.higherKinds
/**
 * Type class abstracting over the effect type `F` in which an HTTP call is
 * executed, so callers can choose e.g. scala.concurrent.Future or another effect.
 */
trait Executor[F[_]] {
// Sends the request via the client and returns the response wrapped in F.
def exec(client: HttpClient, request: ElasticRequest): F[HttpResponse]
}
object Executor {

  /** Summons the implicit [[Executor]] instance for `F`. */
  def apply[F[_]: Executor](): Executor[F] = implicitly[Executor[F]]

  /**
   * Default executor for scala.concurrent.Future, bridging the client's
   * callback API to a Promise.
   *
   * Fix: the implicit def now has an explicit return type. Implicit
   * definitions should always be explicitly annotated — an inferred type here
   * is fragile for implicit resolution and incremental compilation.
   *
   * NOTE(review): defaulting to the global ExecutionContext in library code is
   * questionable; callers should supply their own EC. Kept for backward
   * compatibility.
   */
  implicit def FutureExecutor(implicit ec: ExecutionContext = ExecutionContext.Implicits.global): Executor[Future] =
    new Executor[Future] {
      override def exec(client: HttpClient, request: ElasticRequest): Future[HttpResponse] = {
        val promise = Promise[HttpResponse]()
        // Route the client's Either-based callback into the promise exactly once.
        val callback: Either[Throwable, HttpResponse] => Unit = {
          case Left(t)  => promise.tryFailure(t)
          case Right(r) => promise.trySuccess(r)
        }
        client.send(request, callback)
        promise.future
      }
    }
}
| Tecsisa/elastic4s | elastic4s-http/src/main/scala/com/sksamuel/elastic4s/http/Executor.scala | Scala | apache-2.0 | 839 |
package org.mitre.mandolin.mx
import org.mitre.mandolin.optimize.{ Updater, LossGradient, TrainingUnitEvaluator }
import org.mitre.mandolin.mlp.MMLPFactor
import ml.dmlc.mxnet.{ EpochEndCallback, Uniform, Initializer, Xavier, Model, Shape, FeedForward,
Symbol, Context, Optimizer, NDArray, DataIter, DataBatch, Accuracy }
import ml.dmlc.mxnet.io.{ NDArrayIter }
import ml.dmlc.mxnet.optimizer.SGD
import ml.dmlc.mxnet.Callback.Speedometer
import collection.mutable.ArrayBuffer
import org.slf4j.LoggerFactory
// Loss gradient carrying only a scalar loss value (MxNet manages the actual
// gradients internally); add() sums losses, e.g. when combining partial results.
class MxNetLossGradient(loss: Double) extends LossGradient[MxNetLossGradient](loss) {
def add(other: MxNetLossGradient) = new MxNetLossGradient(this.loss + other.loss)
// No flat-array representation exists for this gradient type, by design.
def asArray = throw new RuntimeException("Loss gradient not convertable to array")
}
class MxNetFactor(val data: NDArray, val label: NDArray) extends Serializable
/**
 * This iterator creates an MxNet DataIter from an Iterator[DataBatch], caching
 * batches as they are pulled so they can be replayed. This allows the API to
 * remain separate from MxNet but should allow MxNet DataIter objects to be
 * passed in directly to evaluateTrainingMiniBatch, since DataIter extends
 * Iterator[DataBatch].
 *
 * NOTE(review): `index` and `cache` are inherited from GenIter (out of view);
 * assumed to start at -1 / empty — confirm in GenIter.
 */
class GenMxIter(it: Iterator[DataBatch], _batchSize: Int, _shape: Shape) extends GenIter(_shape, _batchSize, "data", "softmax_label") {
// Serves the next batch from the cache when the cursor is within it; otherwise
// pulls from the wrapped iterator and appends the batch to the cache.
override def next() : DataBatch = {
if (!hasNext) {
throw new NoSuchElementException("No more data")
}
index += 1
if (index >= 0 && index < cache.size) {
cache(index)
} else {
val db = it.next()
cache += db
db
}
}
// More data exists if the wrapped iterator has more, or the cursor has not yet
// reached the end of the cached batches.
override def hasNext() : Boolean = it.hasNext || (index < cache.size - 1 && cache.size > 0)
}
/**
 * Epoch-end callback that persists a model checkpoint under `prefix` once
 * every `freq` completed epochs.
 */
class MxModelCheckPoint(prefix: String, freq: Int = 1) extends EpochEndCallback {
  def invoke(epoch: Int, symbol: Symbol, argParams: Map[String, NDArray], auxParams: Map[String, NDArray]): Unit = {
    // `epoch` is zero-based, so epoch + 1 is the number of completed epochs.
    val completedEpochs = epoch + 1
    if (completedEpochs % freq == 0) {
      Model.saveCheckpoint(prefix, epoch, symbol, argParams, auxParams)
    }
  }
}
// Minibatch-only training evaluator that drives MxNet's FeedForward training
// loop; supports optional periodic checkpointing and resuming from a saved
// checkpoint.
class MxNetEvaluator(val net: Symbol, val ctx: Array[Context], shape: Shape, batchSz: Int, init: Initializer,
checkPointPrefix: Option[String] = None, checkPointFreq: Int = 1)
extends TrainingUnitEvaluator[DataBatch, MxNetWeights, MxNetLossGradient, MxNetOptimizer] {
val logger = LoggerFactory.getLogger(getClass)
// Checkpoint callback, or null when checkpointing is disabled (no prefix or freq <= 0).
// NOTE(review): consider Option instead of null once the MxNet API allows it.
val checkPointer =
if (checkPointFreq > 0) checkPointPrefix match {case Some(p) => new MxModelCheckPoint(p, checkPointFreq) case None => null}
else null
// Singleton-point evaluation is deliberately unsupported; this evaluator is minibatch-only.
def evaluateTrainingUnit(unit: DataBatch, weights: MxNetWeights, u: MxNetOptimizer) : MxNetLossGradient =
throw new RuntimeException("Closed evaluator MxNetEvaluator does not implement singleton point evaluations")
def copy() = throw new RuntimeException("MxNetEvaluator should/can not be copied")
// Runs training for `epochCnt` epochs over `tr`, evaluating on `tst`. When
// `startFrom` > 0 and a checkpoint prefix is set, resumes from that saved
// checkpoint; otherwise trains from scratch and stores the learned parameters
// back into `weights`. Returns the final accuracy wrapped as a loss gradient.
def evaluateTrainingMiniBatch(tr: DataIter, tst: DataIter, weights: MxNetWeights,
u: MxNetOptimizer, epochCnt: Int = 0, startFrom: Int = -1) : MxNetLossGradient = {
val metric = new Accuracy()
if ((checkPointPrefix.isDefined) && (startFrom > 0)) { // in this case we're resuming training from a saved checkpoint
// NOTE(review): epochSize is computed but never used — confirm whether it was meant to be passed to FeedForward.
val epochSize = math.ceil(tr.size.toDouble / batchSz)
val (sym, args, auxs) = Model.loadCheckpoint(checkPointPrefix.get, startFrom)
logger.info("Loading model " + checkPointPrefix.get)
val ff = new FeedForward(net, ctx, optimizer = u.optimizer,
initializer = init, numEpoch = epochCnt, batchSize = batchSz, argParams = args, auxParams = auxs,
beginEpoch = startFrom, allowExtraParams = true)
ff.fit(trainData = tr, evalData = tst, evalMetric = metric, kvStoreType = "local_update_cpu", epochEndCallback = checkPointer,
batchEndCallback = new Speedometer(batchSz, 50))
// NOTE(review): this branch does not copy the trained params back into `weights`,
// unlike the else-branch — confirm this asymmetry is intentional.
checkPointPrefix foreach {p => Model.saveCheckpoint(p, epochCnt, sym, ff.getArgParams, ff.getAuxParams)}
} else {
val ff = new FeedForward(net, ctx, optimizer = u.optimizer,
initializer = init, numEpoch = epochCnt, batchSize = batchSz, argParams = null, auxParams = null)
ff.fit(trainData = tr, evalData = tst, evalMetric = metric, kvStoreType = "local_update_cpu", epochEndCallback = checkPointer,
batchEndCallback = new Speedometer(batchSz, 50))
weights.setArgParams(ff.getArgParams)
weights.setAuxParams(ff.getAuxParams)
checkPointPrefix foreach {p => Model.saveCheckpoint(p, epochCnt, net, ff.getArgParams, ff.getAuxParams)}
}
new MxNetLossGradient(metric.get._2(0).toDouble)
}
}
/** Trains/evaluates an MXNet network over MMLP factors with fixed input dimension `idim`.
  * Mini-batch only; MXNet performs the gradient updates internally. */
class MxNetGlpEvaluator(val net: Symbol, val ctx: Array[Context], idim: Int)
  extends TrainingUnitEvaluator[MMLPFactor, MxNetWeights, MxNetLossGradient, MxNetOptimizer] {
  // Mini-batch size used when wrapping factor iterators for MXNet.
  val batchSize = 64
  // indicates that only minibatch training is supported and gradient updates are handled by evaluator directly
  val isClosed = true
  // Wraps MMLP factors as an MXNet data iterator with input shape (idim).
  private def factorsToIterator(units: Iterator[MMLPFactor]) : MMLPFactorIter = {
    new MMLPFactorIter(units, Shape(idim), batchSize)
  }
  /** Runs a single training pass (epoch `epochCnt`) over `units`, seeding MXNet with the
    * current arg/aux params from `weights` (null when absent). Returns accuracy wrapped
    * as a loss gradient. */
  def evaluateTrainingMiniBatch(units: Iterator[MMLPFactor], weights: MxNetWeights, u: MxNetOptimizer, epochCnt: Int = 0) : MxNetLossGradient = {
    // make a single epoch/pass over the data
    val args = weights.argParams.getOrElse(null)
    val auxs = weights.auxParams.getOrElse(null)
    val ff = new FeedForward(net, ctx, optimizer = u.optimizer, numEpoch = epochCnt+1,
        argParams = args, auxParams = auxs, beginEpoch=epochCnt)
    val trIter = factorsToIterator(units)
    // NOTE(review): both wrappers share the SAME underlying Iterator[MMLPFactor]; this
    // presumably relies on MMLPFactorIter caching its input (see dispose() below) —
    // confirm, otherwise the evaluation iterator would see an exhausted source.
    val tstIter = factorsToIterator(units) // separate iterators here
    val metric = new Accuracy()
    ff.fit(trIter, tstIter, metric)
    trIter.dispose() // clean out the cache for this iterator
    tstIter.dispose()
    // metric.get returns (names, values); index 0 holds the accuracy value.
    new MxNetLossGradient(metric.get._2(0).toDouble)
  }
  /** Not supported: this evaluator is "closed" (mini-batch only); always throws. */
  def evaluateTrainingUnit(unit: MMLPFactor, weights: MxNetWeights, u: MxNetOptimizer) : MxNetLossGradient =
    throw new RuntimeException("Closed evaluator MxNetEvaluator does not implement singleton point evaluations")
  /** Copying is intentionally unsupported — the evaluator owns native MXNet state. */
  def copy() = throw new RuntimeException("MxNetEvaluator should/can not be copied")
}
/** Updater adapter around a native MXNet optimizer. All real weight updates happen
  * inside MXNet itself, so every Mandolin-side hook here is either a no-op or
  * deliberately unsupported. */
class MxNetOptimizer(val optimizer: ml.dmlc.mxnet.Optimizer) extends Updater[MxNetWeights, MxNetLossGradient, MxNetOptimizer] {
  /** Optimizer state lives in native MXNet structures and has no flat-array form. */
  def asArray: Array[Float] = throw new RuntimeException("Unimplemented")
  /** Composition is a no-op: the wrapped MXNet optimizer already holds all state. */
  def compose(u: MxNetOptimizer): MxNetOptimizer = this
  /** Nothing to compress; returns this unchanged. */
  def compress(): MxNetOptimizer = this
  /** Copying is intentionally unsupported (native MXNet state). */
  def copy(): MxNetOptimizer = throw new RuntimeException("MxNetOptimizer should/can not be copied")
  /** Nothing to decompress; returns this unchanged. */
  def decompress(): MxNetOptimizer = this
  /** No-op: learning-rate schedules are managed by MXNet. */
  def resetLearningRates(v: Float): Unit = {}
  /** No-op: parameters cannot be injected from a flat array. */
  def updateFromArray(a: Array[Float]): Unit = {}
  /** No-op: MXNet applies gradients internally during fit(). */
  def updateWeights(g: MxNetLossGradient, w: MxNetWeights): Unit = {}
} | project-mandolin/mandolin | mandolin-mx/src/main/scala/org/mitre/mandolin/mx/MxNetEvaluator.scala | Scala | apache-2.0 | 6,833 |
package com.blogspot.nurkiewicz.akka.demo.fs
import java.nio.file._
import java.util.concurrent.TimeUnit
import akka.actor.{Props, ActorSystem}
import com.weiglewilczek.slf4s.Logging
/**
* @author Tomasz Nurkiewicz
* @since 3/30/13, 1:13 PM
*/
object WatchMain extends App with Logging {
  // Demo driver: watch the current directory for file-system events for one minute.
  val system = ActorSystem("WatchFsSystem")
  system.log.info("Started")
  val fsActor = system.actorOf(Props[FileSystemActor], "fileSystem")
  // Ask the actor to start monitoring the working directory.
  fsActor ! MonitorDir(Paths get ".")
  // Block the main thread for the demo period, then shut the actor system down.
  TimeUnit.SECONDS.sleep(60)
  system.shutdown()
}
| nurkiewicz/learning-akka | src/main/scala/com/blogspot/nurkiewicz/akka/demo/fs/WatchMain.scala | Scala | apache-2.0 | 519 |
package me.fornever.platonus
import scala.util.Random
import scala.collection.mutable
object Network {
  /** A depth-1 network over an empty transition table. */
  def apply(): Network = apply(1)
  /** A network of the given depth over an empty transition table. */
  def apply(depth: Int): Network =
    apply(depth, mutable.Map.empty[Vector[Word], mutable.Map[Word, Int]])
  /** A network over an existing (mutable) transition table. */
  def apply(depth: Int, data: mutable.Map[Vector[Word], mutable.Map[Word, Int]]) = new Network(depth, data)
}
/** A Markov-chain-style word network: maps each window of up to `depth` words to the
  * observed counts of the words that follow it, supporting training (`add`) and
  * random phrase generation (`generate`). */
class Network(val depth: Int,
              val data: mutable.Map[Vector[Word], mutable.Map[Word, Int]]) {
  // Number of distinct word-window keys in the transition table.
  def size = data.size
  /** Trains the network on one phrase: every window of `depth` words maps to the word
    * that follows it and its occurrence count is incremented in place. Returns this. */
  def add(phrase: Vector[String]) = {
    // Surround the phrase with begin/end markers so generation knows phrase boundaries.
    val words = Stream.concat(
      Stream(PhraseBegin()),
      phrase.toStream.map(OrdinarWord),
      Stream(PhraseEnd())
    ).toList
    // initialData generates a "begin -> 1", "begin 1 -> 2" etc. from a phrase "1 2 3". It helps at the initial phrase
    // generation stage.
    // NOTE(review): if depth - 1 >= words.length the dropped list is empty and
    // prepareWordValue's `_.last` would throw — only reachable for very large depths.
    val initialData: Map[Vector[Word], Map[Word, Int]] = (1 to depth - 1).map(
      length => {
        val key = words.take(length)
        val value = words.drop(length).take(1).toList
        key.toVector -> List(value)
      }
    ).toMap.map(prepareWordValue)
    // Full-depth windows: each depth-word prefix maps to the word that follows it.
    val phraseData: Map[Vector[Word], Map[Word, Int]] = words
      .sliding(depth + 1)
      .toList
      .groupBy(getWordsKey)
      .map(prepareWordValue)
    // Merge both maps into the mutable table, summing counts per successor word.
    for (pair <- Stream.concat(initialData, phraseData)) {
      pair match {
        case (keys, valueMap) =>
          val values = data.getOrElse(keys, scala.collection.mutable.Map[Word, Int]())
          data.update(keys, values)
          for (valueCount <- valueMap) {
            valueCount match {
              case (word, count) =>
                values.update(word, count + values.getOrElse(word, 0))
            }
          }
      }
    }
    this
  }
  /** Generates a random phrase by walking the table, choosing each successor with
    * probability proportional to its observed count. Stops at PhraseEnd, a dead end,
    * or after `limit` words when a limit is given. */
  def generate(limit: Option[Int]): Stream[String] = {
    // Extends the partial phrase one word at a time (tail-position recursion).
    def generate(init: Vector[Word]): Vector[Word] = {
      if (limit.isDefined && init.length > limit.get) {
        init
      } else {
        val key = init.takeRight(depth)
        val nextWordMap = data.getOrElse(key, Map())
        val nextWord = if (nextWordMap.isEmpty) {
          None
        } else {
          // Weighted random pick: `position` falls into a word's slot with
          // probability proportional to that word's count.
          val sum = nextWordMap.values.sum
          val position = Random.nextInt(sum)
          def getter(index: Int, iterator: Iterator[(Word, Int)]): Word = {
            iterator.next() match {
              case (word, value) =>
                val nextValue = index + value
                if (nextValue > position) {
                  word
                } else {
                  getter(nextValue, iterator)
                }
            }
          }
          Some(getter(0, nextWordMap.toIterator))
        }
        // Only ordinary words extend the phrase; anything else terminates it.
        nextWord match {
          case Some(word@OrdinarWord(_)) => generate(init :+ word)
          case _ => init :+ PhraseEnd()
        }
      }
    }
    val phrase = generate(Vector(PhraseBegin()))
    // Strip begin/end markers, keeping only the ordinary words.
    phrase.toStream.filter(_.isInstanceOf[OrdinarWord]).map {
      case OrdinarWord(word) => word
      case _ => throw new Exception("Impossible")
    }
  }
  // Unbounded generation: stops only at PhraseEnd or a dead end.
  def generate(): Stream[String] = generate(None)
  // Generation capped at `limit` words.
  def generate(limit: Int): Stream[String] = generate(Some(limit))
  // First `depth` words of a window form the lookup key.
  private def getWordsKey(words: List[Word]) = words.take(depth).toVector
  // Turns (key, windows) into (key, successor -> count) by counting each window's last word.
  private def prepareWordValue(item: (Vector[Word], List[List[Word]])): (Vector[Word], Map[Word, Int]) = {
    val key = item._1
    val finalWords = item._2.map(_.last)
    val countMap = finalWords.groupBy(identity).mapValues(_.length)
    (key, countMap)
  }
}
| ForNeVeR/platonus | src/main/scala/me/fornever/platonus/Network.scala | Scala | mit | 3,421 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.server
import akka.actor._
import akka.event.LoggingReceive
import org.ensime.api._
import org.ensime.core._
//import shapeless._
/**
* Accepts RpcRequestEnvelope and responds with an RpcResponseEnvelope to target.
* Also sends asynchronous RpcResponseEnvelopes to target.
* Ensures that everything in and out is canonised.
*/
class ConnectionHandler(
    project: ActorRef,
    broadcaster: ActorRef,
    target: ActorRef
) extends Actor with ActorLogging {

  /** Register with the broadcaster so asynchronous events are delivered here. */
  override def preStart(): Unit = broadcaster ! Broadcaster.Register

  /** Deregister on shutdown so the broadcaster stops sending events to a dead actor. */
  override def postStop(): Unit = broadcaster ! Broadcaster.Unregister

  // not Receive, thanks to https://issues.scala-lang.org/browse/SI-8861
  // (fixed in 2.11.7)
  def receive: PartialFunction[Any, Unit] =
    receiveRpc orElse LoggingReceive(receiveEvents)

  /** RPC request/response cycle; everything in and out is canonised. */
  def receiveRpc: Receive = {
    case req: RpcRequestEnvelope =>
      // One short-lived child actor per request, named after the call id.
      context.actorOf(RequestHandler(Canonised(req), project, self), s"${req.callId}")
    case outgoing: RpcResponseEnvelope =>
      target forward Canonised(outgoing)
  }

  /** Asynchronous events are wrapped in a response envelope with no call id. */
  def receiveEvents: Receive = {
    case outgoing: EnsimeEvent =>
      target forward RpcResponseEnvelope(None, Canonised(outgoing))
  }
}
object ConnectionHandler {
  /** Props for a handler bound to the given project, broadcaster and client target. */
  def apply(project: ActorRef, broadcaster: ActorRef, target: ActorRef): Props =
    Props(classOf[ConnectionHandler], project, broadcaster, target)
}
| VC1995/ensime-server | server/src/main/scala/org/ensime/server/ConnectionHandler.scala | Scala | gpl-3.0 | 1,560 |
package models.quiz
import com.artclod.mathml.scalar.MathMLElem
import com.artclod.slick.NumericBoolean
import models.support.HasOrder
import models._
import play.twirl.api.Html
/** One function-valued part of a student's answer, keyed to the answer hierarchy
  * (answer -> section -> part) and the question hierarchy it responds to. */
case class AnswerPartFunction(
    // =================== Ids ====================
    id: AnswerPartId,
    answerSectionId: AnswerSectionId,
    answerId: AnswerId,
    questionPartId: QuestionPartId,
    sectionId: QuestionSectionId,
    questionId: QuestionId,
    // =============== Answer stuff ===============
    functionRaw: String,
    functionMath: MathMLElem,
    correctNum: Short,
    order: Short) extends HasOrder[AnswerPartFunction] {
  /** Whether this part is correct; correctNum is a 0/1 numeric boolean flag. */
  def correct = NumericBoolean(correctNum)
} | kristiankime/calc-tutor | app/models/quiz/AnswerPartFunction.scala | Scala | mit | 717 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.communication.actors
import akka.actor.{Actor, ActorRef}
import akka.util.ByteString
import org.apache.toree.communication.{ZMQMessage, SocketManager}
import org.apache.toree.utils.LogLike
import org.zeromq.ZMQ
/**
* Represents an actor containing a dealer socket.
*
* @param connection The address to connect to
* @param listener The actor to send incoming messages back to
*/
class DealerSocketActor(connection: String, listener: ActorRef)
  extends Actor with LogLike
{
  logger.debug(s"Initializing dealer socket actor for $connection")

  // Owns the socket's lifecycle for this actor.
  private val manager: SocketManager = new SocketManager

  // Incoming raw frames are wrapped as immutable ByteStrings and forwarded to the listener.
  private val socket = manager.newDealerSocket(connection, (message: Seq[Array[Byte]]) =>
    listener ! ZMQMessage(message.map(ByteString.apply): _*)
  )

  override def postStop(): Unit = manager.closeSocket(socket)

  override def receive: Actor.Receive = {
    case zmqMessage: ZMQMessage =>
      // Unwrap ByteStrings back into raw byte arrays before writing to the socket.
      socket.send(zmqMessage.frames.map(_.toArray): _*)
  }
}
| Myllyenko/incubator-toree | communication/src/main/scala/org/apache/toree/communication/actors/DealerSocketActor.scala | Scala | apache-2.0 | 1,864 |
package com.mattwittmann.scabasic
/** Interpreter for ScaBASIC source lines.
  * Currently a stub: `interpret` accepts a raw source line and performs no action. */
class Interpreter {
  /** Consumes one line of source; no-op placeholder pending a real implementation. */
  def interpret(line: String): Unit = ()
}
| matt-wittmann/sca_basic | src/main/scala/com/mattwittmann/scabasic/Interpreter.scala | Scala | mit | 101 |
import com.mojolly.scalate.ScalatePlugin.ScalateKeys._
import com.mojolly.scalate.ScalatePlugin._
import org.scalatra.sbt._
import sbt.Keys._
import sbt._
//noinspection ScalaFileName
// sbt 0.13-style build definition for the "scab" Scalatra web application.
// NOTE(review): the `Build` trait and the `<<=` operator are legacy sbt 0.13 idioms.
object ScabBuild extends Build {
  // Project coordinates and pinned tool/framework versions.
  val Organization = "com.robbyp"
  val Name = "scab"
  val Version = "0.1.0-SNAPSHOT"
  val ScalaVersion = "2.11.6"
  val ScalatraVersion = "2.4.0.RC1"
  // Task key so the Scalastyle check can be chained into the test phase below.
  lazy val testScalastyle = taskKey[Unit]("testScalastyle")
  lazy val project = Project(
    "scab",
    file("."),
    // Scalatra defaults (with JRebel support) plus Scalate template settings.
    settings = ScalatraPlugin.scalatraWithJRebel ++ scalateSettings ++ Seq(
      organization := Organization,
      name := Name,
      version := Version,
      scalaVersion := ScalaVersion,
      resolvers += Classpaths.typesafeReleases,
      libraryDependencies ++= Seq(
        "org.scalatra" %% "scalatra" % ScalatraVersion,
        "org.scalatra" %% "scalatra-scalate" % ScalatraVersion,
        "org.scalatra" %% "scalatra-scalatest" % ScalatraVersion % "test",
        "ch.qos.logback" % "logback-classic" % "1.1.2" % "runtime",
        "org.eclipse.jetty" % "jetty-webapp" % "9.1.5.v20140505" % "container",
        "org.eclipse.jetty" % "jetty-plus" % "9.1.5.v20140505" % "container",
        "javax.servlet" % "javax.servlet-api" % "3.1.0",
        "org.joda" % "joda-money" % "0.10.0",
        "com.github.nscala-time" %% "nscala-time" % "1.8.0"
      ),
      // Scalate template compilation configuration (templates under WEB-INF/templates).
      scalateTemplateConfig in Compile <<= (sourceDirectory in Compile) { base =>
        Seq(
          TemplateConfig(
            base / "webapp" / "WEB-INF" / "templates",
            Seq.empty, /* default imports should be added here */
            Seq(
              Binding("context", "_root_.org.scalatra.scalate.ScalatraRenderContext", importMembers = true, isImplicit = true)
            ), /* add extra bindings here */
            Some("templates")
          )
        )
      },
      // Run Scalastyle as part of `test` so style violations fail the build.
      testScalastyle := org.scalastyle.sbt.ScalastylePlugin.scalastyle.in(Test).toTask("").value,
      (test in Test) <<= (test in Test) dependsOn testScalastyle
    )
  )
}
| robpurcell/scab | project/build.scala | Scala | apache-2.0 | 2,026 |
// Project: angulate2 (https://github.com/jokade/angulate2)
// Module: @angular/core/Renderer
// Copyright (c) 2016 Johannes.Kastner <jokade@karchedon.de>
// Distributed under the MIT License (see included LICENSE file)
package angulate2.core
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
/** Scala.js facade for Angular 2's `Renderer` from `@angular/core`.
  * All members are implemented natively by Angular; bodies must remain `js.native`. */
@js.native
@JSImport("@angular/core","Renderer")
class Renderer extends js.Any {
  import Renderer._
  // --- Element / node creation ---
  def selectRootElement(selectorOrNode: js.Any, debugInfo: OptRenderDebugInfo = js.undefined) : js.Any = js.native
  def createElement(parentElement: js.Any, name: String, debugInfo: OptRenderDebugInfo = js.undefined) : js.Any = js.native
  def createViewRoot(hostElement: js.Any) : js.Any = js.native
  def createTemplateAnchor(parentElement: js.Any, debugInfo: OptRenderDebugInfo = js.undefined) : js.Any = js.native
  def createText(parentElement: js.Any, value: String, debugInfo: OptRenderDebugInfo = js.undefined) : js.Any = js.native
  // --- View attachment / teardown ---
  def projectNodes(parentElement: js.Any, nodes: js.Array[js.Any]) : Unit = js.native
  def attachViewAfter(node: js.Any, viewRootNodes: js.Array[js.Any]) : Unit = js.native
  def detachView(viewRootNodes: js.Array[js.Any]) : Unit = js.native
  def destroyView(hostElement: js.Any, viewAllNodes: js.Array[js.Any]) : Unit = js.native
  // --- Event listeners (return a deregistration function) ---
  def listen(renderElement: js.Any, name: String, callback: js.Function) : js.Function = js.native
  def listenGlobal(target: String, name: String, callback: js.Function) : js.Function = js.native
  // --- Element mutation ---
  def setElementProperty(renderElement: js.Any, propertyName: String, propertyValue: js.Any) : Unit = js.native
  def setElementAttribute(renderElement: js.Any, attributeName: String, attributeValue: String) : Unit = js.native
  def setBindingDebugInfo(renderElement: js.Any, propertyName: String, propertyValue: String) : Unit = js.native
  def setElementClass(renderElement: js.Any, className: String, isAdd: Boolean) : Unit = js.native
  def setElementStyle(renderElement: js.Any, styleName: String, styleValue: String) : Unit = js.native
  def invokeElementMethod(renderElement: js.Any, methodName: String, args: OptAnyArray = js.undefined) : Unit = js.native
  def setText(renderNode: js.Any, text: String) : Unit = js.native
  // --- Animation ---
  def animate(element: js.Any, startingStyles: AnimationStyles, keyframes: js.Array[AnimationKeyframe], duration: Int, delay: Int, easing: String, previousPlayers: js.UndefOr[js.Array[AnimationPlayer]] = js.undefined) : AnimationPlayer = js.native
}
object Renderer {
  // Loosely-typed aliases for Angular structures that have no dedicated facade yet.
  type RenderDebugInfo = js.Dynamic
  type OptRenderDebugInfo = js.UndefOr[RenderDebugInfo]
  type AnimationStyles = js.Dynamic
  type AnimationKeyframe = js.Dynamic
  type AnimationPlayer = js.Dynamic
}
| jokade/angulate2 | bindings/src/main/scala/angulate2/core/Renderer.scala | Scala | mit | 2,713 |
package org.jetbrains.plugins.scala.codeInsight.intention.format
import org.jetbrains.plugins.scala.format.{FormattedStringFormatter, StringConcatenationParser}
/**
* Pavel Fatin
*/
/** Intention action that rewrites a `+`-based string concatenation into the
  * equivalent formatted string; `eager = true` offers it on any concatenation. */
class ConvertStringConcatenationToFormattedString extends AbstractFormatConversionIntention(
  "Convert to formatted string", StringConcatenationParser, FormattedStringFormatter, eager = true) {
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/format/ConvertStringConcatenationToFormattedString.scala | Scala | apache-2.0 | 384 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.expressions.utils.FuncWithOpen
import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
import org.apache.flink.table.planner.runtime.utils._
import org.apache.flink.types.Row
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import scala.collection.{Seq, mutable}
@RunWith(classOf[Parameterized])
class JoinITCase(state: StateBackendMode) extends StreamingWithStateTestBase(state) {
  // 5-field rows (int, long, int, string, long); the long field (2nd) joins tuple3Data's long field.
  val smallTuple5Data = List(
    (1, 1L, 0, "Hallo", 1L),
    (2, 2L, 1, "Hallo Welt", 2L),
    (2, 3L, 2, "Hallo Welt wie", 1L),
    (3, 4L, 3, "Hallo Welt wie gehts?", 2L),
    (3, 5L, 4, "ABC", 2L),
    (3, 6L, 5, "BCD", 3L)
  )
  // 3-field rows (int, long, string) used as the probe side of several joins.
  val tuple3Data = List(
    (1, 1L, "Hi"),
    (2, 2L, "Hello"),
    (3, 2L, "Hello world")
  )
  // Long keys 3..6 never match tuple3Data's keys (1, 2) — drives the outer-join null cases.
  val dataCannotBeJoin = List(
    (2, 3L, 2, "Hallo Welt wie", 1L),
    (3, 4L, 3, "Hallo Welt wie gehts?", 2L),
    (3, 5L, 4, "ABC", 2L),
    (3, 6L, 5, "BCD", 3L)
  )
  /** Registers the shared input tables A (3 fields) and B (5 fields) on a failing
    * data source, so every test also exercises state recovery. */
  override def before(): Unit = {
    super.before()
    val tableA = failingDataSource(TestData.smallTupleData3)
      .toTable(tEnv, 'a1, 'a2, 'a3)
    val tableB = failingDataSource(TestData.tupleData5)
      .toTable(tEnv, 'b1, 'b2, 'b3, 'b4, 'b5)
    tEnv.registerTable("A", tableA)
    tEnv.registerTable("B", tableB)
  }
  // Tests for inner join.
  // Deliberate no-op teardown: tables are re-registered by before() for each test.
  override def after(): Unit = {}
  /** Non-window inner join with a non-equi secondary predicate (t1.b > t2.b);
    * a = 3 is mapped to NULL on both sides, so those rows never join. */
  @Test
  def testNonWindowInnerJoin(): Unit = {
    val data1 = new mutable.MutableList[(Int, Long, String)]
    data1.+=((1, 1L, "Hi1"))
    data1.+=((1, 2L, "Hi2"))
    data1.+=((1, 2L, "Hi2"))
    data1.+=((1, 5L, "Hi3"))
    data1.+=((2, 7L, "Hi5"))
    data1.+=((1, 9L, "Hi6"))
    data1.+=((1, 8L, "Hi8"))
    data1.+=((3, 8L, "Hi9"))
    val data2 = new mutable.MutableList[(Int, Long, String)]
    data2.+=((1, 1L, "HiHi"))
    data2.+=((2, 2L, "HeHe"))
    data2.+=((3, 2L, "HeHe"))
    val t1 = failingDataSource(data1).toTable(tEnv, 'a, 'b, 'c)
    val t2 = failingDataSource(data2).toTable(tEnv, 'a, 'b, 'c)
    tEnv.registerTable("T1", t1)
    tEnv.registerTable("T2", t2)
    val sqlQuery =
      """
        |SELECT t2.a, t2.c, t1.c
        |FROM (
        | SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1
        |) as t1
        |JOIN (
        | SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2
        |) as t2
        |ON t1.a = t2.a AND t1.b > t2.b
        |""".stripMargin
    val sink = new TestingAppendSink
    tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
    env.execute()
    val expected = mutable.MutableList(
      "1,HiHi,Hi2",
      "1,HiHi,Hi2",
      "1,HiHi,Hi3",
      "1,HiHi,Hi6",
      "1,HiHi,Hi8",
      "2,HeHe,Hi5")
    assertEquals(expected.sorted, sink.getAppendResults.sorted)
  }
  /** Same as above but the join condition treats NULL = NULL as a match
    * (IS NULL on both sides), so the a = 3 rows now join. */
  @Test
  def testIsNullInnerJoinWithNullCond(): Unit = {
    val data1 = new mutable.MutableList[(Int, Long, String)]
    data1.+=((1, 1L, "Hi1"))
    data1.+=((1, 2L, "Hi2"))
    data1.+=((1, 2L, "Hi2"))
    data1.+=((1, 5L, "Hi3"))
    data1.+=((2, 7L, "Hi5"))
    data1.+=((1, 9L, "Hi6"))
    data1.+=((1, 8L, "Hi8"))
    data1.+=((3, 8L, "Hi9"))
    val data2 = new mutable.MutableList[(Int, Long, String)]
    data2.+=((1, 1L, "HiHi"))
    data2.+=((2, 2L, "HeHe"))
    data2.+=((3, 2L, "HeHe"))
    val t1 = failingDataSource(data1).toTable(tEnv, 'a, 'b, 'c)
    val t2 = failingDataSource(data2).toTable(tEnv, 'a, 'b, 'c)
    tEnv.registerTable("T1", t1)
    tEnv.registerTable("T2", t2)
    val sqlQuery =
      """
        |SELECT t2.a, t2.c, t1.c
        |FROM (
        | SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T1
        |) as t1
        |JOIN (
        | SELECT if(a = 3, cast(null as int), a) as a, b, c FROM T2
        |) as t2
        |ON
        |  ((t1.a is null AND t2.a is null) OR
        |  (t1.a = t2.a))
        |  AND t1.b > t2.b
        |""".stripMargin
    val sink = new TestingAppendSink
    tEnv.sqlQuery(sqlQuery).toAppendStream[Row].addSink(sink)
    env.execute()
    val expected = mutable.MutableList(
      "1,HiHi,Hi2",
      "1,HiHi,Hi2",
      "1,HiHi,Hi3",
      "1,HiHi,Hi6",
      "1,HiHi,Hi8",
      "2,HeHe,Hi5",
      "null,HeHe,Hi9")
    assertEquals(expected.sorted, sink.getAppendResults.sorted)
  }
  // Implicit (comma) inner join on a2 = b2 over the shared A/B tables.
  @Test
  def testJoin(): Unit = {
    val sqlQuery = "SELECT a3, b4 FROM A, B WHERE a2 = b2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Explicit JOIN ... ON over locally registered tables; append sink suffices.
  @Test
  def testInnerJoin(): Unit = {
    val ds1 = failingDataSource(tuple3Data).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(smallTuple5Data).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("ds1", ds1)
    tEnv.registerTable("ds2", ds2)
    val query = "SELECT b, c, e, g FROM ds1 JOIN ds2 ON b = e"
    val sink = new TestingAppendSink
    tEnv.sqlQuery(query).toAppendStream[Row].addSink(sink)
    env.execute()
    val expected = Seq("1,Hi,1,Hallo", "2,Hello world,2,Hallo Welt", "2,Hello,2,Hallo Welt")
    assertEquals(expected.sorted, sink.getAppendResults.sorted)
  }
  // Inner join on the first column; duplicates on the build side multiply output rows.
  @Test
  def testInnerJoin2(): Unit = {
    val query = "SELECT a1, b1 FROM A JOIN B ON a1 = b1"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("3,3", "1,1", "3,3", "2,2", "3,3", "2,2")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Join condition combined with an extra filter predicate.
  @Test
  def testJoinWithFilter(): Unit = {
    val sqlQuery = "SELECT a3, b4 FROM A, B WHERE a2 = b2 AND a2 < 2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hi,Hallo")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // The same left column participates in two equi-predicates.
  @Test
  def testInnerJoinWithDuplicateKey(): Unit = {
    val query = "SELECT a1, b1, b3 FROM A JOIN B ON a1 = b1 AND a1 = b3"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("2,2,2", "3,3,3")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Equi-join plus non-equi predicates on both inputs.
  @Test
  def testInnerJoinWithNonEquiJoinPredicate(): Unit = {
    val sqlQuery = "SELECT c, g FROM Table3, Table5 WHERE b = e AND a < 6 AND h < b"
    val ds1 = failingDataSource(TestData.tupleData3).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(TestData.tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("Table3", ds1)
    tEnv.registerTable("Table5", ds2)
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hello world, how are you?,Hallo Welt wie", "I am fine.,Hallo Welt wie")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Composite join key (two equi-predicates on different columns).
  @Test
  def testJoinWithMultipleKeys(): Unit = {
    val sqlQuery = "SELECT c, g FROM Table3, Table5 WHERE a = d AND b = h"
    val ds1 = failingDataSource(TestData.tupleData3).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(TestData.tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("Table3", ds1)
    tEnv.registerTable("Table5", ds2)
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq(
      "Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt wie gehts?", "Hello world,ABC",
      "I am fine.,HIJ", "I am fine.,IJK")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Aliased subquery with a non-identifier column name (quoted with backticks).
  @Test
  def testJoinWithAlias(): Unit = {
    val sqlQuery =
      "SELECT B.b5, T.`1-_./Ü` FROM (SELECT a1, a2, a3 AS `1-_./Ü` FROM A) AS T, B " +
        "WHERE a1 = b1 AND a1 < 4"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("1,Hi", "2,Hello", "1,Hello",
      "2,Hello world", "2,Hello world", "3,Hello world")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Global aggregation on top of a join result.
  @Test
  def testDataStreamJoinWithAggregation(): Unit = {
    val sqlQuery = "SELECT COUNT(b4), COUNT(a2) FROM A, B WHERE a1 = b1"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("6,6")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Left outer join where the right side (dataCannotBeJoin) never matches → all-null padding.
  @Test
  def testLeftOuterJoin(): Unit = {
    val ds1 = failingDataSource(tuple3Data).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(dataCannotBeJoin).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("ds1", ds1)
    tEnv.registerTable("ds2", ds2)
    val query = "SELECT b, c, e, g FROM ds1 LEFT OUTER JOIN ds2 ON b = e"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("1,Hi,null,null", "2,Hello world,null,null", "2,Hello,null,null")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Left outer join with matches: earlier null-padded rows must be retracted once matches arrive.
  @Test
  def testLeftOuterJoinWithRetraction(): Unit = {
    val ds1 = failingDataSource(tuple3Data).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(smallTuple5Data).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("ds1", ds1)
    tEnv.registerTable("ds2", ds2)
    val query = "SELECT b, c, e, g FROM ds1 LEFT OUTER JOIN ds2 ON b = e"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("1,Hi,1,Hallo", "2,Hello world,2,Hallo Welt", "2,Hello,2,Hallo Welt")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Exact duplicate records on the probe side must each produce their own join result.
  @Test
  def testStreamJoinWithSameRecord(): Unit = {
    val data1 = List(
      (1, 1),
      (1, 1),
      (2, 2),
      (2, 2),
      (3, 3),
      (3, 3),
      (4, 4),
      (4, 4),
      (5, 5),
      (5, 5))
    val data2 = List(
      (1, 1),
      (2, 2),
      (3, 3),
      (4, 4),
      (5, 5),
      (6, 6),
      (7, 7),
      (8, 8),
      (9, 9),
      (10, 10))
    val table1 = failingDataSource(data1).toTable(tEnv, 'pk, 'a)
    val table2 = failingDataSource(data2).toTable(tEnv, 'pk, 'a)
    tEnv.registerTable("ds1", table1)
    tEnv.registerTable("ds2", table2)
    val sql =
      """
        |SELECT
        |  ds1.pk as leftPk,
        |  ds1.a as leftA,
        |  ds2.pk as rightPk,
        |  ds2.a as rightA
        |FROM ds1 JOIN ds2 ON ds1.pk = ds2.pk
      """.stripMargin
    val sink = new TestingAppendSink
    tEnv.sqlQuery(sql).toAppendStream[Row].addSink(sink)
    env.execute()
    val expected = Seq("1,1,1,1", "1,1,1,1",
      "2,2,2,2", "2,2,2,2",
      "3,3,3,3", "3,3,3,3",
      "4,4,4,4", "4,4,4,4",
      "5,5,5,5", "5,5,5,5")
    assertEquals(expected.sorted, sink.getAppendResults.sorted)
  }
  // Full outer join: unmatched B rows appear null-padded on the left.
  @Test
  def testFullOuterJoin(): Unit = {
    val sqlQuery = "SELECT a3, b4 FROM A FULL OUTER JOIN B ON a2 = b2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt",
      "null,Hallo Welt wie", "null,Hallo Welt wie gehts?", "null,ABC", "null,BCD",
      "null,CDE", "null,DEF", "null,EFG", "null,FGH", "null,GHI", "null,HIJ",
      "null,IJK", "null,JKL", "null,KLM")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Left outer join where the larger table (Table5) is the preserved side.
  @Test
  def testLeftOuterJoin2(): Unit = {
    val sqlQuery = "SELECT c, g FROM Table5 LEFT OUTER JOIN Table3 ON b = e"
    val ds1 = failingDataSource(TestData.smallTupleData3).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = failingDataSource(TestData.tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.registerTable("Table3", ds1)
    tEnv.registerTable("Table5", ds2)
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt",
      "null,Hallo Welt wie", "null,Hallo Welt wie gehts?", "null,ABC", "null,BCD",
      "null,CDE", "null,DEF", "null,EFG", "null,FGH", "null,GHI", "null,HIJ",
      "null,IJK", "null,JKL", "null,KLM")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Right outer join: B is preserved, unmatched rows null-padded on the A side.
  @Test
  def testRightOuterJoin(): Unit = {
    val sqlQuery = "SELECT a3, b4 FROM A RIGHT OUTER JOIN B ON a2 = b2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt",
      "null,Hallo Welt wie", "null,Hallo Welt wie gehts?", "null,ABC", "null,BCD",
      "null,CDE", "null,DEF", "null,EFG", "null,FGH", "null,GHI", "null,HIJ",
      "null,IJK", "null,JKL", "null,KLM")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Both join inputs are aggregates keyed by the join column (join key == unique key).
  @Test
  def testInnerJoinWithEqualPk(): Unit = {
    val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
    val query = s"SELECT a1, b1 FROM ($query1) JOIN ($query2) ON a1 = b1"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("1,1", "2,2", "3,3")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Aggregated inputs joined on the aggregate values rather than the group keys.
  @Test
  def testInnerJoinWithPk(): Unit = {
    val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
    val query = s"SELECT a1, a2, b1, b2 FROM ($query1) JOIN ($query2) ON a2 = b2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("1,1,1,1")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
  // Left join whose non-equi condition (a2 > b2) never holds → all rows null-padded.
  @Test
  def testLeftJoinNonEqui(): Unit = {
    val query = "SELECT a1, b1 FROM A LEFT JOIN B ON a1 = b1 AND a2 > b2"
    val sink = new TestingRetractSink
    tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
    env.execute()
    val expected = Seq("3,null", "1,null", "2,null")
    assertEquals(expected.sorted, sink.getRetractResults.sorted)
  }
@Test
def testLeftJoinWithEqualPkNonEqui(): Unit = {
val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
val query = s"SELECT a1, b1 FROM ($query1) LEFT JOIN ($query2) ON a1 = b1 AND a2 > b2"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("1,null", "3,null", "2,null")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoinWithRightNotPkNonEqui(): Unit = {
val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
val query = s"SELECT a1, b1 FROM ($query1) LEFT JOIN B ON a1 = b1 AND a2 > b2"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("1,null", "3,null", "2,null")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoinWithPkNonEqui(): Unit = {
val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
val query = s"SELECT a1, a2, b1, b2 FROM ($query1) LEFT JOIN ($query2) ON a2 = b2 AND a1 > b1"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("1,1,null,null", "3,2,null,null", "2,2,null,null")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoin(): Unit = {
val query = "SELECT a1, b1 FROM A LEFT JOIN B ON a1 = b1"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("1,1", "2,2", "3,3", "2,2", "3,3", "3,3")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoinWithEqualPk(): Unit = {
val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
val query = s"SELECT a1, b1 FROM ($query1) LEFT JOIN ($query2) ON a1 = b1"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("2,2", "1,1", "3,3")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoinWithRightNotPk(): Unit = {
val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
val query = s"SELECT a1, b1 FROM ($query1) LEFT JOIN B ON a1 = b1"
val sink = new TestingRetractSink
tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
env.execute()
val expected = Seq("3,3", "3,3", "3,3", "2,2", "2,2", "1,1")
assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testLeftJoinWithPk(): Unit = {
  // LEFT JOIN of two keyed aggregates joined on the aggregated (non-key) columns.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, a2, b1, b2 FROM ($leftAgg) LEFT JOIN ($rightAgg) ON a2 = b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("1,1,1,1", "3,2,null,null", "2,2,null,null")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinNonEqui(): Unit = {
  // RIGHT JOIN on raw inputs with an extra non-equi predicate (a2 > b2).
  val sql = "SELECT a1, b1 FROM A RIGHT JOIN B ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,2", "null,1", "null,3", "null,3", "null,2", "null,5", "null,3",
    "null,5", "null,4", "null,5", "null,4", "null,5", "null,4", "null,5", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithEqualPkNonEqui(): Unit = {
  // RIGHT JOIN of two keyed aggregates with an additional non-equi predicate (a2 > b2).
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) RIGHT JOIN ($rightAgg) ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,1", "null,3", "null,2", "null,5", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithRightNotPkNonEqui(): Unit = {
  // RIGHT JOIN of a keyed aggregate with a raw (keyless) right input, plus a non-equi predicate.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) RIGHT JOIN B ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,2", "null,1", "null,3", "null,2", "null,3", "null,5", "null,5",
    "null,3", "null,5", "null,5", "null,4", "null,5", "null,4", "null,4", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithPkNonEqui(): Unit = {
  // RIGHT JOIN of two keyed aggregates on the aggregated columns plus a non-equi predicate.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, a2, b1, b2 FROM ($leftAgg) RIGHT JOIN ($rightAgg) ON a2 = b2 AND a1 > b1"
  env.setParallelism(1)
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,null,3,15", "null,null,4,34", "null,null,2,5", "null,null,5,65",
    "null,null,1,1")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoin(): Unit = {
  // Plain RIGHT JOIN on raw (non-aggregated) inputs.
  val sql = "SELECT a1, b1 FROM A RIGHT JOIN B ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("2,2", "3,3", "3,3", "2,2", "3,3", "null,5", "null,4", "1,1", "null,5",
    "null,4", "null,5", "null,5", "null,5", "null,4", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithEqualPk(): Unit = {
  // RIGHT JOIN of two keyed aggregates joined on both keys.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) RIGHT JOIN ($rightAgg) ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("1,1", "2,2", "null,5", "3,3", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithRightNotPk(): Unit = {
  // RIGHT JOIN of a keyed aggregate with a raw (keyless) right input.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) RIGHT JOIN B ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,4", "null,4", "null,4", "null,4", "null,5", "null,5", "null,5",
    "null,5", "null,5", "1,1", "2,2", "3,3", "3,3", "3,3", "2,2")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testRightJoinWithPk(): Unit = {
  // RIGHT JOIN of two keyed aggregates joined on the aggregated (non-key) columns.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, a2, b1, b2 FROM ($leftAgg) RIGHT JOIN ($rightAgg) ON a2 = b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,null,3,15", "null,null,4,34", "null,null,5,65",
    "1,1,1,1", "null,null,2,5")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinNonEqui(): Unit = {
  // FULL JOIN on raw inputs with an extra non-equi predicate (a2 > b2).
  val sql = "SELECT a1, b1 FROM A FULL JOIN B ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("1,null", "3,null", "2,null", "null,3", "null,2", "null,2", "null,3",
    "null,5", "null,3", "null,5", "null,4", "null,5", "null,4", "null,1", "null,5", "null,4",
    "null,5", "null,4")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithEqualPkNonEqui(): Unit = {
  // FULL JOIN of two keyed aggregates with an additional non-equi predicate (a2 > b2).
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) FULL JOIN ($rightAgg) ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,2", "null,5", "null,3", "null,4", "3,null", "1,null", "null,1",
    "2,null")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithFullNotPkNonEqui(): Unit = {
  // FULL JOIN of a keyed aggregate with a raw (keyless) right input, plus a non-equi predicate.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) FULL JOIN B ON a1 = b1 AND a2 > b2"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,2", "null,1", "null,2", "null,5", "null,5", "null,5", "null,5",
    "null,5", "null,3", "null,3", "null,3", "null,4", "null,4", "null,4", "null,4", "3,null",
    "1,null", "2,null")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithPkNonEqui(): Unit = {
  // FULL JOIN of two keyed aggregates on the aggregated columns plus a non-equi predicate.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, a2, b1, b2 FROM ($leftAgg) FULL JOIN ($rightAgg) ON a2 = b2 AND a1 > b1"
  env.setParallelism(1)
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("1,1,null,null", "null,null,5,65", "null,null,2,5", "2,2,null,null",
    "3,2,null,null", "null,null,3,15", "null,null,4,34", "null,null,1,1")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoin(): Unit = {
  // Plain FULL JOIN on raw (non-aggregated) inputs.
  val sql = "SELECT a1, b1 FROM A FULL JOIN B ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("1,1", "null,5", "null,5", "null,5", "null,4", "null,5", "null,4",
    "null,5", "null,4", "null,4", "2,2", "2,2", "3,3", "3,3", "3,3")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithEqualPk(): Unit = {
  // FULL JOIN of two keyed aggregates joined on both keys.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val rightAgg = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) FULL JOIN ($rightAgg) ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,4", "1,1", "3,3", "2,2", "null,5")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithFullNotPk(): Unit = {
  // FULL JOIN of a keyed aggregate with a raw (keyless) right input.
  val leftAgg = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val sql = s"SELECT a1, b1 FROM ($leftAgg) FULL JOIN B ON a1 = b1"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("null,4", "null,4", "null,4", "null,4", "null,5", "null,5", "null,5",
    "null,5", "null,5", "3,3", "3,3", "3,3", "1,1", "2,2", "2,2")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testFullJoinWithPk(): Unit = {
  // FULL JOIN of two keyed aggregates joined on the aggregated (non-key) columns.
  // NOTE: renamed from `FullJoinWithPk` — every sibling test uses the `test` prefix; the old
  // name only ran because JUnit 4 discovers tests by the @Test annotation, not by name.
  val query1 = "SELECT SUM(a2) AS a2, a1 FROM A group by a1"
  val query2 = "SELECT SUM(b2) AS b2, b1 FROM B group by b1"
  val query = s"SELECT a1, a2, b1, b2 FROM ($query1) FULL JOIN ($query2) ON a2 = b2"
  val sink = new TestingRetractSink
  tEnv.sqlQuery(query).toRetractStream[Row].addSink(sink).setParallelism(1)
  env.execute()
  val expected = Seq("null,null,3,15", "null,null,4,34", "null,null,5,65", "3,2,null,null",
    "2,2,null,null", "null,null,2,5", "1,1,1,1")
  assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testNullLeftOuterJoin(): Unit = {
  // LEFT OUTER JOIN where a=3 is mapped to NULL on both sides; NULL keys never equi-match.
  val leftRows = mutable.MutableList((1, 1L), (3, 8L), (4, 2L))
  val rightRows = mutable.MutableList((1, 1L), (2, 2L), (3, 2L))
  tEnv.registerTable("T1", failingDataSource(leftRows).toTable(tEnv, 'a, 'b))
  tEnv.registerTable("T2", failingDataSource(rightRows).toTable(tEnv, 'a, 'b))
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |LEFT OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2
      |ON t1.a = t2.a
      |""".stripMargin
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = mutable.MutableList(
    "1,1,1,1",
    "4,2,null,null",
    "null,8,null,null"
  )
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testNullLeftOuterJoinWithNullCond(): Unit = {
  // Like testNullLeftOuterJoin, but the join condition explicitly matches NULL keys to each other.
  val leftRows = mutable.MutableList((1, 1L), (3, 8L), (4, 2L))
  val rightRows = mutable.MutableList((1, 1L), (2, 2L), (3, 2L))
  tEnv.registerTable("T1", failingDataSource(leftRows).toTable(tEnv, 'a, 'b))
  tEnv.registerTable("T2", failingDataSource(rightRows).toTable(tEnv, 'a, 'b))
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |LEFT OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2
      |ON t1.a = t2.a OR (t1.a is null AND t2.a is null)
      |""".stripMargin
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = mutable.MutableList(
    "1,1,1,1",
    "4,2,null,null",
    "null,8,null,2"
  )
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testNullRightOuterJoin(): Unit = {
  // RIGHT OUTER JOIN where a=3 is mapped to NULL on both sides; NULL keys never equi-match.
  val leftRows = mutable.MutableList((1, 1L), (3, 8L), (4, 2L))
  val rightRows = mutable.MutableList((1, 1L), (2, 2L), (3, 2L))
  tEnv.registerTable("T1", failingDataSource(leftRows).toTable(tEnv, 'a, 'b))
  tEnv.registerTable("T2", failingDataSource(rightRows).toTable(tEnv, 'a, 'b))
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |RIGHT OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2 ON t1.a = t2.a
      |""".stripMargin
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = mutable.MutableList(
    "1,1,1,1",
    "null,null,2,2",
    "null,null,null,2"
  )
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testNullRightOuterJoinWithNullCond(): Unit = {
  // Like testNullRightOuterJoin, but the join condition explicitly matches NULL keys to each other.
  val leftRows = mutable.MutableList((1, 1L), (3, 8L), (4, 2L))
  val rightRows = mutable.MutableList((1, 1L), (2, 2L), (3, 2L))
  tEnv.registerTable("T1", failingDataSource(leftRows).toTable(tEnv, 'a, 'b))
  tEnv.registerTable("T2", failingDataSource(rightRows).toTable(tEnv, 'a, 'b))
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |RIGHT OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2
      |ON t1.a = t2.a OR (t1.a is null AND t2.a is null)
      |""".stripMargin
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = mutable.MutableList(
    "1,1,1,1",
    "null,null,2,2",
    "null,8,null,2"
  )
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testNullFullOuterJoin(): Unit = {
  // FULL OUTER JOIN where a=3 is mapped to NULL on both sides; NULL keys never equi-match,
  // so both NULL-keyed rows are emitted padded with nulls.
  val data1 = new mutable.MutableList[(Int, Long)]
  data1.+=((1, 1L))
  data1.+=((3, 8L))
  data1.+=((4, 2L))
  val data2 = new mutable.MutableList[(Int, Long)]
  data2.+=((1, 1L))
  data2.+=((2, 2L))
  data2.+=((3, 2L))
  val t1 = failingDataSource(data1).toTable(tEnv, 'a, 'b)
  val t2 = failingDataSource(data2).toTable(tEnv, 'a, 'b)
  tEnv.registerTable("T1", t1)
  tEnv.registerTable("T2", t2)
  // Fix: the two inner SELECT lines were missing the stripMargin '|' prefix, so their raw
  // source indentation leaked into the SQL text (harmless to the parser, but inconsistent
  // with every sibling test in this suite).
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |FULL OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2
      |ON t1.a = t2.a
      |""".stripMargin
  val sink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
  env.execute()
  val expected = mutable.MutableList(
    "1,1,1,1",
    "null,null,2,2",
    "4,2,null,null",
    "null,8,null,null",
    "null,null,null,2"
  )
  assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testNullFullOuterJoinWithNullCond(): Unit = {
  // Like testNullFullOuterJoin, but the join condition explicitly matches NULL keys to each other.
  val data1 = new mutable.MutableList[(Int, Long)]
  data1.+=((1, 1L))
  data1.+=((3, 8L))
  data1.+=((4, 2L))
  val data2 = new mutable.MutableList[(Int, Long)]
  data2.+=((1, 1L))
  data2.+=((2, 2L))
  data2.+=((3, 2L))
  val t1 = failingDataSource(data1).toTable(tEnv, 'a, 'b)
  val t2 = failingDataSource(data2).toTable(tEnv, 'a, 'b)
  tEnv.registerTable("T1", t1)
  tEnv.registerTable("T2", t2)
  // Fix: the two inner SELECT lines were missing the stripMargin '|' prefix, so their raw
  // source indentation leaked into the SQL text (harmless to the parser, but inconsistent
  // with every sibling test in this suite).
  val sqlQuery =
    """
      |SELECT t1.a, t1.b, t2.a, t2.b
      |FROM (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T1
      |) as t1
      |FULL OUTER JOIN (
      |  SELECT if(a = 3, cast(null as int), a) as a, b FROM T2
      |) as t2
      |ON t1.a = t2.a
      |OR (t1.a is null AND t2.a is null)
      |""".stripMargin
  val sink = new TestingRetractSink
  tEnv.sqlQuery(sqlQuery).toRetractStream[Row].addSink(sink).setParallelism(1)
  env.execute()
  val expected = mutable.MutableList(
    "1,1,1,1",
    "null,null,2,2",
    "4,2,null,null",
    "null,8,null,2"
  )
  assertEquals(expected.sorted, sink.getRetractResults.sorted)
}
@Test
def testJoinWithoutWatermark(): Unit = {
  // NOTE: Different from AggregateITCase, we do not set stream time characteristic
  // of environment to event time, so that emitWatermark() actually does nothing.
  env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)
  val leftRows = mutable.MutableList((1, 1L), (2, 2L), (3, 3L))
  val rightRows = mutable.MutableList((1, -1L), (2, -2L), (3, -3L))
  tEnv.registerTable("T1", failingDataSource(leftRows).toTable(tEnv, 'a, 'b))
  tEnv.registerTable("T2", failingDataSource(rightRows).toTable(tEnv, 'a, 'c))
  val joined = tEnv.sqlQuery(
    "select T1.a, b, c from T1, T2 WHERE T1.a = T2.a")
  val resultSink = new TestingRetractSink
  joined.toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = List("1,1,-1", "2,2,-2", "3,3,-3")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testBigDataOfJoin(): Unit = {
  // Larger input (500 rows, self-joined on b = e) to exercise join state beyond tiny fixtures.
  env.setParallelism(1)
  val rows = mutable.MutableList((0 until 500).map(i => (i % 10, i.toLong, i.toString)): _*)
  tEnv.registerTable("T1", failingDataSource(rows).toTable(tEnv, 'a, 'b, 'c))
  tEnv.registerTable("T2", failingDataSource(rows).toTable(tEnv, 'd, 'e, 'f))
  val sql =
    """
      |SELECT COUNT(DISTINCT b) FROM (SELECT b FROM T1, T2 WHERE b = e)
    """.stripMargin
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = List("500")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
@Test
def testJoinWithUDFFilter(): Unit = {
  // Join whose join condition is a scalar UDF call (funcWithOpen), with a b = e filter on top.
  val leftTable = failingDataSource(TestData.smallTupleData3).toTable(tEnv, 'a, 'b, 'c)
  val rightTable = failingDataSource(TestData.tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
  tEnv.registerTable("T3", leftTable)
  tEnv.registerTable("T5", rightTable)
  tEnv.registerFunction("funcWithOpen", new FuncWithOpen)
  val sql = "SELECT c, g FROM T3 join T5 on funcWithOpen(a + d) where b = e"
  val resultSink = new TestingRetractSink
  tEnv.sqlQuery(sql).toRetractStream[Row].addSink(resultSink).setParallelism(1)
  env.execute()
  val expectedResults = Seq("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt")
  assertEquals(expectedResults.sorted, resultSink.getRetractResults.sorted)
}
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/JoinITCase.scala | Scala | apache-2.0 | 37,510 |
package com.scalableminds.webknossos.datastore.models
import com.github.ghik.silencer.silent
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.webknossos.datastore.models.datasource.DataSetViewConfiguration.DataSetViewConfiguration
import com.scalableminds.webknossos.datastore.models.datasource.inbox.GenericInboxDataSource
import play.api.libs.json._
/**
 * Model types describing a data source (dataset): its identity, voxel scale, and data layers,
 * together with the implicit play-json formats used to (de)serialize them.
 */
package object datasource {

  /** Identifies a data source by dataset name and owning team. */
  case class DataSourceId(name: String, team: String) // here team is not (yet) renamed to organization to avoid migrating all jsons

  object DataSourceId {
    implicit val dataSourceIdFormat: Format[DataSourceId] = Json.format[DataSourceId]
  }

  object DataSetViewConfiguration {
    // Free-form per-dataset view settings, kept as a raw JSON map so new keys need no migration.
    type DataSetViewConfiguration = Map[String, JsValue]
    implicit val dataSetViewConfigurationFormat: Format[DataSetViewConfiguration] = Format.of[DataSetViewConfiguration]
  }

  /**
   * A usable (fully imported) data source.
   *
   * @param id                       identity (name + team)
   * @param dataLayers               the dataset's layers (covariant in the layer type)
   * @param scale                    voxel scale of the dataset
   * @param defaultViewConfiguration optional default view settings
   */
  case class GenericDataSource[+T <: DataLayerLike](id: DataSourceId,
                                                    dataLayers: List[T],
                                                    scale: Vec3Double,
                                                    defaultViewConfiguration: Option[DataSetViewConfiguration] = None)
      extends GenericInboxDataSource[T] {

    // A GenericDataSource is always usable; inbox sources that failed to import report None here.
    val toUsable: Option[GenericDataSource[T]] = Some(this)

    val scaleOpt: Option[Vec3Double] = Some(scale)

    val statusOpt: Option[String] = None

    /** Looks up a layer by name. */
    def getDataLayer(name: String): Option[T] =
      dataLayers.find(_.name == name)

    val center: Vec3Int = boundingBox.center

    // Lazy: derived from all layers' bounding boxes; `center` above forces it on first access.
    lazy val boundingBox: BoundingBox =
      BoundingBox.combine(dataLayers.map(_.boundingBox))
  }

  object GenericDataSource {
    @silent // Suppress unused warning. The passed Format[T] is expanded to more than what is really used. It can not be omitted, though.
    implicit def dataSourceFormat[T <: DataLayerLike](implicit fmt: Format[T]): Format[GenericDataSource[T]] =
      Json.format[GenericDataSource[T]]
  }

  type DataSource = GenericDataSource[DataLayer]
  type DataSourceLike = GenericDataSource[DataLayerLike]
}
| scalableminds/webknossos | webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala | Scala | agpl-3.0 | 2,097 |
/**
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 1.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.swagger.client.model
/**
 * An employee record as exposed by the employee service API.
 *
 * Swagger-generated model; regenerate rather than editing by hand.
 *
 * @param name     the employee's name
 * @param band     the employee's band (grade level)
 * @param location the employee's work location
 * @param skills   the employee's skill sets
 */
case class Employee (
  name: String,
  band: String,
  location: String,
  skills: List[SkillSet]
)
| SriramZafin/employee-service-server-client | src/main/scala/io/swagger/client/model/Employee.scala | Scala | apache-2.0 | 1,012 |
package controllers.security
import com.lvxingpai.yunkai.{ UserInfoProp, NotFoundException, UserInfo }
import Security.AuthInfo
import core.security.UserRole
import play.api.inject.BindingKey
import play.api.mvc.Request
import play.api.{ Configuration, Play }
import scala.collection.JavaConversions._
import scala.concurrent.Future
import scala.util.{ Failure, Success, Try }
/**
* Created by zephyre on 1/12/16.
*/
/**
 * Authenticator that grants roles based on a shared token.
 *
 * Tokens are configured under `security.auth.tokens.<roleName>`: each subkey names a
 * [[UserRole]] and lists the tokens that grant it. If the request also carries an
 * `X-Lvxingpai-Id` header, the corresponding user is looked up and the User role added.
 */
class TokenAuthenticator extends Authenticator {
  /**
   * Token-based authentication.
   * @return the resolved [[AuthInfo]]; on failure, an AuthInfo with no roles and no user.
   */
  override def authenticate[A](request: Request[A], authMessage: String): Future[AuthInfo[UserInfo]] = {
    import com.lvxingpai.yunkai.Userservice.{ FinagledClient => YunkaiClient }
    import core.misc.Implicits.TwitterConverter._
    import play.api.Play.current
    import scala.concurrent.ExecutionContext.Implicits.global

    val yunkai = Play.application.injector instanceOf classOf[YunkaiClient]

    // AuthInfo returned when authentication fails (auth was attempted but grants nothing).
    val unauth = AuthInfo[UserInfo](authProvided = true, Set(), None)

    (for {
      tokens <- {
        // Fetch the configured token list from security.auth.tokens.
        val confKey = BindingKey(classOf[Configuration]) qualifiedWith "default"
        val conf = Play.application.injector instanceOf confKey
        conf getConfig "security.auth.tokens" orElse Some(Configuration.empty)
      }
    } yield {
      val roles = scala.collection.mutable.Set[UserRole.Value]()
      // Every subkey under security.auth.tokens defines a role. If the caller supplied a
      // matching token, that role is granted.
      tokens.subKeys foreach (subKey => {
        val tokenEntries = tokens getStringList subKey map (_.toSeq) getOrElse Seq()
        if (tokenEntries contains authMessage) {
          // Try to grant the corresponding role; unknown role names are silently ignored.
          Try(UserRole withName subKey) match {
            case Success(v) => roles += v
            case Failure(_) =>
          }
        }
      })

      // With token-based auth at least one role must have been granted.
      if (roles.isEmpty) {
        Future.successful(unauth)
      } else {
        for {
          userInfo <- {
            // Optional user lookup via the X-Lvxingpai-Id header; a missing user yields None.
            request.headers get "X-Lvxingpai-Id" map (v => {
              yunkai.getUserById(v.toLong, Some(Seq(UserInfoProp.UserId, UserInfoProp.NickName, UserInfoProp.Avatar))) map Option.apply recover {
                case _: NotFoundException => None
              }
            }) getOrElse Future.successful(None)
          }
        } yield {
          // If user info was found, additionally grant the User role.
          if (userInfo.nonEmpty)
            roles += UserRole.User
          AuthInfo(authProvided = true, roles.toSet, userInfo)
        }
      }
    }) getOrElse Future.successful(unauth)
  }
}
| Lvxingpai/Hanse | app/controllers/security/TokenAuthenticator.scala | Scala | apache-2.0 | 2,699 |
package edu.cmu.lti.oaqa.cse.scala.configuration
object Parameters {

  /** Algebraic data type describing a configuration parameter value. */
  sealed trait Parameter
  case class IntegerParameter(value: Int) extends Parameter
  case class StringParameter(value: String) extends Parameter
  case class DoubleParameter(value: Double) extends Parameter
  case class BooleanParameter(value: Boolean) extends Parameter
  case class ListParameter(pList: List[Parameter]) extends Parameter
  case class MapParameter(map: Map[String, Parameter]) extends Parameter

  /**
   * Implicitly converts primitive values (and lists / string-keyed maps thereof, recursively)
   * to [[Parameter]]s.
   *
   * Fix over the original: the match is now total — values that already are [[Parameter]]s
   * pass through unchanged, and unsupported types (including null) raise a descriptive
   * IllegalArgumentException instead of an opaque MatchError.
   */
  implicit def primitive2Parameter(value: Any): Parameter = value match {
    case p: Parameter => p
    case v: Int => IntegerParameter(v)
    case v: String => StringParameter(v)
    case v: Double => DoubleParameter(v)
    case v: Boolean => BooleanParameter(v)
    case plist: List[_] => ListParameter(plist.map(p => primitive2Parameter(p)))
    // NOTE: map keys are required to be Strings; non-String keys still fail inside the lambda.
    case pmap: Map[_, _] => MapParameter(pmap.map { case (k: String, v) => (k, primitive2Parameter(v)) })
    case other =>
      val tpe = if (other == null) "null" else other.getClass.getName
      throw new IllegalArgumentException(s"Cannot convert value of type $tpe to a Parameter")
  }
}
| oaqa/bagpipes-old | src/main/scala/edu/cmu/lti/oaqa/cse/scala/configuration/Parameters.scala | Scala | apache-2.0 | 904 |
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
import sbt._
import Keys._
import sbt.File
object Generators {
  // Generates a scala file that contains the play version for use at runtime.
  // Only rewrites the file when its content would change, so sbt does not see a
  // spurious source modification (and trigger recompiles) on every build.
  def PlayVersion(version: String, scalaVersion: String, sbtVersion: String, dir: File): Seq[File] = {
    val file = dir / "PlayVersion.scala"
    val scalaSource =
      """|package play.core
         |
         |object PlayVersion {
         |  val current = "%s"
         |  val scalaVersion = "%s"
         |  val sbtVersion = "%s"
         |}
      """.stripMargin.format(version, scalaVersion, sbtVersion)
    if (!file.exists() || IO.read(file) != scalaSource) {
      IO.write(file, scalaSource)
    }
    // Returned as a Seq because sbt's sourceGenerators expect Seq[File].
    Seq(file)
  }
}
object Tasks {
  // Collects Play template sources (*.scala.html, minus the configured excludes) and pairs
  // each file with a path relative to its source directory (falling back to the base
  // directory, then to a flat name) for use in sbt mappings.
  def scalaTemplateSourceMappings = (excludeFilter in unmanagedSources, unmanagedSourceDirectories in Compile, baseDirectory) map {
    (excludes, sdirs, base) =>
      val scalaTemplateSources = sdirs.descendantsExcept("*.scala.html", excludes)
      ((scalaTemplateSources --- sdirs --- base) pair (relativeTo(sdirs) | relativeTo(base) | flat)).toSeq
  }
}
| jyotikamboj/container | pf-framework/project/Tasks.scala | Scala | mit | 1,157 |
package io.skysail.server.app.events
import io.skysail.domain.RequestEvent
import io.skysail.domain.resources.ListResource
/** List resource for [[Event]]s exposed by the events application. */
class EventsResource extends ListResource[EventApplication, Event] {
  override def getList(requestEvent: RequestEvent): List[Event] = {
    // NOTE(review): the events-service lookup is commented out, so this endpoint
    // currently always returns an empty list — confirm whether this stub is intentional.
    List()//getApplication().eventsService.list
  }
}
| evandor/skysail-server | skysail.server.app.events/src/io/skysail/server/app/events/EventsResource.scala | Scala | apache-2.0 | 318 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.knn
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geohash.GeoHash
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TouchingGeoHashesTest extends Specification {

  /** A 30-bit GeoHash near Charlottesville, Virginia (interior, non-wrapping case). */
  def generateCvilleGH = {
    val precision = 30
    val lat = 38.0752150
    val lon = -78.4953560
    GeoHash(lon, lat, precision)
  }

  /** A 10-bit GeoHash near Suva, Fiji (crosses the antimeridian). */
  def generateSuvaGH = {
    val precision = 10
    val lat = -18.140
    val lon = 178.440
    GeoHash(lon, lat, precision)
  }

  /** A 5-bit GeoHash near McMurdo Station (polar region plus antimeridian). */
  def generateMcMurdoGH = {
    val precision = 5
    val lat = -77.842
    val lon = 166.68360
    GeoHash(lon, lat, precision)
  }

  // Fix: the original assertions used `calculated.forall(expected.contains)`, which is
  // one-sided — an empty or partial result would still pass. Comparing the two sides as
  // sets requires the calculation to produce exactly the expected neighbors.
  "Geomesa TouchingGeoHashes" should {
    "find GeoHashes around Charlottesville, Virginia" in {
      val touchingByCalculation = TouchingGeoHashes.touching(generateCvilleGH).map ( _.hash )
      val touchingByVisualInspection = Set(
        "dqb0te",
        "dqb0tf",
        "dqb0td",
        "dqb0tu",
        "dqb0ts",
        "dqb0w5",
        "dqb0w4",
        "dqb0wh")
      touchingByCalculation.toSet mustEqual touchingByVisualInspection
    }

    "Correctly treat the antimeridian and find GeoHashes around Suva, Fiji" in {
      val touchingByCalculation = TouchingGeoHashes.touching(generateSuvaGH).map ( _.hash )
      val touchingByVisualInspection = Set(
        "rv",
        "rg",
        "re",
        "rs",
        "rt",
        "2j",
        "2h",
        "25")
      touchingByCalculation.toSet mustEqual touchingByVisualInspection
    }

    "Correctly treat the polar region and the antimeridian and find GeoHashes around McMurdo Station" in {
      val touchingByCalculation = TouchingGeoHashes.touching(generateMcMurdoGH).map ( _.hash )
      val touchingByVisualInspection = Set(
        "h",
        "j",
        "n",
        "0",
        "1",
        "4",
        "5",
        "2",
        "r",
        "q")
      touchingByCalculation.toSet mustEqual touchingByVisualInspection
    }
  }
}
| ddseapy/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/process/knn/TouchingGeoHashesTest.scala | Scala | apache-2.0 | 2,539 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.enhancement.enhancements
/**
 * Adds the "Cold" element to the Weakening Mixture multi-selector enhancement.
 *
 * Stackable trait: `keyList` uses `abstract override` so that, via trait linearization,
 * "Cold" is prepended to whatever keys the rest of the mix-in stack provides.
 */
trait WeakeningMixtureCold extends WeakeningMixtureMultiSelector with MultiSelectorKeyGenerator {
  override lazy val description: Option[String] = Some(
    "When cast on a Cold spell on a creature that is immune to Cold, it makes them vulnerable to Cold for a short period of time."
  )

  abstract override def keyList: List[String] = "Cold" :: super.keyList
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/enhancement/enhancements/WeakeningMixtureCold.scala | Scala | apache-2.0 | 1,068 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.implicits.helpers
import org.platanios.tensorflow.api.core.Shape
import org.platanios.tensorflow.api.core.types.TF
import org.platanios.tensorflow.api.implicits.Implicits._
import org.platanios.tensorflow.api.ops._
import org.platanios.tensorflow.api.ops.basic.Basic
import shapeless._
import shapeless.ops.hlist.Tupler
import scala.collection.compat._
/** Represents types that have a "zero" value (e.g., RNN states).
*
* @author Emmanouil Antonios Platanios
*/
trait Zero[T] {
  type S // Shape type

  /** Evidence converting structures of type `T` to their corresponding shape structures `S`. */
  def evOutputToShape: OutputToShape.Aux[T, S]

  /** Generates a zero value of type `T`. */
  def zero(
      batchSize: Output[Int],
      shape: S,
      name: String = "Zero"
  ): T
}
object Zero {
/** Summons the implicit [[Zero]] instance for `T`, refining its shape type member. */
def apply[T](implicit ev: Zero[T]): Zero.Aux[T, ev.S] = {
  ev.asInstanceOf[Zero.Aux[T, ev.S]]
}

/** Aux pattern: a `Zero[T]` whose shape type member `S` is fixed to `SS`. */
type Aux[T, SS] = Zero[T] {
  type S = SS
}
/** [[Zero]] instance for `Unit`: the shape is `Unit` and the zero value is `()`. */
implicit val fromUnit: Aux[Unit, Unit] = {
  new Zero[Unit] {
    override type S = Unit

    override def evOutputToShape: OutputToShape.Aux[Unit, Unit] = {
      OutputToShape.fromUnit
    }

    override def zero(
        batchSize: Output[Int],
        shape: Unit,
        name: String = "Zero"
    ): Unit = {
      ()
    }
  }
}
/** [[Zero]] instance for a single `Output[T]`: zeros of shape `[batchSize] ++ shape`. */
implicit def fromOutput[T: TF]: Aux[Output[T], Shape] = {
  new Zero[Output[T]] {
    override type S = Shape

    override def evOutputToShape: OutputToShape.Aux[Output[T], Shape] = {
      OutputToShape.fromOutput[T]
    }

    override def zero(
        batchSize: Output[Int],
        shape: Shape,
        name: String = "Zero"
    ): Output[T] = {
      // Static batch size if known at graph-construction time, -1 (unknown) otherwise;
      // used only to set the static shape of the result.
      val staticBatchSize = Output.constantValue(batchSize).map(_.scalar).getOrElse(-1)
      Op.nameScope(name) {
        // Dynamic shape: [batchSize] prepended to the per-example shape.
        val fullShape = Basic.concatenate(Seq(
          batchSize.expandDims(0),
          shape.toOutput
        ), axis = 0)
        val zero = Basic.zeros[T](fullShape)
        zero.setShape(Shape(staticBatchSize) ++ shape)
        zero
      }
    }
  }
}
// TODO: [TYPES] !!! What about OutputIndexedSlices and TensorIndexedSlices?
implicit def fromOption[T](implicit
ev: Zero[T]
): Zero.Aux[Option[T], Option[ev.S]] = {
new Zero[Option[T]] {
override type S = Option[ev.S]
override def evOutputToShape: OutputToShape.Aux[Option[T], Option[ev.S]] = {
OutputToShape.fromOption[T](ev.evOutputToShape)
}
override def zero(
batchSize: Output[Int],
shape: Option[ev.S],
name: String
): Option[T] = {
Op.nameScope(name) {
shape.map(ev.zero(batchSize, _))
}
}
}
}
implicit def fromSeq[T](implicit
ev: Zero[T]
): Zero.Aux[Seq[T], Seq[ev.S]] = {
new Zero[Seq[T]] {
override type S = Seq[ev.S]
override def evOutputToShape: OutputToShape.Aux[Seq[T], Seq[ev.S]] = {
OutputToShape.fromSeq[T](ev.evOutputToShape)
}
override def zero(
batchSize: Output[Int],
shape: Seq[ev.S],
name: String
): Seq[T] = {
Op.nameScope(name) {
shape.map(ev.zero(batchSize, _))
}
}
}
}
implicit def fromMap[K, T](implicit
ev: Zero[T]
): Zero.Aux[Map[K, T], Map[K, ev.S]] = {
new Zero[Map[K, T]] {
override type S = Map[K, ev.S]
override def evOutputToShape: OutputToShape.Aux[Map[K, T], Map[K, ev.S]] = {
OutputToShape.fromMap[K, T](ev.evOutputToShape)
}
override def zero(
batchSize: Output[Int],
shape: Map[K, ev.S],
name: String
): Map[K, T] = {
Op.nameScope(name) {
shape.view.mapValues(ev.zero(batchSize, _)).toMap
}
}
}
}
implicit val fromHNil: Zero.Aux[HNil, HNil] = {
new Zero[HNil] {
override type S = HNil
override def evOutputToShape: OutputToShape.Aux[HNil, HNil] = {
OutputToShape.fromHNil
}
override def zero(
batchSize: Output[Int],
shape: HNil,
name: String = "Zero"
): HNil = {
HNil
}
}
}
implicit def fromHList[HT, HS, TT <: HList, TS <: HList](implicit
evH: Strict[Zero.Aux[HT, HS]],
evT: Strict[Zero.Aux[TT, TS]]
): Zero.Aux[HT :: TT, HS :: TS] = {
new Zero[HT :: TT] {
override type S = HS :: TS
override def evOutputToShape: OutputToShape.Aux[HT :: TT, HS :: TS] = {
OutputToShape.fromHList[HT, HS, TT, TS](evH.value.evOutputToShape, evT.value.evOutputToShape)
}
override def zero(
batchSize: Output[Int],
shape: HS :: TS,
name: String = "Zero"
): HT :: TT = {
Op.nameScope(name) {
evH.value.zero(batchSize, shape.head) ::
evT.value.zero(batchSize, shape.tail)
}
}
}
}
implicit def fromProduct[PT <: Product, PS <: Product, HT <: HList, HS <: HList](implicit
genT: Generic.Aux[PT, HT],
evH: Strict[Zero.Aux[HT, HS]],
tuplerS: Tupler.Aux[HS, PS],
genS: Generic.Aux[PS, HS]
): Zero.Aux[PT, PS] = {
new Zero[PT] {
override type S = PS
override def evOutputToShape: OutputToShape.Aux[PT, PS] = {
OutputToShape.fromProduct[PT, PS, HT, HS](genT, evH.value.evOutputToShape, tuplerS, genS)
}
override def zero(
batchSize: Output[Int],
shape: PS,
name: String = "Zero"
): PT = {
genT.from(evH.value.zero(batchSize, genS.to(shape), name))
}
}
}
}
| eaplatanios/tensorflow_scala | modules/api/src/main/scala/org/platanios/tensorflow/api/implicits/helpers/Zero.scala | Scala | apache-2.0 | 6,195 |
package creational
/**
* Lazy values in Scala can hold null values. Access to lazy value is thread-safe.
*
* @author Daniel Leon
*/
object LazyInit {
  /** Demonstrates that a `lazy val` body is evaluated only on first access.
    *
    * Note: procedure syntax (`def main(...) { }`) is deprecated; the explicit
    * `: Unit =` result type is used instead.
    */
  def main(args: Array[String]): Unit = {
    // Not evaluated here: evaluation is deferred until `x` is first read,
    // and that first evaluation is thread-safe.
    lazy val x = {
      print("(computing x in scala) ")
      42
    }
    // First access triggers the side effect, printing "(computing x in scala) 42".
    println(x)
  }
}
| LeonDaniel/DesignPatterns | ScalaPatterns/src/main/scala/creational/LazyInit.scala | Scala | lgpl-3.0 | 285 |
//
// Copyright (c) 2014 Mirko Nasato
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.encoded.jersik.testservice
import io.encoded.jersik.scala.runtime.JsonCodec
import io.encoded.jersik.scala.runtime.servlet.RpcServlet
import io.encoded.jersik.testsuite.TestServiceOperations
class TestServiceServlet extends RpcServlet(TestServiceOperations.toMap(ValidatingTestService), JsonCodec)
| mirkonasato/jersik | scala/jersik-scala-testservice/src/main/scala/io/encoded/jersik/testservice/TestServiceServlet.scala | Scala | apache-2.0 | 910 |
package org.clulab.utils
import scala.language.postfixOps
/**
* Created by dfried on 5/22/14
*/
object EvaluationStatistics {

  /** Per-class confusion counts: true/false positives and true/false negatives. */
  case class Table(var tp: Int, var fp: Int, var tn: Int, var fn: Int) {
    def accuracy = (tp + tn).toDouble / (tp + fp + tn + fn)
    def precision = tp.toDouble / (tp + fp)
    def recall = tp.toDouble / (tp + fn)
    def f1 = 2 * precision * recall / (precision + recall)
    // Total number of classified instances.
    def total = tp + fp + tn + fn
    // Instances actually belonging to the class.
    def trueCount = tp + fn
    def falseCount = fp + tn
    // Instances predicted as the class.
    def positiveCount = tp + fp
    def negativeCount = tn + fn
    // Cell-wise sum of two tables (used for micro-averaging).
    def + (other: Table) = Table(tp + other.tp, fp + other.fp, tn + other.tn, fn + other.fn)
  }

  /** Builds the one-vs-rest confusion table for a single outcome. */
  def makeTables[A](outcome: A)(predicted: Seq[A], actual: Seq[A]): Table = {
    require(predicted.size == actual.size, "predicted and actual labels must be same size")
    val counts = Table(0, 0, 0, 0)
    for ((p, a) <- predicted zip actual) {
      if (p == outcome) { // positive
        if (a == outcome) counts.tp += 1
        else counts.fp += 1
      } else { // negative
        if (a == outcome) counts.fn += 1
        else counts.tn += 1
      }
    }
    counts
  }

  /** Builds one confusion table per label observed in either label sequence. */
  def makeTables[A](predicted: Seq[A], actual: Seq[A]): Map[A, Table] = {
    val vals: Set[A] = (predicted ++ actual).toSet
    (for {
      v <- vals.toSeq
    } yield v -> makeTables(v)(predicted, actual)).toMap
  }

  // Micro average: pool all classes' counts into one table, then apply the statistic.
  def microAverage[A](tables: Map[A, Table])(accessor: Table => Double) = {
    accessor(tables.values.reduce(_ + _))
  }

  // Macro average: compute the statistic per class, then average uniformly.
  def macroAverage[A](tables: Map[A, Table])(accessor: Table => Double): Double = {
    val N_classes = tables.size
    tables.values.map(accessor).sum / N_classes
  }

  // Weighted average: per-class statistic weighted by the class's true frequency.
  def weightedAverage[A](tables: Map[A, Table])(accessor: Table => Double): Double = {
    val trueCounts: Map[A, Int] = tables.mapValues(_.trueCount)
    // Every table covers the same data, so any table's `total` is the data size.
    val N_data = tables.values.head.total
    tables.map({
      case (a, table) => accessor(table) * trueCounts(a).toDouble / N_data
    }).sum
  }

  /**
   * Calculate significance of the given evaluation statistic for labels predicted by a system over labels predicted by
   * a baseline, compared to actual labels, using the bootstrap.
   * @param stat A function of EvaluationStatistics, such as accuracy or microF1
   * @param predicted The labels predicted by the treatment system
   * @param baseline The labels predicted by the baseline (control) system
   * @param actual The actual labels
   * @param N_samples Number of samples to use in bootstrap
   * @tparam A The label type
   * @return The p-value of the statistic
   */
  def classificationSignificance[A](stat: EvaluationStatistics[A] => Double)(predicted: Seq[A], baseline: Seq[A],
                                                                             actual: Seq[A], N_samples: Int = 10000) = {
    require(predicted.size == baseline.size && baseline.size == actual.size, "label arrays must be same size")
    val N_labels = predicted.size
    // create an array of the labels zipped for easy access in bootstrap resampling
    val zipped = new Array[(A, A, A)](predicted.size)
    for (i <- 0 until N_labels)
      zipped(i) = (predicted(i), baseline(i), actual(i))

    def bootstrapDifference = {
      // generate a length N_labels vector of indices for sampling w/ replacement
      val indices = (0 until N_labels).map(_ => util.Random.nextInt(N_labels))
      // get the corresponding labels from each array
      val (predSampled, baseSampled, actSampled) = indices map (zipped(_)) unzip3
      // calculate the evaluation statistic, stat for predicted vs actual and for baseline vs actual
      val predictedEval = new EvaluationStatistics[A](predSampled, actSampled)
      val baselineEval = new EvaluationStatistics[A](baseSampled, actSampled)
      // return the difference of the stat
      stat(predictedEval) - stat(baselineEval)
    }

    // produce N_samples samples from the sequences, and compute the difference in stat between the
    // predicted and baseline accuracies (compared to actual labels) for each sample. Sort by value, increasing, and
    // find the index of the difference corresponding to the null hypothesis
    val sampledDifferences: Array[Double] = (for {
      sampleIx <- (0 until N_samples).toArray
    } yield bootstrapDifference).sorted

    val indexOfZero = {
      val i = java.util.Arrays.binarySearch(sampledDifferences, 0.0)
      // binarySearch returns -(insertionPoint) - 1 when the key is absent;
      // recover the insertion point in that case.
      if (i >= 0) i else -(i + 1)
    }
    // return the percentile
    indexOfZero.toDouble / N_samples
  }

  /**
   * Calculate accuracy significance for labels predicted by a system over labels
   * predicted by a baseline, compared to actual labels, using the bootstrap.
   * @param predicted The labels predicted by the treatment system
   * @param baseline The labels predicted by the baseline (control) system
   * @param actual The actual labels
   * @param N_samples Number of samples to use in bootstrap
   * @tparam A The label type
   * @return The p-value significance of the accuracy statistic
   */
  def classificationAccuracySignificance[A](predicted: Seq[A], baseline: Seq[A], actual: Seq[A], N_samples: Int = 10000) =
    classificationSignificance[A](_.accuracy)(predicted, baseline, actual, N_samples)
}
class EvaluationStatistics[A](tables: Map[A, EvaluationStatistics.Table]) {
  import EvaluationStatistics.Table

  // Convenience constructor: derive the per-class tables from label sequences.
  def this(predicted: Seq[A], actual: Seq[A]) =
    this(EvaluationStatistics.makeTables(predicted, actual))

  // Averaging helpers partially applied to this instance's tables.
  private def microAverage: ((Table) => Double) => Double = EvaluationStatistics.microAverage(tables)
  private def macroAverage: ((Table) => Double) => Double = EvaluationStatistics.macroAverage(tables)
  private def weightedAverage: ((Table) => Double) => Double = EvaluationStatistics.weightedAverage(tables)

  // Micro averages: counts pooled across classes before computing the statistic.
  lazy val microPrecision = microAverage(_.precision)
  lazy val microRecall = microAverage(_.recall)
  lazy val microF1 = microAverage(_.f1)

  // Macro averages: per-class statistics averaged uniformly over classes.
  lazy val macroPrecision = macroAverage(_.precision)
  lazy val macroRecall = macroAverage(_.recall)
  lazy val macroF1 = macroAverage(_.f1)

  // Overall accuracy: total true positives over the total instance count.
  lazy val accuracy = tables.values.map(_.tp).sum.toDouble / tables.values.head.total

  /** format of this is based on David Hall's Nak */
  override def toString = {
    def f(d: Double) = "%.4f".format(d)
    val tableRep = tables.map( { case (a, table) => {
      s"$a:\\tPrecision: ${f(table.precision)}\\tRecall: ${f(table.recall)}\\tF1: ${f(table.f1)}\\tAccuracy: ${f(table.accuracy)}"
    }}).mkString("\\n")
    s"""Evaluation Statistics:
==========
Accuracy: ${f(accuracy)}
Macro\\t Precision: ${f(macroPrecision)}\\tRecall: ${f(macroRecall)}\\tF1: ${f(macroF1)}
Micro\\t Precision: ${f(microPrecision)}\\tRecall: ${f(microRecall)}\\tF1: ${f(microF1)}
==========
${tableRep}"""
  }
}
| sistanlp/processors | main/src/main/scala/org/clulab/utils/EvaluationStatistics.scala | Scala | apache-2.0 | 6,702 |
// Project: surfice-docdb
// Module: common / js
// Description: A PropsDoc implementation based on a js.Dictionary
// Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file).
package surfice.docdb
import scala.scalajs.js
/**
* A [[PropsDoc]] that wraps a [[js.Dictionary]].
*
* @param dict
*/
case class DictionaryPropsDoc(dict: js.Dictionary[js.Any]) extends AbstractPropsDoc {
  // Document ID, read from the conventional "_id" property when present.
  @inline
  final override def id: Option[Any] = dict.get("_id")
  @inline
  final override def apply(key: String): Any = dict(key)
  // NOTE(review): relies on an unchecked cast of the stored value to `T`;
  // callers must request the type actually stored under `key`.
  @inline
  final override def get[T](key: String): Option[T] = dict.get(key).asInstanceOf[Option[T]]
  @inline
  final override def keys: Iterable[String] = dict.keys
  @inline
  override def toMap: Map[String, Any] = dict.toMap
  @inline
  override def iterator: Iterator[(String, Any)] = dict.iterator
}
| jokade/surfice-docdb | js/src/main/scala/surfice/docdb/DictionaryPropsDoc.scala | Scala | mit | 862 |
/**
* Copyright (C) 2014 MediaMath <http://www.mediamath.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author themodernlife
* @author alekseyig
*/
package play.api.libs.dynamodb
import scala.collection.generic
import scala.language.higherKinds
import org.joda.time.{DateTime, LocalDate}
import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}
import play.api.libs.functional.{Alternative, Applicative, Functor}
/** Type class describing how to read a value of type `A` from a DynamoDB value. */
trait Reads[A] { self =>

  /** Attempts to read an `A` out of the given DynamoDB value. */
  def reads(value: DdbValue): DdbResult[A]

  /** Transforms a successful result with `f`, leaving errors untouched. */
  def map[B](f: A => B): Reads[B] =
    Reads[B] { v =>
      self.reads(v).map(f)
    }

  /** Chains a second read whose reader depends on this read's result. */
  def flatMap[B](f: A => Reads[B]): Reads[B] =
    Reads[B] { v =>
      self.reads(v).flatMap { a =>
        f(a).reads(v)
      }
    }

  /** Keeps a successful result only when it satisfies the predicate. */
  def filter(f: A => Boolean): Reads[A] =
    Reads[A] { v =>
      self.reads(v).filter(f)
    }
}
object Reads {
  /** Creates a `Reads[A]` from a plain reading function. */
  def apply[A](f: DdbValue => DdbResult[A]): Reads[A] = new Reads[A] {
    def reads(value: DdbValue) = f(value)
  }

  /** Reads an `A` located under `key` within a `DdbItem`. */
  def at[A](key: DdbKey)(implicit reads: Reads[A]): Reads[A] = {
    Reads[A] {
      case i: DdbItem => key.findWithin(i).flatMap(reads.reads)
      case _ => DdbError(Seq("error.expected.ddbitem"))
    }
  }

  // Applicative instance, enabling combinator composition over `Reads`.
  implicit def applicative(implicit applicativeDdbResult: Applicative[DdbResult]): Applicative[Reads] = new Applicative[Reads] {
    def pure[A](a: A): Reads[A] = Reads[A] { _ => DdbSuccess(a) }
    def map[A, B](m: Reads[A], f: A => B): Reads[B] = m.map(f)
    def apply[A, B](mf: Reads[A => B], ma: Reads[A]): Reads[B] = new Reads[B] {
      def reads(value: DdbValue) = applicativeDdbResult(mf.reads(value), ma.reads(value))
    }
  }

  // Alternative instance: `alt1 | alt2` falls back to `alt2` when `alt1` fails,
  // accumulating the error messages of both when neither succeeds.
  implicit def alternative(implicit a: Applicative[Reads]): Alternative[Reads] = new Alternative[Reads] {
    val app = a
    def |[A, B >: A](alt1: Reads[A], alt2: Reads[B]): Reads[B] = new Reads[B] {
      def reads(value: DdbValue) = alt1.reads(value) match {
        case d1 @ DdbSuccess(_) => d1
        case DdbError(es1) => alt2.reads(value) match {
          case d1 @ DdbSuccess(_) => d1
          case DdbError(es2) => DdbError(es1 ++ es2)
        }
      }
    }
    def empty: Reads[Nothing] = new Reads[Nothing] { def reads(value: DdbValue) = DdbError(Seq()) }
  }

  implicit def functorReads(implicit a: Applicative[Reads]) = new Functor[Reads] {
    def fmap[A, B](reads: Reads[A], f: A => B): Reads[B] = a.map(reads, f)
  }

  // Numeric readers: DynamoDB numbers arrive as `DdbNumber` and are narrowed here.
  implicit object IntReads extends Reads[Int] {
    def reads(item: DdbValue) = item match {
      case DdbNumber(n) => DdbSuccess(n.toInt)
      case _ => DdbError(Seq("error.expected.ddbnumber"))
    }
  }

  implicit object LongReads extends Reads[Long] {
    def reads(item: DdbValue) = item match {
      case DdbNumber(n) => DdbSuccess(n.toLong)
      case _ => DdbError(Seq("error.expected.ddbnumber"))
    }
  }

  implicit object FloatReads extends Reads[Float] {
    def reads(item: DdbValue) = item match {
      case DdbNumber(n) => DdbSuccess(n.toFloat)
      case _ => DdbError(Seq("error.expected.ddbnumber"))
    }
  }

  implicit object DoubleReads extends Reads[Double] {
    def reads(item: DdbValue) = item match {
      case DdbNumber(n) => DdbSuccess(n.toDouble)
      case _ => DdbError(Seq("error.expected.ddbnumber"))
    }
  }

  implicit object BigDecimalReads extends Reads[BigDecimal] {
    def reads(item: DdbValue) = item match {
      case DdbNumber(n) => DdbSuccess(n.underlying())
      case _ => DdbError(Seq("error.expected.ddbnumber"))
    }
  }

  implicit object ByteArrayReads extends Reads[Array[Byte]] {
    def reads(item: DdbValue) = item match {
      case DdbBinary(n) => DdbSuccess(n)
      case _ => DdbError(Seq("error.expected.ddbbinary"))
    }
  }

  implicit object StringReads extends Reads[String] {
    def reads(item: DdbValue) = item match {
      case DdbString(n) => DdbSuccess(n)
      case _ => DdbError(Seq("error.expected.ddbstring"))
    }
  }

  // Optional reader: any failed read becomes `None` rather than an error.
  implicit def OptionReads[T](implicit fmt: Reads[T]): Reads[Option[T]] = new Reads[Option[T]] {
    def reads(json: DdbValue) = fmt.reads(json) match {
      case DdbSuccess(v) ⇒ DdbSuccess(Some(v))
      case _ ⇒ DdbSuccess(None)
    }
  }

  /** Reads a `T` from the given DynamoDB value using the implicit reader. */
  def fromDdbValue[T](value: DdbValue)(implicit fdv: Reads[T]): DdbResult[T] = fdv.reads(value)

  /** Reads a `DdbSet` into any collection `F[A]` for which a builder and an
    * element reader exist, accumulating all element errors instead of failing fast.
    */
  implicit def traversableReads[F[_], A](implicit bf: generic.CanBuildFrom[F[_], A, F[A]], ra: Reads[A]): Reads[F[A]] = new Reads[F[A]] {
    def reads(item: DdbValue) = item match {
      case DdbSet(xs) =>
        val results = xs.toSeq.map(x => fromDdbValue[A](x)(ra))
        // Gather every failure with `collect`. The previous implementation
        // applied `.left.get` to *all* results when any error was present,
        // which threw NoSuchElementException as soon as successes and errors
        // were mixed within the same set.
        val allErrors = results.collect { case DdbError(es) => es }.flatten.toList
        if (allErrors.nonEmpty) {
          DdbError(allErrors)
        } else {
          val builder = bf()
          results.foreach {
            case DdbSuccess(v) => builder += v
            case _ => // unreachable: no errors remain at this point
          }
          DdbSuccess(builder.result())
        }
      case _ => DdbError(Seq("error.expected.ddbset"))
    }
  }

  /** Reads a `LocalDate` from a `DdbString`, using an optional joda-time pattern
    * (empty pattern = ISO local-date parser) and an optional input corrector.
    */
  def localDateReads(pattern: String, corrector: String => String = identity) = new Reads[LocalDate] {
    val df = if (pattern == "") ISODateTimeFormat.localDateParser else DateTimeFormat.forPattern(pattern)

    private def parseDate(input: String): Option[LocalDate] =
      scala.util.control.Exception.allCatch[LocalDate] opt LocalDate.parse(input, df)

    def reads(item: DdbValue) = item match {
      case DdbString(n) => parseDate(corrector(n)) match {
        case Some(d) => DdbSuccess(d)
        case None => DdbError(Seq("error.expected.localdate.format", pattern))
      }
      case _ => DdbError(Seq("error.expected.ddbstring"))
    }
  }

  implicit val DefaultLocalDateReads = localDateReads("")

  /** Reads a `DateTime` from a `DdbString`, using an optional joda-time pattern
    * (empty pattern = ISO date-time without millis) and an optional input corrector.
    */
  def dateTimeReads(pattern: String, corrector: String => String = identity) = new Reads[DateTime] {
    val df = if (pattern == "") ISODateTimeFormat.dateTimeNoMillis() else DateTimeFormat.forPattern(pattern)

    private def parseDate(input: String): Option[DateTime] =
      scala.util.control.Exception.allCatch[DateTime] opt DateTime.parse(input, df)

    def reads(item: DdbValue) = item match {
      case DdbString(n) => parseDate(corrector(n)) match {
        case Some(d) => DdbSuccess(d)
        case None => DdbError(Seq("error.expected.datetime.format", pattern))
      }
      case _ => DdbError(Seq("error.expected.ddbstring"))
    }
  }

  implicit val DefaultDateTimeReads = dateTimeReads("")
}
package polystyrene.core
/** Minimal coordinate class used to demonstrate the prototype pattern. */
class Coord {

  /** Demo equality: treats every pair of coordinates as equal. */
  def equals(c: Coord): Boolean = true

  /** Produces a fresh copy, logging "Pouet" as a side effect. */
  def copy: Coord = {
    println("Pouet")
    new Coord
  }
}
| HKervadec/Polystyrene | src/polystyrene/core/Coord.scala | Scala | gpl-2.0 | 146 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.arrow.vector
import java.io.Closeable
import java.util.{Collections, Date}
import org.locationtech.jts.geom.Geometry
import org.apache.arrow.memory.BufferAllocator
import org.apache.arrow.vector.complex.{ListVector, StructVector}
import org.apache.arrow.vector.types.FloatingPointPrecision
import org.apache.arrow.vector.types.pojo.{ArrowType, FieldType}
import org.apache.arrow.vector.{BigIntVector, FieldVector}
import org.locationtech.geomesa.arrow.features.ArrowSimpleFeature
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding.Encoding
import org.locationtech.geomesa.arrow.vector.SimpleFeatureVector.SimpleFeatureEncoding.Encoding.Encoding
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
/**
* Abstraction for using simple features in Arrow vectors
*
* @param sft simple feature type
* @param underlying underlying arrow vector
* @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
* All values must be provided up front.
* @param encoding options for encoding
* @param allocator buffer allocator
*/
class SimpleFeatureVector private [arrow] (val sft: SimpleFeatureType,
                                           val underlying: StructVector,
                                           val dictionaries: Map[String, ArrowDictionary],
                                           val encoding: SimpleFeatureEncoding)
                                          (implicit allocator: BufferAllocator) extends Closeable {

  // note: writer creates the map child vectors based on the sft, and should be instantiated before the reader
  val writer = new Writer(this)
  val reader = new Reader(this)

  /**
   * Clear any simple features currently stored in the vector
   */
  def clear(): Unit = underlying.setValueCount(0)

  // Releases the underlying arrow buffers and the writer's resources.
  override def close(): Unit = {
    underlying.close()
    writer.close()
  }

  /** Writes simple features into the backing arrow vector. */
  class Writer(vector: SimpleFeatureVector) {
    private [SimpleFeatureVector] val arrowWriter = vector.underlying.getWriter
    // Writer for the feature ID column.
    private val idWriter = ArrowAttributeWriter.id(sft, Some(vector.underlying), vector.encoding)
    // One writer per attribute, in the feature type's descriptor order.
    private [arrow] val attributeWriters = ArrowAttributeWriter(sft, Some(vector.underlying), dictionaries, encoding).toArray

    /** Writes the feature's ID and all attributes at the given row index. */
    def set(index: Int, feature: SimpleFeature): Unit = {
      arrowWriter.setPosition(index)
      arrowWriter.start()
      idWriter.apply(index, feature)
      var i = 0
      while (i < attributeWriters.length) {
        attributeWriters(i).apply(index, feature.getAttribute(i))
        i += 1
      }
      arrowWriter.end()
    }

    /** Finalizes the number of rows stored in the vector. */
    def setValueCount(count: Int): Unit = {
      arrowWriter.setValueCount(count)
      attributeWriters.foreach(_.setValueCount(count))
    }

    private [vector] def close(): Unit = arrowWriter.close()
  }

  /** Reads simple features back out of the backing arrow vector. */
  class Reader(vector: SimpleFeatureVector) {
    val idReader: ArrowAttributeReader = ArrowAttributeReader.id(sft, vector.underlying, vector.encoding)
    val readers: Array[ArrowAttributeReader] =
      ArrowAttributeReader(sft, vector.underlying, dictionaries, encoding).toArray

    // feature that can be re-populated with calls to 'load'
    val feature: ArrowSimpleFeature = new ArrowSimpleFeature(sft, idReader, readers, -1)

    /** Returns a new feature wrapper positioned at the given row index. */
    def get(index: Int): ArrowSimpleFeature = new ArrowSimpleFeature(sft, idReader, readers, index)

    /** Repositions the shared `feature` instance to the given row index. */
    def load(index: Int): Unit = feature.index = index

    def getValueCount: Int = vector.underlying.getValueCount
  }
}
object SimpleFeatureVector {

  // Default initial row capacity for newly-created vectors.
  val DefaultCapacity = 8096
  // Name of the child vector that holds feature IDs.
  val FeatureIdField = "id"
  // Field-metadata key storing the encoded attribute descriptor.
  val DescriptorKey = "descriptor"
  // Vector-metadata key storing the encoded feature-type user data.
  val OptionsKey = "options"

  /** Encoding options: whether/how feature IDs are stored, and the encodings
    * used for geometry and date attributes. */
  case class SimpleFeatureEncoding(fids: Option[Encoding], geometry: Encoding, date: Encoding)

  object SimpleFeatureEncoding {
    val Min = SimpleFeatureEncoding(Some(Encoding.Min), Encoding.Min, Encoding.Min)
    val Max = SimpleFeatureEncoding(Some(Encoding.Max), Encoding.Max, Encoding.Max)

    /** Minimal encoding; proxied fids map to `Encoding.Min`, plain fids to `Encoding.Max`. */
    def min(includeFids: Boolean, proxyFids: Boolean = false): SimpleFeatureEncoding = {
      val fids = if (includeFids) { Some(if (proxyFids) { Encoding.Min } else { Encoding.Max }) } else { None }
      SimpleFeatureEncoding(fids, Encoding.Min, Encoding.Min)
    }

    // Min = reduced size/precision, Max = full size/precision.
    object Encoding extends Enumeration {
      type Encoding = Value
      val Min, Max = Value
    }
  }

  /**
   * Create a new simple feature vector
   *
   * @param sft simple feature type
   * @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
   *                     All values must be provided up front.
   * @param encoding options for encoding
   * @param capacity initial capacity for number of features able to be stored in vectors
   * @param allocator buffer allocator
   * @return
   */
  def create(sft: SimpleFeatureType,
             dictionaries: Map[String, ArrowDictionary],
             encoding: SimpleFeatureEncoding = SimpleFeatureEncoding.Min,
             capacity: Int = DefaultCapacity)
            (implicit allocator: BufferAllocator): SimpleFeatureVector = {
    // Store the feature type's user data in the vector-level metadata.
    val metadata = Collections.singletonMap(OptionsKey, SimpleFeatureTypes.encodeUserData(sft))
    val fieldType = new FieldType(true, ArrowType.Struct.INSTANCE, null, metadata)
    val underlying = new StructVector(sft.getTypeName, allocator, fieldType, null)
    val vector = new SimpleFeatureVector(sft, underlying, dictionaries, encoding)
    // set capacity after all child vectors have been created by the writers, then allocate
    underlying.setInitialCapacity(capacity)
    underlying.allocateNew()
    vector
  }

  /**
   * Creates a simple feature vector based on an existing arrow vector
   *
   * @param vector arrow vector
   * @param dictionaries map of field names to dictionary values, used for dictionary encoding fields.
   *                     All values must be provided up front.
   * @param allocator buffer allocator
   * @return
   */
  def wrap(vector: StructVector, dictionaries: Map[String, ArrowDictionary])
          (implicit allocator: BufferAllocator): SimpleFeatureVector = {
    // Reconstruct the feature type and encoding from the vector's own metadata.
    val (sft, encoding) = getFeatureType(vector)
    new SimpleFeatureVector(sft, vector, dictionaries, encoding)
  }

  /**
   * Create a simple feature vector using a new arrow vector
   *
   * @param vector simple feature vector to copy
   * @param underlying arrow vector
   * @param allocator buffer allocator
   * @return
   */
  def clone(vector: SimpleFeatureVector, underlying: StructVector)
           (implicit allocator: BufferAllocator): SimpleFeatureVector = {
    new SimpleFeatureVector(vector.sft, underlying, vector.dictionaries, vector.encoding)
  }

  /**
   * Reads the feature type and feature encoding from an existing arrow vector
   *
   * @param vector vector
   * @return
   */
  def getFeatureType(vector: StructVector): (SimpleFeatureType, SimpleFeatureEncoding) = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType

    import scala.collection.JavaConverters._

    val attributes = ArrayBuffer.empty[String]
    var fidEncoding: Option[Encoding] = None
    // Attribute descriptors are stored in each child field's metadata; the feature
    // ID encoding is inferred from the ID field's arrow type.
    vector.getField.getChildren.asScala.foreach { field =>
      if (field.getName == FeatureIdField) {
        field.getType match {
          case _: ArrowType.Int => fidEncoding = Some(Encoding.Min) // proxy id encoded fids
          case _: ArrowType.FixedSizeList => fidEncoding = Some(Encoding.Max) // uuid encoded fids
          case _: ArrowType.Utf8 => fidEncoding = Some(Encoding.Max) // normal string fids
          case _ => throw new IllegalArgumentException(s"Found feature ID vector field of unexpected type: $field")
        }
      } else {
        attributes.append(field.getMetadata.get(DescriptorKey))
      }
    }
    // add sft-level metadata
    val options = Option(vector.getField.getMetadata.get(OptionsKey)).getOrElse("")
    val sft = SimpleFeatureTypes.createImmutableType(vector.getField.getName, attributes.mkString(",") + options)
    // Geometry precision is derived from the geometry child vector's field type
    // (checking nested list/map vectors as a fallback).
    val geomPrecision = {
      val geomVector: Option[FieldVector] =
        Option(sft.getGeomField).flatMap(d => Option(vector.getChild(d))).orElse(getNestedVector[Geometry](sft, vector))
      val isDouble = geomVector.exists(v => GeometryFields.precisionFromField(v.getField) == FloatingPointPrecision.DOUBLE)
      if (isDouble) { Encoding.Max } else { Encoding.Min }
    }
    // Date precision: a BigIntVector indicates millisecond longs (Max encoding).
    val datePrecision = {
      val dateVector: Option[FieldVector] =
        sft.getDtgField.flatMap(d => Option(vector.getChild(d))).orElse(getNestedVector[Date](sft, vector))
      val isLong = dateVector.exists(_.isInstanceOf[BigIntVector])
      if (isLong) { Encoding.Max } else { Encoding.Min }
    }
    val encoding = SimpleFeatureEncoding(fidEncoding, geomPrecision, datePrecision)
    (sft, encoding)
  }

  /** Returns true when the field's stored descriptor binds to a geometry type. */
  def isGeometryVector(vector: FieldVector): Boolean = {
    Option(vector.getField.getMetadata.get(DescriptorKey))
        .map(SimpleFeatureTypes.createDescriptor)
        .exists(d => classOf[Geometry].isAssignableFrom(d.getType.getBinding))
  }

  /**
   * Checks nested vector types (lists and maps) for instances of the given type
   *
   * @param sft simple feature type
   * @param vector simple feature vector
   * @param ct class tag
   *
   * @return
   */
  private def getNestedVector[T](sft: SimpleFeatureType,
                                 vector: StructVector)
                                (implicit ct: ClassTag[T]): Option[FieldVector] = {
    import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor

    import scala.collection.JavaConversions._

    sft.getAttributeDescriptors.flatMap {
      case d if d.isList && ct.runtimeClass.isAssignableFrom(d.getListType()) =>
        Option(vector.getChild(d.getLocalName).asInstanceOf[ListVector]).map(_.getDataVector)
      case d if d.isMap && ct.runtimeClass.isAssignableFrom(d.getMapTypes()._1) =>
        Option(vector.getChild(d.getLocalName).asInstanceOf[StructVector]).map(_.getChildrenFromFields.get(0))
      case d if d.isMap && ct.runtimeClass.isAssignableFrom(d.getMapTypes()._2) =>
        Option(vector.getChild(d.getLocalName).asInstanceOf[StructVector]).map(_.getChildrenFromFields.get(1))
      case _ => None
    }.headOption
  }
}
| elahrvivaz/geomesa | geomesa-arrow/geomesa-arrow-gt/src/main/scala/org/locationtech/geomesa/arrow/vector/SimpleFeatureVector.scala | Scala | apache-2.0 | 11,027 |
package org.http4s
package headers
import cats.data.NonEmptyList
import org.http4s.parser.HttpHeaderParser
import org.http4s.util.{Renderable, Writer}
// See https://tools.ietf.org/html/rfc7233
object Range extends HeaderKey.Internal[Range] with HeaderKey.Singleton {

  /** Builds a `Range` header with an explicit range unit and one or more sub-ranges. */
  def apply(unit: RangeUnit, r1: SubRange, rs: SubRange*): Range =
    Range(unit, NonEmptyList.of(r1, rs:_*))

  /** Builds a byte-range header from the given sub-ranges. */
  def apply(r1: SubRange, rs: SubRange*): Range = apply(RangeUnit.Bytes, r1, rs:_*)

  /** Builds a byte-range header for the closed range `begin-end`. */
  def apply(begin: Long, end: Long): Range = apply(SubRange(begin, Some(end)))

  /** Builds an open-ended byte-range header starting at `begin`. */
  def apply(begin: Long): Range = apply(SubRange(begin, None))

  object SubRange {
    def apply(first: Long): SubRange = SubRange(first, None)
    def apply(first: Long, second: Long): SubRange = SubRange(first, Some(second))
  }

  /** A single sub-range, rendered as `first` or `first-second` per RFC 7233. */
  final case class SubRange(first: Long, second: Option[Long]) extends Renderable {

    /** Base method for rendering this object efficiently */
    override def render(writer: Writer): writer.type = {
      writer << first
      second.foreach( writer << '-' << _ )
      writer
    }
  }

  // Header value parsing is delegated to the shared HTTP header parser.
  override def parse(s: String): ParseResult[Range] =
    HttpHeaderParser.RANGE(s)
}
/** The `Range` request header: a range unit plus one or more sub-ranges. */
final case class Range(unit: RangeUnit, ranges: NonEmptyList[Range.SubRange]) extends Header.Parsed {

  /** The header key is the companion `Range` object. */
  override def key: Range.type = Range

  /** Renders as `unit=first[,next...]`, e.g. `bytes=0-499,600-999`. */
  override def renderValue(writer: Writer): writer.type = {
    writer << unit << '=' << ranges.head
    for (r <- ranges.tail)
      writer << ',' << r
    writer
  }
}
| ZizhengTai/http4s | core/src/main/scala/org/http4s/headers/Range.scala | Scala | apache-2.0 | 1,465 |
import sbt._
object PreviousVersion {
  // Sbt setting key for the last released version, consulted by MiMa
  // binary-compatibility ("mima") checks.
  val previousVersion = settingKey[String]("The previous version of the library, for mima checks")
}
| socrata-platform/socrata-curator-utils | project/PreviousVersion.scala | Scala | apache-2.0 | 140 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014 Alexey Aksenov ezh@ezh.msk.ru
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Global License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Global License for more details.
* You should have received a copy of the GNU Affero General Global License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Global License.
*
* In accordance with Section 7(b) of the GNU Affero General Global License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: ezh@ezh.msk.ru
*/
package org.digimead.tabuddy.desktop.logic.ui.support.encryption
import org.digimead.tabuddy.desktop.core.support.{ App, WritableValue }
import org.digimead.tabuddy.desktop.logic.Messages
import org.digimead.tabuddy.desktop.logic.payload.marker.serialization.encryption.{ Base, Encryption }
import org.eclipse.core.databinding.DataBindingContext
import org.eclipse.jface.databinding.viewers.ViewersObservables
import org.eclipse.jface.dialogs.{ IDialogConstants, TitleAreaDialog }
import org.eclipse.jface.viewers.{ LabelProvider, StructuredSelection }
import org.eclipse.swt.SWT
import org.eclipse.swt.events.{ DisposeEvent, DisposeListener }
import org.eclipse.swt.layout.GridData
import org.eclipse.swt.widgets.{ Button, Composite, Control, Shell }
/**
* Adapter between logic.payload.marker.serialization.encryption.Base and UI
*/
class BaseNNAdapter extends EncryptionAdapter {
  /** Identifier of the encryption mechanism. */
  val identifier: Encryption.Identifier = Base.Identifier

  /**
   * Get composite for the encryption configuration.
   * Narrows the optional default parameters to Base.Parameters and throws
   * IllegalArgumentException for any other parameter type.
   */
  def composite(parent: Composite, default: Option[Encryption.Parameters]) = {
    val parameters = default map {
      case p: Base.Parameters ⇒ p
      case other ⇒ throw new IllegalArgumentException(s"Expect Base.Parameters, but ${other} found")
    }
    Option(new BaseNNAdapter.BaseNNComposite(parent, SWT.NONE, parameters))
  }

  /**
   * Get dialog for the encryption configuration.
   * Narrows the optional default parameters to Base.Parameters and throws
   * IllegalArgumentException for any other parameter type.
   */
  def dialog(parent: Shell, default: Option[Encryption.Parameters], tag: String = Messages.encryption_text) = {
    val parameters = default map {
      case p: Base.Parameters ⇒ p
      case other ⇒ throw new IllegalArgumentException(s"Expect Base.Parameters, but ${other} found")
    }
    Option(new BaseNNAdapter.BaseNNDialog(parent, parameters, tag))
  }

  /** Flag indicating whether the parameters are supported. */
  def parameters: Boolean = true
}
object BaseNNAdapter {
  /**
   * Available dictionary length parameters as (combo label, description, length) tuples.
   * Fix: the embedded quotes were written as `\\"` (a backslash followed by a
   * string terminator), which is not valid Scala in an ordinary string literal;
   * they must be escaped as `\"`.
   */
  val length = Seq(("64", "the \"base64\" encoding specified by RFC 4648 section 4, Base 64 Encoding", Base.Dictionary64))

  /**
   * BaseNN adapter composite: lets the user pick a dictionary length and
   * exposes the selection as Encryption.Parameters via get().
   */
  class BaseNNComposite(parent: Composite, style: Int, defaultParameters: Option[Base.Parameters])
    extends BaseNNAdapterSkel(parent, style) with EncryptionAdapter.Composite {
    /** Binding context. */
    lazy val bindingContext = new DataBindingContext(App.realm)
    /** Dictionary length field holding the current combo selection. */
    lazy val dictionaryLengthField = WritableValue[(String, String, Base.LengthParameter)]
    /** Composite result: Left(error message) or Right(parameters). */
    @volatile protected var result = Option.empty[Either[String, Encryption.Parameters]]

    initializeUI()
    initializeBindings()
    initializeDefaults()
    this.addDisposeListener(new DisposeListener {
      def widgetDisposed(e: DisposeEvent) = onDispose
    })

    /** Get an error or encryption parameters. */
    def get(): Option[Either[String, Encryption.Parameters]] = result
    /** On dispose callback: capture the final result, then release bindings. */
    protected def onDispose {
      updateResult
      bindingContext.dispose()
      dictionaryLengthField.dispose()
    }
    /** Initialize UI part. */
    protected def initializeUI() {
      App.assertEventThread()
      val comboViewerDictionaryLength = getComboViewerDictionaryLength()
      comboViewerDictionaryLength.setLabelProvider(new LabelProvider() {
        override def getText(element: AnyRef): String = element match {
          // The all-null tuple is the "please select" placeholder entry.
          case (null, null, null) ⇒ Messages.Adapter_selectDictionaryLength_text
          case (name, description: String, length) ⇒ s"${name} - ${description.capitalize}"
          case unknown ⇒ super.getText(unknown)
        }
      })
      // Delay propagation of combo selections by 50ms to coalesce rapid changes.
      val dictionaryLengthFieldBinding = bindingContext.bindValue(ViewersObservables.
        observeDelayedValue(50, ViewersObservables.observeSingleSelection(comboViewerDictionaryLength)), dictionaryLengthField)
      comboViewerDictionaryLength.getCCombo().addDisposeListener(new DisposeListener {
        def widgetDisposed(e: DisposeEvent) = bindingContext.removeBinding(dictionaryLengthFieldBinding)
      })
      comboViewerDictionaryLength.add((null, null, null))
      BaseNNAdapter.length.foreach(comboViewerDictionaryLength.add)
    }
    /** Initialize binding part: recompute the result on every selection change. */
    protected def initializeBindings() {
      dictionaryLengthField.addChangeListener { case ((name, description, length), event) ⇒ updateResult }
    }
    /** Initialize default values from the supplied parameters, if any. */
    protected def initializeDefaults() = defaultParameters match {
      case Some(parameters) ⇒
        BaseNNAdapter.length.find(_._3 == parameters.dictionaryLength).foreach(value ⇒ getComboViewerDictionaryLength.setSelection(new StructuredSelection(value)))
      case None ⇒
        getComboViewerDictionaryLength.setSelection(new StructuredSelection((null, null, null)))
    }
    /** Update result value from the current field state. */
    protected def updateResult = {
      for {
        dictionaryLength ← Option(dictionaryLengthField.value) if dictionaryLength._3 != null
      } yield dictionaryLength._3
    } match {
      case Some(length) ⇒
        result = Some(Right(Base(length)))
      case _ ⇒
        // No selection (or the placeholder) means parameters are incomplete.
        result = Some(Left(Messages.parametersRequired_text))
    }
  }

  /**
   * BaseNN adapter dialog wrapping a BaseNNComposite in a TitleAreaDialog.
   */
  class BaseNNDialog(parentShell: Shell, defaultValue: Option[Base.Parameters], tag: String)
    extends TitleAreaDialog(parentShell) with EncryptionAdapter.Dialog {
    /** Private field with content's composite; null until createDialogArea runs. */
    @volatile protected var content: BaseNNComposite = null
    /** Get an error or encryption parameters. */
    def get(): Option[Either[String, Encryption.Parameters]] = Option(content) match {
      case Some(content) ⇒ content.get()
      case None ⇒ Some(Left(Messages.parametersRequired_text))
    }
    override protected def configureShell(shell: Shell) {
      super.configureShell(shell)
      shell.setText(Messages.Adapter_selectXParameters_text.format(Base.Identifier.name.capitalize, tag))
    }
    override def create() {
      super.create()
      setTitle(Base.Identifier.name.capitalize)
      setMessage(Base.Identifier.description.capitalize)
    }
    override protected def createButton(parent: Composite, id: Int, label: String, defaultButton: Boolean): Button = {
      val button = super.createButton(parent, id, label, defaultButton)
      if (id == IDialogConstants.OK_ID) {
        // Keep OK enabled only while the composite holds valid parameters.
        button.setEnabled(content.get().map(_.isRight).getOrElse(false))
        content.dictionaryLengthField.addChangeListener { case (_, event) ⇒ button.setEnabled(content.get().map(_.isRight).getOrElse(false)) }
      }
      button
    }
    override protected def createDialogArea(parent: Composite): Control = {
      content = new BaseNNAdapter.BaseNNComposite(parent, SWT.NONE, defaultValue)
      content.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1))
      content
    }
  }
}
| digimead/digi-TABuddy-desktop | part-logic/src/main/scala/org/digimead/tabuddy/desktop/logic/ui/support/encryption/BaseNNAdapter.scala | Scala | agpl-3.0 | 9,303 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
*
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.cli.models
import java.io.FileNotFoundException
import com.actorbase.driver.exceptions._
import com.typesafe.config.ConfigFactory
import scala.collection.JavaConversions._
import java.io.FileNotFoundException
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import com.actorbase.driver.ActorbaseDriver
import scala.util.{ Failure, Success }
sealed trait Helper {
  /**
   * Unchecked cast helper used to narrow untyped `params` values.
   * NOTE: `asInstanceOf` throws ClassCastException at use-site if the runtime
   * type does not match A (erasure permitting) — callers must pass the right type.
   */
  def as[A <: Any](o: Any): A = o.asInstanceOf[A]
}
/**
* Receiver class, process input arguments sent by the controller
* using a driver reference to send requests to a listening
* Actorbase instance.
*
* @param params a map containing the parameters that are used
* for the methods.
*/
class CommandReceiver(params: Map[String, Any], driver: ActorbaseDriver) extends Helper {
/**
* Insert an item to the actorbase server.
*
* @return a String, "Item inserted" if the method succeeded, an error message is returned if the method failed
*/
def insert(): String = {
  // Optimistic default: if any of the four parameters is missing, the nested
  // maps simply do nothing and this message is still returned.
  // TODO confirm that reporting success for missing parameters is intended.
  var result = "Item inserted."
  params get "key" map { k =>
    params get "value" map { v =>
      params get "collection" map { c =>
        params get "update" map { u =>
          // Coerce numeric-looking string values into Int/Double; anything
          // else is stored as the raw string.
          val value = as[String](v) match {
            case integer if integer matches("""^\d+$""") => integer.toInt
            case double if double matches("""^\d+\.\d+""") => double.toDouble
            case _ => as[String](v)
          }
          try {
            val update = as[Boolean](u)
            // "owner.collection" syntax — presumably the prefix is the owner
            // passed as the curried argument; verify against the driver API.
            if (as[String](c) contains ".") {
              val collection = as[String](c).split("\\.")
              driver.insertTo(collection(1), update, (as[String](k) -> value))(collection(0))
            } else driver.insert(as[String](c), update, (as[String](k) -> value))
          }
          catch {
            case wce: WrongCredentialsExc => result = "Credentials privilege level does not meet criteria needed to perform this operation."
            case iec: InternalErrorExc => result = "There was an internal server error, something wrong happened."
            case dke: DuplicateKeyExc => result = "Key already stored"
          }
        }
      }
    }
  }
  result
}
/**
* Remove an item from the actorbase server.
*
* @return a String, "Item removed" if the method succeeded, an error message is returned if the method failed
*/
def removeItem() : String = {
  // Both "key" and "collection" must be present; otherwise nothing happens
  // and "Item removed" is still returned. TODO confirm this is intended.
  params get "key" map { ka =>
    params get "collection" map { ca =>
      val c = as[String](ca)
      val k = as[String](ka)
      try {
        // "owner.collection" addresses a collection belonging to another owner.
        if (c contains ".") {
          val collection = c.split("\\.")
          driver.removeFrom(collection(1), k)(collection(0))
        } else driver.remove(c, k)
      }
      catch {
        // NOTE: these are nonlocal returns — they exit removeItem itself
        // from inside the nested lambdas.
        case uce: UndefinedCollectionExc => return "Undefined collection."
        case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
        case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
      }
    }
  }
  "Item removed"
}
/**
* List the users registered on the server instance of Actorbase.
*
* @return a String containing the known users, or an error message if the method failed
*/
def listUsers(): String =
  try {
    // One user per line, each indented by a single space.
    "\n " + driver.listUsers.mkString("\n ")
  } catch {
    case wce: WrongCredentialsExc => "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => "There was an internal server error, something wrong happened."
    case uae: UsernameAlreadyExistsExc => "Username already exists in the system Actorbase"
  }
/**
* Find command, this method is used to search in the server instance of Actorbase.
* Based on the params of the class this method can:
* _search for a key in one or more collections;
* _return one or more collections;
*
* @return a String representing the output from the server instance of Actorbase
*/
def find(): String = {
  var response = ""
  try {
    params.get("key") match {
      case None =>
        params.get("collection") match {
          case None =>
            // No key, no collection: list all collections.
            response = driver.getCollections.toString
          case Some(c) =>
            // No key: dump each requested collection; "owner.collection"
            // addresses another owner's collection.
            as[List[String]](c).foreach(x => {
              if (x contains ".") {
                val splitted = x.split("\\.")
                response += driver.getCollection( splitted(1), splitted(0) ).toString+"\n"
              }
              else
                response += driver.getCollection( x ).toString+"\n"
            })
        }
      case Some(k) =>
        params.get("collection") match {
          case None =>
            // Key without collection: search the whole database.
            // Presumably x.head._2.head is the collection name and x.head._1
            // the owner — TODO confirm against the driver's listCollections shape.
            val allCollections = driver.listCollections map (x => x.head._2.head -> x.head._1)
            allCollections.foreach( x => {
              val obj = (driver.findFrom(k.asInstanceOf[String], x._1)(x._2))
              if(obj.nonEmpty)
                response += obj.toString+"\n"
            }
            )
          case Some(c) =>
            // Key with an explicit list of collections.
            c.asInstanceOf[List[String]].foreach{ x =>
              if(x contains "."){
                val collection = x.split("\\.")
                response += (driver.findFrom(k.asInstanceOf[String], collection(1))(collection(0))).toString+"\n"
              }
              else
                response += (driver.findFrom(k.asInstanceOf[String], x)() ).toString+"\n"
            }
        }
    }
  }
  catch {
    case uce: UndefinedCollectionExc => response = "Undefined collection"
    case wce: WrongCredentialsExc => response = "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => response = "There was an internal server error, something wrong happened."
  }
  response
}
/**
* This method is used to get help to the user, can give a generic help containing
* all the possible commands that the user can call or help about one specific command
*
* @return a String representing the help message
*/
def help(): String = {
  var result: String = "\n"
  // With a "command" parameter, print only the matching entry from
  // commands.conf; otherwise print every entry.
  params get "command" map { c =>
    ConfigFactory.load ("commands.conf").getConfig ("commands").entrySet.foreach {
      entry =>
        if(entry.getKey == c.toString) {
          // Left-pad the command name to 25 columns, then its description.
          result += f" ${
            entry.getKey
          }%-25s${
            entry.getValue.unwrapped
          }\n"
        }
    }
  } getOrElse {
    ConfigFactory.load ("commands.conf").getConfig ("commands").entrySet.foreach {
      entry =>
        result += f" ${
          entry.getKey
        }%-25s${
          entry.getValue.unwrapped
        }\n"
    }
  }
  // Nothing appended means the requested command was not found.
  if(result == "\n")
    result += "Command not found, to have a list of commands available type <help>"
  result
}
/**
* Create a collection in the server instance of Actorbase.
*
* @return a String, "Collection created" if the method succeeded, an error message is returned if the method failed
*/
def createCollection(): String = {
  // If "name" is missing the map body never runs and the success message is
  // returned with an empty name. TODO confirm this is intended.
  var name = ""
  params get "name" map { c =>
    name = as[String](c)
    try {
      driver.addCollection(name)
    } catch {
      // Nonlocal returns: exit createCollection from inside the lambda.
      case uc: UndefinedCollectionExc => return "Undefined collection"
      case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
      case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
    }
  }
  "collection " + name + " created"
}
/**
* List all the collections from the server instance of Actorbase.
*
* @return a String containing all the collections names the used has access to
*/
def listCollections(): String = {
  // Bytes-to-megabytes conversion factor.
  val divisor = 1024 * 1024
  val (header1, header2, header3) = ("OWNER", "COLLECTION", "SIZE")
  var list = f"\n $header1%-14s | $header2%14s | $header3%7s \n"
  list += " -------------------------------------------------\n"
  try {
    val collectionList = driver.listCollections
    if (collectionList.length > 0) {
      collectionList.foreach { c =>
        // Presumably c.head._1 is the owner, c.head._2.head the collection
        // name and c.head._2.last its size in bytes — TODO confirm shape.
        val mb = c.head._2.last.toDouble / divisor
        list += f" ${c.head._1}%-14s | ${c.head._2.head}%14s | ${mb}%.6f MB\n"
      }
    }
    else list = "No collections found"
  }
  catch {
    case wce: WrongCredentialsExc => list = "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => list = "There was an internal server error, something wrong happened."
  }
  list
}
/**
* Drop a collection in the server instance of Actorbase.
*
* @return a String, "Collection deleted" if the method succeeded, an error message is returned
* if the method failed
*/
def deleteCollection() : String = {
  // Default is returned unchanged when "collection" is missing.
  var response = "deleted"
  params get "collection" map { c =>
    try {
      // "owner.collection" addresses a collection of another owner.
      if(as[String](c) contains "."){
        val coll = as[String](c).split("\\.")
        driver.dropCollectionsFrom( coll(1) )( coll(0) )
      }
      else
        driver.dropCollections(as[String](c))
      response = as[String](c) + " " + response
    }
    catch {
      case uc: UndefinedCollectionExc => response = "Undefined collection."
      case wce: WrongCredentialsExc => response = "Credentials privilege level does not meet criteria needed to perform this operation."
      case iec: InternalErrorExc => response = "There was an internal server error, something wrong happened."
    }
  }
  response
}
/**
* Add a collaborator to a collection in the server instance of Actorbase.
*
* @return a String, "Collaborator added" if the method succeeded, an error message is returned
* if the method failed
*/
def addCollaborator() : String = {
  // Empty string is returned when any of the three parameters is missing.
  var result: String = ""
  params get "collection" map { c =>
    params get "username" map { u =>
      params get "permissions" map { p =>
        val collection = as[String](c)
        val username = as[String](u)
        // "ReadOnly" maps to false; any other value grants write permission.
        val permission = if (as[String](p) == "ReadOnly") false else true
        try {
          if (collection contains ".") {
            val coll = collection.split("\\.")
            driver.addContributorTo(username, coll(1), permission, coll(0))
          }
          else
            driver.addContributorTo(username, collection, permission)
          result = s"$username added to collection $collection"
        } catch {
          case wce: WrongCredentialsExc => result = "Credentials privilege level does not meet criteria needed to perform this operation."
          case iec: InternalErrorExc => result = "There was an internal server error, something wrong happened."
          case uue: UndefinedUsernameExc => result = "Contributor username not found."
          case uae: UsernameAlreadyExistsExc => result = "Contributor already added."
          case uc: UndefinedCollectionExc => result = "Undefined collection."
        }
      }
    }
  }
  result
}
/**
* Remove a collaborator from a collection in the server instance of Actorbase.
*
* @return a String, "username removed from collection" if the method succeeded, an error message is returned
* if the method failed
*/
def removeCollaborator() : String = {
  // Empty string is returned when "collection" or "username" is missing.
  var result: String = ""
  params get "collection" map { c =>
    params get "username" map { u =>
      val collection = as[String](c)
      val username = as[String](u)
      try {
        // "owner.collection" addresses a collection of another owner.
        if (collection contains ".") {
          val coll = collection.split("\\.")
          driver.removeContributorFrom(username, coll(1), coll(0))
        }
        else
          driver.removeContributorFrom(username, collection)
        result = s"$username removed from collection $collection"
      } catch {
        case wce: WrongCredentialsExc => result = "Credentials privilege level does not meet criteria needed to perform this operation."
        case iec: InternalErrorExc => result = "There was an internal server error, something wrong happened."
        case uue: UndefinedUsernameExc => result = "Contributor username not found."
        // NOTE(review): "Contributor already added." looks copy-pasted from
        // addCollaborator — an odd message for a removal; verify intent.
        case uae: UsernameAlreadyExistsExc => result = "Contributor already added."
        case uc: UndefinedCollectionExc => result = "Undefined collection."
      }
    }
  }
  result
}
/**
* Change the user password in the server instance of Actorbase.
*
* @return a String, "Password changed" if the method succeeded, an error message is returned
* if the method failed
*/
def changePassword() : String = {
  // Both "oldPsw" and "newPsw" must be present. "oldPsw" is only used as a
  // presence guard here — the driver call takes just the new password;
  // TODO confirm the server validates the old password out of band.
  params get "oldPsw" map { o =>
    params get "newPsw" map { n =>
      try {
        driver.changePassword(as[String](n))
      } catch {
        // Bug fix: these messages were previously computed as the value of
        // the inner lambda and silently discarded, so every failure was
        // reported as "Password changed". The returns below are nonlocal —
        // they exit changePassword itself, matching the sibling methods.
        case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
        case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
        case wnp: WrongNewPasswordExc => return "The password inserted does not meet Actorbase criteria"
        case uue: UndefinedUsernameExc => return "Wrong password"
      }
    }
  }
  "Password changed"
}
/**
* Add a user to the server instance of Actorbase. This operation needs Admin privileges
*
* @return a String, "User added" if the method succeeded, an error message is returned
* if the method failed
*/
def addUser() : String =
  try {
    // "username" is mandatory; a missing parameter fails fast on `.get`.
    val username = as[String](params.get("username").get)
    driver.addUser(username)
    username + " added to the system"
  } catch {
    case wce: WrongCredentialsExc => "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => "There was an internal server error, something wrong happened."
    case uae: UsernameAlreadyExistsExc => "Username already exists in the system Actorbase"
  }
/**
* Remove a user from the server instance of Actorbase. This operation needs Admin privileges
*
* @return a String, "User removed" if the method succeeded, an error message is returned
* if the method failed
*/
def removeUser() : String =
  try {
    // "username" is mandatory; a missing parameter fails fast on `.get`.
    val username = as[String](params.get("username").get)
    driver.removeUser(username)
    username + " removed from the system"
  } catch {
    case wce: WrongCredentialsExc => "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => "There was an internal server error, something wrong happened."
    case uue: UndefinedUsernameExc => "Undefined username: Actorbase does not contains such credential"
  }
/**
* Reset the password of a user in the server instance of Actorbase. This operation needs Admin privileges.
* The password is reset to the default Actorbase password: Actorb4se
*
* @return a String, "Password reset" if the method succeeded, an error message is returned
* if the method failed
*/
def resetPassword() : String =
  try {
    // "username" is mandatory; a missing parameter fails fast on `.get`.
    val user = as[String](params.get("username").get)
    driver.resetPassword(user)
    user + " password reset"
  } catch {
    case wce: WrongCredentialsExc => "Credentials privilege level does not meet criteria needed to perform this operation."
    case iec: InternalErrorExc => "There was an internal server error, something wrong happened."
    case uue: UndefinedUsernameExc => "Undefined username: Actorbase does not contains such credential"
    case uae: UsernameAlreadyExistsExc => "Username already exists in the system Actorbase"
  }
/**
* Export actorbase data into a file. Based on params this method can export:
* _one or more collections;
*
* @return a String, "Exported" if the method succeeded, an error message is returned
* if the method failed
*/
def export() : String = {
  // Destination path is mandatory; a missing "f_path" fails fast on `.get`.
  val path = params.get("f_path").get.asInstanceOf[String]
  var collList = List.empty[Tuple2[String,String]]
  params.get("p_list") match {
    case Some(c) =>
      try {
        val list = c.asInstanceOf[List[String]]
        list.foreach { x =>
          // "owner.collection" selects a collection of another owner; a bare
          // name is paired with the empty owner string.
          if (x contains ".") {
            val collection = as[String](x).split("\\.")
            collList ::= (collection(1) -> collection(0))
          }
          else
            collList ::= (x -> "")
        }
        driver.exportData(path, collList)()
      }
      catch {
        // Nonlocal returns exiting export itself.
        case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
        case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
        case uue: UndefinedUsernameExc => return "Undefined username: Actorbase does not contains such credential"
        case uae: UsernameAlreadyExistsExc => return "Username already exists in the system Actorbase"
      }
    case None =>
      try {
        // No explicit list: export everything accessible to the user.
        driver.exportData(path)()
      }
      catch {
        case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
        case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
        case uue: UndefinedUsernameExc => return "Undefined username: Actorbase does not contains such credential"
        case uae: UsernameAlreadyExistsExc => return "Username already exists in the system Actorbase"
      }
  }
  "exported"
}
/**
* Import data from a well formatted JSON file.
*
* @return a String, "imported" if the method succeded, an error message if it fails
*/
def importFrom(): String = {
  // If "path" is missing nothing happens and "imported" is still returned.
  // TODO confirm this is intended.
  params get "path" map { p =>
    try {
      driver.importData(as[String](p))
    }
    catch {
      // Nonlocal returns exiting importFrom itself.
      case fnfe: FileNotFoundException => return "File not found"
      case wce: WrongCredentialsExc => return "Credentials privilege level does not meet criteria needed to perform this operation."
      case iec: InternalErrorExc => return "There was an internal server error, something wrong happened."
      case mfe: MalformedFileExc => return "Malformed json file"
      case uun: UndefinedUsernameExc => return "Undefined username"
      case dk: DuplicateKeyExc => return "Duplicated key request found"
    }
  }
  "imported"
}
}
| ScalateKids/Actorbase-Client | src/main/scala/com/actorbase/cli/models/CommandReceiver.scala | Scala | mit | 20,465 |
/*
* Copyright © 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.messages.cmds
import rxmongo.bson._
import rxmongo.messages.{ AdminCommand, Index, IndexOptions, Command }
import rxmongo.messages.Query
/** listCollections
* @see [[http://docs.mongodb.org/master/reference/command/listCollections/]]
* @param db The database in which to list the collections
* @param filter A filter to trim the results
*/
// Wire form: { listCollections: 1, filter: <filter document> }
case class ListCollectionsCmd(db : String, filter : Query)
  extends Command(db, BSONObject("listCollections" → 1, "filter" → filter.result))
/** dropCollection
*
* @see [[http://docs.mongodb.org/master/reference/command/drop/]]
* @param db The database the contains the collection to be dropped
* @param coll The collection to be dropped
*/
// Wire form: { drop: <collection name> } — the collection name is bare, the
// database is carried by the Command envelope.
case class DropCollectionCmd(
  db : String,
  coll : String) extends Command(db, BSONObject("drop" → coll))
/** create - Explicitly creates a collection.
*
* @see [[http://docs.mongodb.org/master/reference/command/create/]]
* Create has the following form:
* {{{
* { create: <collection_name>,
* capped: <true|false>,
* autoIndexId: <true|false>,
* size: <max_size>,
* max: <max_documents>,
* flags: <0|1>
* }
* }}}
* @param db THe database in which to create the collection
* @param coll The collection name to create
* @param capped Whether this is a capped collection or not
* @param autoIndexId Whether to create the _id index automatically or not
* @param size The maximum collection size (in bytes) for capped collections
* @param max THe maximum number of documents for capped collections
* @param usePowerOf2Sizes Whether to use power of 2 sizes for space allocation
*/
// Bug fix: this command previously sent an empty BSONObject(), ignoring every
// parameter — not even the collection name was transmitted, so the server
// could not execute it. It now builds the documented wire form:
// { create: <coll>, capped: ..., autoIndexId: ..., size: ..., max: ..., flags: ... }
case class CreateCollectionCmd(
  db : String,
  coll : String,
  capped : Boolean = false,
  autoIndexId : Boolean = true,
  size : Option[Int] = None,
  max : Option[Int] = None,
  usePowerOf2Sizes : Int = 1) extends Command(db, {
  val b = BSONBuilder()
  b.string("create", coll)
  b.boolean("capped", capped)
  b.boolean("autoIndexId", autoIndexId)
  // Optional capped-collection limits are only sent when supplied.
  size.map { s ⇒ b.integer("size", s) }
  max.map { m ⇒ b.integer("max", m) }
  b.integer("flags", usePowerOf2Sizes)
  b.result
})
/** cloneCollection
* Copies a collection from a remote host to the current host.
* @see [[http://docs.mongodb.org/master/reference/command/cloneCollection/]]
* @param db The database name containing the collection to clone
* @param coll The name of the collection to clone
* @param host The host on which the source collection resides
* @param query A query to filter the documents cloned
*/
// cloneCollection addresses the source by its full "<db>.<coll>" namespace.
case class CloneCollectionCmd(db : String, coll : String, host : String, query : Query)
  extends AdminCommand(BSONObject("cloneCollection" → (db + "." + coll), "host" → host, "query" → query.result))
/** cloneCollectionAsCapped
* Copies a non-capped collection as a new capped collection.
* @param db The database name to operate on
* @param existing THe existing collection to clone documents from
* @param target The name of the new capped collection
* @param size The maximum size of the new capped collection (bytes not # of documents)
*/
// NOTE(review): both names are sent db-qualified here, while the MongoDB
// manual shows bare collection names for cloneCollectionAsCapped — verify
// against the target server version.
case class CloneCollectionAsCapped(db : String, existing : String, target : String, size : Int)
  extends AdminCommand(
    BSONObject("cloneCollectionAsCapped" → (db + "." + existing), "toCollection" → (db + "." + target), "size" → size)
  )
/** convertToCapped
* Converts a non-capped collection to a capped collection.
* @see [[http://docs.mongodb.org/master/reference/command/convertToCapped/]]
* @param db The database name to operate on
* @param coll The name of an existing collectiont to be converted to a capped collection
* @param size The maximum size, in bytes, of the new capped collection
*/
// NOTE(review): the collection is sent db-qualified, while the MongoDB manual
// shows a bare name for convertToCapped — verify against the server version.
case class ConvertToCapped(db : String, coll : String, size : Int)
  extends AdminCommand(
    BSONObject("convertToCapped" → (db + "." + coll), "size" → size)
  )
/** createIndexes
* @see [[http://docs.mongodb.org/master/reference/command/createIndexes/]]
* @param db The database containing the collection
* @param coll The name of the collection to get the indices
* @param indices The indices and their options to add to the collection
*/
case class CreateIndicesCmd(
  db : String,
  coll : String,
  indices : Iterable[(Index, IndexOptions)]) extends Command(db, {
  val bldr = BSONBuilder()
  bldr.string("createIndexes", coll)
  val objects = for ((keys, options) ← indices) yield {
    // Each index document needs a "key" pattern; a missing "name" is
    // synthesized from the namespace plus the index's own name.
    val obj = options.result + ("key" → keys.result)
    if (obj.contains("name"))
      obj
    else
      obj + ("name" -> BSONString(db + "." + coll + "." + keys.name))
  }
  bldr.array("indexes", objects)
  bldr.toBSONObject
})
/** listIndexes
* @see [[http://docs.mongodb.org/master/reference/command/listIndexes/]]
* @param db The database containing the collection
* @param coll THe collection for which indices should be listed
*/
// Wire form: { listIndexes: <collection name> }
case class ListIndicesCmd(
  db : String,
  coll : String) extends Command(db, BSONObject("listIndexes" → coll))
/** dropIndexes
* @see [[http://docs.mongodb.org/master/reference/command/dropIndexes/]]
* @param db The database containing the collection
* @param coll The collection on which the index should be dropped
* @param index The name of the index to drop
*/
// Wire form: { dropIndexes: <collection>, index: <index name> }
case class DropIndicesCmd(
  db : String,
  coll : String,
  index : String) extends Command(db, BSONObject("dropIndexes" → coll, "index" → index))
/** dropIndexes
*
* @see [[http://docs.mongodb.org/master/reference/command/dropIndexes/]]
* @param db The database containing the collection
* @param coll The collection on which all indexes should be dropped
*/
// "*" drops every index on the collection except the mandatory _id index.
case class DropAllIndicesCmd(db : String, coll : String)
  extends Command(db, BSONObject("dropIndexes" → coll, "index" → "*"))
/** compact
* Defragments a collection and rebuilds the indexes.
* @param db The name of the database.
* @param coll The name of the collection.
* @param force If true, compact can run on the primary in a replica set. If false, compact returns an error when
* run on a primary, because the command blocks all other activity. Compact blocks activity only for
* the database it is compacting.
* @param paddingFactor Describes the record size allocated for each document as a factor of the document size for
* all records compacted during the compact operation. The paddingFactor does not affect
* the padding of subsequent record allocations after compact completes.
* @param paddingBytes Sets the padding as an absolute number of bytes for all records compacted during the compact
* operation. After compact completes, paddingBytes does not affect the padding of subsequent
* record allocations.
*/
case class CompactCmd(
  db : String,
  coll : String,
  force : Option[Boolean] = None,
  paddingFactor : Option[Double] = None,
  paddingBytes : Option[Int] = None) extends Command(db, {
  val b = BSONBuilder()
  b.string("compact", coll)
  // Optional fields are appended only when supplied; `map` is used purely
  // for its side effect on the builder.
  force.map { f ⇒ b.boolean("force", f) }
  paddingFactor.map { pf ⇒ b.double("paddingFactor", pf) }
  paddingBytes.map { pb ⇒ b.integer("paddingBytes", pb) }
  b.result
})
/** collMod
* @see http://docs.mongodb.org/master/reference/command/collMod/
* @param db The database name
* @param coll The TTL collection name
* @param fieldName The name of the data typed field for the TTL index
* @param newExpireAfterSeconds The new value for expireAfterSeconds
*/
// collMod with an "index" subdocument: the TTL index is identified by its
// key pattern ({ <fieldName>: 1 }) and its expireAfterSeconds is replaced.
case class SetExpirationCmd(
  db : String,
  coll : String,
  fieldName : String,
  newExpireAfterSeconds : Int) extends Command(db,
  BSONObject(
    "collMod" → coll,
    "index" → BSONObject("keyPattern" → BSONObject(fieldName → 1), "expireAfterSeconds" → newExpireAfterSeconds)
  )
)
/** collMod
* @see [[http://docs.mongodb.org/master/reference/command/collMod/]]
* @param db The database name
* @param coll The collection to have no padding
*/
// Wire form: { collMod: <collection>, noPadding: 1 }
case class SetNoPaddingCmd(
  db : String,
  coll : String) extends Command(db, BSONObject("collMod" → coll, "noPadding" → 1))
/** reindex
* @see [[http://docs.mongodb.org/master/reference/command/reIndex/]]
* @param db The database name
* @param coll The name of the collection to reindex
*/
// Bug fix: MongoDB command names are case sensitive and the rebuild-indexes
// command is spelled "reIndex" (see the linked manual page); the previous
// all-lowercase "reindex" is not a recognized command.
case class ReIndexCmd(
  db : String,
  coll : String) extends Command(db, BSONObject("reIndex" → coll))
/** touch
* @see [[http://docs.mongodb.org/master/reference/command/touch/]]
* @param db The database name
* @param coll The collection name to touch
* @param data Whether to touch the data (documents)
* @param index Whether to touch the index
*/
// NOTE(review): per the MongoDB manual, touch requires at least one of
// data/index to be true; with the defaults (both false) the server is
// expected to reject the command — callers should set one explicitly.
case class TouchCmd(
  db : String,
  coll : String,
  data : Boolean = false,
  index : Boolean = false) extends Command(db, BSONObject("touch" -> coll, "data" -> data, "index" -> index))
| reactific/RxMongo | messages/src/main/scala/rxmongo/messages/cmds/DatabaseCommands.scala | Scala | mit | 9,672 |
/*
* Copyright (c) 2015 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 1/4/15 4:20 PM
*/
package base.socket.api.impl
import base.common.lib.{ Actors, Dispatchable }
import base.common.logging.Loggable
import base.common.test.Tags
import base.socket.api.SocketApiService
import base.socket.test.SocketBaseSuite
import org.json4s.DefaultFormats
/**
 * Responsible for testing Server startup - highest level integration test possible
 * @author rconrad
 */
class SocketApiServiceImplTest extends SocketBaseSuite with Dispatchable with Loggable {

  // json4s requires implicit Formats in scope for any (de)serialization the suite performs.
  implicit def json4sFormats = DefaultFormats

  test("server startup", Tags.SLOW) {
    implicit val system = Actors.actorSystem
    implicit val timeout = longTimeout
    // Boot the socket API service and block until startup completes.
    assert(SocketApiService().start().await())
    // Placeholder: the test fails on purpose until a real client round-trip
    // (connect + heartbeat/ping-pong) is implemented.
    fail("need to connect to server and send a heartbeat or ping/pong or something")
  }

}
| robconrad/base-api | project-socket/src/test/scala/base/socket/api/impl/SocketApiServiceImplTest.scala | Scala | mit | 1,002 |
/**
 * Extend the serializable Person class so that it keeps a person's friends in a collection.
 * Construct some Person objects, make some of them friends of each other, then save an
 * Array[Person] to a file. Read the array back from the file and verify that the
 * friendship relations are intact.
 */
import collection.mutable.ArrayBuffer
import java.io.{ObjectInputStream, FileOutputStream, FileInputStream, ObjectOutputStream}
/** A serializable person who keeps a list of friends (exercise 9.10). */
class Person(var name: String) extends Serializable {

  /** Friends added so far, in insertion order. */
  val friends = new ArrayBuffer[Person]()

  /** Registers `friend` in this person's friend list. */
  def addFriend(friend: Person): Unit = {
    friends += friend
  }

  /** Renders the name plus each friend's name, every friend followed by a comma. */
  override def toString(): String =
    "My name is " + name + " and my friends name is " + friends.map(_.name + ",").mkString
}
object Test extends App {
  // Build a small friendship graph: Ivan knows F2 and F3.
  val p1 = new Person("Ivan")
  val p2 = new Person("F2")
  val p3 = new Person("F3")
  p1.addFriend(p2)
  p1.addFriend(p3)
  println(p1)

  // Serialize p1 (friends included) to disk.
  // Fix: close the streams in `finally` — the original never closed `in`,
  // and skipped `out.close()` if writeObject threw.
  val out = new ObjectOutputStream(new FileOutputStream("person.obj"))
  try out.writeObject(p1) finally out.close()

  // Read the object back and check the friend relations survived the round trip.
  val in = new ObjectInputStream(new FileInputStream("person.obj"))
  val p = try in.readObject().asInstanceOf[Person] finally in.close()
  println(p)
}
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
/**
 * Indicates the follower or the future replica received records from the leader (or current
 * replica) with first offset less than expected next offset.
 * @param message Description of the offset mismatch, passed to [[RuntimeException]]
 * @param firstOffset The first offset of the records to append
 * @param lastOffset The last offset of the records to append
 */
class UnexpectedAppendOffsetException(val message: String,
                                      val firstOffset: Long,
                                      val lastOffset: Long) extends RuntimeException(message) {
}
| KevinLiLu/kafka | core/src/main/scala/kafka/common/UnexpectedAppendOffsetException.scala | Scala | apache-2.0 | 1,334 |
package com.joshcough.minecraft
import org.bukkit.Server
import org.bukkit.entity.Player
import org.bukkit.event.{Listener, Event}
import org.bukkit.plugin.java.JavaPlugin
import util.Try
import java.util.logging.{Level, Logger}
import javax.persistence.PersistenceException
/**
 * The base class that helps make writing Bukkit plugins vastly easier.
 * However, it's unlikely that you'll subclass ScalaPlugin directly. It's
 * far more likely that you'll subclass com.joshcough.minecraft.CommandsPlugin,
 * com.joshcough.minecraft.ListenersPlugin, or both.
 */
abstract class ScalaPlugin extends JavaPlugin with BukkitEnrichment { scalaPlugin =>

  // Shared Minecraft logger; all log* helpers below write through it.
  lazy val log = Logger.getLogger("Minecraft")

  // setup stuff
  override def onEnable: Unit = {
    super.onEnable
    this.saveDefaultConfig
    setupDatabase
    logInfo(s"$name enabled!")
  }
  override def onDisable: Unit = { super.onDisable; logInfo(s"$name disabled!") }

  /**
   * A list of dependencies that this plugin depends on.
   * JcdcPluginFactory is automatically included, which contains Scala, Clojure, and
   * all of the classes in com.joshcough.minecraft.
   * See http://wiki.bukkit.org/Plugin_YAML for more info
   */
  def dependencies: List[String] = Nil

  // the ScalaLibPlugin provides Scala at runtime.
  // the ScalaPluginAPI provides com.joshcough.minecraft.* classes at runtime
  private val mandatoryDependencies = List("ScalaLibPlugin", "ScalaPluginAPI")

  /**
   * A list of all the soft dependencies for this plugin.
   * See http://wiki.bukkit.org/Plugin_YAML for more info
   */
  def softDependencies: List[String] = Nil

  // TODO: is there a real yml data type i could use?
  // Key/value pairs written verbatim into config.yml by writeYML.
  def configs: Map[String, String] = Map()

  /**
   * Classes that want to use a database should override this def, providing
   * all of the Entity classes. See WarpPlugin in examples.
   */
  def dbClasses: List[Class[_]] = Nil

  // this is here just so subclasses dont have to use java.util.ArrayList.
  override def getDatabaseClasses = new java.util.ArrayList[Class[_]](){ dbClasses.foreach(add) }

  // this is horrible bukkit nonsense that every plugin must do if it wants to use the database.
  private def setupDatabase: Unit =
    if(dbClasses.nonEmpty)
      // this somehow forces attempting to initialize the database
      try getDatabase.find(dbClasses.head).findRowCount
      // and if it throws... that means you haven't yet initialized the db,
      // and you need to call installDLL...
      // really, this is just crap. happy to hide it from any users.
      catch{ case e: PersistenceException => logTask("Installing DB"){ installDDL() } }

  /**
   * Generates the plugin.yml contents for this plugin.
   * See http://wiki.bukkit.org/Plugin_YAML for more info
   * @param author the author of the plugin
   * @param version the version of the plugin
   **/
  def yml(author:String, version: String) = List(
    "name: " + this.name,
    "main: " + this.getClass.getName,
    "author: " + author,
    "version: " + version,
    "database: " + (this.dbClasses.size > 0),
    "depend: [" + (mandatoryDependencies ++ this.dependencies).mkString(", ") + "]",
    "softdepend: [" + this.softDependencies.mkString(", ") + "]"
    // NOTE(review): "\\n" is a literal backslash-n, not a newline — confirm this
    // is what plugin.yml consumers expect, or whether "\n" was intended.
  ).mkString("\\n")

  /**
   * Writes out the plugin.yml file, and config.yml.
   * @param author the author of the plugin
   * @param version the version of the plugin
   */
  def writeYML(author: String, version: String, outputDir: String = "."): Unit = {
    val resources = new java.io.File(outputDir)
    resources.mkdirs
    // Small helper: dump `contents` into outputDir/filename.
    def write(contents: String, filename:String): Unit = {
      val f = new java.io.FileWriter(new java.io.File(resources, filename))
      f.write(contents)
      f.close
    }
    write(yml(author, version), "plugin.yml")
    write(configs.toList.map{ case (k, v) => s"$k: $v" }.mkString("\\n"), "config.yml")
  }

  /**
   * Broadcast a message to the world.
   * The name of the plugin is prepended to the given message, like so:
   *   [plugin-name] - message
   */
  def broadcast(message:String): Unit = server.broadcastMessage(s"[$name] - $message")

  /**
   * Log the given message at INFO level.
   */
  def logInfo(message:String): Unit = logMessage(Level.INFO, message)

  /**
   * Log the given message at WARNING level.
   */
  def logWarning(message:String): Unit = logMessage(Level.WARNING, message)

  /**
   * Log the given exception at SEVERE level.
   */
  def logError(e:Throwable): Unit = logMessage(Level.SEVERE, e.getMessage + e.getStackTraceString)

  // All log helpers funnel through here so messages share the "[name] - " prefix.
  private def logMessage(level: Level, message: String): Unit =
    log.log(level, s"[$name] - $message")

  /**
   * Log around the given task like so:
   *   'Starting - message'
   *   f
   *   'Finished - message'
   */
  def logTask[T](message:String)(f: => T): T = {
    logInfo(s"Starting: $message"); val t = f; logInfo(s"Finished: $message"); t
  }

  // Various other little helper functions.
  def name = Try(this.getDescription.getName).getOrElse(this.getClass.getSimpleName)
  def server: Server = getServer
  def pluginManager = getServer.getPluginManager
  def fire(e:Event): Unit = server.getPluginManager.callEvent(e)
  def registerListener(listener:Listener): Unit = pluginManager.registerEvents(listener, this)

  // task stuff:
  private lazy val scheduler = server.getScheduler
  // Thin wrapper around Bukkit's integer task ids.
  case class Task(id:Int)
  def scheduleSyncTask(task: => Unit): Task =
    Task(scheduler.scheduleSyncDelayedTask(this, task))
  def scheduleSyncDelayedTask(initialDelay: Long)(task: => Unit): Task =
    Task(scheduler.scheduleSyncDelayedTask(this, task, initialDelay))
  def scheduleSyncRepeatingTask(period: Long)(task: => Unit): Task =
    Task(scheduler.scheduleSyncRepeatingTask(this, task, 0L, period))
  def scheduleSyncRepeatingTask(initialDelay: Long, period: Long)(task: => Unit): Task =
    Task(scheduler.scheduleSyncRepeatingTask(this, task, initialDelay, period))
  def cancelTask(t: Task) = scheduler cancelTask t.id

  // Tracks the tasks scheduled on behalf of each player; optionally cancels
  // them all when the player quits.
  case class PlayerTasks(cancelOnExit: Boolean = true) extends PlayerState[Seq[Task]] { self =>
    override val default: Option[Seq[Task]] = Some(Nil)
    registerListener(Listeners.OnPlayerQuit((p, _) => if(cancelOnExit) p.cancelAll))
    implicit class PlayerWithTaskFunctions(p:Player){
      // Record t against p's task list and return it.
      private def addTask(t: Task): Task = { self += (p -> (self(p) :+ t)); t }
      def scheduleSyncTask(task: => Unit): Task = addTask(scalaPlugin.scheduleSyncTask(task))
      def scheduleSyncRepeatingTask(initialDelay: Long, period: Long)(task: => Unit): Task =
        addTask(scalaPlugin.scheduleSyncRepeatingTask(initialDelay, period)(task))
      def cancelTask(t: Task): Unit = {
        scheduler cancelTask t.id
        self += (p -> self(p).filter(_ != t))
      }
      def cancelAll: Unit = {
        logInfo(s"canceling all tasks for: $p")
        (self -= p) foreach { t =>
          logInfo(s"canceling: $t")
          scheduler cancelTask t.id
        }
      }
    }
  }

  /**
   * Invokes a command programmatically.
   */
  def runCommand(p: Player, commandName: String, args: Seq[String]) = {
    p ! s"$name running: $commandName ${args.mkString(" ")}"
    onCommand(p, getCommand(commandName), commandName, args.toArray)
  }
}
/**
 * This plugin and this code is not intended for use.
 * It is just an empty plugin that is required for turning this library
 * into a plugin, so that the API and Scala can be on the classpath for
 * plugins that want to use this API.
 */
class ScalaPluginAPI extends org.bukkit.plugin.java.JavaPlugin {
  // Intentionally does nothing: the plugin exists only to carry the jar.
  override def onEnable : Unit = {}
  override def onDisable : Unit = {}
}
| JunctionAt/JunctionAPI | src/main/scala/com/joshcough/minecraft/ScalaPlugin.scala | Scala | agpl-3.0 | 7,639 |
// lchannels - session programming in Scala
// Copyright (c) 2016, Alceste Scalas and Imperial College London
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
/** @author Alceste Scalas <alceste.scalas@imperial.ac.uk> */
package lchannels.examples.sleepingbarber.demo
/** Local demo: a barbershop with 4 waiting seats serving 26 named customers. */
object Local extends App {
  // Helper method to ease external invocation
  def run() = main(Array())

  import scala.concurrent.duration._
  implicit val timeout = 10.seconds

  val customers = List(
    "Alice", "Bob", "Carol", "Dave", "Erin", "Frank", "George", "Horace",
    "Isabelle", "John", "Karen", "Louis", "Mark", "Nick", "Oliver", "Paul",
    "Quentin", "Rebecca", "Sarah", "Thomas", "Ursula", "Valeri", "William",
    "Xavier", "Yasemin", "Zacharias"
  )
  val maxSeats = 4

  val shop = lchannels.examples.sleepingbarber.barbershop.Shop(maxSeats)

  // Spawn one Customer per name, then wait for every one of them to finish.
  val customerObjs = customers.map { name =>
    new lchannels.examples.sleepingbarber.customer.Customer(name, shop)
  }
  customerObjs.foreach(_.join())

  println("*** All customers served - shutting down barbershop")
  shop.quit()
}
| scribble/scribble.github.io | src/main/jbake/assets/docs/lchannels/examples/src/main/scala/lchannels/examples/sleepingbarber/Demo.scala | Scala | apache-2.0 | 2,368 |
package im.actor.server.activation.gate
import scala.concurrent.{ ExecutionContext, Future }
import scalaz.{ -\/, \/, \/- }
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods.{ GET, POST }
import akka.http.scaladsl.model.{ HttpRequest, Uri }
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.Materializer
import play.api.libs.json.Json
import slick.dbio.DBIO
import slick.driver.PostgresDriver.api._
import im.actor.server.activation.Activation.Code
import im.actor.server.activation.internal.CodeActivation
import im.actor.server.activation.{ InvalidHash, ValidationResponse }
import im.actor.server.persist
/**
 * Activation-code backend that delegates sending and validating codes to an
 * external "gate" HTTP service, authenticated via the X-Auth-Token header.
 */
class GateCodeActivation(config: GateConfig)(
  implicit
  db: Database,
  system: ActorSystem,
  val materializer: Materializer,
  ec: ExecutionContext
) extends CodeActivation with JsonImplicits {

  private[this] val http = Http()

  // POSTs the code to the gate service; on success stores the returned code
  // hash keyed by the transaction hash, on service error yields the message.
  override def send(optTransactionHash: Option[String], code: Code): DBIO[String \/ Unit] =
    optTransactionHash.map { transactionHash ⇒
      val codeResponse: Future[CodeResponse] = for {
        resp ← http.singleRequest(
          HttpRequest(
            method = POST,
            uri = s"${config.uri}/v1/codes/send",
            entity = Json.toJson(code).toString
          ).withHeaders(`X-Auth-Token`(config.authToken))
        )
        codeResp ← Unmarshal(resp).to[CodeResponse]
      } yield codeResp

      val action = for {
        codeResponse ← DBIO.from(codeResponse)
        result ← codeResponse match {
          case CodeHash(hash) ⇒
            for (_ ← persist.auth.GateAuthCode.create(transactionHash, hash)) yield \/-(())
          case CodeError(message) ⇒
            DBIO.successful(-\/(message))
        }
      } yield result
      action
    } getOrElse (throw new Exception("transactionHash should be defined for new transaction methods"))

  // Looks up the stored code hash and asks the gate service to validate the
  // user-supplied code against it; unknown hashes short-circuit to InvalidHash.
  override def validate(transactionHash: String, code: String): Future[ValidationResponse] = {
    for {
      optCodeHash ← db.run(persist.auth.GateAuthCode.find(transactionHash))
      validationResponse ← optCodeHash map { codeHash ⇒
        val validationUri = Uri(s"${config.uri}/v1/codes/validate/$codeHash").withQuery("code" → Json.toJson(code).toString)
        for {
          response ← http.singleRequest(HttpRequest(GET, validationUri).withHeaders(`X-Auth-Token`(config.authToken)))
          vr ← Unmarshal(response).to[ValidationResponse]
        } yield vr
      } getOrElse Future.successful(InvalidHash)
    } yield validationResponse
  }

  // Removes the stored transaction-hash → code-hash mapping.
  override def finish(transactionHash: String): DBIO[Unit] = persist.auth.GateAuthCode.delete(transactionHash).map(_ ⇒ ())
}
} | JamesWatling/actor-platform | actor-server/actor-activation/src/main/scala/im/actor/server/activation/gate/GateCodeActivation.scala | Scala | mit | 2,749 |
package scalariform.lexer
/** Lexer state: whether the scanner is currently inside an XML tag. */
sealed trait TagState
/** Currently lexing an XML start tag. */
case object InStartTag extends TagState
/** Currently lexing an XML end tag. */
case object InEndTag extends TagState
/** Not inside a tag. */
case object Normal extends TagState
| mdr/scalariform | scalariform/src/main/scala/scalariform/lexer/TagState.scala | Scala | mit | 164 |
package be.mygod.view
import android.animation.{Animator, AnimatorListenerAdapter}
import android.content.Context
import android.view.View
/**
 * View animation utilities.
 * @author Mygod
 */
object AnimationHelper {
  /** Fades `to` in while fading `from` out; hides `from` once its fade-out ends. */
  def crossFade(context: Context, from: View, to: View): Unit = {
    def shortAnimTime = context.getResources.getInteger(android.R.integer.config_shortAnimTime)
    // Incoming view: start fully transparent and visible, then animate to opaque.
    to.setAlpha(0)
    to.setVisibility(View.VISIBLE)
    to.animate().alpha(1).setDuration(shortAnimTime)
    // Outgoing view: fade to transparent, then drop it from the layout.
    val hideWhenDone = new AnimatorListenerAdapter {
      override def onAnimationEnd(animation: Animator): Unit = from.setVisibility(View.GONE)
    }
    from.animate().alpha(0).setDuration(shortAnimTime).setListener(hideWhenDone)
  }
}
| Mygod/mygod-lib-android | src/main/scala/be/mygod/view/AnimationHelper.scala | Scala | gpl-3.0 | 655 |
/*
* Copyright (c) 2018. Lorem ipsum dolor sit amet, consectetur adipiscing elit.
* Morbi non lorem porttitor neque feugiat blandit. Ut vitae ipsum eget quam lacinia accumsan.
* Etiam sed turpis ac ipsum condimentum fringilla. Maecenas magna.
* Proin dapibus sapien vel ante. Aliquam erat volutpat. Pellentesque sagittis ligula eget metus.
* Vestibulum commodo. Ut rhoncus gravida arcu.
*/
package com.wallace.demo.app.utils
import java.util.Properties
import com.jcraft.jsch.{ChannelSftp, JSch}
import com.wallace.demo.app.common.Using
import org.apache.commons.net.ftp.{FTP, FTPClient}
import scala.util.{Failure, Success, Try}
/**
 * Created by wallace on 2018/7/5.
 *
 * Connection settings for an FTP/SFTP endpoint.
 * @param hostIp   server host or IP address
 * @param port     server port
 * @param userName login user
 * @param passWord login password
 * @param timeOut  value passed to JSch `Session.setTimeout` (default 6000)
 * @param ftpType  JSch channel type to open, e.g. "sftp" (default)
 */
case class FtpMetaData(hostIp: String, port: Int, userName: String, passWord: String, timeOut: Int = 6000, ftpType: String = "sftp")
class FtpUtils(ftpMetadata: FtpMetaData) extends Using {
  /** Workaround for connections not being released under concurrent multi-threaded use. */
  // NOTE(review): sftpLocal is never read in this class — confirm it is still needed.
  private val sftpLocal: ThreadLocal[FtpUtils] = new ThreadLocal[FtpUtils]()

  /**
   * Connect to the SFTP server and open a channel of type `ftpMetadata.ftpType`.
   *
   * @return Some(channel) on success, None if the session or channel fails
   */
  def createSftpChannel(): Option[ChannelSftp] = {
    Try(new JSch().getSession(ftpMetadata.userName, ftpMetadata.hostIp, ftpMetadata.port)).flatMap {
      session =>
        if (ftpMetadata.passWord != null) session.setPassword(ftpMetadata.passWord)
        val config: Properties = new Properties()
        // Skip host-key verification (no known_hosts check).
        config.put("StrictHostKeyChecking", "no")
        session.setConfig(config)
        session.setTimeout(ftpMetadata.timeOut)
        log.debug("sftp session connected")
        log.debug("opening channel")
        Try {
          session.connect()
          val channel = session.openChannel(ftpMetadata.ftpType).asInstanceOf[ChannelSftp]
          channel.connect()
          channel
        }
    }
  } match {
    case Success(ch) => Some(ch)
    case Failure(e) =>
      log.error("Failed to login sftp server", e)
      None
  }

  /**
   * Connect and log in to a plain FTP server in binary transfer mode.
   *
   * @return Some(client) on success, None if connect/login fails
   */
  def createFtpClient(): Option[FTPClient] = {
    Try(new FTPClient()).flatMap {
      client =>
        Try {
          client.connect(ftpMetadata.hostIp, ftpMetadata.port)
          client.setControlEncoding("UTF-8")
          client.login(ftpMetadata.userName, ftpMetadata.passWord)
        }.flatMap {
          _ =>
            Try {
              client.setFileTransferMode(FTP.BINARY_FILE_TYPE)
              //client.listFiles().foreach(file => (file.getName, file.isFile, file.getTimestamp))
              client
            }
        }
    } match {
      case Success(client) => Some(client)
      case Failure(e) =>
        log.error("Failed to create FTP client", e)
        None
    }
  }
}
| BiyuHuang/CodePrototypesDemo | demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FtpUtils.scala | Scala | apache-2.0 | 2,620 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.http.errorhandling
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc.Action
import play.api.test._
import scala.reflect.ClassTag
class ScalaErrorHandling extends PlaySpecification with WsTestClient {

  // Builds an app configured to use the given error-handler class, with a
  // /error route that always throws so onServerError is exercised.
  def fakeApp[A](implicit ct: ClassTag[A]) = {
    GuiceApplicationBuilder()
      .configure("play.http.errorHandler" -> ct.runtimeClass.getName)
      .routes {
        case (_, "/error") => Action(_ => throw new RuntimeException("foo"))
      }
      .build()
  }

  "scala error handling" should {
    "allow providing a custom error handler" in new WithServer(fakeApp[root.ErrorHandler]) {
      await(wsUrl("/error").get()).body must_== "A server error occurred: foo"
    }

    "allow extending the default error handler" in {
      import play.api._
      import play.api.routing._
      import javax.inject.Provider
      def errorHandler(mode: Mode) = new default.ErrorHandler(
        Environment.simple(mode = mode), Configuration.empty, new OptionalSourceMapper(None),
        new Provider[Router] { def get = Router.empty }
      )
      def errorContent(mode: Mode) =
        contentAsString(errorHandler(mode).onServerError(FakeRequest(), new RuntimeException("foo")))

      // Prod hides details behind the generic message; Dev renders a full error page.
      errorContent(Mode.Prod) must startWith("A server error occurred: ")
      errorContent(Mode.Dev) must not startWith("A server error occurred: ")
    }
  }
}
package root {
  // Documentation sample: custom HttpErrorHandler implemented from scratch.
  // The //#root markers delimit the snippet included in the Play docs —
  // keep everything between them unchanged.
  //#root
  import play.api.http.HttpErrorHandler
  import play.api.mvc._
  import play.api.mvc.Results._
  import scala.concurrent._
  import javax.inject.Singleton;

  @Singleton
  class ErrorHandler extends HttpErrorHandler {
    def onClientError(request: RequestHeader, statusCode: Int, message: String) = {
      Future.successful(
        Status(statusCode)("A client error occurred: " + message)
      )
    }

    def onServerError(request: RequestHeader, exception: Throwable) = {
      Future.successful(
        InternalServerError("A server error occurred: " + exception.getMessage)
      )
    }
  }
  //#root
}
package default {
  // Documentation sample: extending Play's DefaultHttpErrorHandler to override
  // just the prod server-error and forbidden responses.
  // The //#default markers delimit the snippet included in the Play docs —
  // keep everything between them unchanged.
  //#default
  import javax.inject._

  import play.api.http.DefaultHttpErrorHandler
  import play.api._
  import play.api.mvc._
  import play.api.mvc.Results._
  import play.api.routing.Router
  import scala.concurrent._

  @Singleton
  class ErrorHandler @Inject() (
      env: Environment,
      config: Configuration,
      sourceMapper: OptionalSourceMapper,
      router: Provider[Router]
    ) extends DefaultHttpErrorHandler(env, config, sourceMapper, router) {

    override def onProdServerError(request: RequestHeader, exception: UsefulException) = {
      Future.successful(
        InternalServerError("A server error occurred: " + exception.getMessage)
      )
    }

    override def onForbidden(request: RequestHeader, message: String) = {
      Future.successful(
        Forbidden("You're not allowed to access this resource.")
      )
    }
  }
  //#default
}
| wsargent/playframework | documentation/manual/working/scalaGuide/main/http/code/ScalaErrorHandling.scala | Scala | apache-2.0 | 2,921 |
package com.rayrobdod.script
package parser
import java.io.{StringWriter, StringReader}
import scala.collection.immutable.{Seq, Set, Map}
import com.codecommit.antixml.{Elem, Selector, Text,
QName, Node, Attributes => XmlAttrs, XML}
import org.scalatest.{FunSuite, FunSpec}
import org.scalatest.prop.PropertyChecks
import parser.{AttrsToUseFun => BaseAttrsToUseFun}
import com.rayrobdod.scriptSample.{SampleScriptFromXml, SetName, SetGender}
class AggregateScriptFromXmlTest extends FunSpec {
  // Attribute filter that unconditionally accepts every element.
  object AlwaysUse extends BaseAttrsToUseFun[Any] {
    def apply(attrs:XmlAttrs) = constTrue
  }
  val baseUrl = new java.net.URL("http", "localhost", "DoNotCare")

  // Device under test: an aggregate parser combining the base and sample parsers.
  val dut = new AggregateScriptFromXml(
    BaseScriptFromXml,
    SampleScriptFromXml
  )

  describe ("Basic XML -> Object serialization") {
    it ("<group><setName /><setGender /></group>") {
      val input:Elem = XML.fromString("<group><setName /><setGender /></group>")
      val expected = Group(Seq(SetName, SetGender))

      assertResult(true){dut.isDefinedAt(input)}
      assertResult(expected){dut(AlwaysUse, input, baseUrl, dut)}
    }
    it ("<thisElementDoesNotExist />") {
      // Unknown element names must be rejected, and applying anyway must throw.
      val input:Elem = XML.fromString("<thisElementDoesNotExist />")

      assertResult(false){dut.isDefinedAt(input)}
      intercept[IllegalArgumentException] {
        dut(AlwaysUse, input, baseUrl, dut)
      }
    }
  }
}
| rayrobdod/script | src/test/scala/com/rayrobdod/script/AggregateScriptFromXmlTest.scala | Scala | bsd-3-clause | 1,349 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package integration.security
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import com.ibm.spark.kernel.protocol.v5.security.SignatureManagerActor
import com.ibm.spark.kernel.protocol.v5.{KernelMessage, _}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import play.api.libs.json.Json
import scala.concurrent.duration._
// Actor-system configuration for the spec: keep test logging quiet.
object SignatureManagerActorSpecForIntegration {
  val config = """
    akka {
      loglevel = "WARNING"
    }"""
}
class SignatureManagerActorSpecForIntegration extends TestKit(
  ActorSystem(
    "SignatureManagerActorSpec",
    ConfigFactory.parseString(SignatureManagerActorSpecForIntegration.config)
  )
) with ImplicitSender with FunSpecLike with Matchers with BeforeAndAfter
{
  private val IncomingMessageType = "d" // Needed for valid signature
  private val sigKey = "12345"
  // Expected HMAC for goodIncomingMessage under sigKey.
  private val signature =
    "1c4859a7606fd93eb5f73c3d9642f9bc860453ba42063961a00d02ed820147b5"

  // A message whose signature field matches its contents.
  private val goodIncomingMessage =
    KernelMessage(
      List(), signature,
      Header("a", "b", "c", IncomingMessageType, "e"),
      ParentHeader("f", "g", "h", "i", "j"),
      Metadata(),
      "<STRING>"
    )
  // Same contents but with a deliberately wrong signature.
  private val badIncomingMessage =
    KernelMessage(
      List(), "wrong signature",
      Header("a", "b", "c", IncomingMessageType, "e"),
      ParentHeader("f", "g", "h", "i", "j"),
      Metadata(),
      "<STRING>"
    )

  private var signatureManager: ActorRef = _
  private var signatureManagerWithNoIncoming: ActorRef = _

  before {
    signatureManager =
      system.actorOf(Props(
        classOf[SignatureManagerActor], sigKey
      ))

    signatureManagerWithNoIncoming =
      system.actorOf(Props(
        classOf[SignatureManagerActor], sigKey
      ))
  }

  after {
    signatureManager = null
  }

  describe("SignatureManagerActor") {
    describe("#receive") {
      describe("when receiving an incoming message") {
        it("should return true if the signature is valid") {
          // The signed blob is header, parent header, metadata, then content.
          val blob =
            Json.stringify(Json.toJson(goodIncomingMessage.header)) ::
            Json.stringify(Json.toJson(goodIncomingMessage.parentHeader)) ::
            Json.stringify(Json.toJson(goodIncomingMessage.metadata)) ::
            goodIncomingMessage.contentString ::
            Nil
          signatureManager ! ((goodIncomingMessage.signature, blob))
          expectMsg(true)
        }

        it("should return false if the signature is invalid") {
          val blob =
            Json.stringify(Json.toJson(badIncomingMessage.header)) ::
            Json.stringify(Json.toJson(badIncomingMessage.parentHeader)) ::
            Json.stringify(Json.toJson(badIncomingMessage.metadata)) ::
            badIncomingMessage.contentString ::
            Nil
          signatureManager ! ((badIncomingMessage.signature, blob))
          expectMsg(false)
        }
      }

      describe("when receiving an outgoing message") {
        it("should insert a valid signature into the message and return it") {
          // Sending to signature manager that has no incoming messages
          signatureManagerWithNoIncoming ! badIncomingMessage

          val newKernelMessage =
            receiveOne(5.seconds).asInstanceOf[KernelMessage]

          newKernelMessage.signature should be (signature)
        }
      }
    }
  }
}
| bpburns/spark-kernel | kernel/src/test/scala/integration/security/SignatureManagerActorSpecForIntegration.scala | Scala | apache-2.0 | 3,978 |
package com.github.blemale.fuse
import java.time.temporal.ChronoUnit.SECONDS
import java.time.{ Duration, Instant, ZoneId }
import java.util.Optional
import java.util.concurrent.{ CompletableFuture, CountDownLatch, TimeUnit }
import java.util.function.Supplier
import com.github.blemale.fuse.CircuitBreaker.CircuitBreakerOpenException
import com.github.blemale.fuse.TestKit.TestClock
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ ShouldMatchers, WordSpec }
import scala.compat.java8.FunctionConverters._
import scala.compat.java8.FutureConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.util.Try
class CircuitBreakerSpec extends WordSpec with ShouldMatchers with ScalaFutures {
  import CircuitBreakerSpec._

  val clock = TestClock(Instant.now(), ZoneId.systemDefault())

  "CircuitBreaker" when {
    "using sync API" should {
      "allow execution when condition true" in {
        // Latch counts state-transition callbacks so we know the breaker
        // has processed both calls before asserting.
        val latch = new CountDownLatch(2)
        val circuitBreaker =
          new CircuitBreaker(
            new Condition.FailureCount(2),
            Duration.of(1, SECONDS),
            clock,
            Optional.of(latch)
          )

        // One failure out of two calls — below the FailureCount(2) threshold.
        Try(circuitBreaker.execute(asJava(() => throw new RuntimeException)))
        Try(circuitBreaker.execute(asJava(() => "foo")))

        latch.await(1, TimeUnit.SECONDS) shouldEqual true
        circuitBreaker.execute(asJava(() => "bar")) shouldEqual "bar"
      }

      "trip when condition became false" in {
        val latch = new CountDownLatch(2)
        val circuitBreaker =
          new CircuitBreaker(
            new Condition.FailureCount(2),
            Duration.of(1, SECONDS),
            clock,
            Optional.of(latch)
          )

        // Two consecutive failures reach the threshold and open the breaker.
        Try(circuitBreaker.execute(asJava(() => throw new RuntimeException)))
        Try(circuitBreaker.execute(asJava(() => throw new RuntimeException)))

        latch.await(1, TimeUnit.SECONDS) shouldEqual true
        an[CircuitBreakerOpenException] should be thrownBy circuitBreaker.execute(asJava(() => "foo"))
      }
    }

    "using async API" should {
      "allow execution when condition true" in {
        val latch = new CountDownLatch(2)
        val circuitBreaker =
          new CircuitBreaker(
            new Condition.FailureCount(2),
            Duration.of(1, SECONDS),
            clock,
            Optional.of(latch)
          )

        circuitBreaker.executeAsync(asJavaAsync(() => Future.failed(new RuntimeException)))
        circuitBreaker.executeAsync(asJavaAsync(() => Future("foo")))

        latch.await(1, TimeUnit.SECONDS) shouldEqual true
        circuitBreaker.execute(asJavaAsync(() => Future("bar"))).toScala.futureValue shouldEqual "bar"
      }

      "trip when condition became false" in {
        val latch = new CountDownLatch(2)
        val circuitBreaker =
          new CircuitBreaker(
            new Condition.FailureCount(2),
            Duration.of(1, SECONDS),
            clock,
            Optional.of(latch)
          )

        circuitBreaker.executeAsync(asJavaAsync(() => Future.failed(new RuntimeException)))
        circuitBreaker.executeAsync(asJavaAsync(() => Future.failed(new RuntimeException)))

        latch.await(1, TimeUnit.SECONDS) shouldEqual true
        circuitBreaker.executeAsync(asJavaAsync(() => Future("foo"))).toScala.failed.futureValue shouldBe a[CircuitBreakerOpenException]
      }
    }
  }
}
object CircuitBreakerSpec {
  // Adapts a Scala thunk to a Java Supplier for the breaker's sync API.
  def asJava[T](f: () => T): Supplier[T] = asJavaSupplier(f)
  // Adapts a Future-producing thunk to a Supplier[CompletableFuture] for the async API.
  def asJavaAsync[T](f: () => Future[T]): Supplier[CompletableFuture[T]] = asJavaSupplier(() => f().toJava.toCompletableFuture)
}
| blemale/fuse | core/src/test/scala/com/github/blemale/fuse/CircuitBreakerSpec.scala | Scala | apache-2.0 | 3,682 |
/*
* Copyright (c) 2014-2014 Erik van Oosten All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.grons.otagolog.shared.config
/**
 * Default configurations.
 *
 * TODO: move all default values to here...
 */
object ConfigurationDefaults {
  // Port the server listens on when none is configured.
  val DefaultServerPort = 8375
}
| erikvanoosten/otagolog | src/main/scala/nl/grons/otagolog/shared/config/ConfigurationDefaults.scala | Scala | apache-2.0 | 822 |
package jp.kenkov.smt.ibmmodel
import scala.collection.mutable.{Map => MMap}
import jp.kenkov.smt.{_}
/** Shared base for the IBM translation models. */
abstract class IBMModel {
  // def train: MMap[(TargetWord, SourceWord), Double]

  /** Collects the set of distinct source-language words in the corpus. */
  def sourceKeys(tCorpus: TokenizedCorpus): Set[SourceWord] =
    tCorpus.foldLeft(Set.empty[SourceWord]) {
      case (acc, (_, fs)) => acc ++ fs
    }
}
class IBMModel1(val tCorpus: TokenizedCorpus, val loopCount: Int) extends IBMModel {
  /**
   * Runs `loopCount` rounds of EM training (IBM Model 1) and returns the
   * estimated translation table t(e|f), keyed by (target word, source word).
   */
  def train: MMap[(TargetWord, SourceWord), Double] = {
    // set fkeys
    val fKeys: Set[SourceWord] = sourceKeys(tCorpus)
    // set default value: uniform probability over all source words
    val defaultValue: Double = 1.0 / fKeys.size
    // initialize the returned collection
    val t: MMap[(String, String), Double] =
      scala.collection.mutable.Map().withDefaultValue(defaultValue)

    for (i <- 1 to loopCount) {
      // initialize vars (per-iteration expected counts)
      val count: MMap[(String, String), Double] =
        scala.collection.mutable.Map().withDefaultValue(0.0)
      val total: MMap[String, Double] =
        scala.collection.mutable.Map().withDefaultValue(0.0)
      val sTotal: MMap[String, Double] =
        scala.collection.mutable.Map().withDefaultValue(0.0)
      // main algorithm — E step: accumulate expected alignment counts
      for ((es, fs) <- this.tCorpus) {
        for (e <- es) {
          sTotal(e) = 0.0
          for (f <- fs)
            sTotal(e) += t((e, f))
        }
        for (e <- es) {
          for (f <- fs) {
            count((e, f)) += t((e, f)) / sTotal(e)
            total(f) += t((e, f)) / sTotal(e)
          }
        }
      }
      // M step: re-normalize counts into probabilities
      for ((e, f) <- count.keys) {
        val v = count((e, f)) / total(f)
        t((e, f)) = count((e, f)) / total(f)
        // println(v, t((e, f)))
      }
    }
    // return the value
    t
  }
}
/**
 * IBM Model 2: extends Model 1 with an explicit alignment-position model
 * a(i, j, lengthE, lengthF). Training bootstraps the translation table with a
 * full IBM Model 1 run, then re-estimates both tables jointly with EM.
 *
 * @param tCorpus   tokenized parallel corpus of (target words, source words) pairs
 * @param loopCount number of EM iterations (used for both models)
 */
class IBMModel2(val tCorpus: TokenizedCorpus, val loopCount: Int) extends IBMModel {

  /**
   * Returns the trained translation table t(e | f) together with the alignment
   * probability table a(i, j, lengthE, lengthF).
   */
  def train: (MMap[(TargetWord, SourceWord), Double], AlignmentProbability) = {
    // (A previously computed `fKeys` set was never used and has been removed.)
    // Bootstrap the translation table with IBM Model 1.
    val t: MMap[(TargetWord, SourceWord), Double] = new IBMModel1(tCorpus, loopCount).train
    // Alignment table; unseen entries default to the uniform 1 / (lengthF + 1).
    val a: AlignmentProbability = MMap().withDefault {
      case (i, j, lengthE, lengthF) => 1.0 / (lengthF + 1)
    }
    for (i <- 1 to loopCount) {
      // Expected counts and normalizers for this EM iteration.
      val count: MMap[(TargetWord, SourceWord), Double] = MMap().withDefaultValue(0.0)
      val total: MMap[SourceWord, Double] = MMap().withDefaultValue(0.0)
      val countA: MMap[(SourcePosition, TargetPosition, TargetLength, SourceLength), Double] = MMap().withDefaultValue(0.0)
      val totalA: MMap[(TargetPosition, TargetLength, SourceLength), Double] = MMap().withDefaultValue(0.0)
      val sTotal: MMap[TargetWord, Double] = MMap().withDefaultValue(0.0)
      for ((es: TargetWords, fs: SourceWords) <- tCorpus) {
        val lengthE = es.length
        val lengthF = fs.length
        // Compute normalization; positions are 1-based, hence the i+1 shift.
        for ((e, j) <- es.zipWithIndex.map{case (k, i) => (k, i+1)}) {
          sTotal(e) = 0
          for ((f, i) <- fs.zipWithIndex.map{case (k, i) => (k, i+1)}) {
            sTotal(e) += t((e, f)) * a((i, j, lengthE, lengthF))
          }
        }
        // E-step: distribute each pair's probability mass over the counters.
        for ((e, j) <- es.zipWithIndex.map{case (k, i) => (k, i+1)}) {
          for ((f, i) <- fs.zipWithIndex.map{case (k, i) => (k, i+1)}) {
            val c = t((e, f)) * a((i, j, lengthE, lengthF)) / sTotal(e)
            count((e, f)) += c
            total(f) += c
            countA((i, j, lengthE, lengthF)) += c
            totalA((j, lengthE, lengthF)) += c
          }
        }
      }
      // M-step: re-estimate the translation table. (The previous version
      // carried an unused zipWithIndex index for commented-out debug output.)
      for ((e, f) <- count.keys) {
        t((e, f)) = count((e, f)) / total(f)
      }
      // M-step: re-estimate the alignment table.
      for ((i, j, lengthE, lengthF) <- countA.keys) {
        a((i, j, lengthE, lengthF)) = countA((i, j, lengthE, lengthF)) / totalA((j, lengthE, lengthF))
      }
    }
    (t, a)
  }
}
/**
 * Alignment extraction and symmetrization on top of the trained IBM models.
 */
object Alignment {

  /**
   * Viterbi (best-path) alignment under IBM Model 2: for each target position
   * j (1-based) pick the source position i maximizing t(e, f) * a(i, j, lE, lF).
   * Positions default to 0, i.e. "unaligned".
   */
  def viterbiAlignment(es: TargetWords,
                       fs: SourceWords,
                       t: MMap[(TargetWord, SourceWord), Double],
                       a: AlignmentProbability) : MMap[TargetIndex, SourceIndex] = {
    val maxA: MMap[TargetIndex, SourceIndex] = MMap().withDefaultValue(0)
    val lengthE = es.length
    val lengthF = fs.length
    // zipWithIndex is 0-based; shift to the 1-based positions used by the model.
    for ((e, j) <- es.zipWithIndex.map{case (k, i) => (k, i+1)}) {
      // (best source position so far, its score); the -1 sentinel guarantees
      // the first candidate always wins.
      var currentMax: (Int, Double) = (0, -1)
      for ((f, i) <- fs.zipWithIndex.map{case (k, i) => (k, i+1)}) {
        val v = t((e, f)) * a((i, j, lengthE, lengthF))
        if (currentMax._2 < v) {
          currentMax = (i, v)
        }
      }
      maxA(j) = currentMax._1
    }
    maxA
  }

  /**
   * Symmetrization core in the style of grow-diag-final: start from the
   * intersection of the two directed alignments, grow it with neighboring
   * points from the union, then finalize with remaining union points.
   * Both input sets are expected in (targetIndex, sourceIndex) order.
   */
  def _alignment(eList: TargetList,
                 fList: SourceList,
                 e2f: Set[(TargetIndex,SourceIndex)],
                 f2e: Set[(TargetIndex, SourceIndex)]): Set[(Int, Int)] = {
    // The eight neighbors (including diagonals) of an alignment point.
    val neighboring = Set((-1, 0), (0, -1), (1, 0), (0, 1),
                          (-1, -1), (-1, 1), (1, -1), (1, 1))
    val m = eList.length
    val n = fList.length
    var ali: Set[(Int, Int)] = e2f intersect f2e
    var setLen = ali.size
    // merge with neighborhood: repeat until no neighbor of an aligned point
    // can be added anymore (fixed point on the set size).
    do {
      setLen = ali.size
      for (eIndex <- 1 to m) {
        for (fIndex <- 1 to n) {
          if (ali contains (eIndex, fIndex)) {
            for ((eDiff, fDiff) <- neighboring) {
              val eNew = eIndex + eDiff
              val fNew = fIndex + fDiff
              // NOTE(review): branching on !ali.isEmpty looks suspicious. We
              // only get here when ali contains (eIndex, fIndex), so ali is
              // never empty and the else branch is dead. Standard grow-diag
              // guards on "eNew or fNew not yet aligned" instead — confirm
              // against the intended algorithm.
              if (!ali.isEmpty) {
                if ((e2f union f2e) contains (eNew, fNew)) {
                  ali += (eNew -> fNew)
                }
              } else {
                val eIndexes = ali.map { case (i, _) => i }
                val fIndexes = ali.map { case (_, j) => j}
                if ((!(eIndexes contains eNew) || !(fIndexes contains fNew)) &&
                    ((e2f union f2e) contains (eNew, fNew))) {
                  ali += (eNew -> fNew)
                }
              }
            }
          }
        }
      }
    } while (setLen != ali.size)
    // Finalize: sweep all cells once more and add points from the union.
    for (eIndex <- 1 to m) {
      for (fIndex <- 1 to n) {
        // NOTE(review): same pattern as above — the else branch only runs when
        // ali is empty, in which case eIndexes/fIndexes are empty and the
        // "not yet aligned" checks are vacuously true; both branches then
        // reduce to "add every union point". Confirm intent.
        if (!ali.isEmpty) {
          if ((e2f union f2e) contains (eIndex, fIndex)) {
            ali += (eIndex -> fIndex)
          }
        } else {
          val eIndexes = ali.map { case (i, _) => i }
          val fIndexes = ali.map { case (_, j) => j}
          if ((!(eIndexes contains eIndex) || !(fIndexes contains fIndex)) &&
              ((e2f union f2e) contains (eIndex, fIndex))) {
            ali += (eIndex -> fIndex)
          }
        }
      }
    }
    ali
  }

  /**
   * Adapter over [[_alignment]]: flips the e->f alignment (given in
   * (sourceIndex, targetIndex) order) into (targetIndex, sourceIndex) order so
   * both directions share one coordinate system before merging.
   */
  def alignment(eList: TargetList,
                fList: SourceList,
                e2f: Set[(SourceIndex,TargetIndex)],
                f2e: Set[(TargetIndex, SourceIndex)]): Set[(Int, Int)] = {
    val _e2f = for ((i, j) <- e2f) yield (j, i)
    _alignment(eList, fList, _e2f, f2e)
  }

  /**
   * Symmetrizes two directed trainings (f->e and e->f) into one alignment:
   * runs Viterbi alignment in both directions, then merges the results.
   */
  def symmetrization(es: TargetWords,
                     fs: SourceWords,
                     f2eTrain: (MMap[(TargetWord, SourceWord), Double], AlignmentProbability),
                     e2fTrain: (MMap[(SourceWord, TargetWord), Double], AlignmentProbability)): Alignment = {
    val (t, a) = f2eTrain
    val f2e = viterbiAlignment(es, fs, t, a)
    val (e2ft, e2fa) = e2fTrain
    val e2f = viterbiAlignment(fs, es, e2ft, e2fa)
    alignment(es, fs, e2f.toSet, f2e.toSet)
  }
}
/*
object IBMModel1Test {
def testIBMModel1(corpus: List[(TargetSentence, SourceSentence)], loopCount: Int = 1000) {
val tCorpus: TokenizedCorpus = mkTokenizedCorpus(corpus)
// print the result
val model = new IBMModel1(tCorpus, loopCount)
println(model.tCorpus)
println(model.loopCount)
val ans = model.train
println(ans)
println()
ans.foreach {
case (k, v) => println("%15s -> %f".format(k, v))
}
}
def testIBMModel2(corpus: List[(TargetSentence, SourceSentence)], loopCount: Int = 1000) {
val tCorpus: TokenizedCorpus = mkTokenizedCorpus(corpus)
// print the result
val model = new IBMModel2(tCorpus, loopCount)
println(model.tCorpus)
println(model.loopCount)
val ans = model.train
println(ans)
println()
// ans.foreach {
// case (k, v) => println("%15s -> %f".format(k, v))
// }
}
def test1() {
val tokenizedCorpus: List[(TargetSentence, SourceSentence)] =
List(("the house", "das Haus"),
("the book", "das Buch"),
("a book", "ein Buch"))
testIBMModel1(tokenizedCorpus, 1000)
}
def test2() {
val corpus = List(("X で は ない か と つくづく 疑問 に 思う",
"I often wonder if it might be X."),
("X が いい な と いつも 思い ます",
"I always think X would be nice."),
("それ が ある よう に いつも 思い ます",
"It always seems like it is there."))
testIBMModel1(corpus, 10000)
}
def test3() {
val sentences: List[(TargetSentence, SourceSentence)] =
List(("the house", "das Haus"),
("the book", "das Buch"),
("a book", "ein Buch"))
testIBMModel2(sentences, 1000)
}
def test4() {
val corpus: List[(TargetSentence, SourceSentence)] =
List(("the house", "das Haus"),
("the book", "das Buch"),
("a book", "ein Buch"))
val tCorpus = mkTokenizedCorpus(corpus)
val (t, a) = new IBMModel2(tCorpus, 1000).train
println(t)
println(a)
val es: TargetWords = List("the", "house")
val fs: SourceWords = List("das", "Haus")
val ans = Alignment.viterbiAlignment(es, fs, t, a)
println(ans)
}
def main(args: Array[String]) {
test4()
}
}
*/
| kenkov/smtscala | src/main/scala/jp/kenkov/smt/ibmmodel/IBMModel.scala | Scala | mit | 9,690 |
package net.technowizardry.xmpp.auth
import net.technowizardry.xmpp.messages._
import net.technowizardry.xmpp.XmppConnection
/**
 * Drives SASL authentication over an XMPP connection. Supports the PLAIN and
 * SCRAM-SHA-1 mechanisms; server challenges are routed back here through the
 * message callback registered at construction time.
 */
class XmppAuthenticator(connection : XmppConnection, username : String, password : String) {
  /** Supported SASL mechanism names mapped to the routine that starts each one. */
  var authhandlers : Map[String, () => Unit] = Map()
  /** Strategy for the challenge/response exchange in progress; unset until SCRAM starts. */
  var strategy : AuthStrategy = _

  authhandlers += ("PLAIN" -> PerformPlainAuth)
  authhandlers += ("SCRAM-SHA-1" -> PerformScramSha1)
  // Route incoming SASL challenge stanzas to ProcessChallenge.
  connection.RegisterMessageCallback(classOf[SaslChallengeMessage], ProcessChallenge)

  /**
   * Picks the first server-offered mechanism we support and starts it.
   * Fails with NoSuchElementException when no offered mechanism is known.
   */
  def AttemptAuthentication(mechanisms : List[String]) {
    val chosen = mechanisms.find(authhandlers.contains).get
    println(String.format("Selected %s as authentication mechanism", chosen))
    authhandlers(chosen)()
  }

  private def PerformPlainAuth() {
    connection.SendMessageImmediately(new SaslPlainAuthMessage(username, password))
  }

  private def PerformScramSha1() {
    strategy = new ScramAuthStrategy(username, password)
    connection.SendMessageImmediately(strategy.BeginAuthentication())
  }

  private def ProcessChallenge(message : XmppProtocolMessage) {
    // Narrow to the challenge type; any other message is a programming error
    // and fails with MatchError, as before.
    val challenge = message match {
      case c : SaslChallengeMessage => c
    }
    connection.SendMessageImmediately(strategy.HandleChallenge(challenge))
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rpc.netty
import java.util.concurrent.{CountDownLatch, TimeUnit}
import java.util.concurrent.atomic.AtomicInteger
import org.mockito.Mockito._
import org.apache.spark.SparkFunSuite
import org.apache.spark.rpc.{RpcEnv, RpcEndpoint, RpcAddress, TestRpcEndpoint}
/**
 * Tests for the RPC Inbox: message delivery, reply routing, concurrent posts,
 * connection lifecycle events, and stop semantics. Endpoint behaviour is
 * observed through TestRpcEndpoint; Dispatcher and NettyRpcEndpointRef are
 * Mockito mocks.
 */
class InboxSuite extends SparkFunSuite {

  test("post") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    when(endpointRef.name).thenReturn("hello")
    val dispatcher = mock(classOf[Dispatcher])
    val inbox = new Inbox(endpointRef, endpoint)
    // One-way message: needReply = false.
    val message = ContentMessage(null, "hi", false, null)
    inbox.post(message)
    inbox.process(dispatcher)
    assert(inbox.isEmpty)
    endpoint.verifySingleReceiveMessage("hi")
    // stop() enqueues OnStop; processing it must fire the endpoint's stop hook.
    inbox.stop()
    inbox.process(dispatcher)
    assert(inbox.isEmpty)
    endpoint.verifyStarted()
    endpoint.verifyStopped()
  }

  test("post: with reply") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    val dispatcher = mock(classOf[Dispatcher])
    val inbox = new Inbox(endpointRef, endpoint)
    // needReply = true routes through receiveAndReply instead of receive.
    val message = ContentMessage(null, "hi", true, null)
    inbox.post(message)
    inbox.process(dispatcher)
    assert(inbox.isEmpty)
    endpoint.verifySingleReceiveAndReplyMessage("hi")
  }

  test("post: multiple threads") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    when(endpointRef.name).thenReturn("hello")
    val dispatcher = mock(classOf[Dispatcher])
    val numDroppedMessages = new AtomicInteger(0)
    val inbox = new Inbox(endpointRef, endpoint) {
      // Count drops instead of logging so the total can be asserted below.
      override def onDrop(message: InboxMessage): Unit = {
        numDroppedMessages.incrementAndGet()
      }
    }
    val exitLatch = new CountDownLatch(10)
    // 10 producer threads posting 100 messages each: 1000 messages in total.
    for (_ <- 0 until 10) {
      new Thread {
        override def run(): Unit = {
          for (_ <- 0 until 100) {
            val message = ContentMessage(null, "hi", false, null)
            inbox.post(message)
          }
          exitLatch.countDown()
        }
      }.start()
    }
    // Try to process some messages
    inbox.process(dispatcher)
    inbox.stop()
    // After `stop` is called, further messages will be dropped. However, while `stop` is called,
    // some messages may be post to Inbox, so process them here.
    inbox.process(dispatcher)
    assert(inbox.isEmpty)
    exitLatch.await(30, TimeUnit.SECONDS)
    // Every posted message must be either delivered or counted as dropped.
    assert(1000 === endpoint.numReceiveMessages + numDroppedMessages.get)
    endpoint.verifyStarted()
    endpoint.verifyStopped()
  }

  test("post: Associated") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    val dispatcher = mock(classOf[Dispatcher])
    val remoteAddress = RpcAddress("localhost", 11111)
    val inbox = new Inbox(endpointRef, endpoint)
    // A connect event must surface as a single onConnected callback.
    inbox.post(RemoteProcessConnected(remoteAddress))
    inbox.process(dispatcher)
    endpoint.verifySingleOnConnectedMessage(remoteAddress)
  }

  test("post: Disassociated") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    val dispatcher = mock(classOf[Dispatcher])
    val remoteAddress = RpcAddress("localhost", 11111)
    val inbox = new Inbox(endpointRef, endpoint)
    // A disconnect event must surface as a single onDisconnected callback.
    inbox.post(RemoteProcessDisconnected(remoteAddress))
    inbox.process(dispatcher)
    endpoint.verifySingleOnDisconnectedMessage(remoteAddress)
  }

  test("post: AssociationError") {
    val endpoint = new TestRpcEndpoint
    val endpointRef = mock(classOf[NettyRpcEndpointRef])
    val dispatcher = mock(classOf[Dispatcher])
    val remoteAddress = RpcAddress("localhost", 11111)
    val cause = new RuntimeException("Oops")
    val inbox = new Inbox(endpointRef, endpoint)
    // A connection error must surface as onNetworkError with cause and address.
    inbox.post(RemoteProcessConnectionError(cause, remoteAddress))
    inbox.process(dispatcher)
    endpoint.verifySingleOnNetworkErrorMessage(cause, remoteAddress)
  }
}
| pronix/spark | core/src/test/scala/org/apache/spark/rpc/netty/InboxSuite.scala | Scala | apache-2.0 | 4,764 |
import org.scalacheck.Gen
import org.scalacheck.Gen.{someOf, oneOf, const, resultOf, nonEmptyListOf,
identifier, frequency}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.commands.Commands
import scala.util.{Try, Success}
import scala.collection.immutable.Map
import com.redis.RedisClient
/** ScalaCheck entry point: registers the stateful Redis spec as a property. */
object CommandsRedis extends org.scalacheck.Properties("CommandsRedis") {
  property("redisspec") = RedisSpec.property()
}
/**
 * Stateful ScalaCheck specification for a Redis server: models SET / GET /
 * DEL / DBSIZE / FLUSHDB plus connection churn against an abstract State and
 * checks each command's result against the model.
 */
object RedisSpec extends Commands {

  type Sut = RedisClient

  /**
   * Abstract model of the database:
   * contents  — key/value pairs expected to be present;
   * deleted   — keys known to have been removed (kept so GETs on absent keys
   *             can be generated);
   * connected — whether the client connection is currently open.
   */
  case class State (
    contents: collection.immutable.Map[String,String],
    deleted: collection.immutable.Set[String],
    connected: Boolean
  )

  // All commands hit one shared server, so allow at most one SUT at a time.
  def canCreateNewSut(newState: State, initSuts: Traversable[State],
    runningSuts: Traversable[Sut]
  ): Boolean = {
    initSuts.isEmpty && runningSuts.isEmpty
  }

  def destroySut(sut: Sut): Unit = {
    // Reconnect if we should happen to be disconnected
    // Probably want to have the state available here
    sut.reconnect
    sut.flushdb
    sut.quit
  }

  // Every run starts from an empty database with an open connection.
  def genInitialState: Gen[State] = State(
    collection.immutable.Map.empty,
    collection.immutable.Set.empty,
    true
  )

  def initialPreCondition(state: State): Boolean = state.connected

  def newSut(state: State): Sut = new RedisClient(
    "localhost",
    6379
  )

  /** Weighted command generator; while disconnected, only reconnecting is possible. */
  def genCommand(state: State): Gen[Command] = {
    if(!state.connected) ToggleConnected
    else
      frequency(
        (20, genDel),
        (10, genDelExisting(state)),
        (50, genSet),
        (10, genSetExisting(state)),
        (20, genGet),
        (20, genGetExisting(state)),
        (20, genGetDeleted(state)),
        (20, const(DBSize)),
        ( 1, const(FlushDB)),
        ( 3, const(ToggleConnected))
      )
  }

  //val genKey = arbitrary[String]
  //val genVal = arbitrary[String]
  // Identifiers keep generated keys/values readable in failure output.
  val genKey = identifier
  val genVal = identifier

  val genSet: Gen[Set] = for {
    key <- genKey
    value <- genVal
  } yield Set(key, value)

  // Prefer deleting keys the model says exist; falls back to random keys.
  def genDelExisting(state: State): Gen[Del] =
    if(state.contents.isEmpty) genDel
    else someOf(state.contents.keys.toSeq).map(Del)

  // Overwrite an existing key, sometimes with its current value (no-op SET).
  def genSetExisting(state: State): Gen[Set] =
    if(state.contents.isEmpty) genSet else for {
      key <- oneOf(state.contents.keys.toSeq)
      value <- oneOf(genVal, const(state.contents(key)))
    } yield Set(key,value)

  val genGet: Gen[Get] = genKey.map(Get)

  val genDel: Gen[Del] = nonEmptyListOf(genKey).map(Del)

  def genGetExisting(state: State): Gen[Get] =
    if(state.contents.isEmpty) genGet else for {
      key <- oneOf(state.contents.keys.toSeq)
    } yield Get(key)

  // GETs on previously deleted keys exercise the "key absent" path.
  def genGetDeleted(state: State): Gen[Get] =
    if(state.deleted.isEmpty) genGet else for {
      key <- oneOf(state.deleted.toSeq)
    } yield Get(key)

  /** DBSIZE must report exactly the number of keys in the model. */
  case object DBSize extends Command {
    type Result = Option[Long]
    def run(sut: Sut) = sut.dbsize
    def preCondition(state: State) = state.connected
    def nextState(state: State) = state
    def postCondition(state: State, result: Try[Option[Long]]) =
      result == Success(Some(state.contents.keys.size))
  }

  /** SET always succeeds; the key becomes live and leaves the deleted set. */
  case class Set(key: String, value: String) extends Command {
    type Result = Boolean
    def run(sut: Sut) = sut.set(key, value)
    def preCondition(state: State) = state.connected
    def nextState(state: State) = state.copy(
      contents = state.contents + (key -> value),
      deleted = state.deleted.filter(_ != key)
    )
    def postCondition(state: State, result: Try[Boolean]) =
      result == Success(true)
  }

  /** DEL of a batch of keys; must report how many of them actually existed. */
  case class Del(keys: Seq[String]) extends Command {
    type Result = Option[Long]
    def run(sut: Sut) =
      if(keys.isEmpty) Some(0)
      else sut.del(keys.head, keys.tail: _*)
    def preCondition(state: State) = state.connected
    def nextState(state: State) = state.copy(
      contents = state.contents -- keys,
      deleted = state.deleted ++ keys
    )
    // `state` is the model state *before* the command ran, so this counts
    // exactly the keys DEL was expected to remove.
    def postCondition(state: State, result: Try[Option[Long]]) =
      result == Success(Some(state.contents.filterKeys(keys.contains).size))
  }

  // NOTE(review): only `contents` is cleared here; `deleted` is left as-is,
  // so flushed keys are not tracked as deleted — confirm this is intended.
  case object FlushDB extends Command {
    type Result = Boolean
    def run(sut: Sut) = sut.flushdb
    def preCondition(state: State) = state.connected
    def nextState(state: State) = state.copy(
      contents = Map.empty
    )
    def postCondition(state: State, result: Try[Boolean]) =
      result == Success(true)
  }

  /** Disconnects when connected and reconnects when not (connection churn). */
  case object ToggleConnected extends Command {
    type Result = Boolean
    def run(sut: Sut) = {
      if(sut.connected) sut.quit
      else sut.connect
    }
    def preCondition(state: State) = true
    def nextState(state: State) = state.copy(
      connected = !state.connected
    )
    def postCondition(state: State, result: Try[Boolean]) =
      result == Success(true)
  }

  /** GET must return the modelled value, or None for absent keys. */
  case class Get(key: String) extends Command {
    type Result = Option[String]
    def run(sut: Sut) = sut.get(key)
    def preCondition(state: State) = state.connected
    def nextState(state: State) = state
    def postCondition(state: State, result: Try[Option[String]]) =
      result == Success(state.contents.get(key))
  }

  // case class BitCount(key: String) extends Command {
  //   type Result = Option[Int]
  //   def run(sut: Sut) = sut.bitcount(key, None)
  //   def preCondition(state: State) = state.connected
  //   def nextState(state: State) = state
  //   def postCondition(state: State, result: Try[Option[Int]]) = {
  //     val expected = state.contents.get(key) match {
  //       case None => 0
  //       case Some(str) bitcount(str)
  //     result == Success(state.contents.get(key))
  //   }
}
| jedws/scalacheck | examples/commands-redis/src/test/scala/CommandsRedis.scala | Scala | bsd-3-clause | 5,540 |
///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2010 Travis Brown, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.fieldspring.tr.app
import java.io._
import opennlp.fieldspring.tr.eval._
import opennlp.fieldspring.tr.resolver._
import opennlp.fieldspring.tr.topo.gaz._
import opennlp.fieldspring.tr.text._
import opennlp.fieldspring.tr.text.io._
import opennlp.fieldspring.tr.text.prep._
import opennlp.fieldspring.tr.util.Constants
/**
 * Command-line tool that strips existing toponym annotations from a TR-XML
 * corpus and re-annotates it: args(0) is the input directory, args(1) the
 * output directory.
 */
object ReprocessTrApp {
  def main(args: Array[String]) {
    val tokenizer = new OpenNLPTokenizer
    val recognizer = new OpenNLPRecognizer

    // Gazetteer backing the toponym annotator, loaded fully into memory.
    val gazetteer = new InMemoryGazetteer
    gazetteer.load(new WorldReader(new File(
      Constants.getGazetteersDir() + File.separator + "dataen-fixed.txt.gz"
    )))

    val corpus = Corpus.createStreamCorpus
    val xmlSource = new TrXMLDirSource(new File(args(0)), tokenizer)
    // Drop any pre-existing toponym annotations, then annotate from scratch.
    corpus.addSource(new ToponymAnnotator(new ToponymRemover(xmlSource), recognizer, gazetteer))

    new CorpusXMLWriter(corpus).write(new File(args(1)))
  }
}
| utcompling/fieldspring | src/main/scala/opennlp/fieldspring/tr/app/ReprocessTrApp.scala | Scala | apache-2.0 | 1,771 |
package net.scalaleafs.sample
import spray.json.{JsString, JsValue, RootJsonFormat, DefaultJsonProtocol}
import org.joda.time.DateTime
import org.joda.time.format.ISODateTimeFormat
/**
 * spray-json protocol for the calendar sample: a format for Event plus a
 * Joda DateTime format using the ISO-8601 "basic" date-time pattern.
 */
object Calendar2JsonProtocol extends DefaultJsonProtocol {
  implicit val eventFormat = jsonFormat7(Event)
  implicit object JodaTimeJsonFormat extends RootJsonFormat[DateTime] {
    def write(datetime: DateTime): JsValue = JsString(ISODateTimeFormat.basicDateTime().print(datetime))
    // Bug fix: the previous implementation parsed json.toString(), which
    // includes the surrounding JSON quotes and can never satisfy the joda
    // parser. Extract the raw string content via JsString instead.
    def read(json: JsValue): DateTime = json match {
      case JsString(value) => ISODateTimeFormat.basicDateTime().parseDateTime(value)
      // Mirror joda's failure mode (IllegalArgumentException) for bad input.
      case other => throw new IllegalArgumentException("Expected a JSON string for DateTime, got: " + other)
    }
  }
}
| scalastuff/scalaleafs | sample/src/main/scala/net/scalaleafs/sample/Calendar2JsonProtocol.scala | Scala | apache-2.0 | 583 |
package com.arcusys.valamis.certificate.service
import com.arcusys.valamis.certificate.model.{CertificateState, UserStatusHistory}
import com.arcusys.valamis.certificate.storage.schema.{CertificateHistoryTableComponent, UserStatusHistoryTableComponent}
import com.arcusys.valamis.persistence.common.SlickProfile
import com.arcusys.valamis.persistence.common.filters.ColumnFiltering
import org.joda.time.DateTime
import slick.driver.JdbcProfile
import slick.jdbc.JdbcBackend
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
/**
 * Slick-backed implementation of UserStatusHistoryService: records certificate
 * status changes per user and answers point-in-time / period history queries.
 */
class UserStatusHistoryServiceImpl(val db: JdbcBackend#DatabaseDef, val driver: JdbcProfile)
  extends UserStatusHistoryService
    with SlickProfile
    with UserStatusHistoryTableComponent
    with CertificateHistoryTableComponent
    with ColumnFiltering {

  import driver.api._

  /**
   * Inserts a history row for the given state, stamped with the current time.
   * NOTE(review): this blocks the calling thread (Await with infinite timeout)
   * while the other methods return Futures — confirm the synchronous contract
   * is intentional.
   */
  def add(userStatus: CertificateState, isDeleted: Boolean): Unit = {
    val insertQ = userHistoryTQ += UserStatusHistory(
      userStatus.certificateId,
      userStatus.userId,
      userStatus.status,
      date = DateTime.now,
      isDeleted
    )
    Await.result(db.run(insertQ), Duration.Inf)
  }

  /** Most recent history entry for (user, certificate) at or before `date`. */
  def getUserStatus(userId: Long, certificateId: Long, date: DateTime): Future[Option[UserStatusHistory]] = {
    val selectQ = userHistoryTQ
      .filter(_.certificateId === certificateId)
      .filter(_.userId === userId)
      .filter(_.date <= date)
      .sortBy(_.date.desc)
      .result.headOption
    db.run(selectQ)
  }

  /**
   * Latest entry per (user, certificate) pair within [since, until) for the
   * given users and certificates. Implemented as group-by-max on the date,
   * joined back to the history table to recover the full rows.
   */
  def getUsersHistory(usersIds: Seq[Long],
                      certificateIds: Seq[Long],
                      since: DateTime,
                      until: DateTime): Future[Seq[UserStatusHistory]] = {
    val selectQ = userHistoryTQ
      .filter(_.userId containsIn usersIds)
      .filter(_.certificateId containsIn certificateIds)
      .filter(_.date >= since)
      .filter(_.date < until)
      .groupBy(r => (r.userId, r.certificateId))
      .map(g => (g._1._1, g._1._2, g._2.map(_.date).max))
      .join(userHistoryTQ).on((g, s) => g._1 === s.userId && g._2 === s.certificateId && g._3 === s.date).map(_._2)
    db.run(selectQ.result)
  }

  /**
   * Same latest-entry-per-pair query, scoped to all certificates of a company.
   * NOTE(review): bounds here are exclusive-since / inclusive-until — the
   * reverse of getUsersHistory; confirm both conventions are intentional.
   */
  def getUsersHistoryByPeriod(since: DateTime, until: DateTime, companyId: Long): Future[Seq[UserStatusHistory]] = {
    val certificatesIdsQ = certificatesHistoryTQ.filter(_.companyId === companyId).map(_.certificateId)
    val selectQ = userHistoryTQ
      .filter(_.certificateId in certificatesIdsQ)
      .filter(_.date > since)
      .filter(_.date <= until)
      .groupBy(r => (r.userId, r.certificateId))
      .map(g => (g._1._1, g._1._2, g._2.map(_.date).max))
      .join(userHistoryTQ).on((g, s) => g._1 === s.userId && g._2 === s.certificateId && g._3 === s.date).map(_._2)
    db.run(selectQ.result)
  }
}
| arcusys/Valamis | valamis-certificate/src/main/scala/com/arcusys/valamis/certificate/service/UserStatusHistoryServiceImpl.scala | Scala | gpl-3.0 | 2,769 |
package dk.tennisprob.set
import dk.tennisprob.TennisProbFormulaCalc
import dk.tennisprob.tiebreak.GenericTiebreakProb
import dk.tennisprob.game.GenericGameProb
import scala.annotation.tailrec
object GenericSetProb extends SetProb {

  /**
   * Probability that player 1 wins a set, computed by a Markov-chain walk over
   * the game score with a tie break at 6-6. Serve alternates every game and
   * player 1 serves the first game.
   */
  def prob(p1AceProb: Double, p1PointProb: Double, p2AceProb: Double, p2PointProb: Double): Double = {
    val p1GameOnServe = GenericGameProb.prob(p1AceProb, p1PointProb)
    val p2GameOnServe = GenericGameProb.prob(p2AceProb, p2PointProb)
    val p1Tiebreak = GenericTiebreakProb.prob(p1AceProb, p1PointProb, p2AceProb, p2PointProb)
    val p2Tiebreak = GenericTiebreakProb.prob(p2AceProb, p2PointProb, p1AceProb, p1PointProb)

    // Walks the game-score chain; p1Serving flips after every game.
    def walk(p1Games: Int, p2Games: Int, p1Serving: Boolean): Double =
      if (p1Games == 6 && p2Games < 5) 1.0            // set won 6-0 .. 6-4
      else if (p1Games == 7 && p2Games == 5) 1.0      // set won 7-5
      else if (p2Games == 6 && p1Games < 5) 0.0       // set lost 0-6 .. 4-6
      else if (p1Games == 5 && p2Games == 7) 0.0      // set lost 5-7
      else if (p1Games == 6 && p2Games == 6) {
        // Tie break decides the set; which table applies depends on the server.
        if (p1Serving) p1Tiebreak else 1 - p2Tiebreak
      } else if (p1Serving) {
        p1GameOnServe * walk(p1Games + 1, p2Games, p1Serving = false) +
          (1 - p1GameOnServe) * walk(p1Games, p2Games + 1, p1Serving = false)
      } else {
        p2GameOnServe * walk(p1Games, p2Games + 1, p1Serving = true) +
          (1 - p2GameOnServe) * walk(p1Games + 1, p2Games, p1Serving = true)
      }

    walk(0, 0, p1Serving = true)
  }
}
//package com.tribbloids.spookystuff.uav.planning
//
//import com.tribbloids.spookystuff.SpookyContext
//import com.tribbloids.spookystuff.actions.{Trace, TraceView}
//import com.tribbloids.spookystuff.execution.ExecutionContext
//import com.tribbloids.spookystuff.uav.UAVConf
//import com.tribbloids.spookystuff.uav.dsl.GenPartitioners
//import com.tribbloids.spookystuff.uav.telemetry.Link
//import com.tribbloids.spookystuff.utils.{NOTSerializable, SpookyUtils}
//import org.apache.commons.math3.exception.MathIllegalArgumentException
//import org.apache.commons.math3.exception.util.LocalizedFormats
//import org.apache.commons.math3.genetics._
//import org.apache.spark.rdd.RDD
//
//import scala.collection.mutable.ArrayBuffer
//import scala.util.{Random, Success, Try}
//
//object GASolver {
//
// case class Route(
// linkTry: Try[Link],
// is: Seq[Int]
// ) extends NOTSerializable {
//
// def toTracesOpt(allTraces: Seq[Trace]): Option[Seq[Trace]] = {
// linkTry.toOption.map {
// link =>
// val traces: Seq[Trace] = is.map {
// i =>
// allTraces(i)
// }
// val seq = traces.map {
// tr =>
// List(PreferUAV(link.status())) ++ tr
// }
// seq
// }
// }
//
// def estimateCost(solver: GASolver): Double = {
//
// // linkTry match {
// // case Success(link) =>
// //
// // }
// val seqOpt = toTracesOpt(solver.allTracesBroadcasted.value)
// seqOpt.map {
// seq =>
// solver.actionCosts.estimate(seq.toList.flatten, solver.spooky)
// }
// .getOrElse {
// Double.MaxValue
// }
// }
//
// def optimalInsertFrom(from: Seq[Int], solver: GASolver): Route = {
// var state = this
// for (i <- from) {
// // insert into left that yield the best cost
// val size = state.is.size
// val candidates_costs = (0 to size).map {
// j =>
// val splitted = state.is.splitAt(j)
// val inserted = splitted._1 ++ Seq(i) ++ splitted._2
// val insertedRoute = this.copy(is = inserted)
// val cost = insertedRoute.estimateCost(solver)
// insertedRoute -> cost
// }
// .sortBy(_._2)
// // LoggerFactory.getLogger(this.getClass).debug(
// // MessageView(candidates_costs).toJSON()
// // )
// state = candidates_costs.head._1
// }
// state
// }
// }
//}
//
///**
// * Multi-Depot k-Rural Postman Problem solver
// * genetic algorithm comes in handy.
// * use 1 shuffling per generation.
// * Unfortunately this may be suboptimal comparing to http://niels.nu/blog/2016/spark-of-life-genetic-algorithm.html
// * which has many micro local generations per shuffling.
// * takes further testing to know if the convenience of local estimation worth more shuffling.
// */
//case class GASolver(
// @transient allTraces: List[Trace],
// spooky: SpookyContext
// ) extends MinimaxSolver {
//
// import com.tribbloids.spookystuff.utils.SpookyViews._
// import GASolver._
//
// val allTracesBroadcasted = spooky.sparkContext.broadcast(allTraces)
//
// def conf = spooky.getConf[UAVConf]
// def actionCosts = conf.costEstimator
//
// @transient val allHypotheses: ArrayBuffer[Hypothesis] = ArrayBuffer.empty
//
// def evaluatePendingFitness(): Unit = this.synchronized{
// val unevaluatedHypotheses = GASolver.this.allHypotheses.filter {
// v =>
// v._fitness.isEmpty
// }
// if (unevaluatedHypotheses.nonEmpty) {
// val rdds: Seq[RDD[Route]] = unevaluatedHypotheses.map {
// h =>
// h.rdd
// }
// val costRDDs: Seq[RDD[Double]] = rdds.map {
// v =>
// v.map {
// subseq =>
// subseq.estimateCost(this)
// }
// }
// val reduced: Seq[Double] = SpookyUtils.RDDs.batchReduce(costRDDs) {
// Math.max
// }
// unevaluatedHypotheses.zip(reduced).foreach {
// tuple =>
// tuple._1._fitness = Some(tuple._2)
// }
// }
// }
//
// //all solutions refers to the same matrix
// //travelling path of each drone.
// //outer index indicates group, inner index indicates index of the edges being traversed.
// //include start and end index, start index is hardcoded and cannot be changed.
// //the last 3 parameters must have identical cadinality
// //total distance can be easily calculated
// case class Hypothesis(
// rdd: RDD[Route]
// ) extends Chromosome {
//
// allHypotheses += this
//
// var _fitness: Option[Double] = None
//
// /**
// * will batch evaluate fitnesses of all hypotheses that hasn't been evaluated using RDD zipping trick
// * theoretically, since fitness is only used on population rather than chomosome. only 1 evaluation is required for a batch
// * it should work with any Population implementations
// */
// override def fitness(): Double = {
// _fitness.getOrElse {
// evaluatePendingFitness()
// _fitness.get
// }
// }
// }
//
// def sampleWithoutReplacement(
// n: Int = 1,
// exclude: Seq[Int] = Nil,
// max: Int = allTraces.size
// ): Seq[Int] = {
//
// var next = -1
// while(next <= 0) {
// val h = Random.nextInt(max)
// if (!exclude.contains(h)) next = h
// }
//
// if (n > 1) sampleWithoutReplacement(n - 1, exclude :+ next, max) :+ next
// else Seq(next)
// }
//
// def swap(rdd: RDD[Route]): RDD[Route] = {
// val pair = this.sampleWithoutReplacement(2)
// rdd.map {
// subseq =>
// val mutated = subseq.is.map {
// i =>
// if (i == pair.head) pair.last
// else if (i == pair.last) pair.head
// else i
// }
// subseq.copy(is = mutated)
// }
// }
//
// def insert(rdd: RDD[Route]): RDD[Route] = {
// val pair = this.sampleWithoutReplacement(2)
// rdd.map {
// h =>
// val mutated = h.is.flatMap {
// i =>
// if (i == pair.head) Nil
// else if (i == pair.last) pair //TODO: not covering insert at the end of the queue.
// else Seq(i)
// }
// h.copy(is = mutated)
// }
// }
//
// case object Selection extends TournamentSelection(4) {
// }
//
// case object Mutation extends MutationPolicy {
//
// override def mutate(original: Chromosome): Chromosome = {
// original match {
// case Hypothesis(rdd) =>
// val rnd = Random.nextInt(2)
// rnd match {
// case 0 =>
// Hypothesis(swap(rdd))
// case 1 =>
// Hypothesis(insert(rdd))
// }
// case v@ _ =>
// throw new MathIllegalArgumentException(LocalizedFormats.UNSUPPORTED_OPERATION, v)
// }
// }
// }
//
// case object Crossover extends CrossoverPolicy {
//
// override def crossover(first: Chromosome, second: Chromosome): ChromosomePair = {
// (first, second) match {
// case (Hypothesis(rdd1), Hypothesis(rdd2)) =>
// val zipped = rdd1.zipPartitions(rdd2){
// (i1, i2) =>
// val r1 = i1.next()
// val r2 = i2.next()
// val seq2 = i2.toSeq
// assert(r1.linkTry == r2.linkTry)
// assert(i1.isEmpty)
// assert(i2.isEmpty)
// Iterator(r1 -> r2)
// }
// val wIndex = zipped.zipWithIndex()
// val numRoutes = wIndex.partitions.length
// val iRouteSelected = Random.nextInt(numRoutes)
// val routesSelected: (Seq[Int], Seq[Int]) = wIndex.filter {
// wi =>
// wi._2 == iRouteSelected
// }
// .map {
// _._1 match {
// case (r1, r2) =>
// r1.is -> r2.is
// }
// }
// .first()
//
// val swapped = wIndex.map {
// wi =>
// assert(wi._1._1.linkTry == wi._1._2.linkTry)
// val linkOpt = wi._1._1.linkTry
// val leftCleaned = wi._1._1.is.filterNot {
// i =>
// routesSelected._2.contains(i)
// }
// val rightCleaned = wi._1._2.is.filterNot {
// i =>
// routesSelected._1.contains(i)
// }
// val left = Route(linkOpt, leftCleaned)
// val right = Route(linkOpt, rightCleaned)
// if (wi._2 == iRouteSelected) {
// val leftInserted = left.optimalInsertFrom(routesSelected._2, GASolver.this)
// val rightInserted = right.optimalInsertFrom(routesSelected._1, GASolver.this)
// leftInserted -> rightInserted
// }
// else {
// left -> right
// }
// }
// val rddLeft = swapped.keys
// val rddRight = swapped.values
// new ChromosomePair(
// Hypothesis(rddLeft),
// Hypothesis(rddRight)
// )
// case _ =>
// throw new MathIllegalArgumentException(LocalizedFormats.UNSUPPORTED_OPERATION, first, second)
// }
// }
// }
//
// def getLinkRDD: RDD[Link] = {
// import com.tribbloids.spookystuff.uav.utils.UAVViews._
// val proto: RDD[Link] = spooky.sparkContext.mapAtLeastOncePerExecutorCore {
// val linkTry = spooky.withSession {
// session =>
// session.linkTry
// }
// linkTry
// }
// .flatMap(_.toOption)
// proto
// }
//
// def generateSeedPairs(
// numTraces: Int,
// numSeeds: Int
// ): RDD[(Link, Seq[Seq[Int]])] = {
// val seeds = (1 to numSeeds).map {
// i =>
// Random.nextLong()
// }
// val proto: RDD[Link] = getLinkRDD
// proto.persist()
// val numLinks = proto.count().toInt
//
// val pairs = proto
// .zipWithIndex()
// .map {
// tuple =>
// val is: Seq[Seq[Int]] = seeds.map {
// seed =>
// val random = new Random(seed)
// val inPartition = (0 until numTraces).flatMap {
// i =>
// val partition = random.nextInt(numLinks) % numLinks
// if (partition == tuple._2) Some(i)
// else None
// }
// val result = Random.shuffle(inPartition)
// result
// }
// tuple._1 -> is
// }
//
// pairs.persist()
// pairs.count()
//
// proto.unpersist()
//
// pairs
// }
//
// def generateSeeds(
// numSeeds: Int
// ): Seq[Hypothesis] = {
// val pairs = generateSeedPairs(this.allTraces.size, numSeeds)
// (0 until numSeeds).map {
// i =>
// val subRDD = pairs.map {
// case (link, iss) =>
// Route(Success(link), iss(i))
// }
// Hypothesis(subRDD)
// }
// }
//
// override def rewrite[V](
// gp: GenPartitioners.MinimaxCost,
// ec: ExecutionContext,
// rdd: RDD[(TraceView, Iterable[V])]
// ) = ???
//}
| tribbloid/spookystuff | uav/src/main/scala/com/tribbloids/spookystuff/uav/planning/VRPOptimizers/GA.scala | Scala | apache-2.0 | 11,556 |
package org.http4s
import java.time.{Instant, ZonedDateTime}
import org.http4s.parser.AdditionalRules
import org.http4s.util.{Renderable, Writer}
/**
* An HTTP-date value represents time as an instance of Coordinated Universal
* Time (UTC). It expresses time at a resolution of one second. By using it
* over java.time.Instant in the model, we assure that if two headers render
* equally, their values are equal.
*
 * @see https://tools.ietf.org/html/rfc7231#page-65
*/
class HttpDate private (val epochSecond: Long) extends Renderable with Ordered[HttpDate] {

  /** Chronological ordering: negative when this date precedes `that`. */
  def compare(that: HttpDate): Int =
    java.lang.Long.compare(epochSecond, that.epochSecond)

  /** Converts to a `java.time.Instant` at exact-second precision. */
  def toInstant: Instant =
    Instant.ofEpochSecond(epochSecond)

  /** Renders via the instant representation. */
  def render(writer: Writer): writer.type =
    writer << toInstant

  /** Two HTTP dates are equal exactly when their epoch seconds agree. */
  override def equals(o: Any): Boolean =
    o match {
      case that: HttpDate => epochSecond == that.epochSecond
      case _ => false
    }

  /** Consistent with `equals`: hash of the underlying epoch second. */
  override def hashCode(): Int =
    java.lang.Long.hashCode(epochSecond)
}
object HttpDate {
  private val MinEpochSecond = -2208988800L
  private val MaxEpochSecond = 253402300799L

  /** The earliest value representable as an HTTP-date, `Mon, 01 Jan 1900 00:00:00 GMT`.
    *
    * The minimum year is specified by RFC5322 as 1900.
    *
    * @see https://tools.ietf.org/html/rfc7231#page-65
    * @see https://tools.ietf.org/html/rfc5322#page-14
    */
  val MinValue = HttpDate.unsafeFromEpochSecond(MinEpochSecond)

  /** The latest value representable by RFC1123, `Fri, 31 Dec 9999 23:59:59 GMT`. */
  val MaxValue = HttpDate.unsafeFromEpochSecond(MaxEpochSecond)

  /**
   * Constructs an `HttpDate` from the current time. Starting on January 1,
   * 10000, this will throw an exception. The author intends to leave this
   * problem for future generations.
   */
  def now: HttpDate =
    unsafeFromInstant(Instant.now)

  /** The `HttpDate` equal to `Thu, Jan 01 1970 00:00:00 GMT` */
  val Epoch: HttpDate =
    unsafeFromEpochSecond(0)

  /** Parses a date according to RFC7231, Section 7.1.1.1
    *
    * @see https://tools.ietf.org/html/rfc7231#page-65
    */
  def fromString(s: String): ParseResult[HttpDate] =
    AdditionalRules.httpDate(s)

  /** Like `fromString`, but throws on invalid input */
  def unsafeFromString(s: String): HttpDate =
    fromString(s).fold(throw _, identity)

  /** Constructs a date from the seconds since the [[Epoch]]. If out of range,
    * returns a ParseFailure. */
  def fromEpochSecond(epochSecond: Long): ParseResult[HttpDate] =
    if (epochSecond < MinEpochSecond || epochSecond > MaxEpochSecond)
      ParseResult.fail(
        "Invalid HTTP date",
        s"${epochSecond} out of range for HTTP date. Must be between ${MinEpochSecond} and ${MaxEpochSecond}, inclusive")
    else
      ParseResult.success(new HttpDate(epochSecond))

  /** Like `fromEpochSecond`, but throws any parse failures */
  def unsafeFromEpochSecond(epochSecond: Long): HttpDate =
    fromEpochSecond(epochSecond).fold(throw _, identity)

  /** Constructs a date from an instant, truncating to the most recent second. If
    * out of range, returns a ParseFailure. */
  def fromInstant(instant: Instant): ParseResult[HttpDate] =
    // Instant.getEpochSecond floors toward negative infinity, whereas the
    // previous `toEpochMilli / 1000` truncated toward zero for pre-epoch
    // instants (yielding the *next* second instead of the most recent one)
    // and could throw ArithmeticException for extreme instants instead of
    // returning a ParseFailure.
    fromEpochSecond(instant.getEpochSecond)

  /** Like `fromInstant`, but throws any parse failures */
  def unsafeFromInstant(instant: Instant): HttpDate =
    unsafeFromEpochSecond(instant.getEpochSecond)

  /** Constructs a date from a zoned date-time, truncating to the most recent
    * second. If out of range, returns a ParseFailure. */
  def fromZonedDateTime(dateTime: ZonedDateTime): ParseResult[HttpDate] =
    fromInstant(dateTime.toInstant)

  /** Like `fromZonedDateTime`, but throws any parse failures */
  def unsafeFromZonedDateTime(dateTime: ZonedDateTime): HttpDate =
    unsafeFromInstant(dateTime.toInstant)
}
| reactormonk/http4s | core/src/main/scala/org/http4s/HttpDate.scala | Scala | apache-2.0 | 3,838 |
package com.plasmaconduit.fact.compiler.ast
/** Root of every top-level declaration produced by the parser. */
sealed trait ParsedAST

/** A (possibly parameterized) type reference, e.g. `Map[K, V]`. */
case class TypeNode(name: String, parameters: Seq[TypeNode] = Seq())

/** A type parameter together with its required contexts (constraint types). */
case class TypeParameterNode(typeN: TypeNode, contexts: Seq[TypeNode])

/** An enum declaration: the enum's own type plus its case constructors. */
case class EnumDefinitionNode(typeNode: TypeNode, cases: Seq[TypeNode]) extends ParsedAST

/** A named, typed value as it appears in struct members and parameter lists. */
case class TypedValueParameterNode(name: String, typeN: TypeNode)

/** A struct declaration: a name plus its typed members. */
case class StructDefinitionNode(name: String, members: Seq[TypedValueParameterNode]) extends ParsedAST

/** A function's signature: name, type parameters, value parameters, and return type. */
case class FunctionSignatureNode(name: String,
                                 typeParameters: Seq[TypeParameterNode],
                                 parameters: Seq[TypedValueParameterNode],
                                 returnType: TypeNode)

/** A function declaration: its signature and the expression forming its body. */
case class FunctionDefinitionNode(signature: FunctionSignatureNode, body: ExpressionNode) extends ParsedAST

/** A function whose implementation is supplied externally (no body in source). */
case class ForeignFunctionDefinitionNode(signature: FunctionSignatureNode) extends ParsedAST

/** A trait declaration: a name plus the signatures its implementors must provide. */
case class TraitDefinitionNode(name: String, functions: Seq[FunctionSignatureNode]) extends ParsedAST

/** An implementation of a trait for a concrete target type. */
case class TraitImplementationNode(traitName: String, target: TypeNode, functions: Seq[FunctionDefinitionNode]) extends ParsedAST

/** A trait implementation supplied externally (no function bodies in source). */
case class ForeignTraitImplementationNode(traitName: String, target: TypeNode) extends ParsedAST

/** Root of every expression node. */
sealed trait ExpressionNode

/** `if`/`then`/`else`; both branches are mandatory (expression-valued). */
case class IfExpressionNode(condition: ExpressionNode,
                            thenE: ExpressionNode,
                            elseE: ExpressionNode) extends ExpressionNode

/** A call: function name, explicit type arguments, and value arguments. */
case class FunctionCallExpressionNode(name: String,
                                      typeParameters: Seq[TypeParameterNode],
                                      parameters: Seq[ExpressionNode]) extends ExpressionNode

/** A reference to a bound value by name. */
case class ValueExpressionNode(name: String) extends ExpressionNode

/** Construction of a named type from argument expressions. */
case class TypeConstructionExpressionNode(name: String, parameters: Seq[ExpressionNode]) extends ExpressionNode

/** A string literal. */
case class StringLiteralExpression(value: String) extends ExpressionNode

/** An integer literal. */
case class IntLiteralExpression(value: Int) extends ExpressionNode

/** Root of the patterns allowed on the left side of a match case. */
sealed trait PatternMatchCase

/** A pattern that destructures a type construction. */
case class TypeConstructionMatchCase(construction: TypeConstructionExpressionNode) extends PatternMatchCase

/** One arm of a pattern match: the pattern plus the expression it yields. */
case class PatternMatchCaseExpression(caseE: PatternMatchCase, expression: ExpressionNode)

/** A pattern match over a named value with one or more case arms. */
case class PatternMatchExpression(value: String, cases: Seq[PatternMatchCaseExpression]) extends ExpressionNode

/** A single `val name = expr` binding. */
case class ValueAssignment(value: String, expression: ExpressionNode)

/** A block of value bindings followed by a result expression. */
case class ValuePrefixedExpressionNode(assignments: Seq[ValueAssignment], expression: ExpressionNode) extends ExpressionNode
| JosephMoniz/fact | src/main/scala/com/plasmaconduit/fact/compiler/ast/ParsedAST.scala | Scala | mit | 2,535 |
package concurrent_programming.data_parallel.smoothing
import scala.util.Random
object SmoothUtils {

  /** A pixel: BLACK (`true`) or WHITE (`false`). */
  type Type = Boolean
  /** One row of pixels. */
  type Row = Array[Type]
  /** A rectangular image stored row-major as an array of rows. */
  type Image = Array[Row]

  val BLACK = true
  val WHITE = false

  /** Offsets of the four direct neighbours: up, down, left, right. */
  val dirs: Array[(Int, Int)] = Array((-1, 0), (1, 0), (0, -1), (0, 1))

  /** Allocates an n x m image; Boolean arrays default to `false`, i.e. WHITE. */
  def generateEmptyImage(n: Int, m: Int): Image =
    Array.ofDim[Type](n, m)

  /** Copies a numRows x numCols window whose top-left corner in `source` is
    * (rowSource, colSource) into `destination` at (rowDest, colDest).
    */
  def copy(source: Image, rowSource: Int, colSource: Int)(destination: Image, rowDest: Int, colDest: Int)
          (numRows: Int, numCols: Int): Unit = {
    var i = 0
    while (i < numRows) {
      var j = 0
      while (j < numCols) {
        destination(rowDest + i)(colDest + j) = source(rowSource + i)(colSource + j)
        j += 1
      }
      i += 1
    }
  }

  /** Returns a deep copy of `image`; mutating the copy leaves the original intact. */
  def copyImage(image: Image): Image = {
    val (n, m) = (image.length, image(0).length)
    Array.tabulate(n, m)((i, j) => image(i)(j))
  }

  /** Extracts rows [startRow, startRow + numRows) of an image with M columns. */
  def makeLocalCopy(startRow: Int, numRows: Int, M: Int, image: Image): SmoothUtils.Image =
    Array.tabulate(numRows, M)((step, j) => image(startRow + step)(j))

  /** Builds an n x m image whose pixels are independent unbiased coin flips. */
  def generateRandomImage(n: Int, m: Int): SmoothUtils.Image = {
    val random = new Random
    Array.fill(n, m)(random.nextBoolean())
  }

  /** Prints the image to stdout, one row per line: BLACK as '1', WHITE as '0'. */
  def printImage(image: Image): Unit = {
    val (n, m) = (image.length, image(0).length)
    for (i <- 0 until n) {
      val line = (0 until m).map(j => if (image(i)(j) == BLACK) "1" else "0").mkString
      println(line)
    }
  }
}
| AlexandruValeanu/Concurrent-Programming-in-Scala | src/concurrent_programming/data_parallel/smoothing/SmoothUtils.scala | Scala | gpl-3.0 | 1,745 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log4j
import kafka.consumer.SimpleConsumer
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.{TestUtils, Utils, Logging}
import kafka.api.FetchRequestBuilder
import kafka.producer.async.MissingConfigException
import kafka.serializer.Encoder
import kafka.zk.ZooKeeperTestHarness
import java.util.Properties
import java.io.File
import org.apache.log4j.spi.LoggingEvent
import org.apache.log4j.{PropertyConfigurator, Logger}
import org.junit.{After, Before, Test}
import org.scalatest.junit.JUnit3Suite
import junit.framework.Assert._
/**
 * End-to-end test of the Kafka log4j appender: starts one broker, checks
 * that the appender rejects incomplete log4j configurations, and verifies
 * that logged lines actually arrive on the configured topic.
 */
class KafkaLog4jAppenderTest extends JUnit3Suite with ZooKeeperTestHarness with Logging {
  // Broker-side state created in setUp and destroyed in tearDown.
  var logDirZk: File = null
  var config: KafkaConfig = null
  var server: KafkaServer = null
  var simpleConsumerZk: SimpleConsumer = null
  val tLogger = Logger.getLogger(getClass())
  private val brokerZk = 0
  private val ports = TestUtils.choosePorts(2)
  private val portZk = ports(0)
  @Before
  override def setUp() {
    super.setUp()
    val propsZk = TestUtils.createBrokerConfig(brokerZk, portZk)
    val logDirZkPath = propsZk.getProperty("log.dir")
    // Remember the broker's log directory so tearDown can delete it.
    logDirZk = new File(logDirZkPath)
    config = new KafkaConfig(propsZk)
    server = TestUtils.createServer(config);
    simpleConsumerZk = new SimpleConsumer("localhost", portZk, 1000000, 64*1024, "")
  }
  @After
  override def tearDown() {
    // Tear down in reverse order of creation, then remove the on-disk logs.
    simpleConsumerZk.close
    server.shutdown
    Utils.rm(logDirZk)
    super.tearDown()
  }
  @Test
  def testKafkaLog4jConfigs() {
    // Each configuration below omits one mandatory appender setting and
    // expects PropertyConfigurator to surface a MissingConfigException.
    var props = new Properties()
    props.put("log4j.rootLogger", "INFO")
    props.put("log4j.appender.KAFKA", "kafka.producer.KafkaLog4jAppender")
    props.put("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout")
    props.put("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n")
    props.put("log4j.appender.KAFKA.Topic", "test-topic")
    props.put("log4j.appender.KAFKA.SerializerClass", "kafka.log4j.AppenderStringEncoder")
    props.put("log4j.logger.kafka.log4j", "INFO, KAFKA")
    // port missing
    // NOTE(review): this block and the next are byte-identical — both simply
    // omit brokerList — so "port missing" and "host missing" exercise the
    // same case; confirm whether distinct host-only/port-only configurations
    // were intended here.
    try {
      PropertyConfigurator.configure(props)
      fail("Missing properties exception was expected !")
    } catch {
      case e: MissingConfigException =>
    }
    props = new Properties()
    props.put("log4j.rootLogger", "INFO")
    props.put("log4j.appender.KAFKA", "kafka.producer.KafkaLog4jAppender")
    props.put("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout")
    props.put("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n")
    props.put("log4j.appender.KAFKA.Topic", "test-topic")
    props.put("log4j.appender.KAFKA.SerializerClass", "kafka.log4j.AppenderStringEncoder")
    props.put("log4j.logger.kafka.log4j", "INFO, KAFKA")
    // host missing
    try {
      PropertyConfigurator.configure(props)
      fail("Missing properties exception was expected !")
    } catch {
      case e: MissingConfigException =>
    }
    props = new Properties()
    props.put("log4j.rootLogger", "INFO")
    props.put("log4j.appender.KAFKA", "kafka.producer.KafkaLog4jAppender")
    props.put("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout")
    props.put("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n")
    props.put("log4j.appender.KAFKA.SerializerClass", "kafka.log4j.AppenderStringEncoder")
    props.put("log4j.appender.KAFKA.brokerList", TestUtils.getBrokerListStrFromConfigs(Seq(config)))
    props.put("log4j.logger.kafka.log4j", "INFO, KAFKA")
    // topic missing
    try {
      PropertyConfigurator.configure(props)
      fail("Missing properties exception was expected !")
    } catch {
      case e: MissingConfigException =>
    }
    props = new Properties()
    props.put("log4j.rootLogger", "INFO")
    props.put("log4j.appender.KAFKA", "kafka.producer.KafkaLog4jAppender")
    props.put("log4j.appender.KAFKA.layout", "org.apache.log4j.PatternLayout")
    props.put("log4j.appender.KAFKA.layout.ConversionPattern", "%-5p: %c - %m%n")
    props.put("log4j.appender.KAFKA.brokerList", TestUtils.getBrokerListStrFromConfigs(Seq(config)))
    props.put("log4j.appender.KAFKA.Topic", "test-topic")
    props.put("log4j.logger.kafka.log4j", "INFO, KAFKA")
    // serializer missing: this one must NOT throw — the appender falls back
    // to kafka.serializer.StringEncoder.
    try {
      PropertyConfigurator.configure(props)
    } catch {
      case e: MissingConfigException => fail("should default to kafka.serializer.StringEncoder")
    }
  }
  @Test
  def testLog4jAppends() {
    // Log five lines through the appender, then fetch from offset 0 and
    // count the messages that reached the topic.
    PropertyConfigurator.configure(getLog4jConfig)
    for(i <- 1 to 5)
      info("test")
    val response = simpleConsumerZk.fetch(new FetchRequestBuilder().addFetch("test-topic", 0, 0L, 1024*1024).build())
    val fetchMessage = response.messageSet("test-topic", 0)
    var count = 0
    for(message <- fetchMessage) {
      count = count + 1
    }
    assertEquals(5, count)
  }
  // Builds a complete, valid appender configuration pointed at the test broker.
  private def getLog4jConfig: Properties = {
    var props = new Properties()
    props.put("log4j.rootLogger", "INFO")
    props.put("log4j.appender.KAFKA", "kafka.producer.KafkaLog4jAppender")
    props.put("log4j.appender.KAFKA.layout","org.apache.log4j.PatternLayout")
    props.put("log4j.appender.KAFKA.layout.ConversionPattern","%-5p: %c - %m%n")
    props.put("log4j.appender.KAFKA.brokerList", TestUtils.getBrokerListStrFromConfigs(Seq(config)))
    props.put("log4j.appender.KAFKA.Topic", "test-topic")
    props.put("log4j.logger.kafka.log4j", "INFO,KAFKA")
    props.put("log4j.appender.KAFKA.requiredNumAcks", "1")
    props
  }
}
/** Encoder used by the log4j config tests: serializes a log4j event's
  * rendered message with the configured charset (UTF-8 by default).
  */
class AppenderStringEncoder(encoding: String = "UTF-8") extends Encoder[LoggingEvent] {
  def toBytes(event: LoggingEvent): Array[Byte] = {
    val message = event.getMessage.toString
    message.getBytes(encoding)
  }
}
| unix1986/universe | tool/kafka-0.8.1.1-src/core/src/test/scala/unit/kafka/log4j/KafkaLog4jAppenderTest.scala | Scala | bsd-2-clause | 6,470 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Statistics}
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.execution.SparkPlan
/**
* Logical plan node for holding data from a command.
*
* `commandLogicalPlan` and `commandPhysicalPlan` are just used to display the plan tree
* for EXPLAIN.
* `rows` may not be serializable and ideally we should not send `rows` to the executors.
* Thus marking them as transient.
*/
case class CommandResult(
    output: Seq[Attribute],
    @transient commandLogicalPlan: LogicalPlan,
    @transient commandPhysicalPlan: SparkPlan,
    @transient rows: Seq[InternalRow]) extends LeafNode {
  // Surface the command's logical plan as an inner child so EXPLAIN shows it.
  override def innerChildren: Seq[QueryPlan[_]] = Seq(commandLogicalPlan)

  // Size estimate: per-row size derived from the output schema times row count.
  override def computeStats(): Statistics =
    Statistics(sizeInBytes = EstimationUtils.getSizePerRow(output) * rows.length)
}
| cloud-fan/spark | sql/core/src/main/scala/org/apache/spark/sql/expressions/CommandResult.scala | Scala | apache-2.0 | 1,929 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.regression
import scala.util.Random
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.util.{LinearDataGenerator, LocalClusterSparkContext,
MLlibTestSparkContext}
import org.apache.spark.util.Utils
// Shared fixture for the model save/load round-trip test below.
private object LassoSuite {
  /** 3 features */
  val model = new LassoModel(weights = Vectors.dense(0.1, 0.2, 0.3), intercept = 0.5)
}
class LassoSuite extends SparkFunSuite with MLlibTestSparkContext {

  /** Asserts that at least 80% of `predictions` fall within 0.5 of the
    * corresponding labels in `input`. */
  def validatePrediction(predictions: Seq[Double], input: Seq[LabeledPoint]) {
    val numOffPredictions = predictions.zip(input).count { case (prediction, expected) =>
      // A prediction is off if the prediction is more than 0.5 away from expected value.
      math.abs(prediction - expected.label) > 0.5
    }
    // At least 80% of the predictions should be on.
    assert(numOffPredictions < input.length / 5)
  }

  test("Lasso local random SGD") {
    val nPoints = 1000

    // True generating model: label ~ A + B*x1 + C*x2. C is near zero, so the
    // L1 penalty should drive its learned weight toward zero. A 1.0 column is
    // prepended so the intercept is learned as weight0.
    val A = 2.0
    val B = -1.5
    val C = 1.0e-2

    val testData = LinearDataGenerator.generateLinearInput(A, Array[Double](B, C), nPoints, 42)
      .map { case LabeledPoint(label, features) =>
      LabeledPoint(label, Vectors.dense(1.0 +: features.toArray))
    }
    val testRDD = sc.parallelize(testData, 2).cache()

    val ls = new LassoWithSGD()
    ls.optimizer.setStepSize(1.0).setRegParam(0.01).setNumIterations(40)

    val model = ls.run(testRDD)
    val weight0 = model.weights(0)
    val weight1 = model.weights(1)
    val weight2 = model.weights(2)
    // Loose tolerances: SGD is stochastic, so only check each weight's band.
    assert(weight0 >= 1.9 && weight0 <= 2.1, weight0 + " not in [1.9, 2.1]")
    assert(weight1 >= -1.60 && weight1 <= -1.40, weight1 + " not in [-1.6, -1.4]")
    assert(weight2 >= -1.0e-3 && weight2 <= 1.0e-3, weight2 + " not in [-0.001, 0.001]")

    // Validation set drawn with a different seed (17) than training (42).
    val validationData = LinearDataGenerator
      .generateLinearInput(A, Array[Double](B, C), nPoints, 17)
      .map { case LabeledPoint(label, features) =>
      LabeledPoint(label, Vectors.dense(1.0 +: features.toArray))
    }
    val validationRDD = sc.parallelize(validationData, 2)

    // Test prediction on RDD.
    validatePrediction(model.predict(validationRDD.map(_.features)).collect(), validationData)

    // Test prediction on Array.
    validatePrediction(validationData.map(row => model.predict(row.features)), validationData)
  }

  test("Lasso local random SGD with initial weights") {
    val nPoints = 1000

    val A = 2.0
    val B = -1.5
    val C = 1.0e-2

    val testData = LinearDataGenerator.generateLinearInput(A, Array[Double](B, C), nPoints, 42)
      .map { case LabeledPoint(label, features) =>
      LabeledPoint(label, Vectors.dense(1.0 +: features.toArray))
    }

    // Deliberately poor starting point: optimizer must still converge.
    val initialA = -1.0
    val initialB = -1.0
    val initialC = -1.0
    val initialWeights = Vectors.dense(initialA, initialB, initialC)

    val testRDD = sc.parallelize(testData, 2).cache()

    val ls = new LassoWithSGD()
    ls.optimizer.setStepSize(1.0).setRegParam(0.01).setNumIterations(40).setConvergenceTol(0.0005)

    val model = ls.run(testRDD, initialWeights)
    val weight0 = model.weights(0)
    val weight1 = model.weights(1)
    val weight2 = model.weights(2)
    assert(weight0 >= 1.9 && weight0 <= 2.1, weight0 + " not in [1.9, 2.1]")
    assert(weight1 >= -1.60 && weight1 <= -1.40, weight1 + " not in [-1.6, -1.4]")
    assert(weight2 >= -1.0e-3 && weight2 <= 1.0e-3, weight2 + " not in [-0.001, 0.001]")

    val validationData = LinearDataGenerator
      .generateLinearInput(A, Array[Double](B, C), nPoints, 17)
      .map { case LabeledPoint(label, features) =>
      LabeledPoint(label, Vectors.dense(1.0 +: features.toArray))
    }
    val validationRDD = sc.parallelize(validationData, 2)

    // Test prediction on RDD.
    validatePrediction(model.predict(validationRDD.map(_.features)).collect(), validationData)

    // Test prediction on Array.
    validatePrediction(validationData.map(row => model.predict(row.features)), validationData)
  }

  test("model save/load") {
    val model = LassoSuite.model

    val tempDir = Utils.createTempDir()
    val path = tempDir.toURI.toString

    // Save model, load it back, and compare.
    try {
      model.save(sc, path)
      val sameModel = LassoModel.load(sc, path)
      assert(model.weights == sameModel.weights)
      assert(model.intercept == sameModel.intercept)
    } finally {
      Utils.deleteRecursively(tempDir)
    }
  }
}
/** Runs Lasso against a local-cluster master to catch oversized task closures. */
class LassoClusterSuite extends SparkFunSuite with LocalClusterSparkContext {

  test("task size should be small in both training and prediction") {
    val m = 4
    // 200k features per point makes any accidental closure capture obvious.
    val n = 200000
    val points = sc.parallelize(0 until m, 2).mapPartitionsWithIndex { (idx, iter) =>
      val random = new Random(idx)
      iter.map(i => LabeledPoint(1.0, Vectors.dense(Array.fill(n)(random.nextDouble()))))
    }.cache()
    // If we serialize data directly in the task closure, the size of the serialized task would be
    // greater than 1MB and hence Spark would throw an error.
    val model = LassoWithSGD.train(points, 2)
    val predictions = model.predict(points.map(_.features))
  }
}
| wangyixiaohuihui/spark2-annotation | mllib/src/test/scala/org/apache/spark/mllib/regression/LassoSuite.scala | Scala | apache-2.0 | 6,074 |
/*
* Task.scala
*
* Copyright (c) 2014 Ronald Kurniawan. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301 USA
*/
package net.fluxo.dd.dbo
/**
* Data Object for representing a Task (active or finished) on the database.
*
* @author Ronald Kurniawan (viper)
* @version 0.4.5, 5/03/14
*/
/**
 * Mutable data object describing one download task (active or finished) as
 * stored in the database. Optional fields start as `None`; numeric fields
 * start at 0; flags start as `false`. Setters wrap their argument in `Some`.
 */
class Task {
	// aria2 GID identifying the task.
	private var _gid: Option[String] = None
	def TaskGID: Option[String] = _gid
	def TaskGID_=(value: String): Unit = { _gid = Some(value) }

	// GID of the trailing/follow-up task, if any.
	private var _tailGID: Option[String] = None
	def TaskTailGID: Option[String] = _tailGID
	def TaskTailGID_=(value: String): Unit = { _tailGID = Some(value) }

	// Original input (URI or similar) that started the task.
	private var _input: Option[String] = None
	def TaskInput: Option[String] = _input
	def TaskInput_=(value: String): Unit = { _input = Some(value) }

	// Start timestamp (epoch millis by convention; 0 = unset).
	private var _started: Long = 0
	def TaskStarted: Long = _started
	def TaskStarted_=(value: Long): Unit = { _started = value }

	// End timestamp (0 = unset).
	private var _ended: Long = 0
	def TaskEnded: Long = _ended
	def TaskEnded_=(value: Long): Unit = { _ended = value }

	// Completion flag.
	private var _completed: Boolean = false
	def IsTaskCompleted: Boolean = _completed
	def IsTaskCompleted_=(value: Boolean): Unit = { _completed = value }

	// User that owns the task.
	private var _owner: Option[String] = None
	def TaskOwner: Option[String] = _owner
	def TaskOwner_=(value: String): Unit = { _owner = Some(value) }

	// Name of the resulting package/file.
	private var _package: Option[String] = None
	def TaskPackage: Option[String] = _package
	def TaskPackage_=(value: String): Unit = { _package = Some(value) }

	// Current status string.
	private var _status: Option[String] = None
	def TaskStatus: Option[String] = _status
	def TaskStatus_=(value: String): Unit = { _status = Some(value) }

	// Total download length in bytes.
	private var _totalLength: Long = 0
	def TaskTotalLength: Long = _totalLength
	def TaskTotalLength_=(value: Long): Unit = { _totalLength = value }

	// Bytes downloaded so far.
	private var _completedLength: Long = 0
	def TaskCompletedLength: Long = _completedLength
	def TaskCompletedLength_=(value: Long): Unit = { _completedLength = value }

	// Torrent info hash, when applicable.
	private var _infoHash: Option[String] = None
	def TaskInfoHash: Option[String] = _infoHash
	def TaskInfoHash_=(value: String): Unit = { _infoHash = Some(value) }

	// True when this is a plain HTTP download rather than a torrent.
	private var _isHttp: Boolean = false
	def TaskIsHttp: Boolean = _isHttp
	def TaskIsHttp_=(value: Boolean): Unit = { _isHttp = value }

	// Credentials for authenticated HTTP downloads.
	private var _httpUsername: Option[String] = None
	def TaskHttpUsername: Option[String] = _httpUsername
	def TaskHttpUsername_=(value: String): Unit = { _httpUsername = Some(value) }

	private var _httpPassword: Option[String] = None
	def TaskHttpPassword: Option[String] = _httpPassword
	def TaskHttpPassword_=(value: String): Unit = { _httpPassword = Some(value) }
}
| fluxodesign/DownloadDaemon | src/main/scala/net/fluxo/dd/dbo/Task.scala | Scala | gpl-2.0 | 3,259 |
/* Code Pulse: a real-time code coverage tool, for more information, see <http://code-pulse.com/>
*
* Copyright (C) 2014-2017 Code Dx, Inc. <https://codedx.com/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.secdec.bytefrog.agent.message.test
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.OutputStream
import java.net.Socket
import java.net.SocketImpl
import org.scalatest.FunSpec
import org.scalatest.concurrent.AsyncAssertions
import org.scalatest._
import org.scalatest.Matchers._
import org.scalamock.scalatest.MockFactory
import com.codedx.codepulse.agent.init.DataConnectionHandshake
import com.codedx.codepulse.agent.message.MessageSenderManager
import com.secdec.bytefrog.agent.util.ControlSimulation
import com.secdec.bytefrog.agent.util.MockHelpers
import com.codedx.codepulse.agent.util.SocketFactory
/**
 * Specs for MessageSenderManager.start: it must fail when the socket factory
 * cannot connect or the data-connection handshake fails, succeed when both
 * work, and refuse to start twice.
 */
class SenderManagerV1Spec extends FunSpec with Matchers with MockFactory with AsyncAssertions with ControlSimulation with MockHelpers {

	// A socket whose streams are inert in-memory buffers; used where the test
	// only needs "some socket" to hand to the handshake.
	class UselessSocket extends Socket {
		val in = new ByteArrayInputStream(Array())
		val out = new ByteArrayOutputStream

		override def getInputStream = in
		override def getOutputStream = out
	}

	// Concrete subclass so ScalaMock can stub SocketFactory.connect.
	class MockableSocketFactory extends SocketFactory("localhost", 8765)

	// Socket backed by caller-supplied streams (unused by the specs below;
	// kept for other tests in this suite).
	class PretendSocket(in: InputStream, out: OutputStream) extends Socket {
		override def getInputStream = in
		override def getOutputStream = out
	}

	describe("MessageSenderManager.start") {
		it("should fail if the socket factory fails to connect to a socket") {
			// connect() returning null simulates a connection failure.
			val sf = mock[MockableSocketFactory]
			(sf.connect _).expects().anyNumberOfTimes.returning(null)

			val m = new MessageSenderManager(sf, mock[DataConnectionHandshake], mock[BufferPoolMockable], 3, 1)
			try {
				m.start should equal(false)
			} finally {
				m.shutdown
			}
		}

		it("should fail if the handshake fails") {
			val sf = mock[MockableSocketFactory]
			(sf.connect _).expects().anyNumberOfTimes.returning { new UselessSocket }

			// Connection succeeds but the protocol handshake is rejected.
			val handshaker = mock[DataConnectionHandshake]
			(handshaker.performHandshake _).expects(*, *).anyNumberOfTimes.returning(false)

			val m = new MessageSenderManager(sf, handshaker, mock[BufferPoolMockable], 3, 1)
			try {
				m.start should equal(false)
			} finally {
				m.shutdown
			}
		}

		it("should succeed when the socket handshake works") {
			val sf = mock[MockableSocketFactory]
			(sf.connect _).expects().anyNumberOfTimes.returning { new UselessSocket }

			val handshaker = mock[DataConnectionHandshake]
			(handshaker.performHandshake _).expects(*, *).anyNumberOfTimes.returning(true)

			val m = new MessageSenderManager(sf, handshaker, mock[BufferPoolMockable], 3, 1)
			try {
				m.start should equal(true)
			} finally {
				m.shutdown
			}
		}

		it("should return false when called after the first time") {
			val sf = mock[MockableSocketFactory]
			(sf.connect _).expects().anyNumberOfTimes.returning { new UselessSocket }

			val handshaker = mock[DataConnectionHandshake]
			(handshaker.performHandshake _).expects(*, *).anyNumberOfTimes.returning(true)

			val m = new MessageSenderManager(sf, handshaker, mock[BufferPoolMockable], 3, 1)
			try {
				// start is one-shot: the second invocation must be rejected.
				m.start should equal(true)
				m.start should equal(false)
			} finally {
				m.shutdown
			}
		}
	}
} | secdec/codepulse | agent/src/test/scala/com/secdec/bytefrog/agent/message/test/SenderManagerV1Spec.scala | Scala | apache-2.0 | 3,825 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.provenance
import slamdata.Predef._
import quasar.fp.ski.ι
import matryoshka._
import scalaz._, Scalaz._
/**
 * Operations on stacks of provenance dimensions. A `Dimensions[P]` is an
 * `IList` whose head is the most recent dimension; several operations
 * therefore reverse before aligning so that stacks line up at the base.
 */
trait Dimension[D, I, P] {
  val prov: Prov[D, I, P]

  import prov._

  /** Returns the `JoinKeys` describing the autojoin of the two dimension stacks. */
  def autojoinKeys(ls: Dimensions[P], rs: Dimensions[P])(implicit D: Equal[D]): JoinKeys[I] =
    // Align from the base of each stack (hence the reversals) and collect
    // the join keys of each aligned pair.
    ls.reverse.fzipWith(rs.reverse)(joinKeys).fold

  /** The empty dimension stack. */
  val empty: Dimensions[P] =
    IList[P]()

  // Normalizes every dimension in the stack.
  def canonicalize(ds: Dimensions[P])(implicit eqD: Equal[D], eqI: Equal[I]): Dimensions[P] =
    ds.map(normalize)

  /** Updates the dimensional stack by sequencing a new dimension from value
    * space with the current head dimension.
    */
  def flatten(id: I, ds: Dimensions[P]): Dimensions[P] =
    nest(lshift(id, ds))

  /** Joins two dimensions into a single dimension stack, starting from the base. */
  def join(ls: Dimensions[P], rs: Dimensions[P])(implicit eqD: Equal[D], eqI: Equal[I]): Dimensions[P] =
    canonicalize(alignRtoL(ls, rs)(ι, ι, both(_, _)))

  /** Shifts the dimensional stack by pushing a new dimension from value space
    * onto the stack.
    */
  def lshift(id: I, ds: Dimensions[P]): Dimensions[P] =
    value(id) :: ds

  /** Sequences the first and second dimensions. */
  def nest(ds: Dimensions[P]): Dimensions[P] =
    // No-op on an empty stack; otherwise fold the head into the next entry.
    ds.toNel.fold(ds)(nel => extend[P](thenn(_, _), nel.head)(nel.tail))

  /** Project a static key/index from maps and arrays. */
  def project(field: D, ds: Dimensions[P]): Dimensions[P] =
    extend[P](thenn(_, _), proj(field))(ds)

  /** Reduces the dimensional stack by peeling off the current dimension. */
  def reduce(ds: Dimensions[P]): Dimensions[P] =
    ds drop 1

  /** Collapses all dimensions into a single one. */
  def squash(ds: Dimensions[P]): Dimensions[P] =
    ds.toNel.fold(ds)(nel => IList(nel.foldRight1(thenn(_, _))))

  /** Swaps the dimensions at the nth and mth indices. */
  def swap(idxN: Int, idxM: Int, ds: Dimensions[P]): Dimensions[P] = {
    // Order the indices so we always walk forward: n <= m.
    val n = if (idxN < idxM) idxN else idxM
    val m = if (idxM > idxN) idxM else idxN

    // Navigate a zipper to position n, remember the focus, move on to m,
    // write the remembered value there, then walk back and write m's old
    // focus at n. Any move out of bounds yields None.
    val swapped = for {
      z0 <- ds.toZipper
      z0n <- z0.move(n)
      vn = z0n.focus
      z0m <- z0n.move(m - n)
      vm = z0m.focus
      z1 = z0m.update(vn)
      z1n <- z1.move(n - m)
      z2 = z1n.update(vm)
    } yield z2.toIList

    // If either index was out of range, leave the stack untouched.
    swapped getOrElse ds
  }

  /** Unions the two dimensions into a single dimensional stack, starting from the base. */
  def union(ls: Dimensions[P], rs: Dimensions[P])(implicit eqD: Equal[D], eqI: Equal[I]): Dimensions[P] =
    // Where one side is shorter, the missing entry is treated as `nada()`.
    canonicalize(alignRtoL(ls, rs)(oneOf(_, nada()), oneOf(nada(), _), oneOf(_, _)))

  ////

  // Aligns two stacks base-first (reversing, aligning, reversing back),
  // dispatching on whether an entry exists only on the left, only on the
  // right, or on both.
  private def alignRtoL[A]
    (ls: Dimensions[P], rs: Dimensions[P])
    (ths: P => A, tht: P => A, bth: (P, P) => A)
      : Dimensions[A] =
    ls.reverse.alignWith(rs.reverse)(_.fold(ths, tht, bth)).reverse

  // Combines `a` into the head of the list with `f`, or starts a new
  // single-element list when it is empty.
  private def extend[A](f: (A, A) => A, a: A): IList[A] => IList[A] =
    _.toNel.fold(IList(a))(nel => f(a, nel.head) :: nel.tail)
}
/** Factory for [[Dimension]] instances. */
object Dimension {
  /** Builds a `Dimension` backed by the supplied provenance algebra. */
  def apply[D, I, P](prov0: Prov[D, I, P]): Dimension[D, I, P] =
    new Dimension[D, I, P] {
      val prov: Prov[D, I, P] = prov0
    }
}
| jedesah/Quasar | connector/src/main/scala/quasar/qscript/provenance/Dimension.scala | Scala | apache-2.0 | 3,807 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.tools
import org.scalatest._
import java.lang.reflect.Modifier
import java.util.Enumeration
import java.util.jar.JarFile
import java.util.jar.JarEntry
import scala.collection.mutable
import java.io.File
import java.net.URL
import java.net.MalformedURLException
import java.io.IOException
import java.util.regex.Pattern
import scala.collection.mutable.ListBuffer
/**
* Discovers Suites on the runpath.
*
* @author Bill Venners
*/
private[scalatest] object SuiteDiscoveryHelper {

  //
  // Finds Suites containing specified tests.
  //
  // Tests are specified either by name or substring. This method
  // searches all the accessibleSuites for matching tests, and
  // returns a SuiteParam for each matching Suite found.
  //
  def discoverTests(testSpecs: List[TestSpec],
                    accessibleSuites: Set[String],
                    loader: ClassLoader): List[SuiteParam] =
  {
    val buf = new ListBuffer[SuiteParam]

    if (!testSpecs.isEmpty) {
      // Split the specs into exact test names and substring patterns.
      val names: Set[String] =
        testSpecs.filter(_.isSubstring == false).map(_.spec).toSet
      val substrings: Set[String] =
        testSpecs.filter(_.isSubstring == true).map(_.spec).toSet

      for (suiteName <- accessibleSuites) {
        // Instantiate the suite so its test names can be inspected.
        val suiteInstance: Suite =
          DiscoverySuite.getSuiteInstance(suiteName, loader)

        val nameMatches: Set[String] =
          names.intersect(suiteInstance.testNames)

        val substringMatches: Set[String] =
          substrings.filter(substring =>
            suiteInstance.testNames.exists(_.contains(substring)))

        if ((nameMatches.size > 0) || (substringMatches.size > 0))
          buf += SuiteParam(suiteName,
                            nameMatches.toList.sortWith(_<_).toArray,
                            substringMatches.toList.sortWith(_<_).toArray,
                            Array.empty)
      }
    }
    // Sort for a deterministic result ordering by class name.
    buf.toList.sortWith(_.className<_.className)
  }

  //
  // Scans the runpath (directories and jar files, given as paths or URLs)
  // and returns fully qualified names of all Suite classes found that are
  // discoverable and either accessible or runnable, optionally restricted
  // by class-name suffix patterns.
  //
  def discoverSuiteNames(runpath: List[String], loader: ClassLoader,
                         suffixes: Option[Pattern]): Set[String] =
  {
    // NOTE(review): reads "path.separator" where "file.separator" was
    // probably intended -- see the comment above transformToClassName below.
    // The same separator is used consistently for composing and decomposing
    // names, so the final results are unaffected.
    val fileSeparatorString = System.getProperty("path.separator")
    val fileSeparator = if (!fileSeparatorString.isEmpty) fileSeparatorString(0) else ':'

    // Attempts to obtain a JarFile from the content of a URL connection;
    // None if the content is null or not a JarFile.
    def getJarFileFromURL(url: URL): Option[JarFile] = {
      val o = url.openConnection().getContent()
      if (o != null) {
        try {
          Some(o.asInstanceOf[JarFile])
        }
        catch {
          case e: ClassCastException => None
        }
      }
      else {
        None
      }
    }

    // Opens a jar file directly from a file-system path; None on I/O failure.
    def getJarFileFromFileSystem(path: String): Option[JarFile] = {
      try {
        Some(new JarFile(path))
      }
      catch {
        case e: IOException => None
      }
    }

    // One set of discovered class names per runpath entry.
    val listOfSets: List[Set[String]] =
      for (path <- runpath)
      yield {
        val urlOption =
          try {
            Some(new URL(path))
          }
          catch {
            case e: MalformedURLException => None
          }

        val endsWithDotJar = path.endsWith(".jar")

        if (endsWithDotJar) {
          // Jar entries always use '/' as their separator.
          val jarFileOption =
            urlOption match {
              case Some(url) => getJarFileFromURL(url)
              case None => getJarFileFromFileSystem(path)
            }

          jarFileOption match {
            case Some(jf) => processFileNames(getFileNamesIteratorFromJar(jf), '/', loader, suffixes)
            case None => Set[String]()
          }
        }
        else {
          processFileNames(getFileNamesSetFromFile(new File(path), fileSeparator).iterator, fileSeparator, loader, suffixes)
        }
      }

    Set() ++ listOfSets.flatMap(_.iterator.toList)
  }

  //
  // Given a file name composed using specified separator, converts name to
  // corresponding class name. E.g., for separator '/':
  //
  //   org/scalatest/fixture/FixtureFunSuiteSpec.class
  //
  //     -> org.scalatest.fixture.FixtureFunSuiteSpec
  //
  // Returns None if file name doesn't end in '.class'.
  //
  // (Typically we compose file names using ':' instead of '/', but
  // that's probably just a mistake where path.separator got used instead
  // of file.separator and doesn't affect how things turn out.)
  //
  private def transformToClassName(fileName: String, fileSeparator: Char): Option[String] = {
    // If the fileName starts with a file separator char, lop that off
    val fn =
      if (!fileName.isEmpty && fileName(0) == fileSeparator)
        fileName.substring(1)
      else
        fileName

    // ".class" alone (no class name before the extension) is rejected.
    if (fn.endsWith(".class") && fn != ".class")
      Some(fn.substring(0, fn.length - 6).replace(fileSeparator, '.'))
    else
      None
  }

  // Shared empty parameter-type array for looking up no-arg constructors.
  private val emptyClassArray = new Array[java.lang.Class[_]](0)

  // True when the class is a public, concrete subclass of Suite with a
  // public no-arg constructor. False (rather than an exception) when no
  // such constructor exists or security checks forbid the lookup.
  private[scalatest] def isAccessibleSuite(clazz: java.lang.Class[_]): Boolean = {
    try {
      classOf[Suite].isAssignableFrom(clazz) &&
        Modifier.isPublic(clazz.getModifiers) &&
        !Modifier.isAbstract(clazz.getModifiers) &&
        Modifier.isPublic(clazz.getConstructor(emptyClassArray: _*).getModifiers)
    }
    catch {
      case nsme: NoSuchMethodException => false
      case se: SecurityException => false
    }
  }

  // Name-based overload: false if the class cannot be loaded at all.
  private[scalatest] def isAccessibleSuite(className: String, loader: ClassLoader): Boolean = {
    try {
      isAccessibleSuite(loader.loadClass(className))
    }
    catch {
      case e: ClassNotFoundException => false
      case e: NoClassDefFoundError => false
    }
  }

  // A class opts out of discovery via the @DoNotDiscover annotation.
  private[scalatest] def isDiscoverableSuite(clazz: java.lang.Class[_]): Boolean = {
    !clazz.isAnnotationPresent(classOf[DoNotDiscover])
  }

  // Name-based overload: false if the class cannot be loaded at all.
  private def isDiscoverableSuite(className: String, loader: ClassLoader): Boolean = {
    try {
      isDiscoverableSuite(loader.loadClass(className))
    }
    catch {
      case e: ClassNotFoundException => false
      case e: NoClassDefFoundError => false
    }
  }

  // True when the class is annotated with @WrapWith and the wrapper suite
  // class declares a single-argument constructor taking a Class.
  private[scalatest] def isRunnable(clazz: java.lang.Class[_]): Boolean = {
    val wrapWithAnnotation = clazz.getAnnotation(classOf[WrapWith])
    if (wrapWithAnnotation != null) {
      val wrapperSuiteClazz = wrapWithAnnotation.value
      val constructorList = wrapperSuiteClazz.getDeclaredConstructors()
      constructorList.exists { c =>
        val types = c.getParameterTypes
        types.length == 1 && types(0) == classOf[java.lang.Class[_]]
      }
    }
    else
      false
  }

  // Name-based overload: false if the class cannot be loaded at all.
  private[scalatest] def isRunnable(className: String, loader: ClassLoader): Boolean = {
    try {
      isRunnable(loader.loadClass(className))
    }
    catch {
      case e: ClassNotFoundException => false
      case e: NoClassDefFoundError => false
    }
  }

  //
  // Determines whether specified class is to be included in
  // test run.
  //
  // Returns Some(<class name>) if processed, else None
  private def processClassName(className: String, loader: ClassLoader, suffixes: Option[Pattern]): Option[String] = {
    if (classNameSuffixOkay(className, suffixes) && isDiscoverableSuite(className, loader)
        &&
        (isAccessibleSuite(className, loader) || isRunnable(className, loader)))
      Some(className)
    else
      None
  }

  //
  // Determines whether class should be included in test based
  // on whether its class name matches one of the suffixes
  // specified by user.
  //
  // Users may specify that only classes whose names end with
  // specified suffixes be included in test.
  //
  private def classNameSuffixOkay(className: String,
                                  suffixes: Option[Pattern]): Boolean =
  {
    (suffixes == None) ||
      suffixes.get.matcher(className).matches
  }

  //
  // Scans specified files and returns names of classes to
  // be included in test run.
  //
  // Extracts class names from the file names of .class files
  // specified by the passed-in iterator, and returns those
  // classes found that are to be included in run.
  //
  private def processFileNames(fileNames: Iterator[String], fileSeparator: Char, loader: ClassLoader,
                               suffixes: Option[Pattern]): Set[String] =
  {
    val classNameOptions = // elements are Some(<class name>) if processed, else None
      for (className <- extractClassNames(fileNames, fileSeparator))
      yield processClassName(className, loader, suffixes)

    val classNames =
      for (Some(className) <- classNameOptions)
      yield className

    Set[String]() ++ classNames.toIterable
  }

  // Recursively walks a directory and returns the names of all files found,
  // relative to the given root and composed with fileSeparator. For a plain
  // file, returns just its name.
  private def getFileNamesSetFromFile(file: File, fileSeparator: Char): Set[String] = {

    def prependPrevName(prevName: String, fileName: String) = prevName + fileSeparator + fileName

    def listFilesInDir(dir: File, prevName: String): List[String] = {
      if (!dir.isDirectory)
        throw new IllegalArgumentException

      val subDirs = for (entry <- dir.listFiles.toList; if entry.isDirectory) yield entry
      val fileLists: List[List[String]] =
        for (subDir <- subDirs)
        yield listFilesInDir(subDir, prependPrevName(prevName, subDir.getName))

      val files: List[String] =
        for (entry <- dir.listFiles.toList; if !entry.isDirectory)
        yield prependPrevName(prevName, entry.getName)

      files ::: fileLists.flatMap(e => e)
    }

    val allFiles = if (file.isDirectory)
      listFilesInDir(file, "")
    else
      List(file.getName)

    // Strip any leading separator left over from the recursion root.
    Set() ++ allFiles.map(fn => if (!fn.isEmpty && fn(0) == fileSeparator) fn.substring(1) else fn)
  }

  // Adapts a jar file's entry Enumeration to an Iterator of entry names.
  private def getFileNamesIteratorFromJar(file: JarFile): Iterator[String] = {

    class EnumerationWrapper[T](e: Enumeration[T]) extends Iterator[T] {
      def next(): T = e.nextElement
      def hasNext: Boolean = e.hasMoreElements
    }

    new EnumerationWrapper[JarEntry](file.entries).map(_.getName)
  }

  //
  // Given a fileNames iterator, returns an iterator of class names
  // corresponding to .class files found.
  //
  private def extractClassNames(fileNames: Iterator[String], fileSeparator: Char): Iterator[String] = {

    val options =
      for (fileName <- fileNames) yield
        transformToClassName(fileName, fileSeparator)

    for (Some(className) <- options) yield
      className
  }
}
| scalatest/scalatest | jvm/core/src/main/scala/org/scalatest/tools/SuiteDiscoveryHelper.scala | Scala | apache-2.0 | 10,815 |
package org.scalafmt.rewrite
import org.scalafmt.util.TokenOps
import scala.meta.tokens.Token.{
LF,
LeftBrace,
LeftParen,
RightBrace,
RightParen
}
import scala.meta.{Tree, _}
case object AvoidInfix extends Rewrite {

  // In a perfect world, we could just use
  // Tree.transform {
  //   case t: Term.ApplyInfix => Term.Apply(Term.Select(t.lhs, t.op), t.args)
  // }
  // and be done with it. However, until transform becomes token aware (see https://github.com/scalameta/scalameta/pull/457)
  // we will do these dangerous rewritings by hand.
  override def rewrite(code: Tree, ctx: RewriteCtx): Seq[Patch] = {
    val matcher = ctx.style.rewrite.neverInfix.toMatcher
    code.collect {
      // Only rewrite infix applications whose operator matches the configured
      // "never infix" set.
      case Term.ApplyInfix(lhs, op, _, args) if matcher.matches(op.value) =>
        val fstOpToken = op.tokens.head

        // Insert a '.' before the operator: `a op b` -> `a.op b`.
        val selectorToBeAdded = Seq(
          TokenPatch.AddLeft(fstOpToken, ".", keepTok = true)
        )

        val fstArgsToken = args.head.tokens.head
        val lastArgsToken = args.last.tokens.last

        val fstIsNotLeftParenAndBrace = fstArgsToken
          .isNot[LeftParen] && fstArgsToken
          .isNot[LeftBrace]
        val lastIsNotRightParenAndBrace = lastArgsToken
          .isNot[RightParen] && lastArgsToken
          .isNot[RightBrace]
        val isSingleArg = args.size == 1

        // Wrap a single bare argument in parentheses: `a.op b` -> `a.op(b)`.
        // Arguments already delimited by parens or braces are left alone.
        val selectorParensToBeAdded =
          if (isSingleArg && (fstIsNotLeftParenAndBrace || lastIsNotRightParenAndBrace))
            Seq(TokenPatch.AddLeft(fstArgsToken, "(", keepTok = true),
                TokenPatch.AddRight(lastArgsToken, ")", keepTok = true))
          else
            Nil

        // If the lhs is itself an infix application that will NOT be
        // rewritten, parenthesize it so the inserted '.' applies to the whole
        // expression rather than just its last operand.
        val lhsParensToBeAdded = lhs match {
          case Term.ApplyInfix(lhs1, op1, _, _)
              if !matcher.matches(op1.value)
                && lhs.tokens.head.isNot[LeftParen] =>
            Seq(TokenPatch.AddLeft(lhs.tokens.head, "(", keepTok = true),
                TokenPatch.AddRight(lhs.tokens.last, ")", keepTok = true))
          case _ => Nil
        }

        // Drop LF tokens between the operator and its first argument.
        val toBeRemoved = ctx.tokenTraverser
          .filter(fstOpToken, fstArgsToken)(_.is[LF])
          .map(TokenPatch.Remove)

        // A single-line comment between operator and argument would comment
        // out the rest of the rewritten line, so such cases are skipped.
        val hasSingleLineComment = ctx.tokenTraverser
          .filter(fstOpToken, fstArgsToken)(TokenOps.isSingleLineComment)
          .nonEmpty

        if (hasSingleLineComment)
          Nil
        else
          lhsParensToBeAdded ++ selectorToBeAdded ++ selectorParensToBeAdded ++ toBeRemoved
    }.flatten
  }
}
| Daxten/scalafmt | core/src/main/scala/org/scalafmt/rewrite/AvoidInfix.scala | Scala | apache-2.0 | 2,462 |
package org.helgoboss.scala_osgi_metatype.interfaces
/**
* Provides access to metatypes. Interface modeled after [[org.osgi.service.metatype.MetaTypeProvider]].
*/
trait MetaTypeProvider {
  /**
   * Returns a list of available locales.
   */
  def locales: Traversable[String]

  /**
   * Returns an object class definition for the specified id localized to the specified locale.
   *
   * @param id     identifier of the requested object class definition
   * @param locale the desired locale, or None (presumably the default
   *               locale, mirroring OSGi's null-locale convention — verify)
   */
  def getObjectClassDefinition(id: String, locale: Option[String]): ObjectClassDefinition
}
| helgoboss/scala-osgi-metatype | src/main/scala/org/helgoboss/scala_osgi_metatype/interfaces/MetaTypeProvider.scala | Scala | mit | 484 |
package plugins.cloudimage
import org.specs2.mutable._
import play.api.Play.current
import play.api.test.Helpers.inMemoryDatabase
import play.api.test.Helpers.running
import play.api.test.FakeApplication
import plugins.use
import java.io.BufferedInputStream
import java.io.FileInputStream
/** Integration spec for the Cloudinary cloud-image plugin: exercises
  * upload/destroy round-trips and transformation URL generation.
  */
class CloudinaryCloudImageSpec extends Specification {

  private val fileName = "logo.png"

  private val transformationProperties = Map[TransformationProperty.Value, String](
    TransformationProperty.WIDTH -> "125",
    TransformationProperty.HEIGHT -> "125",
    TransformationProperty.CROP_MODE -> "c_fit");

  // Reads the whole file into a byte array, always closing the stream.
  // BUG FIX: the stream was previously never closed, leaking a file handle.
  private def readAllBytes(file: java.io.File): Array[Byte] = {
    val bis = new BufferedInputStream(new FileInputStream(file))
    try {
      Stream.continually(bis.read).takeWhile(-1 !=).map(_.toByte).toArray
    } finally {
      bis.close()
    }
  }

  "CloudinaryCloudImagePlugin" should {

    "upload and destroy an image" in {
      val fakeApplication = FakeApplication(additionalConfiguration = inMemoryDatabase())
      running(fakeApplication) {
        val cloudImageService = use[CloudImagePlugin].cloudImageService
        // if test is run from a sub project take a look at modules directory
        val file = fakeApplication.getExistingFile("/public/" + fileName).getOrElse(fakeApplication.getFile("/modules/play-cloud-plugins/public/" + fileName))
        val contents = readAllBytes(file)

        val uploadResponse = cloudImageService.upload(fileName, contents)
        uploadResponse must haveClass[CloudImageSuccessResponse]

        val publicId = uploadResponse.asInstanceOf[CloudImageSuccessResponse].publicId
        val destroyResponse = cloudImageService.destroy(publicId)
        destroyResponse must haveClass[CloudImageDestroySuccessResponse]
      }
    }

    "get transformation URL" in {
      running(FakeApplication(additionalConfiguration = inMemoryDatabase())) {
        val cloudImageService = use[CloudImagePlugin].cloudImageService
        val transformationUrl = cloudImageService.getTransformationUrl("url", transformationProperties)
        transformationUrl must contain("w_125,h_125,c_fit")
      }
    }
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.content
import org.scalatest.{FunSpec, Matchers}
import play.api.libs.json.JsonValidationError
import play.api.libs.json._
class CompleteRequestSpec extends FunSpec with Matchers {
  // Example complete_request wire payload; kept in sync with the
  // `completeRequest` instance below.
  val completeRequestJson: JsValue = Json.parse("""
  {
    "code": "<STRING>",
    "cursor_pos": 999
  }
  """)

  val completeRequest: CompleteRequest = CompleteRequest(
    "<STRING>", 999
  )

  describe("CompleteRequest") {
    describe("#toTypeString") {
      it("should return correct type") {
        CompleteRequest.toTypeString should be ("complete_request")
      }
    }

    describe("implicit conversions") {
      it("should implicitly convert from valid json to a CompleteRequest instance") {
        // This is the least safe way to convert as an error is thrown if it fails
        completeRequestJson.as[CompleteRequest] should be (completeRequest)
      }

      it("should also work with asOpt") {
        // This is safer, but we lose the error information as it returns
        // None if the conversion fails
        val newCompleteRequest = completeRequestJson.asOpt[CompleteRequest]
        newCompleteRequest.get should be (completeRequest)
      }

      it("should also work with validate") {
        // This is the safest as it collects all error information (not just first error) and reports it
        val CompleteRequestResults = completeRequestJson.validate[CompleteRequest]
        CompleteRequestResults.fold(
          // NOTE(review): the invalid branch only prints; the assertion below
          // still fails (Unit != completeRequest) but with an unhelpful message.
          (invalid: Seq[(JsPath, Seq[JsonValidationError])]) => println("Failed!"),
          (valid: CompleteRequest) => valid
        ) should be (completeRequest)
      }

      it("should implicitly convert from a CompleteRequest instance to valid json") {
        Json.toJson(completeRequest) should be (completeRequestJson)
      }
    }
  }
}
| lresende/incubator-toree | protocol/src/test/scala/org/apache/toree/kernel/protocol/v5/content/CompleteRequestSpec.scala | Scala | apache-2.0 | 2,642 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.quinine.rdd.variation
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.rich.RichVariant._
import org.bdgenomics.formats.avro.{ Genotype, GenotypeAllele }
import org.bdgenomics.quinine.rdd.variation.GenotypesSummary.StatisticsMap
import org.bdgenomics.quinine.rdd.variation.GenotypesSummaryCounts.ReferenceAndAlternate
import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.collection.mutable
/**
* Simple counts of various properties across a set of genotypes.
*
* Note: for counts of variants, both homozygous and heterozygous
* count as 1 (i.e. homozygous alternate is NOT counted as 2).
* This seems to be the most common convention.
*
* @param genotypesCounts Counts of genotypes: map from list of GenotypeAllele (of size ploidy) -> count
* @param singleNucleotideVariantCounts Map from ReferenceAndAlternate -> count
* where ReferenceAndAlternate is a single base variant.
* @param multipleNucleotideVariantCount Count of multiple nucleotide variants (e.g.: AA -> TG)
* @param insertionCount Count of insertions
* @param deletionCount Count of deletions
* @param readCount Sum of read depths for all genotypes with a called variant
* @param phasedCount Number of genotypes with phasing information
*
*/
case class GenotypesSummaryCounts(
    genotypesCounts: GenotypesSummaryCounts.GenotypeAlleleCounts,
    singleNucleotideVariantCounts: GenotypesSummaryCounts.VariantCounts,
    multipleNucleotideVariantCount: Long,
    insertionCount: Long,
    deletionCount: Long,
    readCount: Option[Long],
    phasedCount: Long) {

  // Total number of genotypes counted (sum over all allele-list counts).
  lazy val genotypesCount: Long = genotypesCounts.values.sum

  // Number of genotypes containing at least one alternate allele.
  lazy val variantGenotypesCount: Long =
    genotypesCounts.keys.filter(_.contains(GenotypeAllele.Alt)).map(genotypesCounts(_)).sum

  lazy val singleNucleotideVariantCount: Long = singleNucleotideVariantCounts.values.sum

  lazy val transitionCount: Long = GenotypesSummaryCounts.transitions.map(singleNucleotideVariantCounts).sum

  lazy val transversionCount: Long = GenotypesSummaryCounts.transversions.map(singleNucleotideVariantCounts).sum

  // NOTE(review): counts the number of *distinct* allele lists containing a
  // NoCall, not the total number of no-call genotypes. If the latter was
  // intended, this should sum the matching map values — verify.
  lazy val noCallCount: Long = genotypesCounts.count(_._1.contains(GenotypeAllele.NoCall))

  // Mean read depth over variant genotypes: None when the total read count
  // is unavailable, and None when there are no variant genotypes at all.
  lazy val averageReadDepthAtVariants =
    if (variantGenotypesCount == 0) None
    else for (readCount1 <- readCount) yield readCount1.toDouble / variantGenotypesCount.toDouble

  // Same counts, but with both count maps defaulting to 0 for missing keys,
  // so lookups for never-seen genotypes/variants are safe.
  lazy val withDefaultZeroCounts = GenotypesSummaryCounts(
    genotypesCounts.withDefaultValue(0.toLong),
    singleNucleotideVariantCounts.withDefaultValue(0.toLong),
    multipleNucleotideVariantCount,
    insertionCount,
    deletionCount,
    readCount,
    phasedCount)

  /** Pointwise sum of two count sets. Read counts combine only when both sides
    * have one (otherwise the combined read count is None).
    */
  def combine(that: GenotypesSummaryCounts): GenotypesSummaryCounts = {
    // Key-wise addition of two count maps, treating missing keys as 0.
    def combine_counts[A](map1: Map[A, Long], map2: Map[A, Long]): Map[A, Long] = {
      val keys: Set[A] = map1.keySet.union(map2.keySet)
      val pairs = keys.map(k => (k -> (map1.getOrElse(k, 0.toLong) + map2.getOrElse(k, 0.toLong))))
      pairs.toMap
    }
    GenotypesSummaryCounts(
      combine_counts(genotypesCounts, that.genotypesCounts),
      combine_counts(singleNucleotideVariantCounts, that.singleNucleotideVariantCounts),
      multipleNucleotideVariantCount + that.multipleNucleotideVariantCount,
      insertionCount + that.insertionCount,
      deletionCount + that.deletionCount,
      for (readCount1 <- readCount; readcount2 <- that.readCount) yield readCount1 + readcount2,
      phasedCount + that.phasedCount)
  }
}
object GenotypesSummaryCounts {
  /** A single-base (or multi-base) substitution: reference -> alternate. */
  case class ReferenceAndAlternate(reference: String, alternate: String)

  type GenotypeAlleleCounts = Map[List[GenotypeAllele], Long]
  type VariantCounts = Map[ReferenceAndAlternate, Long]

  val simpleNucleotides = List("A", "C", "T", "G")

  // Purine<->purine and pyrimidine<->pyrimidine substitutions.
  val transitions = List(
    ReferenceAndAlternate("A", "G"),
    ReferenceAndAlternate("G", "A"),
    ReferenceAndAlternate("C", "T"),
    ReferenceAndAlternate("T", "C"))

  // The remaining eight single-base substitutions.
  val transversions = List(
    ReferenceAndAlternate("A", "C"),
    ReferenceAndAlternate("C", "A"),
    ReferenceAndAlternate("A", "T"),
    ReferenceAndAlternate("T", "A"),
    ReferenceAndAlternate("G", "C"),
    ReferenceAndAlternate("C", "G"),
    ReferenceAndAlternate("G", "T"),
    ReferenceAndAlternate("T", "G"))

  /**
   * Factory for an empty GenotypesSummaryCounts.
   */
  def apply(): GenotypesSummaryCounts =
    GenotypesSummaryCounts(
      Map(),
      Map(),
      0, // Multiple nucleotide variants
      0, // Insertion count
      0, // Deletion count
      Some(0), // Read count
      0) // Phased count

  // NOTE(review): this overload always fails at runtime (assert(false)) and
  // returns Unit. Verify whether it is intentional (e.g. to block accidental
  // copy-style calls) or can be removed.
  def apply(counts: GenotypesSummaryCounts) {
    assert(false)
  }

  /**
   * Factory for a GenotypesSummaryCounts that counts a single Genotype.
   */
  def apply(genotype: Genotype): GenotypesSummaryCounts = {
    val variant = genotype.getVariant
    val ref_and_alt = ReferenceAndAlternate(variant.getReferenceAllele.toString,
      variant.getAlternateAllele.toString)

    // We always count our genotype. The other counts are set to 1 only if we have a variant genotype.
    val isVariant = genotype.getAlleles.contains(GenotypeAllele.Alt)

    val genotypeAlleleCounts = Map(genotype.getAlleles.asScala.toList -> 1.toLong)

    val variantCounts = (
      if (isVariant && variant.isSingleNucleotideVariant) Map(ref_and_alt -> 1.toLong)
      else Map(): VariantCounts)

    // Read depth contributes only for variant genotypes; None when missing.
    val readDepth = (
      if (genotype.getReadDepth == null) None
      else if (isVariant) Some(genotype.getReadDepth.toLong)
      else Some(0.toLong))

    GenotypesSummaryCounts(
      genotypeAlleleCounts,
      variantCounts,
      if (isVariant && variant.isMultipleNucleotideVariant) 1 else 0,
      if (isVariant && variant.isInsertion) 1 else 0,
      if (isVariant && variant.isDeletion) 1 else 0,
      readDepth,
      if (isVariant && genotype.getIsPhased != null && genotype.getIsPhased) 1 else 0)
  }
}
/**
* Summary statistics for a set of genotypes.
* @param perSampleStatistics A map from sample id -> GenotypesSummaryCounts for that sample
* @param singletonCount Number of variants that are called in exactly one sample.
* @param distinctVariantCount Number of distinct variants that are called at least once.
*
*/
case class GenotypesSummary(
    perSampleStatistics: StatisticsMap,
    singletonCount: Long,
    distinctVariantCount: Long) {

  /** Statistics aggregated over all samples, with zero-defaulted count maps. */
  lazy val aggregateStatistics =
    perSampleStatistics
      .values
      .foldLeft(GenotypesSummaryCounts()) { (acc, sampleStats) => acc.combine(sampleStats) }
      .withDefaultZeroCounts
}
object GenotypesSummary {
  type StatisticsMap = Map[String, GenotypesSummaryCounts]

  /**
   * Factory for a GenotypesSummary given an RDD of Genotype.
   */
  def apply(rdd: RDD[Genotype]): GenotypesSummary = {
    // Key-wise merge of two per-sample statistics maps.
    def combineStatisticsMap(stats1: StatisticsMap, stats2: StatisticsMap): StatisticsMap = {
      stats1.keySet.union(stats2.keySet).map(sample => {
        (stats1.get(sample), stats2.get(sample)) match {
          case (Some(statsA), Some(statsB)) => sample -> statsA.combine(statsB)
          case (Some(stats), None) => sample -> stats
          case (None, Some(stats)) => sample -> stats
          case (None, None) => throw new AssertionError("Unreachable")
        }
      }).toMap
    }

    // One singleton map per genotype, folded together across the RDD, then
    // zero-defaulted so missing keys read as 0.
    val perSampleStatistics: StatisticsMap = rdd
      .map(genotype => Map(genotype.getSampleId.toString -> GenotypesSummaryCounts(genotype)))
      .fold(Map(): StatisticsMap)(combineStatisticsMap(_, _))
      .map({ case (sample: String, stats: GenotypesSummaryCounts) => sample -> stats.withDefaultZeroCounts }).toMap

    // Occurrence count of each distinct called variant
    // (contig, start, ref, alt) across all samples.
    val variantCounts =
      rdd.filter(_.getAlleles.contains(GenotypeAllele.Alt)).map(genotype => {
        val variant = genotype.getVariant
        (variant.getContig, variant.getStart, variant.getReferenceAllele, variant.getAlternateAllele)
      }).countByValue

    // Variants observed exactly once are singletons.
    val singletonCount = variantCounts.count(_._2 == 1)
    val distinctVariantsCount = variantCounts.size

    GenotypesSummary(perSampleStatistics, singletonCount, distinctVariantsCount)
  }
}
/**
* Functions for converting a GenotypesSummary object to various text formats.
*/
object GenotypesSummaryFormatting {

  /** Renders the summary as CSV: a commented header row, one row per sample,
    * and a final "Aggregated" row.
    */
  def format_csv(summary: GenotypesSummary): String = {

    val genotypeAlleles = sortedGenotypeAlleles(summary.aggregateStatistics)

    // Renders one row of statistics, in the same order as the header.
    def format_statistics(stats: GenotypesSummaryCounts): Seq[String] = {
      val row = mutable.MutableList[String]()
      row += stats.genotypesCount.toString
      row += stats.variantGenotypesCount.toString
      row += stats.insertionCount.toString
      row += stats.deletionCount.toString
      row += stats.singleNucleotideVariantCount.toString
      row += stats.transitionCount.toString
      row += stats.transversionCount.toString
      row += (stats.transitionCount.toDouble / stats.transversionCount.toDouble).toString
      row ++= genotypeAlleles.map(stats.genotypesCounts(_).toString) // Genotype counts
      row ++= allSNVs.map(stats.singleNucleotideVariantCounts(_).toString) // SNV counts
      row
    }

    val basicHeader = List(
      "Sample", "Genotypes", "Variant Genotypes", "Insertions", "Deletions", "SNVs", "Transitions", "Transversions", "Ti / Tv")
    val genotypesHeader = genotypeAlleles.map(genotypeAllelesToString(_))
    val snvsHeader = allSNVs.map(snv => "%s>%s".format(snv.reference, snv.alternate))

    val result = new mutable.StringBuilder
    result ++= "# " + (basicHeader ++ genotypesHeader ++ snvsHeader).mkString(", ") + "\\n"
    for ((sample, stats) <- summary.perSampleStatistics) {
      val row = mutable.MutableList(sample)
      row ++= format_statistics(stats)
      result ++= row.mkString(", ") + "\\n"
    }
    val final_row = List("Aggregated") ++ format_statistics(summary.aggregateStatistics)
    result ++= final_row.mkString(", ") + "\\n"
    result.toString
  }

  /** Renders the summary as a human-readable multi-line report: one section
    * per sample (sorted by sample name) followed by an aggregate section.
    */
  def format_human_readable(summary: GenotypesSummary): String = {

    // Appends one section of statistics to `result`.
    def format_statistics(stats: GenotypesSummaryCounts, result: mutable.StringBuilder) = {
      result ++= "\\tVariant Genotypes: %d / %d = %1.3f%%\\n".format(
        stats.variantGenotypesCount,
        stats.genotypesCount,
        stats.variantGenotypesCount.toDouble * 100.0 / stats.genotypesCount)
      for (genotype <- sortedGenotypeAlleles(summary.aggregateStatistics)) {
        val count = stats.genotypesCounts(genotype)
        result ++= "\\t%20s: %9d = %1.3f%%\\n".format(
          genotypeAllelesToString(genotype),
          count,
          count.toDouble * 100.0 / stats.genotypesCount.toDouble)
      }
      result ++= "\\tInsertions: %d\\n".format(stats.insertionCount)
      result ++= "\\tDeletions: %d\\n".format(stats.deletionCount)
      result ++= "\\tMultiple nucleotide variants: %d\\n".format(stats.multipleNucleotideVariantCount)
      result ++= "\\tSingle nucleotide variants: %d\\n".format(stats.singleNucleotideVariantCount)
      result ++= "\\t\\tTransitions / transversions: %4d / %4d = %1.3f\\n".format(
        stats.transitionCount,
        stats.transversionCount,
        stats.transitionCount.toDouble / stats.transversionCount.toDouble)
      // BUG FIX: removed unused mutable locals (`var from, to = 0`) that were
      // declared here but never read or updated.
      for (snv <- allSNVs) {
        result ++= "\\t\\t%s>%s %9d\\n".format(snv.reference, snv.alternate, stats.singleNucleotideVariantCounts(snv))
      }
      result ++= "\\tAverage read depth at called variants: %s\\n".format(stats.averageReadDepthAtVariants match {
        case Some(depth) => "%1.1f".format(depth)
        case None => "[no variant calls, or read depth missing for one or more variant calls]"
      })
      result ++= "\\tPhased genotypes: %d / %d = %1.3f%%\\n".format(
        stats.phasedCount,
        stats.genotypesCount,
        stats.phasedCount.toDouble * 100 / stats.genotypesCount)
    }

    val result = new mutable.StringBuilder
    for (sample <- summary.perSampleStatistics.keySet.toList.sorted) {
      result ++= "Sample: %s\\n".format(sample)
      format_statistics(summary.perSampleStatistics(sample), result)
      result ++= "\\n"
    }
    result ++= "\\nSummary\\n"
    result ++= "\\tSamples: %d\\n".format(summary.perSampleStatistics.size)
    result ++= "\\tDistinct variants: %d\\n".format(summary.distinctVariantCount)
    result ++= "\\tVariants found only in a single sample: %d = %1.3f%%\\n".format(
      summary.singletonCount,
      summary.singletonCount.toDouble * 100.0 / summary.distinctVariantCount)
    format_statistics(summary.aggregateStatistics, result)
    result.toString
  }

  // Genotypes sorted so reference-only genotypes come first, variants next,
  // and anything containing a NoCall last.
  private def sortedGenotypeAlleles(stats: GenotypesSummaryCounts): Seq[List[GenotypeAllele]] = {
    def genotypeSortOrder(genotype: List[GenotypeAllele]): Int = genotype.map({
      case GenotypeAllele.Ref => 0
      case GenotypeAllele.Alt | GenotypeAllele.OtherAlt => 1 // alt/otheralt sort to same point
      case GenotypeAllele.NoCall => 10 // arbitrary large number so any genotype with a NoCall sorts last.
    }).sum
    stats.genotypesCounts.keySet.toList.sortBy(genotypeSortOrder(_))
  }

  // Joins allele names with '-', e.g. "Ref-Alt".
  private def genotypeAllelesToString(alleles: List[GenotypeAllele]): String =
    alleles.map(_.toString).mkString("-")

  // All 12 ordered reference -> alternate single-base substitutions.
  lazy val allSNVs: Seq[ReferenceAndAlternate] =
    for (
      from <- GenotypesSummaryCounts.simpleNucleotides;
      to <- GenotypesSummaryCounts.simpleNucleotides;
      if (from != to)
    ) yield GenotypesSummaryCounts.ReferenceAndAlternate(from, to)
}
| heuermh/bdg-qc-metrics | quinine-core/src/main/scala/org/bdgenomics/quinine/rdd/variation/GenotypesSummary.scala | Scala | apache-2.0 | 14,147 |
/*
* Copyright 2014 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.scrooge.linter
import com.twitter.logging.{ConsoleHandler, Formatter}
import com.twitter.scrooge.ast._
import com.twitter.scrooge.frontend.{FileParseException, Importer, ThriftParser, TypeResolver}
import java.io.File
import java.util.logging.{Logger, LogRecord, LogManager, Level}
import scala.collection.mutable.ArrayBuffer
/** Severity levels for lint findings. */
object LintLevel extends Enumeration {
  type LintLevel = Value

  /** A non-fatal finding. */
  val Warning = Value

  /** A finding that should fail the lint run. */
  val Error = Value
}
import LintLevel._
case class LintMessage(msg: String, level: LintLevel = Warning)
/** A lint rule: a function from a parsed Thrift document to the findings it produces. */
trait LintRule extends (Document => Iterable[LintMessage]) {
  // Whether the rule needs includes to be available before it can run.
  def requiresIncludes: Boolean = false
  // Rule name derived from the runtime class name, with punctuation stripped
  // (e.g. the trailing '$' of Scala object class names).
  def name = getClass.getSimpleName.replaceAll("\\\\p{Punct}", "") // no $
}
object LintRule {
// Combines several rules into one rule that runs each of them in order and
// concatenates their messages.
def all(rules: Seq[LintRule]): LintRule =
  new LintRule {
    def apply(doc: Document): Seq[LintMessage] = rules.flatMap(_.apply(doc))
  }
// Rules applied on every run by default.
val DefaultRules = Seq(
  Namespaces,
  RelativeIncludes,
  CamelCase,
  RequiredFieldDefault,
  Keywords
)

// The full rule set, adding opt-in rules that require parsing included files.
val Rules = DefaultRules ++ Seq(
  TransitivePersistence,
  DocumentedPersisted
)
/**
 * All structs annotated persisted = "true" refer only to structs that are persisted as well.
 *
 * Walks the field graph of every persisted struct and reports any reachable struct
 * that is not itself annotated persisted.
 */
object TransitivePersistence extends LintRule {

  // Reachability may cross file boundaries, so included files must be parsed.
  override def requiresIncludes: Boolean = true

  // A struct opts into persistence via the (persisted = "true") annotation.
  def isPersisted(struct: StructLike) =
    struct.annotations.getOrElse("persisted", "false") == "true"

  def apply(doc0: Document) = {
    // resolving ReferenceTypes
    val resolver = TypeResolver()(doc0)
    val doc = resolver.document

    // Collects the scope-qualified names of unpersisted structs reachable from `s`.
    // `seen` tracks structs already visited on the current path so that recursive
    // struct definitions do not cause infinite recursion (previously this could
    // overflow the stack on cyclic type graphs).
    def findUnpersistedStructs(
      s: StructLike,
      scopePrefix: Option[SimpleID] = None,
      seen: Set[String] = Set.empty
    ): Seq[String] = {
      val qualifiedName = scopePrefix.map(_.name + ".").getOrElse("") + s.sid.name
      if (seen.contains(qualifiedName)) {
        Seq.empty
      } else {
        val current =
          if (!isPersisted(s)) Seq(qualifiedName)
          else Seq.empty
        (current ++ findUnpersistedStructsFromFields(s.fields.map(_.fieldType), seen + qualifiedName)).distinct
      }
    }

    // Recurses into container types to find struct references; primitives and enums
    // terminate the walk.
    def findUnpersistedStructsFromFields(fieldTypes: Seq[FieldType], seen: Set[String]): Seq[String] = {
      fieldTypes.flatMap {
        case StructType(s, scopePrefix) => findUnpersistedStructs(s, scopePrefix, seen) // includes Unions
        case EnumType(enum: Enum, _) => Seq.empty // enums don't have annotations
        case MapType(keyType, valueType, _) => findUnpersistedStructsFromFields(Seq(keyType, valueType), seen)
        case SetType(eltType, _) => findUnpersistedStructsFromFields(Seq(eltType), seen)
        case ListType(eltType, _) => findUnpersistedStructsFromFields(Seq(eltType), seen)
        case _: BaseType => Seq.empty // primitive types
        case _: ReferenceType => // ReferenceTypes have been resolved, this can not happen
          throw new UnsupportedOperationException("There should be no ReferenceType anymore after type resolution")
      }
    }

    for {
      struct <- doc.structs if isPersisted(struct) // structs contains all StructLikes including Structs and Unions
      structChild <- findUnpersistedStructs(struct)
    } yield LintMessage(
      s"struct ${struct.originalName} with persisted annotation refers to struct ${structChild} that is not annotated persisted.",
      Error)
  }
}
/**
 * all structs annotated (persisted = "true") must have their fields documented
 */
object DocumentedPersisted extends LintRule {
  def apply(doc: Document) = {
    val persisted = doc.structs.filter(TransitivePersistence.isPersisted(_))

    // Undocumented persisted structs.
    val structErrors = persisted.filter(_.docstring.isEmpty).map { s =>
      LintMessage(s"Missing documentation on struct ${s.originalName} annotated (persisted = 'true').", Error)
    }

    // Undocumented fields inside persisted structs.
    val fieldErrors = persisted.flatMap { s =>
      s.fields.filter(_.docstring.isEmpty).map { field =>
        LintMessage(s"Missing documentation on field ${field.originalName} in struct ${s.originalName} annotated (persisted = 'true').", Error)
      }
    }

    structErrors ++ fieldErrors
  }
}
object Namespaces extends LintRule {
  // All IDLs have a scala and a java namespace
  def apply(doc: Document) = {
    for {
      lang <- Seq("scala", "java")
      if doc.namespace(lang).isEmpty
    } yield LintMessage("Missing namespace: %s.".format(lang), Error)
  }
}
object RelativeIncludes extends LintRule {
  // No relative includes
  def apply(doc: Document) = {
    doc.headers.flatMap {
      case include @ Include(f, _) if f.contains("..") =>
        Some(LintMessage(s"Relative include path found:\\n${include.pos.longString}", Error))
      case _ =>
        None
    }
  }
}
object CamelCase extends LintRule {
  // Struct names are UpperCamelCase.
  // Field names are lowerCamelCase.
  def apply(doc: Document) = {
    doc.defs.flatMap {
      case struct: StructLike =>
        // Message for a struct whose name is not UpperCamelCase, if any.
        val structMessages =
          if (isTitleCase(struct.originalName)) Seq.empty
          else {
            val correctName = Identifier.toTitleCase(struct.originalName)
            Seq(LintMessage(s"Struct name ${struct.originalName} is not UpperCamelCase. " +
              s"Should be: ${correctName}. \\n${struct.pos.longString}"))
          }
        // Messages for fields whose names are not lowerCamelCase.
        val fieldMessages = struct.fields.collect {
          case f if !isCamelCase(f.originalName) =>
            LintMessage(s"Field name ${f.originalName} is not lowerCamelCase. " +
              s"Should be: ${Identifier.toCamelCase(f.originalName)}. \\n${f.pos.longString}")
        }
        structMessages ++ fieldMessages
      case _ =>
        Seq.empty
    }
  }

  private[this] def isCamelCase(name: String): Boolean =
    Identifier.toCamelCase(name) == name

  private[this] def isTitleCase(name: String): Boolean =
    Identifier.toTitleCase(name) == name
}
object RequiredFieldDefault extends LintRule {
  // No default values for required fields
  def apply(doc: Document) = {
    for {
      struct <- doc.defs.collect { case s: StructLike => s }
      f <- struct.fields
      if f.requiredness == Requiredness.Required && f.default.nonEmpty
    } yield LintMessage(s"Required field ${f.originalName} has a default value. " +
      s"Make it optional or remove the default.\\n${f.pos.longString}",
      Error)
  }
}
object Keywords extends LintRule {
  // Struct and field names should not be keywords in Scala, Java, Ruby, Python, PHP,
  // since generated code in those languages would fail to compile.
  def apply(doc: Document) = {
    val messages = new ArrayBuffer[LintMessage]
    // The original bound this traversal's (unused) result to a val via `collect`;
    // it is purely a side-effecting loop, so use foreach with an explicit catch-all.
    doc.defs.foreach {
      case struct: StructLike =>
        // Flag struct names that are keywords in any target language.
        // (Fixed: the message previously rendered a stray '}' after the struct name.)
        languageKeywords.foreach { case (lang, keywords) =>
          if (keywords.contains(struct.originalName)) {
            messages += LintMessage(
              s"Struct name ${struct.originalName} is a $lang keyword. Avoid using keywords as identifiers.\\n" +
                s"${struct.pos.longString}")
          }
        }
        // Flag field names that are keywords, reported once per language.
        for {
          (lang, keywords) <- languageKeywords
          fields = struct.fields.filter { f => keywords.contains(f.originalName) } if fields.nonEmpty
          fieldNames = fields.map(_.originalName)
        } messages += LintMessage(s"Found field names that are $lang keywords: ${fieldNames.mkString(", ")}. " +
          s"Avoid using keywords as identifiers.\\n${fields.head.pos.longString}")
      case _ =>
    }
    messages
  }

  // Reserved words per target language of scrooge code generation.
  private[this] val languageKeywords: Map[String, Set[String]] = Map(
    "scala" -> Set("abstract", "case", "catch", "class", "def", "do", "else",
      "extends", "false", "final", "finally", "for", "forSome", "if",
      "implicit", "import", "lazy", "match", "new", "null", "object",
      "override", "package", "private", "protected", "return", "sealed",
      "super", "this", "throw", "trait", "try", "true",
      "type", "val", "var", "while", "with", "yield"),
    "java" -> Set("abstract",
      "assert", "boolean", "break", "byte", "case", "catch", "char", "class",
      "const", "continue", "default", "do", "double", "else", "enum", "extends",
      "final", "finally", "float", "for", "goto", "if", "implements", "import",
      "instanceof", "int", "interface", "long", "native", "new", "package",
      "private", "protected", "public", "return", "short", "static", "strictfp",
      "super", "switch", "synchronized", "this", "throw", "throws", "transient",
      "try", "void", "volatile", "while"),
    "ruby" -> Set("BEGIN", "END", "__ENCODING__", "__END__", "__FILE__", "__LINE__",
      "alias", "and", "begin", "break", "case", "class", "def", "defined?",
      "do", "else", "elsif", "end", "ensure", "false", "for", "if",
      "in", "module", "next", "nil", "not", "or", "redo", "rescue", "retry",
      "return", "self", "super", "then", "true", "undef", "unless", "until",
      "when", "while", "yield"),
    "php" -> Set("__halt_compiler", "abstract", "and", "array", "as", "break", "callable",
      "case", "catch", "class", "clone", "const", "continue", "declare", "default",
      "die", "do", "echo", "else", "elseif", "empty", "enddeclare", "endfor",
      "endforeach", "endif", "endswitch", "endwhile", "eval", "exit", "extends",
      "final", "finally", "for", "foreach", "function", "global", "goto", "if",
      "implements", "include", "include_once", "instanceof", "insteadof", "interface",
      "isset", "list", "namespace", "new", "or", "print", "private", "protected",
      "public", "require", "require_once", "return", "static", "switch", "throw",
      "trait", "try", "unset", "use", "var", "while", "xor", "yield"),
    "python" -> Set("and", "as", "assert", "break", "class", "continue", "def",
      "del", "elif", "else", "except", "exec", "finally", "for", "from", "global",
      "if", "import", "in", "is", "lambda", "not", "or", "pass",
      "print", "raise", "return", "try", "while", "with", "yield")
  )
  // Removed: unused private helper `checkKeyword` (dead code — nothing in this
  // object or outside it could call a private[this] member).
}
}
// Custom java.util.logging levels so lint output is tagged distinctly from
// ordinary log records. 999/998 place them above Level.WARNING (900).
object ErrorLogLevel extends Level("LINT-ERROR", 999)
object WarningLogLevel extends Level("LINT-WARN", 998)
/**
 * Runs the configured lint rules over Thrift IDL files and reports findings
 * through java.util.logging using the LINT-ERROR / LINT-WARN levels.
 */
class Linter(cfg: Config) {
  // Drop any pre-existing handlers so lint output is the only thing printed.
  LogManager.getLogManager.reset

  private[this] val log = Logger.getLogger("linter")
  private[this] val formatter = new Formatter() {
    override def format(record: LogRecord) =
      "%s: %s%s".format(record.getLevel.getName, formatText(record), lineTerminator)
  }
  log.addHandler(new ConsoleHandler(formatter, None))

  private[this] val rules = cfg.enabledRules

  def error(msg: String): Unit = log.log(ErrorLogLevel, msg)

  // Warnings are suppressed unless enabled in the configuration.
  def warning(msg: String): Unit = {
    if (cfg.showWarnings)
      log.log(WarningLogLevel, msg)
  }

  // Lint a document, returning the number of lint errors found.
  def apply(doc: Document, inputFile: String): Int = {
    val messages = LintRule.all(rules)(doc)
    messages.foreach {
      case LintMessage(msg, Error) =>
        error(s"$inputFile\\n$msg")
      case LintMessage(msg, Warning) =>
        warning(s"$inputFile\\n$msg")
    }
    val errorCount = messages.count(_.level == Error)
    val warnCount = messages.count(_.level == Warning)
    if (errorCount + warnCount > 0) {
      // Use the already-computed warnCount instead of re-deriving it as
      // messages.size - errorCount (the previous expression left warnCount unused).
      warning("%d warnings and %d errors found".format(warnCount, errorCount))
    }
    errorCount
  }

  // Lint cfg.files and return the total number of lint errors found.
  def lint(): Int = {
    // Includes only need parsing when some enabled rule inspects them.
    val requiresIncludes = rules.exists { _.requiresIncludes }
    val importer = Importer(new File(".")) +: Importer(cfg.includePaths)
    val parser = new ThriftParser(importer, cfg.strict, defaultOptional = false, skipIncludes = !requiresIncludes)

    val errorCounts = cfg.files.map { inputFile =>
      if (cfg.verbose)
        log.info("\\n+ Linting %s".format(inputFile))
      try {
        val doc0 = parser.parseFile(inputFile)
        apply(doc0, inputFile)
      } catch {
        case e: FileParseException if (cfg.ignoreParseErrors) =>
          // Best-effort mode: report the parse failure but keep linting other files.
          e.printStackTrace()
          0
      }
    }
    errorCounts.sum
  }
}
| nkhuyu/scrooge | scrooge-linter/src/main/scala/com/twitter/scrooge/linter/Linter.scala | Scala | apache-2.0 | 12,770 |
package org.coursera.naptime.ari.graphql.controllers.filters
import javax.inject.Inject
import javax.inject.Singleton
import org.coursera.naptime.ari.engine.EngineMetricsCollector
import play.api.libs.json.Json
import scala.concurrent.ExecutionContext
/**
 * Filter that, after the downstream filter completes, records engine execution
 * metrics and embeds a "__meta" object (downstream request count) in the response.
 * Queries without an ARI response pass through unchanged.
 */
@Singleton
class EngineMetricsFilter @Inject() (
    metricsCollector: EngineMetricsCollector)
    (implicit executionContext: ExecutionContext)
  extends Filter {

  def apply(nextFilter: FilterFn): FilterFn = { incoming =>
    nextFilter.apply(incoming).map { outgoing =>
      outgoing.ariResponse match {
        case Some(ariResponse) =>
          metricsCollector.markExecutionCompletion(ariResponse.metrics)
          val meta = Json.obj("__meta" ->
            Json.obj("downstreamRequests" -> ariResponse.metrics.numRequests))
          outgoing.copy(response = outgoing.response ++ meta)
        case None =>
          outgoing
      }
    }
  }
}
| vkuo-coursera/naptime | naptime-graphql/src/main/scala/org/coursera/naptime/ari/graphql/controllers/filters/EngineMetricsFilter.scala | Scala | apache-2.0 | 961 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.