code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package com.danielwestheide.kontextfrei.scalatest
import com.danielwestheide.kontextfrei.DCollectionOps
/** Base trait for ScalaTest suites that are written against the kontextfrei
  * [[com.danielwestheide.kontextfrei.DCollectionOps]] abstraction, so the same
  * suite can run against different `DColl` implementations.
  *
  * @tparam DColl the distributed-collection type constructor under test
  */
trait KontextfreiSpec[DColl[_]] {
  // Abstract: concrete suites provide the DCollectionOps instance for DColl;
  // it is implicit so the suite body can use DCollection syntax directly.
  implicit def ops: DCollectionOps[DColl]
}
| dwestheide/kontextfrei | scalatest/src/main/scala/com/danielwestheide/kontextfrei/scalatest/KontextfreiSpec.scala | Scala | apache-2.0 | 184 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import java.io.File
import java.util.concurrent.atomic.AtomicInteger
import org.apache.mxnet.NDArrayConversions._
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers}
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer
class NDArraySuite extends FunSuite with BeforeAndAfterAll with Matchers {
private val sequence: AtomicInteger = new AtomicInteger(0)
private val logger = LoggerFactory.getLogger(classOf[NDArraySuite])
test("to java array") {
val ndarray = NDArray.zeros(2, 2)
assert(ndarray.toArray === Array(0f, 0f, 0f, 0f))
val float64Array = NDArray.zeros(Shape(2, 2), dtype = DType.Float64)
assert(float64Array.toFloat64Array === Array(0d, 0d, 0d, 0d))
}
test("to scalar") {
val ndzeros = NDArray.zeros(1)
assert(ndzeros.toScalar === 0f)
val ndones = NDArray.ones(1)
assert(ndones.toScalar === 1f)
}
test("to float 64 scalar") {
val ndzeros = NDArray.zeros(Shape(1), dtype = DType.Float64)
assert(ndzeros.toFloat64Scalar === 0d)
val ndones = NDArray.ones(Shape(1), dtype = DType.Float64)
assert(ndones.toFloat64Scalar === 1d)
}
test ("call toScalar on an ndarray which is not a scalar") {
intercept[Exception] { NDArray.zeros(1, 1).toScalar }
intercept[Exception] { NDArray.zeros(shape = Shape (1, 1),
dtype = DType.Float64).toFloat64Scalar }
}
test("size and shape") {
val ndzeros = NDArray.zeros(4, 1)
assert(ndzeros.shape === Shape(4, 1))
assert(ndzeros.size === 4)
}
test("dtype") {
val arr = NDArray.zeros(3, 2)
assert(arr.dtype === DType.Float32)
val float64Array = NDArray.zeros(shape = Shape(3, 2), dtype = DType.Float64)
assert(float64Array.dtype === DType.Float64)
}
test("set scalar value") {
val ndarray = NDArray.empty(2, 1)
ndarray.set(10f)
assert(ndarray.toArray === Array(10f, 10f))
val float64array = NDArray.empty(shape = Shape(2, 1), dtype = DType.Float64)
float64array.set(10d)
assert(float64array.toFloat64Array === Array(10d, 10d))
}
test("copy from java array") {
val ndarray = NDArray.empty(4, 1)
ndarray.set(Array(1f, 2f, 3f, 4f))
assert(ndarray.toArray === Array(1f, 2f, 3f, 4f))
}
test("create NDArray based on Java Matrix") {
def arrayGen(num : Any) : Array[Any] = {
val array = num match {
case f: Float =>
(for (_ <- 0 until 100) yield Array(1.0f, 1.0f, 1.0f, 1.0f)).toArray
case d: Double =>
(for (_ <- 0 until 100) yield Array(1.0d, 1.0d, 1.0d, 1.0d)).toArray
case _ => throw new IllegalArgumentException(s"Unsupported Type ${num.getClass}")
}
Array(
Array(
array
),
Array(
array
)
)
}
val floatData = 1.0f
var nd = NDArray.toNDArray(arrayGen(floatData))
require(nd.shape == Shape(2, 1, 100, 4))
val arr2 = Array(1.0f, 1.0f, 1.0f, 1.0f)
nd = NDArray.toNDArray(arr2)
require(nd.shape == Shape(4))
val doubleData = 1.0d
nd = NDArray.toNDArray(arrayGen(doubleData))
require(nd.shape == Shape(2, 1, 100, 4))
require(nd.dtype == DType.Float64)
}
test("test Visualize") {
var nd = NDArray.ones(Shape(1, 2, 1000, 1))
var data : String =
"""
|[
| [
| [
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
|
| ... with length 1000
| ]
| [
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
| [1.0]
|
| ... with length 1000
| ]
| ]
|]
|<NDArray (1,2,1000,1) cpu(0) float32>""".stripMargin
require(nd.toString.split("\\\\s+").mkString == data.split("\\\\s+").mkString)
nd = NDArray.ones(Shape(1, 4))
data =
"""
|[
| [1.0,1.0,1.0,1.0]
|]
|<NDArray (1,4) cpu(0) float32>""".stripMargin
require(nd.toString.split("\\\\s+").mkString == data.split("\\\\s+").mkString)
}
// Verifies element-wise addition (scalar, NDArray, and in-place forms)
// for both the default Float32 dtype and Float64.
test("plus") {
  var ndzeros = NDArray.zeros(2, 1)
  var ndones = ndzeros + 1f
  assert(ndones.toArray === Array(1f, 1f))
  assert((ndones + ndzeros).toArray === Array(1f, 1f))
  assert((1 + ndones).toArray === Array(2f, 2f))
  // in-place
  ndones += ndones
  assert(ndones.toArray === Array(2f, 2f))
  // Float64 method test
  ndzeros = NDArray.zeros(shape = Shape(2, 1), dtype = DType.Float64)
  ndones = ndzeros + 1d
  assert(ndones.toFloat64Array === Array(1d, 1d))
  assert((ndones + ndzeros).toFloat64Array === Array(1d, 1d))
  // Fixed: read the Float64 result with toFloat64Array (was toArray),
  // consistent with every other assertion in this Float64 section.
  assert((1d + ndones).toFloat64Array === Array(2d, 2d))
  // in-place
  ndones += ndones
  assert(ndones.toFloat64Array === Array(2d, 2d))
}
// Verifies element-wise subtraction (scalar, NDArray, and in-place forms)
// for both the default Float32 dtype and Float64.
test("minus") {
  var ndones = NDArray.ones(2, 1)
  var ndzeros = ndones - 1f
  assert(ndzeros.toArray === Array(0f, 0f))
  assert((ndones - ndzeros).toArray === Array(1f, 1f))
  assert((ndzeros - ndones).toArray === Array(-1f, -1f))
  assert((ndones - 1).toArray === Array(0f, 0f))
  // in-place
  ndones -= ndones
  assert(ndones.toArray === Array(0f, 0f))
  // Float64 methods test
  // Fixed: the Float64 section previously created a Float32 array
  // (dtype argument was missing), so it never exercised the Float64 path.
  ndones = NDArray.ones(shape = Shape(2, 1), dtype = DType.Float64)
  ndzeros = ndones - 1d
  assert(ndzeros.toFloat64Array === Array(0d, 0d))
  assert((ndones - ndzeros).toFloat64Array === Array(1d, 1d))
  assert((ndzeros - ndones).toFloat64Array === Array(-1d, -1d))
  assert((ndones - 1).toFloat64Array === Array(0d, 0d))
  // in-place
  ndones -= ndones
  // Fixed: use toFloat64Array (was toArray) on the Float64 result.
  assert(ndones.toFloat64Array === Array(0d, 0d))
}
test("multiplication") {
var ndones = NDArray.ones(2, 1)
var ndtwos = ndones * 2
assert(ndtwos.toArray === Array(2f, 2f))
assert((ndones * ndones).toArray === Array(1f, 1f))
assert((ndtwos * ndtwos).toArray === Array(4f, 4f))
ndtwos *= ndtwos
// in-place
assert(ndtwos.toArray === Array(4f, 4f))
// Float64 methods test
ndones = NDArray.ones(shape = Shape(2, 1), dtype = DType.Float64)
ndtwos = ndones * 2d
assert(ndtwos.toFloat64Array === Array(2d, 2d))
assert((ndones * ndones).toFloat64Array === Array(1d, 1d))
assert((ndtwos * ndtwos).toFloat64Array === Array(4d, 4d))
ndtwos *= ndtwos
// in-place
assert(ndtwos.toFloat64Array === Array(4d, 4d))
}
test("division") {
var ndones = NDArray.ones(2, 1)
var ndzeros = ndones - 1f
var ndhalves = ndones / 2
assert(ndhalves.toArray === Array(0.5f, 0.5f))
assert((ndhalves / ndhalves).toArray === Array(1f, 1f))
assert((ndones / ndones).toArray === Array(1f, 1f))
assert((ndzeros / ndones).toArray === Array(0f, 0f))
ndhalves /= ndhalves
// in-place
assert(ndhalves.toArray === Array(1f, 1f))
// Float64 methods test
ndones = NDArray.ones(shape = Shape (2, 1), dtype = DType.Float64)
ndzeros = ndones - 1d
ndhalves = ndones / 2d
assert(ndhalves.toFloat64Array === Array(0.5d, 0.5d))
assert((ndhalves / ndhalves).toFloat64Array === Array(1d, 1d))
assert((ndones / ndones).toFloat64Array === Array(1d, 1d))
assert((ndzeros / ndones).toFloat64Array === Array(0d, 0d))
ndhalves /= ndhalves
// in-place
assert(ndhalves.toFloat64Array === Array(1d, 1d))
}
test("full") {
var arr = NDArray.full(Shape(1, 2), 3f)
assert(arr.shape === Shape(1, 2))
assert(arr.toArray === Array(3f, 3f))
// Float64 methods test
arr = NDArray.full(Shape(1, 2), value = 5d, Context.cpu())
assert(arr.toFloat64Array === Array (5d, 5d))
}
test("clip") {
var ndarray = NDArray.empty(3, 2)
ndarray.set(Array(1f, 2f, 3f, 4f, 5f, 6f))
assert(NDArray.clip(ndarray, 2f, 5f).toArray === Array(2f, 2f, 3f, 4f, 5f, 5f))
// Float64 methods test
ndarray = NDArray.empty(shape = Shape(3, 2), dtype = DType.Float64)
ndarray.set(Array(1d, 2d, 3d, 4d, 5d, 6d))
assert(NDArray.clip(ndarray, 2d, 5d).toFloat64Array === Array(2d, 2d, 3d, 4d, 5d, 5d))
}
test("sqrt") {
var ndarray = NDArray.empty(4, 1)
ndarray.set(Array(0f, 1f, 4f, 9f))
assert(NDArray.sqrt(ndarray).toArray === Array(0f, 1f, 2f, 3f))
// Float64 methods test
ndarray = NDArray.empty(shape = Shape(4, 1), dtype = DType.Float64)
ndarray.set(Array(0d, 1d, 4d, 9d))
assert(NDArray.sqrt(ndarray).toFloat64Array === Array(0d, 1d, 2d, 3d))
}
test("rsqrt") {
var ndarray = NDArray.array(Array(1f, 4f), shape = Shape(2, 1))
assert(NDArray.rsqrt(ndarray).toArray === Array(1f, 0.5f))
// Float64 methods test
ndarray = NDArray.array(Array(1d, 4d, 25d), shape = Shape(3, 1), Context.cpu())
assert(NDArray.rsqrt(ndarray).toFloat64Array === Array(1d, 0.5d, 0.2d))
}
test("norm") {
var ndarray = NDArray.empty(3, 1)
ndarray.set(Array(1f, 2f, 3f))
var normed = NDArray.norm(ndarray)
assert(normed.shape === Shape(1))
assert(normed.toScalar === math.sqrt(14.0).toFloat +- 1e-3f)
// Float64 methods test
ndarray = NDArray.empty(shape = Shape(3, 1), dtype = DType.Float64)
ndarray.set(Array(1d, 2d, 3d))
normed = NDArray.norm(ndarray)
assert(normed.get.dtype === DType.Float64)
assert(normed.shape === Shape(1))
assert(normed.toFloat64Scalar === math.sqrt(14.0) +- 1e-3d)
}
test("one hot encode") {
val indices = NDArray.array(Array(1f, 0f, 2f), shape = Shape(3))
val array = NDArray.empty(3, 3)
NDArray.onehotEncode(indices, array)
assert(array.shape === Shape(3, 3))
assert(array.toArray === Array(0f, 1f, 0f,
1f, 0f, 0f,
0f, 0f, 1f))
}
test("dot") {
val arr1 = NDArray.array(Array(1f, 2f), shape = Shape(1, 2))
val arr2 = NDArray.array(Array(3f, 4f), shape = Shape(2, 1))
val res = NDArray.dot(arr1, arr2)
assert(res.shape === Shape(1, 1))
assert(res.toArray === Array(11f))
}
test("arange") {
for (i <- 0 until 5) {
val start = scala.util.Random.nextFloat() * 5
val stop = start + scala.util.Random.nextFloat() * 100
val step = scala.util.Random.nextFloat() * 4
val repeat = 1
val result = (start.toDouble until stop.toDouble by step.toDouble)
.flatMap(x => Array.fill[Float](repeat)(x.toFloat))
val range = NDArray.arange(start = start, stop = Some(stop), step = step,
repeat = repeat, ctx = Context.cpu(), dType = DType.Float32)
assert(CheckUtils.reldiff(result.toArray, range.toArray) <= 1e-4f)
}
}
// Verifies power operations: scalar base, scalar exponent, NDArray exponent,
// the ** operator, and in-place **=, for Float32 and Float64 dtypes.
test("power") {
  var arr = NDArray.array(Array(3f, 5f), shape = Shape(2, 1))
  var arrPower1 = NDArray.power(2f, arr)
  assert(arrPower1.shape === Shape(2, 1))
  assert(arrPower1.toArray === Array(8f, 32f))
  var arrPower2 = NDArray.power(arr, 2f)
  assert(arrPower2.shape === Shape(2, 1))
  assert(arrPower2.toArray === Array(9f, 25f))
  var arrPower3 = NDArray.power(arr, arr)
  assert(arrPower3.shape === Shape(2, 1))
  assert(arrPower3.toArray === Array(27f, 3125f))
  var arrPower4 = arr ** 2f
  assert(arrPower4.shape === Shape(2, 1))
  assert(arrPower4.toArray === Array(9f, 25f))
  var arrPower5 = arr ** arr
  assert(arrPower5.shape === Shape(2, 1))
  assert(arrPower5.toArray === Array(27f, 3125f))
  arr **= 2f
  assert(arr.shape === Shape(2, 1))
  assert(arr.toArray === Array(9f, 25f))
  arr.set(Array(3f, 5f))
  arr **= arr
  assert(arr.shape === Shape(2, 1))
  assert(arr.toArray === Array(27f, 3125f))
  // Float64 tests
  arr = NDArray.array(Array(3d, 5d), shape = Shape(2, 1))
  arrPower1 = NDArray.power(2d, arr)
  assert(arrPower1.shape === Shape(2, 1))
  assert(arrPower1.dtype === DType.Float64)
  assert(arrPower1.toFloat64Array === Array(8d, 32d))
  arrPower2 = NDArray.power(arr, 2d)
  assert(arrPower2.shape === Shape(2, 1))
  assert(arrPower2.dtype === DType.Float64)
  assert(arrPower2.toFloat64Array === Array(9d, 25d))
  arrPower3 = NDArray.power(arr, arr)
  assert(arrPower3.shape === Shape(2, 1))
  assert(arrPower3.dtype === DType.Float64)
  assert(arrPower3.toFloat64Array === Array(27d, 3125d))
  // Fixed: use a Double exponent in the Float64 section (was `2f`),
  // matching the in-place `arr **= 2d` below and the Float64 dtype assertion.
  arrPower4 = arr ** 2d
  assert(arrPower4.shape === Shape(2, 1))
  assert(arrPower4.dtype === DType.Float64)
  assert(arrPower4.toFloat64Array === Array(9d, 25d))
  arrPower5 = arr ** arr
  assert(arrPower5.shape === Shape(2, 1))
  assert(arrPower5.dtype === DType.Float64)
  assert(arrPower5.toFloat64Array === Array(27d, 3125d))
  arr **= 2d
  assert(arr.shape === Shape(2, 1))
  assert(arr.dtype === DType.Float64)
  assert(arr.toFloat64Array === Array(9d, 25d))
  arr.set(Array(3d, 5d))
  arr **= arr
  assert(arr.shape === Shape(2, 1))
  assert(arr.dtype === DType.Float64)
  assert(arr.toFloat64Array === Array(27d, 3125d))
}
test("equal") {
var arr1 = NDArray.array(Array(1f, 2f, 3f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = NDArray.equal(arr1, arr2)
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(1f, 0f, 1f, 0f))
var arrEqual2 = NDArray.equal(arr1, 3f)
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(0f, 0f, 1f, 0f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 3d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = NDArray.equal(arr1, arr2)
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(1d, 0d, 1d, 0d))
arrEqual2 = NDArray.equal(arr1, 3d)
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(0d, 0d, 1d, 0d))
}
test("not_equal") {
var arr1 = NDArray.array(Array(1f, 2f, 3f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = NDArray.notEqual(arr1, arr2)
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(0f, 1f, 0f, 1f))
var arrEqual2 = NDArray.notEqual(arr1, 3f)
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(1f, 1f, 0f, 1f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 3d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = NDArray.notEqual(arr1, arr2)
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(0d, 1d, 0d, 1d))
arrEqual2 = NDArray.notEqual(arr1, 3d)
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(1d, 1d, 0d, 1d))
}
test("greater") {
var arr1 = NDArray.array(Array(1f, 2f, 4f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = arr1 > arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(0f, 0f, 1f, 0f))
var arrEqual2 = arr1 > 2f
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(0f, 0f, 1f, 1f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 4d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = arr1 > arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(0d, 0d, 1d, 0d))
arrEqual2 = arr1 > 2d
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(0d, 0d, 1d, 1d))
}
test("greater_equal") {
var arr1 = NDArray.array(Array(1f, 2f, 4f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = arr1 >= arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(1f, 0f, 1f, 0f))
var arrEqual2 = arr1 >= 2f
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(0f, 1f, 1f, 1f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 4d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = arr1 >= arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(1d, 0d, 1d, 0d))
arrEqual2 = arr1 >= 2d
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(0d, 1d, 1d, 1d))
}
test("lesser") {
var arr1 = NDArray.array(Array(1f, 2f, 4f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = arr1 < arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(0f, 1f, 0f, 1f))
var arrEqual2 = arr1 < 2f
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(1f, 0f, 0f, 0f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 4d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = arr1 < arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(0d, 1d, 0d, 1d))
arrEqual2 = arr1 < 2d
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(1d, 0d, 0d, 0d))
}
test("lesser_equal") {
var arr1 = NDArray.array(Array(1f, 2f, 4f, 5f), shape = Shape(2, 2))
var arr2 = NDArray.array(Array(1f, 4f, 3f, 6f), shape = Shape(2, 2))
var arrEqual1 = arr1 <= arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.toArray === Array(1f, 1f, 0f, 1f))
var arrEqual2 = arr1 <= 2f
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.toArray === Array(1f, 1f, 0f, 0f))
// Float64 methods test
arr1 = NDArray.array(Array(1d, 2d, 4d, 5d), shape = Shape(2, 2))
arr2 = NDArray.array(Array(1d, 4d, 3d, 6d), shape = Shape(2, 2))
arrEqual1 = arr1 <= arr2
assert(arrEqual1.shape === Shape(2, 2))
assert(arrEqual1.dtype === DType.Float64)
assert(arrEqual1.toFloat64Array === Array(1d, 1d, 0d, 1d))
arrEqual2 = arr1 <= 2d
assert(arrEqual2.shape === Shape(2, 2))
assert(arrEqual2.dtype === DType.Float64)
assert(arrEqual2.toFloat64Array === Array(1d, 1d, 0d, 0d))
}
test("choose_element_0index") {
val arr = NDArray.array(Array(1f, 2f, 3f, 4f, 6f, 5f), shape = Shape(2, 3))
val indices = NDArray.array(Array(0f, 1f), shape = Shape(2))
val res = NDArray.choose_element_0index(arr, indices)
assert(res.toArray === Array(1f, 6f))
}
test("copy to") {
var source = NDArray.array(Array(1f, 2f, 3f), shape = Shape(1, 3))
var dest = NDArray.empty(1, 3)
source.copyTo(dest)
assert(dest.shape === Shape(1, 3))
assert(dest.toArray === Array(1f, 2f, 3f))
// Float64 methods test
source = NDArray.array(Array(1d, 2d, 3d), shape = Shape(1, 3))
dest = NDArray.empty(shape = Shape(1, 3), dtype = DType.Float64)
source.copyTo(dest)
assert(dest.dtype === DType.Float64)
assert(dest.toFloat64Array === Array(1d, 2d, 3d))
}
test("abs") {
val arr = NDArray.array(Array(-1f, -2f, 3f), shape = Shape(3, 1))
assert(NDArray.abs(arr).toArray === Array(1f, 2f, 3f))
}
test("sign") {
val arr = NDArray.array(Array(-1f, -2f, 3f), shape = Shape(3, 1))
assert(NDArray.sign(arr).toArray === Array(-1f, -1f, 1f))
}
test("round") {
val arr = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
assert(NDArray.round(arr).toArray === Array(2f, 2f, 4f))
}
test("ceil") {
val arr = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
assert(NDArray.ceil(arr).toArray === Array(2f, 3f, 4f))
}
test("floor") {
val arr = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
assert(NDArray.floor(arr).toArray === Array(1f, 2f, 3f))
}
test("square") {
val arr = NDArray.array(Array(1f, 2f, 3f), shape = Shape(3, 1))
assert(NDArray.square(arr).toArray === Array(1f, 4f, 9f))
}
test("exp") {
val arr = NDArray.ones(1)
assert(NDArray.exp(arr).toScalar === 2.71828f +- 1e-3f)
}
test("log") {
val arr = NDArray.empty(1)
arr.set(10f)
assert(NDArray.log(arr).toScalar === 2.302585f +- 1e-5f)
}
test("cos") {
val arr = NDArray.empty(1)
arr.set(12f)
assert(NDArray.cos(arr).toScalar === 0.8438539f +- 1e-5f)
}
test("sin") {
val arr = NDArray.empty(1)
arr.set(12f)
assert(NDArray.sin(arr).toScalar === -0.536572918f +- 1e-5f)
}
test("max") {
val arr = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
assert(NDArray.max(arr).toScalar === 3.7f +- 1e-3f)
}
// Verifies element-wise maximum of two NDArrays (Float32) and of an
// NDArray against a scalar (Float64).
test("maximum") {
  val arr1 = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
  val arr2 = NDArray.array(Array(4f, 1f, 3.5f), shape = Shape(3, 1))
  val arr = NDArray.maximum(arr1, arr2)
  assert(arr.shape === Shape(3, 1))
  assert(arr.toArray === Array(4f, 2.1f, 3.7f))
  // Float64 methods test
  val arr3 = NDArray.array(Array(1d, 2d, 3d), shape = Shape(3, 1))
  val maxArr = NDArray.maximum(arr3, 10d)
  assert(maxArr.shape === Shape(3, 1))
  // Fixed: arr3 is a Float64 array, so read it with toFloat64Array
  // (was toArray), mirroring the "minimum" test below.
  assert(maxArr.toFloat64Array === Array(10d, 10d, 10d))
}
test("min") {
val arr = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
assert(NDArray.min(arr).toScalar === 1.5f +- 1e-3f)
}
test("minimum") {
val arr1 = NDArray.array(Array(1.5f, 2.1f, 3.7f), shape = Shape(3, 1))
val arr2 = NDArray.array(Array(4f, 1f, 3.5f), shape = Shape(3, 1))
val arr = NDArray.minimum(arr1, arr2)
assert(arr.shape === Shape(3, 1))
assert(arr.toArray === Array(1.5f, 1f, 3.5f))
// Float64 methods test
val arr3 = NDArray.array(Array(4d, 5d, 6d), shape = Shape(3, 1))
val minArr = NDArray.minimum(arr3, 5d)
assert(minArr.shape === Shape(3, 1))
assert(minArr.toFloat64Array === Array(4d, 5d, 5d))
}
// Verifies that sum reduces all elements to a single scalar.
test("sum") {
  // Fixed: `val` instead of `var` — the reference is never reassigned.
  val arr = NDArray.array(Array(1f, 2f, 3f, 4f), shape = Shape(2, 2))
  assert(NDArray.sum(arr).toScalar === 10f +- 1e-3f)
}
test("argmaxChannel") {
val arr = NDArray.array(Array(1f, 2f, 4f, 3f), shape = Shape(2, 2))
val argmax = NDArray.argmax_channel(arr)
assert(argmax.shape === Shape(2))
assert(argmax.toArray === Array(1f, 0f))
}
test("concatenate axis-0") {
val arr1 = NDArray.array(Array(1f, 2f, 4f, 3f, 3f, 3f), shape = Shape(2, 3))
val arr2 = NDArray.array(Array(8f, 7f, 6f), shape = Shape(1, 3))
val arr = NDArray.concatenate(arr1, arr2)
assert(arr.shape === Shape(3, 3))
assert(arr.toArray === Array(1f, 2f, 4f, 3f, 3f, 3f, 8f, 7f, 6f))
// Try concatenating float32 arr with float64 arr. Should get exception
intercept[Exception] {
val arr3 = NDArray.array(Array (5d, 6d, 7d), shape = Shape(1, 3))
NDArray.concatenate(Array(arr1, arr3))
}
}
test("concatenate axis-1") {
val arr1 = NDArray.array(Array(1f, 2f, 3f, 4f), shape = Shape(2, 2))
val arr2 = NDArray.array(Array(5f, 6f), shape = Shape(2, 1))
val arr = NDArray.concatenate(Array(arr1, arr2), axis = 1)
assert(arr.shape === Shape(2, 3))
assert(arr.toArray === Array(1f, 2f, 5f, 3f, 4f, 6f))
// Try concatenating float32 arr with float64 arr. Should get exception
intercept[Exception] {
val arr3 = NDArray.array(Array (5d, 6d), shape = Shape(2, 1))
NDArray.concatenate(Array(arr1, arr3), axis = 1)
}
}
test("transpose") {
val arr = NDArray.array(Array(1f, 2f, 4f, 3f, 3f, 3f), shape = Shape(2, 3))
assert(arr.toArray === Array(1f, 2f, 4f, 3f, 3f, 3f))
assert(arr.T.shape === Shape(3, 2))
assert(arr.T.toArray === Array(1f, 3f, 2f, 3f, 4f, 3f))
}
test("save and load with names") {
val filename
= s"${System.getProperty("java.io.tmpdir")}/ndarray-${sequence.getAndIncrement}.bin"
try {
val ndarray = NDArray.array(Array(1f, 2f, 3f), shape = Shape(3, 1))
NDArray.save(filename, Map("local" -> ndarray))
val (keys, arrays) = NDArray.load(filename)
assert(keys.length === 1)
assert(keys(0) === "local")
assert(arrays.length === 1)
val loadedArray = arrays(0)
assert(loadedArray.shape === Shape(3, 1))
assert(loadedArray.toArray === Array(1f, 2f, 3f))
assert(loadedArray.dtype === DType.Float32)
} finally {
val file = new File(filename)
file.delete()
}
// Try the same for Float64 array
try {
val ndarray = NDArray.array(Array(1d, 2d, 3d), shape = Shape(3, 1), ctx = Context.cpu())
NDArray.save(filename, Map("local" -> ndarray))
val (keys, arrays) = NDArray.load(filename)
assert(keys.length === 1)
assert(keys(0) === "local")
assert(arrays.length === 1)
val loadedArray = arrays(0)
assert(loadedArray.shape === Shape(3, 1))
assert(loadedArray.toArray === Array(1d, 2d, 3d))
assert(loadedArray.dtype === DType.Float64)
} finally {
val file = new File(filename)
file.delete()
}
}
test("save and load without names") {
val filename
= s"${System.getProperty("java.io.tmpdir")}/ndarray-${sequence.getAndIncrement}.bin"
try {
val ndarray = NDArray.array(Array(1f, 2f, 3f), shape = Shape(3, 1))
NDArray.save(filename, Array(ndarray))
val (keys, arrays) = NDArray.load(filename)
assert(keys.length === 0)
assert(arrays.length === 1)
val loadedArray = arrays(0)
assert(loadedArray.shape === Shape(3, 1))
assert(loadedArray.toArray === Array(1f, 2f, 3f))
assert(loadedArray.dtype === DType.Float32)
} finally {
val file = new File(filename)
file.delete()
}
// Try the same thing for Float64 array :
try {
val ndarray = NDArray.array(Array(1d, 2d, 3d), shape = Shape(3, 1), ctx = Context.cpu())
NDArray.save(filename, Array(ndarray))
val (keys, arrays) = NDArray.load(filename)
assert(keys.length === 0)
assert(arrays.length === 1)
val loadedArray = arrays(0)
assert(loadedArray.shape === Shape(3, 1))
assert(loadedArray.toArray === Array(1d, 2d, 3d))
assert(loadedArray.dtype === DType.Float64)
} finally {
val file = new File(filename)
file.delete()
}
}
test("get context") {
val ndarray = NDArray.ones(3, 2)
val ctx = ndarray.context
assert(ctx.deviceType === "cpu")
assert(ctx.deviceId === 0)
}
test("equals") {
val ndarray1 = NDArray.array(Array(1f, 2f, 3f), shape = Shape(3, 1))
val ndarray2 = NDArray.array(Array(1f, 2f, 3f), shape = Shape(3, 1))
val ndarray3 = NDArray.array(Array(1f, 2f, 3f), shape = Shape(1, 3))
val ndarray4 = NDArray.array(Array(3f, 2f, 3f), shape = Shape(3, 1))
val ndarray5 = NDArray.array(Array(3d, 2d, 3d), shape = Shape(3, 1), ctx = Context.cpu())
ndarray1 shouldEqual ndarray2
ndarray1 shouldNot equal(ndarray3)
ndarray1 shouldNot equal(ndarray4)
ndarray5 shouldNot equal(ndarray3)
}
test("slice") {
val arr = NDArray.array(Array(1f, 2f, 3f, 4f, 5f, 6f), shape = Shape(3, 2))
val arr1 = arr.slice(1)
assert(arr1.shape === Shape(1, 2))
assert(arr1.toArray === Array(3f, 4f))
val arr2 = arr.slice(1, 3)
assert(arr2.shape === Shape(2, 2))
assert(arr2.toArray === Array(3f, 4f, 5f, 6f))
}
test("at") {
val arr = NDArray.array(Array(1f, 2f, 3f, 4f, 5f, 6f), shape = Shape(3, 2))
val arr1 = arr.at(1)
assert(arr1.shape === Shape(2))
assert(arr1.toArray === Array(3f, 4f))
}
test("reshape") {
val arr = NDArray.array(Array(1f, 2f, 3f, 4f, 5f, 6f), shape = Shape(3, 2))
val arr1 = arr.reshape(Array(2, 3))
assert(arr1.shape === Shape(2, 3))
assert(arr1.toArray === Array(1f, 2f, 3f, 4f, 5f, 6f))
arr.set(1f)
assert(arr1.toArray === Array(1f, 1f, 1f, 1f, 1f, 1f))
}
test("dispose deps") {
val arr1 = NDArray.ones(1, 2)
val arr2 = NDArray.ones(1, 2)
val arr3 = NDArray.ones(1, 2)
val arrWithDeps = (arr1 + arr2) + arr3
assert(arrWithDeps.dependencies.size === 4) // arr1 + arr2
assert(arrWithDeps.dependencies.contains(arr1.handle))
assert(arrWithDeps.dependencies.contains(arr2.handle))
assert(arrWithDeps.dependencies.contains(arr3.handle))
assert(!arr1.isDisposed)
assert(!arr2.isDisposed)
assert(!arr3.isDisposed)
val arrNoDeps = (arr1 + arr2 + arr3).disposeDeps()
assert(arrNoDeps.dependencies.isEmpty)
assert(arr1.isDisposed)
assert(arr2.isDisposed)
assert(arr3.isDisposed)
}
test("dispose deps except") {
val arr1 = NDArray.ones(1, 2)
val arr2 = NDArray.ones(1, 2)
val arr3 = NDArray.ones(1, 2)
val arr1_2 = arr1 + arr2
val arr = (arr1 + arr2 + arr1_2 + arr3).disposeDepsExcept(arr1_2)
// since arr1_2 depends on arr1 & arr2
// arr1 & arr2 will not be disposed either
assert(arr.dependencies.size === 3)
assert(arr.dependencies.contains(arr1.handle))
assert(arr.dependencies.contains(arr2.handle))
assert(arr.dependencies.contains(arr1_2.handle))
assert(!arr1.isDisposed)
assert(!arr2.isDisposed)
assert(!arr1_2.isDisposed)
assert(arr3.isDisposed)
}
test("serialize and deserialize") {
val arr = NDArray.ones(1, 2) * 3
val bytes = arr.serialize()
val arrCopy = NDArray.deserialize(bytes)
assert(arr === arrCopy)
assert(arrCopy.dtype === DType.Float32)
}
test("dtype int32") {
val arr = NDArray.ones(Shape(1, 2), dtype = DType.Int32) * 2
assert(arr.dtype === DType.Int32)
assert(arr.internal.getRaw.length === 8)
assert(arr.internal.toFloatArray === Array(2f, 2f))
assert(arr.internal.toIntArray === Array(2, 2))
assert(arr.internal.toDoubleArray === Array(2d, 2d))
assert(arr.internal.toByteArray === Array(2.toByte, 2.toByte))
}
test("dtype uint8") {
val arr = NDArray.ones(Shape(1, 2), dtype = DType.UInt8) * 2
assert(arr.dtype === DType.UInt8)
assert(arr.internal.getRaw.length === 2)
assert(arr.internal.toFloatArray === Array(2f, 2f))
assert(arr.internal.toIntArray === Array(2, 2))
assert(arr.internal.toDoubleArray === Array(2d, 2d))
assert(arr.internal.toByteArray === Array(2.toByte, 2.toByte))
}
test("dtype float64") {
val arr = NDArray.ones(Shape(1, 2), dtype = DType.Float64) * 2
assert(arr.dtype === DType.Float64)
assert(arr.internal.getRaw.length === 16)
assert(arr.internal.toFloatArray === Array(2f, 2f))
assert(arr.internal.toIntArray === Array(2, 2))
assert(arr.internal.toDoubleArray === Array(2d, 2d))
assert(arr.internal.toByteArray === Array(2.toByte, 2.toByte))
}
test("NDArray random module is generated properly") {
val lam = NDArray.ones(1, 2)
val rnd = NDArray.random.poisson(lam = Some(lam), shape = Some(Shape(3, 4)))
val rnd2 = NDArray.random.poisson(lam = Some(1f), shape = Some(Shape(3, 4)),
dtype = Some("float64"))
assert(rnd.shape === Shape(1, 2, 3, 4))
assert(rnd2.shape === Shape(3, 4))
assert(rnd2.head.dtype === DType.Float64)
}
test("NDArray random module is generated properly - special case of 'normal'") {
val mu = NDArray.ones(1, 2)
val sigma = NDArray.ones(1, 2) * 2
val rnd = NDArray.random.normal(mu = Some(mu), sigma = Some(sigma), shape = Some(Shape(3, 4)))
val rnd2 = NDArray.random.normal(mu = Some(1f), sigma = Some(2f), shape = Some(Shape(3, 4)),
dtype = Some("float64"))
assert(rnd.shape === Shape(1, 2, 3, 4))
assert(rnd2.shape === Shape(3, 4))
assert(rnd2.head.dtype === DType.Float64)
}
test("Generated api") {
// Without SomeConversion
val arr3 = NDArray.ones(Shape(1, 2), dtype = DType.Float64)
val arr4 = NDArray.ones(Shape(1), dtype = DType.Float64)
val arr5 = NDArray.api.norm(arr3, ord = Some(1), out = Some(arr4))
// With SomeConversion
import org.apache.mxnet.util.OptionConversion._
val arr = NDArray.ones(Shape(1, 2), dtype = DType.Float64)
val arr2 = NDArray.ones(Shape(1), dtype = DType.Float64)
NDArray.api.norm(arr, ord = 1, out = arr2)
val result = NDArray.api.dot(arr2, arr2)
}
}
| ptrendx/mxnet | scala-package/core/src/test/scala/org/apache/mxnet/NDArraySuite.scala | Scala | apache-2.0 | 33,485 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.job
import java.io.File
import org.apache.samza.config.Config
import org.apache.samza.coordinator.stream.MockCoordinatorStreamSystemFactory
import org.junit.After
import org.junit.Assert._
import org.junit.Test
/** Shared mutable counters incremented by [[MockJobFactory]]'s StreamJob so the
  * tests below can observe which JobRunner operation was invoked.
  * `var` is intentional here: the counters are mutated cross-class by the mock.
  */
object TestJobRunner {
  // Number of times StreamJob.submit() was called.
  var processCount = 0
  // Number of times StreamJob.kill() was called.
  var killCount = 0
  // Number of times StreamJob.getStatus() was called.
  var getStatusCount = 0
}
/** Exercises JobRunner.main end-to-end against a properties file that wires in
  * [[MockJobFactory]], checking that submit/kill/status operations reach the job.
  */
class TestJobRunner {
  @After
  def teardown(): Unit = {
    MockCoordinatorStreamSystemFactory.disableMockConsumerCache()
  }

  /** Invokes JobRunner.main with the shared test configuration plus any
    * operation-specific arguments. Extracted to remove the triplicated
    * argument boilerplate from the tests below.
    */
  private def runJobRunner(extraArgs: String*): Unit = {
    val baseArgs = Array(
      "--config-factory",
      "org.apache.samza.config.factories.PropertiesConfigFactory",
      "--config-path",
      "file://%s/src/test/resources/test.properties" format new File(".").getCanonicalPath)
    JobRunner.main(baseArgs ++ extraArgs)
  }

  @Test
  def testJobRunnerWorks(): Unit = {
    MockCoordinatorStreamSystemFactory.enableMockConsumerCache()
    assertEquals(0, TestJobRunner.processCount)
    // Default operation is submit.
    runJobRunner()
    assertEquals(1, TestJobRunner.processCount)
  }

  @Test
  def testJobRunnerKillWorks(): Unit = {
    MockCoordinatorStreamSystemFactory.enableMockConsumerCache()
    assertEquals(0, TestJobRunner.killCount)
    runJobRunner("--operation=kill")
    assertEquals(1, TestJobRunner.killCount)
  }

  @Test
  def testJobRunnerStatusWorks(): Unit = {
    MockCoordinatorStreamSystemFactory.enableMockConsumerCache()
    assertEquals(0, TestJobRunner.getStatusCount)
    runJobRunner("--operation=status")
    assertEquals(1, TestJobRunner.getStatusCount)
  }
}
/**
 * Test double for [[StreamJobFactory]]: produces jobs that only increment
 * the counters on [[TestJobRunner]] and report immediate success, so the
 * JobRunner CLI can be exercised without a real cluster.
 *
 * (Removed the redundant `return` keyword — the anonymous class is already
 * the method's last expression; `return` is a Scala anti-pattern.)
 */
class MockJobFactory extends StreamJobFactory {
  def getJob(config: Config): StreamJob =
    new StreamJob {
      def submit() = { TestJobRunner.processCount += 1; this }
      def kill() = { TestJobRunner.killCount += 1; this }
      // Lifecycle queries succeed immediately; no asynchrony is simulated.
      def waitForFinish(timeoutMs: Long) = ApplicationStatus.SuccessfulFinish
      def waitForStatus(status: ApplicationStatus, timeoutMs: Long) = status
      def getStatus() = { TestJobRunner.getStatusCount += 1; ApplicationStatus.SuccessfulFinish }
    }
}
| TiVo/samza | samza-core/src/test/scala/org/apache/samza/job/TestJobRunner.scala | Scala | apache-2.0 | 3,142 |
package registration.auditor
/** Connection details for a single auditor API endpoint. */
case class AuditorApiConfig(
  url: String,
  apiKey: String)

/** Auditor endpoints grouped per backend: the Content API and the PA API. */
case class AuditorGroupConfig(
  contentApiConfig: AuditorApiConfig,
  paApiConfig: AuditorApiConfig)
package lila.fishnet
import org.joda.time.DateTime
import chess.format.{ Uci, FEN }
import chess.variant.Variant
/** A unit of work (an engine move or a game analysis) handed out to
  * fishnet client machines. Concrete cases live in the [[Work]] object. */
sealed trait Work {
  def _id: Work.Id
  def game: Work.Game
  // How many times this work has been handed out so far.
  def tries: Int
  // Key of the last client that attempted this work, if any.
  def lastTryByKey: Option[Client.Key]
  // Present while some client currently holds this work.
  def acquired: Option[Work.Acquired]
  def createdAt: DateTime
  def skill: Client.Skill
  def id = _id
  def acquiredAt = acquired.map(_.date)
  def acquiredByKey = acquired.map(_.clientKey)
  def isAcquiredBy(client: Client) = acquiredByKey contains client.key
  def isAcquired = acquired.isDefined
  def nonAcquired = !isAcquired
  // A client may not re-acquire work it was the last one to attempt.
  def canAcquire(client: Client) = lastTryByKey.fold(true)(client.key !=)
  // `??` is lila's Option helper (None maps to false) — presumably; verify.
  def acquiredBefore(date: DateTime) = acquiredAt.??(_ isBefore date)
}
object Work {
  /** Opaque random identifier of a unit of work. */
  case class Id(value: String) extends AnyVal with StringValue
  /** Records which client currently holds the work, and since when. */
  case class Acquired(
    clientKey: Client.Key,
    userId: Client.UserId,
    date: DateTime) {
    def ageInMillis = nowMillis - date.getMillis
    override def toString = s"by $userId at $date"
  }
  /** The game the work refers to, with enough data to replay its moves. */
  case class Game(
    id: String,
    initialFen: Option[FEN],
    variant: Variant,
    moves: String) {
    // Moves are stored as a single space-separated UCI string.
    def moveList = moves.split(' ').toList
    def uciList = Uci readList moves
  }
  /** Who requested an analysis (used for display and accountability). */
  case class Sender(
    userId: Option[String],
    ip: Option[String],
    mod: Boolean,
    system: Boolean) {
    override def toString = if (system) "lichess" else userId orElse ip getOrElse "unknown"
  }
  /** A request for a single engine move at a given strength level. */
  case class Move(
    _id: Work.Id, // random
    game: Game,
    currentFen: FEN,
    level: Int,
    tries: Int,
    lastTryByKey: Option[Client.Key],
    acquired: Option[Acquired],
    createdAt: DateTime) extends Work {
    def skill = Client.Skill.Move
    // Hand the work to a client: record the acquisition and count the try.
    def assignTo(client: Client) = copy(
      acquired = Acquired(
        clientKey = client.key,
        userId = client.userId,
        date = DateTime.now).some,
      lastTryByKey = client.key.some,
      tries = tries + 1)
    def timeout = copy(acquired = none)
    def invalid = copy(acquired = none)
    // Moves are retried at most 3 times before being given up on.
    def isOutOfTries = tries >= 3
    def similar(to: Move) = game.id == to.game.id && currentFen == to.currentFen
    override def toString = s"id:$id game:${game.id} variant:${game.variant.key} level:$level tries:$tries created:$createdAt acquired:$acquired"
  }
  /** A request for a full game analysis over a range of plies. */
  case class Analysis(
    _id: Work.Id, // random
    sender: Sender,
    game: Game,
    startPly: Int,
    nbPly: Int,
    tries: Int,
    lastTryByKey: Option[Client.Key],
    acquired: Option[Acquired],
    createdAt: DateTime) extends Work {
    def skill = Client.Skill.Analysis
    def assignTo(client: Client) = copy(
      acquired = Acquired(
        clientKey = client.key,
        userId = client.userId,
        date = DateTime.now).some,
      lastTryByKey = client.key.some,
      tries = tries + 1)
    def timeout = copy(acquired = none)
    def invalid = copy(acquired = none)
    def weak = copy(acquired = none)
    // Analyses are heavier than moves, so only 2 attempts are allowed.
    def isOutOfTries = tries >= 2
    def abort = copy(acquired = none)
    def inProgress = acquired map { a =>
      InProgress(a.userId, a.date)
    }
    override def toString = s"id:$id game:${game.id} tries:$tries requestedBy:$sender acquired:$acquired"
  }
  // 8 random alphanumeric chars; collision risk is accepted at this volume.
  def makeId = Id(scala.util.Random.alphanumeric take 8 mkString)
  /** Lightweight view of an acquired analysis, for progress display. */
  case class InProgress(by: Client.UserId, since: DateTime) {
    def byLichess = by.value startsWith "lichess-"
  }
}
| clarkerubber/lila | modules/fishnet/src/main/Work.scala | Scala | agpl-3.0 | 3,399 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package com.google.protobuf.struct
/** `Struct` represents a structured data value, consisting of fields
* which map to dynamically typed values. In some languages, `Struct`
* might be supported by a native representation. For example, in
* scripting languages like JS a struct is represented as an
* object. The details of that representation are described together
* with the proto support for the language.
*
* The JSON representation for `Struct` is JSON object.
*
* @param fields
* Unordered map of dynamically typed values.
*/
// NOTE(review): ScalaPB-generated code ("Do not edit!") — prefer
// regenerating from struct.proto; the comments below are review notes only.
@SerialVersionUID(0L)
final case class Struct(
    fields: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, com.google.protobuf.struct.Value] = _root_.scala.collection.immutable.Map.empty
    ) extends scalapb.GeneratedMessage with scalapb.Message[Struct] with scalapb.lenses.Updatable[Struct] {
    @transient
    // Cached wire size; 0 doubles as the "not yet computed" sentinel.
    private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
    private[this] def __computeSerializedValue(): _root_.scala.Int = {
      var __size = 0
      fields.foreach { __item =>
        val __value = com.google.protobuf.struct.Struct._typemapper_fields.toBase(__item)
        __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
      }
      __size
    }
    final override def serializedSize: _root_.scala.Int = {
      var read = __serializedSizeCachedValue
      if (read == 0) {
        read = __computeSerializedValue()
        __serializedSizeCachedValue = read
      }
      read
    }
    // Each map entry is written as an embedded FieldsEntry message under
    // field number 1 (tag 10 = field 1, wire type 2).
    def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
      fields.foreach { __v =>
        val __m = com.google.protobuf.struct.Struct._typemapper_fields.toBase(__v)
        _output__.writeTag(1, 2)
        _output__.writeUInt32NoTag(__m.serializedSize)
        __m.writeTo(_output__)
      };
    }
    // Protobuf merge semantics: entries read from the stream are added on
    // top of (and may overwrite) this instance's existing map entries.
    def mergeFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.struct.Struct = {
      val __fields = (_root_.scala.collection.immutable.Map.newBuilder[_root_.scala.Predef.String, com.google.protobuf.struct.Value] ++= this.fields)
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          case 0 => _done__ = true
          case 10 =>
            __fields += com.google.protobuf.struct.Struct._typemapper_fields.toCustom(_root_.scalapb.LiteParser.readMessage(_input__, com.google.protobuf.struct.Struct.FieldsEntry.defaultInstance))
          case tag => _input__.skipField(tag)
        }
      }
      com.google.protobuf.struct.Struct(
          fields = __fields.result()
      )
    }
    def clearFields = copy(fields = _root_.scala.collection.immutable.Map.empty)
    def addFields(__vs: (_root_.scala.Predef.String, com.google.protobuf.struct.Value)*): Struct = addAllFields(__vs)
    def addAllFields(__vs: Iterable[(_root_.scala.Predef.String, com.google.protobuf.struct.Value)]): Struct = copy(fields = fields ++ __vs)
    def withFields(__v: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, com.google.protobuf.struct.Value]): Struct = copy(fields = __v)
    def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
      (__fieldNumber: @_root_.scala.unchecked) match {
        case 1 => fields.iterator.map(com.google.protobuf.struct.Struct._typemapper_fields.toBase).toSeq
      }
    }
    def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
      _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
      (__field.number: @_root_.scala.unchecked) match {
        case 1 => _root_.scalapb.descriptors.PRepeated(fields.iterator.map(com.google.protobuf.struct.Struct._typemapper_fields.toBase(_).toPMessage).toVector)
      }
    }
    def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
    def companion = com.google.protobuf.struct.Struct
}
// NOTE(review): ScalaPB-generated companion — regenerate rather than edit.
object Struct extends scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.Struct] {
  implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.Struct] = this
  def fromFieldsMap(__fieldsMap: scala.collection.immutable.Map[_root_.com.google.protobuf.Descriptors.FieldDescriptor, _root_.scala.Any]): com.google.protobuf.struct.Struct = {
    _root_.scala.Predef.require(__fieldsMap.keys.forall(_.getContainingType() == javaDescriptor), "FieldDescriptor does not match message type.")
    val __fields = javaDescriptor.getFields
    com.google.protobuf.struct.Struct(
      __fieldsMap.getOrElse(__fields.get(0), Nil).asInstanceOf[_root_.scala.Seq[com.google.protobuf.struct.Struct.FieldsEntry]].iterator.map(com.google.protobuf.struct.Struct._typemapper_fields.toCustom).toMap
    )
  }
  implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.struct.Struct] = _root_.scalapb.descriptors.Reads{
    case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage == scalaDescriptor), "FieldDescriptor does not match message type.")
      com.google.protobuf.struct.Struct(
        __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[com.google.protobuf.struct.Struct.FieldsEntry]]).getOrElse(_root_.scala.Seq.empty).iterator.map(com.google.protobuf.struct.Struct._typemapper_fields.toCustom).toMap
      )
    case _ => throw new RuntimeException("Expected PMessage")
  }
  def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = StructProto.javaDescriptor.getMessageTypes.get(0)
  def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = StructProto.scalaDescriptor.messages(0)
  def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
    var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
    (__number: @_root_.scala.unchecked) match {
      case 1 => __out = com.google.protobuf.struct.Struct.FieldsEntry
    }
    __out
  }
  lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
    Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
      _root_.com.google.protobuf.struct.Struct.FieldsEntry
    )
  def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
  lazy val defaultInstance = com.google.protobuf.struct.Struct(
  )
  // Synthetic message for the proto3 map<string, Value> entry: one key/value
  // pair of the Struct.fields map on the wire.
  @SerialVersionUID(0L)
  final case class FieldsEntry(
      key: _root_.scala.Predef.String = "",
      value: _root_.scala.Option[com.google.protobuf.struct.Value] = _root_.scala.None
      ) extends scalapb.GeneratedMessage with scalapb.Message[FieldsEntry] with scalapb.lenses.Updatable[FieldsEntry] {
      @transient
      private[this] var __serializedSizeCachedValue: _root_.scala.Int = 0
      private[this] def __computeSerializedValue(): _root_.scala.Int = {
        var __size = 0
        {
          val __value = key
          if (__value != "") {
            __size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(1, __value)
          }
        };
        if (value.isDefined) {
          val __value = value.get
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        };
        __size
      }
      final override def serializedSize: _root_.scala.Int = {
        var read = __serializedSizeCachedValue
        if (read == 0) {
          read = __computeSerializedValue()
          __serializedSizeCachedValue = read
        }
        read
      }
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        {
          val __v = key
          if (__v != "") {
            _output__.writeString(1, __v)
          }
        };
        value.foreach { __v =>
          val __m = __v
          _output__.writeTag(2, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
      }
      def mergeFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.struct.Struct.FieldsEntry = {
        var __key = this.key
        var __value = this.value
        var _done__ = false
        while (!_done__) {
          val _tag__ = _input__.readTag()
          _tag__ match {
            case 0 => _done__ = true
            case 10 =>
              __key = _input__.readString()
            case 18 =>
              __value = Option(_root_.scalapb.LiteParser.readMessage(_input__, __value.getOrElse(com.google.protobuf.struct.Value.defaultInstance)))
            case tag => _input__.skipField(tag)
          }
        }
        com.google.protobuf.struct.Struct.FieldsEntry(
            key = __key,
            value = __value
        )
      }
      def withKey(__v: _root_.scala.Predef.String): FieldsEntry = copy(key = __v)
      def getValue: com.google.protobuf.struct.Value = value.getOrElse(com.google.protobuf.struct.Value.defaultInstance)
      def clearValue: FieldsEntry = copy(value = _root_.scala.None)
      def withValue(__v: com.google.protobuf.struct.Value): FieldsEntry = copy(value = Option(__v))
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => {
            val __t = key
            if (__t != "") __t else null
          }
          case 2 => value.orNull
        }
      }
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => _root_.scalapb.descriptors.PString(key)
          case 2 => value.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion = com.google.protobuf.struct.Struct.FieldsEntry
  }
  object FieldsEntry extends scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.Struct.FieldsEntry] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.struct.Struct.FieldsEntry] = this
    def fromFieldsMap(__fieldsMap: scala.collection.immutable.Map[_root_.com.google.protobuf.Descriptors.FieldDescriptor, _root_.scala.Any]): com.google.protobuf.struct.Struct.FieldsEntry = {
      _root_.scala.Predef.require(__fieldsMap.keys.forall(_.getContainingType() == javaDescriptor), "FieldDescriptor does not match message type.")
      val __fields = javaDescriptor.getFields
      com.google.protobuf.struct.Struct.FieldsEntry(
        __fieldsMap.getOrElse(__fields.get(0), "").asInstanceOf[_root_.scala.Predef.String],
        __fieldsMap.get(__fields.get(1)).asInstanceOf[_root_.scala.Option[com.google.protobuf.struct.Value]]
      )
    }
    implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.struct.Struct.FieldsEntry] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage == scalaDescriptor), "FieldDescriptor does not match message type.")
        com.google.protobuf.struct.Struct.FieldsEntry(
          __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Predef.String]).getOrElse(""),
          __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).flatMap(_.as[_root_.scala.Option[com.google.protobuf.struct.Value]])
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = com.google.protobuf.struct.Struct.javaDescriptor.getNestedTypes.get(0)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = com.google.protobuf.struct.Struct.scalaDescriptor.nestedMessages(0)
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
      var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
      (__number: @_root_.scala.unchecked) match {
        case 2 => __out = com.google.protobuf.struct.Value
      }
      __out
    }
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    lazy val defaultInstance = com.google.protobuf.struct.Struct.FieldsEntry(
    )
    implicit class FieldsEntryLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.struct.Struct.FieldsEntry]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.struct.Struct.FieldsEntry](_l) {
      def key: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.key)((c_, f_) => c_.copy(key = f_))
      def value: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.struct.Value] = field(_.getValue)((c_, f_) => c_.copy(value = Option(f_)))
      def optionalValue: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[com.google.protobuf.struct.Value]] = field(_.value)((c_, f_) => c_.copy(value = f_))
    }
    final val KEY_FIELD_NUMBER = 1
    final val VALUE_FIELD_NUMBER = 2
    // Bridges the synthetic FieldsEntry message and a plain Scala tuple so
    // Struct can expose an immutable Map instead of a Seq of entries.
    implicit val keyValueMapper: _root_.scalapb.TypeMapper[com.google.protobuf.struct.Struct.FieldsEntry, (_root_.scala.Predef.String, com.google.protobuf.struct.Value)] =
      _root_.scalapb.TypeMapper[com.google.protobuf.struct.Struct.FieldsEntry, (_root_.scala.Predef.String, com.google.protobuf.struct.Value)](__m => (__m.key, __m.getValue))(__p => com.google.protobuf.struct.Struct.FieldsEntry(__p._1, Some(__p._2)))
    def of(
      key: _root_.scala.Predef.String,
      value: _root_.scala.Option[com.google.protobuf.struct.Value]
    ): _root_.com.google.protobuf.struct.Struct.FieldsEntry = _root_.com.google.protobuf.struct.Struct.FieldsEntry(
      key,
      value
    )
  }
  implicit class StructLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.struct.Struct]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.struct.Struct](_l) {
    def fields: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, com.google.protobuf.struct.Value]] = field(_.fields)((c_, f_) => c_.copy(fields = f_))
  }
  final val FIELDS_FIELD_NUMBER = 1
  @transient
  private val _typemapper_fields: _root_.scalapb.TypeMapper[com.google.protobuf.struct.Struct.FieldsEntry, (_root_.scala.Predef.String, com.google.protobuf.struct.Value)] = implicitly[_root_.scalapb.TypeMapper[com.google.protobuf.struct.Struct.FieldsEntry, (_root_.scala.Predef.String, com.google.protobuf.struct.Value)]]
  def of(
    fields: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, com.google.protobuf.struct.Value]
  ): _root_.com.google.protobuf.struct.Struct = _root_.com.google.protobuf.struct.Struct(
    fields
  )
}
| dotty-staging/ScalaPB | scalapb-runtime/non-jvm/src/main/scala/com/google/protobuf/struct/Struct.scala | Scala | apache-2.0 | 15,477 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.base.model
import org.beangle.data.model.LongId
import org.beangle.data.model.pojo.{ Coded, Updated }
import org.openurp.code.edu.model.Language
import org.openurp.code.geo.model.Country
import org.openurp.code.person.model.{ CompatriotType, Gender, IdType, Nation, Religion, PoliticalStatus }
import java.time.LocalDate
/**
 * Generic natural-person information shared across the platform.
 * (Comments translated from Chinese.)
 */
class Person extends LongId with Updated with Coded {
  /** Type of the identity document (e.g. national ID, passport). */
  var idType: IdType = _
  /** Full name (composite component, see [[Name]]). */
  var name: Name = new Name
  /** Name in phonetic (pinyin) transcription. */
  var phoneticName: Option[String] = None
  /** Former name, if the person changed names. */
  var formerName: Option[String] = None
  /** Gender. */
  var gender: Gender = _
  /** Date of birth. */
  var birthday: LocalDate = _
  /** Place of birth. */
  var birthplace: Option[String] = None
  /** Ancestral home town. */
  var homeTown: Option[String] = None
  /** Ethnic group. */
  var nation: Option[Nation] = None
  /** Political status / affiliation. */
  var politicalStatus: Option[PoliticalStatus] = None
  /** Nationality: country or region. */
  var country: Option[Country] = None
  /** Primary language. */
  var language: Option[Language] = None
  /** Hong Kong / Macao / Taiwan compatriot or overseas status. */
  var compatriotType: Option[CompatriotType] = None
  /** Religious belief. */
  var religion: Option[Religion] = None
}
import org.beangle.data.model.Component
/**
 * A person's name, decomposed into given/middle/family parts plus a
 * preformatted display string. (Comments translated from Chinese.)
 * @see http://www.w3.org/International/questions/qa-personal-names
 * @see http://www.wikitree.com/wiki/Name_Fields
 */
class Name extends Component {
  /** Given (first) name. */
  var givenName: Option[String] = None
  /** Middle name. */
  var middleName: Option[String] = None
  /** Family (last) name. */
  var familyName: Option[String] = None
  /** Preformatted full name used for display; toString delegates to it. */
  var formatedName: String = _
  override def toString: String = {
    formatedName
  }
}
| openurp/api | base/src/main/scala/org/openurp/base/model/person.scala | Scala | lgpl-3.0 | 2,447 |
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.zavakid.commons.test
/**
 * Simple mutable person with a fixed name and an adjustable age.
 *
 * @param name the person's name, exposed as an immutable val
 * @author zavakid 2013-5-29 下午6:09:03
 * @since 1.0
 */
class Person(val name: String) {
  /** Age in years; starts at 0 and may be reassigned by callers. */
  var age: Int = 0
  // Fixed: the original built "[name=]<name>,age=<age>]" — the stray ']'
  // right after "name=" made the brackets unbalanced. Also removed the
  // redundant `age = 0` re-assignment that followed the initializer.
  override def toString = "[name=" + name + ",age=" + age + "]"
}
/**
 * Demo entry point: prints the character code points of "abc".
 *
 * Replaced `extends scala.App` with an explicit main method — the App
 * trait relies on delayed initialization, which has well-known ordering
 * pitfalls; the observable behavior (one println) is unchanged.
 */
object Person {
  def main(args: Array[String]): Unit =
    println("abc".map(_.toInt))
}
package nl.codecentric.assumption.dsl.core.parser
import java.io.{File, BufferedReader, FileNotFoundException}
import nl.codecentric.assumption.dsl.api.model.Experiment
import scala.collection.immutable.TreeSet
import scala.collection.mutable
import scala.io.Source
import scala.reflect.internal.util.ScalaClassLoader.URLClassLoader
import scala.util.parsing.input.{StreamReader, CharArrayReader}
;
/**
* Created by hylke on 01/07/15.
*/
/**
 * Loads experiment definition files from the experiments classpath
 * directory and parses them into [[Experiment]] instances.
 *
 * Created by hylke on 01/07/15.
 */
object ExperimentParser {

  val KEYWORD_EXPERIMENT = "Experiment:"
  // NOTE(review): keeps its original (misspelled) name because the val is
  // part of the object's public API.
  val EXPERMINTDIR = "experiments/"

  /**
   * Parse the experiment definition contained in the given file.
   *
   * @param fileName file name relative to the experiments directory
   * @return the parsed experiment model
   * @throws FileNotFoundException if the resource cannot be located
   */
  def parserExperiment(fileName: String): Experiment = {
    val experimentLines = readFile(EXPERMINTDIR + fileName)
    ExperimentTextParser.parseExperimentText(experimentLines)
  }

  /**
   * Read the named classpath resource fully into a string.
   *
   * Fixes two defects of the original implementation: the resource stream
   * was opened twice and only the unused copy was closed (leaking the one
   * actually read), and a NullPointerException was used as control flow to
   * detect a missing resource. The thrown exception type and message are
   * unchanged for callers.
   */
  private def readFile(fileName: String): String = {
    val experimentClassLoader = new URLClassLoader(
      Array(new File("src/experiment/resources").toURI.toURL),
      getClass().getClassLoader())
    val stream = experimentClassLoader.getResourceAsStream(fileName)
    if (stream == null) {
      throw new FileNotFoundException("File cannot be found")
    }
    val source = Source.fromInputStream(stream)
    try source.mkString
    finally source.close()
  }
}
| craftsmenlabs/gareth-poc | dsl/core/src/main/scala/nl/codecentric/assumption/dsl/core/parser/ExperimentParser.scala | Scala | gpl-2.0 | 1,461 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package toplevel
import _root_.java.util.{Collection, Collections, List}
import com.intellij.openapi.util.Pair
import com.intellij.psi.PsiReferenceList.Role
import com.intellij.psi.javadoc.PsiDocComment
import com.intellij.psi.meta.PsiMetaData
import com.intellij.psi.{PsiClass, PsiElement, _};
/**
* @author ilyas
*/
/**
 * A stub [[PsiClass]] mixed into Scala PSI elements that must present a
 * Java-class face to the platform: every query yields an empty or null
 * result, and the element doubles as its own empty extends/implements
 * [[PsiReferenceList]] so Java callers asking for those lists do not NPE.
 */
trait PsiClassFake extends PsiClass with PsiReferenceList {
  //todo: this methods from PsiReferenceList to avoid NPE. It's possible for asking different roles, so we can
  //todo: have problems for simple implementation of them
  def getRole: Role = Role.EXTENDS_LIST
  def getReferencedTypes: Array[PsiClassType] = PsiClassType.EMPTY_ARRAY
  def getReferenceElements: Array[PsiJavaCodeReferenceElement] = PsiJavaCodeReferenceElement.EMPTY_ARRAY
  // Kind queries: the fake is a plain class, never interface/annotation/enum.
  def isInterface: Boolean = false
  def isAnnotationType: Boolean = false
  def isEnum: Boolean = false
  def getExtendsList: PsiReferenceList = this //todo: to avoid NPE from Java
  def getImplementsList: PsiReferenceList = this //todo: to avoid NPE from Java
  def getExtendsListTypes: Array[PsiClassType] = PsiClassType.EMPTY_ARRAY
  def getImplementsListTypes: Array[PsiClassType] = PsiClassType.EMPTY_ARRAY
  // Hierarchy queries: pretend there are no supertypes.
  def getSuperClass: PsiClass = null
  def getInterfaces: Array[PsiClass] = PsiClass.EMPTY_ARRAY
  def getSupers: Array[PsiClass] = PsiClass.EMPTY_ARRAY
  def getSuperTypes: Array[PsiClassType] = PsiClassType.EMPTY_ARRAY
  // Member queries: no fields, constructors, inner classes or initializers.
  def getFields: Array[PsiField] = PsiField.EMPTY_ARRAY // todo
  def getConstructors: Array[PsiMethod] = PsiMethod.EMPTY_ARRAY // todo
  def getInnerClasses: Array[PsiClass] = PsiClass.EMPTY_ARRAY // todo
  def getInitializers: Array[PsiClassInitializer] = PsiClassInitializer.EMPTY_ARRAY
  def getAllFields: Array[PsiField] = getFields
  def getAllMethods: Array[PsiMethod] = getMethods
  def getAllInnerClasses: Array[PsiClass] = getInnerClasses
  // Lookup queries: nothing is ever found.
  def findFieldByName(name: String, checkBases: Boolean): PsiField = null
  def findMethodBySignature(patternMethod: PsiMethod, checkBases: Boolean): PsiMethod = null
  def findMethodsBySignature(patternMethod: PsiMethod, checkBases: Boolean): Array[PsiMethod] = PsiMethod.EMPTY_ARRAY
  def findMethodsAndTheirSubstitutorsByName(name: String, checkBases: Boolean): List[Pair[PsiMethod, PsiSubstitutor]] = Collections.emptyList[Pair[PsiMethod, PsiSubstitutor]]
  def findMethodsAndTheirSubstitutors: List[Pair[PsiMethod, PsiSubstitutor]] = Collections.emptyList[Pair[PsiMethod, PsiSubstitutor]]
  def getAllMethodsAndTheirSubstitutors: List[Pair[PsiMethod, PsiSubstitutor]] = Collections.emptyList[Pair[PsiMethod, PsiSubstitutor]]
  def findInnerClassByName(name: String, checkBases: Boolean): PsiClass = null
  def getLBrace: PsiJavaToken = null
  def getRBrace: PsiJavaToken = null
  def getScope: PsiElement = null
  def isInheritor(baseClass: PsiClass, checkDeep: Boolean): Boolean = false
  def isInheritorDeep(baseClass: PsiClass, classToPass: PsiClass): Boolean = false
  def getVisibleSignatures: Collection[HierarchicalMethodSignature] = Collections.emptyList[HierarchicalMethodSignature]
  // Modifiers: an empty modifier list that only reports `public`.
  def getModifierList: PsiModifierList = ScalaPsiUtil.getEmptyModifierList(getManager)
  def hasModifierProperty(name: String): Boolean = name.equals(PsiModifier.PUBLIC)
  def getDocComment: PsiDocComment = null
  def isDeprecated: Boolean = false
  def getMetaData: PsiMetaData = null
  def isMetaEnough: Boolean = false
  def hasTypeParameters: Boolean = false
  def getTypeParameterList: PsiTypeParameterList = null
  def getTypeParameters: Array[PsiTypeParameter] = PsiTypeParameter.EMPTY_ARRAY
  def findMethodsByName(name: String, checkBases: Boolean): Array[PsiMethod] = Array[PsiMethod]()
  def getMethods = Array[PsiMethod]()
  def getQualifiedName: String = null
  def getContainingClass: PsiClass = null
}
// should not result in a stack overflow
// Regression test: bulk-appending 10000 elements to a mutable LinkedList
// must complete without deep recursion blowing the stack; the length check
// confirms every element actually arrived.
object Test {
  def main(args: Array[String]): Unit = {
    import collection.mutable.LinkedList
    val l = new LinkedList[Int]() ++ (0 until 10000)
    assert(l.length == 10000)
  }
}
| folone/dotty | tests/pending/run/t3996.scala | Scala | bsd-3-clause | 232 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.qscript.provenance
import slamdata.Predef._
import quasar.contrib.matryoshka.PatternArbitrary
import quasar.pkg.tests._
import matryoshka.Delay
import scalaz._
trait ProvFGenerator {
  import ProvF._
  /** ScalaCheck `Arbitrary` for the `ProvF` pattern functor, as a `Delay`
    * so it works for any recursive carrier. Leaves are weighted 10:2 over
    * the rare `fresh` case; branches (`both`, `thenn`) are drawn uniformly.
    * (`^^` maps a generator through a function — presumably from quasar's
    * pkg.tests helpers; verify there.) */
  implicit def arbitraryProvF[D: Arbitrary, I: Arbitrary]: Delay[Arbitrary, ProvF[D, I, ?]] =
    new PatternArbitrary[ProvF[D, I, ?]] {
      val O = new Optics[D, I]
      def leafGenerators[A] =
        NonEmptyList(
          2 -> Gen.delay(Gen.const(O.fresh[A]())),
          10 -> (arbitrary[D] ^^ (O.prjPath[A](_))),
          10 -> (arbitrary[D] ^^ (O.prjValue[A](_))),
          10 -> (arbitrary[D] ^^ (O.injValue[A](_))),
          10 -> (arbitrary[I] ^^ (O.value[A](_))))
      def branchGenerators[A: Arbitrary] =
        uniformly(
          arbitrary[(A, A)] ^^ (O.both[A](_)),
          arbitrary[(A, A)] ^^ (O.thenn[A](_)))
    }
}
/** Module form of [[ProvFGenerator]] so the instances can be imported directly. */
object ProvFGenerator extends ProvFGenerator
| slamdata/slamengine | qscript/src/test/scala/quasar/qscript/provenance/ProvFGenerator.scala | Scala | apache-2.0 | 1,520 |
package eu.phisikus.plotka.network.consumer
import eu.phisikus.plotka.model.{Message, NetworkMessage, NetworkPeer, Peer}
import eu.phisikus.plotka.network.listener.dto.TestMessage
import eu.phisikus.plotka.network.talker.Talker
import org.scalatest.{FunSuite, Matchers}
/**
 * Verifies that [[StandardNetworkMessageConsumer]] forwards each consumed
 * message to the supplied handler unchanged.
 */
class StandardNetworkMessageConsumerTest extends FunSuite with Matchers {
  private val testPeer = new NetworkPeer("127.0.0.1", 9090)

  test("Create working message consumer") {
    val expectedMessage = NetworkMessage(testPeer, testPeer, TestMessage("TRUE"))
    // Seed with a sentinel so the assertion fails if the handler never runs.
    var actualMessage: NetworkMessage = NetworkMessage(testPeer, testPeer, TestMessage("FALSE"))
    val testMessageHandler = (message: NetworkMessage, talker: Talker) => {
      actualMessage = message
    }
    val testNetworkMessageConsumer = new StandardNetworkMessageConsumer(
      testPeer,
      testMessageHandler
    )
    testNetworkMessageConsumer.consumeMessage(
      expectedMessage.asInstanceOf[Message[NetworkPeer, Peer, Serializable]]
    )
    // Use the mixed-in Matchers DSL (was a bare assert) for a descriptive
    // failure message on mismatch.
    actualMessage shouldBe expectedMessage
  }
}
| phisikus/plotka | networking/src/test/scala/eu/phisikus/plotka/network/consumer/StandardNetworkMessageConsumerTest.scala | Scala | bsd-3-clause | 1,048 |
package json
import java.util.UUID
import io.sphere.json._
import io.sphere.json.generic._
import io.sphere.mongo.generic._
import io.sphere.mongo.format.MongoFormat
import io.sphere.mongo.format.DefaultMongoFormats._
import io.sphere.mongo.format._
import io.sphere.util.BaseMoney
import org.joda.time.format.ISODateTimeFormat
import org.joda.time.{DateTime, DateTimeZone}
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
// A typed reference to another resource: its type id plus the target UUID.
case class Reference(typeId: String, id: UUID)
object Reference {
  // JSON and Mongo codecs derived from the case-class constructor.
  implicit val json: JSON[Reference] = jsonProduct(apply _)
  implicit val mongoFormat: MongoFormat[Reference] = mongoProduct(apply _)
}
// A priced offer: a money value together with its expiry instant.
case class Price(id: String, value: BaseMoney, validUntil: DateTime)
object Price {
  // the lib does not ship a `MongoFormat[DateTime]`
  // DateTimes are persisted as ISO-8601 strings, always normalized to UTC.
  implicit val dateTimeAsIsoStringFormat: MongoFormat[DateTime] = new MongoFormat[DateTime] {
    override def toMongoValue(dt: DateTime): Any =
      ISODateTimeFormat.dateTime.print(dt.withZone(DateTimeZone.UTC))
    override def fromMongoValue(any: Any): DateTime = any match {
      case s: String => new DateTime(s, DateTimeZone.UTC)
      // Anything but a string means a corrupt document — fail fast.
      case _ => sys.error("String expected")
    }
  }
  implicit val json: JSON[Price] = jsonProduct(apply _)
  implicit val mongoFormat: MongoFormat[Price] = mongoProduct(apply _)
}
// One sellable variant: numeric id, its prices, and free-form string attributes.
case class ProductVariant(id: Long, prices: Vector[Price], attributes: Map[String, String])
object ProductVariant {
  // Codecs derived from the case class constructor.
  implicit val json: JSON[ProductVariant] = jsonProduct(apply _)
  implicit val mongoFormat: MongoFormat[ProductVariant] = mongoProduct(apply _)
}
// Root benchmark entity: a versioned product with a type reference and its variants.
case class Product(
    id: UUID,
    version: Long,
    productType: Reference,
    variants: Vector[ProductVariant])
object Product {
  // Codecs derived from the case class constructor.
  implicit val json: JSON[Product] = jsonProduct(apply _)
  implicit val mongoFormat: MongoFormat[Product] = mongoProduct(apply _)
}
// Pre-built fixtures for the JSON/Mongo (de)serialization benchmarks.
// Everything is computed once, eagerly, when this object is first touched.
object JsonBenchmark {
  // Large homogeneous collections in several shapes, plus their serialized forms.
  val lotsOfIntsList = Range(1, 100000).toList
  val lotsOfIntsSeq = Range(1, 100000).toSeq
  val lotsOfIntsVector = Range(1, 100000).toVector
  val lotsOfIntsAsJson = Range(1, 100000).mkString("[", ",", "]")
  val lotsOfIntsMongoValue = toMongo(lotsOfIntsVector)
  // A large string-to-string map and its Mongo representation.
  val bigMap: Map[String, String] = lotsOfIntsList.map(i => s"key$i" -> s"value$i").toMap
  val bigMapMongoValue = toMongo(bigMap)
  // 200 price objects as raw JSON snippets, embedded into each variant below.
  val prices =
    for (i <- 1 to 200)
      yield s"""
         |{
         |  "id": "$i",
         |  "value": {
         |    "centAmount": $i,
         |    "currencyCode": "USD"
         |  },
         |  "validUntil": "2025-12-14T12:50:25.070Z"
         |}
      """.stripMargin
  // 80 custom attribute fields rendered as one JSON object literal.
  val customAttributes =
    (for (i <- 1 to 80) yield s""" "field-$i": "value $i" """).mkString("{", ",", "}")
  // 100 variants, each carrying all 200 prices and the custom attributes.
  val variants =
    for (i <- 1 to 100)
      yield s"""{
         |  "id": $i,
         |  "prices": ${prices.mkString("[", ",", "]")},
         |  "images": [],
         |  "attributes": $customAttributes,
         |  "categories":[]
         |}""".stripMargin
  // The complete product document fed to the parsing benchmarks.
  val json =
    s"""{
      |  "id": "ff30b141-67e4-41bb-97c5-4121c42d602a",
      |  "version": 2,
      |  "productType": {
      |    "typeId": "product-type",
      |    "id": "5a4c142a-40b8-4b86-b944-2259d39ced22"
      |  },
      |  "name": {"de-DE":"Ein Product 1","en":"Some Product 1"},
      |  "categories":[],
      |  "categoryOrderHints":{},
      |  "slug": {"en":"product_slug_1_4ff4aaa3-2dc9-4aca-8db9-1c68a341de13"},
      |  "variants": ${variants.mkString("[", ",", "]")},
      |  "searchKeywords":{},
      |  "hasStagedChanges":false,
      |  "published":true,
      |  "createdAt":"2015-12-14T12:50:23.679Z",
      |  "lastModifiedAt":"2015-12-14T12:50:25.070Z"
      |}
    """.stripMargin
  // Parsed object graph and its Mongo value, for the serialization direction.
  val product = getFromJSON[Product](json)
  val productMongoValue = toMongo(product)
}
| sphereio/sphere-scala-libs | benchmarks/src/main/scala/json/JsonBenchmark.scala | Scala | apache-2.0 | 3,767 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator.group
import java.util.Arrays
import org.junit.Assert._
import org.junit.Test
/**
 * Unit tests for [[MemberMetadata]]: protocol matching, protocol voting,
 * per-protocol metadata lookup, and static-membership detection.
 */
class MemberMetadataTest {
  val groupId = "groupId"
  val groupInstanceId = Some("groupInstanceId")
  val clientId = "clientId"
  val clientHost = "clientHost"
  val memberId = "memberId"
  val protocolType = "consumer"
  val rebalanceTimeoutMs = 60000
  val sessionTimeoutMs = 10000

  // Builds a member supporting the given (protocol name, metadata bytes) pairs,
  // with all other constructor arguments fixed to the test constants above.
  private def newMember(supportedProtocols: List[(String, Array[Byte])]): MemberMetadata =
    new MemberMetadata(memberId, groupId, groupInstanceId, clientId, clientHost,
      rebalanceTimeoutMs, sessionTimeoutMs, protocolType, supportedProtocols)

  @Test
  def testMatchesSupportedProtocols(): Unit = {
    val protocols = List(("range", Array.empty[Byte]))
    val member = newMember(protocols)
    assertTrue(member.matches(protocols))
    // Differing metadata bytes, a different protocol, or an extra protocol
    // must all be treated as a mismatch.
    assertFalse(member.matches(List(("range", Array[Byte](0)))))
    assertFalse(member.matches(List(("roundrobin", Array.empty[Byte]))))
    assertFalse(member.matches(List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte]))))
  }

  @Test
  def testVoteForPreferredProtocol(): Unit = {
    val member = newMember(List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
    // The vote is expected to follow the member's declared preference order
    // among the candidates it supports.
    assertEquals("range", member.vote(Set("range", "roundrobin")))
    assertEquals("roundrobin", member.vote(Set("blah", "roundrobin")))
  }

  @Test
  def testMetadata(): Unit = {
    val member = newMember(List(("range", Array[Byte](0)), ("roundrobin", Array[Byte](1))))
    assertTrue(Arrays.equals(Array[Byte](0), member.metadata("range")))
    assertTrue(Arrays.equals(Array[Byte](1), member.metadata("roundrobin")))
  }

  @Test(expected = classOf[IllegalArgumentException])
  def testMetadataRaisesOnUnsupportedProtocol(): Unit = {
    val member = newMember(List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
    member.metadata("blah")
    fail()
  }

  @Test(expected = classOf[IllegalArgumentException])
  def testVoteRaisesOnNoSupportedProtocols(): Unit = {
    val member = newMember(List(("range", Array.empty[Byte]), ("roundrobin", Array.empty[Byte])))
    member.vote(Set("blah"))
    fail()
  }

  @Test
  def testHasValidGroupInstanceId(): Unit = {
    val member = newMember(List(("range", Array[Byte](0)), ("roundrobin", Array[Byte](1))))
    // A non-empty groupInstanceId marks the member as static.
    assertTrue(member.isStaticMember)
    assertEquals(groupInstanceId, member.groupInstanceId)
  }
}
| sslavic/kafka | core/src/test/scala/unit/kafka/coordinator/group/MemberMetadataTest.scala | Scala | apache-2.0 | 3,871 |
package pokescala.parse
import pokescala.model._
import pokescala.model.Implicits._
import scala.util.parsing.json.JSONObject
import scala.util.parsing.json.JSONArray
import scala.collection.mutable
import scala.util.Try
object PokemonParser extends Parser[Pokemon] {
def parse(implicit raw : Map[String, Any]) : Try[Pokemon] = Try {
val (id, resourceURI, created, modified) = extractModelInfo("national_id");
val name = extract[String]("name");
val abilities = extractResourceURIs(raw("abilities"));
val eggGroups = extractResourceURIs(raw("egg_groups"));
val rawEvolutions = asVector(raw("evolutions"));
val evolutionsBuff = new mutable.ArrayBuffer[Pokemon.Evolution];
for (elem <- rawEvolutions) {
import Pokemon.Evolution
import Pokemon.Evolution.Method._
val obj = elem.asInstanceOf[Map[String, Any]];
val method = extract[String]("method")(obj);
val uri = extract[String]("resource_uri")(obj);
if (method equals "level_up") {
if (obj contains "level") {
val level = extract[Double]("level")(obj).toInt;
evolutionsBuff += new Evolution(new LevelUp(level), uri);
}
else {
evolutionsBuff += new Evolution(new LevelUp(-1), uri);
}
}
else if (method equals "stone") {
evolutionsBuff += new Evolution(new Stone(""), uri);
}
else if (method equals "other") {
val detail = Try { extract[String]("detail")(obj) };
if (detail.isSuccess)
evolutionsBuff += new Evolution(new Other(detail.get), uri);
else
evolutionsBuff += new Evolution(new Other(null), uri);
}
}
val evolutions = evolutionsBuff.toVector;
val pokedexEntries = extractResourceURIs(raw("descriptions"));
val rawMoves = asVector(raw("moves"));
val levelUpMovesBuff = new mutable.ArrayBuffer[(String, Int)];
val eggMovesBuff = new mutable.ArrayBuffer[String];
val machineMovesBuff = new mutable.ArrayBuffer[String];
val tutorMovesBuff = new mutable.ArrayBuffer[String];
for (elem <- rawMoves) {
val obj = elem.asInstanceOf[Map[String, Any]];
val learnType = extract[String]("learn_type")(obj);
val uri = extract[String]("resource_uri")(obj);
if (learnType equals "level up") {
val level = extract[Double]("level")(obj).toInt;
levelUpMovesBuff += ((uri, level));
}
else if (learnType equals "egg move")
eggMovesBuff += uri;
else if (learnType equals "machine")
machineMovesBuff += uri;
else if (learnType equals "tutor")
tutorMovesBuff += uri;
}
val levelUpMoves = levelUpMovesBuff.toVector;
val eggMoves = eggMovesBuff.toVector;
val machineMoves = machineMovesBuff.toVector;
val tutorMoves = tutorMovesBuff.toVector;
val types = extractResourceURIs(raw("types"));
val catchRate = extract[Double]("catch_rate").toInt;
val species = extract[String]("species");
val hp = extract[Double]("hp").toInt;
val attack = extract[Double]("attack").toInt;
val defense = extract[Double]("defense").toInt;
val specialAttack = extract[Double]("sp_atk").toInt;
val specialDefense = extract[Double]("sp_def").toInt;
val speed = extract[Double]("speed").toInt;
val stats = new Pokemon.Stats(hp, attack, defense, specialAttack, specialDefense, speed);
val eggCycles = extract[Double]("egg_cycles").toInt;
val evYield = extract[String]("ev_yield");
val exp = extract[Double]("exp").toInt;
val growthRate = extract[String]("growth_rate");
val height = extract[String]("height");
val weight = extract[String]("weight");
val happiness = extract[Double]("happiness").toInt;
val maleFemaleRatio = extract[String]("male_female_ratio");
new Pokemon(
name, abilities, eggGroups, evolutions, pokedexEntries, levelUpMoves, eggMoves,
machineMoves, tutorMoves, types, catchRate, species, stats, eggCycles, evYield,
exp, growthRate, height, weight, happiness, maleFemaleRatio,
id, resourceURI, created, modified);
};
} | haferflocken/PokeScala | PokeScala/src/pokescala/parse/PokemonParser.scala | Scala | apache-2.0 | 4,166 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.convert.text
import com.typesafe.config.ConfigFactory
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert.SimpleFeatureConverters
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class NewLinesTest extends Specification {
  sequential

  "NewLinesTest" should {
    // Converter: each record is "lat,lon" (two doubles) turned into a point geometry
    // with a generated UUID feature id.
    val conf = ConfigFactory.parseString(
      """
        | converter = {
        |   type = "delimited-text",
        |   format = "DEFAULT",
        |   id-field = "uuid()",
        |   fields = [
        |     { name = "lat", transform = "$1::double" },
        |     { name = "lon", transform = "$2::double" },
        |     { name = "geom", transform = "point($lat, $lon)" }
        |   ]
        | }
      """.stripMargin)
    // Target feature type matching the converter output above.
    val sft = SimpleFeatureTypes.createType(ConfigFactory.parseString(
      """
        |{
        |  type-name = "newlinetest"
        |  attributes = [
        |    { name = "lat", type = "Double", index = false },
        |    { name = "lon", type = "Double", index = false },
        |    { name = "geom", type = "Point", index = true, srid = 4326, default = true }
        |  ]
        |}
      """.stripMargin
    ))
    "process trailing newline" >> {
      val converter = SimpleFeatureConverters.build[String](sft, conf)
      // split with limit -1 keeps the trailing empty string, so 3 input lines.
      val data = "45.0,45.0\\n55.0,55.0\\n".split("\\n", -1)
      data.length mustEqual 3
      // The empty trailing line must be ignored: only 2 features produced.
      val res = converter.processInput(data.toIterator)
      res.length mustEqual 2
      converter.close()
      success
    }
    "process middle of data newline" >> {
      val converter = SimpleFeatureConverters.build[String](sft, conf)
      // An empty line between records plus a trailing one: 4 input lines.
      val data = "45.0,45.0\\n\\n55.0,55.0\\n".split("\\n", -1)
      data.length mustEqual 4
      // Both empty lines must be ignored: only 2 features produced.
      val res = converter.processInput(data.toIterator)
      res.length mustEqual 2
      converter.close()
      success
    }
  }
} | giserh/geomesa | geomesa-convert/geomesa-convert-text/src/test/scala/org/locationtech/geomesa/convert/text/NewLinesTest.scala | Scala | apache-2.0 | 2,472 |
package org.ferrit.core.test
import scala.concurrent.ExecutionContext
import org.ferrit.core.http.{Request, Response}
import org.ferrit.core.util.Headers
import org.ferrit.core.test.FakeHttpClient.NotFound
/**
* Generates a website of N pages to support a crawler reachability test and
* work out the crawler through a very large number of pages quickly.
*
* Each page contains exactly one hyperlink
* to the next page in the sequence which the crawler is expected to follow.
* The last page can only be reached by crawling through all the previous links.
* Such a site containing N pages will also be considered a site of depth N.
*
* Page 0 is the index page and the last is Page N-1
*
* <pre>
*
* Example assuming the domain http://site.net:
*
* A request to http://site.net returns:
*
* <html>
* <a href="http://site.net/page1.html">
* </html>
*
*
* A request to http://site.net/page1.html returns:
*
* <html>
* <a href="http://site.net/page2.html">
* </html>
*
* ...
*
* A request to http://site.net/page(N-2).html returns:
* (where N-2 is the penultimate)
* In a site of 100 pages the last page is page99.html because the index page
* is treated as the first page (of zero index).
*
* <html>
* <a href="http://site.net/page(N-1).html">
* </html>
*
* Page N
* <html>
* <!-- no link is included -->
* </html>
*
* <pre>
*
* @param domainName - the scheme, domain and port of the fake website
* (e.g. http://www.site.net:80)
* @param totalPages - the number of fake pages that exist on the site
*
*/
class LinkedListHttpClient(domainName: String, totalPages: Int)(implicit ec: ExecutionContext) extends FakeHttpClient {

  // Every fake page is served as HTML with UTF-8 content type.
  val headers = Map(Headers.ContentTypeTextHtmlUtf8)
  // %s placeholders: domain, page number.
  val linkHtml = """<a href="%s/page%s.html">link text</a>"""
  // Extracts the page number from a path like "page42.html".
  val UriPath = """page(\\d+)\\.html""".r

  // The HTML is small because the larger the template the more work
  // the link extracting HTML parser will need to do per page.
  // %s placeholders: page number (title), link markup (body).
  val html = """
    |<!DOCTYPE html>
    |<html lang="en-US">
    |<head>
    |    <title>Test Page Number %s</title>
    |</head>
    |<body>
    |   <!-- BEGIN -->
    |   %s
    |   <!-- END -->
    |</body>
    |</html>
    """.stripMargin

  override implicit val _ec: ExecutionContext = ec

  /**
   * Maps a request URI to a synthetic page: the bare domain serves the index
   * (page 0), "pageN.html" serves page N, and anything else is a 404.
   */
  override def handleRequest(request: Request):Response = {
    // Inspect URI and search for a path like "pageN.html"
    // where N is a number within the totalPages total.
    val uri = request.crawlUri
    val reader = uri.reader
    val pr:PartResponse = {
      if (uri.toString == domainName) {
        // Is request for index page, return first page
        PartResponse(200, headers, makePage(0, 1))
      } else if (domainName != reader.schemeToPort) {
        // URI was for other website or different port
        NotFound
      } else {
        // First regex capture group = the page number, if the path matches.
        val pageNum = for {
          UriPath(n) <- UriPath.findFirstMatchIn(reader.path)
        } yield (n)
        pageNum match {
          case Some(numStr) =>
            val num = Integer.parseInt(numStr)
            // NOTE(review): page{totalPages} passes this bound check but is never
            // linked from page{totalPages-1}; confirm whether the upper bound
            // should be totalPages - 1 (index page counts as page 0).
            if (num < 1 || num > totalPages) NotFound
            else PartResponse(200, headers, makePage(num, num + 1))
          case _ => NotFound
        }
      }
    }
    pr.toResponse(request)
  }

  /**
   * Renders page `pageNum`. The page links to page `linkNum`, except the
   * penultimate page (linkNum == totalPages), which carries no link so the
   * crawl terminates there.
   */
  private [test] def makePage(pageNum:Int, linkNum: Int): String = {
    val isLast = linkNum == totalPages
    if (isLast) html.format(pageNum, "")
    else html.format(
      pageNum,
      linkHtml.format(domainName, linkNum)
    )
  }
}
| reggoodwin/ferrit | src/test/scala/org/ferrit/core/test/LinkedListHttpClient.scala | Scala | mit | 3,598 |
package gitbucket.core.servlet
import java.io.File
import java.sql.{DriverManager, Connection}
import gitbucket.core.plugin.PluginRegistry
import gitbucket.core.service.SystemSettingsService
import gitbucket.core.util._
import org.apache.commons.io.FileUtils
import javax.servlet.{ServletContextListener, ServletContextEvent}
import org.slf4j.LoggerFactory
import Directory._
import ControlUtil._
import JDBCUtil._
import org.eclipse.jgit.api.Git
import gitbucket.core.util.Versions
import gitbucket.core.util.Directory
object AutoUpdate {
  /**
   * The history of versions. A head of this sequence is the current GitBucket version.
   * Entries that override `update` carry the data/filesystem migration needed
   * when upgrading *to* that version; plain entries need no migration.
   */
  val versions = Seq(
    new Version(3, 3),
    new Version(3, 2),
    new Version(3, 1),
    new Version(3, 0),
    new Version(2, 8),
    // 2.7: rename attachment directories and clear dangling origin/parent links.
    new Version(2, 7) {
      override def update(conn: Connection, cl: ClassLoader): Unit = {
        super.update(conn, cl)
        conn.select("SELECT * FROM REPOSITORY"){ rs =>
          // Rename attached files directory from /issues to /comments
          val userName = rs.getString("USER_NAME")
          val repoName = rs.getString("REPOSITORY_NAME")
          defining(Directory.getAttachedDir(userName, repoName)){ newDir =>
            val oldDir = new File(newDir.getParentFile, "issues")
            if(oldDir.exists && oldDir.isDirectory){
              oldDir.renameTo(newDir)
            }
          }
          // Update ORIGIN_USER_NAME and ORIGIN_REPOSITORY_NAME if it does not exist
          val originalUserName = rs.getString("ORIGIN_USER_NAME")
          val originalRepoName = rs.getString("ORIGIN_REPOSITORY_NAME")
          if(originalUserName != null && originalRepoName != null){
            if(conn.selectInt("SELECT COUNT(*) FROM REPOSITORY WHERE USER_NAME = ? AND REPOSITORY_NAME = ?",
                originalUserName, originalRepoName) == 0){
              conn.update("UPDATE REPOSITORY SET ORIGIN_USER_NAME = NULL, ORIGIN_REPOSITORY_NAME = NULL " +
                "WHERE USER_NAME = ? AND REPOSITORY_NAME = ?", userName, repoName)
            }
          }
          // Update PARENT_USER_NAME and PARENT_REPOSITORY_NAME if it does not exist
          val parentUserName = rs.getString("PARENT_USER_NAME")
          val parentRepoName = rs.getString("PARENT_REPOSITORY_NAME")
          if(parentUserName != null && parentRepoName != null){
            if(conn.selectInt("SELECT COUNT(*) FROM REPOSITORY WHERE USER_NAME = ? AND REPOSITORY_NAME = ?",
                parentUserName, parentRepoName) == 0){
              conn.update("UPDATE REPOSITORY SET PARENT_USER_NAME = NULL, PARENT_REPOSITORY_NAME = NULL " +
                "WHERE USER_NAME = ? AND REPOSITORY_NAME = ?", userName, repoName)
            }
          }
        }
      }
    },
    new Version(2, 6),
    new Version(2, 5),
    new Version(2, 4),
    // 2.3: keep only well-formed "sha:..." lines in push activity info and
    // clear the plugin cache directory (failures ignored, best effort).
    new Version(2, 3) {
      override def update(conn: Connection, cl: ClassLoader): Unit = {
        super.update(conn, cl)
        conn.select("SELECT ACTIVITY_ID, ADDITIONAL_INFO FROM ACTIVITY WHERE ACTIVITY_TYPE='push'"){ rs =>
          val curInfo = rs.getString("ADDITIONAL_INFO")
          val newInfo = curInfo.split("\\n").filter(_ matches "^[0-9a-z]{40}:.*").mkString("\\n")
          if (curInfo != newInfo) {
            conn.update("UPDATE ACTIVITY SET ADDITIONAL_INFO = ? WHERE ACTIVITY_ID = ?", newInfo, rs.getInt("ACTIVITY_ID"))
          }
        }
        ignore {
          FileUtils.deleteDirectory(Directory.getPluginCacheDir())
          //FileUtils.deleteDirectory(new File(Directory.PluginHome))
        }
      }
    },
    new Version(2, 2),
    new Version(2, 1),
    // 2.0: give extension-less attached image files an extension derived from
    // their detected MIME type.
    new Version(2, 0){
      override def update(conn: Connection, cl: ClassLoader): Unit = {
        import eu.medsea.mimeutil.{MimeUtil2, MimeType}

        val mimeUtil = new MimeUtil2()
        mimeUtil.registerMimeDetector("eu.medsea.mimeutil.detector.MagicMimeMimeDetector")

        super.update(conn, cl)
        conn.select("SELECT USER_NAME, REPOSITORY_NAME FROM REPOSITORY"){ rs =>
          defining(Directory.getAttachedDir(rs.getString("USER_NAME"), rs.getString("REPOSITORY_NAME"))){ dir =>
            if(dir.exists && dir.isDirectory){
              dir.listFiles.foreach { file =>
                if(file.getName.indexOf('.') < 0){
                  val mimeType = MimeUtil2.getMostSpecificMimeType(mimeUtil.getMimeTypes(file, new MimeType("application/octet-stream"))).toString
                  if(mimeType.startsWith("image/")){
                    file.renameTo(new File(file.getParent, file.getName + "." + mimeType.split("/")(1)))
                  }
                }
              }
            }
          }
        }
      }
    },
    Version(1, 13),
    Version(1, 12),
    Version(1, 11),
    Version(1, 10),
    Version(1, 9),
    Version(1, 8),
    Version(1, 7),
    Version(1, 6),
    Version(1, 5),
    Version(1, 4),
    // 1.3: enable HTTP receive-pack on every wiki repository.
    new Version(1, 3){
      override def update(conn: Connection, cl: ClassLoader): Unit = {
        super.update(conn, cl)
        // Fix wiki repository configuration
        conn.select("SELECT USER_NAME, REPOSITORY_NAME FROM REPOSITORY"){ rs =>
          using(Git.open(getWikiRepositoryDir(rs.getString("USER_NAME"), rs.getString("REPOSITORY_NAME")))){ git =>
            defining(git.getRepository.getConfig){ config =>
              if(!config.getBoolean("http", "receivepack", false)){
                config.setBoolean("http", null, "receivepack", true)
                config.save
              }
            }
          }
        }
      }
    },
    Version(1, 2),
    Version(1, 1),
    Version(1, 0),
    Version(0, 0)
  )
  /**
   * The head version of GitBucket (i.e. the first element of [[versions]]).
   */
  val headVersion = versions.head

  /**
   * The version file (GITBUCKET_HOME/version).
   */
  lazy val versionFile = new File(GitBucketHome, "version")
  /**
   * Returns the current version from the version file.
   * Falls back to Version(0, 0) when the file is missing, does not contain
   * exactly "major.minor", or names a version not present in [[versions]].
   * NOTE(review): a non-numeric component would throw NumberFormatException
   * rather than fall back — confirm whether that is intended.
   */
  def getCurrentVersion(): Version = {
    if(versionFile.exists){
      FileUtils.readFileToString(versionFile, "UTF-8").trim.split("\\\\.") match {
        case Array(majorVersion, minorVersion) => {
          versions.find { v =>
            v.majorVersion == majorVersion.toInt && v.minorVersion == minorVersion.toInt
          }.getOrElse(Version(0, 0))
        }
        case _ => Version(0, 0)
      }
    } else Version(0, 0)
  }
} | skohar/gitbucket | src/main/scala/gitbucket/core/servlet/AutoUpdate.scala | Scala | apache-2.0 | 6,324 |
package chrome
import net.liftweb.json._
import net.liftweb.json.JsonDSL._
// One tab descriptor; field names mirror Chrome's remote-debugging JSON keys
// ("devtoolsFrontendUrl", "webSocketDebuggerUrl", etc. — see fromJson below).
case class TabInfo(frontEndUrl: String,
                   faviconUrl: String,
                   thumbnail: String,
                   title: String,
                   url: String,
                   wsdebugUrl: String)

object TabInfo {
  // Extracts every JSON object carrying all six expected string fields.
  // NOTE(review): this for-comprehension over a JValue yields a List of
  // TabInfo (one per matching object), not an Option — objects missing any
  // field are silently dropped. Confirm callers expect that.
  def fromJson(js: JValue) = for {
    JObject(fields) <- js
    JField("devtoolsFrontendUrl", JString(feurl)) <- fields
    JField("faviconUrl", JString(favurl)) <- fields
    JField("thumbnailUrl", JString(thumburl)) <- fields
    JField("title", JString(title)) <- fields
    JField("url", JString(url)) <- fields
    JField("webSocketDebuggerUrl", JString(wsurl)) <- fields
  } yield {
    TabInfo(feurl, favurl, thumburl, title, url, wsurl)
  }
}
| softprops/chrome-pilot | server/src/main/scala/serial.scala | Scala | mit | 771 |
package jp.kenichi.pades
import java.nio.file.{Path, Paths, Files}
/**
 * Read-only random access to the raw bytes of a PDF document.
 */
trait PdfAccess {
  /** Total number of bytes available. */
  def size: Long

  /** The byte at the given zero-based offset. */
  def apply(offset: Long): Byte
}

object PdfAccess {
  /** An access over zero bytes: `size` is 0 and every read fails. */
  val empty = new PdfAccess {
    def size: Long = 0L
    def apply(index: Long): Byte =
      throw new IndexOutOfBoundsException(s"$index / $size")
  }
}

/**
 * A [[PdfAccess]] backed by a file on disk.
 *
 * @param file path of the PDF file to expose
 */
class PdfFileAccess(file: Path) extends PdfAccess {
  /** Convenience constructor from a path string. */
  def this(file: String) = this(Paths.get(file))

  /** Convenience constructor from a `java.io.File`. */
  def this(file: java.io.File) = this(file.toPath)

  val size: Long = Files.size(file)

  // FIXME: very inefficient implementation — the entire file is held in
  // memory, and offsets beyond Int.MaxValue are effectively unsupported.
  protected val content: Array[Byte] = Files.readAllBytes(file)

  def apply(index: Long): Byte = content(index.toInt)
}
| ken1ma/pades-scala-js | jvm/src/main/scala/jp.kenichi/pades/PdfAccess.scala | Scala | apache-2.0 | 611 |
/* From ESOP 2014, Kuwahara et al */
import stainless.lang._
/**
 * Termination benchmark: both functions always evaluate to 0; the recursive
 * shape (a loop nested inside a loop) is the point of the benchmark, so the
 * recursion is preserved here rather than rewritten iteratively.
 */
object NestedLoop {

  /** Counts `n` down to zero one step at a time; always 0. */
  def loop1(n: BigInt): BigInt =
    if (n <= 0) 0 else loop1(n - 1)

  /** Runs `loop1` once per step while counting `n` down; always 0. */
  def loop2(n: BigInt): BigInt =
    if (n <= 0) 0 else loop1(n) + loop2(n - 1)
}
| epfl-lara/stainless | frontends/benchmarks/termination/valid/NestedLoop.scala | Scala | apache-2.0 | 247 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package toplevel
package synthetic
import com.intellij.openapi.project.Project
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi._
import com.intellij.psi.impl.light.LightElement
import com.intellij.psi.scope.PsiScopeProcessor
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScPackaging
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ScalaIndexKeys
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.ResolveTargets._
import org.jetbrains.plugins.scala.lang.resolve.processor.BaseProcessor
import scala.collection.mutable
/**
 * A non-physical (light) PSI package: it has no directories, files, modifier
 * list, or name-change support, and exists only so that resolve can walk
 * package/class declarations via [[processDeclarations]].
 *
 * @author ilyas
 */
abstract class ScSyntheticPackage(name: String, manager: PsiManager)
  extends LightElement(manager, ScalaLanguage.INSTANCE) with PsiPackage {

  // Qualified-name changes are meaningless for a synthetic package: no-op.
  def handleQualifiedNameChange(newQualifiedName: String) {
  }
  def getDirectories: Array[PsiDirectory] = PsiDirectory.EMPTY_ARRAY
  def checkSetName(s: String) {
    throw new IncorrectOperationException("cannot set name: nonphysical element")
  }
  override def getText = ""
  override def toString: String = "Scala Synthetic Package " + getQualifiedName
  override def getDirectories(scope: GlobalSearchScope): Array[PsiDirectory] = PsiDirectory.EMPTY_ARRAY
  override def getModifierList: PsiModifierList = ScalaPsiUtil.getEmptyModifierList(getManager)
  override def hasModifierProperty(s: String) = false
  override def getAnnotationList: PsiModifierList = null
  override def getName: String = name
  override def setName(newName: String) = throw new IncorrectOperationException("cannot set name: nonphysical element")
  override def copy = throw new IncorrectOperationException("cannot copy: nonphysical element")
  override def getContainingFile: PsiFile = SyntheticClasses.get(manager.getProject).file
  override def occursInPackagePrefixes: Array[VirtualFile] = VirtualFile.EMPTY_ARRAY

  // Feeds sub-packages and contained classes to the resolver, filtered by the
  // element kinds the processor declares interest in. Returns false as soon
  // as the processor asks to stop.
  override def processDeclarations(processor: PsiScopeProcessor,
                                   state: ResolveState,
                                   lastParent: PsiElement,
                                   place: PsiElement): Boolean = {
    processor match {
      case bp: BaseProcessor =>
        if (bp.kinds.contains(PACKAGE)) {
          // Restrict sub-packages to the resolve scope of lastParent when available.
          val subPackages = if (lastParent != null) getSubPackages(lastParent.resolveScope) else getSubPackages
          for (subp <- subPackages) {
            if (!processor.execute(subp, state)) return false
          }
        }
        if (bp.kinds.contains(CLASS) || bp.kinds.contains(OBJECT) || bp.kinds.contains(METHOD)) {
          for (clazz <- getClasses) {
            if (!processor.execute(clazz, state)) return false
          }
        }
        true
      case _ => true
    }
  }
}
object ScSyntheticPackage {
  /**
   * Builds the synthetic package for the given fully qualified name from the
   * project's packaging/package-object indices, or returns null when nothing
   * in the project matches.
   */
  def apply(fqn: String)
           (implicit project: Project): ScSyntheticPackage = {
    // Split "a.b.c" into simple name "c" and parent qualifier "a.b".
    val (name, parentName) = fqn.lastIndexOf(".") match {
      case -1 => (fqn, "")
      case i => (fqn.substring(i + 1), fqn.substring(0, i))
    }

    import ScalaIndexKeys._
    PACKAGE_FQN_KEY.integerElements(fqn, classOf[ScPackaging]) match {
      case seq if seq.isEmpty =>
        // No packagings with this FQN: fall back to a package object, if any.
        // The resulting package is empty apart from its name and parent.
        val packages = PACKAGE_OBJECT_KEY.integerElements(fqn, classOf[PsiClass])
        if (packages.exists { pc =>
          ScalaNamesUtil.equivalentFqn(pc.qualifiedName, fqn)
        }) {
          new ScSyntheticPackage(name, PsiManager.getInstance(project)) {
            override def getFiles(globalSearchScope: GlobalSearchScope): Array[PsiFile] = Array.empty //todo: ?
            def containsClassNamed(name: String): Boolean = false
            def getQualifiedName: String = fqn
            def getClasses: Array[PsiClass] = Array.empty
            def getClasses(scope: GlobalSearchScope): Array[PsiClass] = Array.empty
            def getParentPackage: ScPackageImpl = ScPackageImpl.findPackage(project, parentName)
            def getSubPackages: Array[PsiPackage] = Array.empty
            def getSubPackages(scope: GlobalSearchScope): Array[PsiPackage] = Array.empty
            def findClassByShortName(name: String, scope: GlobalSearchScope): Array[PsiClass] = Array.empty
          }
        } else null
      case packages =>
        // Keep only packagings whose full name lies under fqn and whose parent
        // is a prefix of fqn (i.e. packagings belonging to this package).
        val cleanName = ScalaNamesUtil.cleanFqn(fqn)
        packages.filter { pc =>
          ScalaNamesUtil.cleanFqn(pc.fullPackageName).startsWith(cleanName) && cleanName.startsWith(ScalaNamesUtil.cleanFqn(pc.parentPackageName))
        } match {
          case seq if seq.isEmpty => null
          case filtered =>
            new ScSyntheticPackage(name, PsiManager.getInstance(project)) {
              override def getFiles(globalSearchScope: GlobalSearchScope): Array[PsiFile] = Array.empty //todo: ?
              def findClassByShortName(name: String, scope: GlobalSearchScope): Array[PsiClass] = {
                getClasses.filter(n => ScalaNamesUtil.equivalentFqn(n.name, name))
              }
              def containsClassNamed(name: String): Boolean = {
                getClasses.exists(n => ScalaNamesUtil.equivalentFqn(n.name, name))
              }
              def getQualifiedName: String = fqn
              def getClasses: Array[PsiClass] = {
                // Classes come from packagings whose full name equals fqn;
                // type definitions with a fake companion contribute both.
                filtered.flatMap(p =>
                  if (ScalaNamesUtil.cleanFqn(p.fullPackageName).length == cleanName.length)
                    p.immediateTypeDefinitions.flatMap {
                      case td@(c: ScTypeDefinition) if c.fakeCompanionModule.isDefined =>
                        Seq(td, c.fakeCompanionModule.get)
                      case td => Seq(td)
                    }
                  else Seq.empty).toArray
              }
              def getClasses(scope: GlobalSearchScope): Array[PsiClass] =
                getClasses.filter { clazz =>
                  val file = clazz.getContainingFile.getVirtualFile
                  file != null && scope.contains(file)
                }
              def getParentPackage: ScPackageImpl = ScPackageImpl.findPackage(project, parentName)
              def getSubPackages: Array[PsiPackage] = {
                val buff = new mutable.HashSet[PsiPackage]
                filtered.foreach{
                  p =>
                    // Registers fqn.tail as a sub-package when it resolves.
                    def addPackage(tail : String) {
                      val p = ScPackageImpl.findPackage(project, fqn + "." + tail)
                      if (p != null) buff += p
                    }

                    val fqn1 = p.fullPackageName
                    val tail = if (fqn1.length > fqn.length) fqn1.substring(fqn.length + 1) else ""

                    if (tail.length == 0) {
                      // Packaging is this package itself: nested packagings
                      // and package objects contribute first-segment children.
                      p.packagings.foreach {
                        pack => {
                          val own = pack.packageName
                          val i = own.indexOf(".")
                          addPackage(if (i > 0) own.substring(0, i) else own)
                        }
                      }
                      p.immediateTypeDefinitions.foreach {
                        case o: ScObject if o.isPackageObject && o.getName != "`package`" =>
                          addPackage(o.name)
                        case _ =>
                      }
                    } else {
                      // Packaging is deeper: its first remaining segment is a child.
                      val i = tail.indexOf(".")
                      val next = if (i > 0) tail.substring(0, i) else tail
                      addPackage(next)
                    }
                }
                buff.toArray
              }
              def getSubPackages(scope: GlobalSearchScope): Array[PsiPackage] = getSubPackages
            }
        }
    }
  }
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/synthetic/ScSyntheticPackage.scala | Scala | apache-2.0 | 7,854 |
/* Title: Pure/General/json.scala
Author: Makarius
Support for JSON: https://www.json.org/.
See also http://seriot.ch/parsing_json.php "Parsing JSON is a Minefield".
*/
package isabelle
import scala.util.parsing.combinator.Parsers
import scala.util.parsing.combinator.lexical.Scanners
import scala.util.parsing.input.CharArrayReader.EofCh
object JSON
{
  /** A JSON value as produced by the parser below: null, Boolean, Double,
      String, List[T], or Map[String, T]. */
  type T = Any
  /** JSON source text. */
  type S = String
  /** A JSON object: a map from string keys to JSON values. */
  object Object
  {
    type Entry = (String, JSON.T)

    type T = Map[String, JSON.T]
    val empty: Object.T = Map.empty

    def apply(entries: Entry*): Object.T = Map(entries:_*)

    // Matches any Map whose keys are all strings at runtime (value types
    // cannot be checked due to erasure; they are assumed to be JSON values).
    def unapply(obj: T): Option[Object.T] =
      obj match {
        case m: Map[_, _] if m.keySet.forall(_.isInstanceOf[String]) =>
          Some(m.asInstanceOf[Object.T])
        case _ => None
      }
  }
/* lexer */
  /** Lexical token categories produced by [[Lexer]]. */
  object Kind extends Enumeration
  {
    val KEYWORD, STRING, NUMBER, ERROR = Value
  }
  /** One lexical token: its category and its (already unescaped) text. */
  sealed case class Token(kind: Kind.Value, text: String)
  {
    def is_keyword: Boolean = kind == Kind.KEYWORD
    def is_keyword(name: String): Boolean = kind == Kind.KEYWORD && text == name
    def is_string: Boolean = kind == Kind.STRING
    def is_number: Boolean = kind == Kind.NUMBER
    def is_error: Boolean = kind == Kind.ERROR
  }
  /** Tokenizer for JSON text: keywords/punctuation, strings (with escapes),
      and numbers, per the JSON grammar. */
  object Lexer extends Scanners with Scan.Parsers
  {
    override type Elem = Char
    type Token = JSON.Token

    def errorToken(msg: String): Token = Token(Kind.ERROR, msg)

    val white_space: String = " \\t\\n\\r"
    override val whiteSpace = ("[" + white_space + "]+").r
    def whitespace: Parser[Any] = many(character(white_space.contains(_)))

    val letter: Parser[String] = one(character(Symbol.is_ascii_letter(_)))
    val letters1: Parser[String] = many1(character(Symbol.is_ascii_letter(_)))

    def digits: Parser[String] = many(character(Symbol.is_ascii_digit(_)))
    def digits1: Parser[String] = many1(character(Symbol.is_ascii_digit(_)))


    /* keyword */

    // Bare identifiers (true/false/null) and structural punctuation.
    def keyword: Parser[Token] =
      (letters1 | one(character("{}[],:".contains(_)))) ^^ (s => Token(Kind.KEYWORD, s))


    /* string */

    def string: Parser[Token] =
      '\\"' ~> rep(string_body) <~ '\\"' ^^ (cs => Token(Kind.STRING, cs.mkString))

    // Any character above the control range, or a backslash escape.
    def string_body: Parser[Char] =
      elem("", c => c > '\\u001f' && c != '\\"' && c != '\\\\' && c != EofCh) | '\\\\' ~> string_escape

    // Simple escapes plus 4-hex-digit \\uXXXX escapes.
    def string_escape: Parser[Char] =
      elem("", "\\"\\\\/".contains(_)) |
      elem("", "bfnrt".contains(_)) ^^
        { case 'b' => '\\b' case 'f' => '\\f' case 'n' => '\\n' case 'r' => '\\r' case 't' => '\\t' } |
      'u' ~> repeated(character("0123456789abcdefABCDEF".contains(_)), 4, 4) ^^
        (s => Integer.parseInt(s, 16).toChar)

    def string_failure: Parser[Token] = '\\"' ~> failure("Unterminated string")


    /* number */

    // A trailing letter (e.g. "1x") makes the whole number a lexical error.
    def number: Parser[Token] =
      opt("-") ~ number_body ~ opt(letter) ^^ {
        case a ~ b ~ None => Token(Kind.NUMBER, a.getOrElse("") + b)
        case a ~ b ~ Some(c) =>
          errorToken("Invalid number format: " + quote(a.getOrElse("") + b + c))
      }

    def number_body: Parser[String] =
      (zero | positive) ~ opt(number_fract) ~ opt(number_exp) ^^
        { case a ~ b ~ c => a + b.getOrElse("") + c.getOrElse("") }

    def number_fract: Parser[String] = "." ~ digits1 ^^ { case a ~ b => a + b }

    def number_exp: Parser[String] =
      one(character("eE".contains(_))) ~ maybe(character("-+".contains(_))) ~ digits1 ^^
        { case a ~ b ~ c => a + b + c }

    // JSON forbids leading zeros: integers are "0" or nonzero followed by digits.
    def zero = one(character(c => c == '0'))
    def nonzero = one(character(c => c != '0' && Symbol.is_ascii_digit(c)))
    def positive: Parser[String] = nonzero ~ digits ^^ { case a ~ b => a + b }


    /* token */

    def token: Parser[Token] =
      keyword | (string | (string_failure | (number | failure("Illegal character"))))
  }
/* parser */
/* Token-level parser assembling JSON values from the Lexer's token stream. */
trait Parser extends Parsers
{
type Elem = Token
// Matches one specific keyword token (punctuation, or true/false/null).
def $$$(name: String): Parser[Token] = elem(name, _.is_keyword(name))
def string: Parser[String] = elem("string", _.is_string) ^^ (_.text)
def number: Parser[Double] = elem("number", _.is_number) ^^ (tok => tok.text.toDouble)
// Object: comma-separated "name": value pairs in braces, collected into a Map.
def json_object: Parser[Object.T] =
$$$("{") ~>
repsep(string ~ ($$$(":") ~> json_value) ^^ { case a ~ b => (a, b) }, $$$(",")) <~
$$$("}") ^^ (_.toMap)
def json_array: Parser[List[T]] =
$$$("[") ~> repsep(json_value, $$$(",")) <~ $$$("]")
def json_value: Parser[T] =
json_object | (json_array | (number | (string |
($$$("true") ^^^ true | ($$$("false") ^^^ false | ($$$("null") ^^^ null))))))
// strict: accept only an object or array at top level; otherwise any JSON value.
def parse(input: CharSequence, strict: Boolean): T =
{
val scanner = new Lexer.Scanner(Scan.char_reader(input))
phrase(if (strict) json_object | json_array else json_value)(scanner) match {
case Success(json, _) => json
case NoSuccess(_, next) => error("Malformed JSON input at " + next.pos)
}
}
}
object Parser extends Parser
/* standard format */
// Parse a JSON string; strict mode (default) requires a top-level object or array.
def parse(s: S, strict: Boolean = true): T = Parser.parse(s, strict)
/* Extractor/formatter pair: unapply parses (non-strict), apply renders compact JSON. */
object Format
{
def unapply(s: S): Option[T] =
try { Some(parse(s, strict = false)) }
catch { case ERROR(_) => None }
def apply(json: T): S =
{
val result = new StringBuilder
// Render a JSON string literal with the mandatory escapes, plus \uXXXX for
// control characters and the U+007F..U+009F range.
def string(s: String)
{
result += '"'
result ++=
s.iterator.map {
case '"' => "\\\\\\""
case '\\\\' => "\\\\\\\\"
case '\\b' => "\\\\b"
case '\\f' => "\\\\f"
case '\\n' => "\\\\n"
case '\\r' => "\\\\r"
case '\\t' => "\\\\t"
case c =>
if (c <= '\\u001f' || c >= '\\u007f' && c <= '\\u009f') "\\\\u%04x".format(c.toInt)
else c
}.mkString
result += '"'
}
// Comma-separated elements in brackets (Library.separate interleaves the commas).
def array(list: List[T])
{
result += '['
Library.separate(None, list.map(Some(_))).foreach({
case None => result += ','
case Some(x) => json_format(x)
})
result += ']'
}
def object_(obj: Object.T)
{
result += '{'
Library.separate(None, obj.toList.map(Some(_))).foreach({
case None => result += ','
case Some((x, y)) =>
string(x)
result += ':'
json_format(y)
})
result += '}'
}
// Dispatch on the runtime type of the value; whole-number Doubles are printed
// as integers. NB: the List[T] case is unchecked due to type erasure.
def json_format(x: T)
{
x match {
case null => result ++= "null"
case _: Int | _: Long | _: Boolean => result ++= x.toString
case n: Double =>
val i = n.toLong
result ++= (if (i.toDouble == n) i.toString else n.toString)
case s: String => string(s)
case Object(m) => object_(m)
case list: List[T] => array(list)
case _ => error("Bad JSON value: " + x.toString)
}
}
json_format(json)
result.toString
}
}
/* typed values */
/* Typed extractors over JSON values; the numeric ones interconvert
   Int/Long/Double only when the conversion is exact. */
object Value
{
object UUID
{
def unapply(json: T): Option[java.util.UUID] =
json match {
case x: java.lang.String =>
try { Some(java.util.UUID.fromString(x)) }
catch { case _: IllegalArgumentException => None }
case _ => None
}
}
object String {
def unapply(json: T): Option[java.lang.String] =
json match {
case x: java.lang.String => Some(x)
case _ => None
}
}
object Double {
def unapply(json: T): Option[scala.Double] =
json match {
case x: scala.Double => Some(x)
case x: scala.Long => Some(x.toDouble)
case x: scala.Int => Some(x.toDouble)
case _ => None
}
}
// Accepts a Double only when it is a whole number representable as Long.
object Long {
def unapply(json: T): Option[scala.Long] =
json match {
case x: scala.Double if x.toLong.toDouble == x => Some(x.toLong)
case x: scala.Long => Some(x)
case x: scala.Int => Some(x.toLong)
case _ => None
}
}
// Accepts Double/Long only when the value round-trips through Int exactly.
object Int {
def unapply(json: T): Option[scala.Int] =
json match {
case x: scala.Double if x.toInt.toDouble == x => Some(x.toInt)
case x: scala.Long if x.toInt.toLong == x => Some(x.toInt)
case x: scala.Int => Some(x)
case _ => None
}
}
object Boolean {
def unapply(json: T): Option[scala.Boolean] =
json match {
case x: scala.Boolean => Some(x)
case _ => None
}
}
// Decode every element with `unapply`; any failing element yields None overall.
// NB: the List[T] match is unchecked due to type erasure.
object List
{
def unapply[A](json: T, unapply: T => Option[A]): Option[List[A]] =
json match {
case xs: List[T] =>
val ys = xs.map(unapply)
if (ys.forall(_.isDefined)) Some(ys.map(_.get)) else None
case _ => None
}
}
}
/* object values */
/** Build a one-entry object when the value is present, or an empty object otherwise. */
def optional(entry: (String, Option[T])): Object.T = {
  val (name, opt) = entry
  opt.fold(Object.empty)(json => Object(name -> json))
}
// Look up a field of a JSON object; None if obj is not an object or lacks the field.
def value(obj: T, name: String): Option[T] =
obj match {
case Object(m) => m.get(name)
case _ => None
}
// Look up a field and decode it with the given extractor.
def value[A](obj: T, name: String, unapply: T => Option[A]): Option[A] =
for {
json <- value(obj, name)
x <- unapply(json)
} yield x
// Field as a JSON array. NB: the List[T] match is unchecked due to type erasure.
def array(obj: T, name: String): Option[List[T]] =
value(obj, name) match {
case Some(a: List[T]) => Some(a)
case _ => None
}
// Like value(obj, name, unapply), but a missing field yields the default;
// a present field that fails to decode still yields None.
def value_default[A](obj: T, name: String, unapply: T => Option[A], default: => A): Option[A] =
value(obj, name) match {
case None => Some(default)
case Some(json) => unapply(json)
}
/* Typed field accessors; the *_default variants substitute a default value
   when the field is missing (but still fail on a present, mistyped field). */
def uuid(obj: T, name: String): Option[UUID] =
value(obj, name, Value.UUID.unapply)
def string(obj: T, name: String): Option[String] =
value(obj, name, Value.String.unapply)
def string_default(obj: T, name: String, default: => String = ""): Option[String] =
value_default(obj, name, Value.String.unapply, default)
def double(obj: T, name: String): Option[Double] =
value(obj, name, Value.Double.unapply)
def double_default(obj: T, name: String, default: => Double = 0.0): Option[Double] =
value_default(obj, name, Value.Double.unapply, default)
def long(obj: T, name: String): Option[Long] =
value(obj, name, Value.Long.unapply)
def long_default(obj: T, name: String, default: => Long = 0): Option[Long] =
value_default(obj, name, Value.Long.unapply, default)
def int(obj: T, name: String): Option[Int] =
value(obj, name, Value.Int.unapply)
def int_default(obj: T, name: String, default: => Int = 0): Option[Int] =
value_default(obj, name, Value.Int.unapply, default)
def bool(obj: T, name: String): Option[Boolean] =
value(obj, name, Value.Boolean.unapply)
def bool_default(obj: T, name: String, default: => Boolean = false): Option[Boolean] =
value_default(obj, name, Value.Boolean.unapply, default)
def list[A](obj: T, name: String, unapply: T => Option[A]): Option[List[A]] =
value(obj, name, Value.List.unapply(_, unapply))
def list_default[A](obj: T, name: String, unapply: T => Option[A], default: => List[A] = Nil)
: Option[List[A]] = value_default(obj, name, Value.List.unapply(_, unapply), default)
}
| larsrh/libisabelle | modules/pide/2018/src/main/scala/General/json.scala | Scala | apache-2.0 | 11,161 |
package at.vizu.s2n.types.symbol
/**
* Phil on 07.12.15.
*/
/**
 * A type argument that has been bound to a concrete applied type.
 * Most member lookups delegate to `appliedType`; `getConcreteType` unwraps
 * chains of applied arguments down to the underlying type.
 *
 * NOTE(review): `equals` is overridden without a matching `hashCode`, so
 * instances are unreliable as hash-based collection keys — confirm intended.
 */
class AppliedTypeArgument(val appliedType: TType, genericName: String, upperBound: TType, lowerBound: TType,
covariant: Boolean, contravariant: Boolean, val genericModifier: TypeArgument)
extends TypeArgument(genericModifier.ctx, genericName, upperBound, lowerBound,
covariant, contravariant) {
override def methods: Seq[Method] = appliedType.methods
override def fields: Seq[Field] = appliedType.fields
override def pkg: String = appliedType.pkg
override def simpleName: String = appliedType.simpleName
override def hasParent(tpe: TType): Boolean = appliedType.hasParent(tpe)
// If still generic after unwrapping, delegate; otherwise use subtyping.
override def isAssignableFrom(other: TType): Boolean = getConcreteType match {
case g: TypeArgument => g.isAssignableFrom(other)
case _@t => other.hasParent(t)
}
override def findField(execCtx: TType, name: String) = appliedType.findField(execCtx, name)
override def findMethod(execCtx: TType, name: String, args: Seq[TType]) = appliedType.findMethod(execCtx, name, args)
override def parents = appliedType.parents
// NOTE(review): re-applying an already applied argument is unsupported;
// the placeholder exception message ("AHHH") should be made descriptive.
override def applyType(appliedType: TType): AppliedTypeArgument = {
throw new RuntimeException("AHHH")
}
override def isAssignableAsParam(other: TType): Boolean = other match {
case a: AppliedTypeArgument => checkVariances(a)
case g: TypeArgument => this == g
case c: ConcreteType => appliedType == c
case _ => false
}
override def equals(that: Any): Boolean = that match {
case a: AppliedTypeArgument => a.getConcreteType == getConcreteType
case g: TypeArgument => appliedType == g
case c: ConcreteType => getConcreteType == c
case _ => false
}
override def typeEquals(that: Any): Boolean = {
that match {
case a: AppliedTypeArgument => getConcreteType == a.getConcreteType
case g: TypeArgument => getConcreteType == g
case c: ConcreteType => getConcreteType == c
case _ => false
}
}
// Unwrap nested applied arguments until a non-applied type is reached.
def getConcreteType: TType = {
appliedType match {
case a: AppliedTypeArgument => a.getConcreteType
case _ => appliedType
}
}
override def isGenericModifier: Boolean = getConcreteType match {
case g: TypeArgument => true
case _ => false
}
override def gmCovariance: Boolean = genericModifier.covariance
override def gmContravariance: Boolean = genericModifier.contravariance
override def baseTypeEquals(obj: TType): Boolean = getConcreteType.baseTypeEquals(obj)
override def toString: String = appliedType.toString
}
| viZu/nasca | src/main/scala/at/vizu/s2n/types/symbol/AppliedTypeArgument.scala | Scala | apache-2.0 | 2,567 |
package com.github.simonedeponti.play26lettuce
import java.nio.ByteBuffer
import java.nio.charset.Charset
import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import io.lettuce.core.codec.RedisCodec
/** Encodes and decodes keys and values using akka's pluggable serializers.
*
* Only values are actually encoded using akka's serialization:
* keys are maintained as text to ease debugging.
*
* See https://doc.akka.io/docs/akka/2.5/serialization.html?language=scala
*
* @param system Akka's active actor system
*/
/** Redis codec backed by akka serialization.
  *
  * Keys are kept as UTF-8 text for debuggability; values are encoded as a
  * 4-byte serializer-id header followed by the serializer's binary payload.
  */
class AkkaCodec(val system: ActorSystem) extends RedisCodec[String, AnyRef] {

  private val serialization = SerializationExtension(system)
  private val charset = Charset.forName("UTF-8")

  override def decodeKey(bytes: ByteBuffer): String = charset.decode(bytes).toString

  override def decodeValue(bytes: ByteBuffer): AnyRef = {
    // The leading 4 bytes identify the akka serializer that wrote the payload.
    val id = bytes.getInt
    val payload = new Array[Byte](bytes.remaining())
    bytes.get(payload)
    serialization.serializerByIdentity(id).fromBinary(payload)
  }

  override def encodeKey(key: String): ByteBuffer = charset.encode(key)

  override def encodeValue(value: AnyRef): ByteBuffer = {
    val serializer = serialization.findSerializerFor(value)
    val idHeader = ByteBuffer.allocate(4).putInt(serializer.identifier).array()
    ByteBuffer.wrap(idHeader ++ serializer.toBinary(value))
  }
}
}
| simonedeponti/play26-lettuce | src/main/scala/com/github/simonedeponti/play26lettuce/AkkaCodec.scala | Scala | bsd-3-clause | 1,468 |
package jsky.app.ot.viewer.action
import jsky.app.ot.viewer.SPViewer
import javax.swing._
import java.awt.event.{ActionEvent, InputEvent, KeyEvent}
/**
* Close the current program.
*/
class CloseWindowAction(viewer: SPViewer) extends AbstractViewerAction(viewer, "Close Window") {
putValue(AbstractViewerAction.SHORT_NAME, "Close Window")
putValue(Action.SHORT_DESCRIPTION, "Close the science program viewer and all open programs that it contains.")
// Shortcut: Shift + platform menu modifier + W (e.g. Cmd+Shift+W on macOS).
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_W, InputEvent.SHIFT_DOWN_MASK | AbstractViewerAction.platformEventMask()))
setEnabled(true)
// Should always be enabled, as only appears in a window that CAN be closed.
override def computeEnabledState() = true
override def actionPerformed(e: ActionEvent) {
viewer.closeWindow()
}
}
| arturog8m/ocs | bundle/jsky.app.ot/src/main/scala/jsky/app/ot/viewer/action/CloseWindowAction.scala | Scala | bsd-3-clause | 829 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util.{Date, UUID}
import org.locationtech.geomesa.utils.geotools.ObjectType.ObjectType
import org.locationtech.jts.geom.Geometry
import org.opengis.feature.`type`.AttributeDescriptor
import scala.math.Ordering
/**
* Ordering of simple feature attributes
*/
object AttributeOrdering {
/**
* An ordering for a particular attribute. Note that although the signature is AnyRef for ease of use
* with the simple feature API, the actual values being sorted must correspond to the type of the attribute
*
* @param descriptor descriptor
* @return
*/
def apply(descriptor: AttributeDescriptor): Ordering[AnyRef] = apply(ObjectType.selectType(descriptor))
/**
* An ordering for a particular attribute. Note that although the signature is AnyRef for ease of use
* with the simple feature API, the actual values being sorted must correspond to the type of the attribute
*
* @param bindings type bindings
* @return
*/
def apply(bindings: Seq[ObjectType]): Ordering[AnyRef] = {
val ordering = bindings.head match {
case ObjectType.STRING => StringOrdering
case ObjectType.INT => IntOrdering
case ObjectType.LONG => LongOrdering
case ObjectType.FLOAT => FloatOrdering
case ObjectType.DOUBLE => DoubleOrdering
case ObjectType.BOOLEAN => BooleanOrdering
case ObjectType.DATE => DateOrdering
case ObjectType.UUID => UuidOrdering
case ObjectType.GEOMETRY => GeometryOrdering
case ObjectType.BYTES => BytesOrdering
case ObjectType.LIST => list(bindings.last)
case ObjectType.MAP => throw new NotImplementedError("Ordering for Map-type attributes is not supported")
case b => throw new NotImplementedError(s"Unexpected attribute type: $b")
}
ordering.asInstanceOf[Ordering[AnyRef]]
}
// Ordering for java.util.List-typed attributes, selected by element binding.
private def list(binding: ObjectType): Ordering[AnyRef] = {
val ordering = binding match {
case ObjectType.STRING => StringListOrdering
case ObjectType.INT => IntListOrdering
case ObjectType.LONG => LongListOrdering
case ObjectType.FLOAT => FloatListOrdering
case ObjectType.DOUBLE => DoubleListOrdering
case ObjectType.BOOLEAN => BooleanListOrdering
case ObjectType.DATE => DateListOrdering
case ObjectType.UUID => UuidListOrdering
case ObjectType.GEOMETRY => GeometryListOrdering
case ObjectType.BYTES => BytesListOrdering
case b => throw new NotImplementedError(s"Unexpected attribute type: List[$b]")
}
ordering.asInstanceOf[Ordering[AnyRef]]
}
// Pre-built, null-safe orderings for each supported attribute type (nulls sort first).
val StringOrdering : Ordering[String] = new NullOrdering(Ordering.String)
val IntOrdering : Ordering[Integer] = new NullOrdering(Ordering.ordered[Integer])
val LongOrdering : Ordering[java.lang.Long] = new NullOrdering(Ordering.ordered[java.lang.Long])
val FloatOrdering : Ordering[java.lang.Float] = new NullOrdering(Ordering.ordered[java.lang.Float])
val DoubleOrdering : Ordering[java.lang.Double] = new NullOrdering(Ordering.ordered[java.lang.Double])
val BooleanOrdering : Ordering[java.lang.Boolean] = new NullOrdering(Ordering.ordered[java.lang.Boolean])
val DateOrdering : Ordering[Date] = new NullOrdering(Ordering.ordered[Date])
val UuidOrdering : Ordering[UUID] = new NullOrdering(Ordering.ordered[UUID])
val GeometryOrdering : Ordering[Geometry] = new NullOrdering(new GeometryOrdering())
val BytesOrdering : Ordering[Array[Byte]] = new NullOrdering(new BytesOrdering())
val StringListOrdering : Ordering[java.util.List[String]] = ListOrdering(StringOrdering)
val IntListOrdering : Ordering[java.util.List[Integer]] = ListOrdering(IntOrdering)
val LongListOrdering : Ordering[java.util.List[java.lang.Long]] = ListOrdering(LongOrdering)
val FloatListOrdering : Ordering[java.util.List[java.lang.Float]] = ListOrdering(FloatOrdering)
val DoubleListOrdering : Ordering[java.util.List[java.lang.Double]] = ListOrdering(DoubleOrdering)
val BooleanListOrdering : Ordering[java.util.List[java.lang.Boolean]] = ListOrdering(BooleanOrdering)
val DateListOrdering : Ordering[java.util.List[Date]] = ListOrdering(DateOrdering)
val UuidListOrdering : Ordering[java.util.List[UUID]] = ListOrdering(UuidOrdering)
val GeometryListOrdering : Ordering[java.util.List[Geometry]] = ListOrdering(GeometryOrdering)
val BytesListOrdering : Ordering[java.util.List[Array[Byte]]] = ListOrdering(BytesOrdering)
// Delegates to JTS Geometry.compareTo.
private class GeometryOrdering extends Ordering[Geometry] {
override def compare(x: Geometry, y: Geometry): Int = x.compareTo(y)
}
// Element-wise signed-byte comparison; on a shared prefix the shorter array sorts first.
private class BytesOrdering extends Ordering[Array[Byte]] {
override def compare(x: Array[Byte], y: Array[Byte]): Int = {
val len = math.min(x.length, y.length)
var i = 0
while (i < len) {
val res = java.lang.Byte.compare(x(i), y(i))
if (res != 0) {
return res
}
i += 1
}
Integer.compare(x.length, y.length)
}
}
private object ListOrdering {
def apply[T](delegate: Ordering[T]): Ordering[java.util.List[T]] = new NullOrdering(new ListOrdering(delegate))
}
// Element-wise comparison via the delegate; on a shared prefix the shorter list sorts first.
private class ListOrdering[T](delegate: Ordering[T]) extends Ordering[java.util.List[T]] {
override def compare(x: java.util.List[T], y: java.util.List[T]): Int = {
val len = math.min(x.size, y.size)
var i = 0
while (i < len) {
val res = delegate.compare(x.get(i), y.get(i))
if (res != 0) {
return res
}
i += 1
}
Integer.compare(x.size, y.size)
}
}
// Wraps a delegate ordering so that nulls compare less than any non-null value.
private class NullOrdering[T](delegate: Ordering[T]) extends Ordering[T] {
override def compare(x: T, y: T): Int = {
if (x == null) {
if (y == null) { 0 } else { -1 }
} else if (y == null) {
1
} else {
delegate.compare(x, y)
}
}
}
}
| locationtech/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/AttributeOrdering.scala | Scala | apache-2.0 | 6,616 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.io.File
import java.util.concurrent.TimeoutException
import org.mockito.Matchers
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.BeforeAndAfter
import org.apache.hadoop.mapred.{TaskAttemptID, JobConf, TaskAttemptContext, OutputCommitter}
import org.apache.spark._
import org.apache.spark.rdd.{RDD, FakeOutputCommitter}
import org.apache.spark.util.Utils
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
/**
* Unit tests for the output commit coordination functionality.
*
* The unit test makes both the original task and the speculated task
* attempt to commit, where committing is emulated by creating a
* directory. If both tasks create directories then the end result is
* a failure.
*
* Note that there are some aspects of this test that are less than ideal.
* In particular, the test mocks the speculation-dequeuing logic to always
* dequeue a task and consider it as speculated. Immediately after initially
* submitting the tasks and calling reviveOffers(), reviveOffers() is invoked
* again to pick up the speculated task. This may be hacking the original
* behavior in too much of an unrealistic fashion.
*
* Also, the validation is done by checking the number of files in a directory.
* Ideally, an accumulator would be used for this, where we could increment
* the accumulator in the output committer's commitTask() call. If the call to
* commitTask() was called twice erroneously then the test would ideally fail because
* the accumulator would be incremented twice.
*
* The problem with this test implementation is that when both a speculated task and
* its original counterpart complete, only one of the accumulator's increments is
* captured. This results in a paradox where if the OutputCommitCoordinator logic
* was not in SparkHadoopWriter, the tests would still pass because only one of the
* increments would be captured even though the commit in both tasks was executed
* erroneously.
*
* See also: [[OutputCommitCoordinatorIntegrationSuite]] for integration tests that do
* not use mocks.
*/
class OutputCommitCoordinatorSuite extends SparkFunSuite with BeforeAndAfter {
var outputCommitCoordinator: OutputCommitCoordinator = null
var tempDir: File = null
var sc: SparkContext = null
before {
tempDir = Utils.createTempDir()
val conf = new SparkConf()
.setMaster("local[4]")
.setAppName(classOf[OutputCommitCoordinatorSuite].getSimpleName)
.set("spark.speculation", "true")
sc = new SparkContext(conf) {
// Intercept env creation so the coordinator can be spied on by the tests.
override private[spark] def createSparkEnv(
conf: SparkConf,
isLocal: Boolean,
listenerBus: LiveListenerBus): SparkEnv = {
outputCommitCoordinator = spy(new OutputCommitCoordinator(conf, isDriver = true))
// Use Mockito.spy() to maintain the default infrastructure everywhere else.
// This mocking allows us to control the coordinator responses in test cases.
SparkEnv.createDriverEnv(conf, isLocal, listenerBus, Some(outputCommitCoordinator))
}
}
// Use Mockito.spy() to maintain the default infrastructure everywhere else
val mockTaskScheduler = spy(sc.taskScheduler.asInstanceOf[TaskSchedulerImpl])
doAnswer(new Answer[Unit]() {
override def answer(invoke: InvocationOnMock): Unit = {
// Submit the tasks, then force the task scheduler to dequeue the
// speculated task
invoke.callRealMethod()
mockTaskScheduler.backend.reviveOffers()
}
}).when(mockTaskScheduler).submitTasks(Matchers.any())
doAnswer(new Answer[TaskSetManager]() {
override def answer(invoke: InvocationOnMock): TaskSetManager = {
val taskSet = invoke.getArguments()(0).asInstanceOf[TaskSet]
new TaskSetManager(mockTaskScheduler, taskSet, 4) {
// Always "speculate" partition 0 exactly once per task set.
var hasDequeuedSpeculatedTask = false
override def dequeueSpeculativeTask(
execId: String,
host: String,
locality: TaskLocality.Value): Option[(Int, TaskLocality.Value)] = {
if (!hasDequeuedSpeculatedTask) {
hasDequeuedSpeculatedTask = true
Some(0, TaskLocality.PROCESS_LOCAL)
} else {
None
}
}
}
}
}).when(mockTaskScheduler).createTaskSetManager(Matchers.any(), Matchers.any())
sc.taskScheduler = mockTaskScheduler
val dagSchedulerWithMockTaskScheduler = new DAGScheduler(sc, mockTaskScheduler)
sc.taskScheduler.setDAGScheduler(dagSchedulerWithMockTaskScheduler)
sc.dagScheduler = dagSchedulerWithMockTaskScheduler
}
after {
sc.stop()
tempDir.delete()
outputCommitCoordinator = null
}
// Each successful commit creates exactly one directory under tempDir,
// so the directory count measures how many commits actually happened.
test("Only one of two duplicate commit tasks should commit") {
val rdd = sc.parallelize(Seq(1), 1)
sc.runJob(rdd, OutputCommitFunctions(tempDir.getAbsolutePath).commitSuccessfully _,
0 until rdd.partitions.size)
assert(tempDir.list().size === 1)
}
test("If commit fails, if task is retried it should not be locked, and will succeed.") {
val rdd = sc.parallelize(Seq(1), 1)
sc.runJob(rdd, OutputCommitFunctions(tempDir.getAbsolutePath).failFirstCommitAttempt _,
0 until rdd.partitions.size)
assert(tempDir.list().size === 1)
}
test("Job should not complete if all commits are denied") {
// Create a mock OutputCommitCoordinator that denies all attempts to commit
doReturn(false).when(outputCommitCoordinator).handleAskPermissionToCommit(
Matchers.any(), Matchers.any(), Matchers.any())
val rdd: RDD[Int] = sc.parallelize(Seq(1), 1)
def resultHandler(x: Int, y: Unit): Unit = {}
val futureAction: SimpleFutureAction[Unit] = sc.submitJob[Int, Unit, Unit](rdd,
OutputCommitFunctions(tempDir.getAbsolutePath).commitSuccessfully,
0 until rdd.partitions.size, resultHandler, () => Unit)
// It's an error if the job completes successfully even though no committer was authorized,
// so throw an exception if the job was allowed to complete.
intercept[TimeoutException] {
Await.result(futureAction, 5 seconds)
}
assert(tempDir.list().size === 0)
}
test("Only authorized committer failures can clear the authorized committer lock (SPARK-6614)") {
val stage: Int = 1
val partition: Int = 2
val authorizedCommitter: Int = 3
val nonAuthorizedCommitter: Int = 100
outputCommitCoordinator.stageStart(stage)
assert(outputCommitCoordinator.canCommit(stage, partition, authorizedCommitter))
assert(!outputCommitCoordinator.canCommit(stage, partition, nonAuthorizedCommitter))
// The non-authorized committer fails
outputCommitCoordinator.taskCompleted(
stage, partition, attemptNumber = nonAuthorizedCommitter, reason = TaskKilled)
// New tasks should still not be able to commit because the authorized committer has not failed
assert(
!outputCommitCoordinator.canCommit(stage, partition, nonAuthorizedCommitter + 1))
// The authorized committer now fails, clearing the lock
outputCommitCoordinator.taskCompleted(
stage, partition, attemptNumber = authorizedCommitter, reason = TaskKilled)
// A new task should now be allowed to become the authorized committer
assert(
outputCommitCoordinator.canCommit(stage, partition, nonAuthorizedCommitter + 2))
// There can only be one authorized committer
assert(
!outputCommitCoordinator.canCommit(stage, partition, nonAuthorizedCommitter + 3))
}
}
/**
* Class with methods that can be passed to runJob to test commits with a mock committer.
*/
private case class OutputCommitFunctions(tempDirPath: String) {
// Mock output committer that simulates a successful commit (after commit is authorized)
private def successfulOutputCommitter = new FakeOutputCommitter {
override def commitTask(context: TaskAttemptContext): Unit = {
Utils.createDirectory(tempDirPath)
}
}
// Mock output committer that simulates a failed commit (after commit is authorized)
private def failingOutputCommitter = new FakeOutputCommitter {
override def commitTask(taskAttemptContext: TaskAttemptContext) {
throw new RuntimeException
}
}
def commitSuccessfully(iter: Iterator[Int]): Unit = {
val ctx = TaskContext.get()
runCommitWithProvidedCommitter(ctx, iter, successfulOutputCommitter)
}
// First attempt (attemptNumber == 0) fails to commit; retries succeed.
def failFirstCommitAttempt(iter: Iterator[Int]): Unit = {
val ctx = TaskContext.get()
runCommitWithProvidedCommitter(ctx, iter,
if (ctx.attemptNumber == 0) failingOutputCommitter else successfulOutputCommitter)
}
// Runs the Hadoop commit protocol for this task attempt with the supplied
// committer; the TaskAttemptContext is mocked since only committer behavior matters.
private def runCommitWithProvidedCommitter(
ctx: TaskContext,
iter: Iterator[Int],
outputCommitter: OutputCommitter): Unit = {
def jobConf = new JobConf {
override def getOutputCommitter(): OutputCommitter = outputCommitter
}
val sparkHadoopWriter = new SparkHadoopWriter(jobConf) {
override def newTaskAttemptContext(
conf: JobConf,
attemptId: TaskAttemptID): TaskAttemptContext = {
mock(classOf[TaskAttemptContext])
}
}
sparkHadoopWriter.setup(ctx.stageId, ctx.partitionId, ctx.attemptNumber)
sparkHadoopWriter.commit()
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/test/scala/org/apache/spark/scheduler/OutputCommitCoordinatorSuite.scala | Scala | apache-2.0 | 10,164 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.config
import com.twitter.zipkin.storage.{Aggregates, Index, Storage}
import com.twitter.zipkin.collector.{WriteQueue, ZipkinCollector}
import com.twitter.zipkin.collector.filter.{ServiceStatsFilter, SamplerFilter, ClientIndexFilter}
import com.twitter.zipkin.collector.sampler.{AdaptiveSampler, ZooKeeperGlobalSampler, GlobalSampler}
import com.twitter.zipkin.config.collector.CollectorServerConfig
import com.twitter.zipkin.config.sampler._
import com.twitter.zipkin.config.zookeeper.{ZooKeeperClientConfig, ZooKeeperConfig}
import com.twitter.common.zookeeper.ZooKeeperClient
import com.twitter.conversions.time._
import com.twitter.ostrich.admin.{ServiceTracker, RuntimeEnvironment}
import com.twitter.util.{FuturePool, Config}
import com.twitter.zk._
import java.net.{InetAddress, InetSocketAddress}
import org.apache.zookeeper.ZooDefs.Ids
import scala.collection.JavaConverters._
import com.twitter.zipkin.collector.processor._
import com.twitter.zipkin.common.Span
import com.twitter.finagle.{Filter, Service}
/** Wiring for a Zipkin collector: storage/index/aggregates backends, ZooKeeper
  * coordination, sampling, and the span-processing pipeline. */
trait ZipkinCollectorConfig extends ZipkinConfig[ZipkinCollector] {
var serverPort : Int = 9410
var adminPort : Int = 9900
/* ZooKeeper paths */
var zkConfigPath : String = "/twitter/service/zipkin/config"
var zkServerSetPath : String = "/twitter/service/zipkin/collector"
/* ZooKeeper key for `AdjustableRateConfig`s */
var zkSampleRateKey : String = "samplerate"
var zkStorageRequestRateKey : String = "storagerequestrate"
/* Prefix for service/endpoint stats */
var serviceStatsPrefix : String = "agg."
/* Do not publish .p<percent> stats */
adminStatsFilters = (serviceStatsPrefix + """.*\\.p([0-9]*)""").r :: adminStatsFilters
/* Storage */
def storageConfig: StorageConfig
lazy val storage: Storage = storageConfig.apply()
/* Index */
def indexConfig: IndexConfig
lazy val index: Index = indexConfig.apply()
/* Aggregates */
def aggregatesConfig: AggregatesConfig
lazy val aggregates: Aggregates = aggregatesConfig.apply()
/* ZooKeeper */
def zkConfig: ZooKeeperConfig
def zkClientConfig: ZooKeeperClientConfig = new ZooKeeperClientConfig {
var config = zkConfig
}
lazy val zkClient: ZooKeeperClient = zkClientConfig.apply()
lazy val connector: Connector =
CommonConnector(zkClient)(FuturePool.defaultPool)
// ZK client with open ACL and exponential retry (1s base, x1.5 backoff).
lazy val zClient: ZkClient =
ZkClient(connector)
.withAcl(Ids.OPEN_ACL_UNSAFE.asScala)
.withRetryPolicy(RetryPolicy.Exponential(1.second, 1.5)(timer))
/* `AdjustableRateConfig`s */
lazy val sampleRateConfig: AdjustableRateConfig =
ZooKeeperSampleRateConfig(zClient, zkConfigPath, zkSampleRateKey)
lazy val storageRequestRateConfig: AdjustableRateConfig =
ZooKeeperStorageRequestRateConfig(zClient, zkConfigPath, zkStorageRequestRateKey)
/**
* Adaptive Sampler
* Dynamically adjusts the sample rate so we have a stable write throughput
* Default is a NullAdaptiveSamplerConfig that does nothing
**/
def adaptiveSamplerConfig: AdaptiveSamplerConfig = new NullAdaptiveSamplerConfig {}
lazy val adaptiveSampler: AdaptiveSampler = adaptiveSamplerConfig.apply()
def globalSampler: GlobalSampler = new ZooKeeperGlobalSampler(sampleRateConfig)
/**
* To accommodate a particular input type `T`, define a `rawDataFilter` that
* converts the input data type (ex: Scrooge-generated Thrift) into a `com.twitter.zipkin.common.Span`
*/
type T
def rawDataFilter: Filter[T, Unit, Span, Unit]
// Pipeline: decode raw input -> sample -> record service stats -> fan out to
// storage and (client-span-filtered) indexing.
lazy val processor: Service[T, Unit] =
rawDataFilter andThen
new SamplerFilter(globalSampler) andThen
new ServiceStatsFilter andThen
new FanoutService[Span](
new StorageService(storage) ::
(new ClientIndexFilter andThen new IndexService(index))
)
def writeQueueConfig: WriteQueueConfig[T]
lazy val writeQueue: WriteQueue[T] = writeQueueConfig.apply(processor)
lazy val serverAddr = new InetSocketAddress(InetAddress.getLocalHost, serverPort)
val serverConfig: CollectorServerConfig
def apply(runtime: RuntimeEnvironment): ZipkinCollector = {
new ZipkinCollector(this)
}
}
/** Configuration for the collector's bounded write queue.
  *
  * Fix: the two `apply` overloads duplicated the construct/start/register
  * sequence; the no-arg overload now delegates, keeping behavior identical.
  */
trait WriteQueueConfig[T] extends Config[WriteQueue[T]] {

  var writeQueueMaxSize: Int = 500
  var flusherPoolSize: Int = 10

  /** Create, start, and register a write queue draining into `service`. */
  def apply(service: Service[T, Unit]): WriteQueue[T] = {
    val wq = new WriteQueue[T](writeQueueMaxSize, flusherPoolSize, service)
    wq.start()
    ServiceTracker.register(wq)
    wq
  }

  /** Create a write queue that discards its input (no processor wired up). */
  def apply(): WriteQueue[T] = apply(new NullService[T])
}
| samstokes/zipkin | zipkin-collector-core/src/main/scala/com/twitter/zipkin/config/ZipkinCollectorConfig.scala | Scala | apache-2.0 | 5,237 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization
import java.io.InputStream
import java.io.OutputStream
import java.io.Serializable
import java.nio.ByteBuffer
import org.apache.hadoop.io.serializer.{Serialization, Deserializer, Serializer, WritableSerialization}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.{Serializer => KSerializer}
import com.esotericsoftware.kryo.io.{Input, Output}
import com.esotericsoftware.kryo.serializers.FieldSerializer
import cascading.tuple.hadoop.TupleSerialization
import cascading.tuple.hadoop.io.BufferedInputStream
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
import scala.collection.immutable.HashMap
import com.twitter.scalding.DateRange
import com.twitter.scalding.RichDate
import com.twitter.scalding.Args
import com.twitter.chill._
import com.twitter.chill.config.Config
class KryoHadoop(config: Config) extends KryoInstantiator {

  /** TODO!!!
   * Deal with this issue. The problem is grouping by Kryo serialized
   * objects silently breaks the results. If Kryo gets in front of TupleSerialization
   * (and possibly Writable, unclear at this time), grouping is broken.
   * There are two issues here:
   * 1) Kryo objects not being compared properly.
   * 2) Kryo being used instead of cascading.
   *
   * We must identify each and fix these bugs.
   */
  override def newKryo: Kryo = {
    val k = (new ScalaKryoInstantiator).newKryo

    // Scalding-specific types, each with a dedicated serializer.
    // NOTE: registration order is preserved from the original implementation;
    // Kryo assigns class ids in registration order.
    k.register(classOf[RichDate], new RichDateSerializer())
    k.register(classOf[DateRange], new DateRangeSerializer())
    k.register(classOf[Args], new ArgsSerializer)

    // Algebird monoid/aggregation types commonly flowing through jobs.
    k.register(classOf[com.twitter.algebird.AveragedValue], new AveragedValueSerializer)
    k.register(classOf[com.twitter.algebird.DecayedValue], new DecayedValueSerializer)
    k.register(classOf[com.twitter.algebird.HyperLogLogMonoid], new HLLMonoidSerializer)
    k.register(classOf[com.twitter.algebird.Moments], new MomentsSerializer)
    k.addDefaultSerializer(classOf[com.twitter.algebird.HLL], new HLLSerializer)

    // AdaptiveVector is an IndexedSeq, which would otherwise pick up the
    // chill IndexedSeq serializer (which is its own bug); force the fields
    // serializer for the concrete vector types and the base class instead.
    k.register(
      classOf[com.twitter.algebird.DenseVector[_]],
      new FieldSerializer[com.twitter.algebird.DenseVector[_]](k,
        classOf[com.twitter.algebird.DenseVector[_]]))
    k.register(
      classOf[com.twitter.algebird.SparseVector[_]],
      new FieldSerializer[com.twitter.algebird.SparseVector[_]](k,
        classOf[com.twitter.algebird.SparseVector[_]]))
    k.addDefaultSerializer(
      classOf[com.twitter.algebird.AdaptiveVector[_]],
      classOf[FieldSerializer[_]])

    // Pipes can be swept up into closures inside of case classes. Serializing
    // them as a null singleton is generally safe: a method that actually
    // dereferenced the pipe after deserialization would NPE rather than
    // silently corrupt data. A more robust solution would be Spark-style
    // closure cleaning on every serialized object, but that is very expensive.
    k.addDefaultSerializer(classOf[cascading.pipe.Pipe], new SingletonSerializer(null))

    // Reference tracking is costly for memory and often triggers OOMs on
    // Hadoop, so it is disabled unless explicitly requested via config.
    k.setReferences(config.getBoolean("scalding.kryo.setreferences", false))

    // Use the thread's context class loader so classes from the submitted
    // jar and any -libjars arguments can be resolved.
    k.setClassLoader(Thread.currentThread.getContextClassLoader)
    k
  }
}
| danosipov/scalding | scalding-core/src/main/scala/com/twitter/scalding/serialization/KryoHadoop.scala | Scala | apache-2.0 | 4,409 |
package nsmc.adhoc
import com.mongodb.casbah.Imports._
import com.mongodb.casbah.MongoClient
import util.EmbeddedMongo
/** Ad-hoc smoke test driver against an embedded MongoDB instance. */
object Doit {
  def main (args: Array[String]) {
    val embedded = new EmbeddedMongo()
    val client = MongoClient("localhost", embedded.getPort())

    // List the databases visible on the embedded server.
    client.dbNames().foreach(println)

    val collection = client.getDB("local")("testCollection")
    collection.drop()

    // Insert two sample documents, then echo the collection contents.
    val docs = Seq(
      MongoDBObject("foo" -> 1) ++ ("name" -> "one"),
      MongoDBObject("foo" -> 2) ++ ("name" -> "two")
    )
    docs.foreach(doc => collection += doc)
    collection.foreach(println)

    // TODO: make sure it's actually in the database
    new MongoReader().read()

    embedded.stop()
  }
}
| shotishu/spark-mongodb-connector | src/test/scala/nsmc/adhoc/Doit.scala | Scala | apache-2.0 | 667 |
object forceDelay {
  import scala.language.implicitConversions

  /** A memoizing suspension: the by-name argument is evaluated at most once,
    * on the first call to `apply()`, and the result is cached thereafter.
    */
  class Susp[+A](lazyValue: => A) extends Function0[A] {
    // Some(thunk) until forced; None once the value has been computed.
    private var pending: Option[() => Any] = Some(() => lazyValue)
    // Cached result; only meaningful once `pending` is None.
    private var cached: Any = null

    override def apply() = {
      pending match {
        case Some(thunk) =>
          cached = thunk()
          pending = None
        case None => ()
      }
      cached.asInstanceOf[A]
    }

    // "Susp(?)" while unforced, "Susp(<value>)" after forcing.
    override def toString() =
      pending match {
        case None    => "Susp(" + cached + ")"
        case Some(_) => "Susp(?)"
      }
  }

  /** Wraps an expression in a suspension without evaluating it. */
  def delay[A](value: => A) = new Susp[A](value)

  /** Implicitly forces a suspension wherever a plain value is expected. */
  implicit def force[A](s: Susp[A]): A = s()
}
/** Demonstrates lazy evaluation, memoization and implicit forcing of `Susp`. */
object Test {
  import forceDelay._

  def main(args: Array[String]) = {
    // The side-effecting print fires only on the first force of the suspension.
    val susp: Susp[Int] = delay { Console.println("evaluating..."); 3 }
    Console.println("s = " + susp)
    Console.println("s() = " + susp())
    Console.println("s = " + susp)
    // `2 + susp` type-checks via the implicit `force` conversion.
    Console.println("2 + s = " + (2 + susp))
  }
}
| yusuke2255/dotty | tests/run/t603.scala | Scala | bsd-3-clause | 853 |
/*
* Copyright 2001-2009 OFFIS, Tammo Freese
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatestexamples.easymock
import org.scalatestexamples._
import org.easymock.EasyMock._
import org.junit.Assert._
import java.util.ArrayList
import java.util.List
import org.easymock.IAnswer
import org.junit.Before
import org.junit.Test
import org.scalatest.verb.ShouldVerb
import org.scalatest.mock.EasyMockSugar
import org.scalatest.{BeforeAndAfterEach, FlatSpec}
import org.scalatest.fixture.FixtureFlatSpec
/**
 * Demonstrates EasyMock-based mocking inside a ScalaTest `FixtureFlatSpec`,
 * using `EasyMockSugar` helpers (`mock`, `expecting`, `whenExecuting`).
 * Each test receives a fresh `ClassTested` wired to a mock `Collaborator`.
 */
class EasyMockExampleFixtureFlatSpec extends FixtureFlatSpec with EasyMockSugar {
  // Bundles the object under test with its mock listener for each test.
  case class FixtureHolder(classUnderTest: ClassTested, mockCollaborator: Collaborator)
  type FixtureParam = FixtureHolder
  // Builds a fresh fixture (new mock + new ClassTested) and hands it to each test.
  def withFixture(test: OneArgTest) {
    val mockCollaborator = mock[Collaborator]
    val classUnderTest = new ClassTested()
    classUnderTest.addListener(mockCollaborator)
    test(FixtureHolder(classUnderTest, mockCollaborator))
  }
  // No expectations set before replay: any collaborator call would fail the test.
  "ClassTested" should "not call the collaborator when removing a non-existing document" in { fixture => import fixture._
    replay(mockCollaborator)
    classUnderTest.removeDocument("Does not exist")
  }
  it should "call documentAdded on the Collaborator when a new document is added" in { fixture => import fixture._
    expecting {
      mockCollaborator.documentAdded("New Document")
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("New Document", new Array[Byte](0))
    }
  }
  // NOTE(review): the first addDocument is expected to trigger documentAdded;
  // the three subsequent adds of the same name trigger documentChanged
  // (hence lastCall.times(3) against four addDocument invocations below).
  it should "call documentChanged on the Collaborator when a document is changed" in { fixture => import fixture._
    expecting {
      mockCollaborator.documentAdded("Document")
      mockCollaborator.documentChanged("Document")
      lastCall.times(3)
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("Document", new Array[Byte](0))
      classUnderTest.addDocument("Document", new Array[Byte](0))
      classUnderTest.addDocument("Document", new Array[Byte](0))
      classUnderTest.addDocument("Document", new Array[Byte](0))
    }
  }
  it should "call voteForRemoval on Collaborator when removeDocument is called on ClassTested, and " +
          "if a POSITIVE number is returned (i.e., a vote FOR removal), documentRemoved " +
          "should be called on Collaborator" in { fixture => import fixture._
    expecting {
      // expect document addition
      mockCollaborator.documentAdded("Document");
      // expect to be asked to vote, and vote for it
      mockCollaborator.voteForRemoval("Document").andReturn((42).asInstanceOf[Byte]);
      // expect document removal
      mockCollaborator.documentRemoved("Document");
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("Document", new Array[Byte](0));
      assert(classUnderTest.removeDocument("Document"))
    }
  }
  it should "call voteForRemoval on Collaborator when removeDocument is called on ClassTested, and " +
          "if a NEGATIVE number is returned (i.e., a vote AGAINST removal), documentRemoved " +
          "should NOT be called on Collaborator" in { fixture => import fixture._
    expecting {
      // expect document addition
      mockCollaborator.documentAdded("Document");
      // expect to be asked to vote, and vote against it
      mockCollaborator.voteForRemoval("Document").andReturn((-42).asInstanceOf[Byte]); //
      // document removal is *not* expected
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("Document", new Array[Byte](0));
      assert(!classUnderTest.removeDocument("Document"))
    }
  }
  it should "call voteForRemoval on Collaborator when removeDocument is called on ClassTested " +
          "to remove multiple documents, and if a POSITIVE number is returned (i.e., a vote " +
          "FOR removal), documentRemoved should be called on Collaborator" in { fixture => import fixture._
    expecting {
      mockCollaborator.documentAdded("Document 1");
      mockCollaborator.documentAdded("Document 2");
      val documents = Array("Document 1", "Document 2")
      // aryEq matches the array argument by content, not reference.
      mockCollaborator.voteForRemovals(aryEq(documents)).andReturn((42).asInstanceOf[Byte]);
      mockCollaborator.documentRemoved("Document 1");
      mockCollaborator.documentRemoved("Document 2");
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("Document 1", new Array[Byte](0));
      classUnderTest.addDocument("Document 2", new Array[Byte](0));
      assert(classUnderTest.removeDocuments(Array("Document 1",
        "Document 2")))
    }
  }
  it should "call voteForRemoval on Collaborator when removeDocument is called on ClassTested " +
          "to remove multiple documents, and if a NEGATIVE number is returned (i.e., a vote " +
          "AGAINST removal), documentRemoved should NOT be called on Collaborator" in { fixture => import fixture._
    expecting {
      mockCollaborator.documentAdded("Document 1");
      mockCollaborator.documentAdded("Document 2");
      val documents = Array("Document 1", "Document 2")
      mockCollaborator.voteForRemovals(aryEq(documents)).andReturn((-42).asInstanceOf[Byte]);
    }
    whenExecuting(mockCollaborator) {
      classUnderTest.addDocument("Document 1", new Array[Byte](0));
      classUnderTest.addDocument("Document 2", new Array[Byte](0));
      assert(!classUnderTest.removeDocuments(Array("Document 1",
        "Document 2")))
    }
  }
  // No fixture needed here: exercises two ways to stub a return value.
  "EasyMock" should "work with both andAnswer and andDelegateTo styles" in { () =>
    val list = mock[List[String]]
    expecting {
      // andAnswer style
      list.remove(10).andAnswer(new IAnswer[String]() {
        def answer(): String = {
          return getCurrentArguments()(0).toString();
        }
      });
      // andDelegateTo style
      list.remove(10).andDelegateTo(new ArrayList[String]() {
        // private static final long serialVersionUID = 1L;
        override def remove(index: Int): String = {
          return Integer.toString(index);
        }
      });
    }
    whenExecuting(list) {
      assert("10" === list.remove(10))
      assert("10" === list.remove(10))
    }
  }
}
| kevinwright/scalatest | src/examples/scala/org/scalatestexamples/easymock/EasyMockExampleFixtureFlatSpec.scala | Scala | apache-2.0 | 6,826 |
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.mllib.clustering.{KMeans, KMeansModel}
import org.apache.spark.mllib.linalg.Vectors
import org.apache.log4j.Logger
import org.apache.log4j.Level
/** Spark drivers computing death-record fractions by manner of death. */
object ByManner {

  /** Reads a single-line CSV header file and returns a map from column name
    * to its zero-based column index.
    *
    * The `Source` is closed in a `finally` block (the previous version
    * leaked the file handle).
    */
  def mapHeaderIndexes(headerFileName: String) : Map[String, Int] = {
    val source = scala.io.Source.fromFile(headerFileName)
    try {
      // http://daily-scala.blogspot.com/2010/05/zipwithindex.html
      source.mkString.split(",").zipWithIndex.toMap
    } finally {
      source.close()
    }
  }

  /** For each value of `category`, prints the fraction of all deaths whose
    * MannerOfDeath column contains `mannerKey`.
    */
  def fracByCategoryAndManner(headerFile:String, dataFile:String, category:String, mannerKey:String) {
    // Setup spark context
    val conf = new SparkConf().setAppName("suicideFracByKeyAndSex")
    val sc = new SparkContext(conf)
    val headerMap: Map[String, Int] = mapHeaderIndexes(headerFile)

    val splitData = sc.textFile(dataFile).map(line => line.split(',')).cache()
    println("created split data")

    // Total deaths per category value.
    val categoryDeathCounts = splitData
      .map(line => (line(headerMap(category)), 1))
      .reduceByKey((a, b) => a + b)
    println("reduced by key")

    // Deaths per category value restricted to the requested manner of death.
    val categoryMannerCounts = splitData
      .filter(line => line(headerMap("MannerOfDeath")).contains(mannerKey))
      .map(line => (line(headerMap(category)), 1))
      .reduceByKey((a, b) => a + b)

    val combinedDeathManner = categoryMannerCounts.join(categoryDeathCounts)
    println("Fraction MannerOfDeath by " + category)
    println("manner key=" + mannerKey + ", manner/total deaths")
    combinedDeathManner.collect().foreach{case (key, (manner, total)) =>
      println(key + ", " + manner.toDouble/total.toDouble)
    }
    println()
    // Stop the context (previously leaked; siblings already do this).
    sc.stop()
  }

  /** Among male deaths matching `mannerKey`, prints for each value of the
    * binning column `binCat` the fraction that also match `cat1Key` in
    * column `cat1`.
    */
  def filter1PerSex(headerFile:String, dataFile:String,
                    cat1:String, cat1Key:String,
                    binCat:String, mannerKey:String) {
    // Setup spark context
    val appName = "%s:%s : Manner:%s, Bin:%s".format(cat1, cat1Key, mannerKey, binCat)
    val conf = new SparkConf().setAppName(appName)
    val sc = new SparkContext(conf)

    println("opening file")
    val dataRDD = sc.textFile(dataFile)
    val headerMap: Map[String, Int] = mapHeaderIndexes(headerFile)
    println("read data")

    val splitData = dataRDD.map(line => line.split(',')).cache()
    println("created split data")

    // Men whose manner of death matches the key.
    val totalMen = splitData.filter{line =>
      line(headerMap("Sex")).contains("M") &&
      line(headerMap("MannerOfDeath")).contains(mannerKey)
    }
    // (bin value, count) over all matching men.
    val totalMenBinned = totalMen
      .map(line => (line(headerMap(binCat)), 1))
      .reduceByKey((a, b) => a + b)

    // Restrict further to men matching the category filter.
    val menFiltered = totalMen.filter(line => line(headerMap(cat1)).contains(cat1Key))
    println("filtered")
    val menFilteredCounts = menFiltered
      .map(line => (line(headerMap(binCat)), 1))
      .reduceByKey((a, b) => a + b)
    println("reduced by key")

    val menCombinedCounts = menFilteredCounts.join(totalMenBinned)
    println(appName)
    println("binning=" + binCat + ", filtered/total deaths")
    menCombinedCounts.collect().foreach{case (bin, (filtered, total)) =>
      println("%d, %f : %d %d".format(bin.toInt, filtered/total.toDouble, filtered, total))
    }
    println()
    sc.stop()
  }

  /** Prints, for each value of `binCat`, the number of male deaths matching
    * both category filters and the manner key.
    */
  def filter2PerSex(headerFile:String, dataFile:String,
                    cat1:String, cat1Key:String,
                    cat2:String, cat2Key:String,
                    binCat:String, mannerKey:String) {
    // Setup spark context
    val appName = "%s,%s : %s,%s : %s".format(cat1, cat1Key, cat2, cat2Key, binCat)
    val conf = new SparkConf().setAppName(appName)
    val sc = new SparkContext(conf)

    val dataRDD = sc.textFile(dataFile)
    val headerMap: Map[String, Int] = mapHeaderIndexes(headerFile)
    val splitData = dataRDD.map(line => line.split(',')).cache()

    // Single fused filter (previously: a separate Sex filter plus an unused
    // women RDD and an accidental self-join, all removed).
    val menFiltered = splitData.filter{line =>
      line(headerMap("Sex")).contains("M") &&
      line(headerMap(cat1)).contains(cat1Key) &&
      line(headerMap(cat2)).contains(cat2Key) &&
      line(headerMap("MannerOfDeath")).contains(mannerKey)
    }
    val menFilteredCounts = menFiltered
      .map(line => (line(headerMap(binCat)), 1))
      .reduceByKey((a, b) => a + b)

    println(appName)
    println("binning=" + binCat + ", bin/total deaths")
    menFilteredCounts.collect().foreach(line => println(line))
    println()
    sc.stop()
  }

  def main(args: Array[String]) {
    // Turn off obnoxious logging
    Logger.getLogger("org").setLevel(Level.OFF)
    Logger.getLogger("akka").setLevel(Level.OFF)

    val headerFile: String = args(1)
    val dataFile: String = args(0)
    filter1PerSex(headerFile, dataFile, "MaritalStatus", "D", "AgeRecode27","2")
    println("finished")
  }
}
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.tailhq.dynaml.tensorflow
import _root_.io.github.tailhq.dynaml.pipes.{DataPipe, MetaPipe}
import _root_.io.github.tailhq.dynaml.tensorflow.models.{
TFModel,
TunableTFModel
}
import io.github.tailhq.dynaml.tensorflow.layers.{
DynamicTimeStepCTRNN,
FiniteHorizonCTRNN,
FiniteHorizonLinear
}
import _root_.io.github.tailhq.dynaml.tensorflow.dynamics.PDESystem
import _root_.io.github.tailhq.dynaml.tensorflow.evaluation.{
Performance,
MAE,
MSE
}
import org.platanios.tensorflow.api.learn.{Mode, StopCriteria}
import org.platanios.tensorflow.api.learn.layers.{Compose, Input, Layer, Linear}
import org.platanios.tensorflow.api.ops.NN.SameConvPadding
import org.platanios.tensorflow.api.ops.data.Dataset
import org.platanios.tensorflow.api.ops.training.optimizers.Optimizer
import org.platanios.tensorflow.api._
import org.platanios.tensorflow.api.core.types.{
IsFloatOrDouble,
IsHalfOrFloatOrDouble,
IsNotQuantized,
TF
}
import org.platanios.tensorflow.api.implicits.helpers.{
OutputStructure,
OutputToDataType,
OutputToShape
}
import org.platanios.tensorflow.api.ops.Output
import org.platanios.tensorflow.api.ops.variables.{
Initializer,
RandomNormalInitializer
}
import org.platanios.tensorflow.api.learn.layers.Activation
private[tensorflow] object Learn {
  // Shorthand for a supervised data set of (input, target) tensor pairs.
  type TFDATA[D] = Dataset[(Tensor[D], Tensor[D])]
  type SupervisedModel[In, TrainIn, TrainOut, Out, Loss] =
    tf.learn.SupervisedTrainableModel[In, TrainIn, TrainOut, Out, Loss]
  type SupEstimatorTF[In, TrainIn, TrainOut, Out, Loss, EvalIn] =
    tf.learn.Estimator[In, (In, TrainIn), Out, TrainOut, Loss, EvalIn]
  // A supervised model paired with its estimator.
  type SupModelPair[In, TrainIn, TrainOut, Out, Loss, EvalIn] = (
    SupervisedModel[In, TrainIn, TrainOut, Out, Loss],
    SupEstimatorTF[In, TrainIn, TrainOut, Out, Loss, EvalIn]
  )
  type UnsupervisedModel[In, Out, Loss] =
    tf.learn.UnsupervisedTrainableModel[In, Out, Loss]
  type UnsupEstimatorTF[In, Out, Loss] =
    tf.learn.Estimator[In, In, Out, Out, Loss, Out]
  // An unsupervised model paired with its estimator.
  type UnsupModelPair[In, Out, Loss] =
    (UnsupervisedModel[In, Out, Loss], UnsupEstimatorTF[In, Out, Loss])
  // Aliases exposing DynaML layer implementations under short names.
  val Phi: layers.Phi.type = layers.Phi
  val Tanh: layers.Tanh.type = layers.Tanh
  val GeneralizedLogistic: layers.GeneralizedLogistic.type =
    layers.GeneralizedLogistic
  val batch_norm: tf.learn.BatchNormalization.type = tf.learn.BatchNormalization
  val ctrnn: layers.FiniteHorizonCTRNN.type = layers.FiniteHorizonCTRNN
  val dctrnn: layers.DynamicTimeStepCTRNN.type = layers.DynamicTimeStepCTRNN
  val ts_linear: layers.FiniteHorizonLinear.type = layers.FiniteHorizonLinear
  val rbf_layer: layers.RBFLayer.type = layers.RBFLayer
  // Combinators for merging/splitting layer outputs.
  val stack_outputs: layers.StackOutputs.type = layers.StackOutputs
  val concat_outputs: layers.ConcatenateOutputs.type = layers.ConcatenateOutputs
  val seq_layer: layers.SeqLayer.type = layers.SeqLayer
  val array_layer: layers.ArrayLayer.type = layers.ArrayLayer
  val map_layer: layers.MapLayer.type = layers.MapLayer
  val scoped_map_layer: layers.ScopedMapLayer.type = layers.ScopedMapLayer
  val combined_layer: layers.CombinedLayer.type = layers.CombinedLayer
  val combined_array_layer: layers.CombinedArrayLayer.type =
    layers.CombinedArrayLayer
  val unstack: layers.Unstack.type = layers.Unstack
  val identity: layers.IdentityLayer.type = layers.IdentityLayer
  val identity_act: layers.IdentityAct.type = layers.IdentityAct
  val bifurcation_layer: layers.BifurcationLayer.type = layers.BifurcationLayer
  val tuple2_layer: layers.Tuple2Layer.type = layers.Tuple2Layer
  val stack_tuple2: layers.StackTuple2.type = layers.StackTuple2
  val concat_tuple2: layers.ConcatenateTuple2.type = layers.ConcatenateTuple2
  val sum_tuple: layers.SumTuple.type = layers.SumTuple
  val sum_seq: layers.SumSeq.type = layers.SumSeq
  val mult_seq: layers.MultSeq.type = layers.MultSeq
  val multiply_const: layers.MultConstant.type = layers.MultConstant
  /** Stop criterion: halt after `n` training iterations (steps) have been reached.
    */
  def max_iter_stop(n: Long): StopCriteria =
    tf.learn.StopCriteria(maxSteps = Some(n))
  /** Stop criterion: halt after `n` passes (epochs) over the training data.
    */
  def max_epochs_stop(n: Long): StopCriteria =
    tf.learn.StopCriteria(maxEpochs = Some(n))
  /** Stop once the absolute change in the loss falls below `d`, or when
    * the iteration/epoch caps are reached (a cap of 0 disables that cap).
    */
  def abs_loss_change_stop(
    d: Double,
    max_iterations: Long,
    max_epochs: Long = 0L
  ): StopCriteria =
    tf.learn.StopCriteria(
      absLossChangeTol = Some(d),
      maxSteps = if (max_iterations > 0L) Some(max_iterations) else None,
      maxEpochs = if (max_epochs > 0L) Some(max_epochs) else None
    )
  /** Stop once the relative change in the loss falls below `d`, or when
    * the iteration/epoch caps are reached (a cap of 0 disables that cap).
    */
  def rel_loss_change_stop(
    d: Double,
    max_iterations: Long,
    max_epochs: Long = 0L
  ): StopCriteria =
    tf.learn.StopCriteria(
      relLossChangeTol = Some(d),
      maxSteps = if (max_iterations > 0L) Some(max_iterations) else None,
      maxEpochs = if (max_epochs > 0L) Some(max_epochs) else None
    )
  // Model-building and evaluation entry points re-exported for convenience.
  val model: TFModel.type = TFModel
  val tunable_tf_model: TunableTFModel.type = TunableTFModel
  val performance: Performance.type = Performance
  val mse: MSE.type = MSE
  val mae: MAE.type = MAE
  val pde_system: PDESystem.type = PDESystem
  /** A layer that ignores its input and always emits the fixed tensor `t`. */
  def constant[I, D: TF](name: String, t: Tensor[D]): Layer[I, Output[D]] =
    new Layer[I, Output[D]](name) {
      override val layerType: String = "Const"
      override def forwardWithoutContext(
        input: I
      )(
        implicit mode: Mode
      ): Output[D] = t
    }
  /** Lifts a mode-aware [[MetaPipe]] into a TensorFlow [[Layer]] of the same name. */
  def layer[I, J](name: String, pipe: MetaPipe[Mode, I, J]): Layer[I, J] =
    new Layer[I, J](name) {
      override val layerType: String = name
      override def forwardWithoutContext(input: I)(implicit mode: Mode): J =
        pipe(mode)(input)
    }
  /** Constructs a feed-forward layer.
   *
   * @param num_units The number of neurons in the layer.
   * @param useBias Set to true if bias unit is to be included.
   * @param weightsInitializer Initialization for the weights.
   * @param biasInitializer Initialization for the bias.
   * @param tag Name prefix combined with `id` (defaults to "Linear").
   * @param id A unique integer id for constructing the layer name.
   */
  def feedforward[T: TF: IsNotQuantized](
    num_units: Int,
    useBias: Boolean = true,
    weightsInitializer: Initializer = RandomNormalInitializer(),
    biasInitializer: Initializer = RandomNormalInitializer(),
    tag: String = "Linear"
  )(id: Int
  ): Linear[T] =
    tf.learn.Linear[T](
      name = s"${tag}_$id",
      num_units,
      useBias,
      weightsInitializer,
      biasInitializer
    )
def activation_generator[T: TF](
activations: Seq[String => Layer[Output[T], Output[T]]]
): Int => Layer[Output[T], Output[T]] =
(i: Int) => activations(i % activations.length)(s"Act_$i")
  /** Constructs a simple feed-forward stack of layers.
   *
   * @param get_act A function which given a layer index number,
   *                returns an activation function.
   *
   * @param layer_sizes A Sequence of layer sizes/dimensions/neuron counts.
   *
   * @param starting_index Specify which layer number should the indexing of
   *                       the layers start with, defaults to 1.
   * @param useBias Set to true if bias unit is to be included.
   * @param weightsInitializer Initialization for the weights.
   * @param biasInitializer Initialization for the bias.
   */
  def feedforward_stack[T: TF: IsNotQuantized](
    get_act: Int => Layer[Output[T], Output[T]]
  )(layer_sizes: Seq[Int],
    starting_index: Int = 1,
    useBias: Boolean = true,
    weightsInitializer: Initializer = RandomNormalInitializer(),
    biasInitializer: Initializer = RandomNormalInitializer(),
    tag: String = "Linear"
  ): Layer[Output[T], Output[T]] =
    layer_sizes
      .map(layer_size =>
        dtflearn.feedforward[T](
          layer_size,
          useBias,
          weightsInitializer,
          biasInitializer,
          tag
        ) _
      )
      .zipWithIndex
      .map(li =>
        // Hidden layers get an activation; the final layer stays linear.
        if (li._2 < layer_sizes.length - 1)
          li._1(starting_index + li._2) >> get_act(starting_index + li._2)
        else li._1(starting_index + li._2)
      )
      .reduceLeft(_ >> _)
  /** Constructs a symmetric (square) convolutional layer from the provided dimensions.
   *
   * [[org.platanios.tensorflow.api.ops.NN.SameConvPadding]] is used as the padding mode.
   *
   * @param size The size of each square filter e.g. 2*2, 3*3 etc
   * @param num_channels_input The number of channels in the input
   * @param num_filters The number of channels in the layer output
   * @param strides A [[Tuple2]] with strides, for each direction i.e. breadth and height.
   * @param index The layer id or index, helps in creating a unique layer name
   */
  def conv2d[T: TF: IsDecimal](
    size: Int,
    num_channels_input: Int,
    num_filters: Int,
    strides: (Int, Int)
  )(index: Int
  ) =
    tf.learn.Conv2D(
      "Conv2D_" + index,
      Shape(size, size, num_channels_input, num_filters),
      strides._1,
      strides._2,
      SameConvPadding
    )
  /** Constructs a convolutional layer activated by a ReLU, with
   * an option of appending a dropout layer.
   *
   * Pipeline with dropout:   Conv2D -> bias -> leaky ReLU -> dropout.
   * Pipeline without:        Conv2D -> batch norm -> leaky ReLU -> cast.
   */
  def conv2d_unit[T: TF: IsDecimal: IsHalfOrFloatOrDouble](
    shape: Shape,
    stride: (Int, Int) = (1, 1),
    relu_param: Float = 0.1f,
    dropout: Boolean = true,
    keep_prob: Float = 0.6f
  )(i: Int
  ): Compose[Output[T], Output[T], Output[T]] =
    if (dropout) {
      tf.learn.Conv2D(
        "Conv2D_" + i,
        shape,
        stride._1,
        stride._2,
        SameConvPadding
      ) >>
        tf.learn.AddBias(name = "Bias_" + i) >>
        tf.learn.ReLU("ReLU_" + i, relu_param) >>
        tf.learn.Dropout("Dropout_" + i, keep_prob)
    } else {
      tf.learn.Conv2D(
        "Conv2D_" + i,
        shape,
        stride._1,
        stride._2,
        SameConvPadding
      ) >>
        batch_norm(name = "BatchNorm_" + i) >>
        tf.learn.ReLU("ReLU_" + i, relu_param) >>
        tf.learn.Cast("Cast_" + i)
    }
  /** Constructs an inverted convolutional pyramid, consisting of
   * stacked versions of [Conv2d --> ReLU --> Dropout] layers.
   *
   * The number of filters learned in each Conv2d layer are
   * arranged in decreasing exponents of 2. They are constructed
   * using calls to [[conv2d_unit()]]
   *
   * ... Conv_unit(128) --> Conv_unit(64) --> Conv_unit(32) --> Conv_unit(16) ...
   *
   * @param size The size of the square convolutional filter to be applied
   *             in each segment.
   * @param num_channels_input The number of channels in the input.
   * @param start_num_bits The exponent of 2 which determines size/depth of the starting layer
   *                       e.g. set to 4 for a depth of 16.
   *
   * @param end_num_bits The exponent of 2 which determines the size/depth of the end layer.
   *
   * @param relu_param The activation barrier of the ReLU activation.
   *
   * @param dropout Set to true, if dropout layers should be placed in each convolutional unit.
   *                Set to false, and batch normalisation layers shall be placed after each convolutional unit.
   *
   * @param keep_prob If dropout is enabled, then this determines the retain probability.
   */
  def conv2d_pyramid[T: TF: IsDecimal: IsHalfOrFloatOrDouble](
    size: Int,
    num_channels_input: Int
  )(start_num_bits: Int,
    end_num_bits: Int
  )(relu_param: Float = 0.1f,
    dropout: Boolean = true,
    keep_prob: Float = 0.6f,
    starting_index: Int = 0
  ): Compose[Output[T], Output[T], Output[T]] = {
    require(
      start_num_bits > end_num_bits,
      "To construct a 2d-convolutional pyramid, you need to start_num_bits > end_num_bits"
    )
    //Create the first layer segment.
    val head_segment = conv2d_unit[T](
      Shape(size, size, num_channels_input, math.pow(2, start_num_bits).toInt),
      stride = (1, 1),
      relu_param,
      dropout,
      keep_prob
    )(starting_index)
    //Create the rest of the pyramid: depth halves (2^bits) while the
    //stride doubles with each successive segment.
    val tail_segments = (end_num_bits until start_num_bits).reverse.zipWithIndex
      .map(bitsAndIndices => {
        val (bits, index) = bitsAndIndices
        conv2d_unit[T](
          Shape(
            size,
            size,
            math.pow(2, bits + 1).toInt,
            math.pow(2, bits).toInt
          ),
          stride = (math.pow(2, index + 1).toInt, math.pow(2, index + 1).toInt),
          relu_param,
          dropout,
          keep_prob
        )(index + 1 + starting_index)
      })
      .reduceLeft((a, b) => a >> b)
    //Join head to tail.
    head_segment >> tail_segments
  }
  /** <h4>Inception Module</h4>
    *
    * Constructs an Inception v2 computational unit,
    * optionally with batch normalisation.
    *
    * Assumes input to be of shape Shape(?, height, width, channels)
    *
    * <b>Architecture Details</b>
    *
    * An Inception module consists of the following branches.
    *
    * <ol>
    *   <li>Convolution (1 &times; 1)</li>
    *   <li>Convolution (1 &times; 1) -> Convolution (3 &times; 3)</li>
    *   <li>Convolution (1 &times; 1) -> Convolution (5 &times; 5)</li>
    *   <li>Max Pooling (1 &times; 1) -> Convolution (1 &times; 1)</li>
    * </ol>
    *
    * After performing the operations above, the module performs depth-wise
    * concatenation of the results.
    *
    * <b>Implementation Notes</b>
    *
    * Each convolution is followed by a batch normalisation layer (if applicable)
    * followed by a Rectified Linear activation.
    *
    * @param channels The depth of the input.
    * @param num_filters The number of filters to learn in each branch of
    *                    the module, supplied as a sequence of integers.
    * @param activation_generator A DataPipe which takes a name/identifier as input
    *                             and returns an activation.
    * @param use_batch_norm If true, apply batch normalisation at the end
    *                       of each convolution.
    */
  def inception_unit[T: TF: IsDecimal](
    channels: Int,
    num_filters: Seq[Int],
    activation_generator: DataPipe[String, Activation[T]],
    use_batch_norm: Boolean = true
  )(layer_index: Int
  ): Layer[Output[T], Output[T]] = {
    require(
      num_filters.length == 4,
      s"Inception module has only 4 branches, but ${num_filters.length}" +
        s" were assigned while setting num_filters variable"
    )
    val name = s"Inception_$layer_index"
    // Post-convolution stage: optional batch norm, then the activation.
    def get_post_conv_layer(b_index: Int, l_index: Int) =
      if (use_batch_norm) {
        batch_norm(s"$name/B$b_index/BatchNorm_$l_index") >>
          activation_generator(s"$name/B$b_index/Act_$l_index")
      } else {
        activation_generator(s"$name/B$b_index/Act_$l_index")
      }
    // Branch 1: plain 1x1 convolution.
    val branch1 =
      tf.learn.Conv2D(
        s"$name/B1/Conv2D_1x1",
        Shape(1, 1, channels, num_filters.head),
        1,
        1,
        SameConvPadding
      ) >>
        get_post_conv_layer(1, 1)
    // Branch 2: 1x1 bottleneck, then a 3x3 conv factored into 1x3 and 3x1.
    val branch2 =
      tf.learn.Conv2D(
        s"$name/B2/Conv2D_1x1",
        Shape(1, 1, channels, num_filters(1)),
        1,
        1,
        SameConvPadding
      ) >>
        get_post_conv_layer(2, 1) >>
        tf.learn.Conv2D(
          s"$name/B2/Conv2D_1x3",
          Shape(1, 3, num_filters(1), num_filters(1)),
          1,
          1,
          SameConvPadding
        ) >>
        tf.learn.Conv2D(
          s"$name/B2/Conv2D_3x1",
          Shape(3, 1, num_filters(1), num_filters(1)),
          1,
          1,
          SameConvPadding
        ) >>
        get_post_conv_layer(2, 2)
    // Branch 3: 1x1 bottleneck, then a 5x5 receptive field built from two
    // stacked factored (1x3 + 3x1) convolutions.
    val branch3 =
      tf.learn.Conv2D(
        s"$name/B3/Conv2D_1x1",
        Shape(1, 1, channels, num_filters(2)),
        1,
        1,
        SameConvPadding
      ) >>
        get_post_conv_layer(3, 1) >>
        tf.learn.Conv2D(
          s"$name/B3/Conv2D_1x3_1",
          Shape(1, 3, num_filters(2), num_filters(2)),
          1,
          1,
          SameConvPadding
        ) >>
        tf.learn.Conv2D(
          s"$name/B3/Conv2D_3x1_1",
          Shape(3, 1, num_filters(2), num_filters(2)),
          1,
          1,
          SameConvPadding
        ) >>
        tf.learn.Conv2D(
          s"$name/B3/Conv2D_1x3_2",
          Shape(1, 3, num_filters(2), num_filters(2)),
          1,
          1,
          SameConvPadding
        ) >>
        tf.learn.Conv2D(
          s"$name/B3/Conv2D_3x1_2",
          Shape(3, 1, num_filters(2), num_filters(2)),
          1,
          1,
          SameConvPadding
        ) >>
        get_post_conv_layer(3, 2)
    // Branch 4: max pooling followed by a 1x1 convolution.
    val branch4 = tf.learn.MaxPool(
      s"$name/B4/MaxPool",
      Seq(1, 3, 3, 1),
      1,
      1,
      SameConvPadding
    ) >>
      tf.learn.Conv2D(
        s"$name/B4/Conv2D_1x1",
        Shape(1, 1, channels, num_filters(3)),
        1,
        1,
        SameConvPadding
      ) >>
      get_post_conv_layer(4, 1)
    val layers = Seq(
      branch1,
      branch2,
      branch3,
      branch4
    )
    // Run all branches on the same input, then concatenate along depth.
    combined_layer(name, layers) >> concat_outputs(name + "/DepthConcat", -1)
  }
/** Create a stack of Inception modules (See [[inception_unit()]] for more details).
*
* @param num_channels_image The depth, or number of colour channels in the image.
* @param num_filters Specifies the number of filters for each branch of every inception module.
* @param starting_index The starting index of the stack. The stack is named in a consecutive manner,
* i.e. Inception_i, Inception_i+1, ...
*/
def inception_stack[T: TF: IsDecimal](
num_channels_image: Int,
num_filters: Seq[Seq[Int]],
activation_generator: DataPipe[String, Activation[T]],
use_batch_norm: Boolean
)(starting_index: Int
): Layer[Output[T], Output[T]] = {
val head = inception_unit(
num_channels_image,
num_filters.head,
activation_generator
)(starting_index)
val tail_section = num_filters
.sliding(2)
.map(pair =>
inception_unit(
pair.head.sum,
pair.last,
activation_generator,
use_batch_norm
) _
)
.zipWithIndex
.map(layer_fn_index_pair => {
val (create_inception_layer, index) = layer_fn_index_pair
create_inception_layer(index + starting_index + 1)
})
.reduceLeft((l1, l2) => l1 >> l2)
head >> tail_section
}
/** Constructs a Continuous Time Recurrent Neural Network (CTRNN) Layer, consisting
* of some latent states, composed with a linear projection into the space of observables.
*
* @param observables The dimensionality of the output space.
* @param timestep The integration time step, if set to 0 or a negative
* value, create a [[DynamicTimeStepCTRNN]].
* @param horizon The number of steps in time to simulate the dynamical system
* @param index The layer index, should be unique.
*/
def ctrnn_block[T: TF: IsDecimal](
observables: Int,
horizon: Int,
timestep: Double = -1d
)(index: Int
): Layer[Output[T], Output[T]] =
if (timestep <= 0d) {
DynamicTimeStepCTRNN(s"DFHctrnn_$index", horizon) >>
FiniteHorizonLinear(s"FHlinear_$index", observables)
} else {
FiniteHorizonCTRNN(s"FHctrnn_$index", horizon, timestep) >>
FiniteHorizonLinear(s"FHlinear_$index", observables)
}
/** <h4>Supervised Learning</h4>
*
* Trains a supervised tensorflow model/estimator.
*
* @param architecture The network architecture,
* takes a value of type [[In]] and returns
* a value of type [[Out]].
* @param input The input meta data.
* @param target The output label meta data
* @param loss The loss function to be optimized during training.
* @param optimizer The optimization algorithm implementation.
* @param summariesDir A filesystem path of type [[java.nio.file.Path]], which
* determines where the intermediate model parameters/checkpoints
* will be written.
* @param stopCriteria The stopping criteria for training, for examples see
* [[max_iter_stop]], [[abs_loss_change_stop]] and [[rel_loss_change_stop]]
*
* @param stepRateFreq The frequency at which to log the step rate (expressed as number of iterations/sec).
* @param summarySaveFreq The frequency at which to log the loss summary.
* @param checkPointFreq The frequency at which to log the model parameters.
* @param training_data A training data set, as an instance of [[Dataset]].
* @param inMemory Set to true if the estimator should be in-memory.
*
* @return A [[Tuple2]] containing the model and estimator.
*
* @author tailhq
*/
def build_tf_model[
In: OutputStructure,
TrainIn: OutputStructure,
TrainOut,
Out: OutputStructure,
Loss: TF: IsFloatOrDouble,
EvalIn,
ID,
IS,
TD,
TS
](architecture: Layer[In, Out],
input: Input[In],
target: Input[TrainIn],
loss: Layer[(Out, TrainIn), Output[Loss]],
optimizer: Optimizer,
summariesDir: java.nio.file.Path,
stopCriteria: StopCriteria,
stepRateFreq: Int = 5000,
summarySaveFreq: Int = 5000,
checkPointFreq: Int = 5000
)(training_data: Dataset[(In, TrainIn)],
inMemory: Boolean = false
)(
implicit
evOutputToDataTypeIn: OutputToDataType.Aux[In, _],
evOutputToShapeIn: OutputToShape.Aux[In, _],
evOutputToDataTypeTrainIn: OutputToDataType.Aux[TrainIn, _],
evOutputToShapeTrainIn: OutputToShape.Aux[TrainIn, _],
evOutputToDataType: OutputToDataType.Aux[(In, TrainIn), (ID, TD)],
evOutputToShape: OutputToShape.Aux[(In, TrainIn), (IS, TS)]
): SupModelPair[In, TrainIn, Out, Out, Loss, (Out, (In, TrainIn))] = {
val model =
tf.learn.Model.simpleSupervised[In, TrainIn, Out, TrainOut, Loss](
input,
target,
architecture,
loss,
optimizer
)
println("\\nTraining model.\\n")
val estimator = if (inMemory) {
tf.learn.InMemoryEstimator(
model,
tf.learn.Configuration(Some(summariesDir)),
stopCriteria,
Set(
tf.learn.StepRateLogger(
log = false,
summaryDir = summariesDir,
trigger = tf.learn.StepHookTrigger(stepRateFreq)
),
tf.learn.SummarySaver(
summariesDir,
tf.learn.StepHookTrigger(summarySaveFreq)
),
tf.learn.CheckpointSaver(
summariesDir,
tf.learn.StepHookTrigger(checkPointFreq)
)
),
tensorBoardConfig = tf.learn
.TensorBoardConfig(summariesDir, reloadInterval = checkPointFreq)
)
} else {
tf.learn.FileBasedEstimator(
model,
tf.learn.Configuration(Some(summariesDir)),
stopCriteria,
Set(
tf.learn.StepRateLogger(
log = false,
summaryDir = summariesDir,
trigger = tf.learn.StepHookTrigger(stepRateFreq)
),
tf.learn.SummarySaver(
summariesDir,
tf.learn.StepHookTrigger(summarySaveFreq)
),
tf.learn.CheckpointSaver(
summariesDir,
tf.learn.StepHookTrigger(checkPointFreq)
)
),
tensorBoardConfig = tf.learn
.TensorBoardConfig(summariesDir, reloadInterval = checkPointFreq)
)
}
estimator.train(() => training_data)
(model, estimator)
}
/** <h4>Unsupervised Learning</h4>
*
* Trains an unsupervised tensorflow model/estimator.
*
* @param architecture The network architecture,
* takes a value of type [[In]] and returns
* a value of type [[Out]].
* @param input The input meta data.
* @param loss The loss function to be optimized during training.
* @param optimizer The optimization algorithm implementation.
* @param summariesDir A filesystem path of type [[java.nio.file.Path]], which
* determines where the intermediate model parameters/checkpoints
* will be written.
* @param stopCriteria The stopping criteria for training, for examples see
* [[max_iter_stop]], [[abs_loss_change_stop]] and [[rel_loss_change_stop]]
*
* @param stepRateFreq The frequency at which to log the step rate (expressed as number of iterations/sec).
* @param summarySaveFreq The frequency at which to log the loss summary.
* @param checkPointFreq The frequency at which to log the model parameters.
* @param training_data A training data set, as an instance of [[Dataset]].
* @param inMemory Set to true if the estimator should be in-memory.
*
* @return A [[Tuple2]] containing the model and estimator.
*
* @author tailhq
*/
def build_unsup_tf_model[
In: OutputStructure,
Out: OutputStructure,
Loss: TF: IsFloatOrDouble
](architecture: Layer[In, Out],
input: Input[In],
loss: Layer[(In, Out), Output[Loss]],
optimizer: Optimizer,
summariesDir: java.nio.file.Path,
stopCriteria: StopCriteria,
stepRateFreq: Int,
summarySaveFreq: Int,
checkPointFreq: Int
)(training_data: Dataset[In],
inMemory: Boolean
)(
implicit
evOutputToDataTypeIn: OutputToDataType.Aux[In, _],
evOutputToShapeIn: OutputToShape.Aux[In, _]
): UnsupModelPair[In, Out, Loss] = {
val (model, estimator): UnsupModelPair[In, Out, Loss] =
tf.createWith(graph = Graph()) {
val model =
tf.learn.Model.unsupervised(input, architecture, loss, optimizer)
println("\\nTraining model.\\n")
val estimator = if (inMemory) {
tf.learn.InMemoryEstimator(
model,
tf.learn.Configuration(Some(summariesDir)),
stopCriteria,
Set(
tf.learn.StepRateLogger(
log = false,
summaryDir = summariesDir,
trigger = tf.learn.StepHookTrigger(stepRateFreq)
),
tf.learn.SummarySaver(
summariesDir,
tf.learn.StepHookTrigger(summarySaveFreq)
),
tf.learn.CheckpointSaver(
summariesDir,
tf.learn.StepHookTrigger(checkPointFreq)
)
),
tensorBoardConfig = tf.learn
.TensorBoardConfig(summariesDir, reloadInterval = checkPointFreq)
)
} else {
tf.learn.FileBasedEstimator(
model,
tf.learn.Configuration(Some(summariesDir)),
stopCriteria,
Set(
tf.learn.StepRateLogger(
log = false,
summaryDir = summariesDir,
trigger = tf.learn.StepHookTrigger(stepRateFreq)
),
tf.learn.SummarySaver(
summariesDir,
tf.learn.StepHookTrigger(summarySaveFreq)
),
tf.learn.CheckpointSaver(
summariesDir,
tf.learn.StepHookTrigger(checkPointFreq)
)
),
tensorBoardConfig = tf.learn
.TensorBoardConfig(summariesDir, reloadInterval = checkPointFreq)
)
}
estimator.train(() => training_data)
(model, estimator)
}
(model, estimator)
}
}
| mandar2812/DynaML | dynaml-tensorflow/src/main/scala/io/github/tailhq/dynaml/tensorflow/Learn.scala | Scala | apache-2.0 | 28,677 |
package org.shapelogic.sc.imageprocessing
import org.shapelogic.sc.polygon.Calculator2D._
//import org.shapelogic.sc.logic.LetterTaskFactory
import org.shapelogic.sc.polygon.CPointInt
import org.shapelogic.sc.polygon.Calculator2D
import org.shapelogic.sc.util.Constants
import spire.implicits._
import org.shapelogic.sc.image.BufferImage
import org.shapelogic.sc.polygon.AnnotatedShapeImplementation
/**
* Vectorizer that is splitting lines based on max distance to line between end points.
* <br />
* <p>
* The main idea is that this will read a whole multi line at a time.
* Then later it will split it according to max distance of pixels to the line
* between start and end point of the multi line.
* </p> <p>
 * Maybe this could be completely abstracted out, but at that point I
 * will just take most of this class and turn it into a base class.
* </p> <p>
* Always stop on junctions, if there is one junction point use that, but stop after.
* N points are chosen last.
* Never go from one N point to another,
* unless that the N point is the first point, to handle an X, where you have 4
* N points in the center.
* If you are at a start point then just chose one direction.
* Can I delegate this to a different object. I always need to find all the
* neighbors first.
* I might have to know how many N points there are if there are more just
* add all to _unfinishedPoints.
* </p> <p>
* Treatment of different points:
* Junction: add to new point, move to first junction.
* N points: count, keep track of first.
* Other: count, keep track of first.
* Unused: count, keep track of first. I think that is already done.
* Used: count, keep track of first.
* </p> <p>
* For each junction add to unfinished. Go to first junction.
* If other points are available take first and go to it.
* If only N point is available, if current point an N and not the first point
* stop else go to that.
* </p> <p>
* When coming to a new point check if it is a junction if stop if not on
* first point. It does not matter if the start point is used or not.
* I think that at the end check to see if you can go to either a junction
* point or to the start point.
* Also stop if you do not know what to do, at the end of handleProblematicPoints().
* </p>
* @author Sami Badawi
*
*/
class MaxDistanceVectorizer(imageIn: BufferImage[Byte]) extends BaseVectorizer(imageIn) {
  override def verboseLogging = false
  //Top level so create annotation here
  lazy val annotatedShapeImplementation = new AnnotatedShapeImplementation(null)
  //This is problematic since ChainCodeHandler only handles one polygon not the multi polygon
  var _chainCodeHandler: ChainCodeHandler = null //new ChainCodeHandler(annotatedShapeImplementation)
  /**
   * Take a point off _unfinishedPoints and try to start a line from it;
   * if nothing can be started from a point it is removed.
   */
  override def findMultiLine(): Unit = {
    findFirstLinePoint(process = true) //XXX maybe move
    // Each iteration traces one multi-line. The loop only terminates through
    // the early return when findMultiLinePreProcess() finds no start point.
    do {
      val done = !findMultiLinePreProcess()
      if (done)
        return
      // Walk forward until findNextLinePoint() signals a stop (junction/end).
      var stop1 = false
      cfor(0)(_ => !stop1, _ + 1) { i =>
        if (!findNextLinePoint())
          stop1 = true
      }
      // If tracing began in the middle of a line, reverse and trace the
      // remaining half in the opposite direction.
      if (startedInTheMiddleTurnOpposite()) {
        var stop2 = false
        cfor(0)(_ => !stop2, _ + 1) { j =>
          if (!findNextLinePoint())
            stop2 = true
        }
      }
      findMultiLinePostProcess()
    } while (true)
  }
  /**
   * Detect whether tracing started in the middle of a line: the first point
   * has exactly 2 neighbors of which exactly 1 is still unused. If so, the
   * chain code collected so far is reversed and the current position is moved
   * back to the original start so the other half can be traced.
   *
   * @return true if the tracing direction was reversed
   */
  def startedInTheMiddleTurnOpposite(): Boolean = {
    var firstPointInMultiLineIndex: Int = this.pointToPixelIndex(_firstPointInMultiLine)
    var startPixelTypeCalculator: PixelTypeCalculator = findPointType(firstPointInMultiLineIndex, null)
    // NOTE(review): `color` is read but never used below — confirm before removing.
    var color: Byte = _pixels(firstPointInMultiLineIndex)
    if (startPixelTypeCalculator.neighbors != 2 || startPixelTypeCalculator.unusedNeighbors != 1)
      return false
    _chainCodeHandler.swapChainCodeInOppositeDirection()
    // Swap first and current point so tracing continues from the old start.
    var swapHolder: CPointInt = _firstPointInMultiLine.copy().asInstanceOf[CPointInt]
    _firstPointInMultiLine.setLocation(_currentPoint)
    _currentPoint.setLocation(swapHolder)
    _currentPixelIndex = firstPointInMultiLineIndex
    return true
  }
  /**
   * Determine the next point to move to from _currentPoint.
   *
   * If there is more than one candidate direction the current point is added
   * to _unfinishedPoints so it can be revisited later.
   * Assumes that _pixelTypeCalculator describes the current point.
   *
   * @return true if tracing can continue, false if this line ends here
   */
  def findNextLinePoint(): Boolean = {
    //If there is only one direction to go in then do it
    var newDirection: Byte = Constants.DIRECTION_NOT_USED
    findPointType(_currentPixelIndex, _pixelTypeCalculator)
    //Stop at any junction unless you are just starting
    if (PixelType.PIXEL_JUNCTION.equals(_pixelTypeCalculator.getPixelType())
      && _chainCodeHandler.getLastChain() > Constants.BEFORE_START_INDEX) {
      // addToUnfinishedPoints(_currentPoint.copy().asInstanceOf[CPointInt])
      newDirection = handleJunction()
      return false
    } else if (_pixelTypeCalculator.unusedNeighbors == 1) {
      // Exactly one unused neighbor: follow it.
      newDirection = _pixelTypeCalculator.firstUnusedNeighbor
    } else if (_pixelTypeCalculator.unusedNeighbors == 0) {
      newDirection = handleLastUnused()
    } else {
      // Several unused neighbors: let the tie-breaking logic decide; if it
      // cannot, remember this point for a later visit.
      newDirection = handleProblematicPoints()
      if (newDirection == Constants.DIRECTION_NOT_USED)
        addToUnfinishedPoints(_currentPoint.copy().asInstanceOf[CPointInt])
    }
    if (newDirection == Constants.DIRECTION_NOT_USED)
      return false
    _currentDirection = newDirection //XXX redundant
    moveCurrentPointForwards(_currentDirection)
    _chainCodeHandler.addChainCode(newDirection)
    return true
  }
  /**
   * Get here if there are no unused directions left
   *
   * Ways to go further:
   *
   * First point is 1 away
   * A junction is 1 away
   *
   * And you are not standing on the on the second pixel trying to go back
   */
  def handleLastUnused(): Byte = {
    var isEndPoint: Boolean = true
    var newDirection: Byte = Constants.DIRECTION_NOT_USED
    // First try to close the curve back onto the multi-line's start point.
    newDirection = directionBetweenNeighborPoints(_currentPoint, _firstPointInMultiLine)
    if (newDirection != Constants.DIRECTION_NOT_USED &&
      _chainCodeHandler.getLastChain() != Constants.START_INDEX)
      isEndPoint = false
    if (isEndPoint) {
      var pointHandle: NeighborChecker =
        new NeighborChecker(outputImage, _currentPixelIndex)
      pointHandle.checkNeighbors()
      //If you have taken more than 2 steps you can go back to any junction point
      //maybe this could be expanded
      //or I could put a constraint in that it cannot go back to the start point
      if (0 < pointHandle.junction.countUsed &&
        Constants.START_INDEX < _chainCodeHandler.getLastChain()) {
        // addToUnfinishedPoints(_currentPoint.copy().asInstanceOf[CPointInt])
        isEndPoint = false
        newDirection = pointHandle.junction.firstUsedDirection
      }
    }
    if (isEndPoint) {
      // Genuine line end: annotate it on the polygon.
      var endPoint: CPointInt = _currentPoint.copy().asInstanceOf[CPointInt]
      getPolygon().putAnnotation(endPoint, GeometricType.PIXEL_LINE_END)
    }
    return newDirection
  }
  /**
   * Handle arrival at a junction pixel.
   *
   * For a "false" junction (a V corner with extra neighbor points) a
   * continuation direction may be returned instead of stopping; otherwise the
   * junction is queued in _unfinishedPoints, annotated on the polygon and
   * DIRECTION_NOT_USED is returned to end the current line.
   */
  def handleJunction(): Byte = {
    var pointHandle: NeighborChecker =
      new NeighborChecker(outputImage, _currentPixelIndex)
    pointHandle.checkNeighbors()
    if (pointHandle.falseJunction()) {
      //Too complicated set an extra point unless at start
      if (pointHandle.vCornerPoint.count != 1 && pointHandle.extraNeighborPoint.count != 2
        && Constants.BEFORE_START_INDEX < _chainCodeHandler.getLastChain())
        return Constants.DIRECTION_NOT_USED
      var directionBackToPrevious: Byte = Calculator2D.oppositeDirection(_currentDirection)
      var comesFromVPoint: Boolean = pointHandle.vCornerPoint.firstDirection == directionBackToPrevious
      if (!comesFromVPoint)
        return pointHandle.vCornerPoint.firstDirection
      //Coming from the V point select the unused point with biggest
      //distance from V or distance that is not 90 degrees
      val directionToVPoint: Byte = pointHandle.vCornerPoint.firstDirection
      cfor(0)(_ < Constants.DIRECTIONS_AROUND_POINT, _ + 1) { iInt =>
        val i: Byte = iInt.toByte
        var pixelIndexI: Int = _currentPixelIndex + cyclePoints(i)
        var pixel: Byte = _pixels(pixelIndexI)
        if (PixelType.isUnused(pixel)) {
          // Skip directions perpendicular (90 degrees) to the V point.
          if (2 != Math.abs(Calculator2D.directionDifference(directionToVPoint, i))) {
            return i
          }
        }
      }
    }
    val junctionPoint: CPointInt = _currentPoint.copy().asInstanceOf[CPointInt]
    addToUnfinishedPoints(junctionPoint)
    getPolygon().putAnnotation(junctionPoint, GeometricType.PIXEL_JUNCTION)
    return Constants.DIRECTION_NOT_USED
  }
  /**
   * Junction: add to new point, move to first junction.
   * N points: count, keep track of first.
   * Other: count, keep track of first.
   * Used: count, keep track of first.
   *
   * Unused: count, keep track of first. This is done in the point finder.
   */
  override def handleProblematicPoints(): Byte = {
    var pointHandle: NeighborChecker =
      new NeighborChecker(outputImage, _currentPixelIndex)
    pointHandle.checkNeighbors()
    //XXX problematic with 2 points next to each other
    if (pointHandle.junction.count > 0 && Constants.START_INDEX != _chainCodeHandler.getLastChain() &&
      _currentDirection != BaseVectorizer.oppesiteDirection(pointHandle.junction.firstDirection)) {
      if (pointHandle.falseJunction())
        return pointHandle.vCornerPoint.firstDirection
      val pixelIndexI: Int = _currentPixelIndex + cyclePoints(pointHandle.junction.firstDirection)
      val pixel: Byte = _pixels(pixelIndexI)
      if (PixelType.isUnused(pixel))
        return pointHandle.junction.firstDirection
    }
    // Fall-back preference order: V corner, other, extra-neighbor, used.
    if (pointHandle.vCornerPoint.count > 0)
      return pointHandle.vCornerPoint.firstDirection
    else if (pointHandle.other.count > 0)
      return pointHandle.other.firstDirection
    else if (0 < pointHandle.extraNeighborPoint.count &&
      (_chainCodeHandler.getLastChain() <= 0 ||
        !PixelType.PIXEL_EXTRA_NEIGHBOR.equals(_pixelTypeCalculator.getPixelType())))
      return pointHandle.extraNeighborPoint.firstDirection
    else if (pointHandle.used.countUsed > 0)
      //Only works if at end of closed curve
      return directionBetweenNeighborPoints(_currentPoint, _firstPointInMultiLine)
    return Constants.DIRECTION_NOT_USED
  }
  /** Everything is always OK. Stop only on junctions and end points. */
  override def lastPixelOk(newDirection: Byte): Boolean = {
    return true
  }
  override def internalFactory(): Unit = {
    _pixelTypeFinder = new SimplePixelTypeFinder(outputImage)
    // _rulesArrayForLetterMatching = LetterTaskFactory.getSimpleNumericRuleForAllLetters(LetterTaskFactory.POLYGON)
  }
  /** Prepare a fresh ChainCodeHandler for the next multi-line before tracing. */
  override def findMultiLinePreProcess(): Boolean = {
    var result: Boolean = super.findMultiLinePreProcess()
    if (!result)
      return result
    _chainCodeHandler = new ChainCodeHandler(getPolygon().getAnnotatedShape())
    _chainCodeHandler.setup()
    _chainCodeHandler.setMultiLine(this.getPolygon().getCurrentMultiLine())
    _chainCodeHandler.setFirstPoint(_firstPointInMultiLine)
    return result
  }
  override def findMultiLinePostProcess(): Unit = {
    // Presumably finalizes the recorded chain code into the multi-line —
    // confirm against ChainCodeHandler.getValue().
    _chainCodeHandler.getValue()
    super.findMultiLinePostProcess()
  }
  /**
   * XXX Not sure about this
   */
  def init(): Unit = {
    // super.init()
    _chainCodeHandler = new ChainCodeHandler(getPolygon().getAnnotatedShape())
  }
}
object MaxDistanceVectorizer {

  /**
   * Run the vectorizer on a single-channel image and return its result image.
   *
   * If the input has more than one band it is returned unchanged after
   * printing a warning, since the vectorizer only works on binarised images.
   *
   * @param image single-band (thresholded) input image
   * @return the vectorizer's result image, or `image` itself when it has
   *         more than one band
   */
  def transform(image: BufferImage[Byte]): BufferImage[Byte] = {
    if (image.numBands != 1) {
      // Fixed typo in the user-facing message: "treshold" -> "threshold".
      println(s"Can only handle images with 1 channel run threshold first")
      return image
    }
    val maxDistanceVectorizer = new MaxDistanceVectorizer(image)
    maxDistanceVectorizer.findMultiLine()
    val points = maxDistanceVectorizer.getPoints()
    println(s"MaxDistanceVectorizer: points: $points")
    val polygon = maxDistanceVectorizer.getPolygon()
    println(s"MaxDistanceVectorizer: polygon: $polygon")
    maxDistanceVectorizer.result
  }
}
package com.twitter.finagle.kestrelx.unit
import _root_.java.net.{InetSocketAddress, SocketAddress}
import _root_.java.nio.charset.Charset
import _root_.java.util.concurrent.{BlockingDeque, ExecutorService, Executors, LinkedBlockingDeque}
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
import com.twitter.concurrent.{Broker, Spool}
import com.twitter.conversions.time._
import com.twitter.finagle.builder.{ClientBuilder, ClientConfig, Cluster}
import com.twitter.finagle.kestrelx._
import com.twitter.finagle.kestrelx.protocol.{Command, Response, Set}
import com.twitter.finagle.{Addr, ClientConnection, Service, ServiceFactory}
import com.twitter.io.Buf
import com.twitter.util._
import org.junit.runner.RunWith
import org.mockito.Mockito
import org.mockito.Mockito.{times, verify, when}
import org.scalatest.FunSuite
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
import scala.collection.immutable.{Set => ISet}
import scala.collection.mutable.{ArrayBuffer, Set => MSet}
@RunWith(classOf[JUnitRunner])
class MultiReaderTest extends FunSuite with MockitoSugar with Eventually with IntegrationPatience {
  /**
   * A [[ReadHandle]] test double whose message and error offers are driven
   * manually through brokers, so tests can push events synchronously.
   */
  class MockHandle extends ReadHandle {
    val _messages = new Broker[ReadMessage] // push test messages via `_messages ! msg`
    val _error = new Broker[Throwable] // push failures via `_error ! exn`
    val messages = _messages.recv
    val error = _error.recv
    def close() {} // no-op; exists so Mockito spies can verify close() calls
  }
  /**
   * Fixture for the static-cluster tests: N spied [[MockHandle]]s exposed
   * through an already-resolved `Var` of read handles.
   */
  trait MultiReaderHelper {
    val queueName = "the_queue"
    val queueNameBuf = Buf.Utf8(queueName)
    val N = 3
    // Spies so tests can verify close() propagation on each handle.
    val handles = (0 until N) map { _ => Mockito.spy(new MockHandle) }
    val va: Var[Return[ISet[ReadHandle]]] = Var.value(Return(handles.toSet))
  }
trait AddrClusterHelper {
val queueName = "the_queue"
val queueNameBuf = Buf.Utf8(queueName)
val N = 3
val hosts = 0 until N map { i =>
InetSocketAddress.createUnresolved("10.0.0.%d".format(i), 22133)
}
val executor = Executors.newCachedThreadPool()
def newKestrelService(
executor: Option[ExecutorService],
queues: LoadingCache[Buf, BlockingDeque[Buf]]
): Service[Command, Response] = {
val interpreter = new Interpreter(queues)
new Service[Command, Response] {
def apply(request: Command) = {
val promise = new Promise[Response]()
executor match {
case Some(exec) =>
exec.submit(new Runnable {
def run() {
promise.setValue(interpreter(request))
}
})
case None => promise.setValue(interpreter(request))
}
promise
}
}
}
val hostQueuesMap = hosts.map { host =>
val queues = CacheBuilder.newBuilder()
.build(new CacheLoader[Buf, BlockingDeque[Buf]] {
def load(k: Buf) = new LinkedBlockingDeque[Buf]
})
(host, queues)
}.toMap
lazy val mockClientBuilder = {
val result = mock[ClientBuilder[Command, Response, Nothing, ClientConfig.Yes, ClientConfig.Yes]]
hosts.foreach { host =>
val mockHostClientBuilder =
mock[ClientBuilder[Command, Response, ClientConfig.Yes, ClientConfig.Yes, ClientConfig.Yes]]
when(result.hosts(host)) thenReturn mockHostClientBuilder
val queues = hostQueuesMap(host)
val factory = new ServiceFactory[Command, Response] {
// use an executor so readReliably doesn't block waiting on an empty queue
def apply(conn: ClientConnection) =
Future.value(newKestrelService(Some(executor), queues))
def close(deadline: Time) = Future.Done
override def toString() = "ServiceFactory for %s".format(host)
}
when(mockHostClientBuilder.buildFactory()) thenReturn factory
}
result
}
val services = hosts.map { host =>
val queues = hostQueuesMap(host)
// no executor here: this one is used for writing to the queues
newKestrelService(None, queues)
}
def configureMessageReader(handle: ReadHandle): MSet[String] = {
val messages = MSet[String]()
val UTF8 = Charset.forName("UTF-8")
handle.messages foreach { msg =>
val Buf.Utf8(str) = msg.bytes
messages += str
msg.ack.sync()
}
messages
}
}
  /**
   * Fixture for the dynamic-cluster tests: like AddrClusterHelper but exposes
   * a [[Cluster]] whose membership can be mutated at runtime via add/del.
   */
  trait DynamicClusterHelper {
    /** A [[Cluster]] whose membership changes are published through a Spool. */
    class DynamicCluster[U](initial: Seq[U]) extends Cluster[U] {
      def this() = this(Seq[U]())
      var set = initial.toSet
      var s = new Promise[Spool[Cluster.Change[U]]]
      def add(f: U) = {
        set += f
        performChange(Cluster.Add(f))
      }
      def del(f: U) = {
        set -= f
        performChange(Cluster.Rem(f))
      }
      // Publish the change on the current spool tail and roll the tail forward.
      private[this] def performChange(change: Cluster.Change[U]) = synchronized {
        val newTail = new Promise[Spool[Cluster.Change[U]]]
        s() = Return(change *:: newTail)
        s = newTail
      }
      def snap = (set.toSeq, s)
    }
    val N = 3
    // Unresolved addresses; each is served by an in-process interpreter below.
    val hosts = 0 until N map { i => InetSocketAddress.createUnresolved("10.0.0.%d".format(i), 22133) }
    val executor = Executors.newCachedThreadPool()
    /**
     * Build a kestrel service over the given queue map. With an executor,
     * requests are answered asynchronously so empty-queue reads don't block.
     */
    def newKestrelService(
      executor: Option[ExecutorService],
      queues: LoadingCache[Buf, BlockingDeque[Buf]]
    ): Service[Command, Response] = {
      val interpreter = new Interpreter(queues)
      new Service[Command, Response] {
        def apply(request: Command) = {
          val promise = new Promise[Response]()
          executor match {
            case Some(exec) =>
              exec.submit(new Runnable {
                def run() {
                  promise.setValue(interpreter(request))
                }
              })
            case None => promise.setValue(interpreter(request))
          }
          promise
        }
      }
    }
    // One independent queue map per host.
    val hostQueuesMap = hosts.map { host =>
      val queues = CacheBuilder.newBuilder()
        .build(new CacheLoader[Buf, BlockingDeque[Buf]] {
          def load(k: Buf) = new LinkedBlockingDeque[Buf]
        })
      (host, queues)
    }.toMap
    lazy val mockClientBuilder = {
      val result = mock[ClientBuilder[Command, Response, Nothing, ClientConfig.Yes, ClientConfig.Yes]]
      hosts.foreach { host =>
        val mockHostClientBuilder =
          mock[ClientBuilder[Command, Response, ClientConfig.Yes, ClientConfig.Yes, ClientConfig.Yes]]
        when(result.hosts(host)) thenReturn mockHostClientBuilder
        val queues = hostQueuesMap(host)
        val factory = new ServiceFactory[Command, Response] {
          // use an executor so readReliably doesn't block waiting on an empty queue
          def apply(conn: ClientConnection) = Future(newKestrelService(Some(executor), queues))
          def close(deadline: Time) = Future.Done
          override def toString() = "ServiceFactory for %s".format(host)
        }
        when(mockHostClientBuilder.buildFactory()) thenReturn factory
      }
      result
    }
    val services = hosts.map { host =>
      val queues = hostQueuesMap(host)
      // no executor here: this one is used for writing to the queues
      newKestrelService(None, queues)
    }
    /** Drain the handle into a mutable set of decoded strings, acking each message. */
    def configureMessageReader(handle: ReadHandle): MSet[String] = {
      val messages = MSet[String]()
      handle.messages foreach { msg =>
        val Buf.Utf8(str) = msg.bytes
        messages += str
        msg.ack.sync()
      }
      messages
    }
  }
test("static ReadHandle cluster should always grab the first available message") {
new MultiReaderHelper {
val handle = MultiReaderHelper.merge(va)
val messages = new ArrayBuffer[ReadMessage]
handle.messages foreach { messages += _ }
// stripe some messages across
val sentMessages = 0 until N * 100 map { _ => mock[ReadMessage] }
assert(messages.size === 0)
sentMessages.zipWithIndex foreach { case (m, i) =>
handles(i % handles.size)._messages ! m
}
assert(messages === sentMessages)
}
}
  // Disabled: relies on an empirically-determined interleaving that no longer
  // holds; kept for documentation of the intended round-robin behavior.
  ignore("This test stopped working. " +
    "static ReadHandle cluster should round robin from multiple available queues") {
    // We use frozen time for deterministic randomness.
    // The message output order was simply determined empirically.
    new MultiReaderHelper {
      Time.withTimeAt(Time.epoch + 1.seconds) { _ =>
        // stuff the queues beforehand
        val ms = handles map { h =>
          val m = mock[ReadMessage]
          h._messages ! m
          m
        }
        val handle = MultiReaderHelper.merge(va)
        // Expected order (0, 2, 1) was observed, not derived.
        assert((handle.messages ??) === ms(0))
        assert((handle.messages ??) === ms(2))
        assert((handle.messages ??) === ms(1))
      }
    }
  }
test("static ReadHandle cluster should propagate closes") {
new MultiReaderHelper {
handles foreach { h => verify(h, times(0)).close() }
val handle = MultiReaderHelper.merge(va)
handle.close()
handles foreach { h => verify(h).close() }
}
}
test("static ReadHandle cluster should propagate errors when everything's errored out") {
new MultiReaderHelper {
val handle = MultiReaderHelper.merge(va)
val e = handle.error.sync()
handles foreach { h =>
assert(e.isDefined === false)
h._error ! new Exception("sad panda")
}
assert(e.isDefined === true)
assert(Await.result(e) === AllHandlesDiedException)
}
}
  test("Var[Addr]-based cluster should read messages from a ready cluster") {
    new AddrClusterHelper {
      val va = Var(Addr.Bound(hosts: _*))
      val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      // Write round-robin across all hosts' in-process kestrel services.
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(queueNameBuf, Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
    }
  }
  test("Var[Addr]-based cluster should read messages as cluster hosts are added") {
    new AddrClusterHelper {
      // Start with a single host, then grow the cluster to all N hosts.
      val va = Var(Addr.Bound(hosts.head))
      val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(queueNameBuf, Time.now, Buf.Utf8(m))))
      }
      // 0, 3, 6 ...
      eventually {
        assert(messages === sentMessages.grouped(N).map { _.head }.toSet)
      }
      messages.clear()
      va.update(Addr.Bound(hosts: _*))
      // 1, 2, 4, 5, ...
      eventually {
        assert(messages === sentMessages.grouped(N).map { _.tail }.flatten.toSet)
      }
    }
  }
  test("Var[Addr]-based cluster should read messages as cluster hosts are removed") {
    new AddrClusterHelper {
      var mutableHosts: Seq[SocketAddress] = hosts
      val va = Var(Addr.Bound(mutableHosts: _*))
      val rest = hosts.tail.reverse
      val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(queueNameBuf, Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
      // Drop hosts one by one; reads should continue from the remaining ones.
      rest.zipWithIndex.foreach { case (host, hostIndex) =>
        messages.clear()
        mutableHosts = (mutableHosts.toSet - host).toSeq
        va.update(Addr.Bound(mutableHosts: _*))
        // write to all 3
        sentMessages.zipWithIndex foreach { case (m, i) =>
          Await.result(services(i % services.size).apply(Set(queueNameBuf, Time.now, Buf.Utf8(m))))
        }
        // expect fewer to be read on each pass
        val expectFirstN = N - hostIndex - 1
        eventually {
          assert(messages === sentMessages.grouped(N).map { _.take(expectFirstN) }.flatten.toSet)
        }
      }
    }
  }
  test("Var[Addr]-based cluster should wait for cluster to become ready before snapping initial hosts") {
    new AddrClusterHelper {
      // Empty bound address: the reader must not consume until hosts appear.
      val va = Var(Addr.Bound())
      val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val error = handle.error.sync()
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(queueNameBuf, Time.now, Buf.Utf8(m))))
      }
      assert(messages.size === 0) // cluster not ready
      assert(error.isDefined === false)
      va.update(Addr.Bound(hosts: _*))
      eventually {
        assert(messages === sentMessages.toSet)
      }
    }
  }
test("Var[Addr]-based cluster should report an error if all hosts are removed") {
new AddrClusterHelper {
val va = Var(Addr.Bound(hosts: _*))
val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
val error = handle.error.sync()
va.update(Addr.Bound())
assert(error.isDefined === true)
assert(Await.result(error) === AllHandlesDiedException)
}
}
test("Var[Addr]-based cluster should propagate exception if cluster fails") {
new AddrClusterHelper {
val ex = new Exception("uh oh")
val va: Var[Addr] with Updatable[Addr] = Var(Addr.Bound(hosts: _*))
val handle = MultiReader(va, queueName).clientBuilder(mockClientBuilder).build()
val error = handle.error.sync()
va.update(Addr.Failed(ex))
assert(error.isDefined === true)
assert(Await.result(error) === ex)
}
}
  // Mirrors the Var[Addr] suite above, driving membership through a
  // DynamicCluster[SocketAddress] instead of a Var.
  test("dynamic SocketAddress cluster should read messages from a ready cluster") {
    new DynamicClusterHelper {
      val cluster = new DynamicCluster[SocketAddress](hosts)
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
    }
  }
  // Start from one host, then cluster.add the rest: the reader picks up the
  // messages written to the newly added hosts.
  test("dynamic SocketAddress cluster should read messages as cluster hosts are added") {
    new DynamicClusterHelper {
      val (host, rest) = (hosts.head, hosts.tail)
      val cluster = new DynamicCluster[SocketAddress](List(host))
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      // 0, 3, 6 ...
      eventually {
        assert(messages === sentMessages.grouped(N).map { _.head }.toSet)
      }
      messages.clear()
      rest.foreach { host => cluster.add(host) }
      // 1, 2, 4, 5, ...
      eventually {
        assert(messages === sentMessages.grouped(N).map { _.tail }.flatten.toSet)
      }
    }
  }
  // Removing hosts one by one shrinks the per-pass read set accordingly.
  test("dynamic SocketAddress cluster should read messages as cluster hosts are removed") {
    new DynamicClusterHelper {
      val cluster = new DynamicCluster[SocketAddress](hosts)
      val rest = hosts.tail
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
      rest.reverse.zipWithIndex.foreach { case (host, hostIndex) =>
        messages.clear()
        cluster.del(host)
        // write to all 3
        sentMessages.zipWithIndex foreach { case (m, i) =>
          Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
        }
        // expect fewer to be read on each pass
        val expectFirstN = N - hostIndex - 1
        eventually {
          assert(messages === sentMessages.grouped(N).map { _.take(expectFirstN) }.flatten.toSet)
        }
      }
    }
  }
  // An initially empty cluster must be waited on, not snapped as empty.
  test("dynamic SocketAddress cluster should wait " +
    "for cluster to become ready before snapping initial hosts") {
    new DynamicClusterHelper {
      val cluster = new DynamicCluster[SocketAddress](Seq())
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val errors = (handle.error ?)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      assert(messages.size === 0)
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      assert(messages.size === 0) // cluster not ready
      assert(errors.isDefined === false)
      hosts.foreach { host => cluster.add(host) }
      eventually {
        assert(messages === sentMessages.toSet)
      }
    }
  }
  // Deleting every host surfaces AllHandlesDiedException on the handle.
  test("dynamic SocketAddress cluster should report an error if all hosts are removed") {
    new DynamicClusterHelper {
      val cluster = new DynamicCluster[SocketAddress](hosts)
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val e = (handle.error ?)
      hosts.foreach { host => cluster.del(host) }
      assert(e.isDefined === true)
      assert(Await.result(e) === AllHandlesDiedException)
    }
  }
  // del() of an unknown address is a no-op: reading continues undisturbed.
  test("dynamic SocketAddress cluster should silently" +
    " handle the removal of a host that was never added") {
    new DynamicClusterHelper {
      val cluster = new DynamicCluster[SocketAddress](hosts)
      val handle = MultiReader(cluster, "the_queue").clientBuilder(mockClientBuilder).build()
      val messages = configureMessageReader(handle)
      val sentMessages = 0 until N * 10 map { i => "message %d".format(i) }
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
      messages.clear()
      cluster.del(InetSocketAddress.createUnresolved("10.0.0.100", 22133))
      sentMessages.zipWithIndex foreach { case (m, i) =>
        Await.result(services(i % services.size).apply(Set(Buf.Utf8("the_queue"), Time.now, Buf.Utf8(m))))
      }
      eventually {
        assert(messages === sentMessages.toSet)
      }
    }
  }
}
| yancl/finagle-6.22.0 | finagle-kestrelx/src/test/scala/com/twitter/finagle/kestrelx/unit/MultiReaderTest.scala | Scala | apache-2.0 | 19,430 |
package net.randallalexander.restaurant.chooser.model
// Minimal enumeration support. Members are expected to be case classes/objects
// (hence `extends Product`), whose productPrefix is the declared name.
trait Enum extends Product {
  // The member's name; defaults to the case object/class constructor name.
  def name:String = productPrefix
}
/** Companion-style operations over an [[Enum]] family. */
trait EnumOps[T<:Enum] {
  /** All declared members of the enumeration. */
  def values: Seq[T]

  /** Looks up a member by name; `None` when no member matches. */
  def toEnum(name: String): Option[T] = values.find(_.name == name)

  /** Unsafe lookup. Throws `NoSuchElementException` for unknown names — the
    * same failure type as the previous bare `Option.get`, but now with a
    * message that names the missing value instead of an opaque "None.get".
    */
  def toEnumYOLO(name: String): T =
    toEnum(name).getOrElse(
      throw new NoSuchElementException(s"No enum value named '$name'"))
}
// Marker hierarchies splitting the generic Enum machinery into two families:
// enumerations persisted in the database and enumerations exposed on the API.
trait DatabaseEnum extends Enum
trait DatabaseEnumOps[T <: DatabaseEnum] extends EnumOps[T]

trait ApiEnum extends Enum
trait ApiEnumOps[T <: ApiEnum] extends EnumOps[T]
package freecli
package option
package help
import cats.Monoid
import cats.implicits._
import core.formatting._
import option.api._
import printer.{Printer, PrinterParts}
// Help entry for a single command-line option or a nested group of options.
sealed trait OptionHelp
// One option row: field metadata, optional default value, and whether it is mandatory.
case class SingleOptionHelp(field: OptionField, default: Option[String], required: Boolean)
  extends OptionHelp
// A titled sub-group that holds further option help entries.
case class SubOptionHelp(description: String, options: OptionsHelp)
  extends OptionHelp
case class OptionsHelp(list: List[OptionHelp]) {
  // Renders every entry into PrinterParts: a plain option becomes one aligned
  // row (name, default/required marker, description); a sub-group gets a
  // heading row followed by its recursively rendered rows, flattened in.
  def result: PrinterParts = {
    list.traverse {
      case SingleOptionHelp(field, default, required) =>
        for {
          _ <- Printer.row
          _ <- Printer.col(field.shortDescription.yellow)
          // Prefer showing the default value; fall back to "required" (or nothing).
          modifiers = default.fold(if (required) "required".bold else "")(_.bold)
          _ <- Printer.col(modifiers)
          _ <- Printer.optCol(field.description.map(_.value))
          _ <- Printer.endRow
        } yield ()
      case SubOptionHelp(description, options) =>
        for {
          _ <- Printer.row
          _ <- Printer.col(description)
          _ <- Printer.endRow
          _ <- Printer.addFlat(options.result)
        } yield ()
    }.run
  }
}
object OptionsHelp {
  /** Wraps a single help entry into a one-element collection. */
  def single(o: OptionHelp): OptionsHelp = OptionsHelp(o :: Nil)

  /** Concatenation monoid over the underlying help lists. */
  implicit object monoidInstance extends Monoid[OptionsHelp] {
    def empty: OptionsHelp = OptionsHelp(Nil)
    def combine(x: OptionsHelp, y: OptionsHelp): OptionsHelp =
      OptionsHelp(x.list ::: y.list)
  }
}
| pavlosgi/freecli | core/src/main/scala/freecli/option/help/OptionsHelp.scala | Scala | apache-2.0 | 1,438 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testing.interface
import scala.concurrent.{Future, Promise}
import scala.util.control.NonFatal
import scala.util.Try
import org.scalajs.testing.common._
import sbt.testing._
// Bridges sbt's test interface over the JVM<->JS RPC channel: the JVM side
// asks this object to detect frameworks, create runners, execute tasks and
// tear runs down; events and log lines flow back through `mux`.
private[interface] object TestAdapterBridge {
  // Multiplexes per-run messages (keyed by runID) over the shared JS RPC.
  private[this] val mux = new RunMuxRPC(JSRPC)
  // Registers the run-independent RPC entry points.
  def start(): Unit = {
    import JSEndpoints._
    JSRPC.attach(detectFrameworks)(detectFrameworksFun)
    JSRPC.attach(createMasterRunner)(createRunnerFun(isMaster = true))
    JSRPC.attach(createSlaveRunner)(createRunnerFun(isMaster = false))
  }
  // For each candidate name list, loads the first resolvable framework and
  // reports its name and fingerprints (None when nothing resolves).
  private def detectFrameworksFun = { names: List[List[String]] =>
    FrameworkLoader.detectFrameworkNames(names).map { maybeName =>
      maybeName.map { name =>
        val framework = FrameworkLoader.loadFramework(name)
        new FrameworkInfo(name, framework.name, framework.fingerprints.toList)
      }
    }
  }
  // Creates a master or slave Runner for the given args and attaches the
  // per-run RPC commands (tasks/execute/done plus the messaging direction
  // appropriate for the role).
  private def createRunnerFun(isMaster: Boolean) = { args: RunnerArgs =>
    val framework = FrameworkLoader.loadFramework(args.frameworkImpl)
    val loader = new ScalaJSClassLoader()
    val runID = args.runID
    val runner = {
      if (isMaster) {
        framework.runner(args.args.toArray, args.remoteArgs.toArray, loader)
      } else {
        // Slaves forward their outbound messages to the master via the JVM.
        framework.slaveRunner(args.args.toArray, args.remoteArgs.toArray, loader,
            mux.send(JVMEndpoints.msgSlave, runID))
      }
    }
    mux.attach(JSEndpoints.tasks, runID)(tasksFun(runner))
    mux.attachAsync(JSEndpoints.execute, runID)(executeFun(runID, runner))
    mux.attach(JSEndpoints.done, runID)(doneFun(runID, runner, isMaster))
    if (isMaster) {
      mux.attach(JSEndpoints.msgMaster, runID)(msgMasterFun(runID, runner))
    } else {
      mux.attach(JSEndpoints.msgSlave, runID)(runner.receiveMessage _)
    }
  }
  // Inverse of the attachments made in createRunnerFun; called once a run is done.
  private def detachRunnerCommands(runID: RunMux.RunID, isMaster: Boolean) = {
    mux.detach(JSEndpoints.tasks, runID)
    mux.detach(JSEndpoints.execute, runID)
    mux.detach(JSEndpoints.done, runID)
    if (isMaster)
      mux.detach(JSEndpoints.msgMaster, runID)
    else
      mux.detach(JSEndpoints.msgSlave, runID)
  }
  // Expands TaskDefs into serializable TaskInfos (tasks stay registered on `runner`).
  private def tasksFun(runner: Runner) = { taskDefs: List[TaskDef] =>
    val tasks = runner.tasks(taskDefs.toArray)
    tasks.map(TaskInfoBuilder.detachTask(_, runner)).toList
  }
  // Executes one task asynchronously; completes the future with the follow-up
  // tasks, or fails it if execution throws before the continuation runs.
  private def executeFun(runID: RunMux.RunID, runner: Runner) = { req: ExecuteRequest =>
    val task = TaskInfoBuilder.attachTask(req.taskInfo, runner)
    val eventHandler = new RemoteEventHandler(runID)
    val loggers = for {
      (withColor, i) <- req.loggerColorSupport.zipWithIndex
    } yield new RemoteLogger(runID, i, withColor)
    val promise = Promise[List[TaskInfo]]
    def cont(tasks: Array[Task]) = {
      val result = Try(tasks.map(TaskInfoBuilder.detachTask(_, runner)).toList)
      promise.complete(result)
    }
    try {
      task.execute(eventHandler, loggers.toArray, cont)
    } catch {
      case NonFatal(t) =>
        promise.tryFailure(t)
    }
    promise.future
  }
  // Finishes the run; the RPC endpoints are detached even if done() throws.
  private def doneFun(runID: RunMux.RunID, runner: Runner, isMaster: Boolean) = { _: Unit =>
    try runner.done()
    finally detachRunnerCommands(runID, isMaster)
  }
  // Delivers a slave's message to the master runner and relays any reply back.
  private def msgMasterFun(runID: RunMux.RunID, runner: Runner) = { msg: FrameworkMessage =>
    for (reply <- runner.receiveMessage(msg.msg)) {
      val fm = new FrameworkMessage(msg.slaveId, reply)
      mux.send(JVMEndpoints.msgMaster, runID)(fm)
    }
  }
  // Forwards sbt test events for this run to the JVM side.
  private class RemoteEventHandler(runID: RunMux.RunID) extends EventHandler {
    def handle(event: Event): Unit = mux.send(JVMEndpoints.event, runID)(event)
  }
  // Forwards log output to the JVM side, tagged with this logger's index so the
  // JVM can route it to the matching sbt logger.
  private class RemoteLogger(runID: RunMux.RunID, index: Int,
      val ansiCodesSupported: Boolean) extends Logger {
    import JVMEndpoints._
    private def l[T](x: T) = new LogElement(index, x)
    def error(msg: String): Unit = mux.send(logError, runID)(l(msg))
    def warn(msg: String): Unit = mux.send(logWarn, runID)(l(msg))
    def info(msg: String): Unit = mux.send(logInfo, runID)(l(msg))
    def debug(msg: String): Unit = mux.send(logDebug, runID)(l(msg))
    def trace(t: Throwable): Unit = mux.send(logTrace, runID)(l(t))
  }
}
| nicolasstucki/scala-js | test-interface/src/main/scala/org/scalajs/testing/interface/TestAdapterBridge.scala | Scala | apache-2.0 | 4,400 |
package org.crudible.core.model
case class ModelWithText(val model: ModelWithIdentity, val text: String) | rehei/crudible | crudible-core/src/main/scala/org/crudible/core/model/ModelWithText.scala | Scala | apache-2.0 | 105 |
package com.tajpure.scheme.compiler
import com.tajpure.scheme.compiler.parser.Parser
import com.tajpure.scheme.compiler.value.VoidValue
object Interpreter {

  /** Parses and evaluates `_source` in a fresh top-level scope.
   *  Complains on null/empty input; the evaluation result is discarded.
   */
  def interp(_source: String): Unit =
    if (_source == null || _source.isEmpty)
      println("input can't be empty")
    else
      Parser.parse(_source, "REPL").interp(Scope.buildInitScope())

  /** Parses and evaluates `_source` in the given scope `s`.
   *  Returns the evaluation result, or an error string on null/empty input.
   */
  def interp(_source: String, s: Scope) =
    if (_source == null || _source.isEmpty)
      "input can't be empty"
    else
      Parser.parse(_source, "REPL").interp(s)

  /** Parses and evaluates the program file at `_path` in a fresh scope. */
  def interp0(_path: String): Unit =
    Parser.parse(_path).interp(Scope.buildInitScope())

  /** Smoke test: evaluates each bundled sample program. */
  def test() {
    val samples = Seq(
      "./src/test/resources/scheme/hello.scm",
      "./src/test/resources/scheme/fact.scm",
      "./src/test/resources/scheme/fib.scm",
      "./src/test/resources/scheme/procedure.scm",
      "./src/test/resources/scheme/if.scm",
      "./src/test/resources/scheme/insertsort.scm",
      "./src/test/resources/scheme/quicksort.scm")
    samples.foreach(interp0)
  }

  /** Interactive read-eval-print loop over stdin. Lines are accumulated until
   *  the parentheses balance, then evaluated; the scope is threaded forward so
   *  definitions survive across inputs.
   */
  def repl(): Unit = {
    println("So Scheme version 0.1")
    print(">")
    var scope = Scope.buildInitScope()
    val pending = new StringBuffer
    io.Source.stdin.getLines.foreach { line =>
      isExitd(line)
      pending.append(line)
      if (isInputFinished(pending.toString)) {
        try {
          println(interp(pending.toString, scope))
          scope = scope.innerScope
        } catch {
          case e: Exception => println(e.getMessage)
        }
        pending.setLength(0)
        print(">")
      }
    }
  }

  /** An input is considered complete once '(' and ')' occur equally often.
   *  (Parentheses inside strings or comments are not treated specially.)
   */
  def isInputFinished(input: String): Boolean =
    input.count(_ == '(') == input.count(_ == ')')

  /** Terminates the process when the user types the literal form "(exit)". */
  def isExitd(input: String): Unit =
    if ("(exit)".equals(input)) System.exit(0)
}
/*
* ScalaQCMS -- Scala Quantum Circuit Model Simulator
*
* Copyright (c) 2012 Antti Vikman
*/
package models
import scalala.scalar.Complex
import scalala.library.LinearAlgebra._
import scalala.tensor.Matrix
/**
 * Enumeration of mathematical operators available between equation operands:
 * ordinary matrix multiplication and the Kronecker (tensor) product.
 */
object EquationOperator extends Enumeration {
  type EquationOperator = Value
  val Multiply, Kronecker = Value
}
/**
 * EquationStatistics is a collection of data for theoretical complexity analysis of the equation.
 * Useful e.g. for pre-processing policies that compare candidate equation shapes.
 *
 * @param sumOperations number of arbitrary sum operations in the equation
 * @param multiplyOperations number of arbitrary multiply operations in the equation
 * @param memoryPeak peak memory requirement during solving (bytes)
 * @param memoryResult memory requirement of the result (bytes)
 * @param depth the depth of the equation (levels of dependencies; the higher the worse for parallelizing)
 */
case class EquationStatistics(sumOperations: Long,
                              multiplyOperations: Long,
                              memoryPeak: Long,
                              memoryResult: Long,
                              depth: Int)
/**
 * EquationEntity is an abstract class that works as the unified interface for
 * Equation (inner nodes) and EquationSymbol (leaves) of the equation tree.
 * @todo Looks like this could be refactored to a trait
 */
abstract class EquationEntity {
  /** Rows in the result matrix
   * @return number of rows in the result matrix
   */
  def rows: Int
  /** Columns in the result matrix
   * @return number of columns in the result matrix
   */
  def cols: Int
  /** Cells in the result matrix
   * @return number of cells in the result matrix
   */
  def size: Int
  /** The result matrix
   * @return the result matrix
   */
  def getResult: Matrix[Complex]
  /** Statistics about the equation. Especially useful for PreProcessing policies to determine
   * the effectiveness of selected pre-processing algorithm
   * @return statistics about this equation (tree)
   */
  def getStatistics: EquationStatistics
  /** Human readable presentation of the EquationEntity
   * @return String presentation of the EquationEntity
   */
  override def toString: String
}
import EquationOperator._
/**
 * Equation is an intermediate node of the "Equation-tree". It contains two
 * components a (left) and b (right), an op that defines the operation that
 * shall be performed on a and b, and a result, which is set when the equation
 * is solved.
 * @param a left side of the equation
 * @param b right side of the equation
 * @param op arithmetic operator of the operation that shall be performed for a and b
 */
class Equation(a: EquationEntity, b: EquationEntity, op: EquationOperator) extends EquationEntity {

  // Matrix multiplication is only defined when the inner dimensions agree.
  if(op == EquationOperator.Multiply)
    if(a.cols != b.rows)
      throw new IllegalArgumentException("Can NOT multiply " + a.rows + "x" + a.cols + "-matrix" +
                                         " with " + b.rows + "x" + b.cols + "-matrix")

  // Operands and operator are mutable so pre-processing can rewrite the tree.
  private var _a = a
  private var _b = b
  private var _op = op
  private var _result: Matrix[Complex] = null

  def A = _a // Getter of a (left)
  def A_= (a: EquationEntity) { // Setter of a (left)
    _a = a
  }

  def B = _b // Getter of b (right)
  def B_= (b: EquationEntity) { // Setter of b (right)
    _b = b
  }

  def operator = _op
  def operator_= (op: EquationOperator) {
    _op = op
  }

  // Cached result; null until a solver stores it.
  def result = _result
  def result_= (result: Matrix[Complex]) {
    _result = result
  }

  /** Rows of the result: Multiply keeps the left operand's rows, Kronecker multiplies them. */
  def rows: Int = {
    _op match {
      case Multiply =>
        _a.rows
      case Kronecker =>
        _a.rows * _b.rows
    }
  }

  /** Columns of the result: Multiply keeps the right operand's columns, Kronecker multiplies them. */
  def cols: Int = {
    _op match {
      case Multiply =>
        _b.cols
      case Kronecker =>
        _a.cols * _b.cols
    }
  }

  def size: Int = rows * cols

  /** Returns the solver-provided result, or (with a warning) computes it on
   *  the fly without caching when no solver has run yet.
   */
  def getResult: Matrix[Complex] = {
    if(result == null) {
      println("Warning: Solver hasn't set the result for " + toString)
      _op match {
        case Multiply =>
          _a.getResult * _b.getResult
        case Kronecker =>
          kron[Complex, Complex, Complex](_a.getResult, _b.getResult)
        case _ =>
          null
      }
    }
    else
      result
  }

  /** Aggregated complexity statistics for this node plus both subtrees.
   *
   *  FIXES over the original version:
   *  - products are computed in Long arithmetic throughout; previously the
   *    inner `a.cols * size` style products were evaluated as Int and could
   *    overflow for large matrices before being widened;
   *  - the current operands `_a`/`_b` are used instead of the constructor
   *    arguments, so statistics stay correct after A_=/B_= rewrites.
   */
  def getStatistics: EquationStatistics = {
    val sumOperation = {
      _op match {
        case Multiply =>
          // Each output cell: _a.cols complex multiplications (3 real sums each)
          // plus (_a.cols - 1) complex additions.
          3L * _a.cols * size + (_a.cols - 1L) * size
        case Kronecker =>
          3L * size // multiplication of Complexes costs 3 sum operations (2 additions, 1 subtraction)
        case _ =>
          0L
      }
    }
    val multiplyOperation = {
      _op match {
        case Multiply =>
          4L * _a.cols * size
        case Kronecker =>
          4L * size // multiplication of Complexes costs 4 multiplication operations
        case _ =>
          0L
      }
    }
    val aStat = _a.getStatistics
    val bStat = _b.getStatistics
    new EquationStatistics(
      sumOperation + aStat.sumOperations + bStat.sumOperations,
      multiplyOperation + aStat.multiplyOperations + bStat.multiplyOperations,
      16L * size + 16L * _a.size + 16L * _b.size, // 16 is the size of a Complex
      16L * size, // 16 is the size of a Complex
      (aStat.depth max bStat.depth) + 1
    )
  }

  override def toString: String = {
    "(" + _a.toString +
    (_op match {
      case Multiply => "*"
      case Kronecker => "⊗"
      case _ => "UNKNOWN_OPERATOR"
    }) +
    _b + ")"
  }
}
/**
 * EquationSymbol is a leaf node of the "Equation-tree": a named constant
 * matrix (typically a quantum gate).
 * @param name identifier of the matrix
 * @param matrix actual matrix; when null it is fetched from QuantumGates by name
 */
class EquationSymbol(name: String, matrix: Matrix[Complex] = null) extends EquationEntity {

  // Fall back to the shared gate registry when no explicit matrix was given.
  private val _matrix: Matrix[Complex] = Option(matrix).getOrElse(QuantumGates.get(name))

  def rows: Int = _matrix.numRows
  def cols: Int = _matrix.numCols
  def size: Int = _matrix.size

  def getResult: Matrix[Complex] = _matrix

  // A constant costs no operations to "solve"; memory is 16 bytes per Complex cell.
  def getStatistics = new EquationStatistics(0, 0, 16L * size, 16L * size, 0)

  override def toString: String = name
}
package idv.brianhsu.maidroid.plurk.util
import java.io.InputStream
import android.graphics.BitmapFactory
import java.net.URL
import java.io._
import android.media.ExifInterface
import android.graphics.Matrix
import android.graphics.Bitmap
case class ResizeFactor(originWidth: Int, originHeight: Int, sampleSize: Int)
object ImageSampleFactor {

  /** Computes a ResizeFactor for the image behind `url`.
   *  The opened stream is consumed (and closed) just to read the bounds.
   */
  def apply(url: String, requiredWidth: Int, requiredHeight: Int): ResizeFactor = {
    apply(new URL(url).openConnection.getInputStream, requiredWidth, requiredHeight)
  }

  /** Computes a ResizeFactor for a local image file. */
  def apply(file: File, requiredWidth: Int, requiredHeight: Int): ResizeFactor = {
    apply(new FileInputStream(file), requiredWidth, requiredHeight)
  }

  /** Decodes `imageFile` downsampled so that both sides stay near `size`,
   *  optionally correcting the orientation recorded in its EXIF data.
   */
  def resizeImageFile(imageFile: File, size: Int, shouldRotate: Boolean = false) = {
    val ResizeFactor(_, _, sampleSize) = apply(imageFile, size, size)
    val decodeOptions = new BitmapFactory.Options
    decodeOptions.inSampleSize = sampleSize
    val bitmap = BitmapFactory.decodeFile(imageFile.getAbsolutePath, decodeOptions)
    if (shouldRotate) {
      rotateBitmap(imageFile, bitmap)
    } else {
      bitmap
    }
  }

  /** Rotates/mirrors `bitmap` according to the file's EXIF orientation tag.
   *
   *  BUGFIX: the combined cases previously called Matrix.setScale after
   *  setRotate. Android's Matrix `set*` methods RESET the whole matrix, so the
   *  rotation was silently discarded; `postScale` concatenates instead.
   */
  private def rotateBitmap(imageFile: File, bitmap: Bitmap) = {
    val matrix = new Matrix
    val exif = new ExifInterface(imageFile.getAbsolutePath)
    val orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, 1)
    orientation match {
      case ExifInterface.ORIENTATION_FLIP_HORIZONTAL =>
        matrix.setScale(-1, 1)
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_ROTATE_180 =>
        matrix.setRotate(180)
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_FLIP_VERTICAL =>
        matrix.setRotate(180)
        matrix.postScale(-1, 1) // was setScale: it dropped the 180° rotation
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_TRANSPOSE =>
        matrix.setRotate(90)
        matrix.postScale(-1, 1) // was setScale: it dropped the 90° rotation
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_ROTATE_90 =>
        matrix.setRotate(90)
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_TRANSVERSE =>
        matrix.setRotate(-90)
        matrix.postScale(-1, 1) // was setScale: it dropped the -90° rotation
        createNewBitmap(bitmap, matrix)
      case ExifInterface.ORIENTATION_ROTATE_270 =>
        matrix.setRotate(-90)
        createNewBitmap(bitmap, matrix)
      case _ =>
        bitmap
    }
  }

  /** Applies `matrix` to produce a transformed copy; returns the original
   *  bitmap when there is not enough memory for the copy.
   */
  private def createNewBitmap(bitmap: Bitmap, matrix: Matrix) = {
    try {
      Bitmap.createBitmap(
        bitmap, 0, 0,
        bitmap.getWidth(),
        bitmap.getHeight(),
        matrix, true
      )
    } catch {
      case e: OutOfMemoryError => bitmap
    }
  }

  private def apply(imgStream: InputStream, requiredWidth: Int, requiredHeight: Int): ResizeFactor = {
    val (originWidth, originHeight) = calculateOriginSize(imgStream)
    val factor = calculateSampleFactor(originWidth, originHeight, requiredWidth, requiredHeight)
    ResizeFactor(originWidth, originHeight, factor)
  }

  /** Largest power-of-two inSampleSize that keeps the decoded image at least
   *  as large as the requested dimensions (standard Android downsampling).
   */
  private def calculateSampleFactor(originWidth: Int, originHeight: Int,
                                    requiredWidth: Int, requiredHeight: Int): Int = {
    var inSampleSize = 1
    if (originHeight > requiredHeight || originWidth > requiredWidth) {
      val halfHeight = originHeight / 2
      val halfWidth = originWidth / 2
      // Calculate the largest inSampleSize value that is a power of 2 and keeps both
      // height and width larger than the requested height and width.
      while ((halfHeight / inSampleSize) > requiredHeight &&
             (halfWidth / inSampleSize) > requiredWidth) {
        inSampleSize *= 2
      }
    }
    inSampleSize
  }

  /** Reads only the image bounds (inJustDecodeBounds) and closes the stream. */
  private def calculateOriginSize(imgStream: InputStream): (Int, Int) = {
    val options = new BitmapFactory.Options
    options.inJustDecodeBounds = true
    BitmapFactory.decodeStream(imgStream, null, options)
    imgStream.close()
    (options.outWidth, options.outHeight)
  }
}
| brianhsu/MaidroidPlurk | src/main/scala/util/ImageSampleFactor.scala | Scala | gpl-3.0 | 3,977 |
import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
import sbt.Keys._
import sbt._
import sbtcrossproject.CrossPlugin.autoImport._
import scalajscrossproject.ScalaJSCrossPlugin.autoImport._
import scoverage.ScoverageKeys.coverageEnabled
import blended.sbt.Dependencies
// Shared sbt-crossproject (JVM + Scala.js) rooted at blended.security; the
// Js/Jvm factories below attach their platform-specific settings to it.
private object BlendedSecurityCross {
  private[this] val builder = sbtcrossproject
    .CrossProject("blendedSecurity", file("blended.security"))(JVMPlatform, JSPlatform)
  val project = builder
    .crossType(CrossType.Full)
    .build()
}
// Scala.js side of the cross project: %%% dependencies, shared publish
// settings, and scoverage disabled (coverage instrumentation is JVM-only here).
object BlendedSecurityJs extends ProjectFactory {
  override val project = {
    BlendedSecurityCross.project.js.settings(
      Seq(
        name := "blended.security",
        moduleName := "blended.security",
        libraryDependencies ++= Seq(
          "com.github.benhutchison" %%% "prickle" % Dependencies.prickleVersion,
          "org.scalatest" %%% "scalatest" % Dependencies.scalatestVersion % "test"
        )
      )
    )
      .settings(CommonSettings())
      .settings(PublishConfig.doPublish)
      .settings(Seq(coverageEnabled := false))
  }
}
// JVM side of the cross project: built through the shared ProjectSettings
// helper (OSGi bundle metadata, deps), with projectFactory overridden to reuse
// the crossproject's jvm sub-project.
object BlendedSecurityJvm extends ProjectFactory {
  private[this] val helper = new ProjectSettings(
    projectName = "blended.security",
    description = "Configuration bundle for the security framework.",
    deps = Seq(
      Dependencies.prickle,
      Dependencies.scalatest % "test",
      Dependencies.logbackCore % "test",
      Dependencies.logbackClassic % "test"
    ),
    // OSGi manifest: activator plus the exported packages of this bundle.
    adaptBundle = b => b.copy(
      bundleActivator = s"${b.bundleSymbolicName}.internal.SecurityActivator",
      exportPackage = Seq(
        b.bundleSymbolicName,
        s"${b.bundleSymbolicName}.json"
      )
    )
  ) {
    override def projectFactory: () => Project = { () =>
      BlendedSecurityCross.project.jvm.settings(
        Seq(
          name := "blendedSecurityJvm"
        )
      )
    }
  }
  override val project = helper.baseProject.dependsOn(
    BlendedUtilLogging.project,
    BlendedDomino.project,
    BlendedUtil.project,
    BlendedSecurityBoot.project
  )
}
| lefou/blended | project/BlendedSecurity.scala | Scala | apache-2.0 | 2,080 |
package gapt.proofs.expansion
import gapt.expr._
import gapt.expr.subst.Substitution
import gapt.logic.Polarity
import org.specs2.mutable.Specification
// Checks that the substitutions recorded for an instance are applied to the
// bound variables when a quantified formula is expanded to its deep form.
class FormulaToExpansionTreeTest extends Specification {
  "substituting bound variables" in {
    formulaToExpansionTree(
      hof"!x x=x",
      Set( Substitution( hov"x" -> le"x" ) ),
      Polarity.InAntecedent ).deep must_== hof"x=x"
  }
  // The substitution x->y, y->x must be applied simultaneously (swap), not
  // sequentially (which would collapse both variables onto one).
  "renamed bound variables" in {
    formulaToExpansionTree(
      hof"!x!y p x y",
      Set( Substitution( hov"x" -> le"y", hov"y" -> le"x" ) ),
      Polarity.InAntecedent ).deep must_== hof"p y x"
  }
}
| gapt/gapt | tests/src/test/scala/gapt/proofs/expansion/FormulaToExpansionTreeTest.scala | Scala | gpl-3.0 | 607 |
//-----------------------------------------------------------------------
// FILE : RTCredential.scala
// SUBJECT : Classes that represent RT credentials in the abstract.
// AUTHOR : (C) Copyright 2011 by Simone Willett <sgwillet@uvm.edu>
// and Peter C. Chapin <PChapin@vtc.vsc.edu>
//
//-----------------------------------------------------------------------
package edu.uvm.rtadmin
// Base type of the four RT credential forms; sealed so matches are exhaustive.
// Role references are (entity, roleName) pairs — presumably RT0-style
// credentials (membership, inclusion, linked, intersection); confirm against
// the RT framework papers.
sealed abstract class RTCredential
// target <- sourceEntity : an entity is a member of the target role.
case class RTMembershipCredential(
    target: (String, String),
    sourceEntity: String) extends RTCredential
// target <- source : all members of one role are members of the target role.
case class RTInclusionCredential(
    target: (String, String),
    source: (String, String)) extends RTCredential
// target <- source.sourceRole2 : linked (attribute-based delegation) credential.
case class RTLinkedCredential(
    target: (String, String),
    source: (String, String),
    sourceRole2: String) extends RTCredential
// target <- source1 ∩ source2 : membership in both source roles is required.
case class RTIntersectionCredential(
    target: (String, String),
    source1: (String, String),
    source2: (String, String)) extends RTCredential
| pchapin/sprocket | src/edu/uvm/rtadmin/RTCredential.scala | Scala | bsd-3-clause | 938 |
package swiss.sib.analytics.server.logs
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import swiss.sib.analytics.server.logs.utils.LogEntryUtils
import org.scalatest.Ignore
import swiss.sib.analytics.server.logs.model.LogEntry
class LogEntryUtilsSpecs extends FlatSpec with Matchers {
def mustAllSucceed (entries : List[LogEntry]){
val failedEntries = entries.filter(e => !e.successfulParsing).toList
failedEntries.foreach(println)
if(failedEntries.size > 0){
fail()
}
}
  // UniProt load-balancer access-log format: every sample line must parse, and
  // a few fields of l1 are spot-checked.
  "LogEntryUtils" should "parse correctly log entries" in {
    //Sample of a UniProt log file (starting with 0)
    val l0 = """uniprot-lb1.org 127.0.0.1 - - [26/Mar/2016:18:43:06 +0000] "GET /administrator/index.php HTTP/1.1" 404 14727 "-" "Mozilla/5.0 (Linux; U; Android 2.2) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1" 0.035 at7ov105990vb9qpqflik0rd87 - 127.0.0.1 -"""
    val l1 = """uniprot-lb1.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 200 1519 "-" "-" 1.358 - text/plain;charset=utf-8 127.0.0.2 5"""
    val l2 = """uniprot-lb1.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 200 1871 "www.google.com" "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36" 0.001 - image/png 127.0.0.2 -"""
    val l3 = """uniprot-lb2.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 304 - "http://www.google.com" "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko" 0.000 - - 127.0.0.2 -"""
    val l4 = """uniprot-lb1.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 304 - "-" "wget" 0.004 - text/html;charset=ISO-8859-1 180.76.15.142 1"""
    val l5 = """uniprot-lb1.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 304 - "http://www.google.com" "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko" 0.000 - - 127.0.0.2 -"""
    val l6 = """uniprot-lb2.org 127.0.0.1 - - [01/Mar/2016:17:33:25 -0500] "GET /foo/bar HTTP/1.1" 301 - "-" "Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)" 752 - text/plain 127.0.0.1 -"""
    val l7 = """uniprot-lb2.org 127.0.0.1 - - [07/Sep/2016:08:25:27 +0000] "GET /index.php HTTP/1.1" 404 14858 "-" "Mozilla/5.0 (Linux; U; Android 2.2) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1" 0.030 86a04a24c8a28313d724f7d18c176f84 - 127.0.0.1 -"""
    val l8 = """uniprot-lb2.org 127.0.0.1 - - [24/May/2016:23:35:23 +0000] " /images/arrow.png HTTP/1.1" 501 1139 "http://www.google.com" "Mozilla/5.0 (Windows NT 6.1; rv:46.0) Gecko/20100101 Firefox/46.0" 0.009 - - 190.80.8.31 -"""
    val le = List(l0, l1, l2, l3, l4, l5, l6, l7, l8).map(LogEntryUtils.parseLogLine)
    le(1).server should equal("uniprot-lb1.org")
    le(1).clientInfo.ipAddress should equal("127.0.0.1")
    le(1).month should equal(3)
    le(1).responseInfo.contentSize should equal(1519)
    le(1).requestInfo.firstLevelPath should equal("/foo")
    mustAllSucceed(le)
  }
  // STRING-db access-log format, including a URL that embeds escaped quotes
  // (l3) and request fields that are spot-checked on l1.
  "LogEntryUtils" should "parse correctly STRING log entries" in {
    //Sample of a STRING log file (starting with 1)
    val l1 = """127.0.0.1 - - [07/Dec/2015:00:00:00 +0000] "GET /new/string HTTP/1.1" 200 70353 "http://string-db.org/version_10/newstring_cgi/show_network_section.pl" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.86 Safari/537.36" **1/1467435**"""
    val l2 = """127.0.0.1 - - [12/Dec/2015:08:39:15 +0000] "GET /newstring_cgi/show_network_section.pl?identifier=127851 133601 127043 121885 117360 117359 133054 122481&additional_network_nodes=10&advanced_menu=yes&chemicalmode=-1&input_query_species=3702&interactive=yes&internal_call=1&limit=10&minprotchem=0&network_flavor=evidence&previous_network_size=18&required_score=400&sessionId=_dIQqTK6Z23u&targetmode=proteins&userId=XB25gLAsxE_b HTTP/1.1" 200 9255 "http://string-db.org/newstring_cgi/show_network_section.pl" "Mozilla/5.0 (Linux; U; Android 4.4.2; zh-cn; GT-I9500 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko)Version/4.0 MQQBrowser/5.0 QQ-URL-Manager Mobile Safari/537.36" **0/597129**"""
    val l3 = """127.0.0.1 - - [05/Jan/2017:14:22:53 +0000] "GET /javascript/basic/\\" + loadingImgUrl + \\" HTTP/1.1" 404 9468 "-" "Java/1.4.1_04" **0/337226**"""
    val le = List(l1, l2, l3).map(LogEntryUtils.parseLogLine)
    le(0).requestInfo.method should equal("GET")
    le(0).requestInfo.url should equal("/new/string")
    le(0).requestInfo.firstLevelPath should equal("/new")
    le(0).requestInfo.protocol should equal("HTTP/1.1")
    le(2).requestInfo.protocol should equal("HTTP/1.1")
    mustAllSucceed(le)
  }
  // OMA browser access-log format (includes a "Cache:" field between the
  // status code and the content size).
  "LogEntryUtils" should "parse correctly OMA log entries" in {
    //Sample of OMA log file (starting with 2)
    //  val PATTERN = """(\\S+)?\\s?(\\S+) (\\S+) (\\S+) \\[([\\w:\\/]+\\s[+\\-]\\d{4})\\] "(.*) (\\S+) (.*)" (\\d{3}) (\\S+) "(.*)" "(.*)"\\s?(.*)""".r
    //  case PATTERN(hostname, ipAddress, clientIdentd, userId, dateTime, method, endpoint, protocol, responseCode, contentSize, referer, agent, remaining) => {
    val l0 = """127.0.0.1 - - [08/May/2017:06:25:16 +0000] "GET /oma HTTP/1.1" 403 Cache:- 150 "-" "Mozilla/5.0 (compatible; SemrushBot/1.2~bl; +http://www.semrush.com/bot.html)""""
    val l1 = """127.0.0.1 - omabrowser.org [08/May/2017:17:24:54 +0000] "GET /cgi-bin/gateway.pl?f=DisplayEntry&p1=5759318&p2=info HTTP/1.1" 200 Cache:MISS 4564 "-" "Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)""""
    val le = List(l0, l1).map(LogEntryUtils.parseLogLine)
    le(0).clientInfo.ipAddress should equal("127.0.0.1")
    mustAllSucceed(le)
  }
"LogEntryUtils" should "parse correctly the mimetype if present" in {
val l1 = """elixir.org 127.0.0.1 - - [22/Mar/2017:11:09:30 +0000] "GET /foo/bar HTTP/1.1" 200 1519 "-" "-" 1.358 - text/plain;charset=utf-8 127.0.0.2 5"""
val le = List(l1).map(LogEntryUtils.parseLogLine)
val logEntry1 = le(0)
logEntry1.responseInfo.charset should equal("text/plain");
mustAllSucceed(le)
}
"LogEntryUtils" should "parse correctly RHEA log entries" in {
val l0 = """localhost 127.0.0.1 - - [01/May/2017:20:00:23 +0100] "GET /rhea/comp HTTP/1.1" 301 246 "http://www.ebi.ac.uk/intenz/" - - www.ebi.ac.uk"""
val l1 = """ola.world 127.0.0.1 - - [03/May/2017:15:17:18 +0100] "GET /rhea/rest/1.0/ws/reaction/cmlreact/10000 HTTP/1.1" 200 3363 "-" "Java/1.8.0_60" ves-pg-91:8080 0.181214 www.rhea-db.org"""
val l2 = """boum.paf.toto 127.0.0.1 - - [03/May/2017:14:27:00 +0100] "GET /rhea/reacti23 HTTP/1.1" - 0 "-" "Mozilla/5.0 (compatible; BLEXBot/1.0; +http://webmeup-crawler.com/)" ves-pg-91:8080 - www.rhea-db.org"""
val l3 = """boum.paf.toto 127.0.0.1 - - [13/May/2017:17:55:23 +0100] "GET /rhea/reaction?id=38056"%20and%20"x"%3D"x HTTP/1.1" 500 1728 "-" "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; elertz 2.4.025; .NET CLR 1.0.3705; .NET CLR 1.1.4322; Media Center PC 4.0)" ves-pg-90:8080 0.014860 www.rhea-db.org"""
val le = List(l0, l1, l2, l3).map(LogEntryUtils.parseLogLine)
le(0).clientInfo.ipAddress should equal("127.0.0.1")
//Sometimes there is no status code. So the number is 0.
le(2).responseInfo.status should equal(0)
mustAllSucceed(le)
}
"LogEntryUtils" should "parse correctly progenetix log entries" in {
val l0 = """127.0.0.1 - - [08/Feb/2017:09:15:41 +0100] "-" 408 - "-" "-""""
val l1 = """127.0.0.1 - - [25/Apr/2017:23:49:09 +0200] "Gh0st\\xad" 400 226 "-" "-""""
val le = List(l0, l1).map(LogEntryUtils.parseLogLine)
le(0).clientInfo.ipAddress should equal("127.0.0.1")
le(0).responseInfo.status should equal(408)
le(0).requestInfo.method should equal("method-not-defined")
le(0).requestInfo.url should equal("-")
le(0).requestInfo.protocol should equal("protocol-not-defined")
le(0).requestInfo.firstLevelPath should equal("not-defined")
mustAllSucceed(List(le(0), le(1)))
}
} | sib-swiss/server-log-analytics | src/test/scala/swiss/sib/analytics/server/logs/LogEntryUtilsSpecs.scala | Scala | gpl-2.0 | 8,127 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.types._
@ExpressionDescription(
  usage = "_FUNC_(expr) - Returns the mean calculated from values of a group.")
case class Average(child: Expression) extends DeclarativeAggregate {
  override def prettyName: String = "avg"
  override def children: Seq[Expression] = child :: Nil
  // The average of an empty or all-null group is null, so the result is always nullable.
  override def nullable: Boolean = true
  // Return data type.
  override def dataType: DataType = resultType
  override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
  override def checkInputDataTypes(): TypeCheckResult =
    TypeUtils.checkForNumericExpr(child.dataType, "function average")
  // Decimal averages gain 4 digits of precision and scale (bounded to the max
  // decimal range); every other numeric input averages as a Double.
  private lazy val resultType = child.dataType match {
    case DecimalType.Fixed(p, s) =>
      DecimalType.bounded(p + 4, s + 4)
    case _ => DoubleType
  }
  // The running sum is kept wider than the input (precision + 10) to reduce the
  // chance of overflow while accumulating decimal values.
  private lazy val sumDataType = child.dataType match {
    case _ @ DecimalType.Fixed(p, s) => DecimalType.bounded(p + 10, s)
    case _ => DoubleType
  }
  // Aggregation buffer: running sum plus a non-null row count.
  private lazy val sum = AttributeReference("sum", sumDataType)()
  private lazy val count = AttributeReference("count", LongType, nullable = false)()
  override lazy val aggBufferAttributes = sum :: count :: Nil
  override lazy val initialValues = Seq(
    /* sum = */ Cast(Literal(0), sumDataType),
    /* count = */ Literal(0L)
  )
  // Null input rows contribute 0 to the sum (via Coalesce) and do not bump the count.
  override lazy val updateExpressions = Seq(
    /* sum = */
    Add(
      sum,
      Coalesce(Cast(child, sumDataType) :: Cast(Literal(0), sumDataType) :: Nil)),
    /* count = */ If(IsNull(child), count, count + 1L)
  )
  // Merging two partial buffers simply adds the partial sums and counts.
  override lazy val mergeExpressions = Seq(
    /* sum = */ sum.left + sum.right,
    /* count = */ count.left + count.right
  )
  // If all input are nulls, count will be 0 and we will get null after the division.
  override lazy val evaluateExpression = child.dataType match {
    case DecimalType.Fixed(p, s) =>
      // increase the precision and scale to prevent precision loss
      val dt = DecimalType.bounded(p + 14, s + 4)
      Cast(Cast(sum, dt) / Cast(count, dt), resultType)
    case _ =>
      Cast(sum, resultType) / Cast(count, resultType)
  }
}
| big-pegasus/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Average.scala | Scala | apache-2.0 | 3,146 |
package domain.models
import org.joda.money.Money
import java.time.LocalDate
import scala.math.Ordered.orderingToOrdered
import util.PlayDateTimeHelper._
/**
 * One lunch offer published by a provider on a given day.
 * Offers are ordered by provider first and by day second.
 */
case class LunchOffer(
    id: Id,
    name: String,
    day: LocalDate,
    price: Money,
    provider: LunchProviderId) extends Ordered[LunchOffer] {

  /** Lexicographic comparison on the (provider, day) key, via tuple ordering. */
  def compare(that: LunchOffer): Int = {
    val thisKey = (this.provider, this.day)
    val thatKey = (that.provider, that.day)
    thisKey.compare(thatKey)
  }
}
} | rori-dev/lunchbox | backend-play-akka-scala/app/domain/models/LunchOffer.scala | Scala | mit | 409 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.streaming
import spark.{Logging, Utils}
import org.apache.hadoop.fs.{FileUtil, Path}
import org.apache.hadoop.conf.Configuration
import java.io._
import com.ning.compress.lzf.{LZFInputStream, LZFOutputStream}
import java.util.concurrent.Executors
import java.util.concurrent.RejectedExecutionException
private[streaming]
class Checkpoint(@transient ssc: StreamingContext, val checkpointTime: Time)
  extends Logging with Serializable {
  // Serializable snapshot of the StreamingContext's configuration and DStream
  // graph, captured at `checkpointTime` so the context can be reconstructed on
  // recovery. `ssc` itself is @transient and is not written out.
  val master = ssc.sc.master
  val framework = ssc.sc.appName
  val sparkHome = ssc.sc.sparkHome
  val jars = ssc.sc.jars
  val environment = ssc.sc.environment
  val graph = ssc.graph
  val checkpointDir = ssc.checkpointDir
  val checkpointDuration = ssc.checkpointDuration
  // Batch times whose jobs had not finished when the checkpoint was taken.
  val pendingTimes = ssc.scheduler.jobManager.getPendingTimes()

  // Sanity-checks the deserialized snapshot; called after reading a checkpoint
  // back from disk to fail fast on a corrupted or partially written file.
  def validate() {
    assert(master != null, "Checkpoint.master is null")
    assert(framework != null, "Checkpoint.framework is null")
    assert(graph != null, "Checkpoint.graph is null")
    assert(checkpointTime != null, "Checkpoint.checkpointTime is null")
    logInfo("Checkpoint for time " + checkpointTime + " validated")
  }
}
/**
 * Asynchronously writes serialized (LZF-compressed) checkpoints to `checkpointDir`.
 * Writes go to a temporary ".next" file which is then renamed over the real file,
 * keeping the previous checkpoint in a ".bk" backup, so readers never observe a
 * half-written checkpoint. All writes are serialized through a single-thread pool.
 */
private[streaming]
class CheckpointWriter(checkpointDir: String) extends Logging {
  val file = new Path(checkpointDir, "graph")
  // The file to which we actually write - and then "move" to file.
  private val writeFile = new Path(file.getParent, file.getName + ".next")
  private val bakFile = new Path(file.getParent, file.getName + ".bk")
  private var stopped = false
  val conf = new Configuration()
  var fs = file.getFileSystem(conf)
  val maxAttempts = 3
  // Single thread => checkpoint writes never overlap or reorder.
  val executor = Executors.newFixedThreadPool(1)
  // Removed code which validates whether there is only one CheckpointWriter per path 'file' since
  // I did not notice any errors - reintroduce it ?

  // Runnable that performs one write-rename cycle, retrying up to `maxAttempts`
  // times on IOException.
  // NOTE(review): `fs` is never re-created after a failure — if the FileSystem
  // handle itself has gone bad, all retries will fail the same way; confirm
  // whether `fs` should be refreshed between attempts.
  class CheckpointWriteHandler(checkpointTime: Time, bytes: Array[Byte]) extends Runnable {
    def run() {
      var attempts = 0
      val startTime = System.currentTimeMillis()
      while (attempts < maxAttempts) {
        attempts += 1
        try {
          logDebug("Saving checkpoint for time " + checkpointTime + " to file '" + file + "'")
          // This is inherently thread unsafe .. so alleviating it by writing to '.new' and then doing moves : which should be pretty fast.
          val fos = fs.create(writeFile)
          fos.write(bytes)
          fos.close()
          // Keep the previous checkpoint around as the ".bk" backup.
          if (fs.exists(file) && fs.rename(file, bakFile)) {
            logDebug("Moved existing checkpoint file to " + bakFile)
          }
          // paranoia
          fs.delete(file, false)
          fs.rename(writeFile, file)
          val finishTime = System.currentTimeMillis();
          logInfo("Checkpoint for time " + checkpointTime + " saved to file '" + file +
            "', took " + bytes.length + " bytes and " + (finishTime - startTime) + " milliseconds")
          return
        } catch {
          case ioe: IOException =>
            logWarning("Error writing checkpoint to file in " + attempts + " attempts", ioe)
        }
      }
      logError("Could not write checkpoint for time " + checkpointTime + " to file '" + file + "'")
    }
  }

  // Serializes + LZF-compresses the checkpoint on the calling thread, then hands
  // the bytes to the background executor for the actual filesystem write.
  def write(checkpoint: Checkpoint) {
    val bos = new ByteArrayOutputStream()
    val zos = new LZFOutputStream(bos)
    val oos = new ObjectOutputStream(zos)
    oos.writeObject(checkpoint)
    oos.close()
    bos.close()
    try {
      executor.execute(new CheckpointWriteHandler(checkpoint.checkpointTime, bos.toByteArray))
    } catch {
      case rej: RejectedExecutionException =>
        // Happens if stop() has already shut the executor down.
        logError("Could not submit checkpoint task to the thread pool executor", rej)
    }
  }

  // Idempotent shutdown: waits up to 10 seconds for in-flight writes to finish.
  def stop() {
    synchronized {
      if (stopped) return ;
      stopped = true
    }
    executor.shutdown()
    val startTime = System.currentTimeMillis()
    val terminated = executor.awaitTermination(10, java.util.concurrent.TimeUnit.SECONDS)
    val endTime = System.currentTimeMillis()
    logInfo("CheckpointWriter executor terminated ? " + terminated + ", waited for " + (endTime - startTime) + " ms.")
  }
}
private[streaming]
object CheckpointReader extends Logging {
  /**
   * Loads and validates a checkpoint, trying in order: `path/graph`,
   * `path/graph.bk`, `path`, `path.bk`. Returns the first one that
   * deserializes and validates; throws if none can be read.
   */
  def read(path: String): Checkpoint = {
    val fs = new Path(path).getFileSystem(new Configuration())
    val attempts = Seq(new Path(path, "graph"), new Path(path, "graph.bk"), new Path(path), new Path(path + ".bk"))
    // NOTE(review): the `return` below is a nonlocal return out of the foreach
    // lambda (implemented by throwing); works, but a find/collectFirst would be
    // the conventional form.
    attempts.foreach(file => {
      if (fs.exists(file)) {
        logInfo("Attempting to load checkpoint from file '" + file + "'")
        try {
          val fis = fs.open(file)
          // ObjectInputStream uses the last defined user-defined class loader in the stack
          // to find classes, which maybe the wrong class loader. Hence, a inherited version
          // of ObjectInputStream is used to explicitly use the current thread's default class
          // loader to find and load classes. This is a well know Java issue and has popped up
          // in other places (e.g., http://jira.codehaus.org/browse/GROOVY-1627)
          val zis = new LZFInputStream(fis)
          val ois = new ObjectInputStreamWithLoader(zis, Thread.currentThread().getContextClassLoader)
          val cp = ois.readObject.asInstanceOf[Checkpoint]
          ois.close()
          // NOTE(review): closing `fs` closes Hadoop's cached, shared FileSystem
          // instance for this scheme/authority — other users of the same
          // FileSystem may be affected; verify this close is intended.
          fs.close()
          cp.validate()
          logInfo("Checkpoint successfully loaded from file '" + file + "'")
          logInfo("Checkpoint was generated at time " + cp.checkpointTime)
          return cp
        } catch {
          case e: Exception =>
            // Fall through and try the next candidate file.
            logError("Error loading checkpoint from file '" + file + "'", e)
        }
      } else {
        logWarning("Could not read checkpoint from file '" + file + "' as it does not exist")
      }
    })
    throw new Exception("Could not read checkpoint from path '" + path + "'")
  }
}
private[streaming]
class ObjectInputStreamWithLoader(inputStream_ : InputStream, loader: ClassLoader) extends ObjectInputStream(inputStream_) {
  /**
   * Resolves classes through the supplied class loader first; if that loader
   * cannot find the class (any Exception), falls back to the default
   * ObjectInputStream resolution.
   */
  override def resolveClass(desc: ObjectStreamClass): Class[_] =
    try {
      loader.loadClass(desc.getName())
    } catch {
      case _: Exception => super.resolveClass(desc)
    }
}
| wgpshashank/spark | streaming/src/main/scala/spark/streaming/Checkpoint.scala | Scala | apache-2.0 | 7,011 |
// Copyright (c) 2014 Belmont Technology Pty Ltd. All rights reserved.
package com.grahamlea.examples.rxjava.threading
/**
* Shows that simply calling parallel() somewhere in the chain doesn't
* necessarily result that downstream operations occurring in parallel,
* or even on different threads.
*/
object Example08x_Parallel_NoWorkInsideParallel extends App {
  // `parallel(f => f)` performs no work inside the parallel stage, so the
  // downstream map/debug operators are not forced onto other threads.
  // (`generator`, `shiftUp`, `shiftDown` and `debug` are presumably provided by
  // the examples package object — not visible in this file.)
  val shiftedUp = generator.parallel(f => f).map(shiftUp).debug("Shifted Up")
  val shiftedDown = shiftedUp.map(shiftDown).debug("Shifted Down")
  shiftedDown.subscribe(debug("Received", _))
  // The built in Schedulers use daemon threads, so you need to make the main thread stick around or you won't see anything
  Thread.sleep(1000)
}
| GrahamLea/RxJava-Threading-Examples | src/main/scala/com/grahamlea/examples/rxjava/threading/Example08x_Parallel_NoWorkInsideParallel.scala | Scala | apache-2.0 | 708 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
import scala.swing._
object FirstSwingApp extends SimpleGUIApplication {

  /** Builds the application's main window: a frame holding a single button. */
  def top = {
    val frame = new MainFrame
    frame.title = "First Swing App"
    val clickButton = new Button
    clickButton.text = "Click me"
    frame.contents = clickButton
    frame
  }
}
| peachyy/scalastu | swing/FirstSwingApp.scala | Scala | apache-2.0 | 1,006 |
package me.reminisce.analysis
import me.reminisce.analysis.model.{ItemSummary, UserSummary}
import me.reminisce.analysis.model.Formatters._
import me.reminisce.database
import me.reminisce.database.MongoCollections
import me.reminisce.database.MongoDBEntities.{FBPage, FBPageLike, FBPost}
import me.reminisce.database.MongoDBFormats._
import me.reminisce.fetching.config.GraphResponses.Friend
import reactivemongo.api.{Cursor, DefaultDB}
import reactivemongo.api.collections.bson.BSONCollection
import reactivemongo.bson.BSONDocument
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
object DataFetcher {

  /**
   * Gets all the necessary data for the analysis from the database.
   *
   * Runs one sequential chain of Mongo queries (user summary, posts, item
   * summaries, page likes, all pages) and assembles them into an AnalysisData.
   *
   * @param userId the user being analysed
   * @param db the database to fetch from
   * @param maybeNewFriends new friends if there are
   * @param maybeFbPostIds new posts ids if there are; when set, only those
   *                       posts and their item summaries are analysed
   * @return analysis data
   */
  def getAnalysisData(userId: String, db: DefaultDB,
                      maybeNewFriends: Option[Set[Friend]] = None,
                      maybeFbPostIds: Option[List[String]] = None): Future[AnalysisData] = {
    val postCollection = db[BSONCollection](MongoCollections.fbPosts)
    val itemsSummariesCollection = db[BSONCollection](MongoCollections.itemsSummaries)
    val userSummariesCollection = db[BSONCollection](MongoCollections.userSummaries)
    // Used only when the user has no stored summary yet.
    lazy val emptyUserSummary = UserSummary(userId = userId)
    val defaultSelector = BSONDocument("userId" -> userId)
    val postsCursor = postCollection.find(defaultSelector).cursor[FBPost]()
    for {
      maybeUserSummary <- userSummariesCollection.find(defaultSelector).one[UserSummary]
      userSummary = maybeUserSummary.getOrElse(emptyUserSummary)
      // maxDocs = -1 means no limit; DoneOnError aborts on the first cursor error.
      allPosts <- postsCursor.collect[List](maxDocs = -1, Cursor.DoneOnError[List[FBPost]]())
      defaultSumSelector = BSONDocument("userId" -> userId, "itemType" -> "Post")
      // When specific post ids were supplied, restrict item summaries to them.
      itemSummarySelector = maybeFbPostIds.fold(defaultSumSelector)(ids => BSONDocument("userId" -> userId, "itemId" -> BSONDocument("$in" -> ids)))
      itemsSummariesCursor = itemsSummariesCollection.find(itemSummarySelector).cursor[ItemSummary]()
      itemSummaries <- itemsSummariesCursor.collect[List](maxDocs = -1, Cursor.DoneOnError[List[ItemSummary]]())
      pageLikes <- getPageLikes(db, userId)
      allPages <- getAllPages(db)
    } yield {
      // NOTE(review): friend.name is passed for BOTH Friend arguments — if
      // Friend's first parameter is an id, this should likely be
      // Friend(friend.id, friend.name); verify against GraphResponses.Friend.
      val friends = userSummary.friends.map(friend => Friend(friend.name, friend.name))
      // Assumes every stored like refers to a page present in allPages.
      val notLikedPagesCount = allPages.size - pageLikes.size
      val userLikedPages = allPages.filter {
        fbPage => pageLikes.exists(pageLike => pageLike.pageId == fbPage.pageId)
      }
      // Without explicit post ids, every stored post is analysed.
      val analysedPosts = maybeFbPostIds.fold(allPosts) {
        fbPostIds =>
          allPosts.filter(post => fbPostIds.contains(post.postId))
      }
      AnalysisData(analysedPosts, allPosts, userLikedPages, allPages, pageLikes,
        maybeNewFriends.getOrElse(friends), notLikedPagesCount, userSummary, itemSummaries)
    }
  }

  /**
   * Get All pages from the database
   * @param db database to get the pages from
   * @return an eventual list of pages
   */
  def getAllPages(db: DefaultDB): Future[List[FBPage]] = {
    val pagesCollection = db[BSONCollection](MongoCollections.fbPages)
    database.findSome[FBPage](pagesCollection, BSONDocument())
  }

  /**
   * Get the user's liked pages
   * @param db database to get the pages from
   * @param userId considered user
   * @return an eventual list of pages
   */
  def getPageLikes(db: DefaultDB, userId: String): Future[List[FBPageLike]] = {
    val pageLikesCollection = db[BSONCollection](MongoCollections.fbPageLikes)
    val pageLikesSelector = BSONDocument("userId" -> userId)
    database.findSome[FBPageLike](pageLikesCollection, pageLikesSelector)
  }
}
| reminisceme/game-creator | src/main/scala/me/reminisce/analysis/DataFetcher.scala | Scala | apache-2.0 | 3,836 |
package coursier.cli.options
import caseapp.{ExtraName => Short, HelpMessage => Help, ValueDescription => Value, _}
// format: off
/**
 * Command-line options controlling dependency resolution (exclusions, sbt
 * plugins, Scala.js / scala-native variants). Field semantics are carried by
 * the caseapp annotations below; `// format: off` above disables scalafmt so
 * the annotation layout is preserved.
 */
final case class DependencyOptions(
  @Group(OptionGroup.dependency)
  @Hidden
  @Help("Exclude module")
  @Value("organization:name")
  @Short("E")
    exclude: List[String] = Nil,
  @Group(OptionGroup.dependency)
  @Hidden
  @Short("x")
  @Help("Path to the local exclusion file. " +
      "Syntax: <org:name>--<org:name>. `--` means minus. Example file content:\\n\\t" +
      "\\tcom.twitter.penguin:korean-text--com.twitter:util-tunable-internal_2.11\\n\\t" +
      "\\torg.apache.commons:commons-math--com.twitter.search:core-query-nodes\\n\\t" +
      "Behavior: If root module A excludes module X, but root module B requires X, module X will still be fetched."
  )
    localExcludeFile: String = "",
  @Group(OptionGroup.dependency)
  @Hidden
  @Help("If --sbt-plugin options are passed: default sbt version (short version X.Y is enough - note that for sbt 1.x, this should be passed 1.0)")
  @Value("sbt version")
    sbtVersion: String = "1.0",
  @Group(OptionGroup.dependency)
  @Hidden
  @Help("Add intransitive dependencies")
    intransitive: List[String] = Nil,
  @Group(OptionGroup.dependency)
  @Help("Add sbt plugin dependencies")
    sbtPlugin: List[String] = Nil,
  @Group(OptionGroup.dependency)
  @Help("Enable Scala.js")
    scalaJs: Boolean = false,
  @Group(OptionGroup.dependency)
  @Help("Enable scala-native")
  @Short("S")
    native: Boolean = false
)
// format: on
object DependencyOptions {
  // Explicit result types on implicit definitions: avoids fragile inference of
  // the macro-materialized type, silences the 2.13 lint for implicits without
  // explicit types, and is mandatory in Scala 3.
  implicit val parser: Parser[DependencyOptions] = Parser[DependencyOptions]
  implicit val help: caseapp.core.help.Help[DependencyOptions] =
    caseapp.core.help.Help[DependencyOptions]
}
| coursier/coursier | modules/cli/src/main/scala/coursier/cli/options/DependencyOptions.scala | Scala | apache-2.0 | 1,668 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources
import java.io.{File, FileNotFoundException}
import java.net.URI
import scala.collection.mutable
import org.apache.hadoop.fs.{BlockLocation, FileStatus, LocatedFileStatus, Path, RawLocalFileSystem}
import org.apache.spark.SparkException
import org.apache.spark.metrics.source.HiveCatalogMetrics
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.util.KnownSizeEstimation
class FileIndexSuite extends SharedSparkSession {
  // Test-only subclass that exposes InMemoryFileIndex's protected leaf-file /
  // leaf-directory bookkeeping for assertions.
  private class TestInMemoryFileIndex(
      spark: SparkSession,
      path: Path,
      fileStatusCache: FileStatusCache = NoopCache)
    extends InMemoryFileIndex(spark, Seq(path), Map.empty, None, fileStatusCache) {
    def leafFilePaths: Seq[Path] = leafFiles.keys.toSeq
    def leafDirPaths: Seq[Path] = leafDirToChildrenFiles.keys.toSeq
    def leafFileStatuses: Iterable[FileStatus] = leafFiles.values
  }
test("InMemoryFileIndex: leaf files are qualified paths") {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val path = new Path(file.getCanonicalPath)
val catalog = new TestInMemoryFileIndex(spark, path)
assert(catalog.leafFilePaths.forall(p => p.toString.startsWith("file:/")))
assert(catalog.leafDirPaths.forall(p => p.toString.startsWith("file:/")))
}
}
test("SPARK-26188: don't infer data types of partition columns if user specifies schema") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=4d")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("a", StringType, false)))
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 &&
partitionValues(0).getString(0) == "4d")
}
}
test("SPARK-26990: use user specified field names if possible") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=foo")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("A", StringType, false)))
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
assert(fileIndex.partitionSchema.length == 1 && fileIndex.partitionSchema.head.name == "A")
}
}
}
test("SPARK-26230: if case sensitive, validate partitions with original column names") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=1")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val partitionDirectory2 = new File(dir, "A=2")
partitionDirectory2.mkdir()
val file2 = new File(partitionDirectory2, "text.txt")
stringToFile(file2, "text")
val path = new Path(dir.getCanonicalPath)
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None)
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 2)
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
val msg = intercept[AssertionError] {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None)
fileIndex.partitionSpec()
}.getMessage
assert(msg.contains("Conflicting partition column names detected"))
assert("Partition column name list #[0-1]: A".r.findFirstIn(msg).isDefined)
assert("Partition column name list #[0-1]: a".r.findFirstIn(msg).isDefined)
}
}
}
test("SPARK-26263: Throw exception when partition value can't be casted to user-specified type") {
withTempDir { dir =>
val partitionDirectory = new File(dir, "a=foo")
partitionDirectory.mkdir()
val file = new File(partitionDirectory, "text.txt")
stringToFile(file, "text")
val path = new Path(dir.getCanonicalPath)
val schema = StructType(Seq(StructField("a", IntegerType, false)))
withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "true") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val msg = intercept[RuntimeException] {
fileIndex.partitionSpec()
}.getMessage
assert(msg == "Failed to cast value `foo` to `IntegerType` for partition column `a`")
}
withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "false") {
val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema))
val partitionValues = fileIndex.partitionSpec().partitions.map(_.values)
assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 &&
partitionValues(0).isNullAt(0))
}
}
}
test("InMemoryFileIndex: input paths are converted to qualified paths") {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val unqualifiedDirPath = new Path(dir.getCanonicalPath)
val unqualifiedFilePath = new Path(file.getCanonicalPath)
require(!unqualifiedDirPath.toString.contains("file:"))
require(!unqualifiedFilePath.toString.contains("file:"))
val fs = unqualifiedDirPath.getFileSystem(spark.sessionState.newHadoopConf())
val qualifiedFilePath = fs.makeQualified(new Path(file.getCanonicalPath))
require(qualifiedFilePath.toString.startsWith("file:"))
val catalog1 = new InMemoryFileIndex(
spark, Seq(unqualifiedDirPath), Map.empty, None)
assert(catalog1.allFiles.map(_.getPath) === Seq(qualifiedFilePath))
val catalog2 = new InMemoryFileIndex(
spark, Seq(unqualifiedFilePath), Map.empty, None)
assert(catalog2.allFiles.map(_.getPath) === Seq(qualifiedFilePath))
}
}
test("InMemoryFileIndex: root folders that don't exist don't throw exceptions") {
withTempDir { dir =>
val deletedFolder = new File(dir, "deleted")
assert(!deletedFolder.exists())
val catalog1 = new InMemoryFileIndex(
spark, Seq(new Path(deletedFolder.getCanonicalPath)), Map.empty, None)
// doesn't throw an exception
assert(catalog1.listLeafFiles(catalog1.rootPaths).isEmpty)
}
}
test("SPARK-27676: InMemoryFileIndex respects ignoreMissingFiles config for non-root paths") {
import DeletionRaceFileSystem._
for (
raceCondition <- Seq(
classOf[SubdirectoryDeletionRaceFileSystem],
classOf[FileDeletionRaceFileSystem]
);
ignoreMissingFiles <- Seq(true, false);
parDiscoveryThreshold <- Seq(0, 100)
) {
withClue(s"raceCondition=$raceCondition, ignoreMissingFiles=$ignoreMissingFiles, " +
s"parDiscoveryThreshold=$parDiscoveryThreshold"
) {
withSQLConf(
SQLConf.IGNORE_MISSING_FILES.key -> ignoreMissingFiles.toString,
SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> parDiscoveryThreshold.toString,
"fs.mockFs.impl" -> raceCondition.getName,
"fs.mockFs.impl.disable.cache" -> "true"
) {
def makeCatalog(): InMemoryFileIndex = new InMemoryFileIndex(
spark, Seq(rootDirPath), Map.empty, None)
if (ignoreMissingFiles) {
// We're ignoring missing files, so catalog construction should succeed
val catalog = makeCatalog()
val leafFiles = catalog.listLeafFiles(catalog.rootPaths)
if (raceCondition == classOf[SubdirectoryDeletionRaceFileSystem]) {
// The only subdirectory was missing, so there should be no leaf files:
assert(leafFiles.isEmpty)
} else {
assert(raceCondition == classOf[FileDeletionRaceFileSystem])
// One of the two leaf files was missing, but we should still list the other:
assert(leafFiles.size == 1)
assert(leafFiles.head.getPath == nonDeletedLeafFilePath)
}
} else {
// We're NOT ignoring missing files, so catalog construction should fail
val e = intercept[Exception] {
makeCatalog()
}
// The exact exception depends on whether we're using parallel listing
if (parDiscoveryThreshold == 0) {
// The FileNotFoundException occurs in a Spark executor (as part of a job)
assert(e.isInstanceOf[SparkException])
assert(e.getMessage.contains("FileNotFoundException"))
} else {
// The FileNotFoundException occurs directly on the driver
assert(e.isInstanceOf[FileNotFoundException])
// Test that the FileNotFoundException is triggered for the expected reason:
if (raceCondition == classOf[SubdirectoryDeletionRaceFileSystem]) {
assert(e.getMessage.contains(subDirPath.toString))
} else {
assert(raceCondition == classOf[FileDeletionRaceFileSystem])
assert(e.getMessage.contains(leafFilePath.toString))
}
}
}
}
}
}
}
test("PartitioningAwareFileIndex listing parallelized with many top level dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) {
withTempDir { dir =>
val topLevelDirs = (1 to scale).map { i =>
val tmp = new File(dir, s"foo=$i.txt")
tmp.mkdir()
new Path(tmp.getCanonicalPath)
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, topLevelDirs, Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("PartitioningAwareFileIndex listing parallelized with large child dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) {
withTempDir { dir =>
for (i <- 1 to scale) {
new File(dir, s"foo=$i.txt").mkdir()
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("PartitioningAwareFileIndex listing parallelized with large, deeply nested child dirs") {
for ((scale, expectedNumPar) <- Seq((10, 0), (50, 4))) {
withTempDir { dir =>
for (i <- 1 to 2) {
val subdirA = new File(dir, s"a=$i")
subdirA.mkdir()
for (j <- 1 to 2) {
val subdirB = new File(subdirA, s"b=$j")
subdirB.mkdir()
for (k <- 1 to scale) {
new File(subdirB, s"foo=$k.txt").mkdir()
}
}
}
HiveCatalogMetrics.reset()
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0)
new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None)
assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar)
}
}
}
test("InMemoryFileIndex - file filtering") {
assert(!InMemoryFileIndex.shouldFilterOut("abcd"))
assert(InMemoryFileIndex.shouldFilterOut(".ab"))
assert(InMemoryFileIndex.shouldFilterOut("_cd"))
assert(!InMemoryFileIndex.shouldFilterOut("_metadata"))
assert(!InMemoryFileIndex.shouldFilterOut("_common_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("_ab_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("_cd_common_metadata"))
assert(InMemoryFileIndex.shouldFilterOut("a._COPYING_"))
}
// SPARK-17613: a root path with and without a trailing '/' must be treated
// identically when matching leaf files back to their root. Uses the
// FakeParentPathFileSystem defined later in this file, whose URI is
// "mockFs://some-bucket" so both spellings are parent-less roots.
test("SPARK-17613 - PartitioningAwareFileIndex: base path w/o '/' at end") {
class MockCatalog(
override val rootPaths: Seq[Path])
extends PartitioningAwareFileIndex(spark, Map.empty, None) {
override def refresh(): Unit = {}
// A single leaf file directly under the bucket root.
override def leafFiles: mutable.LinkedHashMap[Path, FileStatus] = mutable.LinkedHashMap(
new Path("mockFs://some-bucket/file1.json") -> new FileStatus()
)
override def leafDirToChildrenFiles: Map[Path, Array[FileStatus]] = Map(
new Path("mockFs://some-bucket/") -> Array(new FileStatus())
)
override def partitionSpec(): PartitionSpec = {
PartitionSpec.emptySpec
}
}
withSQLConf(
"fs.mockFs.impl" -> classOf[FakeParentPathFileSystem].getName,
"fs.mockFs.impl.disable.cache" -> "true") {
// Both spellings of the bucket root have no parent...
val pathWithSlash = new Path("mockFs://some-bucket/")
assert(pathWithSlash.getParent === null)
val pathWithoutSlash = new Path("mockFs://some-bucket")
assert(pathWithoutSlash.getParent === null)
// ...and either must surface the leaf file.
val catalog1 = new MockCatalog(Seq(pathWithSlash))
val catalog2 = new MockCatalog(Seq(pathWithoutSlash))
assert(catalog1.allFiles().nonEmpty)
assert(catalog2.allFiles().nonEmpty)
}
}
// A threshold of 0 is legal (listing is always parallelized), while a
// negative threshold must be rejected with an IllegalArgumentException.
test("InMemoryFileIndex with empty rootPaths when PARALLEL_PARTITION_DISCOVERY_THRESHOLD" +
"is a nonpositive number") {
withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "0") {
new InMemoryFileIndex(spark, Seq.empty, Map.empty, None)
}
val e = intercept[IllegalArgumentException] {
withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "-1") {
new InMemoryFileIndex(spark, Seq.empty, Map.empty, None)
}
}.getMessage
assert(e.contains("The maximum number of paths allowed for listing files at " +
"driver side must not be negative"))
}
// refresh() must repopulate leaf files/dirs even when a FileStatusCache is
// in use: before the refresh the (lazily built) sets are empty, afterwards
// they contain the newly created file and its parent directory.
test("refresh for InMemoryFileIndex with FileStatusCache") {
withTempDir { dir =>
val fileStatusCache = FileStatusCache.getOrCreate(spark)
val dirPath = new Path(dir.getAbsolutePath)
val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf())
val catalog = new TestInMemoryFileIndex(spark, dirPath, fileStatusCache)
// Create the file *after* the index was constructed.
val file = new File(dir, "text.txt")
stringToFile(file, "text")
assert(catalog.leafDirPaths.isEmpty)
assert(catalog.leafFilePaths.isEmpty)
catalog.refresh()
// Paths are compared in fully-qualified form.
assert(catalog.leafFilePaths.size == 1)
assert(catalog.leafFilePaths.head == fs.makeQualified(new Path(file.getAbsolutePath)))
assert(catalog.leafDirPaths.size == 1)
assert(catalog.leafDirPaths.head == fs.makeQualified(dirPath))
}
}
// SPARK-20280: the file-status cache must not overflow its size accounting
// when a single partition's estimated size exceeds Int range.
test("SPARK-20280 - FileStatusCache with a partition with very many files") {
  /* fake the size, otherwise we need to allocate 2GB of data to trigger this bug */
  class MyFileStatus extends FileStatus with KnownSizeEstimation {
    override def estimatedSize: Long = 1000 * 1000 * 1000
  }
  /* files * MyFileStatus.estimatedSize should overflow to negative integer
   * so, make it between 2bn and 4bn
   */
  val manyFiles: Array[FileStatus] = Array.fill(3)(new MyFileStatus())
  val cache = FileStatusCache.getOrCreate(spark)
  cache.putLeafFiles(new Path("/tmp", "abc"), manyFiles)
}
// SPARK-20367: partition column names are escaped when embedded in directory
// names; partition inference must unescape them back to the original name.
test("SPARK-20367 - properly unescape column names in inferPartitioning") {
withTempPath { path =>
// A column name full of characters that require escaping in paths.
val colToUnescape = "Column/#%'?"
spark
.range(1)
.select(col("id").as(colToUnescape), col("id"))
.write.partitionBy(colToUnescape).parquet(path.getAbsolutePath)
// Reading back must recover the exact original column name.
assert(spark.read.parquet(path.getAbsolutePath).schema.exists(_.name == colToUnescape))
}
}
// SPARK-25062: even if the filesystem returns a BlockLocation *subclass*
// (see SpecialBlockLocationFileSystem later in this file), the index must
// store plain BlockLocation instances.
test("SPARK-25062 - InMemoryFileIndex stores BlockLocation objects no matter what subclass " +
"the FS returns") {
withSQLConf("fs.file.impl" -> classOf[SpecialBlockLocationFileSystem].getName) {
withTempDir { dir =>
val file = new File(dir, "text.txt")
stringToFile(file, "text")
val inMemoryFileIndex = new TestInMemoryFileIndex(spark, new Path(file.getCanonicalPath))
val blockLocations = inMemoryFileIndex.leafFileStatuses.flatMap(
_.asInstanceOf[LocatedFileStatus].getBlockLocations)
// getClass equality (not isInstanceOf) asserts the exact runtime class.
assert(blockLocations.forall(_.getClass == classOf[BlockLocation]))
}
}
}
}
// Shared fixture for the SPARK-27676 deletion-race filesystems below: a root
// directory containing one subdirectory, which in turn holds two leaf files.
object DeletionRaceFileSystem {
val rootDirPath: Path = new Path("mockFs:///rootDir/")
val subDirPath: Path = new Path(rootDirPath, "subDir")
val leafFilePath: Path = new Path(subDirPath, "leafFile")
val nonDeletedLeafFilePath: Path = new Path(subDirPath, "nonDeletedLeafFile")
// The root lists exactly one entry: the subdirectory (isdir = true).
val rootListing: Array[FileStatus] =
Array(new FileStatus(0, true, 0, 0, 0, subDirPath))
// The subdirectory lists both leaf files (length 100 each).
val subFolderListing: Array[FileStatus] =
Array(
new FileStatus(0, false, 0, 100, 0, leafFilePath),
new FileStatus(0, false, 0, 100, 0, nonDeletedLeafFilePath))
}
// Used in SPARK-27676 test to simulate a race where a subdirectory is deleted
// between back-to-back listing calls: the root still advertises the
// subdirectory, but listing the subdirectory itself fails with FNF.
class SubdirectoryDeletionRaceFileSystem extends RawLocalFileSystem {
  import DeletionRaceFileSystem._

  override def getScheme: String = "mockFs"

  override def listStatus(path: Path): Array[FileStatus] = path match {
    case `rootDirPath` => rootListing
    case `subDirPath`  => throw new FileNotFoundException(subDirPath.toString)
    case _             => throw new IllegalArgumentException()
  }
}
// Used in SPARK-27676 test to simulate a race where a file is deleted between
// being listed and having its size / block locations queried: listing succeeds
// for both levels, but resolving block locations of `leafFilePath` fails.
class FileDeletionRaceFileSystem extends RawLocalFileSystem {
  import DeletionRaceFileSystem._

  override def getScheme: String = "mockFs"

  override def listStatus(path: Path): Array[FileStatus] = path match {
    case `rootDirPath` => rootListing
    case `subDirPath`  => subFolderListing
    case _             => throw new IllegalArgumentException()
  }

  override def getFileBlockLocations(
      file: FileStatus,
      start: Long,
      len: Long): Array[BlockLocation] =
    if (file.getPath == leafFilePath) {
      // The "deleted" file: its status lookup now fails.
      throw new FileNotFoundException(leafFilePath.toString)
    } else {
      Array.empty
    }
}
// Filesystem whose URI is the bare bucket "mockFs://some-bucket", so both
// "mockFs://some-bucket" and "mockFs://some-bucket/" resolve as parent-less
// roots (used by the SPARK-17613 test above).
class FakeParentPathFileSystem extends RawLocalFileSystem {
  override def getScheme: String = "mockFs"

  override def getUri: URI = new URI("mockFs://some-bucket")
}
// Filesystem whose getFileBlockLocations returns a private BlockLocation
// *subclass*, used by the SPARK-25062 test to verify the index normalizes
// locations back to plain BlockLocation.
class SpecialBlockLocationFileSystem extends RawLocalFileSystem {
class SpecialBlockLocation(
names: Array[String],
hosts: Array[String],
offset: Long,
length: Long)
extends BlockLocation(names, hosts, offset, length)
override def getFileBlockLocations(
file: FileStatus,
start: Long,
len: Long): Array[BlockLocation] = {
// One dummy location spanning the whole file.
Array(new SpecialBlockLocation(Array("dummy"), Array("dummy"), 0L, file.getLen))
}
}
| pgandhi999/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala | Scala | apache-2.0 | 20,158 |
import stainless.lang._
// Stainless verification benchmark: a write through a nested mutable field
// (b.a.y) must be visible on the immediately following read, so `update`
// always returns 17 and the `ensuring` postcondition verifies.
object ObjectHierarchyMutation1 {
case class A(var y: Int)
case class B(a: A)
def update(b: B): Int = {
b.a.y = 17
b.a.y
} ensuring(res => res == 17)
}
| epfl-lara/stainless | frontends/benchmarks/imperative/valid/ObjectHierarchyMutation1.scala | Scala | apache-2.0 | 196 |
package com.twitter.finagle.netty3
import com.twitter.finagle._
import com.twitter.finagle.IOExceptionStrings.FinestIOExceptionMessages
import com.twitter.finagle.netty3.channel._
import com.twitter.finagle.netty3.param.Netty3Timer
import com.twitter.finagle.netty3.ssl.server.SslServerConnectHandler
import com.twitter.finagle.netty3.transport.ChannelTransport
import com.twitter.finagle.param.{Label, Logger, Stats, Timer}
import com.twitter.finagle.server.{Listener, ServerRegistry}
import com.twitter.finagle.ssl.server.{
SslServerConfiguration, SslServerEngineFactory, SslServerSessionVerifier}
import com.twitter.finagle.stats.{ServerStatsReceiver, StatsReceiver}
import com.twitter.finagle.transport.Transport
import com.twitter.logging.HasLogLevel
import com.twitter.util.{CloseAwaitably, Duration, Future, Promise, Time}
import java.net.SocketAddress
import java.util.IdentityHashMap
import java.util.logging.Level
import org.jboss.netty.bootstrap.ServerBootstrap
import org.jboss.netty.channel._
import org.jboss.netty.channel.group._
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory
import org.jboss.netty.handler.ssl._
import org.jboss.netty.handler.timeout.{ReadTimeoutException, ReadTimeoutHandler}
import scala.collection.JavaConverters._
import scala.collection.mutable
object Netty3Listener {
/**
* Class Closer implements channel tracking and semi-graceful closing
* of this group of channels.
*/
private class Closer(timer: com.twitter.util.Timer) {
val activeChannels = new DefaultChannelGroup
private implicit val implicitTimer = timer
/**
* Close the channels managed by this Closer. Closer
*
* 1. Closes the `serverCh`, preventing new connections
* from being created;
* 2. Asks the service dispatchers associated with each
* managed channel to drain itself
* 3. Waiting for at most `grace`-duration, forcibly closes
* remaining channels.
*
* At the conclusion of this, the bootstrap is released.
*/
def close(bootstrap: ServerBootstrap, serverCh: Channel, deadline: Time): Future[Unit] = {
// According to NETTY-256, the following sequence of operations
// has no race conditions.
//
// - close the server socket (awaitUninterruptibly)
// - close all open channels (awaitUninterruptibly)
// - releaseExternalResources
//
// We modify this a little bit, to allow for graceful draining,
// closing open channels only after the grace period.
//
// The next step here is to do a half-closed socket: we want to
// suspend reading, but not writing to a socket. This may be
// important for protocols that do any pipelining, and may
// queue in their codecs.
// On cursory inspection of the relevant Netty code, this
// should never block (it is little more than a close() syscall
// on the FD).
serverCh.close().awaitUninterruptibly()
// At this point, no new channels may be created; drain existing
// ones.
val snap = activeChannels.asScala
val closing = new DefaultChannelGroupFuture(
activeChannels, snap.map(_.getCloseFuture).asJava)
val p = new Promise[Unit]
closing.addListener(new ChannelGroupFutureListener {
def operationComplete(f: ChannelGroupFuture) {
p.setDone()
}
})
// Once every channel has closed -- or the deadline passes, whichever
// comes first -- force-close stragglers and release the bootstrap.
p.by(deadline) transform { _ =>
activeChannels.close()
// Force close any remaining connections. Don't wait for success.
bootstrap.releaseExternalResources()
Future.Done
}
}
}
// Convenience overload: installs TLS accepting every session
// (no per-session verification).
def addTlsToPipeline(
pipeline: ChannelPipeline,
engineFactory: SslServerEngineFactory,
config: SslServerConfiguration
): Unit = addTlsToPipeline(pipeline, engineFactory, config, SslServerSessionVerifier.AlwaysValid)
// Prepends TLS support to `pipeline`: an SslHandler wrapping an engine built
// from `config`, fronted by an SslServerConnectHandler that completes the
// handshake, runs `sessionVerifier`, and shuts the engine down on disconnect.
def addTlsToPipeline(
pipeline: ChannelPipeline,
engineFactory: SslServerEngineFactory,
config: SslServerConfiguration,
sessionVerifier: SslServerSessionVerifier
): Unit = {
val engine = engineFactory(config)
val handler = new SslHandler(engine.self)
// Certain engine implementations need to handle renegotiation internally,
// as Netty's TLS protocol parser implementation confuses renegotiation and
// notification events. Renegotiation will be enabled for those Engines with
// a true handlesRenegotiation value.
handler.setEnableRenegotiation(engine.handlesRenegotiation)
pipeline.addFirst("ssl", handler)
// Netty's SslHandler does not provide SSLEngine implementations any hints that they
// are no longer needed (namely, upon disconnection.) Since some engine implementations
// make use of objects that are not managed by the JVM's memory manager, we need to
// know when memory can be released. This will invoke the shutdown method on implementations
// that define shutdown(): Unit. The SslServerConnectHandler also ensures that the SSL
// handshake is complete before continuing.
def onShutdown(): Unit =
try {
val sslEngine = engine.self
// Reflective call: shutdown() is not part of SSLEngine's interface.
val method = sslEngine.getClass.getMethod("shutdown")
method.invoke(sslEngine)
} catch {
case _: NoSuchMethodException =>
}
pipeline.addFirst(
"sslConnect",
new SslServerConnectHandler(handler, config, sessionVerifier, onShutdown))
}
// Default NIO server channel factory. releaseExternalResources is a no-op
// here because Executor/WorkerPool are shared process-wide resources.
val channelFactory: ServerChannelFactory =
new NioServerSocketChannelFactory(Executor, WorkerPool) {
override def releaseExternalResources() = () // no-op
}
/**
* A [[com.twitter.finagle.Stack.Param]] used to configure
* the ServerChannelFactory for a `Listener`.
*/
case class ChannelFactory(cf: ServerChannelFactory) {
def mk(): (ChannelFactory, Stack.Param[ChannelFactory]) =
(this, ChannelFactory.param)
}
object ChannelFactory {
implicit val param = Stack.Param(ChannelFactory(channelFactory))
}
/**
* Constructs a `Listener[In, Out]` given a netty3 `ChannelPipelineFactory`
* responsible for framing a `Transport` stream. The `Listener` is configured
* via the passed in [[com.twitter.finagle.Stack.Param]]'s.
*
* @see [[com.twitter.finagle.server.Listener]]
* @see [[com.twitter.finagle.transport.Transport]]
* @see [[com.twitter.finagle.param]]
*/
def apply[In, Out](
pipeline: ChannelPipelineFactory,
params: Stack.Params
): Listener[In, Out] = new Netty3Listener[In, Out](pipeline, params)
}
/**
* A listener using Netty3 which is given a ChannelPipelineFactory
* that yields ``Out``-typed upstream messages and accepts
* ``In``-typed downstream messages.
*
* @tparam Out the type of output messages
*
* @tparam In the type of input messages
*
* @param pipelineFactory The pipeline factory for encoding input
* messages and decoding output messages.
*
* @param params A collection of `Stack.Param` values used to
* configure the listener.
*/
class Netty3Listener[In, Out](
pipelineFactory: ChannelPipelineFactory,
params: Stack.Params)
extends Listener[In, Out] {
import Netty3Listener._
// Memoized ChannelStatsHandler per StatsReceiver, shared by every pipeline
// built for the same receiver (see channelStatsHandler below).
private[this] val statsHandlers = new IdentityHashMap[StatsReceiver, ChannelHandler]
private[this] val bootstrapOptions = makeBootstrapOptions(params)
// Parameters used throughout the listener
private[this] val Logger(logger) = params[Logger]
private[this] val Timer(timer) = params[Timer]
private[this] val ChannelFactory(channelFactory) = params[ChannelFactory]
// Named paramStatsReceiver to clarify which StatsReceiver is used where.
private[this] val Stats(paramStatsReceiver) = params[Stats]
// name is public for compatibility
val Label(name) = params[Label]
// Returns the shared ChannelStatsHandler for `statsReceiver`,
// creating and caching it on first use.
def channelStatsHandler(statsReceiver: StatsReceiver) = synchronized {
if (!(statsHandlers containsKey statsReceiver)) {
statsHandlers.put(statsReceiver, new ChannelStatsHandler(statsReceiver))
}
statsHandlers.get(statsReceiver)
}
// Accessible for testing
// Translates Stack.Params into netty3 ServerBootstrap option keys
// ("child." prefixed options apply to accepted connections).
private[this] def makeBootstrapOptions(params: Stack.Params): Map[String, Object] = {
val Listener.Backlog(backlog) = params[Listener.Backlog]
val Transport.BufferSizes(sendBufSize, recvBufSize) = params[Transport.BufferSizes]
val Transport.Liveness(readTimeout, writeTimeout, keepAlive) = params[Transport.Liveness]
val Transport.Options(noDelay, reuseAddr) = params[Transport.Options]
val opts = new mutable.HashMap[String, Object]()
opts += "soLinger" -> (0: java.lang.Integer)
opts += "reuseAddress" -> (reuseAddr: java.lang.Boolean)
opts += "child.tcpNoDelay" -> (noDelay: java.lang.Boolean)
for (v <- backlog) opts += "backlog" -> (v: java.lang.Integer)
for (v <- sendBufSize) opts += "child.sendBufferSize" -> (v: java.lang.Integer)
for (v <- recvBufSize) opts += "child.receiveBufferSize" -> (v: java.lang.Integer)
for (v <- keepAlive) opts += "child.keepAlive" -> (v: java.lang.Boolean)
for (v <- params[Listener.TrafficClass].value) {
opts += "trafficClass" -> (v: java.lang.Integer)
opts += "child.trafficClass" -> (v: java.lang.Integer)
}
opts.toMap
}
// Wire-level event logging, enabled via the Transport.Verbose param.
private[this] def makeChannelSnooper(params: Stack.Params): Option[ChannelSnooper] = {
val Label(label) = params[Label]
val Logger(logger) = params[Logger]
params[Transport.Verbose] match {
case Transport.Verbose(true) => Some(ChannelSnooper(label)(logger.log(Level.INFO, _, _)))
case _ => None
}
}
private[this] def addFirstSnooperHandlers(pipeline: ChannelPipeline, params: Stack.Params): Unit = {
val channelSnooper = makeChannelSnooper(params)
for (channelSnooper <- channelSnooper)
pipeline.addFirst("channelLogger", channelSnooper)
}
private[this] def addFirstStatsHandlers(
pipeline: ChannelPipeline,
params: Stack.Params,
statsReceiver: StatsReceiver
): Unit = {
if (!statsReceiver.isNull)
pipeline.addFirst("channelStatsHandler", channelStatsHandler(statsReceiver))
}
private[this] def addLastTimeoutHandlers(pipeline: ChannelPipeline, params: Stack.Params): Unit = {
val Netty3Timer(nettyTimer) = params[Netty3Timer]
val Transport.Liveness(channelReadTimeout, channelWriteCompletionTimeout, keepAlive) =
params[Transport.Liveness]
// Apply read timeouts *after* request decoding, preventing
// death from clients trying to DoS by slowly trickling in
// bytes to our (accumulating) codec.
if (channelReadTimeout < Duration.Top) {
val (timeoutValue, timeoutUnit) = channelReadTimeout.inTimeUnit
pipeline.addLast(
"readTimeout",
new ReadTimeoutHandler(nettyTimer, timeoutValue, timeoutUnit))
}
if (channelWriteCompletionTimeout < Duration.Top) {
pipeline.addLast(
"writeCompletionTimeout",
new WriteCompletionTimeoutHandler(timer, channelWriteCompletionTimeout))
}
}
// TLS, when configured, is prepended so it sits in front of everything else.
private[this] def addFirstTlsHandlers(pipeline: ChannelPipeline, params: Stack.Params): Unit = {
val SslServerEngineFactory.Param(serverEngine) = params[SslServerEngineFactory.Param]
val SslServerSessionVerifier.Param(sessionVerifier) = params[SslServerSessionVerifier.Param]
val Transport.ServerSsl(serverConfig) = params[Transport.ServerSsl]
for (config <- serverConfig) {
addTlsToPipeline(pipeline, serverEngine, config, sessionVerifier)
}
}
private[this] def addLastRequestStatsHandlers(
pipeline: ChannelPipeline,
params: Stack.Params,
statsReceiver: StatsReceiver
): Unit = {
if (!statsReceiver.isNull) {
pipeline.addLast(
"channelRequestStatsHandler",
new ChannelRequestStatsHandler(statsReceiver))
}
}
// The bridge must be last in the pipeline: it hands fully decoded
// messages off to the Transport (see ServerBridge below).
private[this] def addLastFinagleBridge(
pipeline: ChannelPipeline,
params: Stack.Params,
newBridge: () => ChannelHandler
): Unit = {
pipeline.addLast("finagleBridge", newBridge())
}
def newServerPipelineFactory(statsReceiver: StatsReceiver, newBridge: () => ChannelHandler) =
new ChannelPipelineFactory {
def getPipeline() = {
// The pipeline returned from the pipelineFactory already starts
// with protocol support. We are carefully adding handlers around
// the protocol support so that we do not break it.
val pipeline = pipelineFactory.getPipeline()
addFirstSnooperHandlers(pipeline, params)
addFirstStatsHandlers(pipeline, params, statsReceiver)
addLastTimeoutHandlers(pipeline, params)
addFirstTlsHandlers(pipeline, params)
addLastRequestStatsHandlers(pipeline, params, statsReceiver)
addLastFinagleBridge(pipeline, params, newBridge)
pipeline
}
}
// Binds to `addr` and serves each accepted connection's Transport with
// `serveTransport`. Closing the returned server drains active connections
// up to the supplied deadline (see Closer).
def listen(addr: SocketAddress)(serveTransport: Transport[In, Out] => Unit): ListeningServer =
new ListeningServer with CloseAwaitably {
val serverLabel = ServerRegistry.nameOf(addr) getOrElse name
// Scope stats under the registered server label when one exists.
val scopedStatsReceiver = paramStatsReceiver match {
case ServerStatsReceiver if serverLabel.nonEmpty =>
paramStatsReceiver.scope(serverLabel)
case sr => sr
}
val closer = new Closer(timer)
val newBridge = () => new ServerBridge(
serveTransport,
logger,
scopedStatsReceiver,
closer.activeChannels
)
val bootstrap = new ServerBootstrap(channelFactory)
bootstrap.setOptions(bootstrapOptions.asJava)
bootstrap.setPipelineFactory(
newServerPipelineFactory(scopedStatsReceiver, newBridge))
val ch = bootstrap.bind(addr)
def closeServer(deadline: Time) = closeAwaitably {
closer.close(bootstrap, ch, deadline)
}
def boundAddress = ch.getLocalAddress()
}
override def toString: String = "Netty3Listener"
}
/**
* Bridges a channel (pipeline) onto a transport. This must be
* installed as the last handler.
*/
/**
 * Bridges a channel (pipeline) onto a transport. This must be
 * installed as the last handler.
 *
 * On connect, the channel is registered with `channels` (so the listener's
 * Closer can drain it) and handed to `serveTransport`. On exception, the
 * failure is logged at a severity derived from the throwable and the
 * channel is closed.
 */
private[netty3] class ServerBridge[In, Out](
    serveTransport: Transport[In, Out] => Unit,
    log: java.util.logging.Logger,
    statsReceiver: StatsReceiver,
    channels: ChannelGroup)
  extends SimpleChannelHandler {

  private[this] val readTimeoutCounter = statsReceiver.counter("read_timeout")
  private[this] val writeTimeoutCounter = statsReceiver.counter("write_timeout")

  // Maps a throwable to a log level: expected connection-teardown noise is
  // logged at FINEST, everything else at WARNING (unless the exception
  // carries its own level via HasLogLevel).
  private[this] def severity(exc: Throwable): Level = exc match {
    case e: HasLogLevel => e.logLevel
    case
        _: java.nio.channels.ClosedChannelException
        // FIX: javax.net.ssl.SSLException was listed twice in this set of
        // alternatives; the redundant duplicate has been removed.
        | _: javax.net.ssl.SSLException
        | _: ReadTimeoutException
        | _: WriteTimedOutException => Level.FINEST
    case e: java.io.IOException if FinestIOExceptionMessages.contains(e.getMessage) =>
      Level.FINEST
    case _ => Level.WARNING
  }

  override def channelConnected(ctx: ChannelHandlerContext, e: ChannelStateEvent): Unit = {
    val channel = e.getChannel
    channels.add(channel)
    val transport = Transport.cast[In, Out](classOf[Any].asInstanceOf[Class[Out]], new ChannelTransport[Any, Any](channel)) // We are lying about this type
    serveTransport(transport)
    // FIX: forward the event we actually intercepted. This previously called
    // super.channelOpen; both SimpleChannelHandler defaults merely
    // sendUpstream(e), so behavior was equivalent, but the mismatched super
    // call was misleading and fragile under subclassing.
    super.channelConnected(ctx, e)
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent): Unit = {
    val cause = e.getCause
    // Count timeout classes separately for observability.
    cause match {
      case _: ReadTimeoutException => readTimeoutCounter.incr()
      case _: WriteTimedOutException => writeTimeoutCounter.incr()
      case _ => ()
    }
    val msg = "Unhandled exception in connection with " +
      e.getChannel.getRemoteAddress.toString +
      " , shutting down connection"
    log.log(severity(cause), msg, cause)
    if (e.getChannel.isOpen)
      Channels.close(e.getChannel)
  }
}
| koshelev/finagle | finagle-netty3/src/main/scala/com/twitter/finagle/netty3/Netty3Listener.scala | Scala | apache-2.0 | 15,626 |
package org.openstack.api.restful.ceilometer.v2.requests
import org.openstack.api.restful.ceilometer.v2.FilterExpressions.Query
/**
 * Parameters of a GET /v2/resources request against the Ceilometer v2 API.
 *
 * @param q filter queries applied server side (empty means no filtering)
 * @param meter_links when set, controls inclusion of meter links in the
 *                    response (presumably 0/1 — confirm against the API)
 * @author Antonio Murgia
 * @version 21/10/14.
 */
case class ResourcesListGETRequest(q : Seq[Query] = List(), meter_links : Option[Int] = None) {
// Endpoint path relative to the service root.
def relativeURL = "/v2/resources"
}
/**
 * spray-json conversion for [[ResourcesListGETRequest]]. Write-only:
 * request objects are serialized, never parsed back.
 */
object ResourcesListGETRequestJsonConversion extends spray.json.DefaultJsonProtocol{
  import spray.json._
  import org.openstack.api.restful.ceilometer.v2.FilterExpressions.JsonConversions._

  implicit object ResourcesListGETRequestJsonFormat extends RootJsonFormat[ResourcesListGETRequest]{
    override def write(obj: ResourcesListGETRequest) = {
      val fields = scala.collection.mutable.Map[String, JsValue]()
      if (obj.q.nonEmpty)
        fields("q") = obj.q.toJson
      // BUG FIX: meter_links was previously emitted under the key "limit"
      // (apparent copy-paste from a paginated request); the Ceilometer v2
      // resources API names this parameter "meter_links".
      for (links <- obj.meter_links)
        fields("meter_links") = links.toJson
      JsObject(fields.toMap)
    }
    // Deserializing request bodies is intentionally unsupported.
    override def read(json: JsValue) = ???
  }
}
/** Pow2.scala -> This file computes the power of 2 in fixed point using a lookup table with linear interpolation
Copyright (C) 2015 Stephen Tridgell
This file is part of a pipelined OLK application.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this code. If not, see <http://www.gnu.org/licenses/>.
*/
package OLK.Kernel
import Chisel._
import scala.collection.mutable.ArrayBuffer
/**
This file computes y = 2^(-gamma*x) in fixed point
It uses a a lookup table with a linear interpolation between the points
Both x and gamma must be positive
*/
// Hardware module computing y = 2^(-gamma*x) in fixed point via a lookup
// table over the fractional part with linear interpolation, followed by a
// right shift by the integer part. Both x and gamma must be positive.
// `stages` enables an optional pipeline register after each of the 5
// computation stages; when io.addToDict is asserted, the corresponding
// *Alt input replaces the locally computed value at each enabled register.
class Pow2(val bitWidth : Int, val fracWidth : Int, val stages : ArrayBuffer[Boolean],
val lookupTableSize : Int) extends Module {
Predef.assert(lookupTableSize > 0, "Table size must be greater than zero")
def log2Table: Int = { if (lookupTableSize == 1) 0 else log2Up(lookupTableSize) }
// Maximum legal shift amount: shifting by >= fracWidth would clear the value.
def limitShift: BigInt = { BigInt((1 << log2Up(fracWidth)) - 1) }
Predef.assert((1 << log2Table) == lookupTableSize, "Table size must be a power of 2")
Predef.assert(log2Table < fracWidth, "Table size must be smaller than the number of fractional bits")
Predef.assert(stages.length == 5, "The size of stages must be exactly 5")
// Mask selecting only the fractional bits of a Fixed value.
val FRAC = Fixed(BigInt((1 << fracWidth) - 1), bitWidth, fracWidth)
val io = new Bundle {
val x = Fixed(INPUT, bitWidth, fracWidth)
val gamma = Fixed(INPUT, bitWidth, fracWidth)
val addToDict = Bool(INPUT)
val xValAlt = Fixed(INPUT, bitWidth, fracWidth)
val xintAlt = UInt(INPUT, width=(bitWidth - fracWidth))
val xFracAlt = Fixed(INPUT, bitWidth, fracWidth)
val gradTabAlt = Fixed(INPUT, bitWidth, fracWidth)
val offTabAlt = Fixed(INPUT, bitWidth, fracWidth)
val xint1Alt = UInt(INPUT, width=(bitWidth - fracWidth))
val limitAlt = Bool(INPUT)
val gradAlt = Fixed(INPUT, bitWidth, fracWidth)
val offAlt = Fixed(INPUT, bitWidth, fracWidth)
val xint2Alt = UInt(INPUT, width=(log2Up(fracWidth) + 1))
val yFracAlt = Fixed(INPUT, bitWidth, fracWidth)
val yAlt = Fixed(INPUT, bitWidth, fracWidth)
val xValOut = Fixed(OUTPUT, bitWidth, fracWidth)
val xintOut = UInt(OUTPUT, width=(bitWidth - fracWidth))
val xFracOut = Fixed(OUTPUT, bitWidth, fracWidth)
val gradTabOut = Fixed(OUTPUT, bitWidth, fracWidth)
val offTabOut = Fixed(OUTPUT, bitWidth, fracWidth)
val xint1Out = UInt(OUTPUT, width=(bitWidth - fracWidth))
val limitOut = Bool(OUTPUT)
val gradOut = Fixed(OUTPUT, bitWidth, fracWidth)
val offOut = Fixed(OUTPUT, bitWidth, fracWidth)
val xint2Out = UInt(OUTPUT, width=(log2Up(fracWidth) + 1))
val yFracOut = Fixed(OUTPUT, bitWidth, fracWidth)
val yOut = Fixed(OUTPUT, bitWidth, fracWidth)
val y = Fixed(OUTPUT, bitWidth, fracWidth)
}
def optional[T <: Data](stage : Boolean, alt : T, calc : T) : T = {
// optional Register and Mux
if ( stage )
RegNext(Mux(io.addToDict, alt, calc))
else
calc
}
// Generate Table for linear interpolation
val gradients = new ArrayBuffer[Int]()
val offsets = new ArrayBuffer[Int]()
// Fixed point increment
val increment = 1.0 / (1 << log2Table)
val tableEnd = 1.0
var x = 0.0
// NOTE: x is positive, therefore gradient is negitive
while (x < tableEnd) {
// m = (y1 - y2)/(x1 - x2)
val m = -(scala.math.pow(2, - x) - scala.math.pow(2,- x - increment))/increment
// convert to Fixed
gradients += (m * (1 << fracWidth)).toInt
// b = y1 - m*x1
val b = scala.math.pow(2, - x) - m*x
// convert to Fixed
offsets += (b * (1 << fracWidth)).toInt
x += increment
}
// Create Lookup Tables gradTable(gradients) and offsetTable(offsets)
val gradTable = Vec(gradients.map((i: Int) => Fixed(BigInt(i), bitWidth, fracWidth)))
val offsetTable = Vec(offsets.map((i: Int) => Fixed(BigInt(i), bitWidth, fracWidth)))
// STAGE 0: multiply gamma*x
val xValOut = io.gamma*%io.x
io.xValOut := xValOut
val xVal = optional[Fixed](stages(0), io.xValAlt, xValOut)
// END OF STAGE 0
// STAGE 1: split x into integer part, fractional part and table index
val x_int = xVal(bitWidth - 1, fracWidth)
val x_frac = xVal & FRAC
val x_tabl = {
if ( log2Table == 0 )
UInt(0, width=1)
else
xVal(fracWidth - 1, fracWidth - log2Table)
}
// get values from lookup table
val gradTabOut = gradTable(x_tabl)
io.gradTabOut := gradTabOut
val offTabOut = offsetTable(x_tabl)
io.offTabOut := offTabOut
val xFracOut = x_frac
io.xFracOut := xFracOut
io.xintOut := x_int
val gradVal = optional[Fixed](stages(1), io.gradTabAlt, gradTabOut)
val offVal = optional[Fixed](stages(1), io.offTabAlt, offTabOut)
val xFracVal = optional[Fixed](stages(1), io.xFracAlt, xFracOut)
val x_int1 = optional[UInt](stages(1), io.xintAlt, x_int)
// END OF STAGE 1
// STAGE 2: calculate m*x and decide whether the final shift must saturate
val gradOut = gradVal*%xFracVal
val offOut = offVal
val limitOut = {
if ( bitWidth - fracWidth <= log2Up(fracWidth) )
Bool(false)
else
(x_int1 >= UInt(limitShift, width=(bitWidth - fracWidth)))
}
io.gradOut := gradOut
io.offOut := offOut
io.xint1Out := x_int1
io.limitOut := limitOut
val limit = optional[Bool](stages(2), io.limitAlt, limitOut)
val mxVal = optional[Fixed](stages(2), io.gradAlt, gradOut)
val offValReg = optional[Fixed](stages(2), io.offAlt, offOut)
val x_int2 = optional[UInt](stages(2), io.xint1Alt, x_int1)
// END OF STAGE 2
// STAGE 3: calculate y = mx + b and clamp the shift amount
val yFracOut = mxVal + offValReg
// Need to have a zero in MSB (which should be optimized out) so that not interpreted as negitive shift
val xint2Out = Mux(limit, UInt(limitShift, width=(log2Up(fracWidth) + 1)),
x_int2(scala.math.min(log2Up(fracWidth), bitWidth - fracWidth - 1), 0) & UInt(limitShift, width=(log2Up(fracWidth) + 1)))
io.yFracOut := yFracOut
io.xint2Out := xint2Out
val yFrac = optional[Fixed](stages(3), io.yFracAlt, yFracOut)
val x_int_delayed = optional[UInt](stages(3), io.xint2Alt, xint2Out)
// END OF STAGE 3
// STAGE 4: calculate y >> x_int (divide by 2^int_part)
val yOut = yFrac >> x_int_delayed
io.yOut := yOut
val y = optional[Fixed](stages(4), io.yAlt, yOut)
// return y
io.y := y
}
| da-steve101/chisel-pipelined-olk | src/main/scala/Kernel/Pow2.scala | Scala | gpl-2.0 | 6,724 |
package com.scalableQuality.quick.surface.commandLineOptions
/**
 * Builds Quick's scopt command-line parser and parses `args` into a
 * QuickState. Returns None on invalid input (scopt prints the error itself).
 */
object CommandLineParser {
private val quickName = "Quick"
// NOTE(review): the escaped backslashes mean this renders as the literal
// characters "\n\r" rather than a newline — confirm whether a real line
// break was intended before the release string.
private val quickVersion = "\\n\\rrelease: 0.6.2"
def apply(args: Array[String]): Option[QuickState] =
new scopt.OptionParser[QuickState](quickName) {
head(quickName, quickVersion)
version("version")
help("help")
// -d/--description: required path to the validation description file.
opt[String]('d', "description")
.action { (optionValue, config) =>
config.copy(descriptionFile = optionValue)
}
.required()
// -i/--id: optional id selecting one description within the file.
opt[String]('i', "id")
.action { (optionValue, config) =>
config.copy(descriptionId = Some(optionValue))
}
.optional()
// -l/--label: up to two labels (presumably one per input file).
opt[String]('l', "label")
.action { (optionValue, config) =>
config.addLabel(optionValue)
}
.optional()
.maxOccurs(2)
// -m/--matchOnly: restrict processing to the row-matching phase.
opt[Unit]('m', "matchOnly")
.action { (_, config) =>
config.copy(rowsProcessingPhase = QuickState.matchRows)
}
.optional()
// -u/--unknownRows: sets ignoreUnknownRows = true.
opt[Unit]('u', "unknownRows")
.action { (_, config) =>
config.copy(ignoreUnknownRows = true)
}
.optional()
// Exactly two positional file arguments are required.
arg[String]("<leftFile> <rightFile>")
.action { (optionValue, config) =>
config.addFile(optionValue)
}
.required()
.minOccurs(2)
.maxOccurs(2)
}.parse(args, QuickState())
}
| MouslihAbdelhakim/Quick | src/main/scala/com/scalableQuality/quick/surface/commandLineOptions/CommandLineParser.scala | Scala | apache-2.0 | 1,399 |
package skychat.irc
import org.bukkit.{BanList, Bukkit}
import org.pircbotx.hooks.events.MessageEvent
import scala.collection.JavaConverters._
/**
 * IRC command registry: `init` reflects over `Impl`'s methods and registers
 * each one (keyed by method name) as an IRC command handler.
 */
object Commands {
  def init(): Unit = {
    import reflect.runtime.universe._
    val r = reflect.runtime.currentMirror.reflect(Impl)
    // NOTE: `members` also yields inherited/synthetic methods (toString,
    // hashCode, ...); those entries are inert unless invoked by name.
    SkyChatIrc.listen(r.symbol.typeSignature.members.collect {
      case s: MethodSymbol if s.isMethod => r.reflectMethod(s)
    }.map { m =>
      m.symbol.name.toString.trim -> ((s: String, msg: MessageEvent) => m.apply(s, msg): Unit)
    }.toMap)
  }

  object Impl {
    /** Replies with the display names of all online players. */
    def list(s: String, m: MessageEvent): Unit = {
      m.respondWith(s"Online: ${Bukkit.getOnlinePlayers.asScala.map(_.getDisplayName).mkString(", ")}")
    }

    // "<player>" optionally followed by " <reason>".
    private val kickArgs = """(\w+)(?: (.+))?""".r

    /** Kicks a player from the server. Channel operators only. */
    def kick(args: String, m: MessageEvent): Unit =
      if (m.getChannel.isOp(m.getUser)) {
        args match {
          // BUG FIX: the regex has two capture groups, so the old one-binding
          // pattern `kickArgs(who)` could never match — the "no reason"
          // branch was dead and reason-less kicks produced "...: null.".
          // The optional group binds null when absent; match on that.
          case kickArgs(who, null) =>
            // Robustness: Bukkit.getPlayer returns null for unknown/offline
            // players, which previously threw a NullPointerException.
            Option(Bukkit.getPlayer(who)) match {
              case Some(player) =>
                player.kickPlayer(s"Kicked from IRC by ${m.getUser.getNick}.")
                m.respond("Successfully kicked")
              case None =>
                m.respond(s"No such player: $who")
            }
          case kickArgs(who, msg) =>
            Option(Bukkit.getPlayer(who)) match {
              case Some(player) =>
                player.kickPlayer(s"Kicked from IRC by ${m.getUser.getNick}: $msg.")
              case None =>
                m.respond(s"No such player: $who")
            }
          case _ => m.respond("Incorrect syntax, try again.")
        }
      }
      else m.respond("You can't do that!")
  }
}
| robotbrain/skychat | src/main/scala/skychat/irc/Commands.scala | Scala | apache-2.0 | 1,310 |
package mesosphere.marathon.api.v2
import java.net._
import com.wix.accord._
import mesosphere.marathon.{ AllConf, ValidationFailedException }
import mesosphere.marathon.state.FetchUri
import org.slf4j.LoggerFactory
import play.api.libs.json._
import scala.collection.GenTraversableOnce
import scala.reflect.ClassTag
import scala.util.Try
object Validation {
/** Runs the implicit validator on `t`; returns `t` unchanged when valid,
  * otherwise throws a ValidationFailedException carrying the Failure. */
def validateOrThrow[T](t: T)(implicit validator: Validator[T]): T =
  validate(t) match {
    case failure: Failure => throw new ValidationFailedException(t, failure)
    case Success => t
  }
/** Lifts a Validator[T] to Validator[Option[T]]: None is always valid,
  * Some(x) is valid iff `x` passes the underlying validator. */
implicit def optional[T](implicit validator: Validator[T]): Validator[Option[T]] =
  new Validator[Option[T]] {
    override def apply(maybe: Option[T]): Result =
      maybe.fold[Result](Success)(validator)
  }
// Lifts a Validator[T] to Validator[Iterable[T]]: each element is validated
// and failures are collected into a single outer GroupViolation whose
// children are per-element GroupViolations tagged with the element index.
implicit def every[T](implicit validator: Validator[T]): Validator[Iterable[T]] = {
new Validator[Iterable[T]] {
override def apply(seq: Iterable[T]): Result = {
// Pair each element with its result and index, keeping only failures.
val violations = seq.map(item => (item, validator(item))).zipWithIndex.collect {
case ((item, f: Failure), pos: Int) => GroupViolation(item, "not valid", Some(s"($pos)"), f.violations)
}
if (violations.isEmpty) Success
else Failure(Set(GroupViolation(seq, "Seq contains elements, which are not valid.", None, violations.toSet)))
}
}
}
// Serializes an accord Failure as {"message": ..., "details": [...]}, where
// each detail entry groups all constraint messages sharing the same
// '/'-rooted path (paths are produced by allRuleViolationsWithFullDescription).
implicit lazy val failureWrites: Writes[Failure] = Writes { f =>
Json.obj(
"message" -> "Object is not valid",
"details" -> {
f.violations
.flatMap(allRuleViolationsWithFullDescription(_))
.groupBy(_.description)
.map {
case (description, ruleViolation) =>
Json.obj(
"path" -> description,
"errors" -> ruleViolation.map(r => JsString(r.constraint))
)
}
})
}
def allRuleViolationsWithFullDescription(violation: Violation,
parentDesc: Option[String] = None,
prependSlash: Boolean = false): Set[RuleViolation] = {
def concatPath(parent: String, child: Option[String], slash: Boolean): String = {
child.map(c => parent + { if (slash) "/" else "" } + c).getOrElse(parent)
}
violation match {
case r: RuleViolation => Set(
parentDesc.map {
p =>
r.description.map {
// Error is on object level, having a parent description. Omit 'value', prepend '/' as root.
case "value" => r.withDescription("/" + p)
// Error is on property level, having a parent description. Prepend '/' as root.
case s: String => r.withDescription(concatPath("/" + p, r.description, prependSlash))
// Error is on unknown level, having a parent description. Prepend '/' as root.
} getOrElse r.withDescription("/" + p)
} getOrElse {
r.withDescription(r.description.map {
// Error is on object level, having no parent description, being a root error.
case "value" => "/"
// Error is on property level, having no parent description, being a property of root error.
case s: String => "/" + s
} getOrElse "/")
})
case g: GroupViolation => g.children.flatMap { c =>
val dot = g.value match {
case _: Iterable[_] => false
case _ => true
}
val desc = parentDesc.map {
p => Some(concatPath(p, g.description, prependSlash))
} getOrElse {
g.description.map(d => concatPath("", Some(d), prependSlash))
}
allRuleViolationsWithFullDescription(c, desc, dot)
}
}
}
def urlCanBeResolvedValidator: Validator[String] = {
new Validator[String] {
def apply(url: String) = {
Try {
new URL(url).openConnection() match {
case http: HttpURLConnection =>
http.setRequestMethod("HEAD")
if (http.getResponseCode == HttpURLConnection.HTTP_OK) Success
else Failure(Set(RuleViolation(url, "URL could not be resolved.", None)))
case other: URLConnection =>
other.getInputStream
Success //if we come here, we could read the stream
}
}.getOrElse(
Failure(Set(RuleViolation(url, "URL could not be resolved.", None)))
)
}
}
}
def fetchUriIsValid: Validator[FetchUri] = {
new Validator[FetchUri] {
def apply(uri: FetchUri) = {
try {
new URI(uri.uri)
Success
}
catch {
case _: URISyntaxException => Failure(Set(RuleViolation(uri.uri, "URI has invalid syntax.", None)))
}
}
}
}
def elementsAreUnique[A](errorMessage: String = "Elements must be unique."): Validator[Seq[A]] = {
new Validator[Seq[A]] {
def apply(seq: Seq[A]) = areUnique(seq, errorMessage)
}
}
def elementsAreUniqueBy[A, B](fn: A => B,
errorMessage: String = "Elements must be unique.",
filter: B => Boolean = { _: B => true }): Validator[Seq[A]] = {
new Validator[Seq[A]] {
def apply(seq: Seq[A]) = areUnique(seq.map(fn).filter(filter), errorMessage)
}
}
def elementsAreUniqueByOptional[A, B](fn: A => GenTraversableOnce[B],
errorMessage: String = "Elements must be unique.",
filter: B => Boolean = { _: B => true }): Validator[Seq[A]] = {
new Validator[Seq[A]] {
def apply(seq: Seq[A]) = areUnique(seq.flatMap(fn).filter(filter), errorMessage)
}
}
def elementsAreUniqueWithFilter[A](fn: A => Boolean,
errorMessage: String = "Elements must be unique."): Validator[Seq[A]] = {
new Validator[Seq[A]] {
def apply(seq: Seq[A]) = areUnique(seq.filter(fn), errorMessage)
}
}
private[this] def areUnique[A](seq: Seq[A], errorMessage: String): Result = {
if (seq.size == seq.distinct.size) Success
else Failure(Set(RuleViolation(seq, errorMessage, None)))
}
def theOnlyDefinedOptionIn[A <: Product: ClassTag, B](product: A): Validator[Option[B]] =
new Validator[Option[B]] {
def apply(option: Option[B]) = {
option match {
case Some(prop) =>
val n = product.productIterator.count {
case Some(_) => true
case _ => false
}
if (n == 1)
Success
else
Failure(Set(RuleViolation(product, s"not allowed in conjunction with other properties.", None)))
case None => Success
}
}
}
def oneOf[T <: AnyRef](options: Set[T]): Validator[T] = {
import ViolationBuilder._
new NullSafeValidator[T](
test = options.contains,
failure = _ -> s"is not one of (${options.mkString(",")})"
)
}
def oneOf[T <: AnyRef](options: T*): Validator[T] = {
import ViolationBuilder._
new NullSafeValidator[T](
test = options.contains,
failure = _ -> s"is not one of (${options.mkString(",")})"
)
}
def configValueSet[T <: AnyRef](config: String*): Validator[T] =
isTrue(s"""You have to supply ${config.mkString(", ")} on the command line.""") { _ =>
config.forall(AllConf.suppliedOptionNames)
}
def isTrue[T](constraint: String)(test: T => Boolean): Validator[T] = new Validator[T] {
import ViolationBuilder._
override def apply(value: T): Result = {
if (test(value)) Success else RuleViolation(value, constraint, None)
}
}
/**
* For debugging purposes only.
* Since the macro removes all logging statements in the validator itself.
* Usage: info("message") { yourValidator }
*/
def info[T](message: String)(implicit validator: Validator[T]): Validator[T] = new Validator[T] {
override def apply(t: T): Result = {
LoggerFactory.getLogger(Validation.getClass).info(s"Validate: $message on $t")
validator(t)
}
}
}
| vivekjuneja/marathon | src/main/scala/mesosphere/marathon/api/v2/Validation.scala | Scala | apache-2.0 | 8,117 |
package org.jetbrains.plugins.scala
package compiler
import java.util.UUID
import com.intellij.compiler.server.BuildManagerListener
import com.intellij.notification.{Notification, NotificationType, Notifications}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.compiler.{CompileContext, CompileTask, CompilerManager}
import com.intellij.openapi.components.ProjectComponent
import com.intellij.openapi.module.{Module, ModuleManager}
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.{CompilerModuleExtension, ModuleRootManager}
import com.intellij.openapi.ui.Messages
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.project._
/**
* Pavel Fatin
*/
// Project component coordinating the Scala compile server with the IDE build
// process: (re)starts the server before builds, validates compiler output
// settings, and enforces compile-server usage for Dotty projects.
class ServerMediator(project: Project) extends ProjectComponent {

  // True when at least one module of the project has Scala configured.
  private def isScalaProject = project.hasScala
  private val settings = ScalaCompileServerSettings.getInstance

  private val connection = project.getMessageBus.connect

  // Before every build of a Scala project: refresh the status widget, restart
  // the server if its settings changed, and start it if it is not running.
  private val serverLauncher = new BuildManagerListener {
    override def beforeBuildProcessStarted(project: Project, uuid: UUID): Unit = {}

    override def buildStarted(project: Project, sessionId: UUID, isAutomake: Boolean): Unit = {
      if (settings.COMPILE_SERVER_ENABLED && isScalaProject) {
        invokeAndWait {
          CompileServerManager.instance(project).configureWidget()
        }

        // A settings change requires stopping the server so it restarts below.
        if (CompileServerLauncher.needRestart(project)) {
          CompileServerLauncher.instance.stop()
        }

        if (!CompileServerLauncher.instance.running) {
          invokeAndWait {
            CompileServerLauncher.instance.tryToStart(project)
          }
        }
      }
    }

    override def buildFinished(project: Project, sessionId: UUID, isAutomake: Boolean): Unit = {}
  }

  connection.subscribe(BuildManagerListener.TOPIC, serverLauncher)

  // Pre-compile task: returning false from execute() cancels the build when
  // the compilation settings check fails.
  private val checkSettingsTask = new CompileTask {
    def execute(context: CompileContext): Boolean = {
      if (isScalaProject) {
        if (!checkCompilationSettings()) false
        else true
      }
      else true
    }
  }

  // Pre-compile task: Dotty projects require the compile server; show an error
  // notification with a "Configure" link and cancel the build when it is off.
  private val checkCompileServerDottyTask = new CompileTask {
    override def execute(context: CompileContext): Boolean = {
      if (!settings.COMPILE_SERVER_ENABLED && project.hasDotty) {
        val title = "Enable Scala Compile Server"
        val content = s"<html><body>Dotty projects require Scala Compile Server<br> <a href=''>Configure</a></body></html>"
        Notifications.Bus.notify(new Notification("scala", title, content, NotificationType.ERROR, CompileServerLauncher.ConfigureLinkListener))
        false
      }
      else true
    }
  }

  CompilerManager.getInstance(project).addBeforeTask(checkSettingsTask)
  CompilerManager.getInstance(project).addBeforeTask(checkCompileServerDottyTask)

  // Detects Scala modules whose production and test compile output point at the
  // same directory and offers to split them automatically. Returns false
  // (cancelling compilation) when the user declines.
  private def checkCompilationSettings(): Boolean = {
    def hasClashes(module: Module) = module.hasScala && {
      val extension = CompilerModuleExtension.getInstance(module)
      val production = extension.getCompilerOutputUrl
      val test = extension.getCompilerOutputUrlForTests
      production == test
    }
    val modulesWithClashes = ModuleManager.getInstance(project).getModules.toSeq.filter(hasClashes)

    var result = true

    if (modulesWithClashes.nonEmpty) {
      invokeAndWait {
        // In unit-test mode skip the dialog and always split automatically.
        val choice =
          if (!ApplicationManager.getApplication.isUnitTestMode) {
            Messages.showYesNoDialog(project,
              "Production and test output paths are shared in: " + modulesWithClashes.map(_.getName).mkString(" "),
              "Shared compile output paths in Scala module(s)",
              "Split output path(s) automatically", "Cancel compilation", Messages.getErrorIcon)
          }
          else Messages.YES

        val splitAutomatically = choice == Messages.YES

        if (splitAutomatically) {
          inWriteAction {
            modulesWithClashes.foreach { module =>
              val model = ModuleRootManager.getInstance(module).getModifiableModel
              val extension = model.getModuleExtension(classOf[CompilerModuleExtension])

              // Derive a test output directory next to the production one:
              // ".../classes" -> ".../test-classes", anything else -> ".../test".
              val outputUrlParts = extension.getCompilerOutputUrl match {
                case null => Seq.empty
                case url => url.split("/").toSeq
              }
              val nameForTests = if (outputUrlParts.lastOption.contains("classes")) "test-classes" else "test"

              extension.inheritCompilerOutputPath(false)
              extension.setCompilerOutputPathForTests((outputUrlParts.dropRight(1) :+ nameForTests).mkString("/"))

              model.commit()
            }

            project.save()
          }
        }

        // NOTE(review): mutated from inside the invokeAndWait closure; relies
        // on invokeAndWait blocking until the closure has run — confirm.
        result = splitAutomatically
      }
    }

    result
  }

  def getComponentName = getClass.getSimpleName

  def initComponent() {}

  def disposeComponent() {}

  def projectOpened() {}

  def projectClosed() {}
}
| whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/compiler/ServerMediator.scala | Scala | apache-2.0 | 4,890 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.specs2.mutable.Specification
import reactivemongo.ReactiveMongoHelper
import reactivemongo.api.MongoConnectionOptions
/**
*
*/
/** Verifies that ReactiveMongoHelper passes the supplied connection options
  * through to the underlying Mongo connection unchanged. */
class MongoConnectorSpec extends Specification {

  "ReactiveMongoHelper" should {

    "create a Mongo connection with the given options" in {
      val options = MongoConnectionOptions(connectTimeoutMS = 1000)
      val helper = ReactiveMongoHelper("mongo-server", Seq("mongo1"), Seq(), None, options)

      helper.connection.options.connectTimeoutMS shouldEqual 1000
    }
  }
}
| hmrc/reactivemongo-json | src/test/scala/MongoConnectorSpec.scala | Scala | apache-2.0 | 1,103 |
package akka
import akka.actor.SupervisorStrategy.{Restart, Resume, Stop}
import akka.actor.{Actor, ActorSystem, OneForOneStrategy, Props}
/**
* Created by Om Prakash C on 20-06-2017.
*/
/** Demonstrates Akka supervision: a parent actor spawns children and applies a
  * OneForOneStrategy that resumes a child on ArithmeticException and restarts
  * it on any other Exception. Lifecycle hooks print when they fire. */
object SupervisorExample extends App {

  // Protocol messages.
  case object CreateChild
  case class SignalChild(order: Int)
  case class PrintSignal(order: Int)
  case class DivideNumbers(n: Int, d: Int)
  case object BadStuff

  class ParentActor extends Actor {
    private var number = 0

    // Resume on arithmetic failures, restart the child on anything else.
    override val supervisorStrategy = OneForOneStrategy(loggingEnabled = false) {
      case _: ArithmeticException => Resume
      case _: Exception => Restart
    }

    def receive = {
      case CreateChild =>
        context.actorOf(Props[ChildActor], s"Child$number")
        number += 1
      case SignalChild(n) =>
        for (child <- context.children) child ! PrintSignal(n)
    }
  }

  class ChildActor extends Actor {
    println("Child Created")

    def receive = {
      case PrintSignal(n) => println(n + " " + self)
      case DivideNumbers(n, d) => println(n / d)
      case BadStuff => throw new RuntimeException("Bad Stuff happened")
    }

    override def preStart(): Unit = {
      super.preStart()
      println("Child Pre Start")
    }

    override def postStop(): Unit = {
      super.postStop()
      println("Child Post Stop")
    }

    override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
      super.preRestart(reason, message)
      println("Child Pre Restart")
    }

    override def postRestart(reason: Throwable): Unit = {
      super.postRestart(reason)
      println("Child Post Restart")
    }
  }

  val system = ActorSystem("HierarchySystem")
  val actor = system.actorOf(Props[ParentActor], "Parent1")
  val actor2 = system.actorOf(Props[ParentActor], "Parent2")

  actor ! CreateChild
  //actor ! CreateChild

  // Exercise the supervision strategy: Resume (ArithmeticException), a normal
  // message, then Restart (RuntimeException).
  val child0 = system.actorSelection("/user/Parent1/Child0")
  child0 ! DivideNumbers(4, 0)
  child0 ! DivideNumbers(4, 2)
  child0 ! BadStuff

  Thread.sleep(2000)
  system.terminate()
}
| comprakash/learning-scala | concurrency/src/main/scala/akka/SupervisorExample.scala | Scala | gpl-3.0 | 2,012 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.serving.core.models.policy
// Value object holding the path of a user-provided jar (policy model).
case class UserJar(jarPath: String)
| Frannie-Ludmilla/sparta | serving-core/src/main/scala/com/stratio/sparta/serving/core/models/policy/UserJar.scala | Scala | apache-2.0 | 712 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.rdbms.fs.postgres
import slamdata.Predef._
import quasar.physical.rdbms.fs.RdbmsDescribeTable
import quasar.physical.rdbms.common._
import quasar.physical.rdbms.common.TablePath._
import doobie.imports._
import quasar.physical.rdbms.model._
import quasar.physical.rdbms.model.TableModel._
import scalaz._
import Scalaz._
/** Postgres implementation of [[RdbmsDescribeTable]]: inspects
  * `information_schema` to discover schemas, tables and column types.
  *
  * NOTE(review): schema and table names are spliced into SQL through
  * `Fragment.const`, which performs no escaping; this is safe only as long as
  * `TablePath`/`Schema` values are never attacker-controlled — confirm.
  */
trait PostgresDescribeTable extends RdbmsDescribeTable {

  // Runs "SELECT COLUMN_NAME, DATA_TYPE FROM information_schema.COLUMNS <where>"
  // and maps the resulting (column name, type name) rows with `mapResult`.
  private def descQuery[F[_], T](
      whereClause: Fragment,
      mapResult: List[(String, String)] => T): ConnectionIO[T] = {
    (fr"SELECT COLUMN_NAME, DATA_TYPE FROM information_schema.COLUMNS"
      ++ whereClause)
      .query[(String, String)]
      .list
      .map(mapResult)
  }

  // WHERE clause matching a single table identified by schema and name.
  private def whereSchemaAndTable(tablePath: TablePath): Fragment = {
    fr"WHERE TABLE_SCHEMA =" ++
      Fragment.const("'" + tablePath.schema.shows + "'") ++
      fr"AND TABLE_NAME =" ++
      Fragment.const("'" + tablePath.table.shows + "'")
  }

  // WHERE clause matching a schema; the root schema matches everything.
  private def whereSchema(schema: Schema): Fragment =
    if (schema.isRoot)
      fr""
    else
      fr"WHERE TABLE_SCHEMA =" ++
        Fragment.const("'" + schema.shows + "'")

  /** Lists the tables directly contained in `schema`; the root schema has no
    * tables of its own and yields an empty result. */
  override def findChildTables(schema: Schema): ConnectionIO[Vector[TableName]] = {
    if (schema.isRoot)
      Vector.empty.point[ConnectionIO]
    else
      (fr"select TABLE_NAME from information_schema.tables" ++ whereSchema(schema))
        .query[String]
        .vector
        .map(_.map(TableName.apply))
  }

  /** Lists schemas nested under `parent`: children are recognised by the
    * "<parent><Separator>..." name prefix; the root matches all schemas. */
  override def findChildSchemas(parent: Schema): ConnectionIO[Vector[Schema]] = {
    val whereClause = if (parent.isRoot)
      fr""
    else
      fr"WHERE SCHEMA_NAME LIKE" ++ Fragment.const("'" + parent.shows + Separator + "%'")
    (fr"SELECT SCHEMA_NAME FROM information_schema.schemata" ++ whereClause)
      .query[String]
      .vector
      .map(_.map(Schema.apply))
  }

  /** True when the schema exists; the root schema always exists. */
  override def schemaExists(schema: Schema): ConnectionIO[Boolean] =
    if (schema.isRoot) true.point[ConnectionIO]
    else
      sql"""SELECT 1 FROM information_schema.schemata WHERE SCHEMA_NAME=${schema.shows}"""
        .query[Int]
        .list
        .map(_.nonEmpty)

  /** True when the table has at least one column in information_schema. */
  override def tableExists(
      tablePath: TablePath): ConnectionIO[Boolean] =
    descQuery(whereSchemaAndTable(tablePath), _.nonEmpty)

  /** Derives the table's model from its column types: a single JSON column
    * means a JSON-document table, several columns mean a columnar table, and
    * no columns mean the table does not exist (None). */
  def tableModel(tablePath: TablePath): ConnectionIO[Option[TableModel]] = {
    val cols = descQuery(whereSchemaAndTable(tablePath), _.map {
      case (colName, colTypeStr) =>
        ColumnDesc(colName, colTypeStr.mapToColumnType)
    })

    cols.map {
      case Nil => None
      case c :: Nil if c.tpe === JsonCol => Some(JsonTable)
      case multipleCols => Some(ColumnarTable.fromColumns(multipleCols))
    }
  }
}
| jedesah/Quasar | rdbms/src/main/scala/quasar/physical/rdbms/fs/postgres/PostgresDescribeTable.scala | Scala | apache-2.0 | 3,267 |
/**
* Copyright (C) 2013 Adam Retter (adam.retter@googlemail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shadoop
import org.apache.commons.logging.{LogFactory, Log}
/** Mixin providing lazily-evaluated commons-logging helpers: the message
  * expression is only built when the corresponding log level is enabled. */
trait Logging {
  other =>

  private lazy val logger: Log = LogFactory.getLog(other.getClass())

  /** Logs `message` at DEBUG; the message is built only if DEBUG is enabled. */
  protected def debug(message: => String) = log(logger.isDebugEnabled, logger.debug, message)

  /** Logs `message` at INFO; the message is built only if INFO is enabled. */
  protected def info(message: => String) = log(logger.isInfoEnabled, logger.info, message)

  // FIX: `message` must be by-name here as well. With the previous strict
  // `String` parameter, the by-name argument from debug/info was forced at the
  // call to `log`, before the level check — defeating the lazy-message purpose.
  private def log(fc: => Boolean, fl: (String) => Unit, message: => String) = if (fc) fl(message)
}
| adamretter/Shadoop | src/main/scala/shadoop/Logging.scala | Scala | apache-2.0 | 1,082 |
package services.datetime
import org.specs2.mutable.Specification
import services.SDate
import services.graphstages.Crunch.europeLondonTimeZone
class MidnightTimeZoneSpec extends Specification {

  // Interpret an ISO date-time string in the Europe/London time zone.
  def asLocalTimeZone(localDateTimeString: String) = SDate(localDateTimeString, europeLondonTimeZone)

  "When finding the last local midnight for UTC Date during BST" >> {
    // FIX: 2017-10-23T22:00Z is 10 PM UTC (11 PM BST); description previously said "11 AM".
    "Given 10 PM UTC on October 23rd 2017 (During BST) we should get 2017-10-23T00:00:00+01:00 as last local midnight" >> {
      val currentTime = SDate("2017-10-23T22:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2017-10-23T00:00:00+01:00")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
    "Given 12 AM UTC on October 23rd 2017 (During BST) we should get 2017-10-23T00:00:00+01:00 as last local midnight" >> {
      val currentTime = SDate("2017-10-23T00:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2017-10-23T00:00:00+01:00")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
    // 2017-10-22T23:00Z is exactly midnight Oct 23rd in BST, so the last local
    // midnight rolls forward to Oct 23rd local time.
    "Given 11 PM UTC on October 22nd 2017 (During BST) we should get 2017-10-23T00:00:00+01:00 as last local midnight" >> {
      val currentTime = SDate("2017-10-22T23:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2017-10-23T00:00:00+01:00")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
  }

  "When finding the last local midnight for UTC Date during GMT" >> {
    // FIX: 2018-01-02T22:00Z is 10 PM UTC/GMT; description previously said "11 AM".
    "Given 10 PM UTC on January 2nd 2018 (During GMT) we should get 2018-01-02T00:00:00Z as last local midnight" >> {
      val currentTime = SDate("2018-01-02T22:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2018-01-02T00:00:00Z")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
    "Given 12 AM UTC on January 2nd 2018 (During GMT) we should get 2018-01-02T00:00:00Z as last local midnight" >> {
      val currentTime = SDate("2018-01-02T00:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2018-01-02T00:00:00Z")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
    "Given 11 PM UTC on January 1st 2018 (During GMT) we should get 2018-01-01T00:00:00Z as last local midnight" >> {
      val currentTime = SDate("2018-01-01T23:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = asLocalTimeZone("2018-01-01T00:00:00Z")

      result.millisSinceEpoch === expected.millisSinceEpoch
    }
  }

  "When switching timezones on the first day of BST" >> {
    "Given midnight UTC/BST on 31/03/2019 then we should get 2019-03-31T00:00:00Z" >> {
      val currentTime = SDate("2019-03-31T00:00Z")

      val result = currentTime.getLocalLastMidnight
      val expected = "2019-03-31T00:00:00Z"

      SDate(result.millisSinceEpoch).toISOString === expected
    }
    // 2019-03-31T23:00:00Z is midnight April 1st in BST.
    "Given midnight BST on 01/04/2019 then we should get 2019-03-31T23:00:00Z" >> {
      val bstMidnight = SDate("2019-03-31T23:00:00Z")

      val result = bstMidnight.getLocalLastMidnight
      val expected = "2019-03-31T23:00:00Z"

      SDate(result.millisSinceEpoch).toISOString === expected
    }
  }
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/services/datetime/MidnightTimeZoneSpec.scala | Scala | apache-2.0 | 3,260 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fnothaft.gnocchi.models.variant.logistic
import net.fnothaft.gnocchi.algorithms.siteregression.AdditiveLogisticRegression
import org.apache.commons.math3.linear.SingularMatrixException
import org.bdgenomics.formats.avro.Variant
case class AdditiveLogisticVariantModel(variantId: String,
                                        variant: Variant,
                                        weights: List[Double],
                                        geneticParameterStandardError: Double,
                                        pValue: Double,
                                        numSamples: Int,
                                        phenotype: String,
                                        phaseSetId: Int = 0)
    extends LogisticVariantModel[AdditiveLogisticVariantModel]
    with AdditiveLogisticRegression with Serializable {

  val modelType = "Additive Logistic Variant Model"
  override val regressionName = "Additive Logistic Regression"

  /**
   * Updates the LogisticVariantModel given a new batch of data
   *
   * @param observations Array containing data at the particular site for
   *                     all samples. Format of each element is:
   *                     (gs, Array(pheno, covar1, ... covarp))
   *                     where gs is the diploid genotype at that site for the
   *                     given sample [0, 1, or 2], pheno is the sample's value for
   *                     the primary phenotype being regressed on, and covar1-covarp
   *                     are that sample's values for each covariate.
   * @return this model merged with a model fitted to the new batch
   */
  def update(observations: Array[(Double, Array[Double])]): AdditiveLogisticVariantModel = {
    // FIX: removed leftover debug println of a zeroed array that ran on every
    // update call.
    //TODO: add validation stringency here rather than just creating empty association object
    val batchVariantModel = try {
      applyToSite(observations, variant, phenotype, phaseSetId)
        .toVariantModel
    } catch {
      case error: SingularMatrixException => {
        // The regression could not be solved for this batch (singular design
        // matrix): fall back to an empty association with zeroed weights so
        // the merge can still proceed.
        AdditiveLogisticRegression.constructAssociation(variantId,
          1,
          "",
          new Array[Double](observations.head._2.length + 1),
          0.0,
          variant,
          "",
          0.0,
          0.0,
          0,
          Map(("", ""))).toVariantModel
      }
    }
    mergeWith(batchVariantModel)
  }

  /** Builds a fresh model instance carrying the updated statistics while
    * keeping this model's phenotype and phase-set id. */
  def constructVariantModel(variantId: String,
                            variant: Variant,
                            updatedGeneticParameterStandardError: Double,
                            updatedPValue: Double,
                            updatedWeights: List[Double],
                            updatedNumSamples: Int): AdditiveLogisticVariantModel = {
    AdditiveLogisticVariantModel(variantId,
      variant,
      updatedWeights,
      updatedGeneticParameterStandardError,
      updatedPValue,
      updatedNumSamples,
      phenotype,
      phaseSetId)
  }
}
| Veryku/gnocchi | gnocchi-core/src/main/scala/net/fnothaft/gnocchi/models/variant/logistic/AdditiveLogisticVariantModel.scala | Scala | apache-2.0 | 3,719 |
/* Copyright 2009-2011 Jay Conrod
*
* This file is part of Tungsten.
*
* Tungsten is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2 of
* the License, or (at your option) any later version.
*
* Tungsten is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Tungsten. If not, see
* <http://www.gnu.org/licenses/>.
*/
package tungsten
/** Adds a `copyWith` operation to any [[Mapping]]: build a modified copy of a
  * value by naming the fields to replace. */
trait Copying[T <: AnyRef]
  extends Mapping[T]
{
  /** Returns a copy of this object in which each field named in `changes` is
    * replaced by the paired value; all other fields keep their current values.
    * When a field name appears more than once, the first pair wins. */
  def copyWith(changes: (String, AnyRef)*): T =
    mapFields { (field, oldValue) =>
      changes.collectFirst {
        case (name, newValue) if name == field.getName => newValue
      }.getOrElse(oldValue)
    }
}
| jayconrod/tungsten | core/src/main/scala/tungsten/Copying.scala | Scala | gpl-2.0 | 1,104 |
package tomtom.splitter.layer7
import java.net.Socket
import scala.util.Try
/** Hands out localhost ports that appear to be free, for use in tests. */
object PortFactory {

  // A port is treated as free when connecting to it on localhost fails
  // (nothing is listening); a successful connection means it is in use.
  private def isFree(port: Int): Boolean =
    Try {
      val probe = new Socket("localhost", port)
      probe.close()
    }.isFailure

  // Lazily evaluated, memoized stream of free ports, starting at 1024.
  @volatile var portStream: Stream[Int] = Stream.from(1024).filter(isFree)

  /** Pops and returns the next free port; safe to call from multiple threads. */
  def findPort(): Int = this.synchronized {
    val next = portStream.head
    portStream = portStream.tail
    next
  }
}
| ebowman/splitter | src/test/scala/tomtom/splitter/layer7/PortFactory.scala | Scala | apache-2.0 | 502 |
package slamdata.engine.std
/** Aggregates every standard-library function group into a single [[Library]]. */
trait StdLib extends Library {
  val math       = MathLib
  val structural = StructuralLib
  val agg        = AggLib
  val relations  = RelationsLib
  val set        = SetLib
  val array      = ArrayLib
  val string     = StringLib
  val date       = DateLib

  // All functions exposed by the standard library, concatenated in a stable
  // order. (The trailing `++ Nil` of the original was a no-op and is dropped.)
  val functions =
    math.functions ++ structural.functions ++ agg.functions ++
      relations.functions ++ set.functions ++ array.functions ++
      string.functions ++ date.functions
}
/** Default instance bundling all standard-library function groups. */
object StdLib extends StdLib
| mossprescott/quasar | src/main/scala/slamdata/engine/std/std.scala | Scala | agpl-3.0 | 469 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.feature.transform.vision.image.augmentation
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.{BytesToMat, ImageFrame, LocalImageFrame, MatToFloats}
import org.scalatest.{FlatSpec, Matchers}
class PixelNormalizerSpec extends FlatSpec with Matchers {
  // Test image resources; the image used below is 375x500 with 3 channels
  // (implied by the 375 * 500 * 3 buffer sizes).
  val resource = getClass.getClassLoader.getResource("pascal/")

  "PixelNormalizer" should "work properly" in {
    val data = ImageFrame.read(resource.getFile)

    // Per-pixel means laid out channel-interleaved as (300, 200, 100).
    // NOTE(review): the reference transform below passes meanRGB = (100, 200, 300),
    // i.e. the reversed channel order — presumably because the mat stores
    // channels as BGR; confirm against MatToFloatsWithNorm's convention.
    val means = new Array[Float](375 * 500 * 3)
    var i = 0
    while (i < 375 * 500 * 3) {
      means(i) = 300f
      means(i + 1) = 200f
      means(i + 2) = 100f
      i += 3
    }

    val transformer = PixelNormalizer(means) -> MatToFloats()
    val transformed = transformer(data)

    // Reference path: mean subtraction done by MatToFloatsWithNorm directly.
    val data2 = ImageFrame.read(resource.getFile)
    val toFloat = new MatToFloatsWithNorm(meanRGB = Some(100f, 200f, 300f))
    val transformed2 = toFloat(data2)

    // Both pipelines must produce identical float buffers.
    val imageFeature = transformed.asInstanceOf[LocalImageFrame].array(0)
    val imageFeature2 = transformed2.asInstanceOf[LocalImageFrame].array(0)

    imageFeature2.floats().length should be (375 * 500 * 3)
    imageFeature2.floats() should equal(imageFeature.floats())
  }
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/transform/vision/image/augmentation/PixelNormalizerSpec.scala | Scala | apache-2.0 | 1,811 |
package org.jetbrains.plugins.scala.performance
import java.io.File
import java.util
import com.intellij.lang.javascript.boilerplate.GithubDownloadUtil
import com.intellij.openapi.externalSystem.model.ProjectSystemId
import com.intellij.openapi.externalSystem.settings.ExternalProjectSettings
import com.intellij.openapi.externalSystem.test.ExternalSystemImportingTestCase
import com.intellij.openapi.projectRoots.ProjectJdkTable
import com.intellij.openapi.projectRoots.impl.JavaAwareProjectJdkTableImpl
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.vfs.{LocalFileSystem, VirtualFile}
import com.intellij.platform.templates.github.ZipUtil
import com.intellij.psi.search.{FileTypeIndex, GlobalSearchScopesCore}
import com.intellij.testFramework.{IdeaTestUtil, VfsTestUtil}
import org.jetbrains.SbtStructureSetup
import org.jetbrains.plugins.scala.finder.SourceFilterScope
import org.jetbrains.plugins.scala.util.TestUtils
import org.jetbrains.plugins.scala.{ScalaFileType, extensions}
import org.jetbrains.sbt.project.SbtProjectSystem
import org.jetbrains.sbt.project.settings.SbtProjectSettings
import org.junit.Assert
/**
* Author: Svyatoslav Ilinskiy
* Date: 11/17/2015
*/
abstract class DownloadingAndImportingTestCase extends ExternalSystemImportingTestCase with SbtStructureSetup {
implicit class IntExt(val i: Int) {
def seconds: Int = i * 1000
}
override protected def getCurrentExternalProjectSettings: ExternalProjectSettings = {
val settings = new SbtProjectSettings
val internalSdk = JavaAwareProjectJdkTableImpl.getInstanceEx.getInternalJdk
val sdk = if (internalSdk == null) IdeaTestUtil.getMockJdk18
else internalSdk
settings.setJdk(sdk.getName)
settings.setCreateEmptyContentRootDirectories(true)
settings
}
override protected def getExternalSystemId: ProjectSystemId = SbtProjectSystem.Id
override protected def getTestsTempDir: String = ""
def rootDirPath: String = s"${TestUtils.getTestDataPath}/projects"
def projectDirPath: String = s"$rootDirPath/$githubRepoName"
def downloadURL: String = s"https://github.com/$githubUsername/$githubRepoName/archive/$revision.zip"
def outputZipFileName = s"$rootDirPath/zipFiles/$githubRepoName-$githubUsername-$revision"
override def setUpInWriteAction(): Unit = {
super.setUpInWriteAction()
val outputZipFile = new File(outputZipFileName)
val projectDir = new File(projectDirPath)
println("Starting download")
if (!outputZipFile.exists() && !projectDir.exists()) {
//don't download if zip file is already there
GithubDownloadUtil.downloadAtomically(null, downloadURL, outputZipFile, githubUsername, githubRepoName)
}
println("Finished download, extracting")
if (!projectDir.exists()) {
//don't unpack if the project is already unpacked
ZipUtil.unzip(null, projectDir, outputZipFile, null, null, true)
}
Assert.assertTrue("Project dir does not exist. Download or unpack failed!", projectDir.exists())
myProjectRoot = LocalFileSystem.getInstance.refreshAndFindFileByIoFile(projectDir)
setUpSbtLauncherAndStructure(myProject)
extensions.inWriteAction {
val internalSdk = JavaAwareProjectJdkTableImpl.getInstanceEx.getInternalJdk
val sdk = if (internalSdk == null) IdeaTestUtil.getMockJdk17
else internalSdk
if (ProjectJdkTable.getInstance().findJdk(sdk.getName) == null) {
ProjectJdkTable.getInstance().addJdk(sdk)
}
ProjectRootManager.getInstance(myProject).setProjectSdk(sdk)
}
}
override def setUp(): Unit = {
super.setUp()
importProject()
}
def findFile(filename: String): VirtualFile = {
import scala.collection.JavaConversions._
val searchScope = SourceFilterScope(myProject, GlobalSearchScopesCore.directoryScope(myProject, myProjectRoot, true))
val files: util.Collection[VirtualFile] = FileTypeIndex.getFiles(ScalaFileType.INSTANCE, searchScope)
val file = files.filter(_.getName == filename).toList match {
case vf :: Nil => vf
case Nil => //is this a filepath?
val file = VfsTestUtil.findFileByCaseSensitivePath(s"$projectDirPath/$filename")
if (file != null && files.contains(file)) file
else {
Assert.assertTrue(s"Could not find file: $filename.\\nConsider providing relative path from project root", false)
null
}
case list =>
Assert.assertTrue(s"There are ${list.size} files with name $filename.\\nProvide full path from project root", false)
null
}
LocalFileSystem.getInstance().refreshFiles(files)
file
}
// Coordinates of the GitHub project under test; supplied by concrete test cases.
def githubUsername: String
def githubRepoName: String
// Commit hash to download, pinning the project to a reproducible revision.
def revision: String
}
/** Pins the community project used by import/performance tests to a fixed
  * revision of the JetBrains/intellij-scala repository. */
trait ScalaCommunityDownloadingAndImportingTestCase {
  protected def getExternalSystemConfigFileName: String = "build.sbt"

  def githubUsername: String = "JetBrains"

  def githubRepoName: String = "intellij-scala"

  // Fixed commit so results are reproducible across runs.
  def revision: String = "d2906113e9cdca0e302437cfd412fcb19d288720"
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/performance/DownloadingAndImportingTestCase.scala | Scala | apache-2.0 | 5,020 |
package com.pygmalios.rawKafkaCassandra
import akka.actor.ActorSystem
import akka.testkit.{ImplicitSender, TestKit}
import com.datastax.driver.core.PreparedStatement
import com.pygmalios.rawKafkaCassandra.actors.KafkaToCassandra
import com.pygmalios.rawKafkaCassandra.cassandra.{CassandraSession, CassandraSessionFactory}
import com.pygmalios.rawKafkaCassandra.itest.ITestConfig
import org.mockito.Matchers._
import org.mockito.Mockito
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
/** Integration spec: runs [[KafkaToCassandra]] against a real Kafka instance with a
  * mocked Cassandra layer, and verifies that the pipeline restarts after Cassandra
  * failures (each restart re-creates the session and re-prepares its statements). */
class TestKafkaISpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender with FlatSpecLike
  with Matchers with BeforeAndAfterAll with ActorSystemRawKafkaCassandraConfig with MockitoSugar {

  import TestKafkaISpec._

  def this() = this(ActorSystem("KafkaToCassandraISpec", ITestConfig.config))

  override def actorSystem = system

  override def afterAll: Unit = {
    TestKit.shutdownActorSystem(system)
  }

  behavior of "KafkaToCassandra with real Kafka and mocked Cassandra"

  it should "restart in case of Cassandra failure" in new TestScope() {
    // Prepare: every Cassandra write fails, forcing the pipeline to restart.
    when(mockCassandraSession.write(any(), any())).thenThrow(new RuntimeException("x"))

    // Execute
    run()

    // Give the pipeline time to go through several restart cycles.
    // NOTE(review): a fixed sleep makes this timing-sensitive; an
    // "eventually"-style poll would be more robust.
    Thread.sleep(15000)

    // Expect at least 3 restarts: each one writes, re-prepares the statement,
    // re-ensures the table, closes the broken session and creates a new one.
    verify(mockCassandraSession, Mockito.atLeast(3)).write(any(), any())
    verify(mockCassandraSession, Mockito.atLeast(3)).prepareWriteStatement(table1)
    verify(mockCassandraSession, Mockito.atLeast(3)).ensureTableExists(table1)
    verify(mockCassandraSession, Mockito.atLeast(3)).close()
    verify(mockCassandraSessionFactory, Mockito.atLeast(3)).create()
    verifyNoMore()
  }

  /** Mock fixture shared by a single test run. */
  class TestScope {
    val mockCassandraSessionFactory = mock[CassandraSessionFactory]
    val mockCassandraSession = mock[CassandraSession]
    when(mockCassandraSessionFactory.create()).thenReturn(mockCassandraSession)
    val preparedStatement1 = mock[PreparedStatement]
    when(mockCassandraSession.prepareWriteStatement(table1)).thenReturn(preparedStatement1)

    /** Starts the Kafka-to-Cassandra pipeline under test. */
    def run(): Unit =
      KafkaToCassandra.factory(actorSystem, mockCassandraSessionFactory)

    /** Asserts no interactions beyond those already verified. */
    def verifyNoMore(): Unit = {
      verifyNoMoreInteractions(mockCassandraSession)
      verifyNoMoreInteractions(mockCassandraSessionFactory)
    }
  }
}

object TestKafkaISpec {
  // Cassandra table the pipeline is expected to write; presumably derived from
  // the topic mapping in ITestConfig — confirm against that config.
  val table1 = "data_source_positioning_device"
}
| pygmalios/raw-kafka-cassandra | src/itest/scala/com/pygmalios/rawKafkaCassandra/TestKafkaISpec.scala | Scala | apache-2.0 | 2,413 |
package is.hail.utils
import is.hail.HailSuite
import org.testng.annotations.Test
/** Checks that SpillingCollectIterator yields all RDD elements, in order,
  * for a dataset larger than its buffer (1235 elements across 99 partitions). */
class SpillingCollectIteratorSuite extends HailSuite {
  @Test def addOneElement() {
    val array = (0 to 1234).toArray
    // 100 is presumably the spill buffer size, so 1235 elements exercise the
    // on-disk spilling path — TODO confirm against SpillingCollectIterator.
    val sci = SpillingCollectIterator(ctx.localTmpdir, fs, sc.parallelize(array, 99), 100)
    assert(sci.hasNext)
    assert(sci.next() == 0)
    assert(sci.hasNext)
    assert(sci.next() == 1)
    // The remaining elements arrive in their original order.
    assert(sci.toArray sameElements (2 to 1234))
  }
}
| hail-is/hail | hail/src/test/scala/is/hail/utils/SpillingCollectIteratorSuite.scala | Scala | mit | 455 |
package im.actor.server.mtproto.transport
/** MTProto transport frame signalling that a message was dropped
  * (inferred from the naming — confirm against the protocol spec).
  *
  * @param messageId id of the dropped message
  * @param errorCode numeric reason code
  * @param message   human-readable description
  */
@SerialVersionUID(1L)
case class Drop(messageId: Long, errorCode: Byte, message: String) extends MTProto {
  // Explicit result type on the public member (was previously inferred).
  val header: Int = Drop.header
}

object Drop {
  /** Header tag value (0x3) identifying a Drop frame on the wire. */
  val header: Int = 0x3
}
| TimurTarasenko/actor-platform | actor-server/actor-models/src/main/scala/im/actor/server/mtproto/transport/Drop.scala | Scala | mit | 215 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.store
import org.apache.predictionio.data.storage.Storage
import org.apache.predictionio.data.storage.Event
import org.joda.time.DateTime
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration
/** Synchronous (blocking) access to the Event Store, bypassing Spark's
  * parallelization. Intended for low-latency lookups from an algorithm's
  * predict() or serving logic; every call blocks the current thread until the
  * store answers or `timeout` expires.
  */
object LEventStore {

  private val defaultTimeout = Duration(60, "seconds")

  // Lazily acquired handle to the local event store; marked @transient so the
  // connection is re-created instead of serialized.
  @transient lazy private val eventsDb = Storage.getLEvents()

  /** Looks up the events of one specific entity, blocking until available.
    *
    * @param appName          events are read from this app
    * @param entityType       entity type to match
    * @param entityId         entity id to match
    * @param channelName      channel to read from (default channel when None)
    * @param eventNames       keep only events whose name is in this list
    * @param targetEntityType None = unrestricted, Some(None) = events without a
    *                         target type, Some(Some(x)) = target type equals x
    * @param targetEntityId   same convention as targetEntityType
    * @param startTime        keep events with eventTime >= startTime
    * @param untilTime        keep events with eventTime < untilTime
    * @param limit            max number of events (None or Some(-1) = all)
    * @param latest           when true (default), newest events come first
    * @param timeout          how long to block before giving up
    * @return the matching events
    */
  def findByEntity(
    appName: String,
    entityType: String,
    entityId: String,
    channelName: Option[String] = None,
    eventNames: Option[Seq[String]] = None,
    targetEntityType: Option[Option[String]] = None,
    targetEntityId: Option[Option[String]] = None,
    startTime: Option[DateTime] = None,
    untilTime: Option[DateTime] = None,
    limit: Option[Int] = None,
    latest: Boolean = true,
    timeout: Duration = defaultTimeout): Iterator[Event] = {

    val (appId, channelId) = Common.appNameToId(appName, channelName)

    val lookup = eventsDb.futureFind(
      appId = appId,
      channelId = channelId,
      startTime = startTime,
      untilTime = untilTime,
      entityType = Some(entityType),
      entityId = Some(entityId),
      eventNames = eventNames,
      targetEntityType = targetEntityType,
      targetEntityId = targetEntityId,
      limit = limit,
      reversed = Some(latest))

    Await.result(lookup, timeout)
  }

  /** Generic event lookup. When entityType or entityId is left out the store
    * falls back to a table scan, so prefer [[findByEntity]] where possible.
    *
    * Parameter conventions are identical to [[findByEntity]], except that
    * entityType/entityId are optional and no ordering flag is exposed.
    */
  def find(
    appName: String,
    entityType: Option[String] = None,
    entityId: Option[String] = None,
    channelName: Option[String] = None,
    eventNames: Option[Seq[String]] = None,
    targetEntityType: Option[Option[String]] = None,
    targetEntityId: Option[Option[String]] = None,
    startTime: Option[DateTime] = None,
    untilTime: Option[DateTime] = None,
    limit: Option[Int] = None,
    timeout: Duration = defaultTimeout): Iterator[Event] = {

    val (appId, channelId) = Common.appNameToId(appName, channelName)

    val lookup = eventsDb.futureFind(
      appId = appId,
      channelId = channelId,
      startTime = startTime,
      untilTime = untilTime,
      entityType = entityType,
      entityId = entityId,
      eventNames = eventNames,
      targetEntityType = targetEntityType,
      targetEntityId = targetEntityId,
      limit = limit)

    Await.result(lookup, timeout)
  }
}
| himanshudhami/PredictionIO | data/src/main/scala/org/apache/predictionio/data/store/LEventStore.scala | Scala | apache-2.0 | 5,830 |
package org.zazukoians.zz2h
import org.apache.clerezza.commons.rdf._
import org.apache.clerezza.rdf.utils.GraphNode
import org.osgi.service.component.annotations._
import javax.ws.rs._
import javax.ws.rs.core._
import org.apache.clerezza.rdf.core._
import org.apache.clerezza.rdf.scala.utils._
//import org.apache.clerezza.rdf.scala.utils.impl._
import Preamble._
import org.apache.clerezza.rdf.ontologies._
/** JAX-RS resource (registered as an OSGi component) serving the zz2h entry
  * page as an RDF graph. */
@Component(service = Array(classOf[Object]), property = Array("javax.ws.rs=true"))
@Path("zz2h")
class EntryPage {

  // Demo RSA modulus decorated with unicode separators; only referenced by the
  // commented-out example graph below.
  val bblfishModulus = """
9D ☮ 79 ☮ BF ☮ E2 ☮ F4 ☮ 98 ☮ BC ☮ 79 ☮ 6D ☮ AB ☮ 73 ☮ E2 ☮ 8B ☮ 39 ☮ 4D ☮ B5 26 ✜ 68 ✜ 49 ✜ EE ✜ 71 ✜ 87 ✜
06 ✜ 32 ✜ C9 ✜ 9F ✜ 3F ✜ 94 ✜ E5 ✜ CB ✜ 4D ✜ B5 12 ☮ 35 ☮ 13 ☮ 69 ☮ 60 ☮ 81 ☮ 58 ☮ 79 ☮ 66 ☮ F3 ☮ 79 ☮ 20 ☮
91 ☮ 6A ☮ 3F ☮ 42 5A ✜ F6 ✜ 54 ✜ 42 ✜ 88 ✜ B2 ✜ E9 ✜ 19 ✜ 4A ✜ 79 ✜ 87 ✜ 2E ✜ 62 ✜ 44 ✜ 2D ✜ 7C 06 ☽ 78 ☽ F8
☽ FD ☽ 52 ☽ 92 ☽ 6D ☽ CD ☽ D6 ☽ F3 ☽ 28 ☽ 6B ☽ 1F ☽ DB ☽ CB ☽ D3 F2 ☮ 08 ☮ 34 ☮ 72 ☮ A2 ☮ 12 ☮ 75 ☮ AE ☮ D1
☮ 09 ☮ 17 ☮ D0 ☮ 88 ☮ 4C ☮ 04 ☮ 8E 04 ☾ E5 ☾ BF ☾ D1 ☾ 41 ☾ 64 ☾ D1 ☾ F7 ☾ 89 ☾ 6D ☾ 8B ☾ B2 ☾ F2 ☾ 46 ☾ C0
☾ 56 87 ☮ 8D ☮ B8 ☮ 7C ☮ C6 ☮ FE ☮ E9 ☮ 61 ☮ 88 ☮ 08 ☮ 61 ☮ DD ☮ E3 ☮ B8 ☮ B5 ☮ 47 ♥
"""

  // Cert/RSA ontology IRIs defined inline to reduce external dependencies.
  final val hex: IRI = new IRI("http://www.w3.org/ns/auth/cert#hex")
  final val identity: IRI = new IRI("http://www.w3.org/ns/auth/cert#identity")
  final val RSAPublicKey: IRI = new IRI("http://www.w3.org/ns/auth/rsa#RSAPublicKey")
  final val modulus: IRI = new IRI("http://www.w3.org/ns/auth/rsa#modulus")
  final val public_exponent: IRI = new IRI("http://www.w3.org/ns/auth/rsa#public_exponent")

  val henryUri: String = "http://bblfish.net/#hjs"
  val retoUri: String = "http://farewellutopia.com/reto/#me"
  val danbriUri: String = "http://danbri.org/foaf.rdf#danbri"

  /** Builds an IRI in the schema.org namespace. */
  def schema(localName: String) = {
    ("http://schema.org/"+localName).iri
  }

  /** Builds an IRI in the zz2h ontology namespace. */
  def zz2h(localName: String) = {
    ("http://zz2h.zazukoinas.org/ontology/"+localName).iri
  }

  /** GET handler: returns a graph describing the requested page, linking it to
    * the matcher graph resource under the application base URI. */
  @GET
  def hello(@Context uriInfo: UriInfo) =
  {
    val resource = uriInfo.getRequestUri().toString().iri;
    val g = new EzGraph() {
      (
        resource.a(schema("WebPage")) -- schema("headline") --> "zz2h".lang("en")
        -- zz2h("matcherGraph") --> (uriInfo.getBaseUri.toString + "zz2h/matchers").iri
      )
      // Example of a richer FOAF graph kept for reference (intentionally disabled):
      /*(
      resource.a(FOAF.Person) -- FOAF.name --> "Reto Gmür".lang("rm")
      -- FOAF.title --> "Mr"
      -- FOAF.currentProject --> "http://clerezza.org/".iri
      -- FOAF.knows --> (
      "http://bblfish.net/#hjs".iri.a(FOAF.Person)
      -- FOAF.name --> "Henry Story"
      -- FOAF.currentProject --> "http://webid.info/".iri
      -- FOAF.knows -->> List(b_("reto"), b_("danny"))
      //one need to list properties before inverse properties, or use brackets
      <-- identity -- (
      bnode.a(RSAPublicKey) //. notation because of precedence of operators
      -- modulus --> 65537
      -- public_exponent --> (bblfishModulus ^^ hex) // brackets needed due to precedence
      )
      )
      -- FOAF.knows --> (
      b_("danny").a(FOAF.Person)
      -- FOAF.name --> "Danny Ayers".lang("en")
      -- FOAF.knows --> "http://bblfish.net/#hjs".iri //knows
      -- FOAF.knows --> b_("reto")
      )
      )*/
    }
    new GraphNode(resource,g)
  }
}
| zazukoians/zz2h | src/main/scala/org/zazukoians/zz2h/EntryPage.scala | Scala | apache-2.0 | 3,804 |
package org.jetbrains.plugins.scala.externalHighlighters
import com.intellij.compiler.server.BuildManagerListener
import com.intellij.openapi.project.Project
import java.util.UUID
/** Listens for build-server completion events and, when an automake build
  * finishes while compiler-based error highlighting is enabled for the
  * project, re-runs the compiler-driven highlighting pass. */
class AutomakeBuildManagerListener extends BuildManagerListener {

  override def buildFinished(project: Project, sessionId: UUID, isAutomake: Boolean): Unit =
    // Short-circuit: the settings check only runs for automake builds.
    if (isAutomake && ScalaHighlightingMode.isShowErrorsFromCompilerEnabled(project))
      TriggerCompilerHighlightingService.get(project).afterIncrementalCompilation()
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/externalHighlighters/AutomakeBuildManagerListener.scala | Scala | apache-2.0 | 528 |
// Rosalind TRIE: read newline-separated strings from the file named by the first
// CLI argument, build a trie over them, and print its adjacency list as
// "<parent> <child> <edge-char>" lines, numbering nodes from 1 in creation order.
var id = 0
def getCurrentId: Int = { id += 1; id }

class Node() {
  val key = getCurrentId
  val children = collection.mutable.Map[Char, Node]()
  override def toString =
    children.map(z => f"${key} ${z._2.key} ${z._1}\n${z._2.toString}").mkString
}

val root = new Node()
// Close the source when done — the original leaked the file handle.
val source = io.Source.fromFile(new java.io.File(args(0)))
try source.getLines().foreach(_.foldLeft(root)(_.children.getOrElseUpdate(_, new Node())))
finally source.close()
print(root)
package tool
/**
*
* @author ponkotuy
* Date: 14/10/22.
*/
/** Equipment category code used by the game API; `v` is the wire value. */
sealed abstract class EquipType(val v: Int)

object EquipType {
  case object MainGunS extends EquipType(1)
  case object MainGunM extends EquipType(2)
  case object MainGunL extends EquipType(3)
  case object SubGun extends EquipType(4)
  case object Torpedo extends EquipType(5)
  case object Fighter extends EquipType(6)
  case object Bomber extends EquipType(7)
  case object TorpedoBomber extends EquipType(8)
  case object Scout extends EquipType(9)
  case object ScoutSeaplane extends EquipType(10)
  case object SeaBasedBomber extends EquipType(11)
  case object RadarS extends EquipType(12)
  case object RadarL extends EquipType(13)
  case object Sonar extends EquipType(14)
  case object DepthBomb extends EquipType(15)
  case object Armor extends EquipType(16)
  case object EngineChamber extends EquipType(17)
  case object AntiAirBullet extends EquipType(18)
  case object AntiShipBullet extends EquipType(19)
  case object ProximityFuze extends EquipType(20)
  case object AntiAirGun extends EquipType(21)
  case object MidgetSubmarine extends EquipType(22)
  case object DamageControl extends EquipType(23)
  case object LandingCraft extends EquipType(24)
  case object Autogiro extends EquipType(25)
  case object MaritimePartrolAircraft extends EquipType(26)
  case object ArmorM extends EquipType(27)
  case object ArmorL extends EquipType(28)
  case object Searchlight extends EquipType(29)
  case object SimplifiedPackage extends EquipType(30)
  case object RepairFacility extends EquipType(31)
  case object ShortTorpedo extends EquipType(32)
  case object Flare extends EquipType(33)
  case object Command extends EquipType(34)
  case object Pilot extends EquipType(35)

  /** All equipment types, in wire-value order. (Explicit type: the inferred
    * element type used to include Product/Serializable.) */
  val values: Array[EquipType] = Array(MainGunS, MainGunM, MainGunL, SubGun, Torpedo, Fighter, Bomber, TorpedoBomber, Scout,
    ScoutSeaplane, SeaBasedBomber, RadarS, RadarL, Sonar, DepthBomb, Armor, EngineChamber, AntiAirBullet,
    AntiShipBullet, ProximityFuze, AntiAirGun, MidgetSubmarine, DamageControl, LandingCraft, Autogiro,
    MaritimePartrolAircraft, ArmorM, ArmorL, Searchlight, SimplifiedPackage, RepairFacility, ShortTorpedo, Flare,
    Command, Pilot)

  // Precomputed lookup table: O(1) fromInt instead of a linear scan per call.
  private lazy val byValue: Map[Int, EquipType] = values.map(e => e.v -> e).toMap

  /** Resolves a wire value to its EquipType, if any. */
  def fromInt(v: Int): Option[EquipType] = byValue.get(v)

  val Scouts: Array[EquipType] = Array(Scout, ScoutSeaplane)
  val Radars: Array[EquipType] = Array(RadarS, RadarL)
  val Aircraft: Array[EquipType] = Array(
    Fighter, Bomber, TorpedoBomber, Scout, ScoutSeaplane, SeaBasedBomber, MaritimePartrolAircraft
  )
  val CarrierBased: Array[EquipType] = Array(Fighter, Bomber, TorpedoBomber, SeaBasedBomber)
}
/** Equipment icon code used by the game API; `v` is the wire value. */
sealed abstract class EquipIconType(val v: Int)

object EquipIconType {
  case object MainGunS extends EquipIconType(1)
  case object MainGunM extends EquipIconType(2)
  case object MainGunL extends EquipIconType(3)
  case object SubGun extends EquipIconType(4)
  case object Torpedo extends EquipIconType(5)
  case object Fighter extends EquipIconType(6)
  case object Bomber extends EquipIconType(7)
  case object TorpedoBomber extends EquipIconType(8)
  case object Scout extends EquipIconType(9)
  case object ScoutSeaplane extends EquipIconType(10)
  case object Radar extends EquipIconType(11)
  case object AntiAirBullet extends EquipIconType(12)
  case object AntiShipBullet extends EquipIconType(13)
  case object DamageControl extends EquipIconType(14)
  case object CaliberGun extends EquipIconType(15)
  case object AntiAirGun extends EquipIconType(16)
  case object DepthBomb extends EquipIconType(17)
  case object Sonar extends EquipIconType(18)
  case object EngineChamber extends EquipIconType(19)
  case object LandingCraft extends EquipIconType(20)
  case object Autogiro extends EquipIconType(21)
  case object MaritimePartrolAircraft extends EquipIconType(22)
  case object Armor extends EquipIconType(23)
  case object Searchlight extends EquipIconType(24)
  case object SimplifiedPackage extends EquipIconType(25)
  case object RepairFacility extends EquipIconType(26)
  case object Flare extends EquipIconType(27)
  case object Command extends EquipIconType(28)
  case object Pilot extends EquipIconType(29)

  /** All icon types, in wire-value order (explicit element type). */
  val values: Array[EquipIconType] = Array(MainGunS, MainGunM, MainGunL, SubGun, Torpedo, Fighter, Bomber, TorpedoBomber, Scout,
    ScoutSeaplane, Radar, AntiAirBullet, AntiShipBullet, DamageControl, CaliberGun, AntiAirGun, DepthBomb, Sonar,
    EngineChamber, LandingCraft, Autogiro, MaritimePartrolAircraft, Armor, Searchlight, SimplifiedPackage, RepairFacility,
    Flare, Command, Pilot)

  // Precomputed lookup table: O(1) fromInt instead of a linear scan per call.
  private lazy val byValue: Map[Int, EquipIconType] = values.map(e => e.v -> e).toMap

  /** Resolves a wire value to its EquipIconType, if any. */
  def fromInt(v: Int): Option[EquipIconType] = byValue.get(v)
}
| nekoworkshop/MyFleetGirls | server/app/tool/EquipType.scala | Scala | mit | 4,604 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.amandroid.serialization
import org.argus.jawa.flow.pta._
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
/** json4s serializer for [[PTASlot]] values used as *map keys*. JSON object keys
  * must be strings, so each slot is round-tripped through a compact single-field
  * JSON object whose field name tags the concrete slot type. */
object PTASlotKeySerializer extends CustomKeySerializer[PTASlot](format => (
  {
    case str: String =>
      implicit val formats: Formats = format + InstanceSerializer + SignatureSerializer + JawaTypeSerializer + FieldFQNSerializer
      // Parse the string key back into JSON, then dispatch on the type tag.
      val jv = parse(str)
      jv match {
        case JObject(List(JField("VarSlot", v))) => Extraction.extract[VarSlot](v)
        case JObject(List(JField("StaticFieldSlot", v))) => Extraction.extract[StaticFieldSlot](v)
        case JObject(List(JField("FieldSlot", v))) => Extraction.extract[FieldSlot](v)
        case JObject(List(JField("ArraySlot", v))) => Extraction.extract[ArraySlot](v)
        case JObject(List(JField("InstanceSlot", v))) => Extraction.extract[InstanceSlot](v)
        case JObject(List(JField("InvokeSlot", v))) => Extraction.extract[InvokeSlot](v)
        // NOTE(review): any other shape raises a MatchError here — confirm
        // whether unknown tags should be rejected with a clearer error.
      }
  }, {
    case slot: PTASlot =>
      implicit val formats: Formats = format + InstanceSerializer + SignatureSerializer + JawaTypeSerializer + FieldFQNSerializer
      // Serialize each slot variant under its own type tag; FieldSlot/ArraySlot/
      // InstanceSlot are decomposed field-by-field to control the layout.
      slot match {
        case s: VarSlot =>
          compact(render("VarSlot" -> Extraction.decompose(s)))
        case s: StaticFieldSlot =>
          compact(render("StaticFieldSlot" -> Extraction.decompose(s)))
        case s: FieldSlot =>
          compact(render("FieldSlot" -> ("ins" -> Extraction.decompose(s.ins)) ~ ("fieldName" -> Extraction.decompose(s.fieldName))))
        case s: ArraySlot =>
          compact(render("ArraySlot" -> ("ins" -> Extraction.decompose(s.ins))))
        case s: InstanceSlot =>
          compact(render("InstanceSlot" -> ("ins" -> Extraction.decompose(s.ins))))
        case s: InvokeSlot =>
          compact(render("InvokeSlot" -> Extraction.decompose(s)))
      }
  }
))
| arguslab/Argus-SAF | amandroid/src/main/scala/org/argus/amandroid/serialization/PTASlotKeySerializer.scala | Scala | apache-2.0 | 2,285 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.common.serializers
import com.stratio.crossdata.common.result.{StreamedRow, StreamedSchema, StreamedSuccessfulSQLResult}
import org.json4s.JsonAST.{JField, JObject}
import org.json4s.{CustomSerializer, Extraction, Formats}
import StreamedSuccessfulSQLResultSerializerHelper._
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.StructType
/** JSON field names shared by the streamed-result serializers in this file. */
private[serializers] object StreamedSuccessfulSQLResultSerializerHelper {
  val SchemaLabel = "streamedSchema"
  val RowLabel = "streamedRow"
}
/** Serializer for streamed SQL results: schemas round-trip in both directions,
  * while rows are serialize-only here (deserializing a row needs the schema,
  * which is handled by the schema-aware deserializer in this file). */
object StreamedSuccessfulSQLResultSerializer extends CustomSerializer[StreamedSuccessfulSQLResult](
  formats => (
    {
      case JObject(JField(SchemaLabel, jSchema)::Nil) =>
        implicit val _: Formats = formats
        StreamedSchema(jSchema.extract[StructType])
    },
    {
      case StreamedSchema(schema) => JObject(JField(SchemaLabel, Extraction.decompose(schema)(formats)))
      // Rows can only be serialized when their schema was provided alongside.
      case StreamedRow(row, Some(providedSchema)) =>
        JObject(JField(RowLabel, Extraction.decompose(row)(formats + RowSerializer(providedSchema))))
    }
  )
)
/** Deserializer for streamed rows: uses the previously received `schema` to
  * reconstruct typed [[Row]] values. The serialization side is intentionally
  * empty (writing rows is handled elsewhere). */
class StreamedRowSerializer(schema: StructType) extends CustomSerializer[StreamedSuccessfulSQLResult](
  formats => (
    {
      case JObject(JField(RowLabel, jRow)::Nil) =>
        implicit val _: Formats = formats + new RowSerializer(schema)
        StreamedRow(jRow.extract[Row])
    },
    PartialFunction.empty
  )
)
| gserranojc/Crossdata | common/src/main/scala/com/stratio/crossdata/common/serializers/StreamedSuccessfulSQLResultSerializer.scala | Scala | apache-2.0 | 2,030 |
package rpm4s.data
import rpm4s.data.Dependency._
/** Metadata extracted from an RPM package header (a "primary" repodata entry).
  *
  * NOTE(review): the field name `summery` is a typo for `summary`, but renaming
  * it would break callers, so it is kept as-is.
  *
  * @param headerRange location of the header inside the rpm file
  * @param epoch       defaults to [[Epoch.ZERO]] when the package declares none
  * @param requires/provides/obsoletes/... dependency lists, empty by default
  */
case class RpmPrimaryEntry(
  name: Name,
  version: Version,
  release: Release,
  architecture: Architecture,
  vendor: Vendor,
  license: License,
  summery: Summary,
  description: Description,
  group: rpm4s.data.Group,
  headerRange: HeaderRange,
  epoch: Epoch = Epoch.ZERO,
  buildhost: Option[BuildHost] = None,
  buildtime: Option[BuildTime] = None,
  fileEntries: Option[Vector[FileEntry]] = None,
  requires: Vector[Requires] = Vector.empty,
  provides: Vector[Provides] = Vector.empty,
  obsoletes: Vector[Obsoletes] = Vector.empty,
  enhances: Vector[Enhances] = Vector.empty,
  conflicts: Vector[Conflicts] = Vector.empty,
  supplements: Vector[Supplements] = Vector.empty,
  recommends: Vector[Recommends] = Vector.empty,
  suggests: Vector[Suggests] = Vector.empty
)
| lucidd/rpm4s | shared/src/main/scala/rpm4s/data/RpmPrimaryEntry.scala | Scala | mit | 836 |
package com.blinkbox.books.marvin.watcher
import java.lang.Throwable
import java.nio.file._
import akka.actor.{ActorSystem, Props}
import com.blinkbox.books.config.Configuration
import com.blinkbox.books.logging.Loggers
import com.blinkbox.books.rabbitmq.{RabbitMq, RabbitMqConfirmedPublisher}
import com.typesafe.scalalogging.StrictLogging
import scala.concurrent.duration._
/** Entry point: polls the inbound directory forever, handing new files to the
  * [[FileProcessor]] and publishing notifications over RabbitMQ. */
object WatcherService extends App with Configuration with Loggers with StrictLogging {

  import scala.util.control.NonFatal

  // Service version read from the VERSION file; falls back to 0.0.0.
  val Version = scala.util.Try(scala.io.Source.fromFile("VERSION").mkString.trim).getOrElse("0.0.0")

  try {
    val appConfig = AppConfig(config)
    implicit val system = ActorSystem("marvin-watcher")
    val reliableConnection = RabbitMq.reliableConnection(appConfig.messaging.rabbitmq)
    val rabbitPublisher = system.actorOf(Props(new RabbitMqConfirmedPublisher(reliableConnection, appConfig.messaging.marvin)), "WatcherPublisher")

    val inboundDirectory = Paths.get(appConfig.inboundDirectory)
    val processingDirectory = Paths.get(appConfig.processingDirectory)
    val storageDirectory = Paths.get(appConfig.storageDirectory)
    val errorDirectory = Paths.get(appConfig.errorDirectory)
    val delay = 15.seconds

    val directoryScanner = new DirectoryScanner(inboundDirectory) with DefaultScanningFunctions
    val fileProcessor = new FileProcessor(
      inboundDirectory, processingDirectory, storageDirectory, errorDirectory, rabbitPublisher, appConfig.messaging.marvin.messageTimeout
    )

    logger.info(s"Started Marvin/watcher v${Version}.")

    // Scan loop: survives per-scan failures and retries after `delay`.
    while (true) {
      try {
        logger.debug(s"Scanning ${inboundDirectory}")
        directoryScanner.scan(fileProcessor.fileFound)
        logger.debug(s"Waiting ${delay} before starting a new scan.")
        Thread.sleep(delay.toMillis)
      } catch {
        // Was `case ex: Throwable`, which also trapped OutOfMemoryError and
        // InterruptedException and kept looping on an unrecoverable JVM state.
        // NonFatal lets fatal errors propagate to the outer guard below.
        case NonFatal(ex) => logger.error("Uncaught error while scanning; restarting", ex)
      }
    }
  } catch {
    // Last-resort guard: log and exit non-zero so supervisors notice the crash.
    case ex: Throwable =>
      logger.error("Error during execution of the service", ex)
      System.exit(1)
  }
}
package com.themillhousegroup.mondrian
import play.api.Logger
import play.api.cache.CacheApi
import scala.concurrent.Future
import scala.concurrent.duration.Duration
/**
* Mix this trait in with your MongoService to get simple caching via the Play Framework's CacheApi:
* https://www.playframework.com/documentation/2.4.x/ScalaCache
*
*/
/** Adds simple read-through caching to a [[TypedMongoService]] using Play's
  * CacheApi (https://www.playframework.com/documentation/2.4.x/ScalaCache).
  *
  * Entries are cached as immutable maps under a named cache key and expire
  * after `cacheExpirationTime`; "clearing" a cache stores an empty map.
  */
trait MongoCaching[ME <: MongoEntity] {
  this: TypedMongoService[ME] =>

  val cache: CacheApi
  val logger: Logger

  type EntityMap = Map[String, ME]

  val idCacheName: String
  val cacheExpirationTime: Duration

  /** Runs `f` against an id -> entity map built (and cached) from `listAll`. */
  def withIdCache[R](f: EntityMap => R): Future[R] =
    withOneToOneCache[R, String](_.id, idCacheName)(f)

  /** Empties the id -> entity map. */
  def clearIdCache(): Unit = clearCache(idCacheName)

  /** Empties the named cache by overwriting it with an empty map. */
  def clearCache[K, V](cacheName: String): Unit =
    cache.set(cacheName, Map.empty[K, V], cacheExpirationTime)

  /** Caches a (K -> ME) map where `keyFn` yields a *unique* key per entity,
    * then runs `f` against it. */
  def withOneToOneCache[R, K](keyFn: ME => K, cacheName: String)(f: Map[K, ME] => R): Future[R] =
    withCache[R, K, ME](entities => entities.map(e => keyFn(e) -> e).toMap, cacheName)(f)

  /** Caches a Map[K, V] derived from `listAll` via `mappingFn`, then runs `f`.
    * Convenient to use curried, e.g.:
    *   def withPhoneNumberCache = withCache[User, PhoneNumber](_.phoneNumber, "allNumbers") _
    */
  def withCache[R, K, V](mappingFn: Seq[ME] => Map[K, V], cacheName: String)(f: Map[K, V] => R): Future[R] =
    withCachePopulatedBy(listAll)(mappingFn, cacheName)(f)

  /** As [[withCache]], but the entities come from the supplied `loadQuery`
    * rather than `listAll`. On a miss, the query runs, the derived map is
    * stored with the configured expiry, and `f` is applied to it. */
  def withCachePopulatedBy[R, K, V](loadQuery: => Future[Seq[ME]])(mappingFn: Seq[ME] => Map[K, V], cacheName: String)(f: Map[K, V] => R): Future[R] =
    cache.get[Map[K, V]](cacheName) match {
      case Some(cached) =>
        logger.trace(s"$cacheName cache hit")
        Future.successful(f(cached))
      case None =>
        logger.trace(s"$cacheName cache miss")
        loadQuery.map { entities =>
          val populated = mappingFn(entities)
          cache.set(cacheName, populated, cacheExpirationTime)
          f(populated)
        }
    }
}
| themillhousegroup/mondrian | src/main/scala/com/themillhousegroup/mondrian/MongoCaching.scala | Scala | mit | 2,826 |
package benchmarks
import lift.arithmetic.SizeVar
import ir._
import ir.ast._
import opencl.ir._
import opencl.ir.pattern._
/** gemv benchmark (y := alpha * A * x + beta * y) over several Lift kernels. */
@deprecated("Uses an old benchmark infrastructure", "")
class MatrixVector (override val f: Seq[(String, Array[Lambda])]) extends DeprecatedBenchmark("Matrix Vector Multiplication (gemv)", Seq(4096, 4096), f, 0.0f) {

  /** Host-side reference gemv used to validate kernel results. */
  override def runScala(inputs: Any*): Array[Float] = {
    var matrix = inputs(0).asInstanceOf[Array[Array[Float]]]
    val vectorX = inputs(1).asInstanceOf[Array[Float]]
    val vectorY = inputs(2).asInstanceOf[Array[Float]]
    val alpha = inputs(3).asInstanceOf[Float]
    val beta = inputs(4).asInstanceOf[Float]

    // Variant 4 (clblast_T) works on the transposed layout, so transpose back
    // before computing the reference result.
    if (variant == 4)
      matrix = matrix.transpose

    val tmp = matrix.map(
      (row) => (row, vectorX).zipped.map(_ * _).sum * alpha
    )

    val scaledY = vectorY.map(_ * beta)

    (tmp, scaledY).zipped.map(_ + _)
  }

  /** Random NxM matrix plus x/y vectors and scalars alpha=2.5, beta=1.5;
    * transposed up-front for variant 4 to match its expected layout. */
  override def generateInputs(): Seq[Any] = {
    val inputSizeN = inputSizes()(0)
    val inputSizeM = inputSizes()(1)

    var matrix = Array.fill(inputSizeN, inputSizeM)(util.Random.nextInt(5).toFloat)
    val vectorX = Array.fill(inputSizeM)(util.Random.nextInt(5).toFloat)
    val vectorY = Array.fill(inputSizeN)(util.Random.nextInt(5).toFloat)
    val alpha = 2.5f
    val beta = 1.5f

    if (variant == 4)
      matrix = matrix.transpose

    Seq(matrix, vectorX, vectorY, alpha, beta)
  }
}
object MatrixVector {
val N = SizeVar("N")
val M = SizeVar("M")
val fullMatrixVectorFusedOpenCL = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, N), M),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, M),
Float,
Float,
(matrix, vectorX, vectorY, alpha, beta) => {
MapWrg(fun( t =>
Join() o toGlobal(MapLcl(MapSeq(fun( x => multAndSumUp(x, Get(t, 1), beta))))) o Split(1) o
Join() o MapLcl(MapSeq(fun( x => mult(alpha, x) ))) o Split(1) o
Join() o toLocal(MapLcl(toLocal(MapSeq(id)) o ReduceSeq(fun((acc, y) => multAndSumUp.apply(acc, Get(y, 0), Get(y, 1))), 0.0f))) o Split(N) $ Zip(vectorX, Get(t, 0)))
) $ Zip(matrix, vectorY)
})
val fullMatrixVectorFusedOpenCLAMD = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, N), M),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, M),
Float,
Float,
(matrix, vectorX, vectorY, alpha, beta) => {
MapWrg(fun( t =>
Join() o toGlobal(MapLcl(MapSeq(fun( x => multAndSumUp(x, Get(t, 1), beta))))) o Split(1) o
Join() o MapLcl(toLocal(MapSeq(id)) o ReduceSeq(add, 0.0f)) o Split(128) o
Join() o MapLcl(MapSeq(fun( x => mult(alpha, x) ))) o Split(1) o
Join() o toLocal(MapLcl(toLocal(MapSeq(id)) o ReduceSeq(fun((acc, y) => multAndSumUp.apply(acc, Get(y, 0), Get(y, 1))), 0.0f))) o Split(N/^128) o ReorderStride(128) $ Zip(vectorX, Get(t, 0)) )
) $ Zip(matrix, vectorY)
})
// The same expression as 'fullMatrixVectorFusedOpenCLAMD' but written in a
// dataflow / more imperative style
val fullMatrixVectorFusedOpenCLAMD_ = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, N), M),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, M),
Float,
Float,
(matrix, vectorX, vectorY, alpha, beta) => {
Zip(matrix, vectorY) :>>
MapWrg(
\\(pair => {
val matrixRow = pair._0
val y_i = pair._1
val partialDotProdcut = {
Zip(vectorX, matrixRow) :>>
ReorderStride(128) :>>
Split(N /^ 128) :>>
toLocal(MapLcl(
ReduceSeq(\\((acc, y) => multAndSumUp(acc, y._0, y._1)), 0.0f) >>>
toLocal(MapSeq(id))
)) :>>
Join()
}
val timesAlpha = {
partialDotProdcut :>>
Split(1) :>> MapLcl(MapSeq(\\(x => mult(alpha, x)))) :>> Join()
}
val fullDotProduct = {
timesAlpha :>>
Split(128) :>> MapLcl(ReduceSeq(add, 0.0f) >>> toLocal(MapSeq(id))) :>> Join()
}
fullDotProduct :>>
Split(1) :>>
toGlobal(MapLcl(MapSeq(fun(x => multAndSumUp(x, y_i, beta))))) :>>
Join()
})
)
})
val clblast_N = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, N), M),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, M),
Float,
Float,
(matrix, vectorX, vectorY, alpha, beta) =>
Join() o MapWrg(fun( matChunk =>
MapSeq(
toGlobal(fun(y =>
MapLcl(fun(x =>
add(
toPrivate(mult)(x._0, alpha),
toPrivate(mult)(x._1, beta)
)
)) $ Zip(y, Map(Get(1)) $ matChunk)
))
) o
ReduceSeq(fun((acc, next) =>
Let(localX =>
Join() o MapLcl(fun(x => ReduceSeq(fun((acc2, next2) =>
multAndSumUp(acc2, Get(next2, 0), Get(next2, 1)))
, Get(x, 0)) $ Zip(Get(x, 1), localX))) $ Zip(acc, Get(next, 0))
) o toLocal(MapLcl(id)) $ Get(next, 1)),
MapLcl(id) $ Value(0.0f, ArrayTypeWSWC(Float, 64)))
$ Zip(Transpose() o Map(Split(64) o Get(0)) $ matChunk, Split(64) $ vectorX)
)) o Split(64) $ Zip(matrix, vectorY)
)
val clblast_T = fun(
ArrayTypeWSWC(ArrayTypeWSWC(Float, M), N),
ArrayTypeWSWC(Float, N),
ArrayTypeWSWC(Float, M),
Float,
Float,
(matrix, vectorX, vectorY, alpha, beta) =>
Join() o MapWrg(fun( matChunk =>
MapSeq(
toGlobal(fun(y =>
MapLcl(fun(x =>
add(
toPrivate(mult)(x._0, alpha),
toPrivate(mult)(x._1, beta)
)
)) $ Zip(y, Map(Get(1)) $ matChunk)
))
) o
ReduceSeq(fun((acc, next) =>
Let(localX =>
Join() o MapLcl(fun(x => ReduceSeq(fun((acc2, next2) =>
multAndSumUp(acc2, Get(next2, 0), Get(next2, 1)))
, Get(x, 0)) $ Zip(Get(x, 1), localX))) $ Zip(acc, Get(next, 0))
) o toLocal(MapLcl(id)) $ Get(next, 1)),
MapLcl(id) $ Value(0.0f, ArrayTypeWSWC(Float, 64)))
$ Zip(Transpose() o Map(Split(64) o Get(0)) $ matChunk, Split(64) $ vectorX)
)) o Split(64) $ Zip(Transpose() $ matrix, vectorY)
)
  // Registry of benchmark variants: (name, lambdas to compile and run).
  def apply() = new MatrixVector(Seq(
    ("FULL_MATRIX_VECTOR_FUSED_OPENCL", Array[Lambda](fullMatrixVectorFusedOpenCL)),
    ("FULL_MATRIX_VECTOR_FUSED_OPENCL_AMD", Array[Lambda](fullMatrixVectorFusedOpenCLAMD)),
    ("FULL_MATRIX_VECTOR_FUSED_OPENCL_AMD_", Array[Lambda](fullMatrixVectorFusedOpenCLAMD_)),
    ("clblast_N", Array[Lambda](clblast_N)),
    ("clblast_T", Array[Lambda](clblast_T))
  ))
  // Entry point: builds the benchmark registry and delegates to its runner.
  def main(args: Array[String]): Unit = {
    MatrixVector().run(args)
  }
}
| lift-project/lift | src/main/benchmarks/MatrixVector.scala | Scala | mit | 6,935 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.concurrent.cancelables.RefCountCancelable
import monifu.reactive.Ack.{Cancel, Continue}
import monifu.reactive.exceptions.CompositeException
import monifu.reactive.{Ack, Observer, Observable}
import monifu.reactive.internals._
import scala.collection.mutable
import scala.concurrent.{Future, Promise}
import scala.util.control.NonFatal
/** Operator implementations backing `Observable.flatScan` and
  * `Observable.flatScanDelayError`: an evolving state is threaded through
  * `op`, and each intermediate `Observable[R]` is flattened into the output.
  */
private[reactive] object flatScan {
  /**
   * Implementation for [[Observable.flatScan]].
   *
   * Back-pressure: `onNext` returns `upstreamPromise.future`, which is only
   * completed once the child observable produced by `op` terminates, so the
   * source cannot emit the next element while the current child is active.
   */
  def apply[T,R](source: Observable[T], initial: R)(op: (R, T) => Observable[R]) =
    Observable.create[R] { subscriber =>
      implicit val s = subscriber.scheduler
      val o = subscriber
      source.onSubscribe(new Observer[T] {
        // Signals downstream completion only after the source completed AND
        // every acquired child reference was released.
        private[this] val refCount = RefCountCancelable(o.onComplete())
        // Latest state; children run one at a time (serialized by the
        // upstream promise), so this single var is not raced.
        private[this] var state = initial
        def onNext(elem: T) = {
          // for protecting user calls
          var streamError = true
          try {
            val upstreamPromise = Promise[Ack]()
            val newState = op(state, elem)
            streamError = false
            val refID = refCount.acquire()
            newState.onSubscribe(new Observer[R] {
              def onNext(elem: R): Future[Ack] = {
                state = elem
                // If downstream cancels, also cancel the upstream source.
                o.onNext(elem)
                  .ifCancelTryCanceling(upstreamPromise)
              }
              def onError(ex: Throwable): Unit = {
                // error happened, so signaling both the main thread that it should stop
                // and the downstream consumer of the error
                upstreamPromise.trySuccess(Cancel)
                o.onError(ex)
              }
              def onComplete(): Unit = {
                // NOTE: we aren't sending this onComplete signal downstream to our observer
                // instead we are just instructing upstream to send the next observable
                upstreamPromise.trySuccess(Continue)
                refID.cancel()
              }
            })
            upstreamPromise.future
          }
          catch {
            case NonFatal(ex) =>
              // streamError distinguishes a failure in the user's `op`
              // (signal downstream and stop) from a failure after downstream
              // was already signalled (propagate as a failed Ack).
              if (streamError) {
                o.onError(ex)
                Cancel
              }
              else {
                Future.failed(ex)
              }
          }
        }
        def onError(ex: Throwable) = {
          // oops, error happened on main thread, piping that along should cancel everything
          o.onError(ex)
        }
        def onComplete() = {
          // Release the main reference; downstream onComplete fires once all
          // child references are released too.
          refCount.cancel()
        }
      })
    }
  /**
   * Implementation for [[Observable.flatScanDelayError]].
   *
   * Same as `apply`, except errors (from the source or from child
   * observables) are buffered and only reported as a single
   * [[CompositeException]] once everything has completed.
   */
  def delayError[T,R](source: Observable[T], initial: R)(op: (R, T) => Observable[R]) =
    Observable.create[R] { subscriber =>
      import subscriber.{scheduler => s}
      source.onSubscribe(new Observer[T] {
        private[this] var state = initial
        // Errors collected from the source and from child observables.
        private[this] val errors = mutable.ArrayBuffer.empty[Throwable]
        private[this] val refCount = RefCountCancelable {
          if (errors.nonEmpty)
            subscriber.onError(CompositeException(errors))
          else
            subscriber.onComplete()
        }
        def onNext(elem: T) = {
          // for protecting user calls
          var streamError = true
          try {
            val upstreamPromise = Promise[Ack]()
            val newState = op(state, elem)
            streamError = false
            val refID = refCount.acquire()
            newState.onSubscribe(new Observer[R] {
              def onNext(elem: R): Future[Ack] = {
                state = elem
                subscriber.onNext(elem)
                  .ifCancelTryCanceling(upstreamPromise)
              }
              def onError(ex: Throwable): Unit = {
                // Buffer the child error and keep going.
                errors += ex
                // next element please
                upstreamPromise.trySuccess(Continue)
                refID.cancel()
              }
              def onComplete(): Unit = {
                // next element please
                upstreamPromise.trySuccess(Continue)
                refID.cancel()
              }
            })
            upstreamPromise.future
          }
          catch {
            case NonFatal(ex) =>
              if (streamError) {
                onError(ex)
                Cancel
              }
              else {
                Future.failed(ex)
              }
          }
        }
        def onError(ex: Throwable) = {
          // Record the source error; reported via CompositeException at the end.
          errors += ex
          refCount.cancel()
        }
        def onComplete() = {
          refCount.cancel()
        }
      })
    }
}
| sergius/monifu | monifu/shared/src/main/scala/monifu/reactive/internals/operators/flatScan.scala | Scala | apache-2.0 | 5,271 |
// code-examples/Rounding/match-deep-pair-script.scala
// Demonstrates destructuring a (key, case-class) pair inside a single match.
class Role
case object Manager extends Role
case object Developer extends Role
case class Person(name: String, age: Int, role: Role)
val alice = new Person("Alice", 25, Developer)
val bob = new Person("Bob", 32, Manager)
val charlie = new Person("Charlie", 32, Developer)
// A partial-function literal passed to foreach replaces the explicit
// `for (item <- ...) item match { ... }` loop.
Map(1 -> alice, 2 -> bob, 3 -> charlie).foreach {
  case (_, manager @ Person(_, _, Manager)) => print(manager + " is overpaid.\n")
  case (_, other @ Person(_, _, _)) => print(other + " is underpaid.\n")
}
| foomango/scalaex | code-examples/Rounding/match-deep-pair-script.scala | Scala | mit | 549 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.compiler.config.cli
/** A CLI option name pair: the long form (`full`) and its abbreviation (`abbr`). */
private[cli] case class CommandLineConstant(full: String, abbr: String)
/** Long/short names of the options accepted on the compiler's command line. */
private[cli] object CommandLineConstants {
  val SimulationsFolder = CommandLineConstant("simulations-folder", "sf")
  val BinariesFolder = CommandLineConstant("binaries-folder", "bf")
}
| wiacekm/gatling | gatling-compiler/src/main/scala/io/gatling/compiler/config/cli/CommandLineConstants.scala | Scala | apache-2.0 | 918 |
// InterpretDTM.scala
//
// Copyright (C) 2015 Ben Wing, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.textgrounder
package postprocess
import util.argparser._
import util.collection._
import util.error.assert_==
import util.experiment._
import util.io.localfh
import util.print._
import util.table._
/**
* See description under `InterpretDTM`.
*/
/**
 * Command-line parameters for [[InterpretDTM]]. Each option documents
 * itself through its `help` string; defaults are applied by the arg parser.
 */
class InterpretDTMParameters(ap: ArgParser) {
  var output_dir = ap.option[String]("output-dir", "o", "od",
    must = be_specified,
    help = """Directory or directories containing output from running DTM.""")
  var input_prefix = ap.option[String]("input-prefix", "i", "ip",
    must = be_specified,
    help = """Input prefix of files sent to DTM.""")
  var words_per_topic = ap.option[Int]("words-per-topic", "w", "wpt",
    default = 10,
    help = """Number of top words per topic to display.""")
  var latex = ap.flag("latex",
    help = """Display in LaTeX format.""")
  var rename_headings = ap.option[String]("rename-headings", "rh",
    help = """Rename headings, in the form FROM=TO,FROM=TO,....""")
  var topics = ap.option[String]("topics", "t",
    help = """Topics to display; numbers separated by commas.""")
  var boldface = ap.option[String]("boldface", "b",
    help = """Words to boldface. Format is WORD,WORD,... In place of
a WORD can be WORD/TOPIC where TOPIC is a topic number to boldface
only a word in a specific topic.""")
  var two_columns = ap.flag("two-columns", "tc",
    help = """Display topics as two columns.""")
  var topic_probs = ap.option[String]("topic-probs", "tp",
    help = """Instead of displaying top words in topics, show the
topic probabilities for the specified words in specified topics, in
a tabular format for use with R. Format is WORD/TOPIC,WORD/TOPIC,....""")
}
/**
* Interpret the output of DTM. The output is a directory, containing
* a subdirectory 'lda-seq', containing a file 'info.dat' listing the
* number of topics, terms and slices, as well as files
* 'topic-###-var-e-log-prob.dat' containing the probabilities for each
* term in each slice, in row-major order (the probabilities for all
* slices for the first term, then the probabilities for all slices
* for the second term, etc.).
*/
object InterpretDTM extends ExperimentApp("InterpretDTM") {
  type TParam = InterpretDTMParameters
  def create_param_object(ap: ArgParser) = new InterpretDTMParameters(ap)
  case class Document(title: String, coord: String, date: Int, counts: String)
  /** Read a DTM log-probability file into an array indexed first by time
    * slice, then by term. The file stores values in row-major order by term
    * (all slices of term 0, then all slices of term 1, ...), hence the
    * modulo/division below.
    */
  def read_log_probs(file: String, num_terms: Int, num_seq: Int) = {
    val log_probs = Array.fill(num_seq, num_terms)(0.0)
    for ((line, index) <- localfh.openr(file).zipWithIndex) {
      val seqind = index % num_seq
      val termind = index / num_seq
      log_probs(seqind)(termind) = line.toDouble
    }
    log_probs
  }
  /** Interpret one DTM output directory and print the results.
    *
    * NOTE(review): `dir` is only used in informational messages; the data is
    * always read from `params.output_dir` -- confirm this is intended.
    * NOTE(review): the vocab/slice/log-prob readers are never closed (unlike
    * `info_file`); acceptable for a one-shot tool, but worth confirming.
    */
  def process_dir(dir: String) {
    // Read info file
    val info_file =
      localfh.openr("%s/lda-seq/info.dat" format params.output_dir)
    var num_topics = 0
    var num_terms = 0
    var seq_length = 0
    for (line <- info_file) {
      val parts = line.split(" ")
      if (parts.size == 2) {
        val Array(prop, value) = parts
        if (prop == "NUM_TOPICS")
          num_topics = value.toInt
        else if (prop == "NUM_TERMS")
          num_terms = value.toInt
        else if (prop == "SEQ_LENGTH")
          seq_length = value.toInt
      }
    }
    info_file.close()
    def read_topic(topic: Int) = {
      // Read the probabilities; we get an array of arrays, first indexed
      // by slice, then by word
      read_log_probs("%s/lda-seq/topic-%03d-var-e-log-prob.dat"
        format (params.output_dir, topic), num_terms, seq_length)
    }
    // Read vocab file
    val vocab = localfh.openr(params.input_prefix + "-vocab.dat").toIndexedSeq
    // Read slice file
    val orig_slices = localfh.openr(params.input_prefix + "-slice.dat").toIndexedSeq
    assert_==(orig_slices.size, seq_length)
    assert_==(vocab.size, num_terms)
    // Compute map to remap headings
    val remap_headings =
      if (params.rename_headings == null) Map[String,String]()
      else params.rename_headings.split(",").map { _.split("=") }.map {
        case Array(from, to) => (from, to)
      }.toMap
    // Compute remapped headings
    val slices = orig_slices.map { x => remap_headings.getOrElse(x, x) }
    if (params.topic_probs == null) {
      // Set of topics to include
      val topicset =
        if (params.topics == null) (0 until num_topics).toSet
        else params.topics.split(",").map(_.toInt).toSet
      // Words in topics to boldface; topic -1 means "in every topic"
      var boldfaceterms =
        if (params.boldface == null) Set[(String, Int)]()
        else {
          params.boldface.split(",").map { spec =>
            if (spec contains "/") {
              val Array(word, topic) = spec.split("/")
              (word, topic.toInt)
            } else
              (spec, -1)
          }.toSet
        }
      // For each topic, find the top words
      val topic_top_words =
        for (topic <- 0 until num_topics; if topicset contains topic) yield {
          // Read the probabilities; we get an array of arrays, first indexed
          // by slice, then by word
          val log_probs = read_topic(topic)
          // outprint("Log probs: %s" format log_probs.map(_.toIndexedSeq).toIndexedSeq)
          // For each slice, find the top N words by probability.
          // Transpose the resulting array of arrays so we output the slice
          // words in columns.
          // val seq_top_word_probs =
          //   log_probs.map { seq_topic_probs =>
          //     seq_topic_probs.zipWithIndex.sortBy(-_._1).
          //       take(params.words_per_topic).map {
          //         case (prob, index) => "%s, %s" format (index, prob)
          //       }.toIndexedSeq
          //   }.toIndexedSeq.transpose
          // outprint(format_table(slices +: seq_top_word_probs))
          val seq_top_words =
            log_probs.map { seq_topic_probs =>
              seq_topic_probs.zipWithIndex.sortBy(-_._1).
                take(params.words_per_topic).map(_._2).map {
                  index => vocab(index)
                }.toIndexedSeq
            }.toIndexedSeq.transpose
          (seq_top_words, topic)
        }
      // Maybe boldface some words (LaTeX mode only)
      val bf_topic_top_words =
        if (!params.latex) topic_top_words
        else {
          topic_top_words.map { case (words, topic) =>
            (words.map { line =>
              line.map { word =>
                if (boldfaceterms.contains((word, topic)) ||
                    boldfaceterms.contains((word, -1)))
                  """\\textit{\\textbf{\\textcolor{blue}{%s}}}""" format word
                else
                  word
              }
            }, topic)
          }
        }
      // Place two topics' rows side by side for two-column output.
      def paste_two_topics(topic_1: Seq[Seq[String]],
          topic_2: Seq[Seq[String]]) = {
        (topic_1 zip topic_2).map { case (x, y) => x ++ y }
      }
      if (params.two_columns) {
        if (params.latex)
          outprint("""\\begin{tabular}{|%s|%s|}""", "c" * slices.size,
            "c" * slices.size)
        var first = true
        // Process topics in pairs; an odd final topic is padded with blanks.
        bf_topic_top_words.sliding(2, 2).foreach { group =>
          val ((tstr1, tstr2), headers, words) = group match {
            case Seq((tw1, topic1), (tw2, topic2)) => {
              (("Topic %s" format topic1, "Topic %s" format topic2),
                slices ++ slices,
                paste_two_topics(tw1, tw2))
            }
            case Seq((tw1, topic1)) => {
              (("Topic %s" format topic1, ""),
                slices ++ slices.map(x => ""),
                paste_two_topics(tw1, tw1.map { x => x.map(y => "") }))
            }
          }
          if (params.latex) {
            if (first)
              outprint("""\\hline""")
            else
              outprint("""\\hhline{|%s|%s|}""", "=" * slices.size,
                "=" * slices.size)
            outprint("""\\multicolumn{%s}{|c}{%s} & \\multicolumn{%s}{|c|}{%s} \\\\
              \\hline
              %s \\\\
              \\hline""",
              slices.size, tstr1, slices.size, tstr2,
              headers mkString " & "
            )
            for (line <- words) {
              outprint("""%s \\\\""",
                line mkString " & "
              )
            }
          } else {
            outprint("For dir %s: %s, %s" format (dir, tstr1, tstr2))
            outprint(format_table(headers +: words))
          }
          first = false
        }
        if (params.latex)
          outprint("""\\hline
            \\end{tabular}""")
      } else {
        if (params.latex)
          outprint("""\\begin{tabular}{|%s|}""", "c" * slices.size)
        var first = true
        for ((seq_top_words, topic) <- bf_topic_top_words) {
          if (params.latex) {
            if (first)
              outprint("""\\hline""")
            else
              outprint("""\\hhline{|%s|}""", "=" * slices.size)
            outprint("""\\multicolumn{%s}{|c|}{Topic %s} \\\\
              \\hline
              %s \\\\
              \\hline""",
              slices.size, topic, slices mkString " & "
            )
            for (line <- seq_top_words) {
              outprint("""%s \\\\""",
                line mkString " & "
              )
            }
          } else {
            outprint("For dir %s, topic %s:" format (dir, topic))
            outprint(format_table(slices +: seq_top_words))
          }
          first = false
        }
        if (params.latex)
          outprint("""\\hline
            \\end{tabular}""")
      }
    } else {
      // --topic-probs: tabular per-slice log-probs of selected word/topic pairs
      val words_topics = params.topic_probs.split(",").map(_.split("/")).map {
        case Array(word, topic) => (word, topic.toInt)
      }
      val vocab_to_index = vocab.zipWithIndex.toMap
      outprint("slice logprob word")
      for ((word, topic) <- words_topics) {
        val ind = vocab_to_index(word)
        // Read the topic probabilities; we get an array of arrays,
        // first indexed by slice, then by word
        val topic_logprobs = read_topic(topic)
        val word_logprobs = topic_logprobs.map(_(ind))
        for ((logprob, slice) <- word_logprobs zip slices) {
          outprint(s"$slice $logprob $word")
        }
      }
    }
  }
  // ExperimentApp hook: exit code 0 on success.
  def run_program(args: Array[String]) = {
    process_dir(params.output_dir)
    0
  }
}
| utcompling/textgrounder | src/main/scala/opennlp/textgrounder/postprocess/InterpretDTM.scala | Scala | apache-2.0 | 10,941 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
package backend
import io.AbstractFile
import scala.tools.nsc.classpath.AggregateClassPath
import scala.tools.util.PathResolver
import scala.tools.nsc.util.ClassPath
/** The JVM backend platform: resolves and caches the compiler classpath and
  * contributes the JVM-specific compiler phases.
  */
trait JavaPlatform extends Platform {
  val global: Global
  override val symbolTable: global.type = global
  import global._
  import definitions._
  // Cached classpath; None until first access or until updateClassPath replaces it.
  private[nsc] var currentClassPath: Option[ClassPath] = None
  protected[nsc] def classPath: ClassPath = {
    if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings, global.closeableRegistry).result)
    currentClassPath.get
  }
  /** Update classpath with a substituted subentry */
  def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = global.classPath match {
    case AggregateClassPath(entries) =>
      currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e))))
    case cp: ClassPath =>
      currentClassPath = Some(subst.getOrElse(cp, cp))
  }
  // JVM-specific phases appended to the compiler pipeline, in run order.
  def platformPhases = List(
    flatten, // get rid of inner classes
    genBCode // generate .class files
  )
  // BoxesRunTime helpers used by the backend when emitting equality tests.
  lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
  lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
  lazy val externalEqualsNumChar = getDecl(BoxesRunTimeClass, nme.equalsNumChar)
  lazy val externalEqualsNumObject = getDecl(BoxesRunTimeClass, nme.equalsNumObject)
  /** We could get away with excluding BoxedBooleanClass for the
   * purpose of equality testing since it need not compare equal
   * to anything but other booleans, but it should be present in
   * case this is put to other uses.
   */
  def isMaybeBoxed(sym: Symbol) = {
    (sym == ObjectClass) ||
    (sym == SerializableClass) ||
    (sym == ComparableClass) ||
    (sym isNonBottomSubClass BoxedNumberClass) ||
    (sym isNonBottomSubClass BoxedCharacterClass) ||
    (sym isNonBottomSubClass BoxedBooleanClass)
  }
  // A source needs recompiling when it is at least as new as its classfile.
  def needCompile(bin: AbstractFile, src: AbstractFile) =
    src.lastModified >= bin.lastModified
}
| scala/scala | src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | Scala | apache-2.0 | 2,323 |
package chandu0101.scalajs.facades.examples.pages.common
import chandu0101.scalajs.react.components.models.Github
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.all._
import scala.scalajs.js
/**
* Created by chandrasekharkode .
*/
/** scalajs-react component rendering a single GitHub user as a linked
  * avatar image plus login name.
  */
object GithubUser {
  // NOTE(review): `checked` is never read or updated in this file --
  // presumably leftover scaffolding; confirm before removing.
  case class State(checked: Boolean)
  // Inline styles for the anchor, avatar image and login label.
  object Styles {
    val userGroup = Seq(display := "inline-block" ,textAlign := "center" ,textDecoration := "none" ,color := "rgb(200, 205, 200)")
    val userIcon = Seq(margin := "10px" ,display := "block" ,width := "100px" , height := "100px" , borderRadius := "50%")
    val userName = Seq(fontSize := "18px" , fontWeight := 500)
  }
  // Currently empty; kept so the component builder's backend slot is filled.
  class Backend(t: BackendScope[Props, State]) {
  }
  val component = ReactComponentB[Props]("GithubUser")
    .initialState(State(false))
    .backend(new Backend(_))
    .render((P, S, B) => {
    a( Styles.userGroup ,href := P.user.html_url)(
      img(Styles.userIcon ,src := P.user.avatar_url),
      span(Styles.userName)(P.user.login)
    )
  })
    .build
  // The GitHub user record to render.
  case class Props(user : Github)
  def apply(user : Github,ref: js.UndefOr[String] = "", key: js.Any = {}) = component.set(key, ref)(Props(user))
}
| CapeSepias/scalajs-facades | examples/src/main/scala/chandu0101/scalajs/facades/examples/pages/common/GithubUser.scala | Scala | mit | 1,187 |
package com.github.lstephen.ootp.ai.selection.bench
import com.github.lstephen.ootp.ai.player.Player
import com.github.lstephen.ootp.ai.player.ratings.Position
import com.github.lstephen.ootp.ai.regression.Predictor
import com.github.lstephen.ootp.ai.score.Score
import com.github.lstephen.ootp.ai.selection.depthchart.DepthChart.Backup
import com.github.lstephen.ootp.ai.selection.depthchart.DepthChartSelection
import com.github.lstephen.ootp.ai.selection.lineup.Defense
import com.github.lstephen.ootp.ai.selection.lineup.InLineupScore
import com.github.lstephen.ootp.ai.selection.lineup.Lineup
import com.github.lstephen.ootp.ai.selection.lineup.Lineup.VsHand
import collection.JavaConversions._
/** Scores a bench behind a given lineup: a depth chart is built from the
  * bench candidates, and for each non-pitcher lineup spot the first backup's
  * weighted score is summed.
  */
class BenchScorer(implicit predictor: Predictor) {
  val depthChartSelection = new DepthChartSelection
  /** Java-friendly overload: same as `score_` but returns a raw Double. */
  def score(bench: java.lang.Iterable[Player],
            lineup: Lineup,
            vs: VsHand): Double = {
    score_(bench, lineup, vs).toDouble
  }
  def score_(bench: Traversable[Player], lineup: Lineup, vs: VsHand): Score = {
    // Build a depth chart for this lineup out of the bench candidates.
    val dc = depthChartSelection.select(lineup, bench.toSet, vs)
    lineup
      .filter(_.getPositionEnum != Position.PITCHER)
      // only the first backup at each position contributes
      .flatMap(bu => dc.getBackups(bu.getPositionEnum).take(1))
      .map(score(_, vs))
      .total
  }
  /** One backup's contribution: in-lineup score scaled by the defensive
    * position factor and the backup's playing-time percentage.
    */
  def score(bu: Backup, vs: VsHand): Score =
    (Defense.getPositionFactor(bu.getPosition) * bu.getPercentage / 100.0) *: InLineupScore(
      bu.getPlayer,
      bu.getPosition,
      vs).score
}
| lstephen/ootp-ai | src/main/scala/com/github/lstephen/ootp/ai/selection/bench/BenchScorer.scala | Scala | apache-2.0 | 1,478 |
package nabab
/** Builds an empty [[DefaultGraph]] whose underlying collections come from
  * the implicit [[GraphFactory]].
  */
object DefaultGraph {
  def apply(implicit factory: GraphFactory) =
    new DefaultGraph(
      nodes = factory.makeNodeSet,
      edges = factory.makeEdgeSet,
      destinations = factory.makeEdgeNodeMap,
      origins = factory.makeEdgeNodeMap,
      incomings = factory.makeNodeMap[Set[Edge]],
      outgoings = factory.makeNodeMap[Set[Edge]])
}
/** Immutable adjacency-map graph.
  *
  * `destinations`/`origins` map each edge to its endpoints;
  * `incomings`/`outgoings` map each node to the edges touching it.
  */
case class DefaultGraph(
  nodes: Set[Node],
  edges: Set[Edge],
  destinations: Map[Edge, Node],
  origins: Map[Edge, Node],
  incomings: Map[Node, Set[Edge]],
  outgoings: Map[Node, Set[Edge]])
  (implicit val factory: GraphFactory)
  extends Graph {
  override def origin(edge: Edge) = origins(edge)
  override def destination(edge: Edge) = destinations(edge)
  override def incoming(node: Node) = incomings(node)
  override def outgoing(node: Node) = outgoings(node)
  // Ensure every given node has an entry in the map, defaulting to an empty
  // edge set, so lookups on isolated nodes don't fail.
  private[this]
  def fillWithDefault(nodes: Set[Node],
      map: Map[Node, Set[Edge]])
      : Map[Node, Set[Edge]] =
    map ++ (for (n <- nodes -- map.keys) yield (n -> factory.makeEdgeSet))
  // Returns a new graph with the nodes and edge definitions added.
  // NOTE(review): `incomings(destination)` / `outgoings(origin)` will throw
  // if a new edge targets a node with no existing entry (e.g. a node that is
  // itself in `newNodes`) -- confirm the factory maps supply a default.
  override def add(newNodes: Set[Node], newEdges: Seq[EdgeDefinition]) =
    DefaultGraph(
      nodes = nodes ++ newNodes,
      edges = edges ++ newEdges.map(_.edge),
      destinations = destinations ++ newEdges.map(e => e.edge -> e.destination),
      origins = origins ++ newEdges.map(e => e.edge -> e.origin),
      incomings = fillWithDefault(newNodes, incomings ++ newEdges.groupBy(_.destination).toSeq.map({
        case (destination, edges) =>
          destination -> (incomings(destination) ++ edges.map(_.edge))
      })),
      outgoings = fillWithDefault(newNodes, outgoings ++ newEdges.groupBy(_.origin).toSeq.map({
        case (origin, edges) =>
          origin -> (outgoings(origin) ++ edges.map(_.edge))
      })))
  override def remove(removedNodes: Set[Node], removedEdges: Set[Edge]) = {
    // Drop the requested edges plus every edge touching a removed node.
    val allRemovedEdges: Set[Edge] =
      removedEdges ++
      removedNodes.flatMap(incomings) ++
      removedNodes.flatMap(outgoings)
    type Connection = (Node, Set[Edge])
    // Keep only connections of surviving nodes, minus the dropped edges.
    val transformConnections: PartialFunction[Connection, Connection] = {
      case (node, edges) if !removedNodes(node) =>
        (node, edges.filterNot(allRemovedEdges))
    }
    DefaultGraph(
      nodes = nodes -- removedNodes,
      edges = edges -- allRemovedEdges,
      destinations = destinations -- allRemovedEdges,
      origins = origins -- allRemovedEdges,
      incomings = incomings.toIterator.collect(transformConnections).toMap,
      outgoings = outgoings.toIterator.collect(transformConnections).toMap)
  }
}
| ochafik/nabab | ScalaPort/src/main/scala/DefaultGraph.scala | Scala | mit | 2,694 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.aliyun.udfs.ml
import java.lang
import org.apache.hadoop.hive.ql.exec.UDFArgumentException
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, StructObjectInspector}
import org.apache.hadoop.hive.serde2.objectinspector.primitive._
import org.json4s.DefaultFormats
import org.apache.spark.internal.Logging
import org.apache.spark.ml.util.ParquetFormatModelMetadataLoader
import org.apache.spark.mllib.classification.LogisticRegressionModel
import org.apache.spark.mllib.linalg._
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.sql.execution.datasources.parquet.ParquetFormatModelLoader
import org.apache.spark.sql.types._
/** Hive [[GenericUDF]] that scores feature vectors with a pre-trained Spark
  * MLlib logistic-regression model.
  *
  * Arguments: (modelPath: String, features: struct-encoded Vector OR
  * libSVM-format String). Returns the model's prediction as a Double.
  */
class LogisticRegressionUDF extends GenericUDF with Logging {
  // Inspectors captured in initialize() and reused per row in evaluate().
  var _x1: StringObjectInspector = _
  var _x2: ObjectInspector = _
  // True when the second argument is a struct-encoded Vector rather than a
  // libSVM-format string.
  var isVectorType = false
  override def getDisplayString(children: Array[String]): String = "Logistic_Regression"
  override def initialize(arguments: Array[ObjectInspector]): ObjectInspector = {
    if (arguments.length != 2) {
      throw new UDFArgumentException(
        s"""Logistic_Regression requires 2 arguments, got ${arguments.length}.
           |Arguments should be: (modelPath, features).
           |
           |   modelPath: LogisticRegression pre-trained model path in HDFS or OSS.
           |   features: data vector/string
         """.stripMargin)
    }
    val Array(x1, x2) = arguments
    if (!x1.isInstanceOf[StringObjectInspector]
      || (!x2.isInstanceOf[StructObjectInspector] && !x2.isInstanceOf[StringObjectInspector])) {
      val errorMsg =
        s"""Argument type error.
           |(modelPath: string, features: vector)
           |(${x1.isInstanceOf[StringObjectInspector]}, ${x2.isInstanceOf[StructObjectInspector]})
           |or
           |(modelPath: string, features: string)
           |(${x1.isInstanceOf[StringObjectInspector]}, ${x2.isInstanceOf[StringObjectInspector]})
         """.stripMargin
      logError(errorMsg)
      throw new UDFArgumentException(errorMsg)
    }
    _x1 = x1.asInstanceOf[StringObjectInspector]
    _x2 = x2 match {
      case _: StructObjectInspector =>
        isVectorType = true
        x2.asInstanceOf[StructObjectInspector]
      case _: StringObjectInspector =>
        x2.asInstanceOf[StringObjectInspector]
    }
    // The UDF always yields a Double prediction.
    PrimitiveObjectInspectorFactory.javaDoubleObjectInspector
  }
  override def evaluate(arguments: Array[GenericUDF.DeferredObject]): AnyRef = {
    val modelPath = _x1.getPrimitiveJavaObject(arguments(0).get())
    // Loaded once per process and cached; see companion object.
    val model = LogisticRegressionUDF.loadModel(modelPath)
    val vector = if (isVectorType) {
      // Struct layout: field 0 = type tag (0 = sparse, 1 = dense),
      // field 1 = size, field 2 = indices, field 3 = values.
      val features = _x2.asInstanceOf[StructObjectInspector]
        .getStructFieldsDataAsList(arguments(1).get())
      features.get(0).asInstanceOf[Byte] match {
        case 0 =>
          val size = features.get(1).asInstanceOf[Int]
          val indices = features.get(2).asInstanceOf[Array[Int]]
          val values = features.get(3).asInstanceOf[Array[Double]]
          new SparseVector(size, indices, values)
        case 1 =>
          val values = features.get(3).asInstanceOf[Array[Double]]
          new DenseVector(values)
      }
    } else {
      // String input is parsed as one libSVM record (label index:value ...).
      val line = _x2.asInstanceOf[StringObjectInspector].getPrimitiveJavaObject(arguments(1).get())
      val record = MLUtils.parseLibSVMRecord(line)
      new SparseVector(model.numFeatures, record._2, record._3)
    }
    new lang.Double(model.predict(vector))
  }
}
/** Process-wide cache for the loaded logistic-regression model.
  *
  * NOTE(review): the model is loaded once per JVM and reused for every later
  * call, even if a *different* `modelPath` is passed -- confirm whether
  * multiple models per process need to be supported.
  */
object LogisticRegressionUDF {
  // Both vars are written only while holding `lock`.
  var initialized: Boolean = false
  var model: LogisticRegressionModel = _
  val lock = new Object
  val className = "org.apache.spark.mllib.classification.LogisticRegressionModel"
  object VectorType extends VectorUDT
  // Columns expected in the persisted (Parquet) model data.
  val requiredSchema = StructType(Array(
    StructField("weights", VectorType),
    StructField("intercept", DoubleType),
    StructField("threshold", DoubleType)
  ))
  def loadModel(modelPath: String): LogisticRegressionModel = {
    lock.synchronized {
      if (!initialized) {
        val (loadedClassName, version, metadata) =
          ParquetFormatModelMetadataLoader.loadModelMetaData(modelPath)
        // Only (className, format version 1.0) is supported.
        (loadedClassName, version) match {
          case (clazzName, "1.0") if clazzName == className =>
            implicit val formats = DefaultFormats
            val numFeatures = (metadata \\ "numFeatures").extract[Int]
            val numClasses = (metadata \\ "numClasses").extract[Int]
            val (weights, intercept, threshold) =
              ParquetFormatModelLoader.loadModelData(modelPath, className, requiredSchema)
            model = new LogisticRegressionModel(weights, intercept, numFeatures, numClasses)
            // A persisted threshold of None means "output raw probabilities".
            threshold match {
              case Some(t) => model.setThreshold(t)
              case None => model.clearThreshold()
            }
            initialized = true
          case _ => throw new Exception(
            s"ParquetFormatModelMetadataLoader.loadModel did not recognize model with " +
              s"(className, format version): ($loadedClassName, $version).  Supported:\\n" +
              s"($className, 1.0)")
        }
      }
      model
    }
  }
}
| aliyun/aliyun-emapreduce-sdk | emr-sql/src/main/scala/org/apache/spark/sql/aliyun/udfs/ml/LogisticRegressionUDF.scala | Scala | artistic-2.0 | 5,997 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset to a sample of code snippets matching specific criteria, giving a quick overview of its contents rather than any deeper analysis.