code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
package mesosphere.marathon.upgrade
import akka.actor.{ ActorSystem, Props }
import akka.testkit.{ TestActorRef, TestKit }
import mesosphere.marathon.TaskUpgradeCanceledException
import mesosphere.marathon.event.{ HealthStatusChanged, MesosStatusUpdateEvent }
import mesosphere.marathon.state.AppDefinition
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.tasks.TaskQueue
import org.mockito.Mockito.{ spy, times, verify }
import org.scalatest.{ BeforeAndAfterAll, FunSuiteLike, Matchers }
import scala.concurrent.duration._
import scala.concurrent.{ Await, Promise }
/**
 * Tests for TaskStartActor: verifies that the actor queues the requested
 * number of tasks, completes its promise on success or cancellation, and
 * re-queues failed tasks.
 *
 * Consistency fix: the actor completes its promise with Unit (every test
 * asserts `should be(())`), so every test now declares Promise[Unit]; some
 * previously declared Promise[Boolean], which only worked through erasure.
 */
class TaskStartActorTest
    extends TestKit(ActorSystem("System"))
    with FunSuiteLike
    with Matchers
    with BeforeAndAfterAll {

  override protected def afterAll(): Unit = {
    super.afterAll()
    system.shutdown()
  }

  test("Start success") {
    val taskQueue = new TaskQueue
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 5)

    val ref = TestActorRef(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      false,
      promise))

    watch(ref)

    // The actor should queue one task per requested instance.
    awaitCond(taskQueue.count(app) == 5, 3.seconds)

    for (_ <- taskQueue.removeAll())
      system.eventStream.publish(MesosStatusUpdateEvent("", "", "TASK_RUNNING", app.id, "", Nil, app.version.toString))

    Await.result(promise.future, 3.seconds) should be(())

    expectTerminated(ref)
  }

  test("Start success with no instances to start") {
    val taskQueue = new TaskQueue
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 0)

    val ref = TestActorRef(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      false,
      promise))

    watch(ref)

    // Nothing to start: the promise should complete without any status updates.
    Await.result(promise.future, 3.seconds) should be(())

    expectTerminated(ref)
  }

  test("Start with health checks") {
    val taskQueue = new TaskQueue
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 5)

    val ref = TestActorRef(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      true,
      promise))

    watch(ref)

    awaitCond(taskQueue.count(app) == 5, 3.seconds)

    // With health checks enabled, healthy events (rather than TASK_RUNNING
    // status updates) drive completion.
    for ((_, i) <- taskQueue.removeAll().zipWithIndex)
      system.eventStream.publish(HealthStatusChanged(app.id, s"task_${i}", app.version.toString, true))

    Await.result(promise.future, 3.seconds) should be(())

    expectTerminated(ref)
  }

  test("Start with health checks with no instances to start") {
    val taskQueue = new TaskQueue
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 0)

    val ref = TestActorRef(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      true,
      promise))

    watch(ref)

    Await.result(promise.future, 3.seconds) should be(())

    expectTerminated(ref)
  }

  test("Cancelled") {
    val taskQueue = new TaskQueue
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 5)

    val ref = system.actorOf(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      false,
      promise))

    watch(ref)

    // Stopping the actor before completion must fail the promise.
    system.stop(ref)

    intercept[TaskUpgradeCanceledException] {
      Await.result(promise.future, 5.seconds)
    }.getMessage should equal("The task upgrade has been cancelled")

    expectTerminated(ref)
  }

  test("Task fails to start") {
    val taskQueue = spy(new TaskQueue)
    val promise = Promise[Unit]()
    val app = AppDefinition("myApp".toPath, instances = 1)

    val ref = TestActorRef(Props(
      classOf[TaskStartActor],
      taskQueue,
      system.eventStream,
      app,
      app.instances,
      false,
      promise))

    watch(ref)

    awaitCond(taskQueue.count(app) == 1, 3.seconds)

    for (_ <- taskQueue.removeAll())
      system.eventStream.publish(MesosStatusUpdateEvent("", "", "TASK_FAILED", app.id, "", Nil, app.version.toString))

    // A failed task must be re-queued, i.e. add(app) was called twice in total.
    awaitCond(taskQueue.count(app) == 1, 3.seconds)
    verify(taskQueue, times(2)).add(app)

    for (_ <- taskQueue.removeAll())
      system.eventStream.publish(MesosStatusUpdateEvent("", "", "TASK_RUNNING", app.id, "", Nil, app.version.toString))

    Await.result(promise.future, 3.seconds) should be(())

    expectTerminated(ref)
  }
}
| tnachen/marathon | src/test/scala/mesosphere/marathon/upgrade/TaskStartActorTest.scala | Scala | apache-2.0 | 4,519 |
package me.pzang.utils
import org.scalatest.{FlatSpec, Matchers}
import scala.util.Random
/**
* Created by pzang on 4/6/16.
*/
/**
 * Tests for CommonUtils runtime type checks on boxed numbers and on lists of
 * boxed values.
 */
class CommonUtilsTests extends FlatSpec with Matchers {

  val d: java.lang.Double = java.lang.Double.valueOf(1.234)
  val i: java.lang.Integer = java.lang.Integer.valueOf(123)
  val l: java.lang.Long = java.lang.Long.valueOf(Random.nextLong())
  val str: String = Random.nextString(10)

  val longlist: List[Object] = (1L to 10L).map(java.lang.Long.valueOf(_)).toList
  // BUG FIX: was built with java.lang.Double.valueOf, contradicting both the
  // name and the assertions below (isLongList(intlist) shouldBe true while
  // isDoubleList(intlist) shouldBe false, matching isLong(i) shouldBe true).
  val intlist: List[Object] = (1 to 10).map(java.lang.Integer.valueOf(_)).toList
  val doublelist: List[Object] = (1 to 10).map(_ => java.lang.Double.valueOf(Random.nextDouble())).toList
  val strlist: List[Object] = (1 to 10).map(_ => Random.nextString(10)).toList
  // Mixed list containing a null entry; no list check should accept it.
  val nulllist: List[Object] = List(1, 2L, 3.5, null).asInstanceOf[List[Object]]

  "isDouble" should "check if Number is double correctly" in {
    CommonUtils.isDouble(d) shouldBe true
    CommonUtils.isDouble(i) shouldBe false
    CommonUtils.isDouble(l) shouldBe false
  }

  "isLong" should "check if Number is Long correctly" in {
    CommonUtils.isLong(l) shouldBe true
    CommonUtils.isLong(i) shouldBe true
    CommonUtils.isLong(d) shouldBe false
  }

  "isLongList/DoubleList/StringList/ConfigList" should "check correctly list type" in {
    CommonUtils.isLongList(longlist) shouldBe true
    CommonUtils.isLongList(intlist) shouldBe true
    CommonUtils.isLongList(doublelist) shouldBe false
    CommonUtils.isLongList(strlist) shouldBe false
    CommonUtils.isLongList(nulllist) shouldBe false
    CommonUtils.isDoubleList(doublelist) shouldBe true
    CommonUtils.isDoubleList(intlist) shouldBe false
    CommonUtils.isDoubleList(longlist) shouldBe false
    CommonUtils.isDoubleList(strlist) shouldBe false
    CommonUtils.isDoubleList(nulllist) shouldBe false
    CommonUtils.isStringList(strlist) shouldBe true
    CommonUtils.isStringList(longlist) shouldBe false
    CommonUtils.isStringList(doublelist) shouldBe false
    CommonUtils.isStringList(intlist) shouldBe false
    CommonUtils.isStringList(nulllist) shouldBe false
  }
}
| pzang/config-sbt-plugin | src/test/scala/me/pzang/utils/CommonUtilsTests.scala | Scala | mit | 2,120 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.jmock
import org.scalatest._
import scala.reflect.Manifest
import org.jmock.Expectations
import org.hamcrest.core.IsAnything
import org.scalatest.events._
/**
 * Reusable jMock expectation helpers for asserting how many tests in a suite
 * are reported as passed or failed through the ScalaTest Reporter.
 */
trait SuiteExpectations {

  def expectSingleTestToPass(expectations: Expectations, reporter: Reporter) =
    expectNTestsToPass(expectations, 1, reporter)

  def expectSingleTestToFail(expectations: Expectations, reporter: Reporter) =
    expectNTestsToFail(expectations, 1, reporter)

  def expectNTestsToPass(expectations: Expectations, n: Int, reporter: Reporter) =
    expectNTestsToRun(expectations, n, reporter) {
      expectations.one(reporter).apply(expectations.`with`(new IsAnything[TestSucceeded]))
    }

  def expectNTestsToFail(expectations: Expectations, n: Int, reporter: Reporter) =
    expectNTestsToRun(expectations, n, reporter) {
      expectations.one(reporter).apply(expectations.`with`(new IsAnything[TestFailed]))
    }

  /**
   * Expects `n` TestStarting events, each followed by the per-test expectation
   * `f`, and no SuiteStarting/SuiteCompleted events at all.
   */
  def expectNTestsToRun(expectations: Expectations, n: Int, reporter: Reporter)(f: => Unit) = {
    expectations.never(reporter).apply(expectations.`with`(new IsAnything[SuiteStarting]))
    (1 to n).foreach { _ =>
      expectations.one(reporter).apply(expectations.`with`(new IsAnything[TestStarting]))
      f
    }
    expectations.never(reporter).apply(expectations.`with`(new IsAnything[SuiteCompleted]))
  }
}
| travisbrown/scalatest | src/test/scala/org/scalatest/jmock/SuiteExpectations.scala | Scala | apache-2.0 | 1,951 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.data.format
import java.text.{ DateFormat, SimpleDateFormat }
import java.time.temporal.{ ChronoField, TemporalAccessor, TemporalField, TemporalQueries }
import java.time._
import java.time.format.{ DateTimeFormatter, DateTimeFormatterBuilder, ResolverStyle }
import java.util.UUID
import play.api.data._
import annotation.implicitNotFound
/**
 * Handles field binding and unbinding.
 */
@implicitNotFound(
  msg = "Cannot find Formatter type class for ${T}. Perhaps you will need to import play.api.data.format.Formats._ "
)
trait Formatter[T] {

  /**
   * The expected input format, if any, described as a message key and its
   * arguments (e.g. `"format.date" -> Seq(pattern)`).
   */
  val format: Option[(String, Seq[Any])] = None

  /**
   * Binds this field, i.e. constructs a concrete value from submitted data.
   *
   * @param key the field key
   * @param data the submitted data
   * @return Either a concrete value of type T or a set of errors if the binding failed.
   */
  def bind(key: String, data: Map[String, String]): Either[Seq[FormError], T]

  /**
   * Unbinds this field, i.e. transforms a concrete value to plain data.
   *
   * @param key the field key
   * @param value the value to unbind
   * @return either the plain data or a set of errors if unbinding failed
   */
  def unbind(key: String, value: T): Map[String, String]
}
/** This object defines several default formatters. */
object Formats {

  /**
   * Formatter for ignored values.
   *
   * @param value As we ignore this parameter in binding/unbinding we have to provide a default value.
   */
  def ignoredFormat[A](value: A): Formatter[A] = new Formatter[A] {
    def bind(key: String, data: Map[String, String]) = Right(value)
    def unbind(key: String, value: A) = Map.empty
  }

  /**
   * Default formatter for the `String` type.
   */
  implicit def stringFormat: Formatter[String] = new Formatter[String] {
    // A missing key yields "error.required"; an empty string is still a valid value.
    def bind(key: String, data: Map[String, String]) = data.get(key).toRight(Seq(FormError(key, "error.required", Nil)))
    def unbind(key: String, value: String) = Map(key -> value)
  }

  /**
   * Default formatter for the `Char` type.
   */
  implicit def charFormat: Formatter[Char] = new Formatter[Char] {
    // Accepts exactly one character; a single space is rejected as if missing.
    def bind(key: String, data: Map[String, String]) =
      data.get(key).filter(s => s.length == 1 && s != " ").map(s => Right(s.charAt(0))).getOrElse(
        Left(Seq(FormError(key, "error.required", Nil)))
      )
    def unbind(key: String, value: Char) = Map(key -> value.toString)
  }

  /**
   * Helper for formatters binders
   * @param parse Function parsing a String value into a T value, throwing an exception in case of failure
   * @param errMsg Error message key to set in case of parsing failure
   * @param errArgs Arguments of the error message
   * @param key Key name of the field to parse
   * @param data Field data
   */
  def parsing[T](parse: String => T, errMsg: String, errArgs: Seq[Any])(key: String, data: Map[String, String]): Either[Seq[FormError], T] = {
    stringFormat.bind(key, data).right.flatMap { s =>
      // Any throwable raised by `parse` is converted (via allCatch) into a
      // single FormError for the key.
      scala.util.control.Exception.allCatch[T]
        .either(parse(s))
        .left.map(e => Seq(FormError(key, errMsg, errArgs)))
    }
  }

  // Shared implementation for the numeric formatters below. `real` selects the
  // real-number message keys ("format.real"/"error.real") over the integral
  // ones ("format.numeric"/"error.number").
  private def numberFormatter[T](convert: String => T, real: Boolean = false): Formatter[T] = {
    val (formatString, errorString) = if (real) ("format.real", "error.real") else ("format.numeric", "error.number")
    new Formatter[T] {
      override val format = Some(formatString -> Nil)
      def bind(key: String, data: Map[String, String]) =
        parsing(convert, errorString, Nil)(key, data)
      def unbind(key: String, value: T) = Map(key -> value.toString)
    }
  }

  /**
   * Default formatter for the `Long` type.
   */
  implicit def longFormat: Formatter[Long] = numberFormatter(_.toLong)

  /**
   * Default formatter for the `Int` type.
   */
  implicit def intFormat: Formatter[Int] = numberFormatter(_.toInt)

  /**
   * Default formatter for the `Short` type.
   */
  implicit def shortFormat: Formatter[Short] = numberFormatter(_.toShort)

  /**
   * Default formatter for the `Byte` type.
   */
  implicit def byteFormat: Formatter[Byte] = numberFormatter(_.toByte)

  /**
   * Default formatter for the `Float` type.
   */
  implicit def floatFormat: Formatter[Float] = numberFormatter(_.toFloat, real = true)

  /**
   * Default formatter for the `Double` type.
   */
  implicit def doubleFormat: Formatter[Double] = numberFormatter(_.toDouble, real = true)

  /**
   * Default formatter for the `BigDecimal` type.
   *
   * @param precision optional (precision, scale) constraint enforced when binding.
   */
  def bigDecimalFormat(precision: Option[(Int, Int)]): Formatter[BigDecimal] = new Formatter[BigDecimal] {

    override val format = Some(("format.real", Nil))

    def bind(key: String, data: Map[String, String]) = {
      Formats.stringFormat.bind(key, data).right.flatMap { s =>
        scala.util.control.Exception.allCatch[BigDecimal]
          .either {
            val bd = BigDecimal(s)
            // NOTE: `s` in this case pattern is the scale and shadows the bound
            // string above; `p` is the precision.
            precision.map({
              case (p, s) =>
                if (bd.precision - bd.scale > p - s) {
                  throw new java.lang.ArithmeticException("Invalid precision")
                }
                bd.setScale(s)
            }).getOrElse(bd)
          }
          .left.map { e =>
            Seq(
              precision match {
                case Some((p, s)) => FormError(key, "error.real.precision", Seq(p, s))
                case None => FormError(key, "error.real", Nil)
              }
            )
          }
      }
    }

    // When a precision is configured, unbinding renders the value at the
    // configured scale.
    def unbind(key: String, value: BigDecimal) = Map(key -> precision.map({ p => value.setScale(p._2) }).getOrElse(value).toString)
  }

  /**
   * Default formatter for the `BigDecimal` type with no precision
   */
  implicit val bigDecimalFormat: Formatter[BigDecimal] = bigDecimalFormat(None)

  /**
   * Default formatter for the `Boolean` type.
   */
  implicit def booleanFormat: Formatter[Boolean] = new Formatter[Boolean] {

    override val format = Some(("format.boolean", Nil))

    // A missing key binds to false; anything other than "true"/"false" is an error.
    def bind(key: String, data: Map[String, String]) = {
      Right(data.get(key).getOrElse("false")).right.flatMap {
        case "true" => Right(true)
        case "false" => Right(false)
        case _ => Left(Seq(FormError(key, "error.boolean", Nil)))
      }
    }

    def unbind(key: String, value: Boolean) = Map(key -> value.toString)
  }

  import java.util.{ Date, TimeZone }

  /**
   * Formatter for the `java.util.Date` type.
   *
   * @param pattern a date pattern, as specified in `java.time.format.DateTimeFormatter`.
   * @param timeZone the `java.util.TimeZone` to use for parsing and formatting
   */
  def dateFormat(pattern: String, timeZone: TimeZone = TimeZone.getDefault): Formatter[Date] = new Formatter[Date] {

    val javaTimeZone = timeZone.toZoneId
    val formatter = DateTimeFormatter.ofPattern(pattern)

    // Parses at UTC, then re-interprets the same local date-time in the
    // requested zone before converting to an Instant.
    def dateParse(data: String) = {
      val instant = PlayDate.parse(data, formatter).toZonedDateTime(ZoneOffset.UTC)
      Date.from(instant.withZoneSameLocal(javaTimeZone).toInstant)
    }

    override val format = Some(("format.date", Seq(pattern)))

    def bind(key: String, data: Map[String, String]) = parsing(dateParse, "error.date", Nil)(key, data)

    def unbind(key: String, value: Date) = Map(key -> formatter.format(value.toInstant.atZone(javaTimeZone)))
  }

  /**
   * Default formatter for the `java.util.Date` type with pattern `yyyy-MM-dd`.
   */
  implicit val dateFormat: Formatter[Date] = dateFormat("yyyy-MM-dd")

  /**
   * Formatter for the `java.sql.Date` type.
   *
   * @param pattern a date pattern as specified in `java.time.DateTimeFormatter`.
   * @param timeZone the `java.util.TimeZone` to use for parsing and formatting
   */
  def sqlDateFormat(pattern: String, timeZone: TimeZone = TimeZone.getDefault): Formatter[java.sql.Date] = new Formatter[java.sql.Date] {

    // Delegates binding/unbinding to the java.util.Date formatter above.
    val dateFormatter = dateFormat(pattern, timeZone)

    override val format = Some(("format.date", Seq(pattern)))

    def bind(key: String, data: Map[String, String]) = {
      dateFormatter.bind(key, data).right.map(d => new java.sql.Date(d.getTime))
    }

    def unbind(key: String, value: java.sql.Date) = dateFormatter.unbind(key, value)
  }

  /**
   * Default formatter for `java.sql.Date` type with pattern `yyyy-MM-dd`.
   */
  implicit val sqlDateFormat: Formatter[java.sql.Date] = sqlDateFormat("yyyy-MM-dd")

  /**
   * Formatter for the `java.time.LocalDate` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   */
  def localDateFormat(pattern: String): Formatter[java.time.LocalDate] = new Formatter[java.time.LocalDate] {

    import java.time.LocalDate

    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern)
    def localDateParse(data: String) = LocalDate.parse(data, formatter)

    override val format = Some(("format.date", Seq(pattern)))

    def bind(key: String, data: Map[String, String]) = parsing(localDateParse, "error.date", Nil)(key, data)

    def unbind(key: String, value: LocalDate) = Map(key -> value.format(formatter))
  }

  /**
   * Default formatter for `java.time.LocalDate` type with pattern `yyyy-MM-dd`.
   */
  implicit val localDateFormat: Formatter[java.time.LocalDate] = localDateFormat("yyyy-MM-dd")

  /**
   * Formatter for the `java.time.LocalDateTime` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   * @param zoneId the `java.time.ZoneId` to use for parsing and formatting
   */
  def localDateTimeFormat(pattern: String, zoneId: java.time.ZoneId = java.time.ZoneId.systemDefault()): Formatter[java.time.LocalDateTime] = new Formatter[java.time.LocalDateTime] {

    import java.time.LocalDateTime

    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern).withZone(zoneId)
    def localDateTimeParse(data: String) = LocalDateTime.parse(data, formatter)

    override val format = Some(("format.localDateTime", Seq(pattern)))

    def bind(key: String, data: Map[String, String]) = parsing(localDateTimeParse, "error.localDateTime", Nil)(key, data)

    def unbind(key: String, value: LocalDateTime) = Map(key -> value.format(formatter))
  }

  /**
   * Default formatter for `java.time.LocalDateTime` type with pattern `yyyy-MM-dd HH:mm:ss`.
   */
  implicit val localDateTimeFormat: Formatter[java.time.LocalDateTime] = localDateTimeFormat("yyyy-MM-dd HH:mm:ss")

  /**
   * Formatter for the `java.time.LocalTime` type.
   *
   * @param pattern a date pattern as specified in `java.time.format.DateTimeFormatter`.
   */
  def localTimeFormat(pattern: String): Formatter[java.time.LocalTime] = new Formatter[java.time.LocalTime] {

    import java.time.LocalTime

    val formatter = java.time.format.DateTimeFormatter.ofPattern(pattern)
    def localTimeParse(data: String) = LocalTime.parse(data, formatter)

    override val format = Some(("format.localTime", Seq(pattern)))

    def bind(key: String, data: Map[String, String]) = parsing(localTimeParse, "error.localTime", Nil)(key, data)

    def unbind(key: String, value: LocalTime) = Map(key -> value.format(formatter))
  }

  /**
   * Default formatter for `java.time.LocalTime` type with pattern `HH:mm:ss`.
   */
  implicit val localTimeFormat: Formatter[java.time.LocalTime] = localTimeFormat("HH:mm:ss")

  /**
   * Default formatter for the `java.util.UUID` type.
   */
  implicit def uuidFormat: Formatter[UUID] = new Formatter[UUID] {

    override val format = Some(("format.uuid", Nil))

    override def bind(key: String, data: Map[String, String]) = parsing(UUID.fromString, "error.uuid", Nil)(key, data)

    override def unbind(key: String, value: UUID) = Map(key -> value.toString)
  }
}
| ktoso/playframework | framework/src/play/src/main/scala/play/api/data/format/Format.scala | Scala | apache-2.0 | 11,640 |
package com.twitter.server.handler
/** Marker trait for LoggingHandlers served on the admin HTTP mux. */
trait LoggingHandler extends AdminHttpMuxHandler {

  /** Name identifying this logging implementation. */
  def name: String
}
| twitter/twitter-server | server/src/main/scala/com/twitter/server/handler/LoggingHandler.scala | Scala | apache-2.0 | 178 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.tools
import java.io.File
import java.net.{MalformedURLException, URL}
import java.util
import java.util.concurrent.atomic.AtomicBoolean
import com.beust.jcommander.{Parameter, ParameterException}
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory
import org.locationtech.geomesa.fs.{FileSystemDataStore, FileSystemDataStoreParams}
import org.locationtech.geomesa.tools.DataStoreCommand
/**
 * Trait for commands that operate against a pre-existing file system datastore.
 */
trait FsDataStoreCommand extends DataStoreCommand[FileSystemDataStore] {

  override def params: FsParams

  override def connection: Map[String, String] = {
    FsDataStoreCommand.configureURLFactory()
    // BUG FIX: the pattern was previously written as """\\w+://.*""" — triple-
    // quoted strings perform no escape processing, so that matched a literal
    // backslash followed by "w+://" and real scheme URLs (e.g. "hdfs://...")
    // incorrectly fell through to the local-file branch below.
    val url = if (params.path.matches("""\w+://.*""")) {
      try {
        new URL(params.path)
      } catch {
        case e: MalformedURLException => throw new ParameterException(s"Invalid URL ${params.path}: ", e)
      }
    } else {
      // Treat anything without a scheme as a local filesystem path.
      try {
        new File(params.path).toURI.toURL
      } catch {
        case e: MalformedURLException => throw new ParameterException(s"Invalid URL ${params.path}: ", e)
      }
    }
    Map(FileSystemDataStoreParams.PathParam.getName -> url.toString,
      FileSystemDataStoreParams.EncodingParam.getName -> params.encoding)
  }
}
object FsDataStoreCommand {

  // URL.setURLStreamHandlerFactory may only be invoked once per JVM; this flag,
  // guarded by the object's monitor, ensures the Hadoop factory is installed
  // at most once.
  private var urlStreamHandlerSet = false

  /** Registers the Hadoop FS URL stream handler factory (idempotent). */
  def configureURLFactory(): Unit =
    synchronized {
      if (!urlStreamHandlerSet) {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory())
        urlStreamHandlerSet = true
      }
    }
}
// JCommander parameter mix-ins shared by file system datastore commands.
trait PathParam {
  // Root of the datastore; may be a local path or a URL (see FsDataStoreCommand).
  @Parameter(names = Array("--path", "-p"), description = "Path to root of filesystem datastore", required = true)
  var path: String = _
}

// TODO future work would be nice to store this in metadata
trait EncodingParam {
  // On-disk encoding of features (e.g. parquet, csv).
  @Parameter(names = Array("--encoding", "-e"), description = "Encoding (parquet, csv, etc)", required = true)
  var encoding: String = _
}

trait PartitionParam {
  // Optional subset of partitions to operate on; empty means all partitions.
  @Parameter(names = Array("--partitions"), description = "Partitions (if empty all partitions will be used)", required = false, variableArity = true)
  var partitions: java.util.List[String] = new util.ArrayList[String]()
}

// Aggregate parameter set used by FsDataStoreCommand.params.
trait FsParams extends PathParam with EncodingParam
| ronq/geomesa | geomesa-fs/geomesa-fs-tools/src/main/scala/org/locationtech/geomesa/fs/tools/FsDataStoreCommand.scala | Scala | apache-2.0 | 2,717 |
package pimpathon
import org.junit.Test
import scala.collection.{mutable ⇒ M}
import scala.collection.generic.CanBuildFrom
import pimpathon.builder._
import pimpathon.multiMap._
import pimpathon.util._
/**
 * Tests for the pimpathon multiMap syntax: building, merging, selecting and
 * transforming maps whose values are collections.
 */
class MultiMapTest {

  @Test def multiMapCBF(): Unit = {
    val cbf = MultiMap.build[List, Int, String]
    val builder = cbf.apply()
    builder += (1 → "foo") += (1 → "bar")
    // reset() returns the accumulated multi-map and clears the builder.
    builder.reset() === Map(1 → List("foo", "bar"))
    builder.reset() === Map()
  }

  @Test def ignoreFromCBF(): Unit = on(
    new UnitCanBuildFrom[List[Int], Int],
    new UnitCanBuildFrom[List[Int], Int] with IgnoreFromCBF[List[Int], Int, Unit]
  ).calling(_.apply(), _.apply(List(1, 2, 3))).produces(
    UnitBuilder[Int]("apply()"), UnitBuilder[Int]("apply()"),
    // With IgnoreFromCBF mixed in, apply(from) ignores its argument.
    UnitBuilder[Int]("apply(List(1, 2, 3))"), UnitBuilder[Int]("apply()")
  )

  @Test def merge(): Unit = {
    Map(1 → List(1, 2)).merge(MultiMap.empty[List, Int, Int]) === Map(1 → List(1, 2))
    MultiMap.empty[List, Int, Int].merge(Map(1 → List(1, 2))) === Map(1 → List(1, 2))
    // Values under the same key are concatenated, left-hand side first.
    Map(1 → List(1)).merge(Map(1 → List(2))) === Map(1 → List(1, 2))
    Map(1 → List(1)).merge(Map(2 → List(2))) === Map(1 → List(1), 2 → List(2))
    Map(1 → Set(1)).merge(Map(1 → Set(2))) === Map(1 → Set(1, 2))
  }

  @Test def select(): Unit = Map(1 → List(2), 2 → List(3, 4)).select(_.head) === Map(1 → 2, 2 → 3)

  @Test def append(): Unit = {
    MultiMap.empty[List, Int, Int].append(1, List(2, 3)) === Map(1 → List(2, 3))
    Map(1 → List(2)).append(1, List(3)) === Map(1 → List(2, 3))
    Map(1 → List(2, 3)).append(1, Nil) === Map(1 → List(2, 3))
  }

  // Entries with empty value lists are dropped by head/tail.
  @Test def multiMap_head(): Unit = on(
    Map(1 → List(10, 11), 2 → List(20)), Map(1 → Nil, 2 → List(20)), MultiMap.empty[List, Int, Int]
  ).calling(_.multiMap.head).produces(Map(1 → 10, 2 → 20), Map(2 → 20), Map())

  @Test def multiMap_tail(): Unit = on(
    Map(1 → List(10, 11), 2 → List(20)), Map(1 → Nil, 2 → List(20)), Map(1 → Nil), MultiMap.empty[List, Int, Int]
  ).calling(_.multiMap.tail).produces(Map(1 → List(11)), Map(), Map(), Map())

  // onlyOption: Some only when every non-empty value list has exactly one element.
  @Test def onlyOption(): Unit = on(
    Map(1 → Nil, 2 → List(20)), Map(1 → List(10, 11), 2 → List(20)), Map(1 → Nil), MultiMap.empty[List, Int, Int]
  ).calling(_.onlyOption).produces(Some(Map(2 → 20)), None, None, None)

  @Test def headTailOption(): Unit = on(
    Map(1 → List(10, 11), 2 → List(20)), Map(1 → Nil, 2 → List(20)), Map(1 → Nil), MultiMap.empty[List, Int, Int]
  ).calling(_.headTailOption).produces(
    Some(Map(1 → 10, 2 → 20), Map(1 → List(11))), Some(Map(2 → 20), MultiMap.empty[List, Int, Int]), None, None
  )

  @Test def multiMap_values(): Unit = {
    Map(1 → List(1), 2 → List(2, 3)).multiMap.values === List(1, 2, 3)
    Map(1 → Set(1), 2 → Set(2, 3)).multiMap.values === Set(1, 2, 3)
  }

  @Test def multiMap_reverse(): Unit = Map(1 → List(2, 3), 2 → List(3, 4)).multiMap.reverse ===
    Map(2 → List(1), 3 → List(1, 2), 4 → List(2))

  @Test def multiMap_mapEntries(): Unit =
    Map(1 → List(10, 11), 2 → List(20, 21), 3 → List(30, 31)).multiMap.mapEntries(k ⇒ vs ⇒ (k % 2, vs)) ===
      Map(0 → List(20, 21), 1 → List(10, 11, 30, 31))

  // The "U" variants allow the result value-collection type to change (here List → Set).
  @Test def multiMap_mapEntriesU(): Unit =
    Map(1 → List(10, 11), 2 → List(20, 21), 3 → List(30, 31)).multiMap.mapEntriesU(k ⇒ vs ⇒ (k % 2, vs.toSet)) ===
      Map(0 → Set(20, 21), 1 → Set(10, 11, 30, 31))

  @Test def multiMap_mapValues(): Unit =
    Map(1 → List(10, 11), 2 → List(20, 21), 3 → List(30, 31)).multiMap.mapValues(v ⇒ v * 2) ===
      Map(1 → List(20, 22), 2 → List(40, 42), 3 → List(60, 62))

  @Test def flatMapValues(): Unit = Map(0 → List(1, 2), 1 → List(2, 3)).flatMapValues(v ⇒ List(v, -v)) ===
    Map(0 → List(1, -1, 2, -2), 1 → List(2, -2, 3, -3))

  @Test def flatMapValuesU(): Unit = {
    Map(0 → List(1, 2), 1 → List(2, 3)).flatMapValuesU(v ⇒ Set(v, -v)) ===
      Map(0 → Set(1, -1, 2, -2), 1 → Set(2, -2, 3, -3))
    Map(0 → Vector(1, 2), 1 → Vector(2, 3)).flatMapValuesU(v ⇒ List(v, -v)) ===
      Map(0 → List(1, -1, 2, -2), 1 → List(2, -2, 3, -3))
  }

  // pop removes the head value for a key; unknown keys leave the map unchanged.
  @Test def pop(): Unit = on(Map(1 → List(2, 3), 2 → List(3))).calling(_.pop(1), _.pop(2), _.pop(3))
    .produces(Map(1 → List(3), 2 → List(3)), Map(1 → List(2, 3)), Map(1 → List(2, 3), 2 → List(3)))

  @Test def sequence(): Unit = Map(1 → List(10, 11), 2 → List(20, 21)).sequence ===
    List(Map(1 → 10, 2 → 20), Map(1 → 11, 2 → 21))

  @Test def sliding(): Unit = Map(1 → List(11, 12, 13), 2 → List(21, 22, 23)).multiMap.sliding(2) ===
    List(Map(1 → List(11, 12), 2 → List(21, 22)), Map(1 → List(12, 13), 2 → List(22, 23)))

  @Test def getOrEmpty(): Unit = {
    on(Map(1 → List(2))).calling(_.getOrEmpty(1), _.getOrEmpty(2)).produces(List(2), Nil)
    on(Map(1 → Set(2))).calling(_.getOrEmpty(1), _.getOrEmpty(2)).produces(Set(2), Set())
  }

  // Test fixture: a CanBuildFrom whose builders record which apply() overload
  // created them (via the `from` string) and build Unit.
  class UnitCanBuildFrom[From, Elem] extends CanBuildFrom[From, Elem, Unit] {
    def apply(): M.Builder[Elem, Unit] = UnitBuilder[Elem]("apply()")
    def apply(from: From): M.Builder[Elem, Unit] = UnitBuilder[Elem](s"apply($from)")
  }

  case class UnitBuilder[E](from: String) extends M.Builder[E, Unit] {
    def +=(elem: E): this.type = this
    def clear(): Unit = {}
    def result(): Unit = ()
    override def toString = s"UnitBuilder($from)"
  }
}
package skinny.util
import scala.language.implicitConversions
import skinny.ParamType
import org.joda.time._
import scala.util.Try
/**
* DateTime utility.
*/
object DateTimeUtil {
  /**
   * The ISO8601 standard date format.
   *
   * Uses `%s` placeholders (rather than `%0Nd`) because the components are
   * already zero-padded strings when formatted (see ZeroPaddingString below).
   */
  //val ISO_DATE_TIME_FORMAT = "%04d-%02d-%02dT%02d:%02d:%02d%s"
  val ISO_DATE_TIME_FORMAT = "%s-%s-%sT%s:%s:%s%s"
/**
* Returns current timezone value (e.g. +09:00).
*/
def currentTimeZone: String = {
val minutes = java.util.TimeZone.getDefault.getRawOffset / 1000 / 60
(if (minutes >= 0) "+" else "-") + "%02d:%02d".format((math.abs(minutes) / 60), (math.abs(minutes) % 60))
}
  /**
   * Returns "2014-01-02 03:04:05"-style renderings of Joda values.
   * NOTE(review): the Joda pattern "YYYY" is year-of-era rather than "yyyy"
   * (year) — confirm this is intended.
   */
  def toString(d: DateTime): String = d.toString("YYYY-MM-dd HH:mm:ss")
  def toString(d: LocalDate): String = d.toString("YYYY-MM-dd")
  def toString(d: LocalTime): String = d.toString("HH:mm:ss")

  /**
   * Returns the current date-time as "2014-01-02 03:04:05".
   */
  def nowString: String = toString(DateTime.now)
private case class ZeroPaddingString(s: String) {
def to04d: String = {
try "%04d".format(s.toInt)
catch { case e: NumberFormatException => s }
}
def to02d: String = {
try "%02d".format(s.toInt)
catch { case e: NumberFormatException => s }
}
}
private implicit def fromStringToZeroPadding(s: String): ZeroPaddingString = ZeroPaddingString(s)
  /**
   * Converts string value to ISO8601 date format if possible.
   * @param s string value
   * @param paramType DateTime/LocalDate/LocalTime
   * @return ISO8601 date format string value
   */
  def toISODateTimeFormat(s: String, paramType: ParamType): String = {
    // Normalize "/"-separated dates to "-" first.
    val str = s.replaceAll("/", "-")
    if (str.matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}")) {
      // ISO local date-time without a zone: append one (from the input if
      // present, otherwise the system default).
      val timeZone = "([+-]\\d{2}:\\d{2})".r.findFirstIn(s).getOrElse(currentTimeZone)
      str + timeZone
    } else if (str.matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}[+-]\\d{2}:\\d{2}")
      || str.matches("\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}.\\d+[+-]\\d{2}:\\d{2}")) {
      // Already a full ISO8601 date-time (optionally with fractional seconds).
      str
    } else if (str.matches("\\d{4}-\\d{2}-\\d{2}\\s+\\d{2}:\\d{2}:\\d{2}[+-]\\d{2}:\\d{2}")) {
      // Space-separated date and time: replace the separator with 'T'.
      str.replaceFirst("\\s+", "T")
    } else {
      // Fall back to splitting on separators and reassembling, zero-padding
      // each component; missing components are defaulted per paramType.
      str.split("[-:\\s/]").toList match {
        case year :: month :: day :: hour :: minute :: second :: zoneHour :: zoneMinute :: _ =>
          val timeZone = "([+-]\\d{2}:\\d{2})".r.findFirstIn(str).getOrElse(currentTimeZone)
          ISO_DATE_TIME_FORMAT.format(year.to04d, month.to02d, day.to02d, hour.to02d, minute.to02d, second.to02d, timeZone)
        case year :: month :: day :: hour :: minute :: second :: _ =>
          ISO_DATE_TIME_FORMAT.format(year.to04d, month.to02d, day.to02d, hour.to02d, minute.to02d, second.to02d, currentTimeZone)
        case year :: month :: day :: hour :: minute :: _ =>
          ISO_DATE_TIME_FORMAT.format(year.to04d, month.to02d, day.to02d, hour.to02d, minute.to02d, "00", currentTimeZone)
        case year :: month :: day :: _ if paramType == ParamType.LocalDate =>
          ISO_DATE_TIME_FORMAT.format(year.to04d, month.to02d, day.to02d, "00", "00", "00", currentTimeZone)
        case hour :: minute :: second :: _ if paramType == ParamType.LocalTime =>
          // Times are anchored at the epoch date.
          ISO_DATE_TIME_FORMAT.format("1970", "01", "01", hour.to02d, minute.to02d, second.to02d, currentTimeZone)
        case hour :: minute :: _ if paramType == ParamType.LocalTime =>
          ISO_DATE_TIME_FORMAT.format("1970", "01", "01", hour.to02d, minute.to02d, "00", currentTimeZone)
        case _ => str
      }
    }
  }
  // Parses the given string into Joda values after normalizing it to ISO8601
  // via toISODateTimeFormat.
  def parseDateTime(s: String): DateTime = DateTime.parse(toISODateTimeFormat(s, ParamType.DateTime))
  def parseLocalDate(s: String): LocalDate = DateTime.parse(toISODateTimeFormat(s, ParamType.LocalDate)).toLocalDate
  def parseLocalTime(s: String): LocalTime = DateTime.parse(toISODateTimeFormat(s, ParamType.LocalTime)).toLocalTime
def toDateString(
params: Map[String, Any],
year: String = "year",
month: String = "month",
day: String = "day"): Option[String] = {
try {
(params.get(year).filterNot(_.toString.isEmpty) orElse
params.get(month).filterNot(_.toString.isEmpty) orElse
params.get(day).filterNot(_.toString.isEmpty)).map { _ =>
"%04d-%02d-%02d".format(
params.get(year).map(_.toString.toInt).orNull,
params.get(month).map(_.toString.toInt).orNull,
params.get(day).map(_.toString.toInt).orNull
)
}
} catch { case e: NumberFormatException => None }
}
def toUnsafeDateString(
params: Map[String, Any],
year: String = "year",
month: String = "month",
day: String = "day"): Option[String] = {
(params.get(year).filterNot(_.toString.isEmpty) orElse
params.get(month).filterNot(_.toString.isEmpty) orElse
params.get(day).filterNot(_.toString.isEmpty)).map { t =>
"%s-%s-%s".format(
params.get(year).map(_.toString.to04d).orNull,
params.get(month).map(_.toString.to02d).orNull,
params.get(day).map(_.toString.to02d).orNull
)
}
}
def toTimeString(
params: Map[String, Any],
hour: String = "hour",
minute: String = "minute",
second: String = "second"): Option[String] = {
try {
(params.get(hour).filterNot(_.toString.isEmpty) orElse
params.get(minute).filterNot(_.toString.isEmpty) orElse
params.get(second).filterNot(_.toString.isEmpty)).map { _ =>
"1970-01-01 %02d:%02d:%02d".format(
params.get(hour).map(_.toString.toInt).orNull,
params.get(minute).map(_.toString.toInt).orNull,
params.get(second).map(_.toString.toInt).orNull
)
}
} catch { case e: NumberFormatException => None }
}
/**
 * Builds a "1970-01-01 HH:mm:ss"-shaped string without numeric validation.
 * Yields None only when hour, minute and second are all missing or empty; a
 * missing field is rendered as the string "null".
 */
def toUnsafeTimeString(
  params: Map[String, Any],
  hour: String = "hour",
  minute: String = "minute",
  second: String = "second"): Option[String] = {
  def nonEmptyValue(key: String): Option[Any] = params.get(key).filterNot(_.toString.isEmpty)
  if (Seq(hour, minute, second).forall(key => nonEmptyValue(key).isEmpty)) None
  else {
    // zero-pad each piece; absent fields become "null"
    val h: Any = params.get(hour).map(_.toString.to02d).orNull
    val m: Any = params.get(minute).map(_.toString.to02d).orNull
    val s: Any = params.get(second).map(_.toString.to02d).orNull
    Some(s"1970-01-01 $h:$m:$s")
  }
}
/**
 * Builds a zero-padded "yyyy-MM-dd HH:mm:ss" date-time string from
 * request-parameter style values.
 *
 * Returns None when all six fields are absent or empty, and None when any
 * looked-up value cannot be parsed as an integer. A field that is absent while
 * another is present is rendered by the formatter as the string "null".
 *
 * @param params parameter map, e.g. from an HTTP request
 * @param year   key for the year value (default "year")
 * @param month  key for the month value (default "month")
 * @param day    key for the day value (default "day")
 * @param hour   key for the hour value (default "hour")
 * @param minute key for the minute value (default "minute")
 * @param second key for the second value (default "second")
 */
def toDateTimeString(
  params: Map[String, Any],
  year: String = "year",
  month: String = "month",
  day: String = "day",
  hour: String = "hour",
  minute: String = "minute",
  second: String = "second"): Option[String] = {
  // a field counts as "present" only when the key exists and its value is non-empty
  def present(key: String): Option[Any] = params.get(key).filterNot(_.toString.isEmpty)
  // integer value for the key, or null when the key is missing (formatted as "null")
  def intOrNull(key: String): Any = params.get(key).map(_.toString.toInt).orNull
  val keys = Seq(year, month, day, hour, minute, second)
  try {
    if (keys.forall(key => present(key).isEmpty)) None
    else Some("%04d-%02d-%02d %02d:%02d:%02d".format(keys.map(intOrNull): _*))
  } catch {
    // non-numeric input yields None rather than propagating an error
    case _: NumberFormatException => None
  }
}
/**
 * Builds a "yyyy-MM-dd HH:mm:ss"-shaped string without numeric validation.
 * Yields None only when all six fields are missing or empty; a missing field
 * is rendered as the string "null".
 */
def toUnsafeDateTimeString(
  params: Map[String, Any],
  year: String = "year",
  month: String = "month",
  day: String = "day",
  hour: String = "hour",
  minute: String = "minute",
  second: String = "second"): Option[String] = {
  def nonEmptyValue(key: String): Option[Any] = params.get(key).filterNot(_.toString.isEmpty)
  val keys = Seq(year, month, day, hour, minute, second)
  if (keys.forall(key => nonEmptyValue(key).isEmpty)) None
  else {
    // pad each piece to its conventional width; absent fields become "null"
    val y: Any = params.get(year).map(_.toString.to04d).orNull
    val mo: Any = params.get(month).map(_.toString.to02d).orNull
    val d: Any = params.get(day).map(_.toString.to02d).orNull
    val h: Any = params.get(hour).map(_.toString.to02d).orNull
    val mi: Any = params.get(minute).map(_.toString.to02d).orNull
    val s: Any = params.get(second).map(_.toString.to02d).orNull
    Some(s"$y-$mo-$d $h:$mi:$s")
  }
}
/**
 * Concatenates already-formatted date and time parameters as "<date> <time>"
 * without any validation. Yields None only when both parts are missing or
 * empty; a single missing part is rendered as the string "null".
 */
def toUnsafeDateTimeStringFromDateAndTime(
  params: Map[String, Any],
  date: String = "date",
  time: String = "time"): Option[String] = {
  val dateGiven = params.get(date).exists(_.toString.nonEmpty)
  val timeGiven = params.get(time).exists(_.toString.nonEmpty)
  if (dateGiven || timeGiven) {
    val d: Any = params.get(date).map(_.toString).orNull
    val t: Any = params.get(time).map(_.toString).orNull
    Some(s"$d $t")
  } else None
}
// True when the string can be normalized and parsed as a local date (see parseLocalDate).
def isLocalDateFormat(str: String): Boolean = Try(parseLocalDate(str)).isSuccess
// True when the string can be normalized and parsed as a date-time (see parseDateTime).
def isDateTimeFormat(str: String): Boolean = Try(parseDateTime(str)).isSuccess
}
| Kuchitama/skinny-framework | common/src/main/scala/skinny/util/DateTimeUtil.scala | Scala | mit | 9,075 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
*
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.actorsystem.actors.httpserver
import akka.actor.{Actor, ActorSystem, ActorLogging, ActorRef, PoisonPill, Props}
import akka.io.IO
import com.actorbase.actorsystem.messages.AuthActorMessages.{ Init, Save, Clean }
import spray.can.Http
import akka.event.LoggingReceive
import akka.cluster.singleton.{ClusterSingletonManager, ClusterSingletonManagerSettings, ClusterSingletonProxy, ClusterSingletonProxySettings}
import akka.cluster._
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings}
import com.typesafe.config.ConfigFactory
import com.actorbase.actorsystem.actors.clientactor.ClientActor
import com.actorbase.actorsystem.actors.authactor.AuthActor
import com.actorbase.actorsystem.actors.main.Main
import com.actorbase.actorsystem.messages.MainMessages._
import com.actorbase.actorsystem.utils.ActorbaseCollection
import com.actorbase.actorsystem.utils.ActorbaseCollection. { Permissions, Read, ReadWrite }
import com.actorbase.actorsystem.utils.CryptoUtils
import scala.collection.mutable.Queue
import java.io.File
/**
* Class that represent a HTTPServer actor. This actor is responsible to accept the connection
* incoming from clients and to instantiate a ClientActor assigned to the client asking.
*
* @param main: an ActorRef to a main actor
* @param authProxy: an ActorRef to the AuthActor
* @param address: a String representing the address on which Actorbase has to listen on
* @param listenPort: a Int representing the port on which Actorbase has to listen on
*/
class HTTPServer(main: ActorRef, authProxy: ActorRef, address: String, listenPort: Int) extends Actor
with ActorLogging with SslConfiguration {
// persistence settings: save folder location and encryption key
val config = ConfigFactory.load().getConfig("persistence")
implicit val system = context.system
// Bind the HTTP listener as soon as the actor is created; Http.Connected
// events will subsequently arrive in receive.
IO(Http)(system) ! Http.Bind(self, interface = address, port = listenPort)
// Force the one-off reload of persisted state during actor construction.
val initLoad: Unit = loadData
/**
* Loads all the data saved on the rootfolder onto the system. This is used to repopulate
* the database after a restart.
* The actor reads all the data from files and it proceed to send messages to the right actors
* to repopulate the server.
*
* @return no return value
*/
def loadData: Unit = {
val root = new File(config getString "save-folder")
// accumulators refilled per collection directory (dataShard) or across the whole scan
var dataShard = Map.empty[String, Array[Byte]]
var usersmap = Map.empty[String, String]
var contributors = Map.empty[String, List[(String, Boolean)]]
var data = Queue.empty[(ActorbaseCollection, Map[String, Array[Byte]])]
log.info("LOADING ......... ")
if (root.exists && root.isDirectory) {
var (name, owner) = ("", "")
// one sub-directory per collection; each file inside is dispatched by name
// NOTE(review): the inner lambda parameter x shadows the outer directory x
root.listFiles.filter(_.isDirectory).foreach { x =>
x.listFiles.filter(_.isFile).foreach { x =>
x match {
// collection metadata: recreates the collection and records name/owner
case meta if meta.getName.endsWith("actbmeta") =>
val metaData = CryptoUtils.decrypt[Map[String, String]](config getString "encryption-key", meta)
metaData get "collection" map (c => name = c)
metaData get "owner" map (o => owner = o)
main ! CreateCollection(owner, ActorbaseCollection(name, owner))
// credentials file: username -> (hashed) password entries
case user if (user.getName == "usersdata.shadow") =>
usersmap ++= CryptoUtils.decrypt[Map[String, String]](config getString "encryption-key", user)
// contributor permissions: collection -> list of (username, readWrite?)
case contributor if (contributor.getName == "contributors.shadow") =>
contributors ++= CryptoUtils.decrypt[Map[String, List[(String, Boolean)]]](config getString "encryption-key", contributor)
// anything else is a shard of key/value payload data
case _ => dataShard ++= CryptoUtils.decrypt[Map[String, Array[Byte]]](config getString "encryption-key", x)
}
}
// queue the completed collection's payload and reset for the next directory
val collection = ActorbaseCollection(name, owner)
data += (collection -> dataShard)
dataShard = dataShard.empty
}
// reset the auth singleton, then replay users and contributor permissions
authProxy ! Clean
usersmap map ( x => authProxy ! Init(x._1, x._2) )
contributors.foreach {
case (k, v) =>
v.foreach { item =>
val permission = if (item._2) ReadWrite else Read
main ! AddContributor("admin", k, permission, item._1)
}
}
contributors = contributors.empty
// populate collections
data.foreach {
case (k, v) =>
v.foreach {
case (kk, vv) =>
main ! InsertTo(k.getOwner, k, kk, vv, false)
}
}
// recursively delete everything loaded except *.shadow files and "usersdata"
def getRecursively(f: File): Seq[File] = f.listFiles.filter(_.isDirectory).flatMap(getRecursively) ++ f.listFiles
getRecursively( root ).foreach { f =>
if (!f.getName.endsWith("shadow") && f.getName != "usersdata")
f.delete()
}
} else log.warning("Directory not found!")
// persist the (possibly re-initialized) auth state
authProxy ! Save
}
/**
* Receive method, handle connection from outside, registering it to a
* dedicated actor
*
*/
def receive: Receive = LoggingReceive {
case _: Http.Connected =>
val serverConnection = sender()
// one ClientActor per inbound connection handles that client's requests
val handler = context.actorOf(Props(new ClientActor(main, authProxy)))
serverConnection ! Http.Register(handler)
}
}
/**
* HTTPServer object, it contains the main of the application
*/
object HTTPServer {
// Application entry point: parses -h/-p overrides, builds the actor system,
// starts the auth singleton, the Main shard region and the HTTP front-end.
def main(args: Array[String]) = {
// defaults used when no command-line arguments are supplied
var (hostname, port) = ("127.0.0.1", 2500)
//Argument GrammarParser
if (args.length == 0)
println("[!] no arg, Client loaded by default param");
else {
val arglist = args.toList
type OptionMap = Map[String, String]
// recursive descent over the argument list; unrecognized tokens land under "error"
// NOTE(review): isSwitch is defined but never used
def nextOption(map : OptionMap, list: List[String]) : OptionMap = {
def isSwitch(s : String) = (s(0) == '-')
list match {
case Nil => map
case "-h" :: value :: tail =>
nextOption(map ++ Map("host" -> value), tail)
case "-p" :: value :: tail =>
nextOption(map ++ Map("port" -> value), tail)
case string :: Nil => nextOption(map ++ Map("error" -> string), list.tail)
case _ :: value :: tail =>
nextOption(map ++ Map("error" -> value), tail)
}
}
val options = nextOption(Map(), arglist)
options get "host" map (hostname = _)
options get "port" map (s => port = s.toInt)
// NOTE(review): a non-numeric -p value will throw NumberFormatException here
}
// overlay the chosen host/port on top of the packaged configuration
val config = ConfigFactory.parseString(s"""
akka.remote.netty.tcp.hostname=${hostname}
akka.remote.netty.tcp.port=${port}
listen-on=${hostname}
""").withFallback(ConfigFactory.load())
val system = ActorSystem(config getString "name", config)
// singleton authactor
system.actorOf(ClusterSingletonManager.props(
singletonProps = Props(classOf[AuthActor]),
terminationMessage = PoisonPill,
settings = ClusterSingletonManagerSettings(system)),
name = "authactor")
// proxy
val authProxy = system.actorOf(ClusterSingletonProxy.props(
singletonManagerPath = "/user/authactor",
settings = ClusterSingletonProxySettings(system)),
name = "authProxy")
// main sharding
ClusterSharding(system).start(
typeName = Main.shardName,
entityProps = Main.props(authProxy),
settings = ClusterShardingSettings(system),
extractShardId = Main.extractShardId,
extractEntityId = Main.extractEntityId)
val main = ClusterSharding(system).shardRegion(Main.shardName)
// creating the HTTPServer actor binds the HTTP listener as a side effect
val http = system.actorOf(Props(classOf[HTTPServer], main, authProxy, config getString "listen-on", config getInt "exposed-port"))
}
}
| ScalateKids/Actorbase-Server | src/main/scala/com/actorbase/actorsystem/actors/httpserver/HTTPServer.scala | Scala | mit | 8,611 |
/*
* Bioinformatics Algorithms in Scala
* Copyright (C) 2016 Jason Mar
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package bioinf
import bioinf.mutations.Mutations.AdjacencyList
/**
 * Parsing helpers for the textual problem formats used by the bioinformatics
 * exercises: integer lists, matrices, weighted DAG edges, adjacency lists,
 * signed permutations and small label/character codecs.
 */
object Input {
  /** Parses a space-separated list of integers, e.g. "1 2 3" => IndexedSeq(1, 2, 3). */
  def getInts(s: String): IndexedSeq[Int] = {
    s.split(" ").map(_.toInt).toIndexedSeq
  }

  /** Transposes a rectangular matrix; assumes every row is as long as the first. */
  def transpose(x: IndexedSeq[IndexedSeq[Int]]): IndexedSeq[IndexedSeq[Int]] = {
    x.head.indices.map { i =>
      x.indices.map { j =>
        x(j)(i)
      }
    }
  }

  /**
   * Parses weighted edges written as "node0->node1:weight" into rows of
   * (source, target, weight).
   */
  def readDAG(x: IndexedSeq[String]): IndexedSeq[IndexedSeq[Int]] = {
    x.map { s =>
      val i0 = s.indexOf("->")
      val i1 = s.indexOf(":")
      IndexedSeq(
        s.substring(0, i0).toInt,           // source node
        s.substring(i0 + 2, i1).toInt,      // target node
        s.substring(i1 + 1, s.length).toInt // edge weight
      )
    }
  }

  /**
   * Parses edges written as "v->w:label" into the parallel arrays of an
   * [[AdjacencyList]].
   */
  def readAdjacencyList(x: IndexedSeq[String]): AdjacencyList = {
    val v = Array.ofDim[Int](x.length)
    val w = Array.ofDim[Int](x.length)
    val label = Array.ofDim[Int](x.length)
    for (i <- x.indices) {
      val s = x(i)
      val i0 = s.indexOf("->")
      val i1 = s.indexOf(":")
      v(i) = s.substring(0, i0).toInt
      w(i) = s.substring(i0 + 2, i1).toInt
      label(i) = s.substring(i1 + 1, s.length).toInt
    }
    AdjacencyList(v, w, label)
  }

  /**
   * Parses a space-separated integer matrix, one row per input line.
   * Each line is now split exactly once; the previous version re-split the
   * line for every cell, which was accidentally quadratic in the row length.
   */
  def readMatrix(x: IndexedSeq[String]): IndexedSeq[IndexedSeq[Int]] = {
    val m = x.head.split(" ").length
    val a = x.map { line =>
      val row = line.split(" ")
      assert(row.length == m) // every row must match the width of the first
      row.map(_.toInt)
    }.toArray
    finalizeMatrix(a)
  }

  /** Converts a mutable Array-of-Array matrix into the immutable IndexedSeq form. */
  @inline
  def finalizeMatrix(x: Array[Array[Int]]): IndexedSeq[IndexedSeq[Int]] = x.map(_.toIndexedSeq).toIndexedSeq

  /** Removes every space character from the string. */
  @inline
  def removeSpaces(s: String): String = {
    s.replaceAll(" ", "")
  }

  /** Maps a nucleotide character to an index: A=0, C=1, G=2, T=3, '$'=-1, anything else -2. */
  @inline
  def label2int(c: Char): Int = {
    c match {
      case 'A' => 0
      case 'C' => 1
      case 'G' => 2
      case 'T' => 3
      case '$' => -1
      case _ => -2
    }
  }

  /** Inverse of [[label2int]] for known indices; unknown indices map to 'N'. */
  @inline
  def int2label(x: Int): Char = {
    x match {
      case 0 => 'A'
      case 1 => 'C'
      case 2 => 'G'
      case 3 => 'T'
      case -1 => '$'
      case _ => 'N'
    }
  }

  /** Lowercases the input and replaces letters 'a'..'j' with digits '0'..'9'. */
  @inline
  def char2num(s: String): String = {
    s.toLowerCase
      .replace('a', '0')
      .replace('b', '1')
      .replace('c', '2')
      .replace('d', '3')
      .replace('e', '4')
      .replace('f', '5')
      .replace('g', '6')
      .replace('h', '7')
      .replace('i', '8')
      .replace('j', '9')
  }

  /** Replaces digits '0'..'9' with letters 'a'..'j' (the reverse mapping of [[char2num]]). */
  @inline
  def num2char(s: String): String = {
    s.replace('0', 'a')
      .replace('1', 'b')
      .replace('2', 'c')
      .replace('3', 'd')
      .replace('4', 'e')
      .replace('5', 'f')
      .replace('6', 'g')
      .replace('7', 'h')
      .replace('8', 'i')
      .replace('9', 'j')
  }

  /** Sample Input:
   * (-3 +4 +1 +5 -2)
   * Output:
   * IndexedSeq[Int](-3,4,1,5,-2)
   */
  def readPermutation(s: String): IndexedSeq[Int] = {
    // strip the surrounding parentheses, then parse signed integers
    s.substring(1, s.length - 1).split(" ").map { _.toInt }.toIndexedSeq
  }
}
| sterglee/ScalaLabBioinf | src/main/scala/bioinf/Input.scala | Scala | gpl-3.0 | 3,843 |
import sbt._
import Keys._
import com.typesafe.sbt.SbtMultiJvm
import com.typesafe.sbt.SbtMultiJvm.MultiJvmKeys.{ MultiJvm }
// sbt build definition. NOTE(review): the Build trait and the <<= operator are
// legacy sbt 0.13-era APIs; migrating to sbt 1.x would require build.sbt syntax.
object WordsBuild extends Build {
// shared settings: multi-JVM support, no Scala-version cross paths in artifacts
lazy val buildSettings = Defaults.defaultSettings ++ multiJvmSettings ++ Seq(
crossPaths := false
)
// the single project of this build, with the MultiJvm configuration enabled
lazy val project = Project(
id = "words-cluster",
base = file("."),
settings = buildSettings ++ Project.defaultSettings
) configs(MultiJvm)
lazy val multiJvmSettings = SbtMultiJvm.multiJvmSettings ++ Seq(
// make sure that MultiJvm test are compiled by the default test compilation
compile in MultiJvm <<= (compile in MultiJvm) triggeredBy (compile in Test),
// disable parallel tests
parallelExecution in Test := false,
// make sure that MultiJvm tests are executed by the default test target
executeTests in Test <<=
((executeTests in Test), (executeTests in MultiJvm)) map {
case ((testResults), (multiJvmResults)) =>
// report the worse of the two overall results (higher id = worse outcome)
val overall =
if (testResults.overall.id < multiJvmResults.overall.id)
multiJvmResults.overall
else
testResults.overall
// merge events and summaries from both test runs into one report
Tests.Output(overall,
testResults.events ++ multiJvmResults.events,
testResults.summaries ++ multiJvmResults.summaries)
}
)
}
| RayRoestenburg/akka-in-action | chapter-cluster/project/WordsBuild.scala | Scala | mit | 1,302 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.collection.mutable
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{ArrayData, MapData}
import org.apache.spark.sql.types._
/**
* An expression that produces zero or more rows given a single input row.
*
* Generators produce multiple output rows instead of a single value like other expressions,
* and thus they must have a schema to associate with the rows that are output.
*
* However, unlike row producing relational operators, which are either leaves or determine their
* output schema functionally from their input, generators can contain other expressions that
* might result in their modification by rules. This structure means that they might be copied
* multiple times after first determining their output schema. If a new output schema is created for
* each copy references up the tree might be rendered invalid. As a result generators must
* instead define a function `makeOutput` which is called only once when the schema is first
* requested. The attributes produced by this function will be automatically copied anytime rules
* result in changes to the Generator or its children.
*/
trait Generator extends Expression {
// A generator is typed as an array whose elements follow elementSchema.
override def dataType: DataType = ArrayType(elementSchema)
// Generators are never constant-folded and never evaluate to a top-level null.
override def foldable: Boolean = false
override def nullable: Boolean = false
/**
* The output element schema.
*/
def elementSchema: StructType
/** Should be implemented by child classes to perform specific Generators. */
override def eval(input: InternalRow): TraversableOnce[InternalRow]
/**
* Notifies that there are no more rows to process, clean up code, and additional
* rows can be made here.
*/
def terminate(): TraversableOnce[InternalRow] = Nil
/**
* Check if this generator supports code generation.
*/
// CodegenFallback marks interpreted-only expressions, so its presence disables codegen.
def supportCodegen: Boolean = !isInstanceOf[CodegenFallback]
}
/**
* A collection producing [[Generator]]. This trait provides a different path for code generation,
* by allowing code generation to return either an [[ArrayData]] or a [[MapData]] object.
*/
trait CollectionGenerator extends Generator {
/** The position of an element within the collection should also be returned. */
def position: Boolean
/** Rows will be inlined during generation. */
def inline: Boolean
/** The type of the returned collection object. */
// Defaults to this generator's dataType; subclasses may override it to expose
// the input collection's own type instead.
def collectionType: DataType = dataType
}
/**
* A generator that produces its output using the provided lambda function.
*/
case class UserDefinedGenerator(
elementSchema: StructType,
function: Row => TraversableOnce[InternalRow],
children: Seq[Expression])
extends Generator with CodegenFallback {
// Both converters are built lazily on first eval (and rebuilt after
// serialization, since they are @transient).
@transient private[this] var inputRow: InterpretedProjection = _
@transient private[this] var convertToScala: (InternalRow) => Row = _
private def initializeConverters(): Unit = {
// projects the child expressions into a single input row
inputRow = new InterpretedProjection(children)
convertToScala = {
// a synthetic schema over the children drives the Catalyst-to-Scala conversion
val inputSchema = StructType(children.map { e =>
StructField(e.simpleString, e.dataType, nullable = true)
})
CatalystTypeConverters.createToScalaConverter(inputSchema)
}.asInstanceOf[InternalRow => Row]
}
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
if (inputRow == null) {
initializeConverters()
}
// Convert the objects into Scala Type before calling function, we need schema to support UDT
function(convertToScala(inputRow(input)))
}
override def toString: String = s"UserDefinedGenerator(${children.mkString(",")})"
}
/**
* Separate v1, ..., vk into n rows. Each row will have k/n columns. n must be constant.
* {{{
* SELECT stack(2, 1, 2, 3) ->
* 1 2
* 3 NULL
* }}}
*/
@ExpressionDescription(
usage = "_FUNC_(n, expr1, ..., exprk) - Separates `expr1`, ..., `exprk` into `n` rows.",
examples = """
Examples:
> SELECT _FUNC_(2, 1, 2, 3);
1 2
3 NULL
""")
case class Stack(children: Seq[Expression]) extends Generator {
// First child is the (foldable) row count; the remaining children are the
// values, laid out row-major across numFields columns.
private lazy val numRows = children.head.eval().asInstanceOf[Int]
private lazy val numFields = Math.ceil((children.length - 1.0) / numRows).toInt
/**
* Return true iff the first child exists and has a foldable IntegerType.
*/
def hasFoldableNumRows: Boolean = {
children.nonEmpty && children.head.dataType == IntegerType && children.head.foldable
}
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length <= 1) {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires at least 2 arguments.")
} else if (children.head.dataType != IntegerType || !children.head.foldable || numRows < 1) {
TypeCheckResult.TypeCheckFailure("The number of rows must be a positive constant integer.")
} else {
// every value must match the data type of its column (taken from the first row)
for (i <- 1 until children.length) {
val j = (i - 1) % numFields
if (children(i).dataType != elementSchema.fields(j).dataType) {
return TypeCheckResult.TypeCheckFailure(
s"Argument ${j + 1} (${elementSchema.fields(j).dataType.catalogString}) != " +
s"Argument $i (${children(i).dataType.catalogString})")
}
}
TypeCheckResult.TypeCheckSuccess
}
}
// Resolves the effective data type of the column containing `index` by
// scanning that column for the first non-NullType value.
def findDataType(index: Int): DataType = {
// Find the first data type except NullType.
val firstDataIndex = ((index - 1) % numFields) + 1
for (i <- firstDataIndex until children.length by numFields) {
if (children(i).dataType != NullType) {
return children(i).dataType
}
}
// If all values of the column are NullType, use it.
NullType
}
// Output columns are named col0..colN-1, typed after the first row's values.
override def elementSchema: StructType =
StructType(children.tail.take(numFields).zipWithIndex.map {
case (e, index) => StructField(s"col$index", e.dataType)
})
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val values = children.tail.map(_.eval(input)).toArray
for (row <- 0 until numRows) yield {
val fields = new Array[Any](numFields)
for (col <- 0 until numFields) {
val index = row * numFields + col
// cells beyond the supplied values are padded with null
fields.update(col, if (index < values.length) values(index) else null)
}
InternalRow(fields: _*)
}
}
/**
* Only support code generation when stack produces 50 rows or less.
*/
override def supportCodegen: Boolean = numRows <= 50
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
// Rows - we write these into an array.
val rowData = ctx.addMutableState("InternalRow[]", "rows",
v => s"$v = new InternalRow[$numRows];")
val values = children.tail
val dataTypes = values.take(numFields).map(_.dataType)
// generate one struct-creation snippet per output row, null-padding short rows
val code = ctx.splitExpressionsWithCurrentInputs(Seq.tabulate(numRows) { row =>
val fields = Seq.tabulate(numFields) { col =>
val index = row * numFields + col
if (index < values.length) values(index) else Literal(null, dataTypes(col))
}
val eval = CreateStruct(fields).genCode(ctx)
s"${eval.code}\\n$rowData[$row] = ${eval.value};"
})
// Create the collection.
val wrapperClass = classOf[mutable.WrappedArray[_]].getName
ev.copy(code =
code"""
|$code
|$wrapperClass<InternalRow> ${ev.value} = $wrapperClass$$.MODULE$$.make($rowData);
""".stripMargin, isNull = FalseLiteral)
}
}
/**
* Wrapper around another generator to specify outer behavior. This is used to implement functions
* such as explode_outer. This expression gets replaced during analysis.
*/
case class GeneratorOuter(child: Generator) extends UnaryExpression with Generator {
// This is a marker-only wrapper: it must be replaced during analysis, so both
// evaluation paths deliberately fail if it ever reaches execution.
final override def eval(input: InternalRow = null): TraversableOnce[InternalRow] =
throw new UnsupportedOperationException(s"Cannot evaluate expression: $this")
final override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode =
throw new UnsupportedOperationException(s"Cannot evaluate expression: $this")
override def elementSchema: StructType = child.elementSchema
// Permanently unresolved, which forces the analyzer to rewrite this node.
override lazy val resolved: Boolean = false
}
/**
* A base class for [[Explode]] and [[PosExplode]].
*/
abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with Serializable {
override val inline: Boolean = false
// Only arrays and maps can be exploded.
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case _: ArrayType | _: MapType =>
TypeCheckResult.TypeCheckSuccess
case _ =>
TypeCheckResult.TypeCheckFailure(
"input to function explode should be array or map type, " +
s"not ${child.dataType.catalogString}")
}
// hive-compatible default alias for explode function ("col" for array, "key", "value" for map)
override def elementSchema: StructType = child.dataType match {
case ArrayType(et, containsNull) =>
if (position) {
new StructType()
.add("pos", IntegerType, nullable = false)
.add("col", et, containsNull)
} else {
new StructType()
.add("col", et, containsNull)
}
case MapType(kt, vt, valueContainsNull) =>
if (position) {
new StructType()
.add("pos", IntegerType, nullable = false)
.add("key", kt, nullable = false)
.add("value", vt, valueContainsNull)
} else {
new StructType()
.add("key", kt, nullable = false)
.add("value", vt, valueContainsNull)
}
}
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
child.dataType match {
case ArrayType(et, _) =>
val inputArray = child.eval(input).asInstanceOf[ArrayData]
if (inputArray == null) {
// a null collection produces no rows
Nil
} else {
// one output row per element, optionally prefixed with its position
val rows = new Array[InternalRow](inputArray.numElements())
inputArray.foreach(et, (i, e) => {
rows(i) = if (position) InternalRow(i, e) else InternalRow(e)
})
rows
}
case MapType(kt, vt, _) =>
val inputMap = child.eval(input).asInstanceOf[MapData]
if (inputMap == null) {
// a null collection produces no rows
Nil
} else {
// one output row per entry: (pos,) key, value
val rows = new Array[InternalRow](inputMap.numElements())
var i = 0
inputMap.foreach(kt, vt, (k, v) => {
rows(i) = if (position) InternalRow(i, k, v) else InternalRow(k, v)
i += 1
})
rows
}
}
}
// Expose the input collection's own type (array or map) to the planner.
override def collectionType: DataType = child.dataType
// Codegen only evaluates the child collection; the unrolling into rows is
// handled by the consuming operator for CollectionGenerator expressions.
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
child.genCode(ctx)
}
}
/**
* Given an input array produces a sequence of rows for each value in the array.
*
* {{{
* SELECT explode(array(10,20)) ->
* 10
* 20
* }}}
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns.",
examples = """
Examples:
> SELECT _FUNC_(array(10, 20));
10
20
""")
// scalastyle:on line.size.limit
case class Explode(child: Expression) extends ExplodeBase {
// One output row per element/entry, without a position column.
override val position: Boolean = false
}
/**
* Given an input array produces a sequence of rows for each position and value in the array.
*
* {{{
* SELECT posexplode(array(10,20)) ->
* 0 10
* 1 20
* }}}
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows with positions, or the elements of map `expr` into multiple rows and columns with positions.",
examples = """
Examples:
> SELECT _FUNC_(array(10,20));
0 10
1 20
""")
// scalastyle:on line.size.limit
case class PosExplode(child: Expression) extends ExplodeBase {
// Same as Explode, but each output row is prefixed with its 0-based position.
override val position = true
}
/**
* Explodes an array of structs into a table.
*/
@ExpressionDescription(
usage = "_FUNC_(expr) - Explodes an array of structs into a table.",
examples = """
Examples:
> SELECT _FUNC_(array(struct(1, 'a'), struct(2, 'b')));
1 a
2 b
""")
case class Inline(child: Expression) extends UnaryExpression with CollectionGenerator {
// Struct fields are flattened into top-level columns; no position column.
override val inline: Boolean = true
override val position: Boolean = false
// Only an array of structs is a valid input.
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case ArrayType(st: StructType, _) =>
TypeCheckResult.TypeCheckSuccess
case _ =>
TypeCheckResult.TypeCheckFailure(
s"input to function $prettyName should be array of struct type, " +
s"not ${child.dataType.catalogString}")
}
// The output schema is the element struct itself (safe after checkInputDataTypes).
override def elementSchema: StructType = child.dataType match {
case ArrayType(st: StructType, _) => st
}
override def collectionType: DataType = child.dataType
private lazy val numFields = elementSchema.fields.length
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val inputArray = child.eval(input).asInstanceOf[ArrayData]
if (inputArray == null) {
// a null array produces no rows
Nil
} else {
// each struct element becomes one output row
for (i <- 0 until inputArray.numElements())
yield inputArray.getStruct(i, numFields)
}
}
// Codegen only evaluates the child array; row expansion is handled by the consumer.
override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
child.genCode(ctx)
}
}
| tejasapatil/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala | Scala | apache-2.0 | 14,278 |
package uk.gov.homeoffice.configuration
import java.time.Duration
import scala.concurrent.duration.FiniteDuration
import org.specs2.matcher.Scope
import org.specs2.mutable.Specification
/**
 * Specs for ConfigFactorySupport: implicit java.time.Duration -> FiniteDuration
 * conversion and defaulting accessors for missing configuration keys.
 */
class ConfigFactorySupportSpec extends Specification {
  // Fresh per-example scope with configuration loaded and the support conversions in scope.
  trait Context extends Scope with HasConfig with ConfigFactorySupport

  "Configuration support" should {
    "convert a 5 minute duration to a finite duration" in new Context {
      // ISO-8601 "PT5M" converts (implicitly) to a 5 minute FiniteDuration.
      // Fixed typo: the local val was previously misspelled "finitieDuration".
      val finiteDuration: FiniteDuration = Duration.parse("PT5M")
      finiteDuration.toMinutes mustEqual 5
    }
    "default text" in new Context {
      config.text("does-not-exist", "default") mustEqual "default"
    }
    "default int" in new Context {
      config.int("does-not-exist", 99) mustEqual 99
    }
    "default boolean" in new Context {
      config.boolean("does-not-exist", true) must beTrue
    }
  }
}
/*
* Copyright 2013-2014 IQ TECH <http://www.iqtech.pl>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.abyss.test
/**
* Created by cane, 12/2/13 5:41 PM
* $Id: ModelFactory.scala,v 1.2 2013-12-31 21:09:28 cane Exp $
*/
// NOTE(review): this class currently declares no members. The body below is an
// experimental reflection-based factory (builds an instance of a named class
// from a Map via field reflection) plus its test, kept only as commented-out
// code. Either implement and test it properly or remove the class.
class ModelFactory {
// def produce(map: Map[String, Any], collection: String, pkg: String): Any = {
// val clazz = getClass.getClassLoader.loadClass(pkg+"."+collection)
// //val clazz = getClass.getClassLoader.loadClass("pl.iqtech.abyss.graph.test$Galaxy")
// val obj = clazz.newInstance()
// clazz.getDeclaredFields.foreach {
// fld =>
// fld.setAccessible(true)
// if(map.contains(fld.getName)) fld.set(obj, map(fld.getName))
// }
// obj
// }
//
//
// @Test
// def factoryTest = {
// val map = Map(
// "name" -> "Andromeda",
// "mass" -> 1.0
// )
// val g = produce(map = map, collection = "Galaxy", pkg = "pl.iqtech.abyss.graph.test").asInstanceOf[Galaxy]
// println(g)
// }
}
| iqtech/abyss | abyss-graph/src/test/scala/io/abyss/test/ModelFactory.scala | Scala | apache-2.0 | 1,456 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.avocado.algorithms.em
import org.bdgenomics.avocado.algorithms.math.MathTestUtils
import org.scalatest.FunSuite
import scala.math.{ abs, exp, log }
class EMForAllelesSuite extends FunSuite {
  // All per-sample genotype likelihood arrays are handed to emForMAF in log
  // space (note the `.map(log(_))` calls), and the convergence target is also
  // log-transformed. The returned psi is exponentiated with `exp` before
  // being compared to the expected frequency.

  test("cannot run EM without specifying an iteration limit or target tolerance") {
    // Neither maxIterations nor a usable stop criterion -> precondition failure.
    intercept[AssertionError] {
      EMForAlleles.emForMAF(Array(Array[Double]()),
        1.0 - 1e-3)
    }
  }

  test("run EM on single sample, definite ref") {
    // Likelihood mass almost entirely on the last genotype state.
    val psi = EMForAlleles.emForMAF(Array(Array(1e-12, 1e-6, 1.0 - 1e-6 - 1e-12).map(log(_))),
      log(1.0 - 1e-3),
      maxIterations = Some(10))
    MathTestUtils.assertAlmostEqual(exp(psi), 1.0)
  }

  test("run EM on three samples, mix of hom ref, het, hom alt") {
    val psi = EMForAlleles.emForMAF(Array(Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0001, 0.1, 0.0001).map(log(_)),
      Array(0.1, 0.0001, 0.0000001).map(log(_))),
      log(1.0 - 1e-3),
      maxIterations = Some(10))
    MathTestUtils.assertAlmostEqual(exp(psi), 0.5)
  }

  test("run EM on five samples, one hom alt, all others hom ref") {
    val psi = EMForAlleles.emForMAF(Array(Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.1, 0.0001, 0.0000001).map(log(_))),
      log(1.0 - 1e-3),
      maxIterations = Some(10))
    MathTestUtils.assertAlmostEqual(exp(psi), 0.8)
  }

  test("run EM on five samples, with varying ploidy, M = 10, G = 7") {
    // Per-sample arrays deliberately have different lengths (different ploidy).
    val psi = EMForAlleles.emForMAF(Array(Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.0000001, 0.0001, 0.1).map(log(_)),
      Array(0.1, 0.0001).map(log(_)),
      Array(0.0001, 0.1, 0.0001, 0.0000001).map(log(_))),
      log(1.0 - 1e-3),
      maxIterations = Some(10))
    MathTestUtils.assertAlmostEqual(exp(psi), 0.7)
  }
}
| FusionWorks/avocado | avocado-core/src/test/scala/org/bdgenomics/avocado/algorithms/em/EMforAllelesSuite.scala | Scala | apache-2.0 | 2,751 |
package io.buoyant.linkerd
import com.twitter.finagle.naming.buoyant.DstBindingFactory
import com.twitter.finagle.naming.NameInterpreter
import com.twitter.finagle.param.Label
import com.twitter.finagle.stats.{BroadcastStatsReceiver, LoadedStatsReceiver, NullStatsReceiver}
import com.twitter.finagle.tracing.{BroadcastTracer, DefaultTracer, Tracer}
import com.twitter.finagle.util.{DefaultTimer, LoadService}
import com.twitter.finagle.{Namer, Path, Stack, param => fparam}
import com.twitter.logging.Logger
import com.twitter.jvm.JvmStats
import io.buoyant.admin.{Admin, AdminConfig}
import io.buoyant.config._
import io.buoyant.linkerd.telemeter.UsageDataTelemeterConfig
import io.buoyant.namer.Param.Namers
import io.buoyant.namer._
import io.buoyant.telemetry._
import io.buoyant.telemetry.admin.{AdminMetricsExportTelemeter, histogramSnapshotInterval}
import java.net.{InetAddress, InetSocketAddress}
import scala.util.control.NoStackTrace
/**
* Represents the total configuration of a Linkerd process.
*/
trait Linker {
  /** All configured routers; construction guarantees at least one (see LinkerConfig.mk). */
  def routers: Seq[Router]
  /** Configured namers, keyed by their path prefix. */
  def namers: Seq[(Path, Namer)]
  /** The admin server configuration for this process. */
  def admin: Admin
  /** Tracer broadcasting to all telemeter-provided tracers. */
  def tracer: Tracer
  /** All active telemeters, including the built-in admin/usage ones. */
  def telemeters: Seq[Telemeter]
  /** Returns a copy with the given Stack param applied to every router. */
  def configured[T: Stack.Param](t: T): Linker
}
object Linker {
  private[this] val log = Logger()

  // Defaults used when the configuration omits an `admin` section.
  private[this] val DefaultAdminAddress = new InetSocketAddress(InetAddress.getLoopbackAddress, 9990)
  private[this] val DefaultAdminConfig = AdminConfig()

  /**
   * Bundles all pluggable ConfigInitializer kinds that participate in parsing
   * a linker configuration. One field per plugin category; `iter`/`all`
   * expose them to the JSON/YAML parser.
   */
  private[linkerd] case class Initializers(
    protocol: Seq[ProtocolInitializer] = Nil,
    namer: Seq[NamerInitializer] = Nil,
    interpreter: Seq[InterpreterInitializer] = Nil,
    transformer: Seq[TransformerInitializer] = Nil,
    identifier: Seq[IdentifierInitializer] = Nil,
    classifier: Seq[ResponseClassifierInitializer] = Nil,
    telemetry: Seq[TelemeterInitializer] = Nil,
    announcer: Seq[AnnouncerInitializer] = Nil,
    failureAccrual: Seq[FailureAccrualInitializer] = Nil,
    requestAuthorizer: Seq[RequestAuthorizerInitializer] = Nil,
    tracePropagator: Seq[TracePropagatorInitializer] = Nil
  ) {
    // All initializer groups handed to the config parser.
    def iter: Iterable[Seq[ConfigInitializer]] =
      Seq(
        protocol,
        namer,
        interpreter,
        identifier,
        transformer,
        classifier,
        telemetry,
        announcer,
        failureAccrual,
        requestAuthorizer,
        tracePropagator
      )

    def all: Seq[ConfigInitializer] = iter.flatten.toSeq

    def parse(config: String): LinkerConfig =
      Linker.parse(config, this)

    def load(config: String): Linker =
      Linker.load(config, this)
  }

  // Initializers discovered on the classpath via finagle's LoadService.
  private[linkerd] lazy val LoadedInitializers = Initializers(
    LoadService[ProtocolInitializer],
    LoadService[NamerInitializer],
    LoadService[InterpreterInitializer] :+ DefaultInterpreterInitializer,
    LoadService[TransformerInitializer],
    LoadService[IdentifierInitializer],
    LoadService[ResponseClassifierInitializer],
    LoadService[TelemeterInitializer],
    LoadService[AnnouncerInitializer],
    LoadService[FailureAccrualInitializer],
    LoadService[RequestAuthorizerInitializer],
    LoadService[TracePropagatorInitializer]
  )

  /** Parses configuration text into a LinkerConfig without instantiating anything. */
  def parse(
    config: String,
    inits: Initializers = LoadedInitializers
  ): LinkerConfig = {
    val mapper = Parser.objectMapper(config, inits.iter)
    mapper.readValue[LinkerConfig](config)
  }

  private[linkerd] def load(config: String, inits: Initializers): Linker =
    parse(config, inits).mk()

  /** Parses and materializes a Linker from configuration text. */
  def load(config: String): Linker =
    load(config, LoadedInitializers)

  object param {
    // Exposes the raw linker config as a Stack param (consumed e.g. by telemeters).
    case class LinkerConfig(config: Linker.LinkerConfig)
    implicit object LinkerConfig extends Stack.Param[LinkerConfig] {
      val default = LinkerConfig(Linker.LinkerConfig(None, Seq(), None, None, None))
    }
  }

  case class LinkerConfig(
    namers: Option[Seq[NamerConfig]],
    routers: Seq[RouterConfig],
    telemetry: Option[Seq[TelemeterConfig]],
    admin: Option[AdminConfig],
    usage: Option[UsageDataTelemeterConfig]
  ) {

    /**
     * Materializes the parsed configuration into a running Linker.
     * NOTE: mutates process-wide singletons (LoadedStatsReceiver.self,
     * DefaultTracer.self, NameInterpreter global) — statement order matters.
     */
    def mk(): Linker = {
      // At least one router must be specified
      if (routers.isEmpty) throw NoRoutersSpecified

      val metrics = MetricsTree()
      val telemeterParams = Stack.Params.empty + param.LinkerConfig(this) + metrics
      val adminTelemeter = new AdminMetricsExportTelemeter(metrics, histogramSnapshotInterval(), DefaultTimer)
      val usageTelemeter = usage.getOrElse(UsageDataTelemeterConfig()).mk(telemeterParams)
      // Experimental telemeters must be explicitly opted into via `experimental: true`.
      val telemeters = telemetry.toSeq.flatten.map {
        case t if t.disabled =>
          val msg = s"The ${t.getClass.getCanonicalName} telemeter is experimental and must be " +
            "explicitly enabled by setting the `experimental' parameter to `true'."
          throw new IllegalArgumentException(msg) with NoStackTrace
        case t => t.mk(telemeterParams)
      } :+ adminTelemeter :+ usageTelemeter

      // Telemeters may provide StatsReceivers.
      val stats = mkStats(metrics, telemeters)
      LoadedStatsReceiver.self = NullStatsReceiver
      JvmStats.register(stats)
      val tracer = mkTracer(telemeters)
      DefaultTracer.self = tracer
      val params = Stack.Params.empty + fparam.Tracer(tracer) + fparam.Stats(stats)
      val namersByPrefix = mkNamers(params + fparam.Stats(stats.scope("namer")))
      NameInterpreter.setGlobal(ConfiguredNamersInterpreter(namersByPrefix))
      val routerImpls = mkRouters(params + Namers(namersByPrefix) + fparam.Stats(stats.scope("rt")))
      val adminImpl = admin.getOrElse(DefaultAdminConfig).mk(DefaultAdminAddress, stats)
      Impl(routerImpls, namersByPrefix, tracer, telemeters, adminImpl)
    }

    // Broadcasts to every telemeter-provided stats receiver plus the metrics tree.
    private[this] def mkStats(metrics: MetricsTree, telemeters: Seq[Telemeter]) = {
      val receivers = telemeters.collect { case t if !t.stats.isNull => t.stats } :+ new MetricsTreeStatsReceiver(metrics)
      for (r <- receivers) log.debug("stats: %s", r)
      BroadcastStatsReceiver(receivers)
    }

    // Broadcasts to every telemeter-provided tracer.
    private[this] def mkTracer(telemeters: Seq[Telemeter]) = {
      val all = telemeters.collect { case t if !t.tracer.isNull => t.tracer }
      for (t <- all) log.info("tracer: %s", t)
      BroadcastTracer(all)
    }

    // Instantiates configured namers as (prefix -> namer) pairs.
    // NOTE(review): the configured list is reversed before instantiation —
    // presumably so later config entries take lookup precedence; confirm
    // against the interpreter's matching order before relying on it.
    private[this] def mkNamers(params: Stack.Params) = {
      namers.getOrElse(Nil).reverse.map {
        case n if n.disabled =>
          val msg = s"The ${n.prefix.show} namer is experimental and must be " +
            "explicitly enabled by setting the `experimental' parameter to `true'."
          throw new IllegalArgumentException(msg) with NoStackTrace
        case n => n.prefix -> n.mk(params)
      }
    }

    // Builds all routers, validating label uniqueness, experimental opt-in,
    // and server address uniqueness.
    private[this] def mkRouters(params: Stack.Params) = {
      // Router labels must not conflict
      for ((label, rts) <- routers.groupBy(_.label))
        if (rts.size > 1) throw ConflictingLabels(label)

      for (r <- routers) {
        if (r.disabled) {
          val msg = s"The ${r.protocol.name} protocol is experimental and must be " +
            "explicitly enabled by setting the `experimental' parameter to `true' on each router."
          throw new IllegalArgumentException(msg) with NoStackTrace
        }
      }

      val impls = routers.map { router =>
        val stats = params[fparam.Stats].statsReceiver.scope(router.label)
        val interpreter = router.interpreter.interpreter(params + Label(router.label) + fparam.Stats(stats))
        router.router(params + fparam.Stats(stats) + DstBindingFactory.Namer(interpreter))
      }

      // Server sockets must not conflict
      impls.flatMap(_.servers).groupBy(_.addr).values.foreach {
        case Seq(srv0, srv1, _*) => throw ConflictingPorts(srv0.addr, srv1.addr)
        case _ =>
      }

      impls
    }
  }

  /**
   * Private concrete implementation, to help protect compatibility if
   * the Linker api is extended.
   */
  private case class Impl(
    routers: Seq[Router],
    namers: Seq[(Path, Namer)],
    tracer: Tracer,
    telemeters: Seq[Telemeter],
    admin: Admin
  ) extends Linker {
    override def configured[T: Stack.Param](t: T) =
      copy(routers = routers.map { rt =>
        t match {
          // Stats params are re-scoped per router label before being applied.
          case fparam.Stats(sr) => rt.configured(fparam.Stats(sr.scope("rt", rt.label)))
          case _ => rt.configured(t)
        }
      })
  }
}
| linkerd/linkerd | linkerd/core/src/main/scala/io/buoyant/linkerd/Linker.scala | Scala | apache-2.0 | 8,209 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.matchers.ShouldMatchers._
class TestFailedExceptionWithImportSpec extends Spec { // WARNING: every test below asserts an ABSOLUTE source-line number; never insert or delete lines in this file
  val baseLineNumber = 22 // absolute file line the per-test offsets are computed from
  describe("The TestFailedException") {
    it("should give the proper line on fail()") { // offset +6 points at the fail() call below
      try {
        fail()
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 6))
            case None => fail("fail() didn't produce a file name and line number string: " + e.failedCodeFileNameAndLineNumberString, e)
          }
        case e =>
          fail("fail() didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on fail(\\"message\\")") {
      try {
        fail("some message")
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 21))
            case None => fail("fail(\\"some message\\") didn't produce a file name and line number string", e)
          }
        case e =>
          fail("fail(\\"some message\\") didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on fail(throwable)") {
      try {
        fail(new RuntimeException)
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 36))
            case None => fail("fail(throwable) didn't produce a file name and line number string", e)
          }
        case e =>
          fail("fail(throwable) didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on fail(\\"some message\\", throwable)") {
      try {
        fail("some message", new RuntimeException)
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 51))
            case None => fail("fail(\\"some message\\", throwable) didn't produce a file name and line number string", e)
          }
        case e =>
          fail("fail(\\"some message\\", throwable) didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on assert(false)") {
      try {
        assert(false)
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 66))
            case None => fail("assert(false) didn't produce a file name and line number string", e)
          }
        case e =>
          fail("assert(false) didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on assert(false, \\"some message\\")") {
      try {
        assert(false, "some message")
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 81))
            case None => fail("assert(false, \\"some message\\") didn't produce a file name and line number string", e)
          }
        case e =>
          fail("assert(false, \\"some message\\") didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on assert(1 === 2)") {
      try {
        assert(1 === 2)
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 96))
            case None => fail("assert(1 === 2) didn't produce a file name and line number string", e)
          }
        case e =>
          fail("assert(1 === 2) didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on assert(1 === 2, \\"some message\\")") {
      try {
        assert(1 === 2, "some message")
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 111))
            case None => fail("assert(1 === 2, \\"some message\\") didn't produce a file name and line number string", e)
          }
        case e =>
          fail("assert(1 === 2, \\"some message\\") didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on expect(1) { 2 }") {
      try {
        expect(1) { 2 }
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 126))
            case None => fail("expect(1) { 2 } didn't produce a file name and line number string", e)
          }
        case e =>
          fail("expect(1) { 2 } didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on expect(1, \\"some message\\") { 2 }") {
      try {
        expect(1, "some message") { 2 }
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 141))
            case None => fail("expect(1, \\"some message\\") { 2 } didn't produce a file name and line number string", e)
          }
        case e =>
          fail("expect(1, \\"some message\\") { 2 } didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on intercept[IllegalArgumentException] {}") {
      try {
        intercept[IllegalArgumentException] {}
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 156))
            case None => fail("intercept[IllegalArgumentException] {} didn't produce a file name and line number string", e)
          }
        case e =>
          fail("intercept[IllegalArgumentException] {} didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on intercept[IllegalArgumentException] { throw new RuntimeException }") {
      try {
        intercept[IllegalArgumentException] { if (false) 1 else throw new RuntimeException }
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 171))
            case None => fail("intercept[IllegalArgumentException] { throw new RuntimeException } didn't produce a file name and line number string", e)
          }
        case e =>
          fail("intercept[IllegalArgumentException] { throw new RuntimeException } didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on 1 should be === 2") {
      try {
        1 should be === 2
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) =>
              if (s != ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 186))) {
                fail("s was: " + s, e)
              }
            case None => fail("assert(1 === 2) didn't produce a file name and line number string", e) // NOTE(review): message copied from the === test; likely should say "1 should be === 2"
          }
        case e =>
          fail("assert(1 === 2) didn't produce a TestFailedException", e) // NOTE(review): same stale message as above
      }
    }
    it("should give the proper line on evaluating {} should produce [IllegalArgumentException] {}") {
      try {
        evaluating {} should produce [IllegalArgumentException]
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) =>
              if (s != ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 204))) {
                fail("s was: " + s, e)
              }
            case None => fail("evaluating {} should produce [IllegalArgumentException] didn't produce a file name and line number string", e)
          }
        case e =>
          fail("evaluating {} should produce [IllegalArgumentException] didn't produce a TestFailedException", e)
      }
    }
    it("should give the proper line on evaluating { throw new RuntimeException } should produce [IllegalArgumentException]") {
      try {
        evaluating { if (false) 1 else throw new RuntimeException } should produce [IllegalArgumentException]
      }
      catch {
        case e: TestFailedException =>
          e.failedCodeFileNameAndLineNumberString match {
            case Some(s) => s should equal ("TestFailedExceptionWithImportSpec.scala:" + (baseLineNumber + 222))
            case None => fail("evaluating { throw new RuntimeException } should produce [IllegalArgumentException] didn't produce a file name and line number string", e)
          }
        case e =>
          fail("evaluating { throw new RuntimeException } should produce [IllegalArgumentException] didn't produce a TestFailedException", e)
      }
    }
  }
}
| kevinwright/scalatest | src/test/scala/org/scalatest/TestFailedExceptionWithImportSpec.scala | Scala | apache-2.0 | 10,232 |
/*
* ActorBasedXmppComponentImpl.scala
*
* Licensed to the Communitivity, Inc under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at*
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.communitivity.shellack
import scala.actors.Actor
import scala.actors.Actor._
import org.xmpp.component.Component
import org.xmpp.component.ComponentManager
import org.xmpp.packet.Packet
import org.xmpp.packet.JID
// Needed to use fully qualified class name for Actor or NetBeans scala plugin gives hidden by sparqlweather.Actor error
/**
 * Bridges an XMPP [[Component]] to a Scala actor: every inbound packet is
 * forwarded to `wrapped` together with the `out` actor, which can be used to
 * send reply packets back through the component manager.
 */
protected class ActorBasedXmppComponentImpl(conf: ComponentConfig, wrapped: Actor) extends Component {

  // Set by the container via initialize(); required before `out` can send.
  var manager: ComponentManager = null

  // Outbound channel: any Packet sent to this actor is pushed to the manager.
  var out = actor {
    loop {
      react {
        case pkt: Packet =>
          manager.sendPacket(this, pkt)
      }
    }
  }

  // Redundant `return`s and block bodies removed: single expressions suffice.
  def getName(): String = conf.name()

  def getDescription(): String = conf.description()

  def initialize(jid: JID, componentManager: ComponentManager) {
    manager = componentManager
  }

  def start() { wrapped.start() }

  def shutdown() {}

  /** Logs the packet and hands it to the wrapped actor along with the reply channel. */
  def processPacket(pkt: Packet) {
    Console.println("Packet heard, "+pkt.toXML)
    wrapped ! (pkt, out)
  }
}
| Java-Communitivity/Shellack | src/org/communitivity/shellack/ActorBasedXmppComponentImpl.scala | Scala | apache-2.0 | 1,967 |
package ch.descabato
import java.io.{ByteArrayInputStream, File}
import java.nio.file.Files
import ch.descabato.utils.Utils
import org.scalatest.FlatSpecLike
trait TestUtils extends Utils {

  /** Wraps the stream's accumulated bytes in an input stream for re-reading. */
  def replay(out: CustomByteArrayOutputStream) = {
    new ByteArrayInputStream(finishByteArrayOutputStream(out))
  }

  /** Closes the stream and returns its content as a byte array. */
  def finishByteArrayOutputStream(out: CustomByteArrayOutputStream) = {
    out.close()
    out.toBytesWrapper.asArray()
  }

  // TODO there is a copy of this now in FileUtils
  /**
   * Recursively deletes the given folders, retrying up to 5 times with a
   * 500ms pause after each pass (files may be transiently locked).
   * Logs a warning if more than one pass was needed.
   */
  def deleteAll(folders: File*) = {
    // Depth-first removal. listFiles() can return null (e.g. if the directory
    // vanished between checks), so guard with Option. The previous
    // `f.delete(); Files.deleteIfExists(...)` pair was redundant:
    // deleteIfExists alone yields the same end state and the same exceptions.
    def walk(f: File): Unit = {
      if (f.isDirectory) {
        Option(f.listFiles()).toList.flatten.foreach(walk)
        f.delete()
      } else {
        Files.deleteIfExists(f.toPath())
      }
    }
    for (f <- folders) {
      var i = 0
      do {
        walk(f)
        i += 1
        Thread.sleep(500)
      } while (i < 5 && f.exists)
      if (i > 1) {
        l.warn(s"Took delete all $i runs, now folder is deleted " + (!f.exists))
      }
    }
  }
}
/**
* Adds a field `currentTestName` that you can use inside a FlatSpecLike test,
* if you for example have many tests that take rather long, and you wonder
* which one is currently running.
*/
trait RichFlatSpecLike extends FlatSpecLike {

  // Name of the test currently being run, if any.
  private var runningTest: Option[String] = None

  /** The currently executing test's name, or the sentinel "DwE90RXP2" when none is running. */
  def currentTestName = runningTest.getOrElse("DwE90RXP2")

  protected override def runTest(testName: String, args: org.scalatest.Args) = {
    // Remember the name before delegating so it is visible inside the test body.
    runningTest = Some(testName)
    super.runTest(testName, args)
  }
}
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
/**
 * CT600E (v3) box E85: "Income Other sources".
 * Optional integer input; validation requires it to be zero or positive when present.
 */
case class E85(value: Option[Int]) extends CtBoxIdentifier("Income Other sources") with CtOptionalInteger with Input with ValidatableBox[CT600EBoxRetriever]{
  override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateZeroOrPositiveInteger(this)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600e/v3/E85.scala | Scala | apache-2.0 | 1,007 |
package slack
import javax.inject.Inject
import com.atlassian.jira.rest.client.api.domain.Issue
import com.google.common.collect.ImmutableList
import jira.JiraConfig
import jira.JiraQuery
import net.gpedro.integrations.slack.SlackAttachment
import net.gpedro.integrations.slack.SlackField
import phabricator.PhabricatorReporter
import phabricator.Review
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try
trait SlackCommandInterpreter {
  /**
   * Interprets a slash-command and produces the Slack response.
   *
   * @param channelName channel the command was issued in
   * @param command     raw command text to be tokenized and dispatched
   * @param token       verification token sent by Slack; must match the configured token
   * @return the response message, or a Failure if command handling threw
   */
  def executeCommand(channelName: String, command: String, token: String): Try[ResponseBasedSlackMessage]
}
object SlackCommandInterpreter {
  // Usage text appended to every help/error response.
  val HELP_TEXT = "/jumbo review [phab-username] \\n" +
    "/jumbo ticket [jira-username] [project] \\n" +
    "/jumbo ticket [jira-username] [ticket-no] \\n" +
    "/jumbo ticket [jira-username] [ticket-no] close \\n" +
    "/jumbo ticket [jira-username] create [project] [title] [description] \\n"

  // Response describing an exception raised while running a command.
  def ERROR_SLACK_RESPONSE(message: String, error: Throwable) =
    new ResponseBasedSlackMessage(message + ". Exception: " + error.toString)

  // Response carrying `message` followed by the usage text.
  def HELP_SLACK_RESPONSE(message: String = "") =
    new ResponseBasedSlackMessage(message + " \\n" + HELP_TEXT)

  // Response with rich attachments. NOTE(review): `isInChannel` is presumably
  // Slack's in_channel/ephemeral flag — confirm in ResponseBasedSlackMessage.
  def ATTACHMENT_SLACK_RESPONSE(response: String, attachment: List[SlackAttachment], isInChannel: Boolean = false) =
    new ResponseBasedSlackMessage(response, isInChannel).setAttachments(attachment)
}
/**
 * Default [[SlackCommandInterpreter]]: validates the Slack token, tokenizes
 * the command (keeping quoted strings intact) and dispatches `review` commands
 * to Phabricator and `ticket` commands to JIRA.
 *
 * Rewritten expression-style: the previous implementation used `return`
 * throughout (a Scala anti-pattern) and an NPE-prone `!token.equals(...)`.
 */
class SlackCommandInterpreterImpl @Inject()(jiraConfig: JiraConfig,
                                            slackConfig: SlackConfig,
                                            phabClient: PhabricatorReporter,
                                            jiraQuery: JiraQuery)
  extends SlackCommandInterpreter {

  import SlackCommandInterpreter._

  /** Renders an open review as a Slack attachment linking to the review page. */
  private def convertReviewToAttachment(review: Review): SlackAttachment = {
    new SlackAttachment()
      .setTitle(review.title)
      .setTitleLink(review.reviewUrl)
      .setFallback(review.title)
      .setFields(ImmutableList.of(new SlackField()
        .setTitle("Days old").setValue(review.daysDifference + " days")))
  }

  /**
   * Handles `/jumbo review [phab-username]`: lists the user's open reviews.
   * Expects exactly two tokens; otherwise replies with the usage text.
   */
  def runPhabricatorCommands(commandTokens: List[String]): ResponseBasedSlackMessage = {
    if (commandTokens.length != 2) {
      HELP_SLACK_RESPONSE("Missing user name for phabricator")
    } else {
      val username = commandTokens(1)
      // NOTE(review): blocking Await kept from the original synchronous
      // contract (10s cap, up to 10 reviews).
      val report = Await.result(phabClient.generateOpenReviewUserName(username, 10), 10 seconds)
      val attachments = report.openReviews.map(convertReviewToAttachment).toList
      ATTACHMENT_SLACK_RESPONSE("Phabricator report for " + username, attachments)
    }
  }

  /** Renders a JIRA issue as a Slack attachment linking to the issue page. */
  private def convertIssueToAttachment(issue: Issue): SlackAttachment = {
    new SlackAttachment()
      .setTitle(issue.getSummary)
      .setText(issue.getDescription)
      .setTitleLink(jiraConfig.baseUrl + "/browse/" + issue.getKey)
      .setFallback(issue.getSummary)
      .setFields(ImmutableList.of(new SlackField()
        .setTitle("Created on").setValue(issue.getCreationDate.toString("MM/dd/yyyy")),
        new SlackField()
          .setTitle("Status").setValue(issue.getStatus.getName),
        new SlackField()
          .setTitle("Creator").setValue(issue.getReporter.getDisplayName),
        new SlackField()
          .setTitle("Key").setValue(issue.getKey)))
  }

  /**
   * Handles `/jumbo ticket ...` commands, dispatching on token count:
   * 3 tokens -> list a project's open tickets or show a single ticket,
   * 4 tokens -> resolve/close a ticket,
   * 6 tokens -> create a ticket.
   */
  def runJiraCommands(channelName: String, commandTokens: List[String]): ResponseBasedSlackMessage = {
    if (commandTokens.length < 3) {
      HELP_SLACK_RESPONSE("Bad jira command [need min 3 args] - " + commandTokens)
    } else {
      val username = commandTokens(1)
      commandTokens.length match {
        case 3 =>
          val projectOrTicket = commandTokens(2)
          // Ticket keys contain a dash ("PROJ-123"); bare project keys do not.
          // (Not an ideal check — kept from the original implementation.)
          if (projectOrTicket.contains("-")) {
            ATTACHMENT_SLACK_RESPONSE("Issue " + projectOrTicket,
              List(convertIssueToAttachment(jiraQuery.getTicket(username, projectOrTicket))))
          } else {
            ATTACHMENT_SLACK_RESPONSE("Issues for " + username,
              jiraQuery.getOpenTickets(username, projectOrTicket)
                .map(convertIssueToAttachment))
          }
        case 4 =>
          val ticket = commandTokens(2)
          val state = commandTokens(3)
          if (!(state.equalsIgnoreCase("resolve") || state.equalsIgnoreCase("close"))) {
            HELP_SLACK_RESPONSE("Bad jira command [resolve | close] " + commandTokens)
          } else {
            jiraQuery.closeTicket(username, ticket)
            ATTACHMENT_SLACK_RESPONSE("Resolve issue " + ticket,
              List(convertIssueToAttachment(jiraQuery.getTicket(username, ticket))), true)
          }
        case 6 =>
          val state = commandTokens(2)
          if (!state.equalsIgnoreCase("create")) {
            HELP_SLACK_RESPONSE("Bad jira command [create] - " + commandTokens)
          } else {
            val project = commandTokens(3)
            val title = commandTokens(4)
            val description = commandTokens(5)
            ATTACHMENT_SLACK_RESPONSE("Created issue ",
              List(convertIssueToAttachment(jiraQuery.createTicket(username, project, title, description))), true)
          }
        case _ => HELP_SLACK_RESPONSE("Bad jira command [no match] - " + commandTokens)
      }
    }
  }

  override def executeCommand(channelName: String, command: String, token: String): Try[ResponseBasedSlackMessage] = {
    // `!=` is null-safe, unlike the previous `!token.equals(...)`.
    if (token != slackConfig.commandToken) {
      Try(HELP_SLACK_RESPONSE("Tokens did not match - " + token))
    } else {
      // Split on whitespace that is not inside double quotes.
      val commandTokens = command.trim.split("[ ]+(?=([^\\"]*\\"[^\\"]*\\")*[^\\"]*$)")
        .filterNot(_.trim.isEmpty).toList
      if (commandTokens.isEmpty) {
        Try(HELP_SLACK_RESPONSE("Should have minimum 1 arguments - " + commandTokens))
      } else {
        Try {
          commandTokens.head match {
            case "help" => HELP_SLACK_RESPONSE("Help")
            case "review" => runPhabricatorCommands(commandTokens)
            case "ticket" => runJiraCommands(channelName, commandTokens)
            case _ => HELP_SLACK_RESPONSE("Couldn't find match")
          }
        }
      }
    }
  }
}
| rsumbaly/phabricator-report | app/slack/SlackCommandInterpreter.scala | Scala | apache-2.0 | 6,018 |
package scala.tasty.reflect
/**
 * Error raised when a quoted expression (`Expr`) cannot be cast to the
 * requested type; `msg` becomes the Throwable's message.
 */
class ExprCastError(msg: String) extends Throwable(msg)
| som-snytt/dotty | library/src/scala/tasty/reflect/ExprCastError.scala | Scala | apache-2.0 | 85 |
package net.nablux.dockergen
import org.scalatest.{BeforeAndAfter, Matchers, FlatSpec}
class DockerImageSpec
  extends FlatSpec
  with Matchers
  with BeforeAndAfter {

  // Smallest concrete DockerImage: only the base image name and tag defined.
  class MinimalImage extends DockerImage {
    override def image: String = "test.img"
    override def tag: String = "0.1"
  }

  // Fresh description before every test; reassigned in the `before` hook.
  var desc: DockerImage = null

  before {
    desc = new MinimalImage()
  }

  "An empty description" should "create a minimal Dockerfile" in {
    // Only the FROM line is emitted when nothing else is declared.
    desc.toDockerString shouldBe "FROM test.img:0.1\n"
  }

  "MAINTAINER()" should "set the MAINTAINER" in {
    desc.MAINTAINER("John Doe", "doe@example.net")
    desc.toDockerString should
      include("\nMAINTAINER John Doe <doe@example.net>\n")
  }

  "ENV()" should "add an environment variable" in {
    desc.ENV("LANG", "de_DE.UTF-8")
    desc.toDockerString should
      include("\nENV LANG de_DE.UTF-8\n")
  }

  "RUN()" should "add a RUN command" in {
    desc.RUN("echo hello")
    desc.toDockerString should
      include("\nRUN echo hello\n")
  }

  "CMD()" should "add a CMD command" in {
    desc.CMD("/bin/bash")
    desc.toDockerString should
      include("\nCMD /bin/bash\n")
  }

  "##()" should "add a comment" in {
    desc.##("Helpful comment")
    desc.toDockerString should
      include("\n# Helpful comment\n")
  }
}
| tgpfeiffer/dockergen | src/test/scala/net/nablux/dockergen/DockerImageSpec.scala | Scala | bsd-3-clause | 1,292 |
package org.eknet.publet.webdav
import grizzled.slf4j.Logging
import org.eknet.publet.vfs.Path
import org.eknet.publet.Publet
import io.milton.http.ResourceFactory
import io.milton.http.exceptions.BadRequestException
/**
* @author Eike Kettner eike.kettner@gmail.com
* @since 25.06.12 21:01
*/
class WebdavResourceFactory(publet: Publet, contextPath: String) extends ResourceFactory with Logging {

  /**
   * Looks up the resource behind `path` (with the servlet context path
   * stripped) and wraps it for WebDAV. Returns null when nothing is found,
   * which milton's ResourceFactory contract treats as "resource does not exist".
   *
   * The previous wildcard case (`case r@_ => throw new BadRequestException(...)`)
   * was unreachable — Some/None already cover every Option value — and has
   * been removed.
   */
  def getResource(host: String, path: String) = {
    val resourcePath = Path(stripContextPath(path))
    publet.rootContainer.lookup(resourcePath) match {
      case Some(r) => WebdavResource(r)
      case None =>
        debug("No webdav resource found for path: " + resourcePath.asString)
        null
    }
  }

  /** Removes the configured context-path prefix, normalizing a leading slash first. */
  private def stripContextPath(path: String): String =
    if (contextPath.isEmpty) path
    else {
      val normalized = if (path.startsWith("/")) path else "/" + path
      normalized.substring(contextPath.length)
    }
}
| eikek/publet | webdav/src/main/scala/org/eknet/publet/webdav/WebdavResourceFactory.scala | Scala | apache-2.0 | 1,051 |
/*
*************************************************************************************
* Copyright 2011-2013 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.snippet.administration
import net.liftweb.common._
import net.liftweb.http.js.JsCmds._
import net.liftweb.http.DispatchSnippet
import bootstrap.liftweb.RudderConfig
import net.liftweb.http._
import net.liftweb.http.js._
import JE._
import com.normation.rudder.domain.workflows._
import net.liftweb.util._
import net.liftweb.util.Helpers._
import net.liftweb.http.SHtml
import scala.xml.Text
import scala.xml.NodeSeq
import net.liftweb.http.SHtml
import com.normation.rudder.web.model.CurrentUser
import org.joda.time.DateTime
import com.normation.rudder.services.workflows.ChangeRequestService
import com.normation.rudder.web.components.DateFormaterService
import scala.xml.Node
import scala.xml.Elem
import com.normation.rudder.authorization.Edit
import com.normation.rudder.authorization.Read
/**
 * Lift snippet backing the change-request management page: renders the table of
 * change requests and a status filter, both wired to a jQuery DataTable.
 * All collaborators are resolved eagerly from RudderConfig when the snippet is
 * instantiated (i.e. once per page rendering).
 */
class ChangeRequestManagement extends DispatchSnippet with Loggable {
  private[this] val uuidGen = RudderConfig.stringUuidGenerator
  private[this] val roCrRepo = RudderConfig.roChangeRequestRepository
  private[this] val workflowService = RudderConfig.workflowService
  private[this] val changeRequestEventLogService = RudderConfig.changeRequestEventLogService
  private[this] val workflowLoggerService = RudderConfig.workflowEventLogService
  // DOM id of the change-request table; referenced by every generated JS snippet below.
  private[this] val changeRequestTableId = "ChangeRequestId"
  // Despite the name, this is a Boolean: true when the logged-in user may read all
  // change requests (validator or deployer right). Used in `dispatch` to decide
  // between listing everything and listing only the user's own contributions.
  private[this] val currentUser = CurrentUser.checkRights(Read("validator")) || CurrentUser.checkRights(Read("deployer"))
  // Optional initial status filter taken from the "filter" URL parameter; '_' is
  // used in URLs as a stand-in for spaces (e.g. "Pending_validation").
  private[this] val initFilter : Box[String] = S.param("filter").map(_.replace("_", " "))
  // DataTables initialisation script: registers a custom "num-html" sort type
  // (numeric sort on cells that contain markup, used for the CR id column), then
  // configures the table. The trailing fnFilter call applies the initial status
  // filter on column 1 (URL parameter if present, "pending" otherwise).
  val dataTableInit =
    """
      jQuery.extend( jQuery.fn.dataTableExt.oSort, {
        "num-html-pre": function ( a ) {
          var x = String(a).replace( /<[\\s\\S]*?>/g, "" );
          return parseFloat( x );
        },
        "num-html-asc": function ( a, b ) {
          return ((a < b) ? -1 : ((a > b) ? 1 : 0));
        },
        "num-html-desc": function ( a, b ) {
          return ((a < b) ? 1 : ((a > b) ? -1 : 0));
        }
      } );
    """ +
    s"""$$('#${changeRequestTableId}').dataTable( {
      "asStripeClasses": [ 'color1', 'color2' ],
      "bAutoWidth": false,
      "bFilter" : true,
      "bPaginate" : true,
      "bLengthChange": true,
      "sPaginationType": "full_numbers",
      "bJQueryUI": true,
      "oLanguage": {
        "sSearch": ""
      },
      "sDom": '<"dataTables_wrapper_top"fl>rt<"dataTables_wrapper_bottom"ip>',
      "aaSorting": [[ 0, "asc" ]],
      "aoColumns": [
        { "sWidth": "20px" , "sType": "num-html"},
        { "sWidth": "40px" },
        { "sWidth": "100px" },
        { "sWidth": "40px" },
        { "sWidth": "40px" }
      ],
    } );
    $$('.dataTables_filter input').attr("placeholder", "Search");
    ${initFilter match {
      case Full(filter) => s"$$('#${changeRequestTableId}').dataTable().fnFilter('${filter}',1,true,false,true);"
      case eb:EmptyBox => s"$$('#${changeRequestTableId}').dataTable().fnFilter('pending',1,true,false,true);"
    }
    }"""
  /**
   * Renders one table row for a change request: id (link to its detail page),
   * workflow status, name, owner and the date of the most recent CR or workflow
   * event log entry (whichever is later).
   */
  def CRLine(cr: ChangeRequest)= {
    <tr>
      <td id="crId">
        {SHtml.a(() => S.redirectTo(s"/secure/utilities/changeRequest/${cr.id}"), Text(cr.id.value.toString))}
      </td>
      <td id="crStatus">
        {workflowService.findStep(cr.id).getOrElse("Unknown")}
      </td>
      <td id="crName">
        {cr.info.name}
      </td>
      <td id="crOwner">
        { cr.owner }
      </td>
      <td id="crDate">
        {(changeRequestEventLogService.getLastLog(cr.id),workflowLoggerService.getLastLog(cr.id)) match {
          case (Full(Some(crLog)),Full(Some(wfLog))) =>
            // Both logs exist: display the most recent of the two.
            if (crLog.creationDate.isAfter(wfLog.creationDate))
              DateFormaterService.getFormatedDate(crLog.creationDate)
            else
              DateFormaterService.getFormatedDate(wfLog.creationDate)
          case (Full(Some(crLog)),_) => DateFormaterService.getFormatedDate(crLog.creationDate)
          case (_,Full(Some(wfLog))) => DateFormaterService.getFormatedDate(wfLog.creationDate)
          case (_,_) => "Error while fetching last action Date"
        }}
      </td>
    </tr>
  }
  // Snippet dispatch table: "filter" renders the status filter widget,
  // "display" renders the table body plus the DataTable init script.
  def dispatch = {
    case "filter" =>
      xml => ("#actualFilter *" #> statusFilter).apply(xml)
    case "display" => xml =>
      ( "#crBody" #> {
        // Privileged users (validator/deployer read right) see all change
        // requests, others only those they contributed to.
        val changeRequests = if (currentUser) roCrRepo.getAll else roCrRepo.getByContributor(CurrentUser.getActor)
        changeRequests match {
          case Full(changeRequests) => changeRequests.flatMap(CRLine(_))
          case eb:EmptyBox => val fail = eb ?~! s"Could not get change requests because of : ${eb}"
            logger.error(fail.msg)
            <error>{fail.msg}</error>
        } }).apply(xml) ++
      Script(OnLoad(JsRaw(dataTableInit)))
  }
  /**
   * Builds the status-filter widget. The widget has two shapes, toggled by the
   * "more"/"less" link: a compact single-select (with "All"/"Open"/"Closed"
   * pseudo-values) and an expanded multi-select over the individual workflow
   * steps. `value` holds the current selection across AJAX round-trips.
   */
  def statusFilter = {
    val values = workflowService.stepsValue.map(_.value)
    val selectValues = values.map(x=> (x,x))
    // Mutable on purpose: the select's onchange callback and the submit callback
    // both close over this var so the expanded/compact forms stay in sync.
    var value = ""
    // Client-side handler: collect the selected statuses and filter column 1 of
    // the DataTable with a regex union; with no selection, match nothing (".").
    val filterFunction =
      s"""var filter = [];
        var selected = $$(this).find(":selected")
        if (selected.size() > 0) {
          selected.each(function () {
            filter.push($$(this).attr("value"));
          } );
          $$('#${changeRequestTableId}').dataTable().fnFilter(filter.join("|"),1,true,false,true);
        }
        else {
          // No filter, display nothing
          $$('#${changeRequestTableId}').dataTable().fnFilter(".",1);
        }"""
    val onChange = ("onchange" -> JsRaw(filterFunction))
    // Shared chrome for both filter shapes: label, select, and the toggle link
    // that swaps in the other shape via an AJAX SetHtml on #actualFilter.
    def filterForm (select:Elem,link:String, transform: String => NodeSeq) = {
      val submit =
        SHtml.a(Text(link),JsRaw(s"$$('.expand').click();"), ("style"," float:right;font-size:9px;margin-top:12px; margin-left: 5px;")) ++
        SHtml.ajaxSubmit(
          link
          , () => SetHtml("actualFilter",transform(value))
          , ("class","expand")
          , ("style","margin: 5px 10px; float:right; height:15px; width:18px; padding: 0; border-radius:25px; display:none")
        ) ++ Script(JsRaw("correctButtons()"))
      SHtml.ajaxForm(
        <b style="float:left; margin: 5px 10px">Status:</b> ++
        select % onChange ++ submit
      )
    }
    // Compact shape: one select mixing pseudo-values ("" = all, "Pending" = open,
    // "^(?!Pending)" = closed — both are regexes fed to fnFilter) with the
    // individual workflow steps, grouped via optgroups.
    def unexpandedFilter(default:String):NodeSeq = {
      val multipleValues = ("","All") :: ("Pending","Open") :: ("^(?!Pending)","Closed") :: Nil
      val select :Elem =SHtml.select(
        multipleValues ::: selectValues
        , Full(default)
        , list => value = list
        , ("style","width:auto;")
      )
      (s"value='${default}' [selected]" #> "selected").apply(
        ("select *" #> {<optgroup label="Multiple" style="margin-bottom:10px" value="" >{multipleValues.map{case (value,label) => <option value={value} style="margin-left:10px">{label}</option>}}</optgroup>++
        <optgroup label="Single">{selectValues.map{case (value,label) => <option value={value} style="margin-left:10px">{label}</option>}}</optgroup> } ).apply(
        filterForm(select,"more",expandedFilter)))
    }
    // Expanded shape: multi-select over the individual steps. The compact
    // pseudo-value is first expanded into the matching concrete steps, and the
    // selection is folded back into a pseudo-value when possible.
    def expandedFilter(default:String) = {
      val extendedDefault =
        default match {
          case "" => values
          case "Pending" => values.filter(_.contains("Pending"))
          case "^(?!Pending)" => values.filterNot(_.contains("Pending"))
          case default if (values.exists(_ == default)) => List(default)
          case _ => Nil
        }
      // NOTE(review): the `size==4` case assumes exactly four workflow steps —
      // TODO confirm this matches workflowService.stepsValue.
      def computeDefault(selectedValues:List[String]) = selectedValues match {
        case allValues if allValues.size==4 => ""
        case value :: Nil => value
        case openValues if openValues.forall(_.contains("Pending")) => "Pending"
        case closedValues if closedValues.forall(!_.contains("Pending")) => "^(?!Pending)"
        case _ => selectedValues.head
      }
      val multiSelect = SHtml.multiSelect(
        selectValues
        , extendedDefault
        , list => value = computeDefault(list)
        , ("style","width:auto;padding-right:3px;")
      )
      filterForm(multiSelect,"less",unexpandedFilter)
    }
    unexpandedFilter(initFilter.getOrElse("Pending"))
  }
}
| jooooooon/rudder | rudder-web/src/main/scala/com/normation/rudder/web/snippet/administration/ChangeRequestManagement.scala | Scala | agpl-3.0 | 9,849 |
package com.twitter.tiny.exceptions
import com.twitter.finagle.httpx.{Request, Response}
import com.twitter.finatra.http.exceptions.ExceptionMapper
import com.twitter.finatra.http.response.ResponseBuilder
import java.net.MalformedURLException
import javax.inject.Inject
/**
 * Finatra exception mapper turning a [[java.net.MalformedURLException]] into an
 * HTTP 400 (Bad Request) response carrying the exception message.
 */
class MalformedURLExceptionMapper @Inject()(response: ResponseBuilder)
  extends ExceptionMapper[MalformedURLException] {

  override def toResponse(request: Request, exception: MalformedURLException): Response = {
    val reason = exception.getMessage
    response.badRequest(s"Malformed URL - $reason")
  }
}
| deanh/finatra | examples/tiny-url/src/main/scala/com/twitter/tiny/exceptions/MalformedURLExceptionMapper.scala | Scala | apache-2.0 | 561 |
package com.sksamuel.elastic4s.testkit
import com.sksamuel.elastic4s.embedded.LocalNode
import com.sksamuel.elastic4s.{ElasticDsl, IndexAndTypes, Indexes, TcpClient}
import org.elasticsearch.ResourceAlreadyExistsException
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse
import org.elasticsearch.cluster.health.ClusterHealthStatus
import org.elasticsearch.transport.RemoteTransportException
import org.scalatest.{BeforeAndAfterAll, Suite}
import org.slf4j.LoggerFactory
/**
 * Test mixin that provisions a local Elasticsearch node per suite class
 * (ClassLocalNodeProvider) and shuts it down once the suite finishes.
 */
trait ElasticSugar extends AbstractElasticSugar with ClassLocalNodeProvider with BeforeAndAfterAll {
  this: Suite with LocalNodeProvider =>

  // Node and client are implicit so the elastic4s DSL picks them up in tests.
  implicit val node = getNode
  implicit val client = node.elastic4sclient(false)

  override def afterAll(): Unit = {
    // Stop the node and (true) remove its data directories.
    node.stop(true)
  }
}
/**
 * Test mixin backed by a classloader-wide shared node: the node is reused
 * across suites and intentionally never stopped here.
 */
trait SharedElasticSugar extends AbstractElasticSugar with ClassloaderLocalNodeProvider {
  this: Suite with LocalNodeProvider =>

  implicit val node = getNode
  implicit val client = node.elastic4sclient(false)
}
/**
 * Test mixin whose provider creates a brand new node on every request
 * (AlwaysNewLocalNodeProvider); no node/client members are fixed here.
 */
trait DualElasticSugar extends AbstractElasticSugar with AlwaysNewLocalNodeProvider {
  this: Suite with LocalNodeProvider =>
}
/**
 * Provides helper methods for things like refreshing an index, and blocking until an
 * index has a certain count of documents. These methods are very useful when writing
 * tests to allow for blocking, iterative coding.
 *
 * All `blockUntil*` helpers poll with a linearly growing backoff (see
 * [[blockUntil]]) and fail the test via `require` if the condition never holds.
 */
trait AbstractElasticSugar extends ElasticDsl {
  this: Suite with LocalNodeProvider =>

  def node: LocalNode
  def client: TcpClient

  private val logger = LoggerFactory.getLogger(getClass)

  // refresh all indexes
  def refreshAll(): RefreshResponse = refresh(Indexes.All)

  // refreshes all specified indexes
  def refresh(indexes: Indexes): RefreshResponse = {
    client.execute {
      refreshIndex(indexes)
    }.await
  }

  // Blocks until the cluster reports green health.
  def blockUntilGreen(): Unit = {
    blockUntil("Expected cluster to have green status") { () =>
      client.execute {
        clusterHealth()
      }.await.getStatus == ClusterHealthStatus.GREEN
    }
  }

  /**
   * Polls `predicate` up to 17 times (backoff 0..16), sleeping 200ms * backoff
   * before each retry (so no sleep before the first attempt). Exceptions thrown
   * by the predicate are logged and treated as "not yet". Fails with `explain`
   * if the predicate never returns true.
   */
  def blockUntil(explain: String)(predicate: () => Boolean): Unit = {
    var backoff = 0
    var done = false
    while (backoff <= 16 && !done) {
      if (backoff > 0) Thread.sleep(200 * backoff)
      backoff = backoff + 1
      try {
        done = predicate()
      } catch {
        case e: Throwable =>
          logger.warn("problem while testing predicate", e)
      }
    }
    require(done, s"Failed waiting on: $explain")
  }

  // Creates the index, silently ignoring "already exists" errors (both the
  // local and the remote-transport-wrapped variants).
  def ensureIndexExists(index: String): Unit = {
    try {
      client.execute {
        createIndex(index)
      }.await
    } catch {
      case _: ResourceAlreadyExistsException => // Ok, ignore.
      case _: RemoteTransportException => // Ok, ignore.
    }
  }

  def doesIndexExists(name: String): Boolean = {
    client.execute {
      indexExists(name)
    }.await.isExists
  }

  // Deletes the index if present; no-op otherwise. Note the explicit
  // ElasticDsl.deleteIndex to avoid recursing into this method.
  def deleteIndex(name: String): Unit = {
    if (doesIndexExists(name)) {
      client.execute {
        ElasticDsl.deleteIndex(name)
      }.await
    }
  }

  // Drops and recreates the index, then waits until it reports zero documents.
  def truncateIndex(index: String): Unit = {
    deleteIndex(index)
    ensureIndexExists(index)
    blockUntilEmpty(index)
  }

  def blockUntilDocumentExists(id: String, index: String, `type`: String): Unit = {
    blockUntil(s"Expected to find document $id") { () =>
      client.execute {
        get(id).from(index / `type`)
      }.await.exists
    }
  }

  // Waits until the index holds AT LEAST `expected` documents.
  def blockUntilCount(expected: Long, index: String): Unit = {
    blockUntil(s"Expected count of $expected") { () =>
      val result = client.execute {
        search(index).matchAllQuery().size(0)
      }.await
      expected <= result.totalHits
    }
  }

  // Same as above, scoped to the given index/type combination.
  def blockUntilCount(expected: Long, indexAndTypes: IndexAndTypes): Unit = {
    blockUntil(s"Expected count of $expected") { () =>
      val result = client.execute {
        search(indexAndTypes).matchAllQuery().size(0)
      }.await
      expected <= result.totalHits
    }
  }

  /**
   * Will block until the given index and optional types have at least the given number of documents.
   */
  def blockUntilCount(expected: Long, index: String, types: String*): Unit = {
    blockUntil(s"Expected count of $expected") { () =>
      val result = client.execute {
        search(index / types).matchAllQuery().size(0)
      }.await
      expected <= result.totalHits
    }
  }

  // Waits until the count matches EXACTLY (unlike the blockUntilCount variants).
  def blockUntilExactCount(expected: Long, index: String, types: String*): Unit = {
    blockUntil(s"Expected count of $expected") { () =>
      expected == client.execute {
        search(index / types).size(0)
      }.await.totalHits
    }
  }

  def blockUntilEmpty(index: String): Unit = {
    blockUntil(s"Expected empty index $index") { () =>
      client.execute {
        search(Indexes(index)).size(0)
      }.await.totalHits == 0
    }
  }

  def blockUntilIndexExists(index: String): Unit = {
    blockUntil(s"Expected exists index $index") { () ⇒
      doesIndexExists(index)
    }
  }

  def blockUntilIndexNotExists(index: String): Unit = {
    blockUntil(s"Expected not exists index $index") { () ⇒
      !doesIndexExists(index)
    }
  }

  // Waits until optimistic-concurrency version of the document matches.
  def blockUntilDocumentHasVersion(index: String, `type`: String, id: String, version: Long): Unit = {
    blockUntil(s"Expected document $id to have version $version") { () =>
      client.execute {
        get(id).from(index / `type`)
      }.await.version == version
    }
  }
}
| tyth/elastic4s | elastic4s-testkit/src/main/scala/com/sksamuel/elastic4s/testkit/ElasticSugar.scala | Scala | apache-2.0 | 5,339 |
package org.scalatra
import org.scalatra.test.scalatest.ScalatraFunSuite
// Concrete servlet under test; all behaviour lives in AfterTestAppBase.
class AfterTestServlet extends ScalatraServlet with AfterTestAppBase
/**
 * Fixture routes exercising Scalatra `after` filters. The catch-all filter and
 * a path-specific filter both run for a matching request; the tests in
 * AfterTest show the path-specific filter (registered second) determines the
 * final status, while unmatched paths get the catch-all's 204.
 */
trait AfterTestAppBase extends ScalatraBase {
  // Catch-all: applies to every request.
  after() {
    response.setStatus(204)
  }
  after("/some/path") {
    response.setStatus(202)
  }
  after("/other/path") {
    response.setStatus(206)
  }
  // Empty-bodied routes: only the after filters affect the response.
  get("/some/path") {}
  get("/other/path") {}
  get("/third/path") {}
}
// Runs the shared AfterTest assertions against the servlet-based implementation.
class AfterServletTest extends AfterTest {
  mount(classOf[AfterTestServlet], "/*")
}
/**
 * Shared assertions for `after` filter semantics; subclasses mount the app
 * under test (see AfterServletTest).
 */
abstract class AfterTest extends ScalatraFunSuite {
  test("afterAll is applied to all paths") {
    // Only the catch-all filter matches /third/path.
    get("/third/path") {
      status should equal(204)
    }
  }
  test("after only applies to a given path") {
    // Path-specific filters win over the catch-all for their own path.
    get("/some/path") {
      status should equal(202)
    }
    get("/other/path") {
      status should equal(206)
    }
  }
}
| lightvector/scalatra | core/src/test/scala/org/scalatra/AfterTest.scala | Scala | bsd-2-clause | 856 |
package co.blocke.scalajack
package json4s
import model._
import model.Writer
import scala.collection.{Map, mutable}
import org.json4s._
/**
 * ScalaJack [[Writer]] implementation that renders output as json4s [[JValue]]
 * ASTs. Each `write*` method appends exactly one JValue to the supplied
 * builder; `null` inputs map to JNull where the original type is nullable.
 */
case class Json4sWriter() extends Writer[JValue] {

  /** Writes an iterable as a JArray, or JNull for a null collection. */
  def writeArray[Elem](t: Iterable[Elem], elemTypeAdapter: TypeAdapter[Elem], out: collection.mutable.Builder[JValue, JValue]): Unit =
    t match {
      case null => out += JNull
      case a =>
        // Accumulate elements in a list builder and build the JArray once,
        // instead of rebuilding it with `arr.arr :+ elem` per element (O(n^2)).
        val items = List.newBuilder[JValue]
        val outBuf = JValueBuilder()
        a.iterator.foreach { item =>
          outBuf.clear()
          elemTypeAdapter.write(item, this, outBuf)
          items += outBuf.result
        }
        out += JArray(items.result())
    }

  /** Appends an already-built JValue verbatim. */
  def writeRaw(t: JValue, out: mutable.Builder[JValue, JValue]): Unit =
    out += t

  // NOTE(review): unlike writeDecimal/writeString, a null BigInt is not mapped
  // to JNull here — behaviour preserved; confirm whether callers can pass null.
  def writeBigInt(t: BigInt, out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JInt(t)

  def writeBoolean(t: Boolean, out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JBool(t)

  def writeDecimal(t: BigDecimal, out: collection.mutable.Builder[JValue, JValue]): Unit =
    t match {
      case null => out += JNull
      case d => out += JDecimal(d)
    }

  def writeDouble(t: Double, out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JDouble(t)

  def writeInt(t: Int, out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JInt(t)

  def writeLong(t: Long, out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JLong(t)

  /**
   * Writes a map as a JObject. Keys are serialized through their adapter and
   * then stringified (JSON object keys must be strings); null keys are illegal.
   */
  def writeMap[Key, Value, To](t: Map[Key, Value], keyTypeAdapter: TypeAdapter[Key], valueTypeAdapter: TypeAdapter[Value], out: mutable.Builder[JValue, JValue]): Unit =
    t match {
      case null => out += JNull
      case daMap =>
        val outBuf = JValueBuilder()
        val outMap = daMap.map {
          case (key, value) =>
            if (key == null)
              throw new ScalaJackError("Map keys cannot be null.")
            outBuf.clear()
            keyTypeAdapter.write(key.asInstanceOf[Key], this, outBuf)
            val k = outBuf.result().values.toString
            outBuf.clear()
            valueTypeAdapter.write(value, this, outBuf)
            k -> outBuf.result
        }.toList
        out += JObject(outMap)
    }

  def writeString(t: String, out: collection.mutable.Builder[JValue, JValue]): Unit =
    t match {
      case null => out += JNull
      case _: String => out += JString(t)
    }

  def writeNull(out: collection.mutable.Builder[JValue, JValue]): Unit =
    out += JNull

  // Serializes (label, value, adapter) triples to a name->JValue map,
  // skipping fields whose value is None (absent optional fields).
  @inline private def writeFields(
      fields: List[(String, Any, TypeAdapter[Any])]
  ): Map[String, JValue] = {
    val outBuf = JValueBuilder()
    fields.collect {
      case (label, value, valueTypeAdapter) if value != None =>
        outBuf.clear()
        valueTypeAdapter.write(value, this, outBuf)
        label -> outBuf.result()
    }.toMap
  }

  /**
   * Writes a class instance as a JObject composed of extra fields (e.g. type
   * hints), the declared class fields, and any fields captured via SJCapture.
   * Later groups overwrite earlier ones on key collision.
   */
  def writeObject[T](
      t: T,
      orderedFieldNames: List[String],
      fieldMembersByName: Map[String, ClassFieldMember[_,_]],
      out: mutable.Builder[JValue, JValue],
      extras: List[(String, ExtraFieldValue[_])] = List.empty[(String, ExtraFieldValue[_])]
  ): Unit =
    t match {
      case null => out += JNull
      case _ =>
        val extraFields = writeFields(
          extras.map(
            e =>
              (
                e._1,
                e._2.value,
                e._2.valueTypeAdapter.asInstanceOf[TypeAdapter[Any]]
              )
          )
        )
        val classFields = writeFields(orderedFieldNames.map { orn =>
          val oneField = fieldMembersByName(orn)
          (orn, oneField.info.valueOf(t), oneField.valueTypeAdapter.asInstanceOf[TypeAdapter[Any]])
        })
        val captureFields = t match {
          case sjc: SJCapture =>
            import scala.jdk.CollectionConverters._
            sjc.captured.asScala.asInstanceOf[Map[String, JValue]]
          case _ => Map.empty[String, JValue]
        }
        out += JObject((extraFields ++ classFields ++ captureFields).toList)
    }

  /** Writes a tuple as a positional JArray of its fields. */
  def writeTuple[T](
      t: T,
      writeFn: (Product) => List[(TypeAdapter[_], Any)],
      out: mutable.Builder[JValue, JValue]
  ): Unit = {
    // Same builder-based accumulation as writeArray (avoids O(n^2) appends).
    val items = List.newBuilder[JValue]
    val outBuf = JValueBuilder()
    writeFn(t.asInstanceOf[Product]).foreach { case (fieldTA, fieldValue) =>
      outBuf.clear()
      fieldTA.castAndWrite(fieldValue, this, outBuf)
      items += outBuf.result
    }
    out += JArray(items.result())
  }
}
| gzoller/ScalaJack | core/src/main/scala/co.blocke.scalajack/json4s/Json4sWriter.scala | Scala | mit | 4,450 |
package bifrost.program
import java.time.Instant
import bifrost.{BifrostGenerators, ValidGenerators}
import io.circe.{Json, JsonObject}
import org.scalacheck.Gen
import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks}
import org.scalatest.{Matchers, PropSpec}
import bifrost.transaction.box.proposition.PublicKey25519Proposition
import scorex.crypto.signatures.Curve25519
import scala.util.{Failure, Random, Success, Try}
/**
 * Property/unit tests for Program construction and ExecutionBuilderTerms
 * serialization limits.
 */
class ProgramSpec extends PropSpec
  with PropertyChecks
  with GeneratorDrivenPropertyChecks
  with Matchers
  with BifrostGenerators
  with ValidGenerators {

  //TODO Replace with
  /*property("Calling a method not in the program will throw an error") {
    forAll(programGen) {
      c: Program => {
        forAll(stringGen.suchThat(!validProgramMethods.contains(_))) {
          m: String => {
            val possibleArgs = JsonObject.empty
            val party = propositionGen.sample.get
            val result = Program.execute(c, m)(party)(possibleArgs)
            assert(result.isFailure && result.failed.get.isInstanceOf[MatchError])
          }
        }
      }
    }
  }*/

  // Round-trip: terms -> json -> terms must be identity.
  property("Json works properly for ExecutionBuilderTerms") {
    forAll(validExecutionBuilderTermsGen) {
      t: ExecutionBuilderTerms => {
        t.json.as[ExecutionBuilderTerms].right.get shouldBe t
      }
    }
  }

  // Terms longer than 16 KiB (16 * 1024 chars) must be rejected at construction.
  property("Cannot create ExecutionBuilderTerms with too long of a string") {
    forAll(Gen.choose(16 * 1024 + 1, 100000)) {
      size: Int => {
        Try {
          ExecutionBuilderTerms(Random.alphanumeric.take(size).mkString)
        } shouldBe a[Failure[_]]
      }
    }
  }

  // Minimal valid ExecutionBuilder JSON used by the Program construction tests.
  def mockExecutionBuilder: Json =
    ExecutionBuilder(
      ExecutionBuilderTerms("testing"),
      "myAssetCode",
      ProgramPreprocessor(
        "test",
        validInitJsGen(
          "test",
          "testCode",
        ).sample.get
      )(JsonObject.empty)
    ).json

  // Deterministic key proposition: the public key bytes are all `fillByte`.
  def getMockPublicKeyProposition(fillByte: Byte): PublicKey25519Proposition = {
    PublicKey25519Proposition(Array.fill(Curve25519.KeyLength)(fillByte));
  }

  property("Can create program") {
    Try {
      Program(
        Map(getMockPublicKeyProposition(0) -> "hub", getMockPublicKeyProposition(1) -> "producer"),
        Instant.now.toEpochMilli,
        Array(),
        mockExecutionBuilder
      )
    } shouldBe a[Success[_]]
  }

  // An empty party map must make construction fail.
  property("Can not create program due to incorrect number of parties") {
    Try {
      Program(
        Map(),
        Instant.now.toEpochMilli,
        Array(),
        mockExecutionBuilder
      )
    } shouldBe a[Failure[_]]
  }
} | Topl/Project-Bifrost | src/test/scala/bifrost/program/ProgramSpec.scala | Scala | mpl-2.0 | 2,640 |
package com.clemble.util.validator
import play.api.libs.json.{JsObject, Reads, JsValue, __}
import shapeless._
import shapeless.labelled.FieldType
/**
* Partial JSON validator for entity, that allows to make partial object presentation validation.
* @tparam T entity type
*/
trait PatchValidator[T] {
/**
* Checks if this JSON is valid part of JSON presentation for provided
*
* @param json partial presentation to check
* @return true if presentation is valid, false otherwise
*/
def isValid(json: JsValue): Boolean
}
/**
 * Derives PatchValidator instances generically via shapeless LabelledGeneric:
 * fields are peeled off the record one at a time (deriveHCons), and the HNil
 * terminator rejects any leftover keys (deriveHNil), so unknown fields fail
 * validation.
 */
object PatchValidator extends PatchValidatorLowImplicits {
  /**
   * In case of empty HNil, ensure Json is empty as well, to prevent adding
   * trash to JSON.
   *
   * NOTE(review): non-object JSON is accepted (true) here but rejected by the
   * low-priority deriveHConsReads — confirm the intended treatment of
   * non-object input.
   *
   * @return tail PatchValidator
   */
  implicit def deriveHNil: PatchValidator[HNil] =
    new PatchValidator[HNil] {
      def isValid(json: JsValue): Boolean = {
        json match {
          case JsObject(fields) => fields.isEmpty
          case _ => true
        }
      }
    }
  /**
   * Derive a case class field using a `PatchValidator` (takes priority over the
   * `Reads`-based derivation in PatchValidatorLowImplicits): the field, if
   * present, is validated recursively, then removed before validating the rest.
   */
  implicit def deriveHCons[K <: Symbol, V, T <: HList]
  (implicit
   key: Witness.Aux[K],
   sv: Lazy[PatchValidator[V]],
   st: Lazy[PatchValidator[T]]
  ): PatchValidator[FieldType[K, V] :: T] =
    new PatchValidator[FieldType[K, V] :: T] {
      def isValid(json: JsValue): Boolean = {
        json match {
          case obj: JsObject =>
            val field = key.value.name
            // readNullable: an absent field is fine; a present field must pass
            // the nested validator. The field is then stripped so the tail
            // validator only sees the remaining keys.
            (__ \\ field).readNullable[JsValue].filter(_.forall(sv.value.isValid)).reads(obj).isSuccess && st.value.isValid(obj - field)
          case _ => true
        }
      }
    }
  // Builder intermediary so callers can write PatchValidator[T].validator
  // without supplying the generic representation type explicitly.
  class PatchValidatorGen[T] {
    def validator[TT](implicit gen: LabelledGeneric.Aux[T, TT], vtt: Lazy[PatchValidator[TT]]): PatchValidator[T] =
      new PatchValidator[T] {
        override def isValid(json: JsValue): Boolean = vtt.value.isValid(json)
      }
  }
  /**
   * Validator factory method
   *
   * @tparam T entity type
   * @return PatchValidator for provided entity
   */
  def apply[T] = new PatchValidatorGen[T]
  /**
   * Check JSON is valid partial presentation of type `T`
   *
   * @param json partial json presentation
   * @param validator validator to use
   * @tparam T type
   * @return true when the fragment validates against `T`
   */
  def isValid[T](json: JsValue)(implicit validator: PatchValidator[T]) = {
    validator.isValid(json)
  }
}
/**
 * Low-priority fallback derivation: when no PatchValidator exists for a field's
 * type, fall back to its play-json `Reads` instance.
 */
trait PatchValidatorLowImplicits {
  /**
   * Derive a case class field using a `Reads`: a present field must deserialize
   * with `rv`; absent fields pass. The field is stripped before delegating to
   * the tail validator.
   *
   * NOTE(review): non-object JSON returns false here, whereas the
   * PatchValidator-based derivations return true — confirm which is intended.
   */
  implicit def deriveHConsReads[K <: Symbol, V, T <: HList]
  (implicit
   key: Witness.Aux[K],
   rv: Lazy[Reads[V]],
   st: Lazy[PatchValidator[T]]
  ): PatchValidator[FieldType[K, V] :: T] =
    new PatchValidator[FieldType[K, V] :: T] {
      def isValid(json: JsValue): Boolean = {
        json match {
          case obj: JsObject =>
            val field = key.value.name
            (__ \\ field).readNullable(rv.value).reads(obj).isSuccess && st.value.isValid(obj - field)
          case _ => false
        }
      }
    }
}
| clemble/scala-validator | src/main/scala/com/clemble/util/validator/PatchValidator.scala | Scala | apache-2.0 | 3,028 |
package io.shaka.http
import io.shaka.http.Http.HttpHandler
import io.shaka.http.HttpHeader.CONTENT_LENGTH
import io.shaka.http.Method.{GET, HEAD}
import io.shaka.http.Status.INTERNAL_SERVER_ERROR
object Handlers {

  /**
   * Decorator that serves HTTP HEAD requests through the wrapped handler's GET
   * route: the request is forwarded as GET, and the response gains a
   * Content-Length header reporting the entity size ("0" when absent). The
   * entity is left on the response; presumably the server layer omits the body
   * for HEAD responses — TODO confirm.
   */
  object HEADRequestHandler {
    def ~>(handler: HttpHandler): HttpHandler = (request) => {
      // Applies doWhenHead only when the incoming request is a HEAD request;
      // GET and other methods pass through untouched.
      def foldHeadRequest[T](original: T)(doWhenHead: T => T): T = {
        if(request.method == HEAD) doWhenHead(original) else original
      }
      val response = handler(foldHeadRequest(request)(_.copy(method = GET)))
      foldHeadRequest(response)(_.header(CONTENT_LENGTH, response.entity.fold("0")(_.content.length.toString)))
    }
  }

  /**
   * Decorator converting handler exceptions into a 500 Internal Server Error
   * response. Only non-fatal exceptions are converted: the previous
   * `case e: Throwable` also swallowed fatal JVM errors (OutOfMemoryError,
   * InterruptedException, ...), which must be allowed to propagate.
   */
  object SafeRequestHandler {
    import scala.util.control.NonFatal
    def ~>(handler: HttpHandler): HttpHandler = (request) => try {
      handler(request)
    } catch {
      case NonFatal(e) => Response().entity(s"Server error: ${e.getMessage}").status(INTERNAL_SERVER_ERROR)
    }
  }
}
| stacycurl/naive-http | src/main/scala/io/shaka/http/Handlers.scala | Scala | apache-2.0 | 916 |
package mimir.lenses
import java.io.File
import java.sql._
import java.util
import mimir.algebra._
import mimir.ctables._
import mimir.util.RandUtils
import mimir.Database
import mimir.parser._
import mimir.models._
import scala.collection.JavaConversions._
import scala.util._
import mimir.ml.spark.{SparkML, Classification}
/**
 * Lens factory that adds a "picked" output column to a query: the value is
 * chosen among several source columns (PICK_FROM) by user-supplied expressions
 * and/or a trained PickerModel, and exposed under PICK_AS (or a generated name).
 */
object PickerLens {
  /**
   * Builds the lens operator and its backing model.
   *
   * Recognised `args` (all optional unless noted):
   *  - PICK_FROM(col, ...)       columns to pick among (required in practice;
   *                              they must all share one type)
   *  - PICK_AS(col)              name of the output column
   *  - HIDE_PICK_FROM(col)       source columns to project away
   *  - CLASSIFY_UP_FRONT(bool)   train/evaluate the model eagerly (default true)
   *  - CLASSIFIER(name)          optional ML classifier for the model
   *  - EXPRS / UEXPRS(cond, res) certain / uncertain pick rules
   *
   * @return the rewritten operator (original columns, optionally minus hidden
   *         PICK_FROM columns, plus the pick column) and the trained model.
   */
  def create(
    db: Database,
    name: ID,
    humanReadableName: String,
    query: Operator,
    args:Seq[Expression]
  ): (Operator, Seq[Model]) =
  {
    val operSchema = db.typechecker.schemaOf(query)
    val schemaMap = operSchema.toMap
    // Extract the PICK_FROM column names and their types from the query schema.
    val (pickFromColumns, pickerColTypes ) = args.flatMap {
      case Function(ID("pick_from"), cols ) =>
        Some( cols.map { case col:Var => (col.name, schemaMap(col.name))
                         case col => throw new RAException(s"Invalid pick_from argument: $col in PickerLens $name (not a column reference)")
                       } )
      case _ => None
    }.toSeq.flatten.unzip
    // All PICK_FROM columns must share one type; throws otherwise.
    // NOTE(review): `pickerColTypes.head` assumes at least one PICK_FROM column
    // was given — presumably validated upstream; TODO confirm.
    pickerColTypes.foldLeft(pickerColTypes.head)((init, element) => element match {
      case `init` => init
      case x => throw new RAException(s"Invalid PICK_FROM argument Type: $x in PickerLens $name (PICK_FROM columns must be of same type)")
    })
    // Output column name: explicit PICK_AS, or PICK_ONE_<cols> by default.
    val pickToCol = args.flatMap {
      case Function(ID("pick_as"), Seq(Var(col))) => Some( col )
      case _ => None
    }.headOption
      .getOrElse( ID("PICK_ONE_", ID(pickFromColumns, "_")) )
    val projectedOutPicFromCols = args.flatMap {
      case Function(ID("hide_pick_from"), Seq(Var(col))) => Some( col )
      case _ => None
    }
    // NOTE(review): non-exhaustive match — more than one CLASSIFY_UP_FRONT
    // argument raises a MatchError.
    val classifyUpFront = args.flatMap {
      case Function(ID("classify_up_front"), Seq(bp@BoolPrimitive(b))) => Some( bp )
      case _ => None
    } match {
      case Seq() => true
      case Seq(BoolPrimitive(b)) => b
    }
    val useClassifier = args.flatMap {
      case Function(ID("classifier"), Seq(StringPrimitive(classifier))) => Some(ID(classifier))
      case Function(ID("classifier"), Seq(Var(classifier))) => Some(classifier)
      case _ => None
    }.headOption
    val pickerModel = PickerModel.train(db,
      ID(name,"_PICKER_MODEL:", ID(pickFromColumns, "_")),
      pickToCol,
      pickFromColumns,
      pickerColTypes,
      useClassifier,
      classifyUpFront,
      query
    )
    // Replaces AVG(col) references inside user expressions with the actual
    // average, computed by running a sub-query against the database. Lazy so
    // the query only runs if EXPRS/UEXPRS are present.
    lazy val expressionSubstitutions : (Expression) => Expression = (expr) => {
      expr match {
        case Function(ID("avg"), Seq(Var(col))) => {
          val exprSubQ = Project(Seq(ProjectArg(ID("AVG_",col), expr)), query)
          db.query(exprSubQ)( results => {
            var replacementExpr : Expression = NullPrimitive()
            if(results.hasNext()){
              replacementExpr = results.next()(0)
            }
            results.close()
            replacementExpr
          })
        }
        case x => x.recur(expressionSubstitutions(_))
      }
    }
    // Uncertain rules: each result is wrapped in a VGTerm so provenance is
    // tracked. With no UEXPRS, default to always asking the model.
    val pickUncertainExprs : List[(Expression, Expression)] = args.flatMap {
      case Function(ID("uexprs"), Seq(StringPrimitive(expr), StringPrimitive(resultExpr)) ) => Some( (
        ExpressionParser.expr(expr),
        VGTerm(pickerModel.name, 0,Seq[Expression](RowIdVar()).union(pickFromColumns.map(Var(_))), Seq(expressionSubstitutions(ExpressionParser.expr(resultExpr))))
      ) )
      case _ => None
    }.toList match {
      case Seq() => {
        List((BoolPrimitive(true), VGTerm(pickerModel.name, 0,Seq[Expression](RowIdVar()).union(pickFromColumns.map(Var(_))),Seq()) ))
      }
      case x => x
    }
    // Certain rules: applied as plain expressions, no provenance wrapping.
    val pickCertainExprs : List[(Expression, Expression)] = args.flatMap {
      case Function(ID("exprs"), Seq(StringPrimitive(expr), StringPrimitive(resultExpr)) ) => Some( (
        ExpressionParser.expr(expr),
        expressionSubstitutions(ExpressionParser.expr(resultExpr))
      ) )
      case _ => None
    }.toList
    // Certain rules take precedence (listed first in the CASE expression).
    val pickExpr = ExpressionUtils.makeCaseExpression(
      pickCertainExprs.union(pickUncertainExprs),
      NullPrimitive()
    )
    val pickerColsTypeMap = pickFromColumns.zip(pickerColTypes).toMap
    // Keep every original column (unless hidden) and append the pick column.
    val projectArgs =
      query.columnNames.
        flatMap( col => pickerColsTypeMap.get(col) match {
          case None => Some(ProjectArg(col, Var(col)))
          case Some(pickFromCol) => {
            if(projectedOutPicFromCols.contains(col))
              None //none if you dont want the from cols
            else
              Some(ProjectArg(col, Var(col)))
          }
        }).union(Seq(ProjectArg(pickToCol, db.compiler.optimize(pickExpr))))
    return (
      Project(projectArgs, query),
      Seq(pickerModel)
    )
  }
}
| UBOdin/mimir | src/main/scala/mimir/lenses/PickerLens.scala | Scala | apache-2.0 | 4,676 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import scala.reflect.ClassTag
import org.scalacheck.Gen
import org.scalactic.TripleEqualsSupport.Spread
import org.scalatest.exceptions.TestFailedException
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
import org.apache.spark.sql.catalyst.analysis.ResolveTimeZone
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.optimizer.SimpleTestOptimizer
import org.apache.spark.sql.catalyst.plans.PlanTestBase
import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Project}
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, ArrayData, MapData}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* A few helper functions for expression evaluation testing. Mixin this trait to use them.
*
* Note: when you write unit test for an expression and call `checkEvaluation` to check the result,
* please make sure that you explore all the cases that can lead to null result (including
* null in struct fields, array elements and map values). The framework will test the
* nullability flag of the expression automatically.
*/
trait ExpressionEvalHelper extends ScalaCheckDrivenPropertyChecks with PlanTestBase {
self: SparkFunSuite =>
  // Builds an InternalRow from external Scala values by converting each one to
  // its Catalyst representation first.
  protected def create_row(values: Any*): InternalRow = {
    InternalRow.fromSeq(values.map(CatalystTypeConverters.convertToCatalyst))
  }
// Currently MapData just stores the key and value arrays. Its equality is not well implemented,
// as the order of the map entries should not matter for equality. This method creates MapData
// with the entries ordering preserved, so that we can deterministically test expressions with
// map input/output.
protected def create_map(entries: (_, _)*): ArrayBasedMapData = {
create_map(entries.map(_._1), entries.map(_._2))
}
protected def create_map(keys: Seq[_], values: Seq[_]): ArrayBasedMapData = {
assert(keys.length == values.length)
val keyArray = CatalystTypeConverters
.convertToCatalyst(keys)
.asInstanceOf[ArrayData]
val valueArray = CatalystTypeConverters
.convertToCatalyst(values)
.asInstanceOf[ArrayData]
new ArrayBasedMapData(keyArray, valueArray)
}
  // Resolves time zones in the expression, asserts it is fully resolved, then
  // round-trips it through Java serialization — mimicking shipment to executors
  // and catching non-serializable Expression implementations early.
  private def prepareEvaluation(expression: Expression): Expression = {
    val serializer = new JavaSerializer(new SparkConf()).newInstance
    val resolver = ResolveTimeZone(new SQLConf)
    val expr = resolver.resolveTimeZones(expression)
    assert(expr.resolved)
    serializer.deserialize(serializer.serialize(expr))
  }
  /**
   * Evaluates `expression` against `inputRow` through every evaluation path
   * (interpreted, mutable-projection codegen, unsafe projection when the type
   * supports it, and post-optimization) and checks each result against the
   * Catalyst conversion of `expected`.
   */
  protected def checkEvaluation(
      expression: => Expression, expected: Any, inputRow: InternalRow = EmptyRow): Unit = {
    // Defined as a method (not a val) so every check gets a freshly prepared
    // expression instance.
    def expr = prepareEvaluation(expression)
    val catalystValue = CatalystTypeConverters.convertToCatalyst(expected)
    checkEvaluationWithoutCodegen(expr, catalystValue, inputRow)
    checkEvaluationWithMutableProjection(expr, catalystValue, inputRow)
    if (GenerateUnsafeProjection.canSupport(expr.dataType)) {
      checkEvaluationWithUnsafeProjection(expr, catalystValue, inputRow)
    }
    checkEvaluationWithOptimization(expr, catalystValue, inputRow)
  }
/**
* Check the equality between result of expression and expected value, it will handle
* Array[Byte], Spread[Double], MapData and Row. Also check whether nullable in expression is
* true if result is null
*/
protected def checkResult(result: Any, expected: Any, expression: Expression): Boolean = {
checkResult(result, expected, expression.dataType, expression.nullable)
}
  // Type-directed equality between an evaluation result and the expected value.
  // Recurses structurally through structs, arrays and maps; handles the special
  // cases ordinary `==` gets wrong: byte arrays (reference equality), Spread
  // tolerances for doubles, NaN (NaN != NaN), and external Row vs InternalRow.
  // Also asserts the nullability contract: a null result requires exprNullable.
  protected def checkResult(
      result: Any,
      expected: Any,
      exprDataType: DataType,
      exprNullable: Boolean): Boolean = {
    // Compare against the UDT's underlying SQL type, not the UDT itself.
    val dataType = UserDefinedType.sqlType(exprDataType)
    // The result is null for a non-nullable expression
    assert(result != null || exprNullable, "exprNullable should be true if result is null")
    (result, expected) match {
      case (result: Array[Byte], expected: Array[Byte]) =>
        java.util.Arrays.equals(result, expected)
      case (result: Double, expected: Spread[Double @unchecked]) =>
        // Approximate comparison with the tolerance encoded in the Spread.
        expected.asInstanceOf[Spread[Double]].isWithin(result)
      case (result: InternalRow, expected: InternalRow) =>
        val st = dataType.asInstanceOf[StructType]
        assert(result.numFields == st.length && expected.numFields == st.length)
        st.zipWithIndex.forall { case (f, i) =>
          checkResult(
            result.get(i, f.dataType), expected.get(i, f.dataType), f.dataType, f.nullable)
        }
      case (result: ArrayData, expected: ArrayData) =>
        result.numElements == expected.numElements && {
          val ArrayType(et, cn) = dataType.asInstanceOf[ArrayType]
          var isSame = true
          var i = 0
          while (isSame && i < result.numElements) {
            isSame = checkResult(result.get(i, et), expected.get(i, et), et, cn)
            i += 1
          }
          isSame
        }
      case (result: MapData, expected: MapData) =>
        // Keys and values are compared positionally as arrays, so map entry
        // order matters here (see create_map, which preserves it).
        val MapType(kt, vt, vcn) = dataType.asInstanceOf[MapType]
        checkResult(result.keyArray, expected.keyArray, ArrayType(kt, false), false) &&
          checkResult(result.valueArray, expected.valueArray, ArrayType(vt, vcn), false)
      case (result: Double, expected: Double) =>
        // NaN must compare equal to NaN in tests, unlike IEEE semantics.
        if (expected.isNaN) result.isNaN else expected == result
      case (result: Float, expected: Float) =>
        if (expected.isNaN) result.isNaN else expected == result
      case (result: Row, expected: InternalRow) => result.toSeq == expected.toSeq(result.schema)
      case _ =>
        result == expected
    }
  }
/**
 * Overload of the three-argument `checkExceptionInExpression` that evaluates the
 * expression against an empty input row.
 */
protected def checkExceptionInExpression[T <: Throwable : ClassTag](
    expression: => Expression,
    expectedErrMsg: String): Unit = {
  checkExceptionInExpression[T](expression, InternalRow.empty, expectedErrMsg)
}
/**
 * Checks that evaluating `expression` against `inputRow` throws a `T` whose message
 * contains `expectedErrMsg`, in interpreted, mutable-projection and (when the data
 * type supports it) unsafe-projection modes.
 */
protected def checkExceptionInExpression[T <: Throwable : ClassTag](
    expression: => Expression,
    inputRow: InternalRow,
    expectedErrMsg: String): Unit = {
  // `eval` is by-name so the exception is raised inside the intercept block.
  def assertThrows(eval: => Unit, mode: String): Unit = {
    withClue(s"($mode)") {
      val errMsg = intercept[T](eval).getMessage
      if (!errMsg.contains(expectedErrMsg)) {
        fail(s"Expected error message is `$expectedErrMsg`, but `$errMsg` found")
      }
    }
  }
  // Defined as a method so every mode evaluates a fresh expression instance.
  def expr = prepareEvaluation(expression)
  assertThrows(evaluateWithoutCodegen(expr, inputRow), "non-codegen mode")
  assertThrows(evaluateWithMutableProjection(expr, inputRow), "codegen mode")
  if (GenerateUnsafeProjection.canSupport(expr.dataType)) {
    assertThrows(evaluateWithUnsafeProjection(expr, inputRow), "unsafe mode")
  }
}
/**
 * Evaluates `expression` in interpreted (non-codegen) mode and returns the raw result.
 */
protected def evaluateWithoutCodegen(
    expression: Expression, inputRow: InternalRow = EmptyRow): Any = {
  // Nondeterministic sub-expressions must be initialized (here with partition index 0)
  // before eval may be called.
  expression.foreach {
    case n: Nondeterministic => n.initialize(0)
    case _ =>
  }
  expression.eval(inputRow)
}
/**
 * Forces generation of the given projection and converts any failure into a test
 * failure that reports the offending expression and the full stack trace.
 */
protected def generateProject(
    generator: => Projection,
    expression: Expression): Projection = {
  try {
    generator
  } catch {
    // Intentionally catches Throwable (not just NonFatal): codegen compilation
    // failures can surface as Errors, and we want those reported as test failures.
    case e: Throwable =>
      fail(
        s"""
        |Code generation of $expression failed:
        |$e
        |${Utils.exceptionString(e)}
        """.stripMargin)
  }
}
/**
 * Asserts that interpreted (non-codegen) evaluation of `expression` produces `expected`.
 */
protected def checkEvaluationWithoutCodegen(
    expression: Expression,
    expected: Any,
    inputRow: InternalRow = EmptyRow): Unit = {
  val actual =
    try {
      evaluateWithoutCodegen(expression, inputRow)
    } catch {
      case e: Exception => fail(s"Exception evaluating $expression", e)
    }
  if (!checkResult(actual, expected, expression)) {
    // Only mention the input row when one was actually supplied.
    val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
    fail(s"Incorrect evaluation (codegen off): $expression, " +
      s"actual: $actual, " +
      s"expected: $expected$input")
  }
}
/**
 * Asserts that mutable-projection evaluation of `expression` produces `expected`,
 * under both the codegen-only and the interpreted codegen factory modes.
 */
protected def checkEvaluationWithMutableProjection(
    expression: => Expression,
    expected: Any,
    inputRow: InternalRow = EmptyRow): Unit = {
  Seq(CodegenObjectFactoryMode.CODEGEN_ONLY, CodegenObjectFactoryMode.NO_CODEGEN).foreach {
    fallbackMode =>
      withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> fallbackMode.toString) {
        val actual = evaluateWithMutableProjection(expression, inputRow)
        if (!checkResult(actual, expected, expression)) {
          val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
          fail(s"Incorrect evaluation (fallback mode = $fallbackMode): $expression, " +
            s"actual: $actual, expected: $expected$input")
        }
      }
  }
}
/**
 * Evaluates `expression` through a generated mutable projection and returns the
 * single projected value.
 */
protected def evaluateWithMutableProjection(
    expression: => Expression,
    inputRow: InternalRow = EmptyRow): Any = {
  val projection = generateProject(
    MutableProjection.create(Alias(expression, s"Optimized($expression)")() :: Nil),
    expression)
  projection.initialize(0)
  val row = projection(inputRow)
  row.get(0, expression.dataType)
}
/**
 * Asserts that unsafe-projection evaluation of `expression` produces `expected`,
 * under both the codegen-only and the interpreted codegen factory modes.
 *
 * The expression is projected twice into one row (see `evaluateWithUnsafeProjection`),
 * so the expected unsafe row is built with the value duplicated.
 */
protected def checkEvaluationWithUnsafeProjection(
    expression: Expression,
    expected: Any,
    inputRow: InternalRow = EmptyRow): Unit = {
  val modes = Seq(CodegenObjectFactoryMode.CODEGEN_ONLY, CodegenObjectFactoryMode.NO_CODEGEN)
  for (fallbackMode <- modes) {
    withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> fallbackMode.toString) {
      val unsafeRow = evaluateWithUnsafeProjection(expression, inputRow)
      val input = if (inputRow == EmptyRow) "" else s", input: $inputRow"
      val dataType = expression.dataType
      if (!checkResult(unsafeRow.get(0, dataType), expected, dataType, expression.nullable)) {
        // Bug fix: this message was a plain string literal, so "$fallbackMode" was
        // printed verbatim instead of the actual mode; it now interpolates correctly.
        fail(s"Incorrect evaluation in unsafe mode (fallback mode = $fallbackMode): " +
          s"$expression, actual: $unsafeRow, expected: $expected, " +
          s"dataType: $dataType, nullable: ${expression.nullable}")
      }
      if (expected == null) {
        // A null result must be flagged null in the unsafe row, not merely compare equal.
        if (!unsafeRow.isNullAt(0)) {
          val expectedRow = InternalRow(expected, expected)
          fail(s"Incorrect evaluation in unsafe mode (fallback mode = $fallbackMode): " +
            s"$expression, actual: $unsafeRow, expected: $expectedRow$input")
        }
      } else {
        // Compare full rows against an unsafe row built from the expected value,
        // duplicated to match the two aliases emitted by evaluateWithUnsafeProjection.
        val lit = InternalRow(expected, expected)
        val expectedRow = UnsafeProjection.create(Array(dataType, dataType)).apply(lit)
        if (unsafeRow != expectedRow) {
          fail(s"Incorrect evaluation in unsafe mode (fallback mode = $fallbackMode): " +
            s"$expression, actual: $unsafeRow, expected: $expectedRow$input")
        }
      }
    }
  }
}
/**
 * Evaluates `expression` through a generated unsafe projection and returns the raw
 * projected row. The row contains the value twice, once per alias (see below).
 */
protected def evaluateWithUnsafeProjection(
    expression: Expression,
    inputRow: InternalRow = EmptyRow): InternalRow = {
  // SPARK-16489 Explicitly doing code generation twice so code gen will fail if
  // some expression is reusing variable names across different instances.
  // This behavior is tested in ExpressionEvalHelperSuite.
  val plan = generateProject(
    UnsafeProjection.create(
      Alias(expression, s"Optimized($expression)1")() ::
        Alias(expression, s"Optimized($expression)2")() :: Nil),
    expression)
  plan.initialize(0)
  plan(inputRow)
}
/**
 * Runs the optimizer over a one-row plan wrapping `expression`, then checks that the
 * optimized expression still evaluates to `expected` in interpreted mode.
 */
protected def checkEvaluationWithOptimization(
    expression: Expression,
    expected: Any,
    inputRow: InternalRow = EmptyRow): Unit = {
  val aliased = Alias(expression, s"Optimized($expression)")()
  val optimized = SimpleTestOptimizer.execute(Project(aliased :: Nil, OneRowRelation()))
  checkEvaluationWithoutCodegen(optimized.expressions.head, expected, inputRow)
}
/**
 * Checks a floating-point expression against an expected Spread tolerance across the
 * interpreted, mutable-projection, optimized, mutable-codegen and unsafe-codegen
 * evaluation paths.
 */
protected def checkDoubleEvaluation(
    expression: => Expression,
    expected: Spread[Double],
    inputRow: InternalRow = EmptyRow): Unit = {
  checkEvaluationWithoutCodegen(expression, expected)
  checkEvaluationWithMutableProjection(expression, expected)
  checkEvaluationWithOptimization(expression, expected)

  // Mutable codegen projection. (Was previously a reused `var plan`/`var actual` pair;
  // distinct vals keep each evaluation path independent and immutable.)
  val mutablePlan = generateProject(
    GenerateMutableProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil),
    expression)
  mutablePlan.initialize(0)
  val mutableActual = mutablePlan(inputRow).get(0, expression.dataType)
  assert(checkResult(mutableActual, expected, expression))

  // Unsafe codegen projection, converted back to a safe row before reading the value.
  val unsafePlan = generateProject(
    GenerateUnsafeProjection.generate(Alias(expression, s"Optimized($expression)")() :: Nil),
    expression)
  unsafePlan.initialize(0)
  val ref = new BoundReference(0, expression.dataType, nullable = true)
  val unsafeActual =
    GenerateSafeProjection.generate(ref :: Nil)(unsafePlan(inputRow)).get(0, expression.dataType)
  assert(checkResult(unsafeActual, expected, expression))
}
/**
 * Test evaluation results between Interpreted mode and Codegen mode, making sure we have
 * consistent result regardless of the evaluation method we use.
 *
 * This method tests against unary expressions by feeding them arbitrary literals of
 * `dataType` generated by ScalaCheck.
 */
def checkConsistencyBetweenInterpretedAndCodegen(
    c: Expression => Expression,
    dataType: DataType): Unit = {
  forAll (LiteralGenerator.randomGen(dataType)) { (l: Literal) =>
    cmpInterpretWithCodegen(EmptyRow, c(l))
  }
}
/**
 * Test evaluation results between Interpreted mode and Codegen mode, making sure we have
 * consistent result regardless of the evaluation method we use.
 *
 * This method tests against binary expressions by feeding them arbitrary literals of
 * `dataType1` and `dataType2` generated by ScalaCheck.
 */
def checkConsistencyBetweenInterpretedAndCodegen(
    c: (Expression, Expression) => Expression,
    dataType1: DataType,
    dataType2: DataType): Unit = {
  forAll (
    LiteralGenerator.randomGen(dataType1),
    LiteralGenerator.randomGen(dataType2)
  ) { (l1: Literal, l2: Literal) =>
    cmpInterpretWithCodegen(EmptyRow, c(l1, l2))
  }
}
/**
 * Test evaluation results between Interpreted mode and Codegen mode, making sure we
 * have a consistent result regardless of the evaluation method we use. If an exception
 * is thrown, it checks that both modes throw the same exception.
 *
 * This method tests against binary expressions by feeding them arbitrary literals of
 * `dataType1` and `dataType2`.
 */
def checkConsistencyBetweenInterpretedAndCodegenAllowingException(
    c: (Expression, Expression) => Expression,
    dataType1: DataType,
    dataType2: DataType): Unit = {
  val gen1 = LiteralGenerator.randomGen(dataType1)
  val gen2 = LiteralGenerator.randomGen(dataType2)
  forAll (gen1, gen2) { (l1: Literal, l2: Literal) =>
    cmpInterpretWithCodegen(EmptyRow, c(l1, l2), exceptionAllowed = true)
  }
}
/**
 * Test evaluation results between Interpreted mode and Codegen mode, making sure we have
 * consistent result regardless of the evaluation method we use.
 *
 * This method tests against ternary expressions by feeding them arbitrary literals of
 * `dataType1`, `dataType2` and `dataType3` generated by ScalaCheck.
 */
def checkConsistencyBetweenInterpretedAndCodegen(
    c: (Expression, Expression, Expression) => Expression,
    dataType1: DataType,
    dataType2: DataType,
    dataType3: DataType): Unit = {
  forAll (
    LiteralGenerator.randomGen(dataType1),
    LiteralGenerator.randomGen(dataType2),
    LiteralGenerator.randomGen(dataType3)
  ) { (l1: Literal, l2: Literal, l3: Literal) =>
    cmpInterpretWithCodegen(EmptyRow, c(l1, l2, l3))
  }
}
/**
 * Test evaluation results between Interpreted mode and Codegen mode, making sure we have
 * consistent result regardless of the evaluation method we use.
 *
 * This method tests against expressions that take Seq[Expression] as input by feeding
 * them arbitrary-length sequences of arbitrary literals of `dataType`; sequences
 * shorter than `minNumElements` are discarded by `whenever`.
 */
def checkConsistencyBetweenInterpretedAndCodegen(
    c: Seq[Expression] => Expression,
    dataType: DataType,
    minNumElements: Int = 0): Unit = {
  forAll (Gen.listOf(LiteralGenerator.randomGen(dataType))) { (literals: Seq[Literal]) =>
    whenever(literals.size >= minNumElements) {
      cmpInterpretWithCodegen(EmptyRow, c(literals))
    }
  }
}
/**
 * Evaluates `expr` in both interpreted and codegen modes and fails unless the two
 * agree: either the same value (per `compareResults`), or — when `exceptionAllowed`
 * is true — an exception of the same class with the same message.
 */
def cmpInterpretWithCodegen(
    inputRow: InternalRow,
    expr: Expression,
    exceptionAllowed: Boolean = false): Unit = {
  // Interpreted evaluation: capture either the value or (if allowed) the exception.
  val (interpret, interpretExc) = try {
    (Some(evaluateWithoutCodegen(expr, inputRow)), None)
  } catch {
    case e: Exception => if (exceptionAllowed) {
      (None, Some(e))
    } else {
      fail(s"Exception evaluating $expr", e)
    }
  }
  // Codegen evaluation through a generated mutable projection.
  val plan = generateProject(
    GenerateMutableProjection.generate(Alias(expr, s"Optimized($expr)")() :: Nil),
    expr)
  val (codegen, codegenExc) = try {
    (Some(plan(inputRow).get(0, expr.dataType)), None)
  } catch {
    case e: Exception => if (exceptionAllowed) {
      (None, Some(e))
    } else {
      fail(s"Exception evaluating $expr", e)
    }
  }
  if (interpret.isDefined && codegen.isDefined && !compareResults(interpret.get, codegen.get)) {
    fail(s"Incorrect evaluation: $expr, interpret: ${interpret.get}, codegen: ${codegen.get}")
  } else if (interpretExc.isDefined && codegenExc.isEmpty) {
    // Fixed typo: the message previously read "interpet".
    fail(s"Incorrect evaluation: $expr, interpret threw exception ${interpretExc.get}")
  } else if (interpretExc.isEmpty && codegenExc.isDefined) {
    fail(s"Incorrect evaluation: $expr, codegen threw exception ${codegenExc.get}")
  } else if (interpretExc.isDefined && codegenExc.isDefined
      && !compareExceptions(interpretExc.get, codegenExc.get)) {
    fail(s"Different exception evaluating: $expr, " +
      s"interpret: ${interpretExc.get}, codegen: ${codegenExc.get}")
  }
}
/**
 * Checks the equality between two exceptions. Returns true iff the two exceptions are
 * instances of exactly the same class and they have the same message (subclasses are
 * not considered equal).
 */
private[this] def compareExceptions(e1: Exception, e2: Exception): Boolean = {
  e1.getClass == e2.getClass && e1.getMessage == e2.getMessage
}
/**
 * Check the equality between the result of an expression and the expected value.
 * Handles Array[Byte] (content equality), NaN-equals-NaN semantics for Double and
 * Float, and relative-tolerance comparison for Doubles; all other values fall back
 * to plain ==.
 */
private[this] def compareResults(result: Any, expected: Any): Boolean = {
  (result, expected) match {
    case (result: Array[Byte], expected: Array[Byte]) =>
      java.util.Arrays.equals(result, expected)
    case (result: Double, expected: Double) if result.isNaN && expected.isNaN =>
      true
    case (result: Double, expected: Double) =>
      relativeErrorComparison(result, expected)
    case (result: Float, expected: Float) if result.isNaN && expected.isNaN =>
      true
    case _ => result == expected
  }
}
/**
 * Private helper function for comparing two values using relative tolerance.
 * Note that if x or y is extremely close to zero, i.e., smaller than Double.MinPositiveValue,
 * the relative tolerance is meaningless, so the exception will be raised to warn users.
 *
 * Two values compare equal when they are identical, or when their absolute difference
 * is below `eps` times the smaller of their magnitudes.
 *
 * TODO: this duplicates functions in spark.ml.util.TestingUtils.relTol and
 * spark.mllib.util.TestingUtils.relTol, they could be moved to common utils sub module for the
 * whole spark project which does not depend on other modules. See more detail in discussion:
 * https://github.com/apache/spark/pull/15059#issuecomment-246940444
 */
private def relativeErrorComparison(x: Double, y: Double, eps: Double = 1E-8): Boolean = {
  val absX = math.abs(x)
  val absY = math.abs(y)
  val diff = math.abs(x - y)
  if (x == y) {
    true
  } else if (absX < Double.MinPositiveValue || absY < Double.MinPositiveValue) {
    throw new TestFailedException(
      s"$x or $y is extremely close to zero, so the relative tolerance is meaningless.", 0)
  } else {
    diff < eps * math.min(absX, absY)
  }
}
/**
 * Registers the test `name` twice, once per codegen factory mode, running `f` with
 * the codegen factory mode configuration pinned to that mode.
 */
def testBothCodegenAndInterpreted(name: String)(f: => Unit): Unit = {
  Seq(CodegenObjectFactoryMode.CODEGEN_ONLY, CodegenObjectFactoryMode.NO_CODEGEN).foreach {
    fallbackMode =>
      test(s"$name with $fallbackMode") {
        withSQLConf(SQLConf.CODEGEN_FACTORY_MODE.key -> fallbackMode.toString) {
          f
        }
      }
  }
}
}
| rednaxelafx/apache-spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ExpressionEvalHelper.scala | Scala | apache-2.0 | 21,342 |
package nl.iljabooij.garmintrainer.importer
import java.io.{File,InputStream}
import scala.collection.jcl.Conversions._
import org.scalatest.junit.{AssertionsForJUnit,JUnit3Suite}
import org.scalatest.mock.MockitoSugar
import org.junit.{Before,Test}
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.hamcrest.core.IsInstanceOf
import com.google.inject.Provider
import nl.iljabooij.garmintrainer.model.{Activity,ApplicationState}
import nl.iljabooij.garmintrainer.parser.{TcxParser,ParseException}
/**
 * Tests for TcxImporterImpl: missing files, successful imports, and parser failures.
 */
class TcxImporterScalaTest extends JUnit3Suite with AssertionsForJUnit with MockitoSugar {
  var tcxImporter: TcxImporter = null
  var applicationState: ApplicationState = null
  var tcxParser: TcxParser = null
  var tcxParserProvider: Provider[TcxParser] = null

  /** Creates fresh mocks and a new importer before every test. */
  override def setUp() {
    applicationState = mock[ApplicationState]
    tcxParser = mock[TcxParser]
    tcxParserProvider = mock[Provider[TcxParser]]
    when(tcxParserProvider.get()).thenReturn(tcxParser)
    tcxImporter = new TcxImporterImpl(applicationState, tcxParserProvider)
  }

  /** Importing a non-existent file must fail with a TcxImportException. */
  def testFileDoesNotExist() {
    intercept[TcxImportException] {
      tcxImporter.importTcx(new File("/ThisFileDoesNotExist"))
    }
  }

  /** A successful parse publishes the first parsed activity to the application state. */
  def testImportActivity() {
    val uri = getClass().getResource("/sample.tcx").toURI()
    val file = new File(uri)
    val activities = List(mock[Activity])
    when(tcxParser.parse(any(classOf[InputStream]))).thenReturn(activities)
    tcxImporter.importTcx(file)
    verify(applicationState, times(1)).currentActivity_=(Some(activities.first))
    verify(tcxParser, times(1)).parse(any(classOf[InputStream]))
  }

  /**
   * Test what happens when the parser throws a ParseException: the importer should
   * wrap it in a TcxImportException.
   */
  def testWithParserThrowingException() {
    val uri = getClass().getResource("/sample.tcx").toURI()
    val file = new File(uri)
    // No stubbed return value needed here (the previous unused `activities` list was
    // removed): the parser is stubbed to throw instead.
    when(tcxParser.parse(any(classOf[InputStream]))).thenThrow(
      new ParseException("test exception"))
    intercept[TcxImportException] {
      tcxImporter.importTcx(file)
    }
  }
}
| chmandrade/garmintrainer | src/test/scala/nl/iljabooij/garmintrainer/importer/TcxImporterScalaTest.scala | Scala | gpl-3.0 | 2,134 |
package com.sksamuel.elastic4s
import org.scalatest.{Matchers, WordSpec}
/** Verifies that the `show` typeclass renders a count request as its JSON query body. */
class CountShowTest extends WordSpec with Matchers {
  import ElasticDsl._
  "Search" should {
    "have a show typeclass implementation" in {
      // A count request combining a bool query (must + should clauses) with
      // routing/preference/minScore request-level options.
      val request = {
        count from "gameofthrones" / "characters" query {
          bool {
            should {
              termQuery("name", "snow")
            }.must {
              matchQuery("location", "the wall")
            }
          }
        } routing "routing" preference "prefs" minScore 1.4
      }
      // Only the query body is rendered by `show`; routing, preference and minScore
      // are transport-level options and do not appear in the JSON.
      request.show shouldBe """{
        | "query" : {
        | "bool" : {
        | "must" : {
        | "match" : {
        | "location" : {
        | "query" : "the wall",
        | "type" : "boolean"
        | }
        | }
        | },
        | "should" : {
        | "term" : {
        | "name" : "snow"
        | }
        | }
        | }
        | }
        |}""".stripMargin
    }
  }
}
| tototoshi/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/CountShowTest.scala | Scala | apache-2.0 | 1,441 |
package mesosphere.mesos
import mesosphere.marathon.MarathonTestHelper.Implicits._
import mesosphere.marathon.Protos.Constraint
import mesosphere.marathon.Protos.Constraint.Operator
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.state.AppDefinition
import mesosphere.marathon.{ MarathonSpec, MarathonTestHelper }
import mesosphere.mesos.protos.{ FrameworkID, OfferID, SlaveID, TextAttribute }
import org.apache.mesos.Protos.{ Attribute, Offer }
import org.scalatest.{ GivenWhenThen, Matchers }
import scala.collection.immutable.Seq
import scala.collection.JavaConverters._
import scala.util.Random
/**
 * Tests for Constraints: task selection during scale-down, and offer matching against
 * UNIQUE, CLUSTER, GROUP_BY, LIKE and UNLIKE constraints.
 */
class ConstraintsTest extends MarathonSpec with GivenWhenThen with Matchers {

  import mesosphere.mesos.protos.Implicits._

  test("Select tasks to kill for a single group by works") {
    Given("app with hostname group_by and 20 tasks even distributed on 2 hosts")
    val app = AppDefinition(constraints = Set(makeConstraint("hostname", Operator.GROUP_BY, "")))
    val tasks = 0.to(19).map(num => makeTaskWithHost(s"$num", s"srv${num % 2}"))

    When("10 tasks should be selected to kill")
    val result = Constraints.selectTasksToKill(app, tasks, 10)

    Then("10 tasks got selected and evenly distributed")
    result should have size 10
    // Tasks with odd ids live on srv1, even ids on srv0; each group must lose 5.
    val dist = result.groupBy(_.taskId.idString.toInt % 2 == 1)
    dist should have size 2
    dist.values.head should have size 5
  }

  test("Select only tasks to kill for an unbalanced distribution") {
    Given("app with hostname group_by and 30 tasks uneven distributed on 2 hosts")
    val app = AppDefinition(constraints = Set(makeConstraint("hostname", Operator.GROUP_BY, "")))
    val tasks = 0.to(19).map(num => makeTaskWithHost(s"$num", "srv1")) ++
      20.to(29).map(num => makeTaskWithHost(s"$num", "srv2"))

    When("10 tasks should be selected to kill")
    val result = Constraints.selectTasksToKill(app, tasks, 10)

    Then("All 10 tasks are from srv1")
    result should have size 10
    result.forall(_.agentInfo.host == "srv1") should be(true)
  }

  test("Select tasks to kill for multiple group by works") {
    Given("app with 2 group_by distributions and 40 tasks even distributed")
    val app = AppDefinition(constraints = Set(
      makeConstraint("rack", Operator.GROUP_BY, ""),
      makeConstraint("color", Operator.GROUP_BY, "")))
    val tasks =
      0.to(9).map(num => makeSampleTask(s"$num", Map("rack" -> "rack-1", "color" -> "blue"))) ++
        10.to(19).map(num => makeSampleTask(s"$num", Map("rack" -> "rack-1", "color" -> "green"))) ++
        20.to(29).map(num => makeSampleTask(s"$num", Map("rack" -> "rack-2", "color" -> "blue"))) ++
        30.to(39).map(num => makeSampleTask(s"$num", Map("rack" -> "rack-2", "color" -> "green")))

    When("20 tasks should be selected to kill")
    val result = Constraints.selectTasksToKill(app, tasks, 20)

    Then("20 tasks got selected and evenly distributed")
    result should have size 20
    result.count(_.agentInfo.attributes.exists(_.getText.getValue == "rack-1")) should be(10)
    result.count(_.agentInfo.attributes.exists(_.getText.getValue == "rack-2")) should be(10)
    result.count(_.agentInfo.attributes.exists(_.getText.getValue == "blue")) should be(10)
    result.count(_.agentInfo.attributes.exists(_.getText.getValue == "green")) should be(10)
  }

  test("Does not select any task without constraint") {
    Given("app with hostname group_by and 10 tasks even distributed on 5 hosts")
    val app = AppDefinition()
    val tasks = 0.to(9).map(num => makeSampleTask(s"$num", Map("rack" -> "rack-1", "color" -> "blue")))

    When("10 tasks should be selected to kill")
    val result = Constraints.selectTasksToKill(app, tasks, 5)

    Then("0 tasks got selected")
    result should have size 0
  }

  test("UniqueHostConstraint") {
    val task1_host1 = makeTaskWithHost("task1", "host1")
    val task2_host2 = makeTaskWithHost("task2", "host2")
    val task3_host3 = makeTaskWithHost("task3", "host3")
    val attributes: Set[Attribute] = Set()
    val firstTask = Set()

    val hostnameUnique = makeConstraint("hostname", Operator.UNIQUE, "")

    // The very first task always satisfies a CLUSTER constraint without a value.
    val firstTaskOnHost = Constraints.meetsConstraint(
      firstTask,
      makeOffer("foohost", attributes),
      makeConstraint("hostname", Operator.CLUSTER, ""))
    assert(firstTaskOnHost, "Should meet first task constraint.")

    val wrongHostName = Constraints.meetsConstraint(
      firstTask,
      makeOffer("wrong.com", attributes),
      makeConstraint("hostname", Operator.CLUSTER, "right.com"))
    assert(!wrongHostName, "Should not accept the wrong hostname.")

    val differentHosts = Set(task1_host1, task2_host2, task3_host3)
    val differentHostsDifferentTasks = Constraints.meetsConstraint(
      differentHosts,
      makeOffer("host4", attributes),
      hostnameUnique)
    assert(differentHostsDifferentTasks, "Should place host in array")

    val reusingOneHost = Constraints.meetsConstraint(
      differentHosts,
      makeOffer("host2", attributes),
      hostnameUnique)
    assert(!reusingOneHost, "Should not place host")

    val firstOfferFirstTaskInstance = Constraints.meetsConstraint(
      firstTask,
      makeOffer("host2", attributes),
      hostnameUnique)
    assert(firstOfferFirstTaskInstance, "Should not place host")
  }

  test("RackConstraints") {
    val task1_rack1 = makeSampleTask("task1", Map("rackid" -> "rack-1"))
    val task2_rack1 = makeSampleTask("task2", Map("rackid" -> "rack-1"))
    val task3_rack2 = makeSampleTask("task3", Map("rackid" -> "rack-2"))

    val freshRack = Set()
    val sameRack = Set(task1_rack1, task2_rack1)
    val uniqueRack = Set(task1_rack1, task3_rack2)

    val clusterByRackId = makeConstraint("rackid", Constraint.Operator.CLUSTER, "")
    val uniqueRackId = makeConstraint("rackid", Constraint.Operator.UNIQUE, "")

    val clusterFreshRackMet = Constraints.meetsConstraint(
      freshRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      clusterByRackId)
    assert(clusterFreshRackMet, "Should be able to schedule in fresh rack.")

    val clusterRackMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      clusterByRackId)
    assert(clusterRackMet, "Should meet clustered-in-rack constraints.")

    val clusterRackNotMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-2"))),
      clusterByRackId)
    assert(!clusterRackNotMet, "Should not meet cluster constraint.")

    // An offer without the attribute can never satisfy an attribute constraint.
    val clusterNoAttributeNotMet = Constraints.meetsConstraint(
      freshRack,
      makeOffer("foohost", Set()),
      clusterByRackId)
    assert(!clusterNoAttributeNotMet, "Should not meet cluster constraint.")

    val uniqueFreshRackMet = Constraints.meetsConstraint(
      freshRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      uniqueRackId)
    assert(uniqueFreshRackMet, "Should meet unique constraint for fresh rack.")

    val uniqueRackMet = Constraints.meetsConstraint(
      uniqueRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-3"))),
      uniqueRackId)
    assert(uniqueRackMet, "Should meet unique constraint for rack")

    val uniqueRackNotMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      uniqueRackId)
    assert(!uniqueRackNotMet, "Should not meet unique constraint for rack.")

    val uniqueNoAttributeNotMet = Constraints.meetsConstraint(
      freshRack,
      makeOffer("foohost", Set()),
      uniqueRackId)
    assert(!uniqueNoAttributeNotMet, "Should not meet unique constraint.")
  }

  test("AttributesLikeByConstraints") {
    val task1_rack1 = makeSampleTask("task1", Map("foo" -> "bar"))
    val task2_rack1 = makeSampleTask("task2", Map("jdk" -> "7"))
    val freshRack = Set(task1_rack1, task2_rack1)
    val jdk7Constraint = makeConstraint("jdk", Constraint.Operator.LIKE, "7")

    val likeVersionNotMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set(TextAttribute("jdk", "6"))), // slave attributes
      jdk7Constraint)
    assert(!likeVersionNotMet, "Should not meet like-version constraints.")

    val likeVersionMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set(TextAttribute("jdk", "7"))), // slave attributes
      jdk7Constraint)
    assert(likeVersionMet, "Should meet like-version constraints.")

    val likeNoAttributeNotMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set()), // no slave attribute
      jdk7Constraint)
    assert(!likeNoAttributeNotMet, "Should not meet like-no-attribute constraints.")
  }

  test("AttributesUnlikeByConstraints") {
    val task1_rack1 = makeSampleTask("task1", Map("foo" -> "bar"))
    val task2_rack1 = makeSampleTask("task2", Map("jdk" -> "7"))
    val freshRack = Set(task1_rack1, task2_rack1)
    val jdk7Constraint = makeConstraint("jdk", Constraint.Operator.UNLIKE, "7")

    val unlikeVersionMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set(TextAttribute("jdk", "6"))), // slave attributes
      jdk7Constraint)
    assert(unlikeVersionMet, "Should meet unlike-version constraints.")

    val unlikeVersionNotMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set(TextAttribute("jdk", "7"))), // slave attributes
      jdk7Constraint)
    assert(!unlikeVersionNotMet, "Should not meet unlike-version constraints.")

    // UNLIKE is satisfied when the attribute is absent altogether.
    val unlikeNoAttributeMet = Constraints.meetsConstraint(
      freshRack, // list of tasks register in the cluster
      makeOffer("foohost", Set()), // no slave attribute
      jdk7Constraint)
    assert(unlikeNoAttributeMet, "Should meet unlike-no-attribute constraints.")
  }

  test("RackGroupedByConstraints") {
    // Note: previously declared task4/task5 fixtures were never used and are removed.
    val task1_rack1 = makeSampleTask("task1", Map("rackid" -> "rack-1"))
    val task2_rack1 = makeSampleTask("task2", Map("rackid" -> "rack-1"))
    val task3_rack2 = makeSampleTask("task3", Map("rackid" -> "rack-2"))
    var sameRack = Iterable.empty[Task]

    // GROUP_BY with value "2": spread tasks evenly over (at least) 2 rack values.
    val group2ByRack = makeConstraint("rackid", Constraint.Operator.GROUP_BY, "2")

    val groupByFreshRackMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      group2ByRack)
    assert(groupByFreshRackMet, "Should be able to schedule in fresh rack.")

    sameRack ++= Set(task1_rack1)

    val groupByRackMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      group2ByRack)
    assert(!groupByRackMet, "Should not meet group-by-rack constraints.")

    val groupByRackMet2 = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-2"))),
      group2ByRack)
    assert(groupByRackMet2, "Should meet group-by-rack constraint.")

    sameRack ++= Set(task3_rack2)

    val groupByRackMet3 = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      group2ByRack)
    assert(groupByRackMet3, "Should meet group-by-rack constraints.")

    sameRack ++= Set(task2_rack1)

    val groupByRackNotMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      group2ByRack)
    assert(!groupByRackNotMet, "Should not meet group-by-rack constraint.")

    val groupByNoAttributeNotMet = Constraints.meetsConstraint(
      sameRack,
      makeOffer("foohost", Set()),
      group2ByRack)
    assert(!groupByNoAttributeNotMet, "Should not meet group-by-no-attribute constraints.")
  }

  test("RackGroupedByConstraints2") {
    // Note: a previously declared task5 fixture was never used and is removed.
    val task1_rack1 = makeSampleTask("task1", Map("rackid" -> "rack-1"))
    val task2_rack2 = makeSampleTask("task2", Map("rackid" -> "rack-2"))
    val task3_rack3 = makeSampleTask("task3", Map("rackid" -> "rack-3"))
    val task4_rack1 = makeSampleTask("task4", Map("rackid" -> "rack-1"))
    var groupRack = Iterable.empty[Task]

    // GROUP_BY with value "3": spread tasks evenly over (at least) 3 rack values.
    val groupByRack = makeConstraint("rackid", Constraint.Operator.GROUP_BY, "3")

    val clusterFreshRackMet = Constraints.meetsConstraint(
      groupRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      groupByRack)
    assert(clusterFreshRackMet, "Should be able to schedule in fresh rack.")

    groupRack ++= Set(task1_rack1)

    val clusterRackMet1 = Constraints.meetsConstraint(
      groupRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-2"))),
      groupByRack)
    assert(clusterRackMet1, "Should meet clustered-in-rack constraints.")

    groupRack ++= Set(task2_rack2)

    val clusterRackMet2 = Constraints.meetsConstraint(
      groupRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-3"))),
      groupByRack)
    assert(clusterRackMet2, "Should meet clustered-in-rack constraints.")

    groupRack ++= Set(task3_rack3)

    val clusterRackMet3 = Constraints.meetsConstraint(
      groupRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-1"))),
      groupByRack)
    assert(clusterRackMet3, "Should meet clustered-in-rack constraints.")

    groupRack ++= Set(task4_rack1)

    val clusterRackMet4 = Constraints.meetsConstraint(
      groupRack,
      makeOffer("foohost", Set(TextAttribute("foo", "bar"), TextAttribute("rackid", "rack-2"))),
      groupByRack)
    assert(clusterRackMet4, "Should meet clustered-in-rack constraints.")
  }

  test("HostnameGroupedByConstraints") {
    val task1_host1 = makeTaskWithHost("task1", "host1")
    val task2_host1 = makeTaskWithHost("task2", "host1")
    val task3_host2 = makeTaskWithHost("task3", "host2")
    val task4_host3 = makeTaskWithHost("task4", "host3")
    var groupHost = Iterable.empty[Task]
    val attributes: Set[Attribute] = Set()

    val groupByHost = makeConstraint("hostname", Constraint.Operator.GROUP_BY, "2")

    val groupByFreshHostMet = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host1", attributes),
      groupByHost)
    assert(groupByFreshHostMet, "Should be able to schedule in fresh host.")

    groupHost ++= Set(task1_host1)

    val groupByHostMet = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host1", attributes),
      groupByHost)
    assert(!groupByHostMet, "Should not meet group-by-host constraint.")

    val groupByHostMet2 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host2", attributes),
      groupByHost)
    assert(groupByHostMet2, "Should meet group-by-host constraint.")

    groupHost ++= Set(task3_host2)

    val groupByHostMet3 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host1", attributes),
      groupByHost)
    assert(groupByHostMet3, "Should meet group-by-host constraint.")

    groupHost ++= Set(task2_host1)

    val groupByHostNotMet = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host1", attributes),
      groupByHost)
    assert(!groupByHostNotMet, "Should not meet group-by-host constraint.")

    val groupByHostMet4 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host3", attributes),
      groupByHost)
    assert(groupByHostMet4, "Should meet group-by-host constraint.")

    groupHost ++= Set(task4_host3)

    val groupByHostNotMet2 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host1", attributes),
      groupByHost)
    assert(!groupByHostNotMet2, "Should not meet group-by-host constraint.")

    val groupByHostMet5 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host3", attributes),
      groupByHost)
    assert(groupByHostMet5, "Should meet group-by-host constraint.")

    val groupByHostMet6 = Constraints.meetsConstraint(
      groupHost,
      makeOffer("host2", attributes),
      groupByHost)
    assert(groupByHostMet6, "Should meet group-by-host constraint.")
  }

  /** Builds a staged task carrying the given text attributes and a dummy host port. */
  def makeSampleTask(id: String, attrs: Map[String, String]) = {
    val attributes = attrs.map { case (name, value) => TextAttribute(name, value): Attribute }
    MarathonTestHelper.stagedTask(id)
      .withAgentInfo(_.copy(attributes = attributes))
      .withHostPorts(Seq(999))
  }

  /** Builds a Mesos offer from `hostname` with the given attributes and random ids. */
  def makeOffer(hostname: String, attributes: Iterable[Attribute]) = {
    Offer.newBuilder
      .setId(OfferID(Random.nextString(9)))
      .setSlaveId(SlaveID(Random.nextString(9)))
      .setFrameworkId(FrameworkID(Random.nextString(9)))
      .setHostname(hostname)
      .addAllAttributes(attributes.asJava)
      .build
  }

  /** Builds a running task pinned to `host` with a dummy host port. */
  def makeTaskWithHost(id: String, host: String) = {
    MarathonTestHelper
      .runningTask(id)
      .withAgentInfo(_.copy(host = host))
      .withHostPorts(Seq(999))
  }

  /** Builds a constraint protobuf for the given field, operator and value. */
  def makeConstraint(field: String, operator: Operator, value: String) = {
    Constraint.newBuilder
      .setField(field)
      .setOperator(operator)
      .setValue(value)
      .build
  }
}
| ss75710541/marathon | src/test/scala/mesosphere/mesos/ConstraintsTest.scala | Scala | apache-2.0 | 17,840 |
package gitbucket.core.controller
import org.scalatra.MovedPermanently
// Concrete controller registered with the servlet container; all behaviour lives in the base trait.
class PreProcessController extends PreProcessControllerBase
trait PreProcessControllerBase extends ControllerBase {

  /**
   * Redirects Git smart-HTTP discovery requests to the internal "/git" servlet path,
   * so Git clients can use GitHub compatible URLs:
   *
   * <ul>
   *   <li>git clone http://localhost:8080/owner/repo</li>
   *   <li>git clone http://localhost:8080/owner/repo.git</li>
   * </ul>
   *
   * @see https://git-scm.com/book/en/v2/Git-Internals-Transfer-Protocols
   */
  get("/*/*/info/refs") {
    val queryString = Option(request.getQueryString)
    val query = queryString.fold("")("?" + _)
    halt(MovedPermanently(baseUrl + "/git" + request.getRequestURI + query))
  }

  /**
   * Filter requests from anonymous users.
   *
   * When anonymous access is disabled, every request must be authenticated except
   * a small set of public paths: assets, sign-in, registration and the Git
   * discovery endpoint handled above.
   */
  get(!context.settings.allowAnonymousAccess, context.loginAccount.isEmpty) {
    val path = context.currentPath
    val isPublicPath = path.startsWith("/assets") || path.startsWith("/signin") ||
      path.startsWith("/register") || path.endsWith("/info/refs")
    if (isPublicPath) pass() else Unauthorized()
  }
}
| gencer/gitbucket | src/main/scala/gitbucket/core/controller/PreProcessController.scala | Scala | apache-2.0 | 1,192 |
package example
class EtaExpansion/*<-example::EtaExpansion#*/ {
Some/*->scala::Some.*//*->scala::Some.apply().*/(1).map/*->scala::Option#map().*/(identity/*->scala::Predef.identity().*//*->local0*/)
List/*->scala::package.List.*//*->scala::collection::IterableFactory#apply().*/(1).foldLeft/*->scala::collection::LinearSeqOps#foldLeft().*/("")(_ +/*->java::lang::String#`+`().*/ _)
}
| som-snytt/dotty | tests/semanticdb/expect/EtaExpansion.expect.scala | Scala | apache-2.0 | 390 |
package com.sksamuel.scapegoat.inspections.unneccesary
import scala.reflect.internal.Flags
import com.sksamuel.scapegoat._
/**
* @author Stephen Samuel
*/
class UnusedMethodParameter
    extends Inspection(
      text = "Unused parameter",
      defaultLevel = Levels.Warning,
      description = "Checks for unused method parameters.",
      explanation = "Unused constructor or method parameters should be removed."
    ) {

  def inspector(context: InspectionContext): Inspector =
    new Inspector(context) {
      override def postTyperTraverser =
        new context.Traverser {

          import context.global._
          import definitions._

          // True when any Ident inside `tree` refers to a term named `paramName`.
          private def usesParameter(paramName: String, tree: Tree): Boolean = {
            tree match {
              case Ident(TermName(name)) =>
                name == paramName
              case _ =>
                tree.children.exists(usesParameter(paramName, _))
            }
          }

          // True when `tree` selects `this.<paramName>`, i.e. the constructor
          // parameter was promoted to a field and that field is used.
          private def usesField(paramName: String, tree: Tree): Boolean = {
            tree match {
              case Select(This(_), TermName(name)) =>
                // FIXME: why is "trim" needed here? Is that a scalac bug?
                // A test will fail if you take this out!
                name.trim == paramName
              case _ =>
                tree.children.exists(usesField(paramName, _))
            }
          }

          // Parameters annotated with `scala.annotation.unused` are deliberately
          // unused and must not be reported.
          private def isParameterExcused(param: ValDef): Boolean =
            param.symbol.annotations.exists(_.atp.toString == "scala.annotation.unused")

          /**
           * For constructor params, some params become vals / fields of the class:
           * 1. all params in the first argument list for case classes
           * 2. all params marked "val"
           *
           * In both cases, by the time we see the tree, a "def x = this.x" method
           * will have been added by the compiler, so "usesField" will notice and
           * not mark the param as unused.
           */
          private def checkConstructor(
            vparamss: List[List[ValDef]],
            constructorBody: Tree,
            classBody: Tree
          ): Unit = {
            for {
              vparams <- vparamss
              vparam <- vparams
            } {
              val paramName = vparam.name.toString
              // A constructor param is used if it appears in the constructor body
              // or is read through the field the compiler generated for it.
              if (!usesParameter(paramName, constructorBody) && !usesField(paramName, classBody))
                context.warn(vparam.pos, self, s"Unused constructor parameter (${vparam.name}).")
            }
          }

          override def inspect(tree: Tree): Unit = {
            tree match {
              // ignore traits, quite often you define a method in a trait with default impl that does nothing
              case ClassDef(_, _, _, _) if tree.symbol.isTrait =>
              case ClassDef(mods, _, _, _) if mods.hasAbstractFlag =>
              // Concrete class: check its constructor parameters against both
              // the constructor body and the full class body (field accesses).
              case ClassDef(_, _, _, classBody @ Template(_, _, classTopLevelStmts)) =>
                classTopLevelStmts.foreach {
                  case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, constructorBody) =>
                    checkConstructor(vparamss, constructorBody, classBody)
                  case DefDef(_, _, _, vparamss, _, constructorBody)
                      if tree.symbol != null && tree.symbol.isConstructor =>
                    checkConstructor(vparamss, constructorBody, classBody)
                  case _ =>
                }
                continue(tree)
              // ignore abstract methods obv.
              case DefDef(mods, _, _, _, _, _) if mods.hasFlag(Flag.ABSTRACT) =>
              case d @ DefDef(_, _, _, _, _, _) if d.symbol != null && d.symbol.isAbstract =>
              // ignore constructors, they're handled above
              case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
              case DefDef(_, _, _, _, _, _) if tree.symbol != null && tree.symbol.isConstructor =>
              // ignore methods that just throw, e.g. "???"
              case DefDef(_, _, _, _, tpt, _) if tpt.tpe =:= NothingTpe =>
              // ignore methods that just throw, e.g. "???" or "js.native"
              case DefDef(_, _, _, _, _, rhs) if rhs.tpe =:= NothingTpe =>
              // ignore overridden methods, the parameter might be used by other classes
              case DefDef(mods, _, _, _, _, _)
                  if mods.isOverride ||
                    mods.hasFlag(Flags.OVERRIDE) ||
                    (tree.symbol != null && (tree.symbol.isAnyOverride || tree.symbol.isOverridingSymbol)) =>
              // ignore main method: its signature is fixed by the JVM entry-point contract
              case DefDef(_, name, _, List(List(param)), tpt, _)
                  if name.toString == "main" &&
                    param.name.toString == "args" &&
                    tpt.tpe =:= UnitTpe &&
                    param.tpt.tpe =:= typeOf[Array[String]] =>
              // Regular method: warn for each parameter never referenced in the body.
              case DefDef(_, _, _, vparamss, _, rhs) =>
                for {
                  vparams <- vparamss
                  vparam <- vparams
                } if (!isParameterExcused(vparam) && !usesParameter(vparam.name.toString, rhs)) {
                  context.warn(tree.pos, self, s"Unused method parameter ($vparam).")
                }
              case _ => continue(tree)
            }
          }
        }
    }
}
| sksamuel/scalac-scapegoat-plugin | src/main/scala/com/sksamuel/scapegoat/inspections/unneccesary/UnusedMethodParameter.scala | Scala | apache-2.0 | 5,328 |
import sbt._
import Keys._
import java.io.{FileInputStream, FileOutputStream}
// Shared build configuration: project coordinates, Scala version and
// Sonatype publishing setup.
object BuildSettings {
  val sonatypeRepo = "http://oss.sonatype.org/service/local/staging/deploy/maven2"

  // Publish credentials come from the build.publish.user / build.publish.password
  // system properties; when either is missing, no credentials are configured.
  lazy val credentialsSetting = credentials ++=
    (Seq("build.publish.user", "build.publish.password").map(k => Option(System.getProperty(k))) match {
      case Seq(Some(user), Some(pass)) =>
        Seq(Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", user, pass))
      case _ =>
        Seq.empty[Credentials]
    })

  val buildSettings = Defaults.defaultSettings ++ Seq (
    organization := "com.linkedin",
    version := "0.6.65",
    scalaVersion := "2.8.1",
    credentialsSetting,
    publishArtifact in (Compile, packageDoc) := false,
    // SNAPSHOT versions go to the snapshots repository, releases to staging.
    publishTo <<= (version) { version: String =>
      if (version.trim.endsWith("SNAPSHOT"))
        Some("Sonatype Nexus Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots")
      else
        Some("Sonatype Nexus Repository Manager" at "https://oss.sonatype.org/service/local/staging/deploy/maven2")
    }
  )
}
// Extra artifact repositories shared by all norbert subprojects.
object Resolvers {
  val jbossRepo = "JBoss Maven 2 Repository" at "http://repository.jboss.org/nexus/content/groups/public/"
  val norbertResolvers = Seq(jbossRepo)
}
// Dependencies for the cluster subproject (ZooKeeper-backed clustering).
object ClusterDependencies {
  val ZOOKEEPER_VER = "3.3.0"
  val PROTOBUF_VER = "2.4.0a"
  val LOG4J_VER = "1.2.16"
  val SPECS_VER = "1.6.9"
  val MOCKITO_VER = "1.8.4"
  val CGLIB_VER = "2.1_3"
  val OBJENESIS = "1.0"
  val JUNIT_VER = "4.8.1"

  val zookeeper = "org.apache.zookeeper" % "zookeeper" % ZOOKEEPER_VER
  val protobuf = "com.google.protobuf" % "protobuf-java" % PROTOBUF_VER
  val log4j = "log4j" % "log4j" % LOG4J_VER
  val specs = "org.scala-tools.testing" %% "specs" % SPECS_VER % "test"
  val mockito = "org.mockito" % "mockito-all" % MOCKITO_VER % "test"
  val cglib = "cglib" % "cglib" % CGLIB_VER % "test"
  val objenesis = "org.objenesis" % "objenesis" % OBJENESIS % "test"
  val junit = "junit" % "junit" % JUNIT_VER % "test"

  // FIX: the list previously referenced `xml`, which is not defined in this
  // object and does not resolve to a module id; only declared modules remain.
  val deps = Seq(zookeeper, protobuf, log4j, specs, mockito, cglib, objenesis, junit)
}
// Dependencies for the network subproject (Netty transport + SLF4J logging).
object NetworkDependencies {
  val NETTY_VER = "3.7.0.Final"
  val SLF4J_VER = "1.5.6"
  val JUNIT_VER = "4.8.1"

  val netty = "io.netty" % "netty" % NETTY_VER
  val slf4j = "org.slf4j" % "slf4j-api" % SLF4J_VER
  val slf4jLog4j = "org.slf4j" % "slf4j-log4j12" % SLF4J_VER
  val junit = "junit" % "junit" % JUNIT_VER % "test"

  val deps = Seq(netty, slf4j, slf4jLog4j, junit)
}
// Project graph: cluster -> network -> java wrappers -> examples, plus an
// aggregated "full" project that compiles every subproject's sources together
// for packaging and (partially disabled) scaladoc generation.
object NorbertBuild extends Build {
  import BuildSettings._
  import Resolvers._

  lazy val cluster = Project("cluster", file("cluster"),
    settings = buildSettings ++ Seq(libraryDependencies ++= ClusterDependencies.deps, resolvers := norbertResolvers))

  lazy val network = Project("network", file("network"),
    settings = buildSettings ++ Seq(libraryDependencies ++= NetworkDependencies.deps, resolvers := norbertResolvers)) dependsOn(cluster % "compile;test->test")

  lazy val javaCluster = Project("java-cluster", file("java-cluster"), settings = buildSettings) dependsOn(cluster)

  lazy val javaNetwork = Project("java-network", file("java-network"), settings = buildSettings) dependsOn(cluster, javaCluster, network % "compile;test->test")

  lazy val examples = Project("examples", file("examples"), settings = buildSettings) dependsOn(network, javaNetwork)

  lazy val root = Project("root", file("."), settings = buildSettings) aggregate(cluster, network, javaCluster, javaNetwork, examples)

  lazy val full = {
    // The projects that are packaged in the full distribution.
    val description = SettingKey[String]("description")
    val projects = Seq(cluster, network, javaCluster, javaNetwork, examples )
    val myManagedSources = TaskKey[Seq[Seq[File]]]("my-managed-sources")
    val myUnmanagedSources = TaskKey[Seq[Seq[File]]]("my-unmanaged-sources")
    val mySettings = buildSettings

    // Byte-for-byte file copy via NIO channels; creates parent dirs as needed.
    def copyFile(input: File, output: File) {
      output.getParentFile.mkdirs
      output.createNewFile
      val inputChannel = new FileInputStream(input).getChannel
      val outputChannel = new FileOutputStream(output).getChannel
      outputChannel.transferFrom(inputChannel, 0, Long.MaxValue)
    }

    // Copies all compiled *Protos* classes into a sibling "proto-tmp" directory
    // so they can be placed on the scaladoc classpath instead of its sources.
    def copyProtoFiles(classDir: Types.Id[File]): File = {
      val protoClasses = (classDir ** "*Protos*.class").get
      val parentDir = new File(classDir.getParent)
      val protoTmpDir = parentDir / "proto-tmp"
      protoTmpDir.delete
      val rebased = protoClasses x rebase(oldBase = classDir, newBase = protoTmpDir)
      rebased.foreach {
        case (protoClass, newProtoClass) =>
          copyFile(protoClass, newProtoClass)
      }
      protoTmpDir
    }

    // Generating documentation fails because of our proto sources. This is a hacked up task to put them on the
    // classpath and proceed running with everything else.
    def filteredDocTask: Project.Initialize[Task[File]] =
      (classDirectory in Compile, cacheDirectory in Compile, compileInputs in Compile, streams in Compile, docDirectory in Compile, configuration in Compile , scaladocOptions in Compile)
        .map { (classDir, cache, in, s, target, config, options) =>
          val d = new Scaladoc(in.config.maxErrors, in.compilers.scalac)
          val cp = in.config.classpath.toList - in.config.classesDirectory
          val sources = in.config.sources
          val (protoSources, scalaSources) = sources.partition(file => file.getName.contains("Protos"))
          // add the java sources to the class path
          val protoTmpDir: File = copyProtoFiles(classDir)
          val classpath = protoTmpDir :: cp
          // Skipping scaladoc for now since theres ANOTHER bug, https://issues.scala-lang.org/browse/SI-4284
          val emptySources = Seq.empty[File]
          d.cached(cache / "doc", Defaults.nameForSrc(config.name), /*scalaSources*/ emptySources, classpath , target, options, s.log)
          target
        }

    Project(
      id = "norbert",
      base = file("full"),
      settings = mySettings ++ Seq(
        description := "Includes all of the norbert subprojects in one project",
        myManagedSources <<= projects.map(managedSources in Compile in _).join,
        unmanagedClasspath in Compile += Attributed.blank(new java.io.File("doesnotexist")),
        myUnmanagedSources <<= projects.map(unmanagedSources in Compile in _).join,
        (unmanagedSources in Compile) <<= (myUnmanagedSources).map(_.flatten),
        (managedSources in Compile) <<= (myManagedSources).map(_.flatten),
        (unmanagedSourceDirectories in Compile) <<= (projects.map(unmanagedSourceDirectories in Compile in _).join).map(_.flatten),
        (doc in Compile) <<= filteredDocTask,
        // POM metadata required by Sonatype for publishing to Maven Central.
        pomExtra <<= (pomExtra, name, description) { (extra, name, desc) => extra ++ Seq(
          <name>{name}</name>,
          <description>{desc}</description>,
          <url>http://sna-projects.com/norbert</url>,
          <licenses>
            <license>
              <name>Apache</name>
              <url>http://github.com/linkedin-sna/norbert/raw/HEAD/LICENSE</url>
              <distribution>repo</distribution>
            </license>
          </licenses>,
          <scm>
            <url>http://github.com/linkedin-sna/norbert</url>
            <connection>scm:git:git://github.com/linkedin-sna/norbert.git</connection>
          </scm>,
          <developers>
            <developer>
              <id>jhartman</id>
              <name>Joshua Hartman</name>
              <url>http://twitter.com/hartmanster</url>
            </developer>
            <developer>
              <id>rwang</id>
              <name>Rui Wang</name>
              <url>http://www.linkedin.com/profile/view?id=147539</url>
            </developer>
          </developers>
        )},
        libraryDependencies ++= ClusterDependencies.deps ++ NetworkDependencies.deps
      )
    )
  }
}
| linkedin/norbert | project/Build.scala | Scala | apache-2.0 | 7,921 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest3_0_1
import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestPackageNewTest
// Runs the shared ScalaTest package-handling suite against the
// ScalaTest 3.0.1 / Scala 2.11 base configuration.
class Scalatest2_11_3_0_1_PackageTest extends Scalatest2_11_3_0_1_Base with ScalaTestPackageNewTest
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest3_0_1/Scalatest2_11_3_0_1_PackageTest.scala | Scala | apache-2.0 | 272 |
package io.github.binaryfoo.lagotto.shell
import java.util
import scala.collection.JavaConversions._
/**
* Keep at most N things.
*/
class BoundedQueue[T](val capacity: Int) extends util.ArrayDeque[T] {

  /**
   * Adds an element unless the queue has zero capacity or the element equals
   * the current head; when full, the oldest element is evicted first.
   */
  override def add(e: T): Boolean =
    if (capacity == 0 || (!isEmpty && peek() == e)) {
      false
    } else {
      if (size == capacity) remove()
      super.add(e)
    }

  /** Returns the contents in FIFO order and empties the queue. */
  def dump(): List[T] = {
    val contents = List.newBuilder[T]
    val it = iterator()
    while (it.hasNext) contents += it.next()
    clear()
    contents.result()
  }
}
| binaryfoo/lagotto | src/main/scala/io/github/binaryfoo/lagotto/shell/BoundedQueue.scala | Scala | mit | 544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features.bindings
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVarBuilder, HasMetadata}
import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesDriverSpecificConf, KubernetesUtils, SparkPod}
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.features.KubernetesFeatureConfigStep
private[spark] class RDriverFeatureStep(
    kubernetesConf: KubernetesConf[KubernetesDriverSpecificConf])
  extends KubernetesFeatureConfigStep {

  /**
   * Configures the driver container to launch an R application: sets the
   * R-specific environment variables consumed by RRunner and appends the
   * driver-r launch arguments.
   */
  override def configurePod(pod: SparkPod): SparkPod = {
    val driverConf = kubernetesConf.roleSpecificConf
    require(driverConf.mainAppResource.isDefined, "R Main Resource must be defined")

    // Primary resource: the main R file, resolved to a container-local URI.
    val primaryEnv = new EnvVarBuilder()
      .withName(ENV_R_PRIMARY)
      .withValue(KubernetesUtils.resolveFileUri(kubernetesConf.sparkRMainResource().get))
      .build()

    // App arguments are space-joined because that is the delineation RRunner expects.
    val argsEnv = Option(driverConf.appArgs).filter(_.nonEmpty).toSeq.map { rArgs =>
      new EnvVarBuilder()
        .withName(ENV_R_ARGS)
        .withValue(rArgs.mkString(" "))
        .build()
    }

    val rContainer = new ContainerBuilder(pod.container)
      .addAllToEnv((primaryEnv +: argsEnv).asJava)
      .addToArgs("driver-r")
      .addToArgs("--properties-file", SPARK_CONF_PATH)
      .addToArgs("--class", driverConf.mainClass)
      .build()
    SparkPod(pod.pod, rContainer)
  }

  override def getAdditionalPodSystemProperties(): Map[String, String] = Map.empty

  override def getAdditionalKubernetesResources(): Seq[HasMetadata] = Seq.empty
}
| lvdongr/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala | Scala | apache-2.0 | 2,548 |
package scala.meta.tests
package semanticdb
// Contributing tips:
// - To run an individual test: ~testsJVM/testOnly *TargetedSuite -- -z "package YYY"
// and replace YYY with the package name of your test.
// - On test failure, the obtained output is printed to the console for
// easy copy-paste to replace the current expected output.
// - Try to follow the alphabetical order of the enclosing package, at the time
// of this writing the latest package is `g`, so the next package should be `h`.
// - glhf, and if you have any questions don't hesitate to ask in the gitter channel :)
class TargetedSuite extends SemanticdbSuite {
targeted(
// curried function application with named args, #648
"""package a
|object Curry {
| def bar(children: Int)(x: Int) = children + x
| <<bar>>(children = 4)(3)
|}
""".trim.stripMargin, { (_, second) =>
assert(second === "a/Curry.bar().")
}
)
targeted(
"""
|package b
|case class User(name: String, age: Int)
|object M {
| val u: User = ???
| u.<<copy>>(<<age>> = 43)
|}
""".trim.stripMargin, { (_, copy, age) =>
assert(copy === "b/User#copy().")
assert(age === "b/User#copy().(age)")
}
)
diagnostics(
"""
|package c
|import scala.collection.mutable. /* comment */{ Map, Set, ListBuffer }
|import scala.concurrent._, collection.mutable.{HashSet, Buffer}
|import scala.collection.{ /* comment */mutable /* comment */ => m}
|object a {
| ListBuffer.empty[Int]
| HashSet.empty[Int]
|}
""".stripMargin.trim,
"""|[1:48..1:51) [warning] Unused import
|[1:53..1:56) [warning] Unused import
|[2:24..2:25) [warning] Unused import
|[2:56..2:62) [warning] Unused import
|[3:39..3:46) [warning] Unused import
""".stripMargin.trim
)
diagnostics(
// See https://github.com/scalameta/scalameta/issues/899
"""import scala.io._
|object d""".stripMargin,
"[0:16..0:17) [warning] Unused import"
)
// Checks def macros that we can't test in expect tests because expect tests have no dependencies.
occurrences(
"""|package e
|import scala.meta._
|import org.scalatest._
|object x extends FunSuite {
| val x = q"Foo"
| val y = q"Bar"
| val z = q"$x + $y"
| val k = sourcecode.Name.generate
| assert(x.value == "Foo")
|}
""".stripMargin,
"""|[0:8..0:9): e <= e/
|[1:7..1:12): scala => scala/
|[1:13..1:17): meta => scala/meta/
|[2:7..2:10): org => org/
|[2:11..2:20): scalatest => org/scalatest/
|[3:7..3:8): x <= e/x.
|[3:17..3:25): FunSuite => org/scalatest/FunSuite#
|[3:26..3:26): => org/scalatest/FunSuite#`<init>`().
|[4:6..4:7): x <= e/x.x.
|[4:10..4:11): q => scala/meta/internal/quasiquotes/Unlift.
|[5:6..5:7): y <= e/x.y.
|[5:10..5:11): q => scala/meta/internal/quasiquotes/Unlift.
|[6:6..6:7): z <= e/x.z.
|[6:10..6:11): q => scala/meta/internal/quasiquotes/Unlift.
|[6:13..6:14): x => e/x.x.
|[6:18..6:19): y => e/x.y.
|[7:6..7:7): k <= e/x.k.
|[7:10..7:20): sourcecode => sourcecode/
|[7:21..7:25): Name => sourcecode/Name.
|[7:26..7:34): generate => sourcecode/Name.generate().
|[8:2..8:8): assert => org/scalatest/Assertions#assert().
|[8:9..8:10): x => e/x.x.
|[8:11..8:16): value => scala/meta/Term.Name#value().
|[8:17..8:19): == => java/lang/Object#`==`().
|""".stripMargin
)
targeted(
"""package f
|object an {
| for {
| i <- List(1, 2)
| <<j>> <- List(3, 4)
| } yield j
|}
""".stripMargin, { (db, j) =>
val denot = db.symbols.find(_.symbol == j).get
assert(denot.symbol.startsWith("local"))
}
)
targeted(
"""package g
|object ao {
| object <<foo>>
| def <<foo>>(a: Int): Unit = ()
| def <<foo>>(a: String): Unit = ()
|}
""".stripMargin,
(doc, foo1, foo2, foo3) => {
assert(foo1 == "g/ao.foo.")
assert(foo2 == "g/ao.foo().")
assert(foo3 == "g/ao.foo(+1).")
}
)
}
| olafurpg/scalameta | tests/jvm/src/test/scala/scala/meta/tests/semanticdb/TargetedSuite.scala | Scala | bsd-3-clause | 4,254 |
/***
* Excerpted from "Seven Concurrency Models in Seven Weeks",
* published by The Pragmatic Bookshelf.
* Copyrights apply to this code. It may not be used to create training material,
* courses, books, articles, and the like. Contact us if you are in doubt.
* We make no guarantees that this code is fit for any purpose.
* Visit http://www.pragmaticprogrammer.com/titles/pb7con for more book information.
***/
package com.paulbutcher
import akka.actor._
import collection.mutable.{LinkedHashMap, Queue}
// Asks the parser to hand out the next batch of pages.
case object RequestBatch
// Acknowledges that the batch with the given id was fully processed.
case class Processed(id: Int)
class Parser(filename: String, batchSize: Int, limit: Int) extends Actor {
  val pages = Pages(limit, filename)
  var nextId = 1
  // Batches handed out but not yet acknowledged, kept in insertion order
  // (oldest first) so the oldest can be re-issued.
  val pending = LinkedHashMap[Int, Batch]()
  val accumulator = context.actorOf(Props(new Accumulator(self)))

  def receive = {
    case RequestBatch =>
      if (pages.hasNext) {
        val batch = Batch(nextId, pages.take(batchSize).toVector, accumulator)
        pending(nextId) = batch
        sender ! batch
        nextId += 1
      } else {
        // No fresh pages left: re-issue the oldest unacknowledged batch so a
        // batch lost by a crashed worker is eventually reprocessed.
        val (id, batch) = pending.head // The oldest pending item
        pending -= id // Remove and re-add so it's now
        pending(id) = batch // the youngest
        sender ! batch
      }

    case Processed(id) =>
      pending.remove(id)
      // Every page parsed and every batch acknowledged: the run is complete.
      if (!pages.hasNext && pending.isEmpty)
        context.system.shutdown
  }
}
| XBOOS/concurrency | code/ActorsScala/WordCountFaultTolerant/src/main/scala/com/paulbutcher/Parser.scala | Scala | gpl-2.0 | 1,418 |
/** Demonstrates converting a two-argument function into its curried form. */
object Curry {

  /** Turns `f: (A, B) => C` into a function returning a function, `A => (B => C)`. */
  def curry[A, B, C](f: (A, B) => C): A => (B => C) =
    (a: A) => (b: B) => f(a, b)
}
| Bolt64/my_code | scala/curry.scala | Scala | mit | 193 |
import javax.mail._
import javax.mail.internet._
import java.io._
import java.text.SimpleDateFormat
import java.util._
import java.lang.management.ManagementFactory
import com.google.api.client.auth.oauth2.Credential
import com.google.api.client.extensions.java6.auth.oauth2.AuthorizationCodeInstalledApp
import com.google.api.client.extensions.jetty.auth.oauth2.LocalServerReceiver
import com.google.api.client.googleapis.auth.oauth2.GoogleAuthorizationCodeFlow
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport
import com.google.api.client.http.HttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.client.json.JsonFactory
import com.google.api.client.util.store.FileDataStoreFactory
import com.google.api.client.util.DateTime
import com.google.api.services.tasks.Tasks
import com.google.api.services.tasks.TasksScopes
import com.google.api.services.tasks.model._
import com.beust.jcommander.JCommander
import com.beust.jcommander.Parameter
import com.beust.jcommander.ParameterException
import javax.mail._
import javax.mail.internet._
import java.io._
import java.util._
import java.net.ServerSocket
import java.net.Socket
import java.net.InetAddress
import java.net.URL
import java.io.DataInputStream
import scala.collection.JavaConversions._
import scala.sys
import scala.Console
import scala.concurrent.Await
import scala.concurrent.duration._
import org.jsoup.Jsoup
import org.apache.commons.codec.binary.Base64
import akka.actor.Actor
import akka.actor.ActorSystem
import akka.actor.Props
import akka.pattern.ask
import akka.util.Timeout
// Global flags: debug toggle and the Akka ask timeout used throughout the script.
val debug = true
implicit val timeout = Timeout(65 seconds)
// Maps a one-letter task kind to the unicode glyph appended to task titles.
val TASK_SYMBOL = scala.collection.Map[String,String](
  "c" -> "☎", // call
  "m" -> "✉", // mail
  "p" -> "⎙", // print
  "M" -> "♫", // music
  "w" -> "ʬ", // set of related small tasks or just a small task
  "t" -> "ʘ", // figure out next step, take stock of the situation
  "W" -> "Ɯ", // Task
  "€" -> "€", // Money task
  "x" -> "ɸ" // find a use for this symbol someday
)
// All known symbols, used to strip symbols out of titles when editing.
val TASK_SYMBOL_STRING = "☎✉⎙♫ʬʘɸƜ€"
// Names of the environment variables that configure the tool.
val TASK_CLIENT_SECRET_FILE_ENV_VAR_NAME = "TASKS_CLIENT_SECRET"
val TASK_SERVER_BIND_ADDR_ENV_VAR_NAME = "TASKS_SERVER_BIND_ADDR"
val TASK_SERVER_PORT_ENV_VAR_NAME = "TASKS_SERVER_PORT"
val TASK_SERVER_PID_FILE_ENV_VAR_NAME = "TASKSD_PIDFILE"
val ONE_DAY__IN_MILLIS = 86400000
val USERNAME_ENV_NAME="TASKS_USERNAME"
val PASSWORD_ENV_NAME="TASKS_PASSWORD"
/** Creates a Google Tasks task: title (optionally suffixed by a symbol), notes and due date. */
def buildTask(title: String, desc: String, date: DateTime = today(), symbol: String = "") = {
  val googleTask = new com.google.api.services.tasks.model.Task()
  googleTask.setTitle(addSymbolToTitle(title, symbol))
  googleTask.setNotes(desc)
  googleTask.setDue(date)
  googleTask
}
// Current instant, and the same instant one day ahead, as Google API DateTimes.
def today() = new DateTime(System.currentTimeMillis())
def tomorrow() = new DateTime(System.currentTimeMillis() + ONE_DAY__IN_MILLIS)
/** Reads the body of a mail file; for multipart messages only the first part is returned. */
def readEmail(f: String) = {
  val message = readMailFile(f)
  message.getContent() match {
    case multipart: Multipart => multipart.getBodyPart(0).getContent().toString()
    case other => other.toString()
  }
}

/** Parses the given file as a MIME message using a default JavaMail session. */
def readMailFile(f: String) = {
  val stream = new FileInputStream(new File(f))
  val session = Session.getDefaultInstance(new Properties())
  new MimeMessage(session, stream)
}
// Subject line of a mail file.
def readSubject(f:String) = readMailFile(f).getSubject
// Base64-encodes a string (used to build HTTP Basic auth headers).
def encodeString(string:String):String = new String(Base64.encodeBase64(string.getBytes()))
// issues.jboss.org requires Basic auth; other trackers are fetched anonymously.
def prepareUrlConnection(bugUrl:String) = if ( bugUrl.contains("issues.jboss.org") ) jsoupConnectionWithBasicAuth(bugUrl) else jsoupConnectionNoAuth(bugUrl)
def jsoupConnectionWithBasicAuth(bugUrl:String) = Jsoup.connect(bugUrl).header("Authorization", "Basic " + encodeString(buildUsernamePasswordPrefixIfProvided()))
def jsoupConnectionNoAuth(bugUrl:String) = Jsoup.connect(bugUrl)
/**
 * Resolves a human-readable bug title from a bug URL: the fetched page title
 * when the page is reachable, otherwise an id derived from the last URL segment.
 */
def bugIdFromBugUrl(bugUrl: String): String = {
  import scala.util.control.NonFatal
  try {
    // FIX: the fetched title used to be discarded (the try's value was never
    // returned), so the fallback was always used; return the real title now.
    prepareUrlConnection(bugUrl).get().title()
  } catch {
    // FIX: catch only non-fatal errors instead of Throwable, so fatal JVM
    // errors are not silently swallowed.
    case NonFatal(e) =>
      println("Can't access " + bugUrl + " following back to building id from URL:" + e.getMessage())
      bugUrl.substring(bugUrl.lastIndexOf('/') + 1) + " - Missing Description"
  }
}
/** Returns "user:password" when both values are non-empty, otherwise an empty string. */
def returnAuthentificationHostPrefix(username: String, password: String) = {
  val bothProvided = !"".equals(username) && !"".equals(password)
  if (bothProvided) username + ":" + password else ""
}
/**
 * Builds the "user:password" Basic-auth prefix from the TASKS_USERNAME /
 * TASKS_PASSWORD environment variables, or "" when they are not both set.
 */
def buildUsernamePasswordPrefixIfProvided(): String = {
  import scala.util.control.NonFatal
  try {
    // FIX: use the env-var name constants declared above instead of
    // re-duplicating the string literals.
    returnAuthentificationHostPrefix(sys.env(USERNAME_ENV_NAME), sys.env(PASSWORD_ENV_NAME))
  } catch {
    // sys.env(key) throws NoSuchElementException when a variable is missing;
    // NonFatal keeps fatal JVM errors propagating (was: case e: Throwable).
    case NonFatal(e) =>
      println("No USERNAME / PASSWORD provided:" + e.getMessage())
      ""
  }
}
// Opens an HTTP connection to the given URL (no auth header attached yet).
def openHttpConnectionForUrl(bugUrl:String):java.net.HttpURLConnection = new URL(bugUrl).openConnection.asInstanceOf[java.net.HttpURLConnection]

// Opens an input stream on the URL with a Basic-auth header attached.
def getInputStreamFrom(bugUrl:String) = {
  val hc = openHttpConnectionForUrl(bugUrl)
  hc.setRequestProperty("Authorization", "Basic "+ encodeString(buildUsernamePasswordPrefixIfProvided))
  hc.getInputStream()
}
/**
 * Builds a "PR<number> - <title>" string from a GitHub pull-request URL by
 * querying the GitHub REST API.
 */
def prIdFromPrUrl(prUrl: String) = {
  // Map the HTML PR URL to its REST API equivalent.
  val url = prUrl.replaceFirst("/github.com/","/api.github.com/repos/").replaceFirst("pull","pulls")
  // FIX: `url` was computed and then ignored while the same expression was
  // rebuilt inline in fromURL; reuse the value instead.
  val content = scala.io.Source.fromURL(url).mkString
  val res = scala.util.parsing.json.JSON.parseFull(content) match {
    case Some(map: scala.collection.immutable.HashMap[String, Any] @unchecked) => { "PR" +
      map("number").toString + " - " + map("title").toString }
    // NOTE(review): this branch evaluates to Unit, so `res.toString` yields
    // the literal "()" on a parse failure — consider a proper fallback string.
    case _ => println("Should never happen!")
  }
  res.toString
}
/**
 * Picks the task description source, by priority: pull-request URL, bug URL,
 * e-mail body, then the literal description.
 */
def getDesc(description: String, email: String, bugUrl: String, prUrl: String): String =
  if (!"".equals(prUrl)) "\\n\\n" + prUrl
  else if (!"".equals(bugUrl)) bugIdFromBugUrl(bugUrl) + " - " + bugUrl
  else if (!"".equals(email)) readEmail(email)
  else description
/**
 * Splits a "title;description" line into a (title, description) pair.
 * A single field yields an empty description; any other arity yields ("", "").
 */
def parseTaskLine(line: String, sep: String = ";") =
  line.split(sep) match {
    case Array(title) => (title, "")
    case Array(title, desc) => (title, desc)
    case _ => ("", "")
  }
/**
 * Picks the task title source, by priority: pull request, bug URL, explicit
 * title, then the e-mail subject (with a default when the subject is empty).
 */
def getTitle(title: String, email: String, bugUrl: String, prUrl: String): String =
  if (!"".equals(prUrl)) prIdFromPrUrl(prUrl)
  else if (!"".equals(bugUrl)) bugIdFromBugUrl(bugUrl)
  else if (!"".equals(title)) title
  else if (!"".equals(email)) {
    val subject = readSubject(email)
    if ("".equals(subject)) "Mail based task" else subject
  }
  else title
/** Parses a dd/MM/yyyy date string into a DateTime, defaulting to today when empty. */
def getDueDate(dueDate: String) =
  if ("".equals(dueDate)) today
  else new DateTime(new java.text.SimpleDateFormat("dd/MM/yyyy").parse(dueDate))
/** Maps a one-letter task kind to its unicode glyph, or "" for unknown letters. */
def getSymbol(symbolLetter: String): String = {
  // Idiomatic form of the previous contains/get/None dance: one lookup, same result.
  TASK_SYMBOL.getOrElse(symbolLetter, "")
}
// Performs the OAuth2 installed-application flow against the Google Tasks API
// and returns an authorized Tasks client. OAuth tokens are cached under
// ~/.credentials/tasks-java-quickstart; the client-secret JSON path is read
// from the TASKS_CLIENT_SECRET environment variable and must exist on disk.
def connectAndGetService() = {
  val APPLICATION_NAME = "Google Tasks API Java Quickstart"
  val DATA_STORE_DIR = new File(System.getProperty("user.home"), ".credentials/tasks-java-quickstart")
  val DATA_STORE_FACTORY = new FileDataStoreFactory(DATA_STORE_DIR)
  val JSON_FACTORY = JacksonFactory.getDefaultInstance()
  val HTTP_TRANSPORT = GoogleNetHttpTransport.newTrustedTransport()
  val SCOPES = Arrays.asList(TasksScopes.TASKS)
  val clientSecretFilename = sys.env(TASK_CLIENT_SECRET_FILE_ENV_VAR_NAME)
  if ( ! new java.io.File(clientSecretFilename).exists() ) throw new IllegalStateException("Certificate file does not exists: " + clientSecretFilename)
  val clientSecrets = GoogleClientSecrets.load(JSON_FACTORY, new InputStreamReader(new FileInputStream( clientSecretFilename )))
  val flow = new GoogleAuthorizationCodeFlow.Builder( HTTP_TRANSPORT, JSON_FACTORY, clientSecrets, SCOPES)
    .setDataStoreFactory(DATA_STORE_FACTORY)
    .setAccessType("offline")
    .build()
  // May open a local server and a browser window on first run to complete the flow.
  val credential = new AuthorizationCodeInstalledApp(flow, new LocalServerReceiver()).authorize("user")
  new Tasks.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential).setApplicationName(APPLICATION_NAME).build()
}
/** Null-safe string: "" for null, the string itself otherwise. */
def emptyStringIfNull(s: String): String = Option(s).getOrElse("")
// Prints every task whose title contains `search` (case-insensitive), then
// invokes the `done` callback; does nothing for an empty search string.
// NOTE(review): `service` is the Tasks client defined elsewhere in this script.
def searchAndQuit(search: String, done: () => Unit ):Unit = {
  if ( ! "".equals(search) ) {
    val tasks = service.tasks.list("@default").execute()
    for (task <- tasks.getItems )
      if ( task.getTitle().toLowerCase().contains(search.toLowerCase()) )
        Console.out.println(taskDisplay(task))
    done()
  }
}
// True when `due` falls on the current calendar day.
def isToday(due:DateTime) = isSameDay(due, Calendar.getInstance().getTime())
// Day-level comparison of a Google DateTime against a java.util.Date, done on
// the yyyy-MM-dd prefix of the RFC 3339 rendering.
def isSameDay(due: DateTime, day: Date) = {
  //Console.out.println("Is " + due.toStringRfc3339.substring(0,10) + " equals to " + new SimpleDateFormat("y-MM-dd").format(day) + " ?")
  due.toStringRfc3339.substring(0,10).equals(new SimpleDateFormat("y-MM-dd").format(day))
}
// Day-level comparison of two Google DateTimes.
def isSameDay(due: DateTime, day: DateTime) = due.toStringRfc3339.substring(0,10).equals(day.toStringRfc3339.substring(0,10))
// Formats a DateTime as dd/MM/YYYY for console output.
def dateDisplay(date: DateTime) = new SimpleDateFormat("dd/MM/YYYY").format(new Date(date.getValue()))
// Task notes prefixed with an escaped newline, or "" when notes display is disabled.
def taskNotesDisplay(task: com.google.api.services.tasks.model.Task, noNotesDisplay: Boolean) = {
  if ( noNotesDisplay ) "" else { "\\n" + emptyStringIfNull(task.getNotes) }
}
// One-task console rendering: "[id] title", due date, and (optionally) notes.
def taskDisplay(task: com.google.api.services.tasks.model.Task, noNotesDisplay: Boolean = false) = {
  "[" + task.getId() + "] " + task.getTitle + "\\nDue on: " + dateDisplay(task.getDue()) + taskNotesDisplay(task, noNotesDisplay)
}
/** True when the value is neither null nor the empty string. */
def notNullNorEmpty(value: String) = value != null && value.nonEmpty
/** Appends " <symbol>" to the title when a non-null, non-empty symbol is provided. */
def addSymbolToTitle(title: String, symbol: String): String =
  if (symbol != null && !"".equals(symbol)) title + " " + symbol else title
// Features methods
// Prints, numbered, every task due on `dueDate`, then invokes `done`.
// NOTE(review): the `Args` type is declared elsewhere in this script; it is
// assumed to carry the noNotesDisplay flag used here.
def listTasksForDayAndQuit(Args: Args, dueDate: DateTime, done: () => Unit):Unit = {
  val tasks = service.tasks.list("@default").execute()
  Console.out.println("Tasks due on " + dueDate + ":")
  Console.out.println
  var taskNumber = 1
  // Tasks with no due date at all are skipped.
  for (task <- tasks.getItems ) if ( task.getDue() != null && isSameDay( dueDate, task.getDue())) {
    Console.out.println(taskNumber + ") " + taskDisplay(task, Args.noNotesDisplay))
    taskNumber = taskNumber + 1
  }
  done()
}
/** Runs the "list today" and/or "list tomorrow" features when their flags are set. */
def listTasksAndQuit(Args: Args, done: () => Unit): Unit = {
  if (Args.list) {
    listTasksForDayAndQuit(Args, today(), done)
  }
  if (Args.listTomorrow) {
    listTasksForDayAndQuit(Args, tomorrow(), done)
  }
}
/** Pushes the due date of task `id` forward by `days` days, prints a
  * confirmation and invokes `done`. No-op unless days > 0 and id is non-empty.
  * Fixes: the confirmation message was missing the closing quote after the
  * title, and the per-day constant (which is in milliseconds) was named as if
  * it were seconds. */
def bumpDueDate(days: Int, id: String, done: () => Unit): Unit = {
  if (days > 0 && notNullNorEmpty(id)) {
    val MILLIS_PER_DAY = 86400L * 1000
    val task = service.tasks.get("@default", id).execute()
    task.setDue(new DateTime(task.getDue().getValue() + (days * MILLIS_PER_DAY)))
    val result = service.tasks.update("@default", task.getId(), task).execute()
    Console.out.println("Task '" + result.getTitle() + "' has been bumped by " + days + " days:" + dateDisplay(result.getDue()))
    done()
  }
}
/** Inserts `task` into the default list and returns a confirmation string with
  * the created task's title.
  * NOTE(review): unlike the sibling features, the `done` callback is never
  * invoked here — confirm whether that is intentional (the actor path depends
  * on the returned String being sent back to the sender). */
def addTask(task: com.google.api.services.tasks.model.Task, done: () => Unit) = {
  "Task '" + service.tasks.insert("@default", task).execute().getTitle() + "' has been created and added"
}
/** Updates task `id` with the non-empty fields of `newTask` (title, notes, due
  * date), persists the change, prints the updated task and invokes `done`.
  * A null or empty id is ignored. The `symbol` parameter is kept for interface
  * compatibility but is currently unused.
  * Fix: dropped the unused `result` binding — the update call itself is kept
  * for its side effect. */
def editTask(id: String, newTask: com.google.api.services.tasks.model.Task, symbol: String = "", done: () => Unit): Unit = {
  if (notNullNorEmpty(id)) {
    val task = service.tasks.get("@default", id).execute()
    Console.out.println("New Title:" + newTask.getTitle())
    // Only overwrite the title when it contains something besides task symbols and spaces.
    if (notNullNorEmpty(newTask.getTitle().filterNot(TASK_SYMBOL_STRING.toSet).replaceAll(" ", ""))) task.setTitle(newTask.getTitle())
    if (notNullNorEmpty(newTask.getNotes())) task.setNotes(newTask.getNotes())
    if (newTask.getDue() != null) task.setDue(newTask.getDue())
    service.tasks.update("@default", task.getId(), task).execute()
    Console.out.println(taskDisplay(task))
    done()
  }
}
/** Deletes task `id` from the default list, prints a confirmation and invokes
  * `done`. A null or empty id is ignored. */
def taskDone(id: String, done: () => Unit) = {
  if (!notNullNorEmpty(id)) ()
  else {
    service.tasks.delete("@default", id).execute()
    Console.out.println("Task [" + id + "] has been removed.")
    done()
  }
}
/** Actor that creates one Google Task per received message and replies to the
  * sender with the confirmation string produced by addTask. Used for bulk adds. */
class TaskCreatorActor extends Actor {
  def receive = {
    // A no-op callback is passed because addTask does not invoke it anyway.
    case task: com.google.api.services.tasks.model.Task => sender ! addTask(task, () => {})
    case _ => Console.out.println("Not a valid instance of Task")
  }
}
/** Parses each input line into (title, description) and asks `taskActors` to
  * create the corresponding task; returns the queue of reply futures so the
  * caller can await all creations. Lines whose parsed title is empty are skipped. */
def sendTaskToActor(lines: Iterator[String], taskActors: akka.actor.ActorRef) = {
  val queue = new scala.collection.mutable.Queue[scala.concurrent.Future[Any]]
  for ( line <- lines ) {
    val (title, desc) = parseTaskLine(line)
    if ( notNullNorEmpty(title) )
      queue += taskActors ? buildTask( title, desc) // ask: reply is addTask's confirmation string
  }
  queue
}
/** Blocks until every queued task-creation future completes, printing each
  * confirmation string as it resolves. */
def waitForTasksToBeCreated(queue: scala.collection.mutable.Queue[scala.concurrent.Future[Any]]) =
  queue.foreach { future =>
    Console.out.println(Await.result(future, timeout.duration).asInstanceOf[String])
  }
/** Creates one task per line of `tasksFile` via a temporary actor system,
  * waits for all of them, then invokes `done`. No-op for an empty file name.
  * Fix: the Source opened on the file is now closed, and the actor system is
  * shut down even when task creation fails (both previously leaked on error). */
def bulkTasksAdd(tasksFile: String, done: () => Unit) = {
  if (notNullNorEmpty(tasksFile)) {
    Console.out.println("Loading task from file:" + tasksFile)
    val system = ActorSystem("BulkTaskActors")
    val source = scala.io.Source.fromFile(tasksFile)
    try {
      val actor = system.actorOf(Props(new TaskCreatorActor()), name = "task-actor")
      waitForTasksToBeCreated(sendTaskToActor(source.getLines(), actor))
    } finally {
      source.close()
      system.shutdown
    }
    done()
  }
}
/** JCommander-mapped command-line options for the task client.
  * Fixes: the -D help text said "Task description" (copy/paste error — the flag
  * sets the due date), and "tomorow's" was misspelled in the -ll help text. */
class Args {
  // Parameters for new tasks
  @Parameter(names = Array("-t", "--task-title"), description = "Task title", required = false)
  var title: String = ""
  @Parameter(names = Array("-d", "--task-description"), description = "Task description", required = false)
  var description: String = ""
  // Optional parameters for creation
  @Parameter(names = Array("-T", "--task-type"), description = "Type of the task (☎,✉,⎙)", required = false)
  var symbol : String = ""
  @Parameter(names = Array("-D", "--due-date"), description = "Task due date", required = false)
  var dueDate: String = ""
  // Shortcuts to task creation
  @Parameter(names = Array("-e", "--email-as-description"), description = "Task description", required = false)
  var email: String = ""
  @Parameter(names = Array("-b", "--bug-url"), description = "A Bug entry URL", required = false)
  var bugUrl: String = ""
  @Parameter(names = Array("-p", "--pull-request"), description = "A PR URL entry", required = false)
  var prUrl: String = ""
  // Other features
  @Parameter(names = Array("-s", "--search-tasks"), description = "Search a task title containing the provided string", required = false)
  var search: String = ""
  @Parameter(names= Array("-l" , "--list-today-tasks"), description = "List today's tasks" , required = false )
  var list = false
  @Parameter(names= Array("-ll" , "--list-tomorrow-tasks"), description = "List tomorrow's tasks" , required = false )
  var listTomorrow = false
  // Features using the extra -i parameter
  @Parameter(names= Array("-B", "--bump-task"), description = "Bump due date", required = false)
  var bump: Int = 0
  @Parameter(names= Array("-i", "--task-id"), description = "Task ID", required = false)
  var id: String = ""
  // Features taking the task id as their own value AND reusing title, desc, ...
  @Parameter(names= Array("-E", "--edit-task-title"), description = "Edit task title, requires task id", required = false)
  var taskToEdit: String = ""
  @Parameter(names= Array("-F", "--task-finished"), description = "Mark task as done, requires task id as value", required = false)
  var taskToFinishId: String = ""
  @Parameter(names= Array("-A", "--bulk-add"), description = "Add tasks in bulk, using a simple 'one-line' by task name", required = false)
  var bulkAdd: String = ""
  @Parameter(names= Array("-N", "--no-notes"), description = "Do not show notes when printing task out", required= false)
  var noNotesDisplay = false
}
/** Dispatches a parsed command line to the matching feature, in priority order.
  * Each feature invokes `done` when it handles the request; in server mode
  * `done` throws and in CLI mode it exits the JVM, so at most one feature
  * completes per request. If nothing earlier handles the request, a task is
  * built from the creation flags and added as the default action.
  * NOTE(review): addTask never invokes `done` — confirm intent. */
def processRequest(Args: Args, done:() => Unit) = {
  bulkTasksAdd(Args.bulkAdd, done)
  taskDone(Args.taskToFinishId, done)
  bumpDueDate(Args.bump, Args.id, done)
  listTasksAndQuit(Args, done)
  searchAndQuit(Args.search, done)
  val symbol = getSymbol(Args.symbol)
  val task = buildTask( getTitle(Args.title, Args.email, Args.bugUrl, Args.prUrl), getDesc(Args.description, Args.email, Args.bugUrl, Args.prUrl), getDueDate(Args.dueDate), symbol)
  editTask(Args.taskToEdit, task , symbol, done)
  addTask(task, done)
}
/** Actor used in server mode: processes each incoming client Socket
  * synchronously and replies to the sender with the (Unit) result. */
class TaskRequestActor extends Actor {
  def receive = {
    case socket: java.net.Socket => sender ! launchProcessRequest(socket)
    case _ => Console.out.println("Not a valid java.net.Socket ! ")
  }
}
/** Reads one command line from the client socket, runs it with stdout
  * redirected to the socket so the client sees the output, then restores
  * stdout and closes the connection.
  * The `done` callback throws IllegalStateException as a sentinel to
  * short-circuit processRequest; that exception is expected and swallowed.
  * Fix: stdout restoration and stream/socket cleanup now happen in a finally
  * block, so an exception outside the inner try (e.g. while parsing the
  * command line) can no longer leave Console.out pointing at a dead socket
  * or leak the connection. */
def launchProcessRequest(socket: Socket) = {
  val input = new DataInputStream(socket.getInputStream())
  val out = Console.out
  try {
    // DataInputStream.readLine is deprecated but kept for wire compatibility.
    val s: String = input.readLine
    println(">>> Processing command: " + s )
    Console.setOut(socket.getOutputStream())
    val args = parseCommandLine(s.split(" "))
    try {
      processRequest(args, () => { throw new IllegalStateException("done") } )
    } catch {
      case done: IllegalStateException => // expected sentinel: request completed
      case _: Throwable => Console.out.println("something went wrong:")
    }
  } finally {
    Console.setOut(out)
    input.close
    socket.close
  }
}
/** Parses command-line tokens into an Args instance via JCommander
  * (parsing happens in the constructor; the populated Args is returned).
  * Fix: dropped the redundant `.toArray` on what is already an Array. */
def parseCommandLine(args: Array[java.lang.String]) = {
  val arguments = new Args
  new JCommander(arguments, args: _*)
  arguments
}
/** Overwrites `filename` with `content`.
  * Fix: the writer is now closed in a finally block, so a failing write can no
  * longer leak the file handle (the original anonymous-subclass trick skipped
  * close() on exception). Return type narrowed to Unit; the only caller
  * discards the result. */
def writeInFile(filename: String, content: String): Unit = {
  val writer = new PrintWriter(filename)
  try writer.write(content) finally writer.close()
}
// Connect to the Google Tasks API once, shared by all features above.
val service = connectAndGetService()
if ( args.length == 0 ) {
  // Server mode: record our PID in the configured file, then accept client
  // commands over a socket, one request per connection.
  val pid = ManagementFactory.getRuntimeMXBean().getName().split("@")(0)
  println("Server Mode Started (PID:" + pid + ")")
  writeInFile(sys.env(TASK_SERVER_PID_FILE_ENV_VAR_NAME), pid)
  val system = ActorSystem("TaskRequestActors")
  val actor = system.actorOf(Props(new TaskRequestActor()), name = "task-request-actor")
  try {
    // Backlog of 1, bound to the configured address; port/addr come from env vars.
    val server = new ServerSocket(sys.env(TASK_SERVER_PORT_ENV_VAR_NAME).toInt, 1, InetAddress.getByName(sys.env(TASK_SERVER_BIND_ADDR_ENV_VAR_NAME)))
    // Accept loop: each connection is handed to the actor; the ask-reply future
    // is deliberately ignored (fire-and-forget). NOTE(review): the loop never
    // exits normally, so the finally-shutdown only runs if bind/accept throws.
    while (true) {
      actor ? server.accept
    }
  } finally {
    system.shutdown
  }
} else
  // CLI mode: handle a single request, then exit the JVM.
  processRequest(parseCommandLine(args), () => System.exit(0) )
| rpelisse/tasks | src/main/scala/tasks.scala | Scala | gpl-2.0 | 17,950 |
package mesosphere.marathon.api.v2
import java.util
import javax.inject.Inject
import javax.ws.rs._
import javax.ws.rs.core.{ MediaType, Response }
import com.codahale.metrics.annotation.Timed
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.api.v2.json.EnrichedTask
import mesosphere.marathon.api.{ TaskKiller, EndpointsHelper, RestResource }
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.state.GroupManager
import mesosphere.marathon.tasks.{ TaskIdUtil, TaskTracker }
import mesosphere.marathon.{ BadRequestException, MarathonConf, MarathonSchedulerService }
import org.apache.log4j.Logger
import org.apache.mesos.Protos.TaskState
import play.api.libs.json.Json
import scala.collection.IterableView
import scala.collection.JavaConverters._
/** REST resource under /v2/tasks: lists running Marathon tasks (as JSON or as a
  * plain-text endpoint table) and supports bulk deletion of tasks by id. */
@Path("v2/tasks")
class TasksResource @Inject() (
    service: MarathonSchedulerService,
    taskTracker: TaskTracker,
    taskKiller: TaskKiller,
    val config: MarathonConf,
    groupManager: GroupManager,
    healthCheckManager: HealthCheckManager,
    taskIdUtil: TaskIdUtil) extends RestResource {
  val log = Logger.getLogger(getClass.getName)

  /** GET /v2/tasks — every known task, optionally filtered by status
    * ("running"/"staging", via ?status= and/or repeated ?status[]=), enriched
    * with its health results and the owning app's service ports. */
  @GET
  @Produces(Array(MediaType.APPLICATION_JSON))
  @Timed
  def indexJson(
    @QueryParam("status") status: String,
    @QueryParam("status[]") statuses: util.List[String]): Response = {
    //scalastyle:off null
    // JAX-RS hands us null when ?status= is absent; fold the scalar param into
    // the repeated-param list so both spellings are handled uniformly.
    if (status != null) {
      statuses.add(status)
    }
    //scalastyle:on
    val statusSet = statuses.asScala.flatMap(toTaskState).toSet
    // Lazy view of (appId, task) pairs across every app the tracker knows.
    val tasks = taskTracker.list.values.view.flatMap { app =>
      app.tasks.view.map(t => app.appName -> t)
    }
    val appIds = taskTracker.list.keySet
    // Service ports per app; apps unknown to the scheduler contribute Nil.
    val appToPorts = appIds.map { appId =>
      appId -> service.getApp(appId).map(_.servicePorts).getOrElse(Nil)
    }.toMap
    // Health results keyed by task id, gathered across all apps.
    val health = appIds.flatMap { appId =>
      result(healthCheckManager.statuses(appId))
    }.toMap
    val enrichedTasks: IterableView[EnrichedTask, Iterable[_]] = for {
      (appId, task) <- tasks
      // An empty filter set means "no status filtering".
      if statusSet.isEmpty || statusSet(task.getStatus.getState)
    } yield {
      EnrichedTask(
        appId,
        task,
        health.getOrElse(task.getId, Nil),
        appToPorts.getOrElse(appId, Nil)
      )
    }
    ok(Map(
      "tasks" -> enrichedTasks
    ))
  }

  /** GET /v2/tasks (text/plain) — tab-separated app/port/endpoint listing. */
  @GET
  @Produces(Array(MediaType.TEXT_PLAIN))
  @Timed
  def indexTxt(): Response = ok(EndpointsHelper.appsToEndpointString(
    taskTracker,
    service.listApps().toSeq,
    "\\t"
  ))

  /** POST /v2/tasks/delete — kills the tasks whose ids appear in the JSON body
    * ({"ids": [...]}), optionally scaling the owning apps down (?scale=true).
    * Unknown task ids are silently ignored; a malformed id yields 400. */
  @POST
  @Produces(Array(MediaType.APPLICATION_JSON))
  @Consumes(Array(MediaType.APPLICATION_JSON))
  @Timed
  @Path("delete")
  def killTasks(
    @QueryParam("scale")@DefaultValue("false") scale: Boolean,
    body: Array[Byte]): Response = {
    val taskIds = (Json.parse(body) \\ "ids").as[Set[String]]
    // Resolve each id to its tracked task (dropping unknown ids), grouped by app.
    val groupedTasks = taskIds.flatMap { taskId =>
      val appId = try {
        taskIdUtil.appId(taskId)
      }
      catch {
        case e: MatchError => throw new BadRequestException(s"Invalid task id '$taskId'.")
      }
      taskTracker.fetchTask(appId, taskId)
    }.groupBy { x =>
      taskIdUtil.appId(x.getId)
    }
    groupedTasks.foreach {
      case (appId, tasks) =>
        // The killer APIs take a selector over the app's tasks; we always kill
        // exactly the requested set regardless of what is passed in.
        def findToKill(appTasks: Set[MarathonTask]) = tasks.toSet
        if (scale) {
          taskKiller.killAndScale(appId, findToKill, force = true)
        }
        else {
          taskKiller.kill(appId, findToKill, force = true)
        }
    }
    // TODO: does anyone expect a response with all the deployment plans in case of scaling?
    Response.ok().build()
  }

  /** Maps a query-string status name to a Mesos TaskState; unknown names are dropped. */
  private def toTaskState(state: String): Option[TaskState] = state.toLowerCase match {
    case "running" => Some(TaskState.TASK_RUNNING)
    case "staging" => Some(TaskState.TASK_STAGING)
    case _ => None
  }
}
| murat-lacework/marathon | src/main/scala/mesosphere/marathon/api/v2/TasksResource.scala | Scala | apache-2.0 | 3,784 |
package com.datastax.driver.spark
/** Contains components for writing Spark RDDs to Cassandra.
  * (Intentionally empty: reserved for package-level aliases and implicits.) */
package object writer {
}
| bovigny/cassandra-driver-spark | src/main/scala/com/datastax/driver/spark/writer/package.scala | Scala | apache-2.0 | 119 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.http
import scala.concurrent.duration.Duration
import scala.concurrent.duration.FiniteDuration
import scala.util.Try
import akka.http.scaladsl.model.StatusCode
import akka.http.scaladsl.model.StatusCodes.Forbidden
import akka.http.scaladsl.model.StatusCodes.NotFound
import akka.http.scaladsl.model.MediaType
import akka.http.scaladsl.server.Directives
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport.sprayJsonMarshaller
import akka.http.scaladsl.server.StandardRoute
import spray.json._
import whisk.common.TransactionId
import whisk.core.entity.SizeError
import whisk.core.entity.ByteSize
import whisk.core.entity.Exec
import whisk.core.entity.ActivationId
/** Shared user-facing message strings (and message builders) for whisk's HTTP
  * API error responses. Kept in one place so wording stays consistent across
  * controllers and tests. */
object Messages {

  /** Standard message for reporting resource conflicts. */
  val conflictMessage = "Concurrent modification to resource detected."

  /**
   * Standard message for reporting resource conformance error when trying to access
   * a resource from a different collection.
   */
  val conformanceMessage = "Resource by this name exists but is not in this collection."
  val corruptedEntity = "Resource is corrupted and cannot be read."

  /**
   * Standard message for reporting deprecated runtimes.
   */
  def runtimeDeprecated(e: Exec) =
    s"The '${e.kind}' runtime is no longer supported. You may read and delete but not update or invoke this action."

  /** Standard message for resource not found. */
  val resourceDoesNotExist = "The requested resource does not exist."

  /** Standard message for too many activation requests within a rolling time window. */
  def tooManyRequests(count: Int, allowed: Int) =
    s"Too many requests in the last minute (count: $count, allowed: $allowed)."

  /** Standard message for too many concurrent activation requests within a time window. */
  def tooManyConcurrentRequests(count: Int, allowed: Int) =
    s"Too many concurrent requests in flight (count: $count, allowed: $allowed)."

  /** System overload message. */
  val systemOverloaded = "System is overloaded, try again later."

  /** Standard message when supplied authkey is not authorized for an operation. */
  val notAuthorizedtoOperateOnResource = "The supplied authentication is not authorized to access this resource."

  /** Standard error message for malformed fully qualified entity names. */
  val malformedFullyQualifiedEntityName =
    "The fully qualified name of the entity must contain at least the namespace and the name of the entity."
  def entityNameTooLong(error: SizeError) = {
    s"${error.field} longer than allowed: ${error.is.toBytes} > ${error.allowed.toBytes}."
  }
  val entityNameIllegal = "The name of the entity contains illegal characters."
  val namespaceIllegal = "The namespace contains illegal characters."

  /** Standard error for malformed activation id. */
  val activationIdIllegal = "The activation id is not valid."
  def activationIdLengthError(error: SizeError) = {
    s"${error.field} length is ${error.is.toBytes} but must be ${error.allowed.toBytes}."
  }

  /** Error messages for sequence actions. */
  val sequenceIsTooLong = "Too many actions in the sequence."
  val sequenceNoComponent = "No component specified for the sequence."
  val sequenceIsCyclic = "Sequence may not refer to itself."
  val sequenceComponentNotFound = "Sequence component does not exist."

  /** Error message for packages. */
  val bindingDoesNotExist = "Binding references a package that does not exist."
  val packageCannotBecomeBinding = "Resource is a package and cannot be converted into a binding."
  val bindingCannotReferenceBinding = "Cannot bind to another package binding."
  val requestedBindingIsNotValid = "Cannot bind to a resource that is not a package."
  val notAllowedOnBinding = "Operation not permitted on package binding."

  /** Error messages for sequence activations. */
  def sequenceRetrieveActivationTimeout(id: ActivationId) =
    s"Timeout reached when retrieving activation $id for sequence component."
  val sequenceActivationFailure = "Sequence failed."

  /** Error messages for bad requests where parameters do not conform. */
  val parametersNotAllowed = "Request defines parameters that are not allowed (e.g., reserved properties)."
  def invalidTimeout(max: FiniteDuration) = s"Timeout must be number of milliseconds up to ${max.toMillis}."

  /** Error messages for activations. */
  val abnormalInitialization = "The action did not initialize and exited unexpectedly."
  val abnormalRun = "The action did not produce a valid response and exited unexpectedly."
  def badEntityName(value: String) = s"Parameter is not a valid value for a entity name: $value"
  def badNamespace(value: String) = s"Parameter is not a valid value for a namespace: $value"
  def badEpoch(value: String) = s"Parameter is not a valid value for epoch seconds: $value"

  /** Error message for size conformance. */
  def entityTooBig(error: SizeError) = {
    s"${error.field} larger than allowed: ${error.is.toBytes} > ${error.allowed.toBytes} bytes."
  }
  def maxActivationLimitExceeded(value: Int, max: Int) = s"Activation limit of $value exceeds maximum limit of $max."
  def truncateLogs(limit: ByteSize) = {
    s"Logs were truncated because the total bytes size exceeds the limit of ${limit.toBytes} bytes."
  }

  /** Error for meta api. */
  val propertyNotFound = "Response does not include requested property."
  def invalidMedia(m: MediaType) = s"Response is not valid '${m.value}'."
  def contentTypeExtensionNotSupported(extensions: Set[String]) = {
    s"""Extension must be specified and one of ${extensions.mkString("[", ", ", "]")}."""
  }
  val unsupportedContentType = """Content type is not supported."""
  def unsupportedContentType(m: MediaType) = s"""Content type '${m.value}' is not supported."""
  val errorExtractingRequestBody = "Failed extracting request body."
  val responseNotReady = "Response not yet ready."
  val httpUnknownContentType = "Response did not specify a known content-type."
  val httpContentTypeError = "Response type in header did not match generated content type."
  val errorProcessingRequest = "There was an error processing your request."

  // Appends the container's actual response (when non-empty) to the failure text.
  def invalidInitResponse(actualResponse: String) = {
    "The action failed during initialization" + {
      Option(actualResponse) filter { _.nonEmpty } map { s =>
        s": $s"
      } getOrElse "."
    }
  }
  def invalidRunResponse(actualResponse: String) = {
    "The action did not produce a valid JSON response" + {
      Option(actualResponse) filter { _.nonEmpty } map { s =>
        s": $s"
      } getOrElse "."
    }
  }
  def truncatedResponse(length: ByteSize, maxLength: ByteSize): String = {
    s"The action produced a response that exceeded the allowed length: ${length.toBytes} > ${maxLength.toBytes} bytes."
  }
  def truncatedResponse(trunk: String, length: ByteSize, maxLength: ByteSize): String = {
    s"${truncatedResponse(length, maxLength)} The truncated response was: $trunk"
  }
  def timedoutActivation(timeout: Duration, init: Boolean) = {
    s"The action exceeded its time limits of ${timeout.toMillis} milliseconds" + {
      if (!init) "." else " during initialization."
    }
  }
  val actionRemovedWhileInvoking = "Action could not be found or may have been deleted."
}
/** Error payload returned to clients in place of rejections: a human-readable
  * message plus the transaction id, so a failure can be correlated with logs. */
case class ErrorResponse(error: String, code: TransactionId)
/** JSON (de)serialization and route-termination helpers for [[ErrorResponse]]. */
object ErrorResponse extends Directives with DefaultJsonProtocol {

  /** Terminates the route with `status`; a non-blank `error` string becomes the
    * JSON body, a blank/null one falls back to the default for the status. */
  def terminate(status: StatusCode, error: String)(implicit transid: TransactionId,
                                                   jsonPrinter: JsonPrinter): StandardRoute = {
    // Simplified from the original filter/map-to-Some/getOrElse(None) chain:
    // trim once, drop blanks, wrap in ErrorResponse — same result, one pass.
    terminate(status, Option(error).map(_.trim).filter(_.nonEmpty).map(ErrorResponse(_, transid)))
  }

  /** Terminates the route with `status` and the given (or default) error,
    * rendered as JSON or as plain "message (code: id)" text. */
  def terminate(status: StatusCode, error: Option[ErrorResponse] = None, asJson: Boolean = true)(
    implicit transid: TransactionId,
    jsonPrinter: JsonPrinter): StandardRoute = {
    val errorResponse = error getOrElse response(status)
    if (asJson) {
      complete(status, errorResponse)
    } else {
      complete(status, s"${errorResponse.error} (code: ${errorResponse.code})")
    }
  }

  /** Default error response for well-known status codes. */
  def response(status: StatusCode)(implicit transid: TransactionId): ErrorResponse = status match {
    case NotFound  => ErrorResponse(Messages.resourceDoesNotExist, transid)
    case Forbidden => ErrorResponse(Messages.notAuthorizedtoOperateOnResource, transid)
    case _         => ErrorResponse(status.defaultMessage, transid)
  }

  implicit val serializer = new RootJsonFormat[ErrorResponse] {
    def write(er: ErrorResponse) = JsObject("error" -> er.error.toJson, "code" -> er.code.meta.id.toJson)

    // Accepts {error, code} or just {error}; the latter maps to an unknown transaction.
    def read(v: JsValue) =
      Try {
        v.asJsObject.getFields("error", "code") match {
          case Seq(JsString(error), JsNumber(code)) =>
            ErrorResponse(error, TransactionId(code))
          case Seq(JsString(error)) =>
            ErrorResponse(error, TransactionId.unknown)
        }
      } getOrElse deserializationError("error response malformed")
  }
}
| tysonnorris/openwhisk | common/scala/src/main/scala/whisk/http/ErrorResponse.scala | Scala | apache-2.0 | 9,905 |
package util.plugins
import collins.graphs.{GraphPlugin => GraphPlayPlugin}
import play.api.{Application, Mode, Play}
/** Convenience accessor for the graph plugin of the currently running Play app. */
object GraphPlugin {
  /** The GraphPlugin instance, when an application is running and the plugin is enabled. */
  def option(): Option[GraphPlayPlugin] =
    for {
      app <- Play.maybeApplication
      plugin <- app.plugin[GraphPlayPlugin]
      if plugin.enabled
    } yield plugin
}
| Shopify/collins | app/util/plugins/GraphPlugin.scala | Scala | apache-2.0 | 292 |
package poly.collection.search
import poly.collection._
/**
* Enables searching functions on objects.
* @author Tongfei Chen
* @since 0.1.0
*/
/** Implicit enrichments that expose traversal/search operations as methods on
  * arbitrary values; weighted/node variants live here, plain state-space
  * syntax comes from the lower-priority parent trait. */
object ops extends LowerPriorityImplicits {

  /** Traversal syntax on any value, taking the transition function per call. */
  implicit class withSearchOps[T](val x: T) extends AnyVal {
    /**
     * Performs depth first tree traversal given the transition function.
     */
    def depthFirstTreeTraversal[U >: T](f: U => Traversable[U]) =
      StateSpace(f).depthFirstTreeTraversal(x)
    /**
     * Performs breadth first tree traversal given the transition function.
     */
    def breadthFirstTreeTraversal[U >: T](f: U => Traversable[U]) =
      StateSpace(f).breadthFirstTreeTraversal(x)
    /**
     * Performs depth first graph traversal given the transition function.
     */
    def depthFirstTraversal[U >: T : Eq](f: U => Traversable[U]) =
      EqStateSpace(f).depthFirstTraversal(x)
    /**
     * Performs breadth first graph traversal given the transition function.
     */
    def breadthFirstTraversal[U >: T : Eq](f: U => Traversable[U]) =
      EqStateSpace(f).breadthFirstTraversal(x)
    /**
     * Performs uniform cost traversal given the transition function.
     */
    def uniformCostTraversal[U >: T : Eq, R : Order : AdditiveMonoid](f: U => Traversable[(U, R)]) =
      WeightedStateSpace(f).uniformCostTraversal(x)
  }

  /** Successor syntax for states of an implicit weighted state space. */
  implicit class withWeightedStateOps[S, C](val s: S)(implicit S: WeightedStateSpace[S, C]) {
    def succ = S.succ(s)
    def succWithCost = S.succWithCost(s)
  }

  /** Syntax for search nodes that carry a state. */
  implicit class withNodeOps[N, S](val n: N)(implicit I: SearchNodeInfo[N, S]) {
    def state: S = I.state(n)
    def next(s: S): N = I.nextNode(n)(s)
  }

  /** Syntax for weighted search nodes that carry a state and an accumulated cost. */
  implicit class withWeightedNodeOps[N, S, C](val n: N)(implicit I: WeightedSearchNodeInfo[N, S, C]) {
    def state = I.state(n)
    def next(s: S, c: C): N = I.nextNode(n)(s, c)
  }
}
/** Lower-priority fallback syntax: plain (unweighted) state-space operations,
  * kept in a parent trait so the richer enrichments in `ops` win implicit
  * resolution when both would apply. */
trait LowerPriorityImplicits {
  implicit class withStateOps[S](val s: S)(implicit S: StateSpace[S]) {
    // Successor states of `s` in the implicit state space.
    def succ = S.succ(s)
  }
}
| ctongfei/poly-collection | core/src/main/scala/poly/collection/search/ops.scala | Scala | mit | 1,972 |
// IntelliJ Scala plugin type-inference test fixture (SCL-5790). The
// /*start*/../*end*/ markers delimit the expression under test, and the
// trailing comment states the expected inferred type — do not move or
// reformat them.
object SCL5790 {
  import scala.reflect.api.Universe
  class Test[U <: Universe](val u: U) {
    def someMethod: u.Type = ???
  }
  class AnotherTest[U <: Universe](val u: U) {
    val test = new Test[u.type](/*start*/u/*end*/)
    def anotherMethod: u.Type = test.someMethod
  }
}
//AnotherTest.this.u.type
/*
* Copyright 2012 Pascal Voitot
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package play.modules.reactivemongo
import javax.inject.Inject
import play.api._
import play.api.libs.concurrent.Akka
import reactivemongo.api._
import scala.concurrent.{ Await, ExecutionContext }
import scala.util.control.NonFatal
/**
 * Legacy Play plugin wiring for ReactiveMongo: parses the Mongo configuration
 * on start, exposes a connection helper, and closes connections on stop.
 * Deprecated since Play Framework 2.4 release — plugins should be modules.
 */
@deprecated("Use ReactiveMongoModule and ReactiveMongoApi.", since = "0.12.1")
class ReactiveMongoPlugin @Inject() (app: Application) extends Plugin {
  // Populated in onStart, cleared in onStop.
  private var _helper: Option[ReactiveMongoHelper] = None

  /** The initialized helper; throws if the plugin has not been started. */
  def helper = _helper.getOrElse(throw new RuntimeException(
    "ReactiveMongoPlugin error: no ReactiveMongoHelper available?"))

  /** Parses the Mongo configuration and initializes the connection helper. */
  override def onStart() {
    Logger info "ReactiveMongoPlugin starting..."
    try {
      val conf = DefaultReactiveMongoApi.parseConf(app.configuration)
      _helper = Some(ReactiveMongoHelper(conf, app))
      // NOTE(review): conf.db.get throws when the URI carries no database name —
      // presumably parseConf guarantees it; confirm.
      Logger.info("ReactiveMongoPlugin successfully started with db '%s'! Servers:\\n\\t\\t%s"
        .format(
          conf.db.get,
          conf.hosts.map { s => s"[${s._1}:${s._2}]" }.mkString("\\n\\t\\t")))
    } catch {
      case NonFatal(e) =>
        throw new ReactiveMongoPluginException("An exception occurred while initializing the ReactiveMongoPlugin.", e)
    }
  }

  /** Asks the connection to close (blocking up to 10s) and shuts the driver down. */
  override def onStop() {
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._
    Logger.info("ReactiveMongoPlugin stops, closing connections...")
    _helper.foreach { h =>
      val f = h.connection.askClose()(10.seconds)
      f.onComplete {
        case e => {
          Logger.info("ReactiveMongo Connections stopped. [" + e + "]")
        }
      }
      // Blocking here is acceptable: this runs only during application shutdown.
      Await.ready(f, 10.seconds)
      h.driver.close()
    }
    _helper = None
  }
}
/**
 * MongoDB access methods, delegating to the currently registered plugin
 * instance of the running application.
 */
@deprecated("Use ReactiveMongoModule and ReactiveMongoApi.", since = "0.12.1")
object ReactiveMongoPlugin {
  /** Returns the current instance of the driver. */
  def driver(implicit app: Application) = current.helper.driver
  /** Returns the current MongoConnection instance (the connection pool manager). */
  def connection(implicit app: Application) = current.helper.connection
  /** Returns the default database (as specified in `application.conf`). */
  def db(implicit app: Application) = current.helper.db
  /** Returns the current instance of the plugin. */
  def current(implicit app: Application): ReactiveMongoPlugin = app.plugin[ReactiveMongoPlugin] match {
    case Some(plugin) => plugin
    case _ => throw new ReactiveMongoPluginException("The ReactiveMongoPlugin has not been initialized! Please edit your conf/play.plugins file and add the following line: '400:play.modules.reactivemongo.ReactiveMongoPlugin' (400 is an arbitrary priority and may be changed to match your needs).")
  }
  /** Returns the current instance of the plugin (from a [[play.Application]] - Scala's [[play.api.Application]] equivalent for Java). */
  def current(app: play.Application): ReactiveMongoPlugin = app.plugin(classOf[ReactiveMongoPlugin]) match {
    case plugin if plugin != null => plugin
    case _ => throw new ReactiveMongoPluginException("The ReactiveMongoPlugin has not been initialized! Please edit your conf/play.plugins file and add the following line: '400:play.modules.reactivemongo.ReactiveMongoPlugin' (400 is an arbitrary priority and may be changed to match your needs).")
  }
}
/** Lazily materializes driver, connection and default database from a parsed
  * Mongo URI; nothing is created until first access. */
private[reactivemongo] case class ReactiveMongoHelper(parsedURI: MongoConnection.ParsedURI, app: Application) {
  implicit val ec: ExecutionContext = ExecutionContext.Implicits.global
  lazy val driver = new MongoDriver(Option(app.configuration.underlying))
  lazy val connection = driver.connection(parsedURI)
  // NOTE(review): parsedURI.db.get throws when the URI has no database segment.
  lazy val db = DB(parsedURI.db.get, connection)
}
| avdv/Play-ReactiveMongo | src/main/scala/play/modules/reactivemongo/ReactiveMongoPlugin.scala | Scala | apache-2.0 | 4,368 |
package wykopml.storage
import java.time.LocalDateTime
import com.websudos.phantom.dsl._
import wykopml.bo.Wykop
/** Phantom table mapping for Wykop entries, partitioned by the numeric wykop id. */
class WykopsTable extends CassandraTable[WykopsTable, Wykop] {
  object id extends IntColumn(this) with PartitionKey[Int]
  object title extends StringColumn(this)
  object description extends StringColumn(this)
  object author extends StringColumn(this)
  object tags extends SetColumn[WykopsTable, Wykop, String](this)
  object url extends StringColumn(this)
  object numberOfPoints extends IntColumn(this)
  object numberOfComments extends IntColumn(this)
  object publishedAt extends DateTimeColumn(this)
  object isOnMain extends BooleanColumn(this)

  /** Materializes a Wykop from a Cassandra row.
    * NOTE(review): the stored publishedAt value is deliberately discarded and
    * replaced with the current time — see the FIXME below about phantom
    * persisting a wrong date. */
  def fromRow(row: Row): Wykop = {
    Wykop(
      id(row).toInt,
      title(row),
      description(row),
      author(row),
      tags(row),
      url(row),
      numberOfPoints(row),
      numberOfComments(row),
      LocalDateTime.now(), //publishedAt(row), //FIXME: wrong date saved by phantom!? :-/
      isOnMain(row)
    )
  }
}
| blstream/wykopml | updater/src/main/scala/wykopml/storage/WykopsTable.scala | Scala | mit | 1,022 |
package edu.gemini.spModel.obs
import edu.gemini.pot.sp.{SPComponentType, ISPObservation}
import edu.gemini.spModel.core.Site
import edu.gemini.spModel.obs.plannedtime.PlannedTimeCalculator
import edu.gemini.spModel.rich.pot.sp.obsWrapper
import edu.gemini.spModel.rich.shared.immutable._
import edu.gemini.spModel.target.obsComp.TargetObsComp
import edu.gemini.util.skycalc.SiderealTarget
import edu.gemini.util.skycalc.calc.{Interval, TargetCalculator}
import jsky.coords.WorldCoords
import edu.gemini.skycalc.{TimeUtils, Coordinates}
import scala.collection.JavaConverters._
/** Builds (and caches per observation, via SPObsCache) the target calculator
  * used to evaluate target visibility over an observation's scheduling block. */
object ObsTargetCalculatorService {

  /** Assembles a TargetCalculator for `obs`, or None when the observation
    * lacks a single determinable site, a scheduling block, or base-position
    * coordinates. */
  private def create(obs: ISPObservation): Option[TargetCalculator] = {
    // First, determine the Site at which the instrument is located based on
    // the instrument used, e.g., GMOS-N or S. If no instrument or a
    // multi-site instrument, None.
    def site = obs.sites.toList match {
      case List(s) => Some(s)
      case _ => None
    }
    def block = obs.spObservation.flatMap(_.getSchedulingBlock.asScalaOpt)
    // Now, based on the SchedulingBlock determine if a TargetCalc should be created.
    // Get the TargetEnvironment if it exists, and from there, extract the RA and Dec.
    def coords = obs.findObsComponentByType(SPComponentType.TELESCOPE_TARGETENV).flatMap {
      _.getDataObject
        .asInstanceOf[TargetObsComp]
        .getTargetEnvironment
        .getBase
        .getSkycalcCoordinates(block.map(_.start : java.lang.Long).asGeminiOpt).asScalaOpt
    }
    // Builds the calculator over [block start, block start + duration).
    def calc(s: Site, b: SchedulingBlock, c: Coordinates): TargetCalculator = {
      val st = SiderealTarget(new WorldCoords(c.getRaDeg, c.getDecDeg))
      // Andy says:
      // duration is equivalent to science time, if specific explicitly
      // science time is plannedTime.totalTime - plannedTime.setup.time
      // Ideally, if you hover over duration box in GUI, should say acquisition + science time OR not.
      // Since we need start < end explicitly, if the duration is None, we cannot use it.
      val duration = b.duration.toOption getOrElse calculateRemainingTime(obs)
      val end = b.start + duration
      // If the duration is going to be smaller than the default step size of 30 seconds used by the
      // target calc, we will have divide by 0 issues, so take this into account.
      val stepSize = if (duration >= TimeUtils.seconds(30)) TimeUtils.seconds(30) else duration
      if (end > b.start) {
        TargetCalculator(s, st, Interval(b.start, end), stepSize)
      } else {
        // Zero/negative duration: fall back to an instantaneous calculation at block start.
        TargetCalculator(s, st, b.start)
      }
    }
    for {
      s <- site
      b <- block
      c <- coords
    } yield calc(s, b, c)
  }

  /** Returns the cached calculator for `obs` when present, otherwise builds one. */
  private def lookupOrCreate(obs: ISPObservation): Option[TargetCalculator] =
    (for {
      tcOpt <- Option(SPObsCache.getTargetCalculator(obs))
      tc <- tcOpt.asScalaOpt
    } yield tc).orElse(create(obs))

  /** Returns the target calculator for `obs`, refreshing the SPObsCache entry. */
  def targetCalculation(obs: ISPObservation): Option[TargetCalculator] = {
    val res = lookupOrCreate(obs)
    SPObsCache.setTargetCalculator(obs, res.asGeminiOpt)
    res
  }

  /** Sum of totalTime over all not-yet-executed planned steps of the observation. */
  def calculateRemainingTime(ispObservation: ISPObservation): Long =
    PlannedTimeCalculator.instance
      .calc(ispObservation)
      .steps.asScala
      .filterNot(_.executed)
      .map(_.totalTime)
      .sum
}
| spakzad/ocs | bundle/edu.gemini.pot/src/main/scala/edu/gemini/spModel/obs/ObsTargetCalculatorService.scala | Scala | bsd-3-clause | 3,302 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.bridge.scala
import org.apache.flink.api.common.JobExecutionResult
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala.{DataSet, ExecutionEnvironment}
import org.apache.flink.table.api.{TableEnvironment, _}
import org.apache.flink.table.catalog.{CatalogManager, GenericInMemoryCatalog}
import org.apache.flink.table.descriptors.{BatchTableDescriptor, ConnectorDescriptor}
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.functions.{AggregateFunction, TableFunction}
import org.apache.flink.table.module.ModuleManager
/**
* The [[TableEnvironment]] for a Scala batch [[ExecutionEnvironment]] that works
* with [[DataSet]]s.
*
* A TableEnvironment can be used to:
* - convert a [[DataSet]] to a [[Table]]
* - register a [[DataSet]] in the [[TableEnvironment]]'s catalog
* - register a [[Table]] in the [[TableEnvironment]]'s catalog
* - scan a registered table to obtain a [[Table]]
* - specify a SQL query on registered tables to obtain a [[Table]]
* - convert a [[Table]] into a [[DataSet]]
* - explain the AST and execution plan of a [[Table]]
*/
trait BatchTableEnvironment extends TableEnvironment {
/**
* Registers a [[TableFunction]] under a unique name in the TableEnvironment's catalog.
* Registered functions can be referenced in Table API and SQL queries.
*
* @param name The name under which the function is registered.
* @param tf The TableFunction to register.
* @tparam T The type of the output row.
*/
def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit
/**
* Registers an [[AggregateFunction]] under a unique name in the TableEnvironment's catalog.
* Registered functions can be referenced in Table API and SQL queries.
*
* @param name The name under which the function is registered.
* @param f The AggregateFunction to register.
* @tparam T The type of the output value.
* @tparam ACC The type of aggregate accumulator.
*/
def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: AggregateFunction[T, ACC]): Unit
/**
* Converts the given [[DataSet]] into a [[Table]].
*
* The field names of the [[Table]] are automatically derived from the type of the [[DataSet]].
*
* @param dataSet The [[DataSet]] to be converted.
* @tparam T The type of the [[DataSet]].
* @return The converted [[Table]].
*/
def fromDataSet[T](dataSet: DataSet[T]): Table
/**
* Converts the given [[DataSet]] into a [[Table]] with specified field names.
*
* There are two modes for mapping original fields to the fields of the [[Table]]:
*
* 1. Reference input fields by name:
* All fields in the schema definition are referenced by name
* (and possibly renamed using an alias (as). In this mode, fields can be reordered and
* projected out. This mode can be used for any input type, including POJOs.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* val table: Table = tableEnv.fromDataSet(
* set,
* $"_2", // reorder and use the original field
* $"_1" as "name" // reorder and give the original field a better name
* )
* }}}
*
* 2. Reference input fields by position:
* In this mode, fields are simply renamed. This mode can only be
* used if the input type has a defined field order (tuple, case class, Row) and none of
* the `fields` references a field of the input type.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* val table: Table = tableEnv.fromDataSet(
* set,
* $"a", // renames the first field to 'a'
* $"b" // renames the second field to 'b'
* )
* }}}
*
* @param dataSet The [[DataSet]] to be converted.
* @param fields The fields expressions to map original fields of the DataSet to the fields of
* the [[Table]].
* @tparam T The type of the [[DataSet]].
* @return The converted [[Table]].
*/
def fromDataSet[T](dataSet: DataSet[T], fields: Expression*): Table
/**
* Creates a view from the given [[DataSet]].
* Registered views can be referenced in SQL queries.
*
* The field names of the [[Table]] are automatically derived from the type of the [[DataSet]].
*
* The view is registered in the namespace of the current catalog and database. To register the
* view in a different catalog use [[createTemporaryView]].
*
* Temporary objects can shadow permanent ones. If a permanent object in a given path exists,
* it will be inaccessible in the current session. To make the permanent object available again
* you can drop the corresponding temporary object.
*
* @param name The name under which the [[DataSet]] is registered in the catalog.
* @param dataSet The [[DataSet]] to register.
* @tparam T The type of the [[DataSet]] to register.
* @deprecated use [[createTemporaryView]]
*/
@deprecated
def registerDataSet[T](name: String, dataSet: DataSet[T]): Unit
/**
* Creates a view from the given [[DataSet]] in a given path.
* Registered tables can be referenced in SQL queries.
*
* The field names of the [[Table]] are automatically derived
* from the type of the [[DataSet]].
*
* Temporary objects can shadow permanent ones. If a permanent object in a given path exists,
* it will be inaccessible in the current session. To make the permanent object available again
* you can drop the corresponding temporary object.
*
* @param path The path under which the [[DataSet]] is created.
* See also the [[TableEnvironment]] class description for the format of the path.
* @param dataSet The [[DataSet]] out of which to create the view.
* @tparam T The type of the [[DataSet]].
*/
def createTemporaryView[T](path: String, dataSet: DataSet[T]): Unit
/**
* Creates a view from the given [[DataSet]] in a given path with specified field names.
* Registered views can be referenced in SQL queries.
*
* There are two modes for mapping original fields to the fields of the View:
*
* 1. Reference input fields by name:
* All fields in the schema definition are referenced by name
* (and possibly renamed using an alias (as). In this mode, fields can be reordered and
* projected out. This mode can be used for any input type, including POJOs.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* tableEnv.registerDataSet(
* "myTable",
* set,
* $"_2", // reorder and use the original field
* $"_1" as "name" // reorder and give the original field a better name
* );
* }}}
*
* 2. Reference input fields by position:
* In this mode, fields are simply renamed. This mode can only be
* used if the input type has a defined field order (tuple, case class, Row) and none of
* the `fields` references a field of the input type.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* tableEnv.registerDataSet(
* "myTable",
* set,
* $"a", // renames the first field to 'a'
* $"b" // renames the second field to 'b'
* )
* }}}
*
* The view is registered in the namespace of the current catalog and database. To register the
* view in a different catalog use [[createTemporaryView]].
*
* Temporary objects can shadow permanent ones. If a permanent object in a given path exists,
* it will be inaccessible in the current session. To make the permanent object available again
* you can drop the corresponding temporary object.
*
* @param name The name under which the [[DataSet]] is registered in the catalog.
* @param dataSet The [[DataSet]] to register.
* @param fields The fields expressions to map original fields of the DataSet to the fields of
* the View.
* @tparam T The type of the [[DataSet]] to register.
* @deprecated use [[createTemporaryView]]
*/
@deprecated
def registerDataSet[T](name: String, dataSet: DataSet[T], fields: Expression*): Unit
/**
* Creates a view from the given [[DataSet]] in a given path with specified field names.
* Registered views can be referenced in SQL queries.
*
* There are two modes for mapping original fields to the fields of the View:
*
* 1. Reference input fields by name:
* All fields in the schema definition are referenced by name
* (and possibly renamed using an alias (as). In this mode, fields can be reordered and
* projected out. This mode can be used for any input type, including POJOs.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* tableEnv.createTemporaryView(
* "cat.db.myTable",
* set,
* $"_2", // reorder and use the original field
* $"_1" as "name" // reorder and give the original field a better name
* )
* }}}
*
* 2. Reference input fields by position:
* In this mode, fields are simply renamed. This mode can only be
* used if the input type has a defined field order (tuple, case class, Row) and none of
* the `fields` references a field of the input type.
*
* Example:
*
* {{{
* val set: DataSet[(String, Long)] = ...
* tableEnv.createTemporaryView(
* "cat.db.myTable",
* set,
* $"a", // renames the first field to 'a'
* $"b" // renames the second field to 'b'
* )
* }}}
*
* Temporary objects can shadow permanent ones. If a permanent object in a given path exists,
* it will be inaccessible in the current session. To make the permanent object available again
* you can drop the corresponding temporary object.
*
* @param path The path under which the [[DataSet]] is created.
* See also the [[TableEnvironment]] class description for the format of the
* path.
* @param dataSet The [[DataSet]] out of which to create the view.
* @param fields The fields expressions to map original fields of the DataSet to the fields of
* the View.
* @tparam T The type of the [[DataSet]].
*/
def createTemporaryView[T](path: String, dataSet: DataSet[T], fields: Expression*): Unit
/**
* Converts the given [[Table]] into a [[DataSet]] of a specified type.
*
* The fields of the [[Table]] are mapped to [[DataSet]] fields as follows:
* - [[org.apache.flink.types.Row]] and [[org.apache.flink.api.java.tuple.Tuple]]
* types: Fields are mapped by position, field types must match.
* - POJO [[DataSet]] types: Fields are mapped by field name, field types must match.
*
* @param table The [[Table]] to convert.
* @tparam T The type of the resulting [[DataSet]].
* @return The converted [[DataSet]].
*/
def toDataSet[T: TypeInformation](table: Table): DataSet[T]
/**
* Triggers the program execution. The environment will execute all parts of
* the program.
*
* The program execution will be logged and displayed with the provided name
*
* It calls the ExecutionEnvironment#execute on the underlying
* [[ExecutionEnvironment]]. In contrast to the [[TableEnvironment]] this
* environment translates queries eagerly.
*
* @param jobName Desired name of the job
* @return The result of the job execution, containing elapsed time and accumulators.
* @throws Exception which occurs during job execution.
*/
@throws[Exception]
override def execute(jobName: String): JobExecutionResult
/**
* Creates a temporary table from a descriptor.
*
* Descriptors allow for declaring the communication to external systems in an
* implementation-agnostic way. The classpath is scanned for suitable table factories that match
* the desired configuration.
*
* The following example shows how to read from a connector using a JSON format and
* registering a temporary table as "MyTable":
*
* {{{
*
* tableEnv
* .connect(
* new ExternalSystemXYZ()
* .version("0.11"))
* .withFormat(
* new Json()
* .jsonSchema("{...}")
* .failOnMissingField(false))
* .withSchema(
* new Schema()
* .field("user-name", "VARCHAR").from("u_name")
 *             .field("count", "DECIMAL"))
 *         .createTemporaryTable("MyTable")
* }}}
*
* @param connectorDescriptor connector descriptor describing the external system
* @deprecated The SQL `CREATE TABLE` DDL is richer than this part of the API.
* This method might be refactored in the next versions.
* Please use [[executeSql]] to register a table instead.
*/
@deprecated
override def connect(connectorDescriptor: ConnectorDescriptor): BatchTableDescriptor
}
object BatchTableEnvironment {

  /**
   * Creates a [[BatchTableEnvironment]] for the given Scala batch
   * [[ExecutionEnvironment]] using a default [[TableConfig]].
   *
   * A TableEnvironment can be used to:
   * - convert a [[DataSet]] to a [[Table]]
   * - register a [[DataSet]] or a [[Table]] in the TableEnvironment's catalog
   * - scan a registered table to obtain a [[Table]]
   * - specify a SQL query on registered tables to obtain a [[Table]]
   * - convert a [[Table]] into a [[DataSet]]
   * - explain the AST and execution plan of a [[Table]]
   *
   * @param executionEnvironment The Scala batch [[ExecutionEnvironment]] of the TableEnvironment.
   */
  def create(executionEnvironment: ExecutionEnvironment): BatchTableEnvironment =
    create(executionEnvironment, new TableConfig)

  /**
   * Creates a [[BatchTableEnvironment]] for the given Scala batch
   * [[ExecutionEnvironment]] and [[TableConfig]].
   *
   * The concrete implementation class is loaded reflectively so this API
   * module does not depend on the implementation at compile time.
   *
   * @param executionEnvironment The Scala batch [[ExecutionEnvironment]] of the TableEnvironment.
   * @param tableConfig The configuration of the TableEnvironment.
   */
  def create(executionEnvironment: ExecutionEnvironment, tableConfig: TableConfig)
    : BatchTableEnvironment =
    try {
      // temporary solution until FLINK-15635 is fixed
      val contextClassLoader = Thread.currentThread.getContextClassLoader
      val modules = new ModuleManager
      val defaultCatalogName = "default_catalog"
      val catalogs = CatalogManager.newBuilder
        .classLoader(contextClassLoader)
        .config(tableConfig.getConfiguration)
        .defaultCatalog(
          defaultCatalogName,
          new GenericInMemoryCatalog(defaultCatalogName, "default_database"))
        .executionConfig(executionEnvironment.getConfig)
        .build
      val implClass = Class
        .forName("org.apache.flink.table.api.bridge.scala.internal.BatchTableEnvironmentImpl")
      val ctor = implClass.getConstructor(
        classOf[ExecutionEnvironment],
        classOf[TableConfig],
        classOf[CatalogManager],
        classOf[ModuleManager])
      ctor
        .newInstance(executionEnvironment, tableConfig, catalogs, modules)
        .asInstanceOf[BatchTableEnvironment]
    } catch {
      // Wrap any failure (missing impl jar, constructor change, ...) in a TableException.
      case t: Throwable => throw new TableException("Create BatchTableEnvironment failed.", t)
    }
}
| darionyaphet/flink | flink-table/flink-table-api-scala-bridge/src/main/scala/org/apache/flink/table/api/bridge/scala/BatchTableEnvironment.scala | Scala | apache-2.0 | 16,743 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import org.ensime.api._
import org.ensime.fixture._
import org.ensime.util.EnsimeSpec
import scala.reflect.internal.util.{ OffsetPosition, RangePosition }
class ImplicitAnalyzerSpec extends EnsimeSpec
with IsolatedRichPresentationCompilerFixture
with RichPresentationCompilerTestUtils
with ReallyRichPresentationCompilerFixture {
def original = EnsimeConfigFixture.EmptyTestProject
  // Type-checks `content` as a throwaway source file and returns the implicit
  // conversions/parameters the presentation compiler reports over the whole
  // file, flattened into plain tuples so the test assertions can compare them
  // structurally. Note the two branches produce tuples of different arity.
  def getImplicitDetails(cc: RichPresentationCompiler, content: String) = {
    val file = srcFile(cc.config, "abc.scala", contents(content))
    cc.askLoadedTyped(file)
    // Range covering the entire file so every implicit usage is reported.
    val pos = new RangePosition(file, 0, 0, file.length)
    val dets = new ImplicitAnalyzer(cc).implicitDetails(pos)
    dets.map {
      case c: ImplicitConversionInfo => (
        "conversion",
        content.substring(c.start, c.end), // source text being converted
        c.fun.name
      )
      case c: ImplicitParamInfo => (
        "param",
        content.substring(c.start, c.end), // call site receiving the implicits
        c.fun.name,
        c.params.map { p => p.name },
        c.funIsImplicit
      )
    }
  }
"ImplicitAnalyzer" should "render implicit conversions" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Test {}
object I {
implicit def StringToTest(v: String): Test = new Test
val t: Test = "sample";
}
"""
)
dets should ===(List(
("conversion", "\\"sample\\"", "StringToTest")
))
}
}
it should "render implicit parameters passed to implicit conversion functions" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Test {}
class Thing {}
object I {
implicit def myThing = new Thing
implicit def StringToTest(v: String)(implicit th: Thing): Test = new Test
val t: Test = "sample"
}
"""
)
dets should ===(List(
("param", "\\"sample\\"", "StringToTest", List("myThing"), true),
("conversion", "\\"sample\\"", "StringToTest")
))
}
}
it should "render implicit parameters" in {
withPresCompiler { (config, cc) =>
val dets = getImplicitDetails(
cc,
"""
package com.example
class Thing {}
class Thong {}
object I {
implicit def myThing = new Thing
implicit val myThong = new Thong
def zz(u: Int)(v: String)(implicit s: Thing, t: Thong) = u
def yy(implicit s: Thing) = s
val t = zz(1)("abc") // Two explicit applications
val z = yy // Zero explicit application
}
"""
)
dets should ===(List(
("param", "zz(1)(\\"abc\\")", "zz", List("myThing", "myThong"), false),
("param", "yy", "yy", List("myThing"), false)
))
}
}
it should "work with offset positions" in {
withPresCompiler { (config, cc) =>
val content = """
package com.example
class Test {}
object I {
implicit def StringToTest(v: String): Test = new Test
val t: Test = "sample"/*1*/;
}
"""
val file = srcFile(cc.config, "abc.scala", content)
cc.askLoadedTyped(file)
val implicitPos = content.indexOf("/*1*/")
val pos = new OffsetPosition(file, implicitPos)
val dets = new ImplicitAnalyzer(cc).implicitDetails(pos)
dets should have length 1
val pos1 = new OffsetPosition(file, implicitPos + 1)
val dets1 = new ImplicitAnalyzer(cc).implicitDetails(pos1)
dets1 shouldBe empty
}
}
}
| d1egoaz/ensime-sbt | src/sbt-test/sbt-ensime/ensime-server/core/src/it/scala/org/ensime/core/ImplicitAnalyzerSpec.scala | Scala | apache-2.0 | 3,942 |
package unluac.parse
class LString(val size: BSizeT, _value: String) extends LObject {

  // Lua serializes strings with a trailing NUL byte; strip it to recover the
  // logical value. An empty raw value stays empty.
  final val value: String = if (_value.length == 0) "" else _value.substring(0, _value.length - 1)

  /** The dereferenced (plain) string value. */
  override def deref: String = {
    value
  }

  /** Renders the string in quoted form for output. */
  override def toString: String = {
    "\"" + value + "\""
  }

  /** Two LStrings are equal when their NUL-stripped values are equal; `size` is ignored. */
  override def equals(o: Any): Boolean = {
    o match {
      case os: LString =>
        os.value == value
      case _ => false
    }
  }

  // equals is value-based, so hashCode must agree with it. It was missing
  // before, which silently broke use of LString as a hash map/set key.
  override def hashCode: Int = value.hashCode
}
package com.twitter.finagle.exp.mysql.transport
import com.twitter.finagle.client.Transporter
import com.twitter.finagle.exp.mysql.{Request, Result}
import com.twitter.finagle.netty3.{ChannelSnooper, Netty3Transporter}
import com.twitter.finagle.Stack
import com.twitter.util.NonFatal
import java.util.logging.{Level, Logger}
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel._
import org.jboss.netty.channel.{Channels, ChannelPipelineFactory}
import org.jboss.netty.handler.codec.frame.FrameDecoder
/**
* Decodes logical MySQL packets that could be fragmented across
* frames. MySQL packets are a length encoded set of bytes written
* in little endian byte order.
*/
class PacketFrameDecoder extends FrameDecoder {
  // Returns a complete Packet, or null to tell netty that more bytes are
  // needed before a full packet can be framed.
  override def decode(ctx: ChannelHandlerContext, channel: Channel, buffer: ChannelBuffer): Packet = {
    // Wait until a full header (3-byte length + 1-byte sequence) is buffered.
    if (buffer.readableBytes < Packet.HeaderSize)
      return null
    // Remember the read position so we can rewind if the body is incomplete.
    buffer.markReaderIndex()
    val header = new Array[Byte](Packet.HeaderSize)
    buffer.readBytes(header)
    val br = BufferReader(header)
    val length = br.readInt24() // little-endian 3-byte payload length
    val seq = br.readUnsignedByte() // packet sequence number
    if (buffer.readableBytes < length) {
      // Body has not fully arrived yet: rewind past the consumed header so the
      // next invocation re-reads it once more bytes are available.
      buffer.resetReaderIndex()
      return null
    }
    val body = new Array[Byte](length)
    buffer.readBytes(body)
    Packet(seq, Buffer(body))
  }
}
// Serializes outbound Packet objects into their wire-format ChannelBuffer.
// Failures are reported on the write future rather than thrown, and non-Packet
// messages are rejected with a ChannelException.
class PacketEncoder extends SimpleChannelDownstreamHandler {
  override def writeRequested(ctx: ChannelHandlerContext, evt: MessageEvent) =
    evt.getMessage match {
      case p: Packet =>
        try {
          val cb = p.toChannelBuffer
          Channels.write(ctx, evt.getFuture, cb, evt.getRemoteAddress)
        } catch {
          // Only non-fatal failures are converted; fatal errors still propagate.
          case NonFatal(e) =>
            evt.getFuture.setFailure(new ChannelException(e.getMessage))
        }
      case unknown =>
        evt.getFuture.setFailure(new ChannelException(
          "Unsupported request type %s".format(unknown.getClass.getName)))
    }
}
/**
* A Netty3 pipeline that is responsible for framing network
* traffic in terms of mysql logical packets.
*/
object MysqlClientPipelineFactory extends ChannelPipelineFactory {
  /**
   * Builds a pipeline that frames inbound bytes into MySQL packets and
   * serializes outbound packets to the wire format.
   */
  def getPipeline = {
    val p = Channels.pipeline()
    p.addLast("packetDecoder", new PacketFrameDecoder)
    p.addLast("packetEncoder", new PacketEncoder)
    p
  }
}
/**
* Responsible for the transport layer plumbing required to produce
* a Transporter[Packet, Packet]. The current implementation uses
* Netty3.
*/
object MysqlTransporter {
  /** Builds a Netty3-backed Transporter that exchanges framed MySQL packets. */
  def apply(params: Stack.Params): Transporter[Packet, Packet] =
    Netty3Transporter(MysqlClientPipelineFactory, params)
}
package app
import service._
import util.Directory._
import util.{UsersAuthenticator, OwnerAuthenticator}
import jp.sf.amateras.scalatra.forms._
import org.apache.commons.io.FileUtils
import org.scalatra.FlashMapSupport
import org.scalatra.i18n.Messages
import service.WebHookService.WebHookPayload
import util.JGitUtil.CommitInfo
import util.ControlUtil._
import org.eclipse.jgit.api.Git
// Concrete controller: mixes the settings routes defined in
// RepositorySettingsControllerBase with the service and authenticator
// implementations its self-type requires.
class RepositorySettingsController extends RepositorySettingsControllerBase
  with RepositoryService with AccountService with WebHookService
  with OwnerAuthenticator with UsersAuthenticator
trait RepositorySettingsControllerBase extends ControllerBase with FlashMapSupport {
self: RepositoryService with AccountService with WebHookService
with OwnerAuthenticator with UsersAuthenticator =>
// for repository options
case class OptionsForm(description: Option[String], defaultBranch: String, isPrivate: Boolean)
val optionsForm = mapping(
"description" -> trim(label("Description" , optional(text()))),
"defaultBranch" -> trim(label("Default Branch" , text(required, maxlength(100)))),
"isPrivate" -> trim(label("Repository Type", boolean()))
)(OptionsForm.apply)
// for collaborator addition
case class CollaboratorForm(userName: String)
val collaboratorForm = mapping(
"userName" -> trim(label("Username", text(required, collaborator)))
)(CollaboratorForm.apply)
// for web hook url addition
case class WebHookForm(url: String)
val webHookForm = mapping(
"url" -> trim(label("url", text(required, webHook)))
)(WebHookForm.apply)
/**
* Redirect to the Options page.
*/
get("/:owner/:repository/settings")(ownerOnly { repository =>
redirect(s"/${repository.owner}/${repository.name}/settings/options")
})
/**
* Display the Options page.
*/
get("/:owner/:repository/settings/options")(ownerOnly {
settings.html.options(_, flash.get("info"))
})
/**
* Save the repository options.
*/
post("/:owner/:repository/settings/options", optionsForm)(ownerOnly { (form, repository) =>
saveRepositoryOptions(
repository.owner,
repository.name,
form.description,
form.defaultBranch,
repository.repository.parentUserName.map { _ =>
repository.repository.isPrivate
} getOrElse form.isPrivate
)
flash += "info" -> "Repository settings has been updated."
redirect(s"/${repository.owner}/${repository.name}/settings/options")
})
/**
* Display the Collaborators page.
*/
get("/:owner/:repository/settings/collaborators")(ownerOnly { repository =>
settings.html.collaborators(
getCollaborators(repository.owner, repository.name),
getAccountByUserName(repository.owner).get.isGroupAccount,
repository)
})
/**
* Add the collaborator.
*/
post("/:owner/:repository/settings/collaborators/add", collaboratorForm)(ownerOnly { (form, repository) =>
if(!getAccountByUserName(repository.owner).get.isGroupAccount){
addCollaborator(repository.owner, repository.name, form.userName)
}
redirect(s"/${repository.owner}/${repository.name}/settings/collaborators")
})
/**
* Add the collaborator.
*/
get("/:owner/:repository/settings/collaborators/remove")(ownerOnly { repository =>
if(!getAccountByUserName(repository.owner).get.isGroupAccount){
removeCollaborator(repository.owner, repository.name, params("name"))
}
redirect(s"/${repository.owner}/${repository.name}/settings/collaborators")
})
/**
* Display the web hook page.
*/
get("/:owner/:repository/settings/hooks")(ownerOnly { repository =>
settings.html.hooks(getWebHookURLs(repository.owner, repository.name), repository, flash.get("info"))
})
/**
* Add the web hook URL.
*/
post("/:owner/:repository/settings/hooks/add", webHookForm)(ownerOnly { (form, repository) =>
addWebHookURL(repository.owner, repository.name, form.url)
redirect(s"/${repository.owner}/${repository.name}/settings/hooks")
})
/**
* Delete the web hook URL.
*/
get("/:owner/:repository/settings/hooks/delete")(ownerOnly { repository =>
deleteWebHookURL(repository.owner, repository.name, params("url"))
redirect(s"/${repository.owner}/${repository.name}/settings/hooks")
})
/**
* Send the test request to registered web hook URLs.
*/
get("/:owner/:repository/settings/hooks/test")(ownerOnly { repository =>
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
import scala.collection.JavaConverters._
val commits = git.log
.add(git.getRepository.resolve(repository.repository.defaultBranch))
.setMaxCount(3)
.call.iterator.asScala.map(new CommitInfo(_))
getWebHookURLs(repository.owner, repository.name) match {
case webHookURLs if(webHookURLs.nonEmpty) =>
for(ownerAccount <- getAccountByUserName(repository.owner)){
callWebHook(repository.owner, repository.name, webHookURLs,
WebHookPayload(git, ownerAccount, "refs/heads/" + repository.repository.defaultBranch, repository, commits.toList, ownerAccount))
}
case _ =>
}
flash += "info" -> "Test payload deployed!"
}
redirect(s"/${repository.owner}/${repository.name}/settings/hooks")
})
/**
* Display the delete repository page.
*/
get("/:owner/:repository/settings/delete")(ownerOnly {
settings.html.delete(_)
})
/**
* Delete the repository.
*/
post("/:owner/:repository/settings/delete")(ownerOnly { repository =>
deleteRepository(repository.owner, repository.name)
FileUtils.deleteDirectory(getRepositoryDir(repository.owner, repository.name))
FileUtils.deleteDirectory(getWikiRepositoryDir(repository.owner, repository.name))
FileUtils.deleteDirectory(getTemporaryDir(repository.owner, repository.name))
redirect(s"/${repository.owner}")
})
/**
* Provides duplication check for web hook url.
*/
  private def webHook: Constraint = new Constraint(){
    // Rejects a URL that is already registered for this repository; returning
    // None (no error message) means the value passes validation.
    override def validate(name: String, value: String, messages: Messages): Option[String] =
      getWebHookURLs(params("owner"), params("repository")).map(_.url).find(_ == value).map(_ => "URL had been registered already.")
  }
/**
* Provides Constraint to validate the collaborator name.
*/
  private def collaborator: Constraint = new Constraint(){
    // Valid only for an existing, non-group user who is neither the owner nor
    // already a collaborator of the repository.
    override def validate(name: String, value: String, messages: Messages): Option[String] =
      getAccountByUserName(value) match {
        case None => Some("User does not exist.")
        // Group accounts deliberately get the same message as missing users so
        // their existence is not revealed here.
        case Some(x) if(x.isGroupAccount)
                  => Some("User does not exist.")
        case Some(x) if(x.userName == params("owner") || getCollaborators(params("owner"), params("repository")).contains(x.userName))
                  => Some("User can access this repository already.")
        case _ => None
      }
  }
} | geeksville/gitbucket | src/main/scala/app/RepositorySettingsController.scala | Scala | apache-2.0 | 7,140 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.step.base
import javax.servlet.FilterChain
import com.rackspace.com.papi.components.checker.servlet._
import com.rackspace.com.papi.components.checker.step.results.{MismatchResult, MultiFailResult, Result}
import scala.collection.mutable.ListBuffer
//
//  Steps that are connected to other steps
//
abstract class ConnectedStep(id : String, label : String, val next : Array[Step]) extends Step (id, label) {

  //
  // Check the current step. If the step is a match return a new updated
  // context, if not then return None.
  //
  def checkStep(req : CheckerServletRequest, resp : CheckerServletResponse, chain : FilterChain, context : StepContext) : Option[StepContext] = None

  //
  // Check the current step. If the step is a match return the next URI level,
  // if not then return -1. This method is kept for compatibility reasons:
  // there was a time where the only context was the URI level.
  //
  final def checkStep(req : CheckerServletRequest, resp : CheckerServletResponse, chain : FilterChain, uriLevel : Int) : Int =
    checkStep(req, resp, chain, StepContext(uriLevel)) match {
      case Some(context) => context.uriLevel
      case None => -1
    }

  //
  // The error message when there is a step mismatch.
  //
  val mismatchMessage : String = "Step Mismatch"

  //
  // Try each next step in order. Returns a single-element array holding the
  // first valid result (remaining steps are not evaluated), or the array of
  // all failure results when no step succeeds.
  //
  def nextStep (req : CheckerServletRequest,
                resp : CheckerServletResponse,
                chain : FilterChain,
                context : StepContext) : Array[Result] = {
    val failures = new ListBuffer[Result]
    var success : Option[Result] = None
    val it = next.iterator
    while (success.isEmpty && it.hasNext) {
      it.next().check(req, resp, chain, context) match {
        case Some(result) if result.valid => success = Some(result)
        case Some(result) => failures += result
        case None => // step produced no result; keep scanning
      }
    }
    success.map(Array(_)).getOrElse(failures.toArray)
  }

  //
  // Check this step; if successful, check next relevant steps. A single
  // downstream result is tagged with this step's id, multiple failures are
  // wrapped in a MultiFailResult, and a non-matching step yields a
  // MismatchResult.
  //
  override def check(req : CheckerServletRequest,
                     resp : CheckerServletResponse,
                     chain : FilterChain,
                     context : StepContext) : Option[Result] =
    checkStep(req, resp, chain, context) match {
      case Some(nextContext) =>
        // Give the context handler (if any) a chance to transform the context.
        val handledContext = nextContext.handler
          .map { handler => handler.inStep(this, req, resp, nextContext) }
          .getOrElse(nextContext)
        nextStep(req, resp, chain, handledContext) match {
          case Array(single) =>
            single.addStepId(id)
            Some(single)
          case results =>
            Some(new MultiFailResult(results, id))
        }
      case None =>
        Some(new MismatchResult(mismatchMessage, context, id))
    }
}
| tylerroyal/api-checker | core/src/main/scala/com/rackspace/com/papi/components/checker/step/base/ConnectedStep.scala | Scala | apache-2.0 | 3,494 |
package wykopml.spark
import com.typesafe.scalalogging.StrictLogging
import org.apache.spark.{SparkConf, SparkContext}
case object WithSpark extends StrictLogging {
  // Runs `fn` against a freshly-configured local SparkContext and then
  // terminates the JVM: exit code 1 on failure, 0 otherwise. Intended for
  // standalone driver programs only — never call from code that must keep
  // running afterwards.
  def apply(fn: (SparkContext) => Unit): Unit = {
    try {
      val sparkConfig = new SparkConf().setAppName("wykopml")
        .set("spark.executor.memory", "4g")
        .set("spark.cassandra.connection.host", "127.0.0.1")
        .setMaster("local[*]") // run locally on all available cores
      val sc = new SparkContext(sparkConfig)
      try {
        fn(sc)
      } catch {
        case t: Throwable =>
          // Log while the context is still alive, then rethrow so the outer
          // handler converts the failure into exit code 1.
          logger.error("Exception when executing with spark", t)
          throw t
      } finally {
        sc.stop()
      }
    } catch {
      case t: Throwable =>
        logger.error("WithSpark has failed", t)
        sys.exit(1)
    } finally {
      // NOTE(review): sys.exit never returns, so on the failure path above the
      // JVM presumably exits with code 1 before reaching this finally; the
      // exit(0) here covers the success path. Fragile — worth confirming.
      sys.exit(0)
    }
  }
}
| blstream/wykopml | prediction/src/main/scala/wykopml/spark/WithSpark.scala | Scala | mit | 826 |
package com.sksamuel.elastic4s.indexes
import java.util.UUID
import com.sksamuel.elastic4s.VersionType.External
import com.sksamuel.elastic4s.testkit.DockerTests
import com.sksamuel.elastic4s.{Indexable, RefreshPolicy, VersionType}
import org.scalatest.{Matchers, WordSpec}
import scala.util.Try
// Integration tests for index requests, run against a Dockerised
// Elasticsearch (provided by DockerTests). The constructor body below seeds a
// shared "electronics" index once; the individual "should" clauses then query
// that shared state, so the seeding must run before any test.
class IndexTest extends WordSpec with Matchers with DockerTests {

  case class Phone(name: String, speed: String)

  // Typeclass instance so Phone values can be passed directly to `.source(...)`.
  implicit object PhoneIndexable extends Indexable[Phone] {
    override def json(t: Phone): String = s"""{ "name" : "${t.name}", "speed" : "${t.speed}" }"""
  }

  // Best-effort cleanup of a leftover index from a previous run; any failure
  // (e.g. the index does not exist) is deliberately swallowed by Try.
  Try {
    client.execute {
      deleteIndex("electronics")
    }.await
  }

  client.execute {
    createIndex("electronics").mappings(mapping("electronics"))
  }.await

  // Seed documents covering the cases exercised below: explicit id/version,
  // arrays, nulls, nested maps, arrays of maps, and an Indexable source.
  client.execute {
    bulk(
      indexInto("electronics" / "electronics").fields(Map("name" -> "galaxy", "screensize" -> 5)).withId("55A").version(42l).versionType(VersionType.External),
      indexInto("electronics" / "electronics").fields(Map("name" -> "razor", "colours" -> Array("white", "blue"))),
      indexInto("electronics" / "electronics").fields(Map("name" -> "iphone", "colour" -> null)),
      indexInto("electronics" / "electronics").fields(Map("name" -> "m9", "locations" -> Array(Map("id" -> "11", "name" -> "manchester"), Map("id" -> "22", "name" -> "sheffield")))),
      indexInto("electronics" / "electronics").fields(Map("name" -> "iphone2", "models" -> Map("5s" -> Array("standard", "retina")))),
      indexInto("electronics" / "electronics").fields(Map("name" -> "pixel", "apps" -> Map("maps" -> "google maps", "email" -> null))),
      indexInto("electronics" / "electronics").source(Phone("nokia blabble", "4g"))
    ).refresh(RefreshPolicy.Immediate)
  }.await

  "an index request" should {
    "index fields" in {
      client.execute {
        search("electronics").query(matchQuery("name", "galaxy"))
      }.await.result.totalHits shouldBe 1
    }
    // Index names containing '+' must survive URL encoding on create/index/search.
    "support index names with +" in {
      client.execute {
        createIndex("hello+world").mappings(mapping("wobble"))
      }.await
      client.execute {
        indexInto("hello+world/wobble").fields(Map("foo" -> "bar")).withId("a").refreshImmediately
      }.await
      client.execute {
        search("hello+world").matchAllQuery()
      }.await.result.totalHits shouldBe 1
    }
    // Document ids containing '/' must survive URL encoding on index/get.
    "support / in ids" in {
      client.execute {
        createIndex("indexidtest").mappings(mapping("wobble"))
      }.await
      client.execute {
        indexInto("indexidtest/wobble").fields(Map("foo" -> "bar")).withId("a/b").refreshImmediately
      }.await
      client.execute {
        search("indexidtest").matchAllQuery()
      }.await.result.totalHits shouldBe 1
      client.execute {
        get("indexidtest", "wobble", "a/b")
      }.await.result.exists shouldBe true
    }
    // The externally supplied version (42) set during seeding must be preserved.
    "support external versions" in {
      val found = client.execute {
        search("electronics").query(matchQuery("name", "galaxy"))
      }.await.result.hits.hits(0)
      found.id shouldBe "55A"
      found.version shouldBe 42l
    }
    "handle custom id" in {
      client.execute {
        search("electronics").query(idsQuery("55A"))
      }.await.result.totalHits shouldBe 1
    }
    "handle numbers" in {
      client.execute {
        search("electronics").query(termQuery("screensize", 5))
      }.await.result.totalHits shouldBe 1
    }
    "handle arrays" in {
      client.execute {
        search("electronics").query(matchQuery("name", "razor"))
      }.await.result.hits.hits.head.sourceAsMap shouldBe Map("name" -> "razor", "colours" -> List("white", "blue"))
    }
    "handle nested arrays" in {
      val hit = client.execute {
        search("electronics").query(matchQuery("name", "iphone2"))
      }.await.result.hits.hits.head
      hit.sourceAsMap("models") shouldBe Map("5s" -> List("standard", "retina"))
    }
    "handle arrays of maps" in {
      val hit = client.execute {
        search("electronics").query(matchQuery("name", "m9"))
      }.await.result.hits.hits.head
      hit.sourceAsMap("locations") shouldBe
        Seq(
          Map("id" -> "11", "name" -> "manchester"),
          Map("id" -> "22", "name" -> "sheffield")
        )
    }
    "handle null fields" in {
      client.execute {
        search("electronics").query(matchQuery("name", "iphone"))
      }.await.result.hits.hits.head.sourceAsMap shouldBe Map("colour" -> null, "name" -> "iphone")
    }
    "handle nested null fields" in {
      val hit = client.execute {
        search("electronics").query(matchQuery("name", "pixel"))
      }.await.result.hits.hits.head
      hit.sourceAsMap("apps") shouldBe Map("maps" -> "google maps", "email" -> null)
    }
    // Document written via the PhoneIndexable typeclass must be searchable.
    "index from indexable typeclass" in {
      client.execute {
        search("electronics").query(termQuery("speed", "4g"))
      }.await.result.totalHits shouldBe 1
    }
    // Uses a per-test unique index name so repeated runs do not collide.
    "create aliases with index" in {
      val id = UUID.randomUUID()
      val indexName = s"electronics-$id"
      client.execute {
        createIndex(indexName).mappings(mapping("electronics"))
          .alias("alias_1")
          .alias("alias_2")
      }.await
      val index = client.execute {
        getIndex(indexName)
      }.await.result.apply(indexName)
      index.aliases should contain key "alias_1"
      index.aliases should contain key "alias_2"
      client.execute {
        deleteIndex(indexName)
      }.await
    }
    "return created status" in {
      val result = client.execute {
        indexInto("electronics" / "electronics").fields("name" -> "super phone").refresh(RefreshPolicy.Immediate)
      }.await
      result.result.result shouldBe "created"
    }
    // Re-indexing an existing id is reported as "updated", not an error.
    "return OK status if the document already exists" in {
      val id = UUID.randomUUID().toString
      client.execute {
        indexInto("electronics" / "electronics").fields("name" -> "super phone").withId(id).refresh(RefreshPolicy.Immediate)
      }.await
      val result = client.execute {
        indexInto("electronics" / "electronics").fields("name" -> "super phone").withId(id).refresh(RefreshPolicy.Immediate)
      }.await
      result.result.result shouldBe "updated"
    }
    // Writing the same external version twice must trigger a version conflict.
    "handle update concurrency" in {
      val id = UUID.randomUUID.toString
      client.execute {
        indexInto("electronics" / "electronics")
          .fields("name" -> "super phone")
          .withId(id)
          .version(2l)
          .versionType(External)
          .refresh(RefreshPolicy.Immediate)
      }.await
      val result = client.execute {
        indexInto("electronics" / "electronics")
          .fields("name" -> "super phone")
          .withId(id)
          .version(2l)
          .versionType(External)
          .refresh(RefreshPolicy.Immediate)
      }.await
      result.error.toString should include ("version_conflict_engine_exception")
    }
    "return Left when the request has an invalid index name" in {
      val result = client.execute {
        indexInto("**1w11oowo/!!!!o_$$$")
      }.await
      result.error should not be null
    }
  }
}
| Tecsisa/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/indexes/IndexTest.scala | Scala | apache-2.0 | 7,031 |
/*
* Copyright (c) 2016 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import record._
// Fixture types for the shapeless-tag test below: Dummy wraps an Int that is
// tagged with CustomTag via shapeless' `tag.@@`.
object ShapelessTaggedAux {
  import tag.@@

  trait CustomTag
  case class Dummy(i: Int @@ CustomTag)
}
// Scala 2.11+ regression tests for LabelledGeneric over case classes whose
// fields carry tagged types (shapeless tags and scalaz-style tags).
class LabelledGeneric211Tests {
  // LabelledGeneric should rebuild a Dummy from a record whose field carries
  // a shapeless tag.
  @Test
  def testShapelessTagged: Unit = {
    import ShapelessTaggedAux._

    val lgen = LabelledGeneric[Dummy]
    val s = s"${lgen from Record(i=tag[CustomTag](0))}"
    // JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments swapped, which inverts the "expected X but was Y" message on
    // failure.
    assertEquals("Dummy(0)", s)
  }

  // The derived Repr types must match the expected record types, and type
  // class instances must be derivable for them (compile-time checks only).
  @Test
  def testScalazTagged: Unit = {
    import ScalazTaggedAux._

    type R = Record.`'i -> Int @@ CustomTag`.T
    val lgen = LabelledGeneric[Dummy]
    implicitly[lgen.Repr =:= R]
    implicitly[TC[R]]

    type RT = Record.`'b -> Boolean, 'i -> Int @@ CustomTag`.T
    val lgent = LabelledGeneric[DummyTagged]
    implicitly[lgent.Repr =:= RT]
    implicitly[TC[RT]]
  }
}
| liff/shapeless | core/src/test/scala_2.11+/shapeless/labelledgeneric.scala | Scala | apache-2.0 | 1,399 |
package mesosphere.marathon
package core.launcher.impl
import java.util
import java.util.Collections
import mesosphere.UnitTest
import mesosphere.marathon.core.instance.TestInstanceBuilder._
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder }
import mesosphere.marathon.core.launcher.{ InstanceOp, TaskLauncher }
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.state.PathId
import mesosphere.marathon.stream.Implicits._
import mesosphere.marathon.test.MarathonTestHelper
import mesosphere.mesos.protos.Implicits._
import mesosphere.mesos.protos.OfferID
import org.apache.mesos.Protos.TaskInfo
import org.apache.mesos.{ Protos, SchedulerDriver }
import org.mockito.Mockito
import org.mockito.Mockito.when
// Unit tests for TaskLauncherImpl: accepting offers delegates to the Mesos
// driver's acceptOffers, declining delegates to declineOffer, and both are
// no-ops (returning false / doing nothing) when no driver is set.
class TaskLauncherImplTest extends UnitTest {
  private[this] val offerId = OfferID("offerId")
  // Same offer id in the java collection shape the Mesos driver API expects.
  private[this] val offerIdAsJava: util.Collection[Protos.OfferID] = Collections.singleton[Protos.OfferID](offerId)

  // Builds a LaunchTask instance op for a one-CPU task of `instanceId`.
  private[this] def launch(taskInfoBuilder: TaskInfo.Builder): InstanceOp.LaunchTask = {
    val taskInfo = taskInfoBuilder.build()
    val instance = TestInstanceBuilder.newBuilderWithInstanceId(instanceId).addTaskWithBuilder().taskFromTaskInfo(taskInfo).build().getInstance()
    val task: Task = instance.appTask
    new InstanceOpFactoryHelper(Some("principal"), Some("role")).launchEphemeral(taskInfo, task, instance)
  }

  private[this] val appId = PathId("/test")
  private[this] val instanceId = Instance.Id.forRunSpec(appId)
  private[this] val launch1 = launch(MarathonTestHelper.makeOneCPUTask(Task.Id.forInstanceId(instanceId, None)))
  private[this] val launch2 = launch(MarathonTestHelper.makeOneCPUTask(Task.Id.forInstanceId(instanceId, None)))
  private[this] val ops = Seq(launch1, launch2)
  // Flattened offer operations in the java shape passed to acceptOffers.
  private[this] val opsAsJava = ops.flatMap(_.offerOperations).asJava
  // Filter with no refuse delay, as TaskLauncherImpl is expected to send it.
  private[this] val filter = Protos.Filters.newBuilder().setRefuseSeconds(0).build()

  // Fixture wiring a TaskLauncherImpl to an optional (mocked) driver.
  case class Fixture(driver: Option[SchedulerDriver] = Some(mock[SchedulerDriver])) {
    val driverHolder: MarathonSchedulerDriverHolder = new MarathonSchedulerDriverHolder
    driverHolder.driver = driver
    val launcher: TaskLauncher = new TaskLauncherImpl(driverHolder)

    // Asserts no further interactions happened on the mocked driver.
    def verifyClean(): Unit = {
      driverHolder.driver.foreach(Mockito.verifyNoMoreInteractions(_))
    }
  }

  "TaskLauncherImpl" should {
    "launchTasks without driver" in new Fixture(driver = None) {
      assert(!launcher.acceptOffer(offerId, ops))

      verifyClean()
    }

    // Driver reports an aborted status => acceptOffer returns false.
    "unsuccessful launchTasks" in new Fixture {
      when(driverHolder.driver.get.acceptOffers(offerIdAsJava, opsAsJava, filter))
        .thenReturn(Protos.Status.DRIVER_ABORTED)

      assert(!launcher.acceptOffer(offerId, ops))

      verify(driverHolder.driver.get).acceptOffers(offerIdAsJava, opsAsJava, filter)
      verifyClean()
    }

    "successful launchTasks" in new Fixture {
      when(driverHolder.driver.get.acceptOffers(offerIdAsJava, opsAsJava, filter))
        .thenReturn(Protos.Status.DRIVER_RUNNING)

      assert(launcher.acceptOffer(offerId, ops))

      verify(driverHolder.driver.get).acceptOffers(offerIdAsJava, opsAsJava, filter)
      verifyClean()
    }

    "declineOffer without driver" in new Fixture(driver = None) {
      launcher.declineOffer(offerId, refuseMilliseconds = None)

      verifyClean()
    }

    "declineOffer with driver" in new Fixture {
      launcher.declineOffer(offerId, refuseMilliseconds = None)

      verify(driverHolder.driver.get).declineOffer(offerId, Protos.Filters.getDefaultInstance)
      verifyClean()
    }

    // Milliseconds are converted to the fractional seconds Mesos expects.
    "declineOffer with driver and defined refuse seconds" in new Fixture {
      launcher.declineOffer(offerId, Some(123))
      val filter = Protos.Filters.newBuilder().setRefuseSeconds(123 / 1000.0).build()
      verify(driverHolder.driver.get).declineOffer(offerId, filter)
      verifyClean()
    }
  }
}
| guenter/marathon | src/test/scala/mesosphere/marathon/core/launcher/impl/TaskLauncherImplTest.scala | Scala | apache-2.0 | 3,876 |
/*
* This file is part of the sbt-concrete plugin.
* Copyright (c) 2012 Scott Buckley, Anthony M Sloane, Macquarie University.
* All rights reserved.
* Distributed under the New BSD license.
* See file LICENSE at top of distribution.
*/
import sbt._
object Transformer {

    import scala.util.matching.Regex

    /**
     * Transformation function. `flags` is the collection of plugin flags
     * which may modify the operation of the transformation. `in` is a
     * set of the `.cscala` files in the project that have changed since
     * the last time the transformation was run. `outDir` is the directory
     * in which the transformed files should be written. Should return a
     * set of the generated files.
     */
    def transform (flags : Flags, in : Set[File], outDir : File) : Set[File] =
        for (inFile <- in)
            yield {
                val outFileName = inFile.base + ".scala"
                val outFile = outDir / outFileName // pathname concatenation
                IO.write (outFile, transformContents (IO.read (inFile)))
                outFile
            }

    /**
     * Given the contents of a `.cscala` file return the new contents.
     * FIXME: this is a dummy, just to do a simple transformation for testing.
     * It turns things of the form `[[foo]]` into `println (2 * foo)`.
     */
    def transformContents (oldContents : String) : String = {
        // The `+` must sit inside the capture group: the original pattern
        // `\[\[([^]])+\]\]` repeated a one-character group, so "body" only
        // captured the LAST character of the bracketed text ("[[foo]]" would
        // have produced "println (2 * o)"). The `]` inside the character
        // class is also escaped explicitly for clarity.
        val pattern = new Regex ("""\[\[([^\]]+)\]\]""", "body")
        pattern.replaceAllIn (oldContents, m => "println (2 * " + m.group ("body") + ")")
    }

}
| scottbuckley/sbt-concrete | src/Transformer.scala | Scala | bsd-3-clause | 1,571 |
///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2010 Travis Brown, The University of Texas at Austin
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////////
package opennlp.fieldspring.tr.util.sanity
import java.io._
import scala.collection.JavaConversions._
import opennlp.fieldspring.tr.text.Corpus
import opennlp.fieldspring.tr.text.Toponym
import opennlp.fieldspring.tr.text.io.TrXMLDirSource
import opennlp.fieldspring.tr.text.prep.OpenNLPTokenizer
import opennlp.fieldspring.tr.topo.Location
// Sanity checker: walks a TR-XML corpus and verifies that every occurrence of
// the same toponym surface form resolves to a consistent candidate list
// (same size; region centers are printed for manual comparison).
object CandidateCheck extends App {
  // NOTE(review): overriding main on an object that extends App bypasses
  // App's delayed-initialization machinery; a plain object with a main
  // method would express the same thing more directly.
  override def main(args: Array[String]) {
    val tokenizer = new OpenNLPTokenizer
    val corpus = Corpus.createStreamCorpus

    // First candidate list seen for each toponym form; later occurrences of
    // the same form are compared against this baseline.
    val cands = scala.collection.mutable.Map[java.lang.String, java.util.List[Location]]()

    // args(0): directory of TR-XML files to read.
    corpus.addSource(new TrXMLDirSource(new File(args(0)), tokenizer))
    // Iterate every toponym of every sentence of every document.
    corpus.foreach { _.foreach { _.getToponyms.foreach {
      case toponym: Toponym => {
        if (!cands.contains(toponym.getForm)) {
          //println("Doesn't contain: " + toponym.getForm)
          cands(toponym.getForm) = toponym.getCandidates
        } else {
          val prev = cands(toponym.getForm)
          val here = toponym.getCandidates
          //println("Contains: " + toponym.getForm)
          // Candidate lists for the same form must at least agree in size...
          if (prev.size != here.size) {
            println("=====Size error for " + toponym.getForm + ": " + prev.size + " " + here.size)
          } else {
            // ...and their region centers are printed pairwise so mismatches
            // can be spotted by eye.
            prev.zip(here).foreach { case (p, h) =>
              println(p.getRegion.getCenter + " " + h.getRegion.getCenter)
              //case (p, h) if p != h => println("=====Mismatch for " + toponym.getForm)
              //case _ => ()
            }
          }
        }
      }
    }}}
  }
}
| utcompling/fieldspring | src/main/scala/opennlp/fieldspring/tr/util/sanity/CandidateCheck.scala | Scala | apache-2.0 | 2,340 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.security
import java.security.PrivilegedExceptionAction
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION
import org.apache.hadoop.minikdc.MiniKdc
import org.apache.hadoop.security.{Credentials, UserGroupInformation}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.security.HadoopDelegationTokenProvider
import org.apache.spark.util.Utils
// Test provider whose constructor records that it was attempted and then
// fails. The suite uses it to verify that one provider's constructor failure
// does not prevent the remaining providers from being loaded.
private class ExceptionThrowingDelegationTokenProvider extends HadoopDelegationTokenProvider {
  ExceptionThrowingDelegationTokenProvider.constructed = true
  // Fail construction on purpose; the methods below are unreachable in
  // practice but must still be implemented to satisfy the trait.
  throw new IllegalArgumentException

  override def serviceName: String = "throw"

  override def delegationTokensRequired(
    sparkConf: SparkConf,
    hadoopConf: Configuration): Boolean = throw new IllegalArgumentException

  override def obtainDelegationTokens(
    hadoopConf: Configuration,
    sparkConf: SparkConf,
    creds: Credentials): Option[Long] = throw new IllegalArgumentException
}
private object ExceptionThrowingDelegationTokenProvider {
  // Set to true by the companion class' constructor just before it throws.
  var constructed = false
}
// Tests for HadoopDelegationTokenManager: which providers get loaded under
// various configurations, and that token acquisition is a no-op (rather than
// a failure) when the current user has no Kerberos credentials.
class HadoopDelegationTokenManagerSuite extends SparkFunSuite {
  private val hadoopConf = new Configuration()

  test("default configuration") {
    ExceptionThrowingDelegationTokenProvider.constructed = false
    val manager = new HadoopDelegationTokenManager(new SparkConf(false), hadoopConf, null)
    assert(manager.isProviderLoaded("hadoopfs"))
    assert(manager.isProviderLoaded("hbase"))
    // This checks that providers are loaded independently and they have no effect on each other
    assert(ExceptionThrowingDelegationTokenProvider.constructed)
    assert(!manager.isProviderLoaded("throw"))
  }

  test("disable hadoopfs credential provider") {
    val sparkConf = new SparkConf(false).set("spark.security.credentials.hadoopfs.enabled", "false")
    val manager = new HadoopDelegationTokenManager(sparkConf, hadoopConf, null)
    assert(!manager.isProviderLoaded("hadoopfs"))
  }

  // The old spark.yarn.security.tokens.* keys must still be honoured.
  test("using deprecated configurations") {
    val sparkConf = new SparkConf(false)
      .set("spark.yarn.security.tokens.hadoopfs.enabled", "false")
    val manager = new HadoopDelegationTokenManager(sparkConf, hadoopConf, null)
    assert(!manager.isProviderLoaded("hadoopfs"))
    assert(manager.isProviderLoaded("hbase"))
  }

  test("SPARK-29082: do not fail if current user does not have credentials") {
    // SparkHadoopUtil overrides the UGI configuration during initialization. That normally
    // happens early in the Spark application, but here it may affect the test depending on
    // how it's run, so force its initialization.
    SparkHadoopUtil.get

    var kdc: MiniKdc = null
    try {
      // UserGroupInformation.setConfiguration needs default kerberos realm which can be set in
      // krb5.conf. MiniKdc sets "java.security.krb5.conf" in start and removes it when stop called.
      val kdcDir = Utils.createTempDir()
      val kdcConf = MiniKdc.createConf()
      // The port for MiniKdc service gets selected in the constructor, but will be bound
      // to it later in MiniKdc.start() -> MiniKdc.initKDCServer() -> KdcServer.start().
      // In meantime, when some other service might capture the port during this progress, and
      // cause BindException.
      // This makes our tests which have dedicated JVMs and rely on MiniKDC being flaky
      //
      // https://issues.apache.org/jira/browse/HADOOP-12656 get fixed in Hadoop 2.8.0.
      //
      // The workaround here is to periodically repeat this process with a timeout , since we are
      // using Hadoop 2.7.4 as default.
      // https://issues.apache.org/jira/browse/SPARK-31631
      eventually(timeout(60.seconds), interval(1.second)) {
        try {
          kdc = new MiniKdc(kdcConf, kdcDir)
          kdc.start()
        } catch {
          case NonFatal(e) =>
            // Clean up the partially-started KDC before eventually() retries.
            if (kdc != null) {
              kdc.stop()
              kdc = null
            }
            throw e
        }
      }

      val krbConf = new Configuration()
      krbConf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos")
      UserGroupInformation.setConfiguration(krbConf)

      val manager = new HadoopDelegationTokenManager(new SparkConf(false), krbConf, null)
      // With security enabled but no TGT, obtaining tokens must succeed and
      // simply produce empty credentials.
      val testImpl = new PrivilegedExceptionAction[Unit] {
        override def run(): Unit = {
          assert(UserGroupInformation.isSecurityEnabled())
          val creds = new Credentials()
          manager.obtainDelegationTokens(creds)
          assert(creds.numberOfTokens() === 0)
          assert(creds.numberOfSecretKeys() === 0)
        }
      }

      // Exercise both a plain user and a proxy user.
      val realUser = UserGroupInformation.createUserForTesting("realUser", Array.empty)
      realUser.doAs(testImpl)

      val proxyUser = UserGroupInformation.createProxyUserForTesting("proxyUser", realUser,
        Array.empty)
      proxyUser.doAs(testImpl)
    } finally {
      if (kdc != null) {
        kdc.stop()
      }
      // Restore the default (non-kerberos) UGI state for subsequent tests.
      UserGroupInformation.reset()
    }
  }
}
| ueshin/apache-spark | core/src/test/scala/org/apache/spark/deploy/security/HadoopDelegationTokenManagerSuite.scala | Scala | apache-2.0 | 5,985 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.ml
import scala.collection.mutable
import scala.language.reflectiveCalls
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.examples.mllib.AbstractParams
import org.apache.spark.ml.{Pipeline, PipelineStage}
import org.apache.spark.ml.classification.{GBTClassificationModel, GBTClassifier}
import org.apache.spark.ml.feature.{StringIndexer, VectorIndexer}
import org.apache.spark.ml.regression.{GBTRegressionModel, GBTRegressor}
import org.apache.spark.sql.DataFrame
/**
 * An example runner for Gradient-Boosted Trees (GBTs). Run with
* {{{
* ./bin/run-example ml.GBTExample [options]
* }}}
* Decision Trees and ensembles can take a large amount of memory. If the run-example command
* above fails, try running via spark-submit and specifying the amount of memory as at least 1g.
* For local mode, run
* {{{
* ./bin/spark-submit --class org.apache.spark.examples.ml.GBTExample --driver-memory 1g
* [examples JAR path] [options]
* }}}
* If you use it as a template to create your own app, please use `spark-submit` to submit your app.
*/
object GBTExample {

  /**
   * Command-line parameters for the example. The tree and boosting settings
   * mirror the underlying MLlib strategy options.
   */
  case class Params(
      input: String = "../data/mllib/rf_libsvm_data.txt",
      testInput: String = "",
      dataFormat: String = "libsvm", // input data format
      algo: String = "classification", // or "regression": which task to run
      maxDepth: Int = 5, // maximum tree depth; a split-termination criterion that guards against overfitting
      maxBins: Int = 32, // maximum number of bins for discretizing continuous features / candidate splits per node
      minInstancesPerNode: Int = 1, // minimum instances each child must hold after a split, else splitting stops
      minInfoGain: Double = 0.0, // minimum information gain required to split a node
      maxIter: Int = 10, // number of boosting iterations (trees in the ensemble)
      fracTest: Double = 0.2,
      cacheNodeIds: Boolean = false,
      checkpointDir: Option[String] = None, // checkpoint directory
      // checkpoint interval (>= 1), or -1 to disable checkpointing
      checkpointInterval: Int = 10) extends AbstractParams[Params]

  /** Parses the command line into a Params instance and runs the example. */
  def main(args: Array[String]) {
    val defaultParams = Params()

    val parser = new OptionParser[Params]("GBTExample") {
      head("GBTExample: an example Gradient-Boosted Trees app.")
      opt[String]("algo")
        .text(s"algorithm (classification, regression), default: ${defaultParams.algo}")
        .action((x, c) => c.copy(algo = x))
      opt[Int]("maxDepth") // maximum tree depth; guards against overfitting
        .text(s"max depth of the tree, default: ${defaultParams.maxDepth}")
        .action((x, c) => c.copy(maxDepth = x))
      opt[Int]("maxBins")
        .text(s"max number of bins, default: ${defaultParams.maxBins}")
        .action((x, c) => c.copy(maxBins = x))
      opt[Int]("minInstancesPerNode") // minimum instances per child after a split
        .text(s"min number of instances required at child nodes to create the parent split," +
        s" default: ${defaultParams.minInstancesPerNode}")
        .action((x, c) => c.copy(minInstancesPerNode = x))
      opt[Double]("minInfoGain") // minimum information gain required for a split
        .text(s"min info gain required to create a split, default: ${defaultParams.minInfoGain}")
        .action((x, c) => c.copy(minInfoGain = x))
      opt[Int]("maxIter")
        .text(s"number of trees in ensemble, default: ${defaultParams.maxIter}")
        .action((x, c) => c.copy(maxIter = x))
      opt[Double]("fracTest")
        .text(s"fraction of data to hold out for testing. If given option testInput, " +
        s"this option is ignored. default: ${defaultParams.fracTest}")
        .action((x, c) => c.copy(fracTest = x))
      opt[Boolean]("cacheNodeIds")
        .text(s"whether to use node Id cache during training, " +
        s"default: ${defaultParams.cacheNodeIds}")
        .action((x, c) => c.copy(cacheNodeIds = x))
      opt[String]("checkpointDir")
        .text(s"checkpoint directory where intermediate node Id caches will be stored, " +
        s"default: ${
          defaultParams.checkpointDir match {
            case Some(strVal) => strVal
            case None => "None"
          }
        }")
        .action((x, c) => c.copy(checkpointDir = Some(x)))
      opt[Int]("checkpointInterval") // checkpoint interval (>= 1), or -1 to disable
        .text(s"how often to checkpoint the node Id cache, " +
        s"default: ${defaultParams.checkpointInterval}")
        .action((x, c) => c.copy(checkpointInterval = x))
      opt[String]("testInput")
        .text(s"input path to test dataset. If given, option fracTest is ignored." +
        s" default: ${defaultParams.testInput}")
        .action((x, c) => c.copy(testInput = x))
      opt[String]("dataFormat")
        .text("data format: libsvm (default), dense (deprecated in Spark v1.1)")
        .action((x, c) => c.copy(dataFormat = x))
      /* arg[String]("<input>")
        .text("input path to labeled examples")
        .required()
        .action((x, c) => c.copy(input = x))*/
      checkConfig { params =>
        if (params.fracTest < 0 || params.fracTest >= 1) {
          failure(s"fracTest ${params.fracTest} value incorrect; should be in [0,1).")
        } else {
          success
        }
      }
    }

    parser.parse(args, defaultParams).map { params =>
      run(params)
    }.getOrElse {
      sys.exit(1)
    }
  }

  /** Builds the pipeline, trains the GBT model and evaluates it. */
  def run(params: Params) {
    val conf = new SparkConf().setAppName(s"GBTExample with $params").setMaster("local[*]")
    val sc = new SparkContext(conf)
    params.checkpointDir.foreach(sc.setCheckpointDir)
    val algo = params.algo.toLowerCase

    println(s"GBTExample with parameters:\n$params")

    // Load training and test data and cache it.
    val (training: DataFrame, test: DataFrame) = DecisionTreeExample.loadDatasets(sc, params.input,
      params.dataFormat, params.testInput, algo, params.fracTest)

    // Set up the Pipeline: feature transformations and the estimator are
    // chained into one workflow whose fit() produces the fitted model. A
    // Pipeline consists of one or more PipelineStages, each doing one task.
    val stages = new mutable.ArrayBuffer[PipelineStage]()
    // (1) For classification, re-index classes.
    val labelColName = if (algo == "classification") "indexedLabel" else "label"
    if (algo == "classification") {
      val labelIndexer = new StringIndexer()
        .setInputCol("labelString")
        .setOutputCol(labelColName)
      stages += labelIndexer
    }
    // (2) Identify categorical features using VectorIndexer: it assigns
    // indices to the categorical (discrete-valued) entries of the feature
    // vectors. Features with more than maxCategories values will be treated
    // as continuous.
    val featuresIndexer = new VectorIndexer()
      .setInputCol("features")
      .setOutputCol("indexedFeatures")
      .setMaxCategories(10)
    stages += featuresIndexer
    // (3) Learn GBT
    val dt = algo match {
      case "classification" =>
        new GBTClassifier()
          .setFeaturesCol("indexedFeatures") // column holding the (indexed) feature vectors
          .setLabelCol(labelColName) // name of the label column
          .setMaxDepth(params.maxDepth) // maximum tree depth; guards against overfitting
          .setMaxBins(params.maxBins) // maximum bins for discretizing continuous features (default 32)
          .setMinInstancesPerNode(params.minInstancesPerNode) // minimum instances per child after a split, else stop splitting
          .setMinInfoGain(params.minInfoGain) // minimum information gain required for a split
          .setCacheNodeIds(params.cacheNodeIds)
          // checkpoint interval (>= 1), or -1 to disable checkpointing
          .setCheckpointInterval(params.checkpointInterval)
          .setMaxIter(params.maxIter)
      case "regression" =>
        new GBTRegressor()
          .setFeaturesCol("indexedFeatures") // column holding the (indexed) feature vectors
          .setLabelCol(labelColName) // name of the label column
          .setMaxDepth(params.maxDepth) // maximum tree depth; guards against overfitting
          .setMaxBins(params.maxBins) // maximum bins for discretizing continuous features (default 32)
          .setMinInstancesPerNode(params.minInstancesPerNode) // minimum instances per child after a split, else stop splitting
          .setMinInfoGain(params.minInfoGain) // minimum information gain required for a split
          .setCacheNodeIds(params.cacheNodeIds)
          // checkpoint interval (>= 1), or -1 to disable checkpointing
          .setCheckpointInterval(params.checkpointInterval)
          .setMaxIter(params.maxIter)
      // The s-interpolator was missing on these error messages, so they used
      // to print the literal text "${params.algo}".
      case _ => throw new IllegalArgumentException(s"Algo ${params.algo} not supported.")
    }
    stages += dt
    // A Pipeline chains the stages above into one ML workflow.
    val pipeline = new Pipeline().setStages(stages.toArray)

    // Fit the Pipeline.
    val startTime = System.nanoTime() // current system timer value, in nanoseconds
    // fit() trains the whole pipeline on the DataFrame and yields the model.
    val pipelineModel = pipeline.fit(training)
    val elapsedTime = (System.nanoTime() - startTime) / 1e9 // 1e9 ns per second
    println(s"Training time: $elapsedTime seconds")

    // Get the trained GBT from the fitted PipelineModel (it is the last stage)
    // and print it: full tree dump for small models, summary otherwise.
    algo match {
      case "classification" =>
        val rfModel = pipelineModel.stages.last.asInstanceOf[GBTClassificationModel]
        if (rfModel.totalNumNodes < 30) {
          println(rfModel.toDebugString) // Print full model.
        } else {
          println(rfModel) // Print model summary.
        }
      case "regression" =>
        val rfModel = pipelineModel.stages.last.asInstanceOf[GBTRegressionModel]
        if (rfModel.totalNumNodes < 30) {
          println(rfModel.toDebugString) // Print full model.
        } else {
          println(rfModel) // Print model summary.
        }
      case _ => throw new IllegalArgumentException(s"Algo ${params.algo} not supported.")
    }

    // Evaluate the model on the training and test data.
    algo match {
      case "classification" =>
        println("Training data results:")
        DecisionTreeExample.evaluateClassificationModel(pipelineModel, training, labelColName)
        println("Test data results:")
        DecisionTreeExample.evaluateClassificationModel(pipelineModel, test, labelColName)
      case "regression" =>
        println("Training data results:")
        DecisionTreeExample.evaluateRegressionModel(pipelineModel, training, labelColName)
        println("Test data results:")
        DecisionTreeExample.evaluateRegressionModel(pipelineModel, test, labelColName)
      case _ =>
        throw new IllegalArgumentException(s"Algo ${params.algo} not supported.")
    }

    sc.stop()
  }
}
// scalastyle:on println
| tophua/spark1.52 | examples/src/main/scala/org/apache/spark/examples/ml/GBTExample.scala | Scala | apache-2.0 | 12,420 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tree.impl
import org.apache.spark.internal.Logging
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vector
import org.apache.spark.ml.regression.{DecisionTreeRegressionModel, DecisionTreeRegressor}
import org.apache.spark.mllib.tree.configuration.{Algo => OldAlgo}
import org.apache.spark.mllib.tree.configuration.{BoostingStrategy => OldBoostingStrategy}
import org.apache.spark.mllib.tree.impurity.{Variance => OldVariance}
import org.apache.spark.mllib.tree.loss.{Loss => OldLoss}
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.util.PeriodicRDDCheckpointer
import org.apache.spark.storage.StorageLevel
private[spark] object GradientBoostedTrees extends Logging {
/**
 * Trains a gradient-boosted trees model.
 *
 * For classification, binary {0, 1} labels are remapped to {-1, +1} so that
 * the problem can be solved with regression trees fit to loss gradients.
 *
 * @param input Training dataset: RDD of `LabeledPoint`.
 * @param boostingStrategy boosting parameters (loss, learning rate, iterations, ...).
 * @param seed Random seed.
 * @param featureSubsetStrategy strategy for sampling features at each tree node.
 * @return tuple of ensemble models and weights:
 *         (array of decision tree models, array of model weights)
 */
def run(
    input: RDD[LabeledPoint],
    boostingStrategy: OldBoostingStrategy,
    seed: Long,
    featureSubsetStrategy: String): (Array[DecisionTreeRegressionModel], Array[Double]) = {
  boostingStrategy.treeStrategy.algo match {
    case OldAlgo.Regression =>
      // No separate validation set: pass the training data in both slots
      // with validate = false.
      boost(input, input, boostingStrategy, validate = false, seed, featureSubsetStrategy)
    case OldAlgo.Classification =>
      // Map labels to -1, +1 so binary classification can be treated as regression.
      val remapped = input.map(lp => new LabeledPoint((lp.label * 2) - 1, lp.features))
      boost(remapped, remapped, boostingStrategy, validate = false, seed, featureSubsetStrategy)
    case algo =>
      throw new IllegalArgumentException(s"$algo is not supported by gradient boosting.")
  }
}
/**
 * Method to train a gradient boosting model with a held-out validation set
 * used for early stopping.
 *
 * @param input Training dataset: RDD of `LabeledPoint`.
 * @param validationInput Validation dataset.
 *                        This dataset should be different from the training dataset,
 *                        but it should follow the same distribution.
 *                        E.g., these two datasets could be created from an original dataset
 *                        by using `org.apache.spark.rdd.RDD.randomSplit()`
 * @param boostingStrategy boosting parameters
 * @param seed Random seed.
 * @param featureSubsetStrategy strategy for sampling features at each tree node.
 * @return tuple of ensemble models and weights:
 *         (array of decision tree models, array of model weights)
 */
def runWithValidation(
    input: RDD[LabeledPoint],
    validationInput: RDD[LabeledPoint],
    boostingStrategy: OldBoostingStrategy,
    seed: Long,
    featureSubsetStrategy: String): (Array[DecisionTreeRegressionModel], Array[Double]) = {
  val algo = boostingStrategy.treeStrategy.algo
  algo match {
    case OldAlgo.Regression =>
      GradientBoostedTrees.boost(input, validationInput, boostingStrategy,
        validate = true, seed, featureSubsetStrategy)
    case OldAlgo.Classification =>
      // Map labels to -1, +1 so binary classification can be treated as regression.
      val remappedInput = input.map(
        x => new LabeledPoint((x.label * 2) - 1, x.features))
      val remappedValidationInput = validationInput.map(
        x => new LabeledPoint((x.label * 2) - 1, x.features))
      GradientBoostedTrees.boost(remappedInput, remappedValidationInput, boostingStrategy,
        validate = true, seed, featureSubsetStrategy)
    case _ =>
      // Fix: message made consistent with run(); it previously read
      // "... is not supported by the gradient boosting."
      throw new IllegalArgumentException(s"$algo is not supported by gradient boosting.")
  }
}
/**
 * Computes the predictions and errors produced by the first tree alone,
 * i.e. for the first iteration of gradient boosting.
 *
 * @param data training data.
 * @param initTreeWeight learning rate assigned to the first tree.
 * @param initTree first DecisionTreeModel.
 * @param loss evaluation metric.
 * @return an RDD of (prediction, error) pairs, one per sample.
 */
def computeInitialPredictionAndError(
    data: RDD[LabeledPoint],
    initTreeWeight: Double,
    initTree: DecisionTreeRegressionModel,
    loss: OldLoss): RDD[(Double, Double)] = {
  data.map { point =>
    // Start from a zero prediction and add the first tree's weighted contribution.
    val prediction = updatePrediction(point.features, 0.0, initTree, initTreeWeight)
    (prediction, loss.computeError(prediction, point.label))
  }
}
/**
 * Refreshes a zipped (prediction, error) RDD (as obtained with
 * computeInitialPredictionAndError) after one more tree has been added
 * to the ensemble.
 *
 * @param data training data.
 * @param predictionAndError current (prediction, error) RDD.
 * @param treeWeight learning rate for the new tree.
 * @param tree tree whose contribution should be folded in.
 * @param loss evaluation metric.
 * @return an RDD of updated (prediction, error) pairs, one per sample.
 */
def updatePredictionError(
    data: RDD[LabeledPoint],
    predictionAndError: RDD[(Double, Double)],
    treeWeight: Double,
    tree: DecisionTreeRegressionModel,
    loss: OldLoss): RDD[(Double, Double)] = {
  // Zip keeps each point aligned with its running (prediction, error) pair;
  // the previous error is discarded and recomputed from the new prediction.
  data.zip(predictionAndError).map { case (point, (oldPrediction, _)) =>
    val prediction = updatePrediction(point.features, oldPrediction, tree, treeWeight)
    (prediction, loss.computeError(prediction, point.label))
  }
}
/**
 * Adds one tree's weighted prediction to an accumulated prediction.
 *
 * @param features Vector of features representing a single data point.
 * @param prediction The prediction accumulated so far.
 * @param tree New Decision Tree model.
 * @param weight Tree weight.
 * @return Updated prediction.
 */
def updatePrediction(
    features: Vector,
    prediction: Double,
    tree: DecisionTreeRegressionModel,
    weight: Double): Double = {
  val contribution = tree.rootNode.predictImpl(features).prediction * weight
  contribution + prediction
}
/**
 * Computes the mean loss of a boosted ensemble over a dataset.
 * Note: This method is not used by the gradient boosting algorithm itself,
 * but is useful for debugging.
 *
 * @param data Training dataset: RDD of `LabeledPoint`.
 * @param trees Boosted Decision Tree models.
 * @param treeWeights Learning rates at each boosting iteration.
 * @param loss evaluation metric.
 * @return Measure of model error on data.
 */
def computeError(
    data: RDD[LabeledPoint],
    trees: Array[DecisionTreeRegressionModel],
    treeWeights: Array[Double],
    loss: OldLoss): Double = {
  data.map { point =>
    // Accumulate each tree's weighted contribution in boosting order.
    var acc = 0.0
    var i = 0
    while (i < trees.length) {
      acc = updatePrediction(point.features, acc, trees(i), treeWeights(i))
      i += 1
    }
    loss.computeError(acc, point.label)
  }.mean()
}
/**
* Method to compute error or loss for every iteration of gradient boosting.
*
* @param data RDD of `LabeledPoint`
* @param trees Boosted Decision Tree models
* @param treeWeights Learning rates at each boosting iteration.
* @param loss evaluation metric.
* @param algo algorithm for the ensemble, either Classification or Regression
* @return an array with index i having the losses or errors for the ensemble
* containing the first i+1 trees
*/
def evaluateEachIteration(
data: RDD[LabeledPoint],
trees: Array[DecisionTreeRegressionModel],
treeWeights: Array[Double],
loss: OldLoss,
algo: OldAlgo.Value): Array[Double] = {
val sc = data.sparkContext
// For classification, remap {0, 1} labels to {-1, +1}, matching run()/runWithValidation().
val remappedData = algo match {
case OldAlgo.Classification => data.map(x => new LabeledPoint((x.label * 2) - 1, x.features))
case _ => data
}
// Broadcast the trees once so every task reuses the same serialized copy.
val broadcastTrees = sc.broadcast(trees)
// Local val so the closure below does not capture the enclosing object.
val localTreeWeights = treeWeights
val treesIndices = trees.indices
val dataCount = remappedData.count()
// Per point: weighted per-tree predictions -> prefix sums via scanLeft
// (dropping the seed 0.0) -> per-prefix error. Element i of the resulting
// sequence is this point's error under the ensemble of the first i+1 trees.
val evaluation = remappedData.map { point =>
treesIndices.map { idx =>
val prediction = broadcastTrees.value(idx)
.rootNode
.predictImpl(point.features)
.prediction
prediction * localTreeWeights(idx)
}
.scanLeft(0.0)(_ + _).drop(1)
.map(prediction => loss.computeError(prediction, point.label))
}
// Sum the per-point error vectors element-wise across the RDD, then divide
// by the point count to get the mean error per ensemble prefix.
.aggregate(treesIndices.map(_ => 0.0))(
(aggregated, row) => treesIndices.map(idx => aggregated(idx) + row(idx)),
(a, b) => treesIndices.map(idx => a(idx) + b(idx)))
.map(_ / dataCount)
broadcastTrees.destroy()
evaluation.toArray
}
/**
* Internal method for performing regression using trees as base learners.
* @param input training dataset
* @param validationInput validation dataset, ignored if validate is set to false.
* @param boostingStrategy boosting parameters
* @param validate whether or not to use the validation dataset.
* @param seed Random seed.
* @return tuple of ensemble models and weights:
* (array of decision tree models, array of model weights)
*/
def boost(
input: RDD[LabeledPoint],
validationInput: RDD[LabeledPoint],
boostingStrategy: OldBoostingStrategy,
validate: Boolean,
seed: Long,
featureSubsetStrategy: String): (Array[DecisionTreeRegressionModel], Array[Double]) = {
val timer = new TimeTracker()
timer.start("total")
timer.start("init")
boostingStrategy.assertValid()
// Initialize gradient boosting parameters
val numIterations = boostingStrategy.numIterations
val baseLearners = new Array[DecisionTreeRegressionModel](numIterations)
val baseLearnerWeights = new Array[Double](numIterations)
val loss = boostingStrategy.loss
val learningRate = boostingStrategy.learningRate
// Prepare strategy for individual trees, which use regression with variance impurity.
// The strategy is copied so the caller's boostingStrategy is not mutated.
val treeStrategy = boostingStrategy.treeStrategy.copy
val validationTol = boostingStrategy.validationTol
treeStrategy.algo = OldAlgo.Regression
treeStrategy.impurity = OldVariance
treeStrategy.assertValid()
// Cache input (only if the caller has not already persisted it; remembered
// so we only unpersist what we persisted ourselves).
val persistedInput = if (input.getStorageLevel == StorageLevel.NONE) {
input.persist(StorageLevel.MEMORY_AND_DISK)
true
} else {
false
}
// Prepare periodic checkpointers to truncate the growing RDD lineage of the
// running (prediction, error) pairs.
val predErrorCheckpointer = new PeriodicRDDCheckpointer[(Double, Double)](
treeStrategy.getCheckpointInterval, input.sparkContext)
val validatePredErrorCheckpointer = new PeriodicRDDCheckpointer[(Double, Double)](
treeStrategy.getCheckpointInterval, input.sparkContext)
timer.stop("init")
logDebug("##########")
logDebug("Building tree 0")
logDebug("##########")
// Initialize tree: the first base learner gets weight 1.0 (not learningRate).
timer.start("building tree 0")
val firstTree = new DecisionTreeRegressor().setSeed(seed)
val firstTreeModel = firstTree.train(input, treeStrategy, featureSubsetStrategy)
val firstTreeWeight = 1.0
baseLearners(0) = firstTreeModel
baseLearnerWeights(0) = firstTreeWeight
var predError: RDD[(Double, Double)] =
computeInitialPredictionAndError(input, firstTreeWeight, firstTreeModel, loss)
predErrorCheckpointer.update(predError)
logDebug("error of gbt = " + predError.values.mean())
// Note: A model of type regression is used since we require raw prediction
timer.stop("building tree 0")
// NOTE(review): validatePredError is defined unconditionally (its RDD is
// lazy), but only evaluated/checkpointed when validate is true.
var validatePredError: RDD[(Double, Double)] =
computeInitialPredictionAndError(validationInput, firstTreeWeight, firstTreeModel, loss)
if (validate) validatePredErrorCheckpointer.update(validatePredError)
var bestValidateError = if (validate) validatePredError.values.mean() else 0.0
// bestM tracks the number of trees in the best-so-far ensemble (1-based).
var bestM = 1
var m = 1
var doneLearning = false
while (m < numIterations && !doneLearning) {
// Update data with pseudo-residuals: each label becomes the negative
// gradient of the loss at the current prediction.
val data = predError.zip(input).map { case ((pred, _), point) =>
LabeledPoint(-loss.gradient(pred, point.label), point.features)
}
timer.start(s"building tree $m")
logDebug("###################################################")
logDebug("Gradient boosting tree iteration " + m)
logDebug("###################################################")
// Vary the seed per iteration so each tree sees different randomness.
val dt = new DecisionTreeRegressor().setSeed(seed + m)
val model = dt.train(data, treeStrategy, featureSubsetStrategy)
timer.stop(s"building tree $m")
// Update partial model
baseLearners(m) = model
// Note: The setting of baseLearnerWeights is incorrect for losses other than SquaredError.
// Technically, the weight should be optimized for the particular loss.
// However, the behavior should be reasonable, though not optimal.
baseLearnerWeights(m) = learningRate
predError = updatePredictionError(
input, predError, baseLearnerWeights(m), baseLearners(m), loss)
predErrorCheckpointer.update(predError)
logDebug("error of gbt = " + predError.values.mean())
if (validate) {
// Stop training early if
// 1. Reduction in error is less than the validationTol or
// 2. If the error increases, that is if the model is overfit.
// We want the model returned corresponding to the best validation error.
validatePredError = updatePredictionError(
validationInput, validatePredError, baseLearnerWeights(m), baseLearners(m), loss)
validatePredErrorCheckpointer.update(validatePredError)
val currentValidateError = validatePredError.values.mean()
// Tolerance is relative to the current error, floored at 0.01.
if (bestValidateError - currentValidateError < validationTol * Math.max(
currentValidateError, 0.01)) {
doneLearning = true
} else if (currentValidateError < bestValidateError) {
bestValidateError = currentValidateError
bestM = m + 1
}
}
m += 1
}
timer.stop("total")
logInfo("Internal timing for DecisionTree:")
logInfo(s"$timer")
// Release cached/checkpointed intermediate state before returning.
predErrorCheckpointer.unpersistDataSet()
predErrorCheckpointer.deleteAllCheckpoints()
validatePredErrorCheckpointer.unpersistDataSet()
validatePredErrorCheckpointer.deleteAllCheckpoints()
if (persistedInput) input.unpersist()
// With validation, return only the prefix of trees that achieved the best
// validation error; otherwise return all trained trees.
if (validate) {
(baseLearners.slice(0, bestM), baseLearnerWeights.slice(0, bestM))
} else {
(baseLearners, baseLearnerWeights)
}
}
}
| pgandhi999/spark | mllib/src/main/scala/org/apache/spark/ml/tree/impl/GradientBoostedTrees.scala | Scala | apache-2.0 | 15,214 |
package spire.math
import spire.algebra.{IsIntegral, Order, Rig, Signed}
/** Companion for [[UInt]]: factory methods and the unsigned bounds. */
object UInt extends UIntInstances {
  // Wrap the raw bit pattern; a Long is truncated to its low 32 bits.
  @inline final def apply(n: Int): UInt = new UInt(n)
  @inline final def apply(n: Long): UInt = new UInt(n.toInt)
  // Unsigned extremes: all-zero bits and all-one bits.
  @inline final val MinValue: UInt = new UInt(0)
  @inline final val MaxValue: UInt = new UInt(0xffffffff)
}
/**
 * Unsigned 32-bit integer implemented as a value class over the signed `Int`
 * bit pattern. Add/subtract/multiply reuse the signed two's-complement ops
 * (identical bit results); comparison, division and remainder widen to `Long`
 * so the unsigned semantics are used.
 */
class UInt(val signed: Int) extends AnyVal {
  // Narrowing conversions reinterpret the low-order bits.
  def toByte: Byte = signed.toByte
  def toChar: Char = signed.toChar
  def toShort: Short = signed.toShort
  def toInt: Int = signed
  // Widening must mask to avoid sign extension.
  def toLong: Long = signed & 0xffffffffL
  def toFloat: Float = toLong.toFloat
  def toDouble: Double = toLong.toDouble
  def toBigInt: BigInt = BigInt(toLong)
  def isValidByte: Boolean = toInt == toByte
  def isValidShort: Boolean = toInt == toShort
  def isValidChar: Boolean = toInt == toChar
  // Valid as a signed Int only when the top bit is clear.
  def isValidInt: Boolean = signed >= 0
  def isValidLong: Boolean = true
  override def toString: String = toLong.toString
  def == (that: UInt): Boolean = this.signed == that.signed
  def != (that: UInt): Boolean = this.signed != that.signed
  // Comparisons use the widened Long values, giving the unsigned ordering
  // (e.g. UInt(-1) is the maximum value, not the minimum).
  def <= (that: UInt): Boolean = this.toLong <= that.toLong
  def < (that: UInt): Boolean = this.toLong < that.toLong
  def >= (that: UInt): Boolean = this.toLong >= that.toLong
  def > (that: UInt): Boolean = this.toLong > that.toLong
  // Fix: two's-complement negation, wrapping modulo 2^32.
  // Previously this returned UInt(this.signed), i.e. the value unchanged.
  def unary_- : UInt = UInt(-this.signed)
  def + (that: UInt): UInt = UInt(this.signed + that.signed)
  def - (that: UInt): UInt = UInt(this.signed - that.signed)
  def * (that: UInt): UInt = UInt(this.signed * that.signed)
  // Division and remainder differ between signed and unsigned: widen first.
  def / (that: UInt): UInt = UInt(this.toLong / that.toLong)
  def % (that: UInt): UInt = UInt(this.toLong % that.toLong)
  def unary_~ : UInt = UInt(~this.signed)
  def << (shift: Int): UInt = UInt(signed << shift)
  // Right shift of an unsigned value is always logical (zero-filling).
  def >> (shift: Int): UInt = UInt(signed >>> shift)
  def >>> (shift: Int): UInt = UInt(signed >>> shift)
  def & (that: UInt): UInt = UInt(this.signed & that.signed)
  def | (that: UInt): UInt = UInt(this.signed | that.signed)
  def ^ (that: UInt): UInt = UInt(this.signed ^ that.signed)
  // `pow` comes from the spire.math package object (Long exponentiation).
  def ** (that: UInt): UInt = UInt(pow(this.toLong, that.toLong))
}
/**
 * Implicit instances for [[UInt]]. Explicit result types are declared so the
 * public implicit API does not depend on type inference (recommended practice
 * for implicit members).
 */
trait UIntInstances {
  implicit final val UIntAlgebra: UIntAlgebra = new UIntAlgebra
  implicit final val UIntBitString: UIntBitString = new UIntBitString
  import spire.math.NumberTag._
  implicit final val UIntTag: UnsignedIntTag[UInt] =
    new UnsignedIntTag[UInt](UInt.MinValue, UInt.MaxValue)
}
/** `Rig` instance: UInt forms a rig (semiring with zero and one). */
private[math] trait UIntIsRig extends Rig[UInt] {
  def zero: UInt = UInt(0)
  def one: UInt = UInt(1)
  def plus(a: UInt, b: UInt): UInt = a + b
  override def times(a: UInt, b: UInt): UInt = a * b
  // Exponent must be non-negative; UInt has no multiplicative inverses.
  override def pow(a: UInt, b: Int): UInt =
    if (b >= 0) a ** UInt(b)
    else throw new IllegalArgumentException("negative exponent: %s" format b)
}
/** Total order for UInt, delegating to its (unsigned) comparison operators. */
private[math] trait UIntOrder extends Order[UInt] {
  def compare(x: UInt, y: UInt): Int =
    if (x == y) 0 else if (x < y) -1 else 1
  override def eqv(x: UInt, y: UInt): Boolean = x == y
  override def neqv(x: UInt, y: UInt): Boolean = x != y
  override def lt(x: UInt, y: UInt): Boolean = x < y
  override def lteqv(x: UInt, y: UInt): Boolean = x <= y
  override def gt(x: UInt, y: UInt): Boolean = x > y
  override def gteqv(x: UInt, y: UInt): Boolean = x >= y
}
/**
 * `BitString` instance for [[UInt]], delegating to `java.lang.Integer`
 * bit-twiddling helpers on the underlying bit pattern.
 */
@SerialVersionUID(0L)
private[math] class UIntBitString extends BitString[UInt] with Serializable {
  // For a bit string, `one` is the all-ones pattern, `zero` all zeroes.
  def one: UInt = UInt(-1)
  def zero: UInt = UInt(0)
  def and(a: UInt, b: UInt): UInt = a & b
  def or(a: UInt, b: UInt): UInt = a | b
  def complement(a: UInt): UInt = ~a
  override def xor(a: UInt, b: UInt): UInt = a ^ b
  def signed: Boolean = false
  def width: Int = 32
  def toHexString(n: UInt): String = Integer.toHexString(n.signed)
  def bitCount(n: UInt): Int = Integer.bitCount(n.signed)
  def highestOneBit(n: UInt): UInt = UInt(Integer.highestOneBit(n.signed))
  def lowestOneBit(n: UInt): UInt = UInt(Integer.lowestOneBit(n.signed))
  def numberOfLeadingZeros(n: UInt): Int = Integer.numberOfLeadingZeros(n.signed)
  def numberOfTrailingZeros(n: UInt): Int = Integer.numberOfTrailingZeros(n.signed)
  def leftShift(n: UInt, i: Int): UInt = n << i
  def rightShift(n: UInt, i: Int): UInt = n >> i
  // Fix: a *signed* right shift must replicate the top bit, i.e. use the
  // arithmetic shift on the underlying Int. The previous `n >>> i` is UInt's
  // logical shift, which made signedRightShift identical to rightShift.
  def signedRightShift(n: UInt, i: Int): UInt = UInt(n.signed >> i)
  def rotateLeft(n: UInt, i: Int): UInt = UInt(Integer.rotateLeft(n.signed, i))
  def rotateRight(n: UInt, i: Int): UInt = UInt(Integer.rotateRight(n.signed, i))
}
/** `Signed` instance: no UInt is negative, so abs is the identity. */
private[math] trait UIntIsSigned extends Signed[UInt] {
  // signum is 0 for zero and 1 for every other (non-negative) value.
  def signum(a: UInt): Int = if (a.signed == 0) 0 else 1
  def abs(a: UInt): UInt = a
}
// `IsIntegral` instance: combines the order and sign instances with the
// numeric conversions required by the type class.
private[math] trait UIntIsReal extends IsIntegral[UInt] with UIntOrder with UIntIsSigned {
def toDouble(n: UInt): Double = n.toDouble
def toBigInt(n: UInt): BigInt = n.toBigInt
}
// Single serializable class combining the Rig and IsIntegral instances; one
// shared instance is exposed implicitly via UIntInstances.
@SerialVersionUID(0L)
private[math] class UIntAlgebra extends UIntIsRig with UIntIsReal with Serializable
| woparry/spire | core/src/main/scala/spire/math/UInt.scala | Scala | mit | 4,731 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Prettifier
import collection.GenTraversable
import SharedHelpers._
import matchers.should.Matchers._
import org.scalactic.ArrayHelper.deep
// Verifies that the `contain allOf (...)` matcher honors custom Equality
// instances, both implicit (in scope) and explicit (passed in), and that the
// TestFailedException it produces reports the correct message, file name and
// line number.
//
// NOTE: the `thisLineNumber - 2` assertions below are sensitive to the exact
// line distance between the failing `should` statement and the check call;
// do not insert or remove lines inside the `it` bodies.
class AllOfContainMatcherEqualitySpec extends funspec.AnyFunSpec {
private val prettifier = Prettifier.default
// Equality[String] that trims both sides before comparing.
class TrimEquality extends Equality[String] {
def areEqual(left: String, right: Any) =
left.trim == (right match {
case s: String => s.trim
case other => other
})
}
// Equality for (Int, String) tuples: exact key match, trimmed value match.
class MapTrimEquality extends Equality[(Int, String)] {
def areEqual(left: (Int, String), right: Any) =
right match {
case t2: Tuple2[_, _] =>
left._1 == t2._1 &&
left._2.trim == (t2._2 match {
case s: String => s.trim
case other => other
})
case right => left == right
}
}
// Same as MapTrimEquality but for java.util.Map.Entry values.
class JavaMapTrimEquality extends Equality[java.util.Map.Entry[Int, String]] {
def areEqual(left: java.util.Map.Entry[Int, String], right: Any) =
right match {
case rightEntry: java.util.Map.Entry[_, _] =>
left.getKey == rightEntry.getKey &&
left.getValue.trim == (rightEntry.getValue match {
case s: String => s.trim
case other => other
})
case right => left == right
}
}
// Equalities that never match anything, used to force containment failures.
class FalseEquality extends Equality[Int] {
def areEqual(left: Int, right: Any): Boolean = false
}
class MapFalseEquality extends Equality[(Int, String)] {
def areEqual(left: (Int, String), right: Any): Boolean = false
}
class JavaMapFalseEquality extends Equality[java.util.Map.Entry[Int, String]] {
def areEqual(left: java.util.Map.Entry[Int, String], right: Any): Boolean = false
}
describe("allOf ") {
// Asserts the failure produced when `should contain allOf` fails: message
// text, source file name, and the (caller-computed) failing line number.
def checkShouldContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(leftText + " did not contain all of (" + right.map(r => FailureMessages.decorateToStringValue(prettifier, r)).mkString(", ") + ")"))
e.failedCodeFileName should be (Some("AllOfContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
// Same as above, for the negated form `should not contain allOf`.
def checkShouldNotContainStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(leftText + " contained all of (" + right.map(r => FailureMessages.decorateToStringValue(prettifier, r)).mkString(", ") + ")"))
e.failedCodeFileName should be (Some("AllOfContainMatcherEqualitySpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
// Implicit trim-based equalities make differently-padded elements match.
it("should take custom implicit equality in scope when 'should contain' is used") {
implicit val trimEquality = new TrimEquality
implicit val mapTrimEquality = new MapTrimEquality
implicit val javaMapTrimEquality = new JavaMapTrimEquality
List("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3")
Set("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3")
Array("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3")
Map(1 -> "one ", 2 -> "two", 3 -> "three ") should contain allOf (1 -> "one", 2 -> "two ", 3 -> "three")
// SKIP-SCALATESTJS,NATIVE-START
javaList("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3")
javaSet("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3")
javaMap(Entry(1, "one "), Entry(2, "two"), Entry(3, "three ")) should contain allOf (Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))
// SKIP-SCALATESTJS,NATIVE-END
}
// Trim equality is still case-sensitive, so these negations hold.
it("should take custom implicit equality in scope when 'should not contain' is used") {
implicit val trimEquality = new TrimEquality
implicit val mapTrimEquality = new MapTrimEquality
implicit val javaMapTrimEquality = new JavaMapTrimEquality
List("A ", "B", "C ") should not contain allOf ("a ", "b", "c ")
Set("A ", "B", "C ") should not contain allOf ("a ", "b", "c ")
Array("A ", "B", "C ") should not contain allOf ("a ", "b", "c ")
Map(1 -> "A ", 2 -> "B", 3 -> "C ") should not contain allOf (1 -> "a ", 2 -> "b", 3 -> "c ")
// SKIP-SCALATESTJS,NATIVE-START
javaList("A ", "B", "C ") should not contain allOf ("a ", "b", "c ")
javaSet("A ", "B", "C ") should not contain allOf ("a ", "b", "c ")
javaMap(Entry(1, "A "), Entry(2, "B"), Entry(3, "C ")) should not contain allOf (Entry(1, "a "), Entry(2, "b"), Entry(3, "c "))
// SKIP-SCALATESTJS,NATIVE-END
}
// FalseEquality forces failures even on identical elements; each failure's
// stack depth info is then verified.
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom implicit equality in scope") {
implicit val falseEquality = new FalseEquality
implicit val mapFalseEquality = new MapFalseEquality
implicit val javaMapFalseEquality = new JavaMapFalseEquality
val left1 = List(1, 2, 3)
val e1 = intercept[exceptions.TestFailedException] {
left1 should contain allOf (1, 2, 3)
}
checkShouldContainStackDepth(e1, left1, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left2 = Set(1, 2, 3)
val e2 = intercept[exceptions.TestFailedException] {
left2 should contain allOf (1, 2, 3)
}
checkShouldContainStackDepth(e2, left2, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left3 = Array(1, 2, 3)
val e3 = intercept[exceptions.TestFailedException] {
left3 should contain allOf (1, 2, 3)
}
checkShouldContainStackDepth(e3, left3, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left4 = Map(1 -> "one", 2 -> "two", 3 -> "three")
val e4 = intercept[exceptions.TestFailedException] {
left4 should contain allOf (1 -> "one", 2 -> "two", 3 -> "three")
}
checkShouldContainStackDepth(e4, left4, deep(Array(1 -> "one", 2 -> "two", 3 -> "three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList(1, 2, 3)
val e5 = intercept[exceptions.TestFailedException] {
left5 should contain allOf (1, 2, 3)
}
checkShouldContainStackDepth(e5, left5, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left6 = javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
val e6 = intercept[exceptions.TestFailedException] {
left6 should contain allOf (Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
}
checkShouldContainStackDepth(e6, left6, deep(Array(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
// Trim equality makes the negation fail (elements do match after trim).
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom implicit equality in scope") {
implicit val trimEquality = new TrimEquality
implicit val mapTrimEquality = new MapTrimEquality
implicit val javaMapTrimEquality = new JavaMapTrimEquality
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
left1 should not contain allOf ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e1, left1, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left2 = Set("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
left2 should not contain allOf ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e2, left2, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left3 = Array("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
left3 should not contain allOf ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e3, left3, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left4 = Map(1 -> "one ", 2 -> "two", 3 -> "three ")
val e4 = intercept[exceptions.TestFailedException] {
left4 should not contain allOf (1 -> "one", 2 -> "two ", 3 -> "three")
}
checkShouldNotContainStackDepth(e4, left4, deep(Array(1 -> "one", 2 -> "two ", 3 -> "three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList("1 ", "2", "3 ")
val e5 = intercept[exceptions.TestFailedException] {
left5 should not contain allOf ("1", "2 ", "3")
}
checkShouldNotContainStackDepth(e5, left5, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left6 = javaMap(Entry(1, "one "), Entry(2, "two"), Entry(3, "three "))
val e6 = intercept[exceptions.TestFailedException] {
left6 should not contain allOf (Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))
}
checkShouldNotContainStackDepth(e6, left6, deep(Array(Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
// Same as the implicit cases above, but passing the Equality explicitly.
it("should take passed in custom explicit equality when 'should contain' is used") {
val trimEquality = new TrimEquality
val mapTrimEquality = new MapTrimEquality
val javaMapTrimEquality = new JavaMapTrimEquality
(List("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3 ")) (trimEquality)
(Set("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3 ")) (trimEquality)
(Array("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3 ")) (trimEquality)
(Map(1 -> "one ", 2 -> "two", 3 -> "three ") should contain allOf (1 -> "one", 2 -> "two ", 3 -> "three")) (mapTrimEquality)
// SKIP-SCALATESTJS,NATIVE-START
(javaList("1 ", "2", "3 ") should contain allOf ("1", "2 ", "3 ")) (trimEquality)
(javaMap(Entry(1, "one "), Entry(2, "two"), Entry(3, "three ")) should contain allOf (Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))) (javaMapTrimEquality)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should take passed in custom explicit equality when 'should not contain' is used") {
val equality = new FalseEquality
(List(1, 2, 3) should not contain allOf (1, 2, 3)) (equality)
(Set(1, 2, 3) should not contain allOf (1, 2, 3)) (equality)
(Array(1, 2, 3) should not contain allOf (1, 2, 3)) (equality)
val mapEquality = new MapFalseEquality
(Map(1 -> "one", 2 -> "two", 3 -> "three") should not contain allOf (1 -> "one", 2 -> "two", 3 -> "three")) (mapEquality)
// SKIP-SCALATESTJS,NATIVE-START
(javaList(1, 2, 3) should not contain allOf (1, 2, 3)) (equality)
val javaMapEquality = new JavaMapFalseEquality
(javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three")) should not contain allOf (Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))) (javaMapEquality)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should contain custom matcher' failed with custom explicit equality") {
val equality = new FalseEquality
val left1 = List(1, 2, 3)
val e1 = intercept[exceptions.TestFailedException] {
(left1 should contain allOf (1, 2, 3)) (equality)
}
checkShouldContainStackDepth(e1, left1, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left2 = Set(1, 2, 3)
val e2 = intercept[exceptions.TestFailedException] {
(left2 should contain allOf (1, 2, 3)) (equality)
}
checkShouldContainStackDepth(e2, left2, deep(Array(1, 2, 3)), thisLineNumber - 2)
val left3 = Array(1, 2, 3)
val e3 = intercept[exceptions.TestFailedException] {
(left3 should contain allOf (1, 2, 3)) (equality)
}
checkShouldContainStackDepth(e3, left3, deep(Array(1, 2, 3)), thisLineNumber - 2)
val mapEquality = new MapFalseEquality
val left4 = Map(1 -> "one", 2 -> "two", 3 -> "three")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should contain allOf (1 -> "one", 2 -> "two", 3 -> "three")) (mapEquality)
}
checkShouldContainStackDepth(e4, left4, deep(Array(1 -> "one", 2 -> "two", 3 -> "three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList(1, 2, 3)
val e5 = intercept[exceptions.TestFailedException] {
(left5 should contain allOf (1, 2, 3)) (equality)
}
checkShouldContainStackDepth(e5, left5, deep(Array(1, 2, 3)), thisLineNumber - 2)
val javaMapEquality = new JavaMapFalseEquality
val left6 = javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should contain allOf (Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))) (javaMapEquality)
}
checkShouldContainStackDepth(e6, left6, deep(Array(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when 'should not contain custom matcher' failed with custom explicit equality") {
val trimEquality = new TrimEquality
val left1 = List("1 ", "2", "3 ")
val e1 = intercept[exceptions.TestFailedException] {
(left1 should not contain allOf ("1", "2 ", "3")) (trimEquality)
}
checkShouldNotContainStackDepth(e1, left1, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left2 = Set("1 ", "2", "3 ")
val e2 = intercept[exceptions.TestFailedException] {
(left2 should not contain allOf ("1", "2 ", "3")) (trimEquality)
}
checkShouldNotContainStackDepth(e2, left2, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val left3 = Array("1 ", "2", "3 ")
val e3 = intercept[exceptions.TestFailedException] {
(left3 should not contain allOf ("1", "2 ", "3")) (trimEquality)
}
checkShouldNotContainStackDepth(e3, left3, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val mapTrimEquality = new MapTrimEquality
val left4 = Map(1 -> "one ", 2 -> "two", 3 -> "three ")
val e4 = intercept[exceptions.TestFailedException] {
(left4 should not contain allOf (1 -> "one", 2 -> "two ", 3 -> "three")) (mapTrimEquality)
}
checkShouldNotContainStackDepth(e4, left4, deep(Array(1 -> "one", 2 -> "two ", 3 -> "three")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left5 = javaList("1 ", "2", "3 ")
val e5 = intercept[exceptions.TestFailedException] {
(left5 should not contain allOf ("1", "2 ", "3")) (trimEquality)
}
checkShouldNotContainStackDepth(e5, left5, deep(Array("1", "2 ", "3")), thisLineNumber - 2)
val javaMapTrimEquality = new JavaMapTrimEquality
val left6 = javaMap(Entry(1, "one "), Entry(2, "two"), Entry(3, "three "))
val e6 = intercept[exceptions.TestFailedException] {
(left6 should not contain allOf (Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))) (javaMapTrimEquality)
}
checkShouldNotContainStackDepth(e6, left6, deep(Array(Entry(1, "one"), Entry(2, "two "), Entry(3, "three"))), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
}
} | scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/AllOfContainMatcherEqualitySpec.scala | Scala | apache-2.0 | 16,042 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity, TensorModule}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.serializer._
import com.intel.analytics.bigdl.utils.serializer.{ContainerSerializable, ModuleSerializer}
import com.intel.analytics.bigdl.utils.{T, Table}
import com.intel.analytics.bigdl.utils.serializer.converters.DataConverter
import serialization.Bigdl.{AttrValue, BigDLModule}
import scala.reflect.runtime.universe
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
/**
* [[Recurrent]] module is a container of rnn cells
* Different types of rnn cells can be added using add() function
*
* The recurrent includes some mask mechanisms
* if the `maskZero` variable is set to true, the `Recurrent` module will
* not consider zero vector inputs. For each time step input, if a certain row is
* a zero vector (all the elements of the vector equals zero), then output of certain row
* of this time step would be a zero vector, and the hidden state of the certain row of
* this time step would be the same as the corresponding row of the hidden state of the
* previous step.
*
*/
// Container that unrolls a single RNN cell along the time dimension (dim 2) of the input.
class Recurrent[T : ClassTag](
var batchNormParams: BatchNormParams[T] = null,
var maskZero: Boolean = false
)
(implicit ev: TensorNumeric[T]) extends DynamicContainer[Tensor[T], Tensor[T], T] {
// Hidden state shared across time steps; gradHidden aliases it (see initHidden).
protected var hidden: Activity = null
protected var gradHidden: Activity = null
protected var hiddenShape: Array[Int] = null
// Reusable tables fed to the cell each step: T(input, hidden) and its gradient counterpart.
protected var currentInput = T()
protected val currentGradOutput = T()
// Sequence-level buffers: the (optionally pre-processed) input and its gradient.
protected val gradInput2Cell = Tensor[T]()
protected var input2Cell = Tensor[T]()
protected var _input = T()
// Dimension conventions: batch is dim 1, time is dim 2 of the input tensor.
protected val batchDim = Recurrent.batchDim
protected val timeDim = Recurrent.timeDim
// Positions of the input / hidden entries inside the tables passed to the cell.
protected val inputDim = 1
protected val hidDim = 2
protected var (batchSize, times) = (0, 0)
// The user-supplied cell; cloned once per time step by cloneCells().
protected var topology: Cell[T] = null
// Scratch tensors reused by Recurrent.selectCopy during forward/backward.
protected val stepInput2CellBuf = Tensor[T]()
protected val stepGradBuffer = Tensor[T]()
// Optional module applied once to the whole sequence before the recurrence.
protected var preTopology: AbstractModule[Activity, Activity, T] = null
// Dropout layers found in each cloned cell; their noise is shared so every
// time step drops the same units (see share()).
private val dropouts: ArrayBuffer[Array[Dropout[T]]] =
new ArrayBuffer[Array[Dropout[T]]]
private var layer: TensorModule[T] = null
// maskZero bookkeeping: per-(batch, time) max-abs values, max indices, abs(input).
private var maskBuffer: Tensor[T] = Tensor()
private var gradOutputBuff: Table = T()
private var indexBuffer: Tensor[T] = Tensor()
private var inputBuffer: Tensor[T] = Tensor()
// One output copy per time step; needed when masking must decouple hidden h from output o.
private var outputBuffers: ArrayBuffer[Tensor[T]] = ArrayBuffer(Tensor())
// Length of the shortest non-padded sequence in the batch; 0 until computed.
private var minLength: Int = 0
/**
*
* modules: -- preTopology
* |- BatchNormalization (optional)
* |- topology (cell)
*
* The topology (or cell) will be cloned for N times w.r.t the time dimension.
* The preTopology will be executed only once before the recurrence.
*
* @param module module to be added; must be a [[Cell]]
* @return this container
*/
override def add(module: AbstractModule[_ <: Activity, _ <: Activity, T]): this.type = {
require(module.isInstanceOf[Cell[T]],
"Recurrent: added module should be Cell type!")
require(!module.isInstanceOf[MultiRNNCell[T]],
"Recurrent: added module cannot be MultiRNNCell," +
"use Sequential().add(Recurrent(cell)).add(Recurrent(cell))... instead!")
topology = module.asInstanceOf[Cell[T]]
// Wrap the cell's preTopology (if any) so it runs over the whole sequence at once.
preTopology = if (topology.preTopology != null) {
TimeDistributed(topology.preTopology, maskZero = maskZero)
} else topology.preTopology
if (batchNormParams != null && preTopology == null) {
throw new IllegalArgumentException(
s"${topology.getName} does not support BatchNormalization." +
s" Please add preTopology for it. You can simply using: " +
s"override def preTopology: AbstractModule[Activity, Activity, T] = Identity()")
}
// Insert time-distributed batch normalization after the preTopology when requested.
if (batchNormParams != null) {
layer = batchNormalization(batchNormParams)
preTopology = Sequential[T]().add(preTopology).add(layer)
}
if (preTopology != null) {
modules += preTopology
}
modules += topology
require((preTopology == null && modules.length == 1) ||
(topology != null && preTopology != null && modules.length == 2),
"Recurrent extend: should contain only one cell or plus a pre-topology" +
" to process input")
this
}
// Builds the TimeDistributed(BatchNormalization) layer from the stored hyper-parameters.
private def batchNormalization(batchNormParams: BatchNormParams[T]) = {
TimeDistributed[T](BatchNormalization[T](
nOutput = topology.hiddenSizeOfPreTopo,
batchNormParams.eps,
batchNormParams.momentum,
affine = batchNormParams.affine,
batchNormParams.initWeight,
batchNormParams.initBias,
batchNormParams.initGradWeight,
batchNormParams.initGradBias))
}
// list of cell modules cloned from added modules
protected val cells: ArrayBuffer[Cell[T]]
= ArrayBuffer[Cell[T]]()
/**
* Clone N models; N depends on the time dimension of the input
* @param sizes the first element is the hidden size; the rest are the
*              remaining feature dimensions (e.g. image sizes)
*/
protected def initHidden(sizes: Array[Int]): Unit = {
val stepShape = sizes
if (hidden == null) {
cells.clear()
cells += topology
val cell = cells.head
// The cell will help initialize or resize the hidden variable.
hidden = cell.hidResize(hidden = null, batchSize = batchSize, stepShape)
/*
* Since the gradHidden is only used as an empty Tensor or Table during
* backward operations. We can reuse the hidden variable by pointing the
* gradHidden to it.
*/
gradHidden = hidden
} else {
cells.head.hidResize(hidden = hidden, batchSize = batchSize, stepShape)
gradHidden = hidden
}
}
// Ensures there is one (weight-sharing) cell clone per time step.
protected def cloneCells(): Unit = {
var t = cells.length
if (t < times) {
// Template clone with weight/gradient storages detached; real clones share
// storage again via share() below.
val cloneCell = cells.head.cloneModule()
cloneCell.parameters()._1.map(_.set())
cloneCell.parameters()._2.map(_.set())
// preTopology's output is useless here, clear it.
// Notice: preTopology is a merge output of all i2h,
// it's a bigdl tensor, and shouldn't be cloned.
if (cloneCell.preTopology != null) {
cloneCell.preTopology.output.set()
}
while (t < times) {
cells += cloneCell.cloneModule()
.asInstanceOf[Cell[T]]
outputBuffers.append(Tensor())
t += 1
}
share(cells)
}
}
/**
* Sharing weights, bias, gradWeights across all the cells in time dim
* @param cells cell clones whose parameters should alias the head cell's storage
*/
def share(cells: ArrayBuffer[Cell[T]]): Unit = {
val params = cells.head.parameters()
cells.foreach(c => {
if (!c.parameters().eq(params)) {
var i = 0
while (i < c.parameters()._1.length) {
c.parameters()._1(i).set(params._1(i))
i += 1
}
i = 0
while (i < c.parameters()._2.length) {
c.parameters()._2(i).set(params._2(i))
i += 1
}
dropouts.append(findDropouts(c))
}
})
// Share dropout noise across steps so the same units are dropped at every time step.
val stepLength = dropouts.length
for (i <- dropouts.head.indices) {
val head = dropouts.head(i)
val noise = head.noise
for (j <- 1 until stepLength) {
val current = dropouts(j)(i)
current.noise = noise
current.isResampling = false
}
}
}
// Collects all Dropout sub-modules of a cell; returns null for non-container cells.
def findDropouts(cell: Cell[T]): Array[Dropout[T]] = {
var result: Array[Dropout[T]] = null
cell.cell match {
case container: Container[_, _, T] =>
result = container
.findModules("Dropout")
.toArray
.map(_.asInstanceOf[Dropout[T]])
case _ =>
}
result
}
// Forward pass: run preTopology over the whole sequence, then unroll the cell.
override def updateOutput(input: Tensor[T]): Tensor[T] = {
require(input.dim == 3 || input.dim == 5 || input.dim == 6,
"Recurrent: input should be a 3D/5D/6D Tensor, e.g [batch, times, nDim], " +
s"current input.dim = ${input.dim}")
batchSize = input.size(batchDim)
times = input.size(timeDim)
input2Cell = if (preTopology != null) {
preTopology.forward(input).toTensor[T]
} else {
input
}
val hiddenSize = topology.hiddensShape(0)
val outputSize = input.size()
// Replace the feature dimension (3rd size entry) with the cell's hidden size.
outputSize(2) = hiddenSize
output.resize(outputSize)
/**
* currentInput forms a T() type. It contains two elements, hidden and input.
* Each time it will feed the cell with T(hidden, input) (or T(input, hidden) depends on
* your hidDim and inputDim), and the cell will give a table output containing two
* identical elements T(output, output). One of the elements from the cell output is
* the updated hidden. Thus the currentInput will update its hidden element with this output.
*/
var i = 1
// Clone N modules along the sequence dimension.
initHidden(outputSize.drop(2))
cloneCells()
if (maskZero) {
require(input.dim == 3,
"If maskZero set to true, input should be a 3D Tensor, e.g [batch, times, nDim]")
// maskBuffer(b, t, 1) = max |input(b, t, :)|; a zero entry marks a padding step.
inputBuffer.resizeAs(input).abs(input).max(maskBuffer, indexBuffer, 3)
// minLength = shortest number of non-padded steps across the batch.
minLength = ev.toType[Int](maskBuffer.sign().sum(2).min(1)._1(Array(1, 1, 1)))
}
currentInput(hidDim) = if (initHiddenState != null) initHiddenState
else hidden
while (i <= times) {
currentInput(inputDim) = Recurrent.selectCopy(input2Cell, i, stepInput2CellBuf)
cells(i - 1).forward(currentInput)
val curOutput = cells(i - 1).output
// Masked steps only exist once i exceeds the shortest sequence length.
if (maskZero && i > minLength) {
val curMask = maskBuffer.select(2, i)
val curOut = curOutput[Table](hidDim)[Tensor[T]](1)
// Copy output to a separate tensor, because for some cells (such as LSTM)
// the hidden h and output o refer to the same tensor. But in this case,
// we want h and o to hold different values.
curOutput.update(inputDim, outputBuffers(i - 1).resizeAs(curOut).copy(curOut))
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
// Padding step: carry the previous hidden state through and zero the output row.
val newState = curOutput[Table](hidDim)
val originState = currentInput[Table](hidDim)
for (j <- 1 to newState.length()) {
newState[Tensor[T]](j).select(1, b).copy(originState[Tensor[T]](j).select(1, b))
}
curOutput[Tensor[T]](inputDim).select(1, b).zero()
}
}
}
currentInput(hidDim) = curOutput[Table](hidDim)
i += 1
}
// Gather every step's output slice into the sequence-level output tensor.
Recurrent.copy(cells.map(x => x.output.toTable[Tensor[T]](inputDim)),
output)
output
}
// get hidden state at the last time step
def getHiddenState(): Activity = {
require(cells != null && cells(times - 1).output != null,
"getHiddenState need to be called after updateOutput")
cells(times - 1).output.toTable(hidDim)
}
// set hidden state at the first time step
protected var initHiddenState: Activity = null
def setHiddenState(hiddenState: Activity): Unit = {
initHiddenState = hiddenState
}
// Accumulates weight/bias gradients by replaying the unrolled cells backwards in time.
override def accGradParameters(input: Tensor[T], gradOutput: Tensor[T]): Unit = {
currentGradOutput(hidDim) = gradHidden
/**
* Since we clone module along the time dimension, the output of each
* iteration has been recorded by the cloned modules. Thus, we can
* reuse these outputs during the backward operations by copying the
* outputs to _input variable.
*
* The output of Cell(i-1) should be one of the elements fed to the inputs
* of Cell(i)
* The first module in the cells array accepts zero hidden parameter.
*/
var i = times
while (i >= 1) {
currentGradOutput(inputDim) = Recurrent.selectCopy(gradOutput, i, stepGradBuffer)
_input(hidDim) = if (i > 1) cells(i - 2).output.toTable(hidDim)
else if (initHiddenState == null) hidden else initHiddenState
_input(inputDim) = Recurrent.selectCopy(input2Cell, i, stepInput2CellBuf)
// Apply regularization only at the first step so it is counted once, not T times.
if (i == 1) {
cells(i - 1).regluarized(true)
} else {
cells(i - 1).regluarized(false)
}
if (maskZero && i > minLength) {
val curMask = maskBuffer.select(2, i)
if (gradOutputBuff.length() == 0) {
Utils.recursiveResizeAs(gradOutputBuff, currentGradOutput)
}
Utils.recursiveCopy(gradOutputBuff, currentGradOutput)
// Zero the hidden-gradient rows of masked (padding) samples.
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val originState = gradOutputBuff[Table](Recurrent.hidDim)
for (j <- 1 to originState.length()) {
originState[Tensor[T]](j).select(1, b).zero()
}
}
}
// NOTE(review): gradOutputBuff is filled and its masked rows zeroed above,
// but the call below passes currentGradOutput — unlike backward(), which
// passes gradOutputBuff. Verify this asymmetry is intentional.
cells(i - 1).accGradParameters(_input, currentGradOutput)
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val newState = cells(i - 1).gradInput[Table](hidDim)
val originState = currentGradOutput[Table](hidDim)
for (j <- 1 to newState.length()) {
newState[Tensor[T]](j).select(1, b).copy(originState[Tensor[T]](j).select(1, b))
}
}
}
} else {
cells(i - 1).accGradParameters(_input, currentGradOutput)
}
currentGradOutput(hidDim) = cells(i - 1).gradInput.toTable(hidDim)
i -= 1
}
if (preTopology != null) {
preTopology.accGradParameters(input, gradInput2Cell)
}
}
// Backward pass (gradient w.r.t. input only), stepping from the last time step to the first.
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
gradInput = if (preTopology != null) {
/**
* if preTopology is Sequential, it has not created gradInput.
* Thus, it needs to create a new Tensor.
*/
if (preTopology.gradInput == null) {
preTopology.gradInput = Tensor[T]()
}
preTopology.gradInput.toTensor[T]
} else {
gradInput2Cell
}
gradInput2Cell.resizeAs(input2Cell)
currentGradOutput(hidDim) = gradHidden
var i = times
while (i >= 1) {
currentGradOutput(inputDim) = Recurrent.selectCopy(gradOutput, i, stepGradBuffer)
_input(hidDim) = if (i > 1) cells(i - 2).output.toTable(hidDim)
else if (initHiddenState == null) hidden else initHiddenState
_input(inputDim) = Recurrent.selectCopy(input2Cell, i, stepInput2CellBuf)
if (maskZero && i > minLength) {
val curMask = maskBuffer.select(2, i)
if (gradOutputBuff.length() == 0) {
Utils.recursiveResizeAs(gradOutputBuff, currentGradOutput)
}
Utils.recursiveCopy(gradOutputBuff, currentGradOutput)
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val originState = gradOutputBuff[Table](Recurrent.hidDim)
for (j <- 1 to originState.length()) {
originState[Tensor[T]](j).select(1, b).zero()
}
}
}
// NOTE(review): as in accGradParameters, gradOutputBuff is prepared but
// currentGradOutput is passed below (backward() passes gradOutputBuff) — verify.
cells(i - 1).updateGradInput(_input, currentGradOutput)
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val newState = cells(i - 1).gradInput[Table](hidDim)
val originState = currentGradOutput[Table](hidDim)
for (j <- 1 to newState.length()) {
newState[Tensor[T]](j).select(1, b).copy(originState[Tensor[T]](j).select(1, b))
}
}
}
} else {
cells(i - 1).updateGradInput(_input, currentGradOutput)
}
currentGradOutput(hidDim) = cells(i - 1).gradInput.toTable(hidDim)
i -= 1
}
// Scatter each step's input-gradient back into the sequence-level buffer.
Recurrent.copy(cells.map(x => x.gradInput.toTable[Tensor[T]](inputDim)), gradInput2Cell)
if (preTopology != null) {
gradInput = preTopology.updateGradInput(input, gradInput2Cell).toTensor[T]
}
gradInput
}
// Fused updateGradInput + accGradParameters, with per-call wall-clock accounting.
override def backward(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
val before = System.nanoTime
currentGradOutput(hidDim) = gradHidden
var i = times
while (i >= 1) {
currentGradOutput(inputDim) = Recurrent.selectCopy(gradOutput, i, stepGradBuffer)
_input(hidDim) = if (i > 1) cells(i - 2).output.toTable(hidDim)
else if (initHiddenState == null) hidden else initHiddenState
_input(inputDim) = Recurrent.selectCopy(input2Cell, i, stepInput2CellBuf)
if (i == 1) {
cells(i - 1).regluarized(true)
} else {
cells(i - 1).regluarized(false)
}
if (maskZero && i > minLength) {
val curMask = maskBuffer.select(2, i)
if (gradOutputBuff.length() == 0) {
Utils.recursiveResizeAs(gradOutputBuff, currentGradOutput)
}
Utils.recursiveCopy(gradOutputBuff, currentGradOutput)
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val originState = gradOutputBuff[Table](Recurrent.hidDim)
for (j <- 1 to originState.length()) {
originState[Tensor[T]](j).select(1, b).zero()
}
}
}
cells(i - 1).backward(_input, gradOutputBuff).toTable
for (b <- 1 to curMask.size(1)) {
if (curMask(Array(b, 1)) == ev.zero) {
val newState = cells(i - 1).gradInput[Table](hidDim)
val originState = currentGradOutput[Table](hidDim)
for (j <- 1 to newState.length()) {
newState[Tensor[T]](j).select(1, b).copy(originState[Tensor[T]](j).select(1, b))
}
}
}
} else {
cells(i - 1).backward(_input, currentGradOutput)
}
currentGradOutput(hidDim) = cells(i - 1).gradInput.toTable(hidDim)
i -= 1
}
gradInput = if (preTopology != null) {
/**
* if preTopology is Sequential, it has not created gradInput.
* Thus, it needs to create a new Tensor.
*/
if (preTopology.gradInput == null) {
preTopology.gradInput = Tensor[T]()
}
preTopology.gradInput.toTensor[T]
} else {
gradInput2Cell
}
gradInput2Cell.resizeAs(input2Cell)
Recurrent.copy(cells.map(x => x.gradInput.toTable[Tensor[T]](inputDim)), gradInput2Cell)
if (preTopology != null) {
gradInput = preTopology.backward(input, gradInput2Cell).toTensor[T]
}
this.backwardTime += System.nanoTime - before
gradInput
}
// Aggregates per-cell timings (summed over steps) plus preTopology's, and
// attributes the remaining forward/backward time to this container itself.
override def getTimes(): Array[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)] = {
val timeBuffer =
new ArrayBuffer[(AbstractModule[_ <: Activity, _ <: Activity, T], Long, Long)]
if (!cells.isEmpty) {
timeBuffer.append(
cells.flatMap(_.getTimes()).reduce((a, b) => (a._1, a._2 + b._2, a._3 + b._3)))
}
if (preTopology != null) {
timeBuffer.appendAll(preTopology.getTimes())
}
// NOTE(review): the reduce below throws on an empty timeBuffer (no cells and
// no preTopology, e.g. before any forward) — verify callers never hit that.
val (bufferForward, bufferBackward) =
timeBuffer.map(t => (t._2, t._3)).reduce((a, b) => (a._1 + b._1, a._2 + b._2))
timeBuffer.append(
(this,
forwardTime - bufferForward,
backwardTime - bufferBackward))
timeBuffer.toArray
}
// Resets timing counters of this container, preTopology and all cell clones.
override def resetTimes(): Unit = {
super.resetTimes()
if (preTopology != null) {
preTopology.resetTimes
}
cells.foreach(_.resetTimes())
}
// Releases all intermediate buffers and cloned cells so the module can be reused/serialized.
override def clearState() : this.type = {
super.clearState()
hidden = null
gradHidden = null
hiddenShape = null
gradInput2Cell.set()
input2Cell.set()
currentInput.clear()
currentGradOutput.clear()
_input.clear()
cells.foreach(x => x.clearState())
cells.clear()
initHiddenState = null
stepInput2CellBuf.set()
stepGradBuffer.set()
maskBuffer.set()
gradOutputBuff.clear()
inputBuffer.set()
indexBuffer.set()
outputBuffers.clear()
minLength = 0
this
}
// Re-initializes parameters and drops the cloned cells and hidden state.
override def reset(): Unit = {
require((preTopology == null && modules.length == 1) ||
(topology != null && preTopology != null && modules.length == 2),
"Recurrent extend: should contain only one cell or plus a pre-topology" +
" to process input.")
require(topology.isInstanceOf[Cell[T]],
"Recurrent: should contain module with Cell type")
modules.foreach(_.reset())
cells.clear()
hidden = null
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[Recurrent[T]]
override def equals(other: Any): Boolean = other match {
case that: Recurrent[T] =>
super.equals(that) &&
(that canEqual this) &&
cells == that.cells
case _ => false
}
override def hashCode(): Int = {
val state = Seq(super.hashCode(), cells)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
override def toString(): String = s"${getPrintName}${modules}"
}
// Companion: dimension constants, fast time-slice copy helpers and (de)serialization.
object Recurrent extends ContainerSerializable {
// Layout of the sequence tensor: dim 1 is batch, dim 2 is time.
private val batchDim = 1
private val timeDim = 2
// Layout of the T(input, hidden) tables exchanged with the cell.
val inputDim = 1
val hidDim = 2
def apply[@specialized(Float, Double) T: ClassTag](
batchNormParams: BatchNormParams[T] = null,
maskZero: Boolean = false
)
(implicit ev: TensorNumeric[T]) : Recurrent[T] = {
new Recurrent[T](batchNormParams, maskZero = maskZero)
}
/**
* set the cells' output and gradInput to recurrent's output and gradInput
* to decrease the copy expense.
* Copy src tensor to dst tensor along timeDime, default timeDime 2, batchDim 1
* @param src per-time-step tensors, src(t - 1) holds step t
* @param dst sequence tensor receiving one slice per step
*/
private[bigdl] def copy[T: ClassTag](
src: ArrayBuffer[Tensor[T]], dst: Tensor[T]): Unit = {
val timeSize = dst.size(timeDim)
var t = 1
while (t <= timeSize) {
copyToIndex(src(t -1), dst, t)
t += 1
}
}
/**
* select srcIndex subset of the 2-th dimension from src, and copy to dst
* @param src sequence tensor of shape [batch, time, ...]
* @param srcIndex the index of 2-th dimension from src
* @param dst buffer resized/filled with the selected time slice
*/
private[bigdl] def selectCopy[T: ClassTag](
src: Tensor[T], srcIndex: Int, dst: Tensor[T]): Tensor[T] = {
if (src.isContiguous() && dst.isContiguous()) {
if ((dst.nElement() == 0) || (dst.nElement() != (src.nElement() / src.size(2)))) {
dst.resizeAs(src.select(2, srcIndex))
}
// Fast path: raw arraycopy of one stepSize chunk per batch record.
val batchSize = src.size(batchDim)
val timeSize = src.size(timeDim)
val stepSize = src.nElement() / (batchSize * timeSize)
val srcArr = src.storage().array()
var srcOffset = src.storageOffset() - 1
val dstArr = dst.storage().array()
var dstOffset = dst.storageOffset() - 1
// recordSize: elements per batch record; indexSize: offset of the requested step.
val recordSize = timeSize * stepSize
val indexSize = (srcIndex-1) * stepSize
var b = 0
while (b < batchSize) {
System.arraycopy(srcArr, srcOffset + indexSize, dstArr, dstOffset, stepSize)
srcOffset += recordSize
dstOffset += stepSize
b += 1
}
} else {
// Slow path for non-contiguous tensors: rely on Tensor copy semantics.
val output = src.select(2, srcIndex)
dst.resizeAs(output).copy(output)
}
dst
}
/**
* copy src to be dst dstIndex subset of the 2-th dimension
* @param src one time-step tensor of shape [batch, ...]
* @param dst sequence tensor of shape [batch, time, ...]
* @param dstIndex the index of 2-th dimension from dst
*/
private[bigdl] def copyToIndex[T: ClassTag](
src: Tensor[T], dst: Tensor[T], dstIndex: Int): Tensor[T] = {
if (src.isContiguous() && dst.isContiguous()) {
// Fast path: mirror of selectCopy with src/dst roles swapped.
val batchSize = dst.size(batchDim)
val timeSize = dst.size(timeDim)
val stepSize = dst.nElement() / (batchSize * timeSize)
val dstArr = dst.storage().array()
var dstOffset = dst.storageOffset() - 1
val srcArr = src.storage().array()
var srcOffset = src.storageOffset() - 1
val recordSize = timeSize * stepSize
val indexSize = (dstIndex - 1) * stepSize
var b = 0
while (b < batchSize) {
System.arraycopy(srcArr, srcOffset, dstArr, dstOffset + indexSize, stepSize)
srcOffset += stepSize
dstOffset += recordSize
b += 1
}
} else {
dst.select(2, dstIndex).copy(src)
}
dst
}
// Rebuilds a Recurrent from its serialized attributes ("bnorm" flag, topology,
// preTopology and — when present — every BatchNormParams field).
override def doLoadModule[T: ClassTag](context : DeserializeContext)
(implicit ev: TensorNumeric[T]) : AbstractModule[Activity, Activity, T] = {
val attrMap = context.bigdlModule.getAttrMap
val flag = DataConverter
.getAttributeValue(context, attrMap.get("bnorm"))
.asInstanceOf[Boolean]
val recurrent = if (flag) {
Recurrent[T](BatchNormParams[T]())
} else {
Recurrent[T]()
}
val topologyAttr = attrMap.get("topology")
recurrent.topology = DataConverter.getAttributeValue(context, topologyAttr).
asInstanceOf[Cell[T]]
val preTopologyAttr = attrMap.get("preTopology")
recurrent.preTopology = DataConverter.getAttributeValue(context, preTopologyAttr).
asInstanceOf[AbstractModule[Activity, Activity, T]]
if (recurrent.preTopology != null) {
recurrent.modules.append(recurrent.preTopology)
}
recurrent.modules.append(recurrent.topology)
if (flag) {
val bnormEpsAttr = attrMap.get("bnormEps")
recurrent.batchNormParams.eps =
DataConverter.getAttributeValue(context, bnormEpsAttr)
.asInstanceOf[Double]
val bnormMomentumAttr = attrMap.get("bnormMomentum")
recurrent.batchNormParams.momentum =
DataConverter.getAttributeValue(context, bnormMomentumAttr)
.asInstanceOf[Double]
val bnormInitWeightAttr = attrMap.get("bnormInitWeight")
recurrent.batchNormParams.initWeight =
DataConverter.getAttributeValue(context, bnormInitWeightAttr)
.asInstanceOf[Tensor[T]]
val bnormInitBiasAttr = attrMap.get("bnormInitBias")
recurrent.batchNormParams.initBias =
DataConverter.getAttributeValue(context, bnormInitBiasAttr)
.asInstanceOf[Tensor[T]]
val bnormInitGradWeightAttr = attrMap.get("bnormInitGradWeight")
recurrent.batchNormParams.initGradWeight =
DataConverter.getAttributeValue(context, bnormInitGradWeightAttr)
.asInstanceOf[Tensor[T]]
val bnormInitGradBiasAttr = attrMap.get("bnormInitGradBias")
recurrent.batchNormParams.initGradBias =
DataConverter.getAttributeValue(context, bnormInitGradBiasAttr)
.asInstanceOf[Tensor[T]]
val bnormAffineAttr = attrMap.get("bnormAffine")
recurrent.batchNormParams.affine =
DataConverter.getAttributeValue(context, bnormAffineAttr)
.asInstanceOf[Boolean]
}
recurrent
}
// Serializes topology, preTopology and (when batchNormParams != null) every
// batch-norm field, plus the "bnorm" presence flag read back by doLoadModule.
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
recurrentBuilder : BigDLModule.Builder)
(implicit ev: TensorNumeric[T]) : Unit = {
val recurrent = context.moduleData.module.asInstanceOf[Recurrent[T]]
val topologyBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, topologyBuilder, recurrent.topology,
ModuleSerializer.abstractModuleType)
recurrentBuilder.putAttr("topology", topologyBuilder.build)
val preTopologyBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, preTopologyBuilder,
recurrent.preTopology, ModuleSerializer.abstractModuleType)
recurrentBuilder.putAttr("preTopology", preTopologyBuilder.build)
val flag = if (recurrent.batchNormParams != null) {
val bnormEpsBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormEpsBuilder,
recurrent.batchNormParams.eps, universe.typeOf[Double])
recurrentBuilder.putAttr("bnormEps", bnormEpsBuilder.build)
val bnormMomentumBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormMomentumBuilder,
recurrent.batchNormParams.momentum, universe.typeOf[Double])
recurrentBuilder.putAttr("bnormMomentum", bnormMomentumBuilder.build)
val bnormInitWeightBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormInitWeightBuilder,
recurrent.batchNormParams.initWeight, ModuleSerializer.tensorType)
recurrentBuilder.putAttr("bnormInitWeight", bnormInitWeightBuilder.build)
val bnormInitBiasBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormInitBiasBuilder,
recurrent.batchNormParams.initBias, ModuleSerializer.tensorType)
recurrentBuilder.putAttr("bnormInitBias", bnormInitBiasBuilder.build)
val bnormInitGradWeightBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormInitGradWeightBuilder,
recurrent.batchNormParams.initGradWeight, ModuleSerializer.tensorType)
recurrentBuilder.putAttr("bnormInitGradWeight", bnormInitGradWeightBuilder.build)
val bnormInitGradBiasBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormInitGradBiasBuilder,
recurrent.batchNormParams.initGradBias, ModuleSerializer.tensorType)
recurrentBuilder.putAttr("bnormInitGradBias", bnormInitGradBiasBuilder.build)
val bnormAffineBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bnormAffineBuilder,
recurrent.batchNormParams.affine, universe.typeOf[Boolean])
recurrentBuilder.putAttr("bnormAffine", bnormAffineBuilder.build)
true
} else {
false
}
val bNormBuilder = AttrValue.newBuilder
DataConverter.setAttributeValue(context, bNormBuilder,
flag, universe.typeOf[Boolean])
recurrentBuilder.putAttr("bnorm", bNormBuilder.build)
}
}
/**
* Hyper-parameters and optional initial tensors for the BatchNormalization
* layer that [[Recurrent]] inserts after its preTopology (see Recurrent.add).
*/
case class BatchNormParams[T : ClassTag](
var eps: Double = 1e-5, // avoid divide-by-zero when normalizing by the variance
var momentum: Double = 0.1, // momentum for weight update
var initWeight: Tensor[T] = null,
var initBias: Tensor[T] = null,
var initGradWeight: Tensor[T] = null,
var initGradBias: Tensor[T] = null,
var affine: Boolean = true)(implicit ev: TensorNumeric[T])
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Recurrent.scala | Scala | apache-2.0 | 30,227 |
/**
* Author : Florian Simon <florian@tentwentyfour.lu>
* Role : Textual version of the main menu view.
*/
package Views.Swing;
import Game.Grid;
import Views.Base;
import Controllers.Controller;
import Views.Swing.Components.GridCanvas;
import scala.util.Success;
import scala.swing.event.WindowClosing;
import scala.concurrent.duration.Duration;
import scala.swing.{MainFrame, Dimension};
import scala.concurrent.{Await, Promise, Future};
import scala.concurrent.ExecutionContext.Implicits.global;
/**
* The main menu view.
*/
/**
 * Swing implementation of the game view: paints the grid on a canvas and
 * resolves `show()` once the window is closed.
 */
case class GameView(controller : Controllers.GameView) extends Base.GameView {
  // Completed (with None) when the window closes, unblocking show().
  val viewTerminated = Promise[Option[Controller]]()
  val gridCanvas = new GridCanvas(controller.game.teams, controller.game.grid)

  /** Repaints the canvas whenever the grid changes. */
  override def gridUpdated(grid : Grid) = gridCanvas.repaint()

  /**
   * Opens the window asynchronously, then blocks until the view terminates.
   *
   * @return the next controller to run, or None to quit
   */
  override def show() : Option[Controller] = {
    Future { setupWindow }
    Await.result(viewTerminated.future, Duration.Inf)
  }

  /**
   * Does the window init and shows it.
   *
   * @return Unit
   */
  def setupWindow = (new MainFrame {
    // Window configuration.
    minimumSize = new Dimension(800, 800)
    title = "Test window"
    contents = gridCanvas

    // Main window events: closing the window terminates the app.
    reactions += {
      case WindowClosing(_) => viewTerminated.complete(Success(None))
    }
  }).open
}
| floriansimon1/learning.trpg | src/Views/Swing/GameView.scala | Scala | mit | 1,527 |
import types.{MalList, _list, _list_Q, MalVector, MalHashMap,
Func, MalFunction}
import env.Env
// mal (make-a-lisp) step 9: adds try*/catch* exception handling on top of macros.
object step9_try {
// read
// Parses one form from the input string via the reader module.
def READ(str: String): Any = {
reader.read_str(str)
}
// eval
// A "pair" in quasiquote terms: a non-empty sequential value.
def is_pair(x: Any): Boolean = {
types._sequential_Q(x) && x.asInstanceOf[MalList].value.length > 0
}
// Rewrites a quasiquoted form into quote/cons/concat calls, honoring
// unquote and splice-unquote.
def quasiquote(ast: Any): Any = {
if (!is_pair(ast)) {
return _list(Symbol("quote"), ast)
} else {
val a0 = ast.asInstanceOf[MalList](0)
if (types._symbol_Q(a0) &&
a0.asInstanceOf[Symbol].name == "unquote") {
return ast.asInstanceOf[MalList](1)
} else if (is_pair(a0)) {
val a00 = a0.asInstanceOf[MalList](0)
if (types._symbol_Q(a00) &&
a00.asInstanceOf[Symbol].name == "splice-unquote") {
return _list(Symbol("concat"),
a0.asInstanceOf[MalList](1),
quasiquote(ast.asInstanceOf[MalList].drop(1)))
}
}
// Default: rebuild the list element by element with cons.
return _list(Symbol("cons"),
quasiquote(a0),
quasiquote(ast.asInstanceOf[MalList].drop(1)))
}
}
// True when ast is a list whose head resolves (in env) to a macro function.
def is_macro_call(ast: Any, env: Env): Boolean = {
ast match {
case ml: MalList => {
if (ml.value.length > 0 &&
types._symbol_Q(ml(0)) &&
env.find(ml(0).asInstanceOf[Symbol]) != null) {
env.get(ml(0).asInstanceOf[Symbol]) match {
case f: MalFunction => return f.ismacro
case _ => return false
}
}
return false
}
case _ => return false
}
}
// Repeatedly applies macros until the form is no longer a macro call.
def macroexpand(orig_ast: Any, env: Env): Any = {
var ast = orig_ast;
while (is_macro_call(ast, env)) {
ast.asInstanceOf[MalList].value match {
case f :: args => {
val mac = env.get(f.asInstanceOf[Symbol])
ast = mac.asInstanceOf[MalFunction](args)
}
case _ => throw new Exception("macroexpand: invalid call")
}
}
ast
}
// Evaluates non-apply forms: symbol lookup and element-wise EVAL of collections.
def eval_ast(ast: Any, env: Env): Any = {
ast match {
case s : Symbol => env.get(s)
case v: MalVector => v.map(EVAL(_, env))
case l: MalList => l.map(EVAL(_, env))
case m: MalHashMap => {
m.map{case (k,v) => (k, EVAL(v, env))}
}
case _ => ast
}
}
// Core evaluator. The while(true) loop implements tail-call optimization:
// tail positions reassign ast/env and continue instead of recursing.
def EVAL(orig_ast: Any, orig_env: Env): Any = {
var ast = orig_ast; var env = orig_env;
while (true) {
//println("EVAL: " + printer._pr_str(ast,true))
if (!_list_Q(ast))
return eval_ast(ast, env)
// apply list
ast = macroexpand(ast, env)
if (!_list_Q(ast))
return eval_ast(ast, env)
ast.asInstanceOf[MalList].value match {
case Nil => {
return ast
}
case Symbol("def!") :: a1 :: a2 :: Nil => {
return env.set(a1.asInstanceOf[Symbol], EVAL(a2, env))
}
case Symbol("let*") :: a1 :: a2 :: Nil => {
// Bindings are evaluated pairwise in a fresh child environment.
val let_env = new Env(env)
for (g <- a1.asInstanceOf[MalList].value.grouped(2)) {
let_env.set(g(0).asInstanceOf[Symbol],EVAL(g(1),let_env))
}
env = let_env
ast = a2 // continue loop (TCO)
}
case Symbol("quote") :: a1 :: Nil => {
return a1
}
case Symbol("quasiquote") :: a1 :: Nil => {
ast = quasiquote(a1) // continue loop (TCO)
}
case Symbol("defmacro!") :: a1 :: a2 :: Nil => {
// Like def!, but flags the resulting function as a macro.
val f = EVAL(a2, env)
f.asInstanceOf[MalFunction].ismacro = true
return env.set(a1.asInstanceOf[Symbol], f)
}
case Symbol("macroexpand") :: a1 :: Nil => {
return macroexpand(a1, env)
}
case Symbol("try*") :: a1 :: rest => {
// Evaluate a1; on failure, bind the thrown value in a new env and
// evaluate the catch* body.
try {
return EVAL(a1, env)
} catch {
case t: Throwable => {
rest(0).asInstanceOf[MalList].value match {
case List(Symbol("catch*"), a21, a22) => {
val exc: Any = t match {
case mex: types.MalException => mex.value
case _ => t.getMessage
}
return EVAL(a22, new Env(env,
List(a21).iterator,
List(exc).iterator))
}
}
// NOTE(review): the match above either returns or throws MatchError,
// so this rethrow looks unreachable — verify intent.
throw t
}
}
}
case Symbol("do") :: rest => {
// Evaluate all but the last form; the last one is TCO'd.
eval_ast(_list(rest.slice(0,rest.length-1):_*), env)
ast = ast.asInstanceOf[MalList].value.last // continue loop (TCO)
}
case Symbol("if") :: a1 :: a2 :: rest => {
// Only nil and false are falsy.
val cond = EVAL(a1, env)
if (cond == null || cond == false) {
if (rest.length == 0) return null
ast = rest(0) // continue loop (TCO)
} else {
ast = a2 // continue loop (TCO)
}
}
case Symbol("fn*") :: a1 :: a2 :: Nil => {
// Closure: captures env; body/params stored so apply sites can TCO.
return new MalFunction(a2, env, a1.asInstanceOf[MalList],
(args: List[Any]) => {
EVAL(a2, new Env(env, types._toIter(a1), args.iterator))
}
)
}
case _ => {
// function call
eval_ast(ast, env).asInstanceOf[MalList].value match {
case f :: el => {
f match {
case fn: MalFunction => {
// Interpreted function: step into its body without recursing.
env = fn.gen_env(el)
ast = fn.ast // continue loop (TCO)
}
case fn: Func => {
// Built-in (Scala-native) function: invoke directly.
return fn(el)
}
case _ => {
throw new Exception("attempt to call non-function: " + f)
}
}
}
case _ => throw new Exception("invalid apply")
}
}
}
}
}
// print
// Renders a mal value back to a readable string.
def PRINT(exp: Any): String = {
printer._pr_str(exp, true)
}
// repl
// Builds the root environment (native + mal-defined core), optionally runs a
// file given as args(0), otherwise starts the interactive REPL loop.
def main(args: Array[String]) = {
val repl_env: Env = new Env()
val REP = (str: String) => PRINT(EVAL(READ(str), repl_env))
// core.scala: defined using scala
core.ns.map{case (k: String,v: Any) => {
repl_env.set(Symbol(k), new Func(v))
}}
repl_env.set(Symbol("eval"), new Func((a: List[Any]) => EVAL(a(0), repl_env)))
repl_env.set(Symbol("*ARGV*"), _list(args.slice(1,args.length):_*))
// core.mal: defined using the language itself
REP("(def! not (fn* (a) (if a false true)))")
REP("(def! load-file (fn* (f) (eval (read-string (str \\"(do \\" (slurp f) \\")\\")))))")
REP("(defmacro! cond (fn* (& xs) (if (> (count xs) 0) (list 'if (first xs) (if (> (count xs) 1) (nth xs 1) (throw \\"odd number of forms to cond\\")) (cons 'cond (rest (rest xs)))))))")
REP("(defmacro! or (fn* (& xs) (if (empty? xs) nil (if (= 1 (count xs)) (first xs) `(let* (or_FIXME ~(first xs)) (if or_FIXME or_FIXME (or ~@(rest xs))))))))")
if (args.length > 0) {
REP("(load-file \\"" + args(0) + "\\")")
System.exit(0)
}
// repl loop
var line:String = null
while ({line = readLine("user> "); line != null}) {
try {
println(REP(line))
} catch {
case e : Throwable => {
println("Error: " + e.getMessage)
println(" " + e.getStackTrace.mkString("\\n "))
}
}
}
}
}
| hterkelsen/mal | scala/step9_try.scala | Scala | mpl-2.0 | 7,118 |
package dev.budget.reconciler.es
import io.dropwizard.lifecycle.Managed
import org.elasticsearch.node.Node
import org.slf4j.Logger
import org.slf4j.LoggerFactory.getLogger
import scaldi.{Injector, Injectable}
/** Dropwizard-managed lifecycle wrapper around an embedded ElasticSearch node.
  * Dependencies (node, admin helper) are resolved via scaldi injection. */
class ManagedElasticSearch(implicit val injector: Injector) extends Managed with Injectable {
  val log: Logger = getLogger(getClass)

  val node: Node = inject [Node]
  val esAdmin: ElasticSearchAdmin = inject [ElasticSearchAdmin]

  /** Boots the embedded node, then makes sure both application indexes exist. */
  override def start() = {
    log.info("Starting ElasticSearch...")
    node.start()
    ensureIndexes()
    log.info("Started ElasticSearch")
  }

  /** Shuts the embedded node down. */
  override def stop() = {
    log.info("Stopping ElasticSearch...")
    node.stop()
    log.info("ElasticSearch stopped")
  }

  // Creates the Mint and Ynab indexes when missing (same order as before).
  private def ensureIndexes(): Unit = {
    esAdmin.createIndexIfNotExists(MintESIndex)
    esAdmin.createIndexIfNotExists(YnabESIndex)
  }
}
| jhungerford/MintYnabReconciler | src/main/scala/dev/budget/reconciler/es/ManagedElasticSearch.scala | Scala | apache-2.0 | 799 |
package com.scala.exercises.impatient.chapter5
/**
* Created by Who on 2014/7/8.
*/
/** Minimal mutable counter demonstrating object-private (`private[this]`) state. */
class Counter {
  // Object-private: visible only to this instance, not to other Counter objects.
  private[this] var value = 0

  /** Advances the counter by one. */
  def increment(): Unit = {
    value += 1
  }

  /** Current count. */
  def current(): Int = value

  /** No-op: `other.value` cannot be referenced here.
    * Because the member value is tagged private[this], it is only accessible
    * from the same object, so the comparison below does not compile:
    * value < other.value */
  def isLess(other: Counter) = {
  }
}
| laonawuli/scalalearning | src/com/scala/exercises/impatient/chapter5/Counter.scala | Scala | mit | 372 |
/*
* Copyright (C) 2017 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.tikitakka.core
import akka.actor.Actor
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.actor.Props
import com.stratio.tikitakka.common.message._
import com.stratio.tikitakka.common.model.discovery.DiscoveryAppInfo
import scalaz.Reader
/** Watches a single application via periodic discovery results.
  * Starts in `receive`, switches to `ready` after the first ManageApp, and
  * forwards register/unregister commands to the orchestrator when the
  * discovered status changes. */
class ServiceActor(discoveryActor: ActorRef, orchestrator: ActorRef) extends Actor with ActorLogging {

  // Last discovered status; None until the first successful discovery.
  var status: Option[DiscoveryAppInfo] = None
  var appName: String = "UNKNOWN"

  override def receive: Receive = {
    case ManageApp(name) =>
      appName = name
      log.debug(s"[$appName] trigger discovery check")
      discoveryActor ! DiscoverService(appName)
      // All further discovery results are handled by `ready`.
      context.become(ready)
    case _ => log.debug(s"Unknown message")
  }

  def ready: Receive = {
    case AppDiscovered(Some(appDiscovered)) =>
      log.info(s"[${appDiscovered.name}] discovered with id: ${appDiscovered.id}")
      // Only notify the orchestrator when the discovered info actually changed.
      if(Option(appDiscovered) != status) {
        status = Option(appDiscovered)
        orchestrator ! RegisterApplication(appDiscovered)
      } else {
        log.debug(s"[${appDiscovered.name} discovered status has no changes]")
      }
    case AppDiscovered(None) =>
      // App vanished: tell the parent (which presumably stops this actor —
      // TODO confirm against parent's handling of AppUnDiscovered) and
      // unregister the last known status, if any.
      log.debug(s"[${status.map(_.name).getOrElse("UNKNOWN")}] not discovered sending message to killing myself")
      context.parent ! AppUnDiscovered(appName)
      status.foreach(st => orchestrator ! UnregisterApplication(st))
    case _ => log.debug(s"Unknown message")
  }
}
object ServiceActor {
  /** Props factory; actor dependencies are supplied via a scalaz Reader over
    * the application's `Dependencies` bundle. */
  def props = Reader {
    (dependencies: Dependencies) =>
      Props(classOf[ServiceActor], dependencies.discoveryActorRef, dependencies.orchestratorActorRef)
  }
}
| compae/tiki-takka | core/src/main/scala/com/stratio/tikitakka/core/ServiceActor.scala | Scala | apache-2.0 | 2,258 |
package object sakismet {
  import dispatch.{HttpExecutor, Http}

  // Client identification and Akismet REST endpoint constants.
  val UserAgent = "Sakismet/1.0.0"
  val AkismetBaseHost = "rest.akismet.com"
  val AkismetKeyedHost = "%s.rest.akismet.com"
  val AkismetDebugHeader = "x-akismet-debug-help"
  val AkismetBasePath = "1.1"
  val AkismetPort = 80

  // Phantom-type markers used by Builder: S = "field has been set",
  // N = "field has not been set".
  type N = None.type
  type S = Some.type

  implicit def http = new Http

  object Sakismet {
    // Entry point: a Builder whose type state marks every field as unset.
    def apply[H <: HttpExecutor](key: String, blog: String)(implicit http: H) =
      new Builder[H, {
        type UserIp = N
        type Referrer = N
        type Permalink = N
        type CommentType = N
        type CommentAuthor = N
        type CommentAuthorEmail = N
        type CommentAuthorUrl = N
        type CommentContent = N
      }](key, new Invoker[H](http)) {
        override val params = Map("blog" -> blog, "user_agent" -> UserAgent)
      }
  }

  /** Immutable request builder. The `Xs` type member tracks — at compile
    * time — which request fields have been provided, so operations such as
    * comment_check can statically require user_ip and referrer. */
  class Builder[H <: HttpExecutor, Xs] private[sakismet](key: String, val inv: Invoker[H]) {
    self: Builder[H, _] =>

    type Ev[X] = Xs <:< X

    val params = Map[String, String]()

    // Copy of this builder carrying one extra request parameter.
    private def builder[Xs](mapping: (String, String)) = new Builder[H, Xs](key, inv) {
      override val params = self.params + mapping
    }

    // Each setter requires its field to be unset (<: N) and marks it set (= S),
    // so a field cannot be provided twice.
    def user_ip[X <: { type UserIp <: N }: Ev](v: String) =
      builder[Xs { type UserIp = S }]("user_ip" -> v)
    def referrer[X <: { type Referrer <: N }: Ev](v: String) =
      builder[Xs { type Referrer = S }]("referrer" -> v)
    def permalink[X <: { type Permalink <: N }: Ev](v: String) =
      builder[Xs { type Permalink = S }]("permalink" -> v)
    def comment_type[X <: { type CommentType <: N }: Ev](v: String) =
      builder[Xs { type CommentType = S }]("comment_type" -> v)
    def comment_author[X <: { type CommentAuthor <: N }: Ev](v: String) =
      builder[Xs { type CommentAuthor = S }]("comment_author" -> v)
    def comment_author_email[X <: { type CommentAuthorEmail <: N }: Ev](v: String) =
      builder[Xs { type CommentAuthorEmail = S }]("comment_author_email" -> v)
    def comment_author_url[X <: { type CommentAuthorUrl <: N }: Ev](v: String) =
      builder[Xs { type CommentAuthorUrl = S }]("comment_author_url" -> v)
    def comment_content[X <: { type CommentContent <: N }: Ev](v: String) =
      builder[Xs { type CommentContent = S }]("comment_content" -> v)

    // Minimum type state required by the check/submit operations below.
    type AkismetOk = { type UserIp <: S; type Referrer <: S }

    /**
     * Verify that the given key and blog are recognized by Akismet.
     *
     * @return {@code true} if key is valid, otherwise {@code false}
     */
    def verify_key() = inv.invoke(None, "verify-key", params + ("key" -> key)) {
      case "valid" => true
      case "invalid" => false
    }

    /**
     * Check whether Akismet thinks a given comment is spam.
     *
     * @return {@code true} if comment is spam, otherwise {@code false}
     */
    def comment_check[X <: AkismetOk: Ev]() = inv.invoke(key, "comment-check", params) {
      case "true" => true
      case "false" => false
    }

    /**
     * Report false negative to Akismet.
     */
    def submit_spam[X <: AkismetOk: Ev]() = inv.invoke(key, "submit-spam", params) {
      case "Thanks for making the web a better place." => ()
    }

    /**
     * Report false positive to Akismet.
     */
    def submit_ham[X <: AkismetOk: Ev]() = inv.invoke(key, "submit-ham", params) {
      case "Thanks for making the web a better place." => ()
    }

    def shutdown = inv.http.shutdown
  }

  /** Performs the HTTP POST against Akismet and maps the textual response
    * body through the caller-supplied partial function; unexpected bodies
    * raise InvalidRequest/UnknownResponse carrying Akismet's debug header. */
  class Invoker[H <: HttpExecutor] private[sakismet](val http: H) {
    import dispatch._

    type PF[R] = PartialFunction[String, R]

    // Keyed operations hit "<key>.rest.akismet.com"; verify-key hits the base host.
    private def baseUrl(key: Option[String]) = :/(key match {
      case Some(key) => AkismetKeyedHost format key
      case None => AkismetBaseHost
    }, AkismetPort) / AkismetBasePath

    def invoke[R](key: Option[String], op: String, params: Map[String, String])(f: PF[R]) =
      http(baseUrl(key) / op << params >:+ { (headers, req) => req.as_str ~> f.orElse {
        case "invalid" => throw new InvalidRequest(headers(AkismetDebugHeader).mkString)
        case _ => throw new UnknownResponse(headers(AkismetDebugHeader).mkString)
      }})
  }

  // Lets a plain String be passed where invoke expects Option[String].
  private implicit def str2opt(s: String): Option[String] = Some(s)

  class InvalidRequest(x: String) extends RuntimeException(x)
  class UnknownResponse(x: String) extends RuntimeException(x)
}
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2014, Gary Keorkunian **
** **
\\* */
package squants.market
import squants._
/**
* Represents a price
*
* A price is an [[squants.Ratio]] between a quantity of [[squants.market.Money]]
* and some other [[squants.Quantity]]
*
* @author garyKeorkunian
* @since 0.1
*
* @param money Money
* @param quantity Quantity
* @tparam A Quantity Type
*/
/**
 * Represents a price: a [[squants.Ratio]] between an amount of
 * [[squants.market.Money]] and some other [[squants.Quantity]].
 *
 * Symbolic operators now consistently delegate to their named counterparts
 * (previously `*` and `/` duplicated the `times`/`divide` bodies while `+`
 * and `-` delegated).
 *
 * @param money    the money component (numerator)
 * @param quantity the quantity component (denominator)
 * @tparam A Quantity Type
 */
case class Price[A <: Quantity[A]](money: Money, quantity: A) extends Ratio[Money, A] with Serializable {
  def base = money
  def counter = quantity

  // TODO Add verification that money amounts are the same OR convert
  /** Adds the money components; the quantity denominator is kept as-is. */
  def plus(that: Price[A]): Price[A] = Price(money + that.money, quantity)
  def +(that: Price[A]): Price[A] = plus(that)
  /** Subtracts the money components; the quantity denominator is kept as-is. */
  def minus(that: Price[A]): Price[A] = Price(money - that.money, quantity)
  def -(that: Price[A]): Price[A] = minus(that)

  /** Scales the money component by a factor. */
  def times(that: Double): Price[A] = Price(money * that, quantity)
  def *(that: Double): Price[A] = times(that)
  def times(that: BigDecimal): Price[A] = Price(money * that, quantity)
  def *(that: BigDecimal): Price[A] = times(that)

  /** Divides the money component by a factor. */
  def divide(that: Double): Price[A] = Price(money / that, quantity)
  def /(that: Double): Price[A] = divide(that)
  def divide(that: BigDecimal): Price[A] = Price(money / that, quantity)
  def /(that: BigDecimal): Price[A] = divide(that)

  /** Ratio of the two prices' money amounts; the quantities are not compared. */
  def divide(that: Price[A]): BigDecimal = money.amount / that.money.amount
  def /(that: Price[A]): BigDecimal = divide(that)

  /** This price with its money component converted to the given currency. */
  def in(currency: Currency)(implicit moneyContext: MoneyContext) =
    (money in currency) / quantity

  /**
   * Returns the Cost (Money) for a quantity `that` of A
   * @param that Quantity
   * @return cost of `that` at this price
   */
  def *(that: A): Money = convertToBase(that)

  /**
   * Returns the Quantity that `that` amount of Money will buy at this price
   * @param that Money
   * @return quantity purchasable for `that`
   */
  def *(that: Money): A = convertToCounter(that)

  override def toString = money.toString + "/" + quantity.toString

  def toString(unit: UnitOfMeasure[A]) = money.toString + "/" + quantity.toString(unit)

  def toString(currency: Currency, unit: UnitOfMeasure[A])(implicit moneyContext: MoneyContext) =
    (money in currency).toString + "/" + quantity.toString(unit)
}
| non/squants | src/main/scala/squants/market/Price.scala | Scala | apache-2.0 | 2,684 |
/* *\\
** \\ \\ / _) \\ \\ / \\ | **
** \\ \\ / | __ \\ _ \\ __| \\ \\ / |\\/ | **
** \\ \\ / | | | __/ | \\ \\ / | | **
** \\_/ _| .__/ \\___| _| \\_/ _| _| **
** _| **
** **
** ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ **
** **
** http://www.vipervm.org **
** GPLv3 **
\\* */
package org.vipervm.platform.opencl
import org.vipervm.platform._
import com.sun.jna.Pointer
/**
* Network between OpenCL device memory and host memory
*/
/**
 * Network between OpenCL device memory and host memory
 */
class OpenCLNetwork(val device:OpenCLProcessor) extends Network {

  // This device's own memory node; only links touching it are supported.
  private val mem = device.memory

  val memoryCopier = new OpenCLMemoryCopier

  /**
   * Return a link from source to target using this network if possible.
   * device -> host yields a read link; host -> device yields a write link;
   * anything else (including other devices' memory) is unsupported.
   */
  def link(source:MemoryNode,target:MemoryNode): Option[Link] = (source,target) match {
    case (m:OpenCLMemoryNode,n:HostMemoryNode) if m == mem => Some(OpenCLReadLink(this, mem, n))
    case (n:HostMemoryNode,m:OpenCLMemoryNode) if m == mem => Some(OpenCLWriteLink(this, n, mem))
    case _ => None
  }
}
class OpenCLMemoryCopier extends MemoryCopier with Copy1DSupport {

  /** Enqueues an asynchronous 1D copy over the given link and returns a
    * DataTransfer tracking completion via the OpenCL event. */
  def copy1D(link:Link,source:BufferView1D,target:BufferView1D):DataTransfer = {
    link match {
      // Device -> host: read the device buffer into host memory at
      // (target base pointer + target offset).
      case OpenCLReadLink(net,srcMem,tgtMem) => {
        val cq = net.device.commandQueue
        val srcPeer = srcMem.get(source.buffer).peer
        val tgtPeer = tgtMem.get(target.buffer).peer
        val ptr = new Pointer(Pointer.nativeValue(tgtPeer) + target.offset)
        // `false` = non-blocking enqueue; completion is signalled by `ev`.
        val ev = cq.enqueueReadBuffer(srcPeer, false, source.offset, source.size, ptr, Nil)
        val event = new OpenCLEvent(ev)
        new DataTransfer(link,source,target,event)
      }
      // Host -> device: write host memory at (source base + source offset)
      // into the device buffer at target.offset.
      case OpenCLWriteLink(net,srcMem,tgtMem) => {
        val cq = net.device.commandQueue
        val srcPeer = srcMem.get(source.buffer).peer
        val tgtPeer = tgtMem.get(target.buffer).peer
        val ptr = new Pointer(Pointer.nativeValue(srcPeer) + source.offset)
        val ev = cq.enqueueWriteBuffer(tgtPeer, false, target.offset, source.size, ptr, Nil)
        val event = new OpenCLEvent(ev)
        new DataTransfer(link,source,target,event)
      }
      case _ => throw new Exception("trying to copy with invalid link")
    }
  }
}
| hsyl20/Scala_ViperVM | src/main/scala/org/vipervm/platform/opencl/Network.scala | Scala | gpl-3.0 | 2,542 |
// See LICENSE.txt for license details.
package solutions
import chisel3.iotesters.PeekPokeTester
/** Drives the VendingMachine DUT with random nickel/dime inserts and checks
  * its `valid` output against a simple software model of the same machine. */
class VendingMachineTests(c: VendingMachine) extends PeekPokeTester(c) {
  // Software model state: accumulated cents and whether a vend occurred.
  var money = 0
  var isValid = false
  for (t <- 0 until 20) {
    // Randomly insert nothing, a nickel or a dime (0, 5 or 10 cents).
    val coin = rnd.nextInt(3)*5
    val isNickel = coin == 5
    val isDime = coin == 10

    // Advance circuit
    poke(c.io.nickel, if (isNickel) 1 else 0)
    poke(c.io.dime, if (isDime) 1 else 0)
    step(1)

    // Advance model: a valid (vending) cycle resets the balance to zero.
    money = if (isValid) 0 else money + coin
    isValid = money >= 20

    // Compare
    expect(c.io.valid, if (isValid) 1 else 0)
  }
}
| timtian090/Playground | chiselTutorial/src/test/scala/solutions/VendingMachineTest.scala | Scala | mit | 619 |
package skinny.test
import org.scalatest._
import org.scalatest.junit.{ JUnit3Suite, JUnitSuite }
import skinny.engine.test.scalatest.SkinnyEngineSuite
/**
 * Convenience trait to add Skinny test support to JUnit3Suite.
 */
trait SkinnyJUnit3Suite
  extends JUnit3Suite
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to JUnitSuite.
 */
trait SkinnyJUnitSuite
  extends JUnitSuite
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to FeatureSpec.
 */
trait SkinnyFeatureSpec
  extends FeatureSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to Spec (ScalaTest FunSpec style).
 */
trait SkinnySpec
  extends FunSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to FlatSpec.
 */
trait SkinnyFlatSpec
  extends FlatSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to FunSpec.
 */
trait SkinnyFunSpec
  extends FunSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to FreeSpec.
 */
trait SkinnyFreeSpec
  extends FreeSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to WordSpec.
 */
trait SkinnyWordSpec
  extends WordSpecLike
  with SkinnyEngineSuite

/**
 * Convenience trait to add Skinny test support to FunSuite.
 */
trait SkinnyFunSuite
  extends FunSuite
  with SkinnyEngineSuite
| holycattle/skinny-framework | engine-test/src/main/scala/skinny/test/ScalaTestSpecs.scala | Scala | mit | 1,404 |
/*
* Copyright 2011 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.scrooge
import com.twitter.scrooge.ast.Document
import com.twitter.scrooge.backend.{GeneratorFactory, ScalaGenerator, ServiceOption}
import com.twitter.scrooge.frontend.{TypeResolver, ThriftParser, Importer}
import java.io.{File, FileWriter}
import scala.collection.concurrent.TrieMap
import scala.collection.mutable
/**
 * Scrooge thrift compiler driver. Fields are mutable CLI-style configuration
 * knobs set by the option parser before `run()` is invoked.
 */
class Compiler {
  val defaultDestFolder = "."
  var destFolder: String = defaultDestFolder
  val includePaths = new mutable.ListBuffer[String]
  val thriftFiles = new mutable.ListBuffer[String]
  val flags = new mutable.HashSet[ServiceOption]
  val namespaceMappings = new mutable.HashMap[String, String]
  var verbose = false
  var strict = true
  var skipUnchanged = false
  var experimentFlags = new mutable.ListBuffer[String]
  var fileMapPath: scala.Option[String] = None
  var fileMapWriter: scala.Option[FileWriter] = None
  var dryRun: Boolean = false
  var language: String = "scala"
  var defaultNamespace: String = "thrift"
  var scalaWarnOnJavaNSFallback: Boolean = false

  /**
   * Parses and generates code for every file in `thriftFiles`.
   *
   * When `fileMapPath` is set, an input -> output mapping file is written.
   * The writer is now closed in a `finally` block so the file handle is not
   * leaked (and buffered output not lost) when parsing or generation throws.
   */
  def run() {
    // if --gen-file-map is specified, prepare the map file.
    fileMapWriter = fileMapPath.map { path =>
      val file = new File(path)
      val dir = file.getParentFile
      if (dir != null && !dir.exists()) {
        dir.mkdirs()
      }
      if (verbose) {
        println("+ Writing file mapping to %s".format(path))
      }
      new FileWriter(file)
    }

    try {
      val importer = Importer(new File(".")) +: Importer(includePaths)

      val isJava = language.equals("java")
      val isScala = language.equals("scala")
      // Java and Scala backends support struct-valued right-hand sides.
      val rhsStructs = isJava || isScala

      // Shared across files so includes are parsed only once.
      val documentCache = new TrieMap[String, Document]

      // compile
      for (inputFile <- thriftFiles) {
        val parser = new ThriftParser(importer, strict, defaultOptional = isJava, skipIncludes = false, documentCache)
        val doc0 = parser.parseFile(inputFile).mapNamespaces(namespaceMappings.toMap)

        if (verbose) println("+ Compiling %s".format(inputFile))
        val resolvedDoc = TypeResolver(allowStructRHS = rhsStructs)(doc0) // TODO: THRIFT-54

        val generator = GeneratorFactory(
          language,
          resolvedDoc.resolver.includeMap,
          defaultNamespace,
          experimentFlags)

        generator match {
          case g: ScalaGenerator => g.warnOnJavaNamespaceFallback = scalaWarnOnJavaNSFallback
          case _ => ()
        }

        val generatedFiles = generator(
          resolvedDoc.document,
          flags.toSet,
          new File(destFolder),
          dryRun
        ).map { _.getPath }
        if (verbose) {
          println("+ Generated %s".format(generatedFiles.mkString(", ")))
        }
        fileMapWriter.foreach { w =>
          generatedFiles.foreach { path =>
            w.write(inputFile + " -> " + path + "\\n")
          }
        }
      }
    } finally {
      // flush and close the map file, even if compilation failed part-way
      fileMapWriter.foreach { _.close() }
    }
  }
}
| VisualDNA/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/Compiler.scala | Scala | apache-2.0 | 3,456 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.Executors
import scala.collection.parallel.immutable.ParRange
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobStart}
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types._
import org.apache.spark.util.ThreadUtils
/** Regression tests around SQLExecution's execution-id bookkeeping. */
class SQLExecutionSuite extends SparkFunSuite {

  test("concurrent query execution (SPARK-10548)") {
    val conf = new SparkConf()
      .setMaster("local[*]")
      .setAppName("test")
    val goodSparkContext = new SparkContext(conf)
    try {
      testConcurrentQueryExecution(goodSparkContext)
    } finally {
      goodSparkContext.stop()
    }
  }

  test("concurrent query execution with fork-join pool (SPARK-13747)") {
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("test")
      .getOrCreate()

    import spark.implicits._
    try {
      // Should not throw IllegalArgumentException
      new ParRange(1 to 100).foreach { _ =>
        spark.sparkContext.parallelize(1 to 5).map { i => (i, i) }.toDF("a", "b").count()
      }
    } finally {
      spark.sparkContext.stop()
    }
  }

  /**
   * Trigger SPARK-10548 by mocking a parent and its child thread executing queries concurrently.
   */
  private def testConcurrentQueryExecution(sc: SparkContext): Unit = {
    val spark = SparkSession.builder.getOrCreate()
    import spark.implicits._

    // Initialize local properties. This is necessary for the test to pass.
    sc.getLocalProperties

    // Set up a thread that runs executes a simple SQL query.
    // Before starting the thread, mutate the execution ID in the parent.
    // The child thread should not see the effect of this change.
    var throwable: Option[Throwable] = None
    val child = new Thread {
      override def run(): Unit = {
        try {
          sc.parallelize(1 to 100).map { i => (i, i) }.toDF("a", "b").collect()
        } catch {
          case t: Throwable =>
            throwable = Some(t)
        }
      }
    }
    sc.setLocalProperty(SQLExecution.EXECUTION_ID_KEY, "anything")
    child.start()
    child.join()

    // The throwable is thrown from the child thread so it doesn't have a helpful stack trace
    throwable.foreach { t =>
      t.setStackTrace(t.getStackTrace ++ Thread.currentThread.getStackTrace)
      throw t
    }
  }

  test("Finding QueryExecution for given executionId") {
    val spark = SparkSession.builder.master("local[*]").appName("test").getOrCreate()
    import spark.implicits._

    // Captured by the listener from the execution id attached to the job.
    var queryExecution: QueryExecution = null
    spark.sparkContext.addSparkListener(new SparkListener {
      override def onJobStart(jobStart: SparkListenerJobStart): Unit = {
        val executionIdStr = jobStart.properties.getProperty(SQLExecution.EXECUTION_ID_KEY)
        if (executionIdStr != null) {
          queryExecution = SQLExecution.getQueryExecution(executionIdStr.toLong)
        }
        // Unblock the mapped task below once the listener has run.
        SQLExecutionSuite.canProgress = true
      }
    })

    val df = spark.range(1).map { x =>
      while (!SQLExecutionSuite.canProgress) {
        Thread.sleep(1)
      }
      x
    }
    df.collect()

    assert(df.queryExecution === queryExecution)

    spark.stop()
  }

  test("SPARK-32813: Table scan should work in different thread") {
    // Write in one single-thread executor, scan from another, to prove the
    // scan does not depend on the thread that created the session/table.
    val executor1 = Executors.newSingleThreadExecutor()
    val executor2 = Executors.newSingleThreadExecutor()
    var session: SparkSession = null
    SparkSession.cleanupAnyExistingSession()

    withTempDir { tempDir =>
      try {
        val tablePath = tempDir.toString + "/table"
        val df = ThreadUtils.awaitResult(Future {
          session = SparkSession.builder().appName("test").master("local[*]").getOrCreate()

          session.createDataFrame(
            session.sparkContext.parallelize(Row(Array(1, 2, 3)) :: Nil),
            StructType(Seq(
              StructField("a", ArrayType(IntegerType, containsNull = false), nullable = false))))
            .write.parquet(tablePath)

          session.read.parquet(tablePath)
        }(ExecutionContext.fromExecutorService(executor1)), 1.minute)

        ThreadUtils.awaitResult(Future {
          assert(df.rdd.collect()(0) === Row(Seq(1, 2, 3)))
        }(ExecutionContext.fromExecutorService(executor2)), 1.minute)
      } finally {
        executor1.shutdown()
        executor2.shutdown()
        session.stop()
      }
    }
  }
}
object SQLExecutionSuite {
  // Gate flipped by the job-start listener so the mapped task may proceed.
  @volatile var canProgress = false
}
| maropu/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/SQLExecutionSuite.scala | Scala | apache-2.0 | 5,417 |
package models.req
/**
* Date: 14/06/19.
*/
case class DeleteSnapshot(userId: Long, snapId: Long)
object DeleteSnapshot {
  /**
   * Builds a DeleteSnapshot from request parameters.
   *
   * Returns None when either parameter is missing or not a valid Long
   * (previously a non-numeric value threw NumberFormatException instead of
   * honoring the Option contract).
   */
  def fromReq(req: Map[String, Seq[String]]): Option[DeleteSnapshot] = {
    import scala.util.Try
    val f = controllers.Common.reqHead(req)(_)
    for {
      userId <- f("userId").flatMap(s => Try(s.toLong).toOption)
      snapId <- f("snapId").flatMap(s => Try(s.toLong).toOption)
    } yield {
      DeleteSnapshot(userId, snapId)
    }
  }
}
| nekoworkshop/MyFleetGirls | server/app/models/req/DeleteSnapshot.scala | Scala | mit | 389 |
package scala.virtualization.lms
package common
import java.io.PrintWriter
import scala.reflect.SourceContext
trait ImplicitOps extends Base {
  /**
   * Implicit conversion from Rep[X] to Rep[Y]
   *
   * As long as a conversion is in scope, it will be invoked in the generated scala code.
   * Code-gen for other platforms should implement the conversions.
   **/
  def implicit_convert[X,Y](x: Rep[X])(implicit c: X => Y, mX: Manifest[X], mY: Manifest[Y], pos: SourceContext) : Rep[Y] // = x.asInstanceOf[Rep[Y]]
}
trait ImplicitOpsExp extends ImplicitOps with BaseExp {
  // IR node recording a conversion; the manifests carry both endpoint types
  // so code generators can emit the right cast.
  case class ImplicitConvert[X,Y](x: Exp[X])(implicit val mX: Manifest[X], val mY: Manifest[Y]) extends Def[Y]

  // Identity conversions (same manifest) are short-circuited and produce no IR node.
  def implicit_convert[X,Y](x: Exp[X])(implicit c: X => Y, mX: Manifest[X], mY: Manifest[Y], pos: SourceContext) : Rep[Y] = {
    if (mX == mY) x.asInstanceOf[Rep[Y]] else ImplicitConvert[X,Y](x)
  }

  // Mirroring rebuilds the node with the transformed argument while
  // preserving the original manifests.
  override def mirror[A:Manifest](e: Def[A], f: Transformer)(implicit pos: SourceContext): Exp[A] = (e match {
    case im@ImplicitConvert(x) => toAtom(ImplicitConvert(f(x))(im.mX,im.mY))(mtype(manifest[A]),pos)
    case _ => super.mirror(e,f)
  }).asInstanceOf[Exp[A]]
}
trait ScalaGenImplicitOps extends ScalaGenBase {
  val IR: ImplicitOpsExp
  import IR._

  // Emits the conversion as a plain val binding; the Scala implicit in scope
  // performs the actual conversion in the generated code.
  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = rhs match {
    // TODO: this valDef is redundant; we really just want the conversion to be a no-op in the generated code.
    // TODO: but we still need to link the defs together
    case ImplicitConvert(x) => emitValDef(sym, quote(x))
    case _ => super.emitNode(sym, rhs)
  }
}
trait CLikeGenImplicitOps extends CLikeGenBase {
  val IR: ImplicitOpsExp
  import IR._

  override def emitNode(sym: Sym[Any], rhs: Def[Any]) = {
    rhs match {
      // Emit an explicit C-style cast to the target type recorded in mY.
      case im@ImplicitConvert(x) =>
        gen"${im.mY} $sym = (${im.mY})$x;"
      case _ => super.emitNode(sym, rhs)
    }
  }
}
// Backend-specific generators: conversion handling is shared via
// CLikeGenImplicitOps; each trait only fixes the concrete target backend.
trait CudaGenImplicitOps extends CudaGenBase with CLikeGenImplicitOps
trait OpenCLGenImplicitOps extends OpenCLGenBase with CLikeGenImplicitOps
trait CGenImplicitOps extends CGenBase with CLikeGenImplicitOps
| afernandez90/virtualization-lms-core | src/common/ImplicitOps.scala | Scala | bsd-3-clause | 2,110 |
package dsentric
/** Mixin providing the `&&` extractor, which lets a single `match` case bind
  * the same scrutinee under two names, e.g. `case First(a) && Second(b)`. */
trait AndMatcher {
  object && {
    /** Always matches, duplicating the scrutinee into both positions. */
    def unapply[A](a: A): Some[(A, A)] = Some((a, a))
  }
}

/** Standalone instance for direct `import AndMatcher._` usage. */
object AndMatcher extends AndMatcher
package org.http4s.client.blaze
import org.http4s.client.ClientRouteTestBattery
// Runs the shared ClientRouteTestBattery against SimpleHttp1Client
// configured with the default blaze client config.
class BlazeSimpleHttp1ClientSpec extends
    ClientRouteTestBattery("SimpleHttp1Client", SimpleHttp1Client(BlazeClientConfig.defaultConfig))
| m4dc4p/http4s | blaze-client/src/test/scala/org/http4s/client/blaze/BlazeSimpleHttp1ClientSpec.scala | Scala | apache-2.0 | 219 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.abstractnn._
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.tensor.{FloatType, Tensor}
import com.intel.analytics.bigdl.utils.serializer._
import com.intel.analytics.bigdl.utils.serializer.converters.DataConverter
import com.intel.analytics.bigdl.utils.{Engine, ParameterSynchronizer, T, Table}
import com.intel.analytics.bigdl.serialization.Bigdl.{AttrValue, BigDLModule}
import scala.reflect.ClassTag
/**
* This layer implements Batch Normalization as described in the paper:
* "Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift"
* by Sergey Ioffe, Christian Szegedy https://arxiv.org/abs/1502.03167
*
* This implementation is useful for inputs NOT coming from convolution layers.
* For convolution layers, use nn.SpatialBatchNormalization.
*
* The operation implemented is:
* ( x - mean(x) )
* y = -------------------- * gamma + beta
* standard-deviation(x)
* where gamma and beta are learnable parameters.The learning of gamma and beta is optional.
* @param nOutput output feature map number
* @param eps avoid divide zero
* @param momentum momentum for weight update
* @param affine affine operation on output or not
* @param ev numeric operator
* @tparam T numeric type
*/
@SerialVersionUID(- 3181824540272906068L)
class BatchNormalization[T: ClassTag](
val nOutput: Int, // output feature map number
val eps: Double = 1e-5, // avoid divide zero
val momentum: Double = 0.1, // momentum for weight update
val affine: Boolean = true, // affine operation on output or not
private val initWeight: Tensor[T] = null,
private val initBias: Tensor[T] = null,
private val initGradWeight: Tensor[T] = null,
private val initGradBias: Tensor[T] = null
)(implicit ev: TensorNumeric[T]) extends TensorModule[T] with Initializable
with MklInt8Convertible {
require(nOutput > 0, "output feature map number must be greater than zero")

// Number of concurrent threads registered with ParameterSynchronizer;
// None disables cross-thread statistic synchronization.
private var parallism : Option[Int] = None

/**
 * Set parameter sync parallisim number
 * @param parallism Concurrent sync threads number
 */
def setParallism(parallism: Int): Unit = {
  this.parallism = Some(parallism)
}

def getParallism(): Option[Int] = this.parallism

// Per-layer keys used to register buffers with ParameterSynchronizer.
val meanKey: String = s"${this.getName}_mean"
val stdKey: String = s"${this.getName}_std"
val gmKey: String = s"${this.getName}_gm"
val gxmKey: String = s"${this.getName}_gxm"

// Expected input dimensionality (mini-batch of vectors) and, per its name,
// the channel dimension index.
val nDim = 2
val channelDim = 2

// Running statistics accumulated across batches; sized only when affine.
var runningMean = if (affine) Tensor[T](nOutput) else Tensor[T]()
var runningVar = if (affine) Tensor[T](nOutput).fill(ev.one) else Tensor[T]()
// Per-forward statistics kept for the backward pass.
var saveMean = if (affine) Tensor[T](nOutput) else Tensor[T]()
var saveStd = if (affine) Tensor[T](nOutput).fill(ev.zero) else Tensor[T]()

// Learnable scale/shift (gamma/beta) and their gradients; null when affine
// is disabled and no explicit initial tensor was supplied.
val weight: Tensor[T] =
  if (initWeight != null) initWeight else if (affine) Tensor[T](nOutput) else null
val bias: Tensor[T] =
  if (initBias != null) initBias else if (affine) Tensor[T](nOutput) else null

val gradWeight: Tensor[T] =
  if (initGradWeight != null) initGradWeight else if (affine) Tensor[T](nOutput) else null
val gradBias: Tensor[T] =
  if (initGradBias != null) initGradBias else if (affine) Tensor[T](nOutput) else null
@transient
// BatchNormalization has internal parameters (saveMean, saveStd)
// that changes at every forward, so a standard gradcheck won't work with this module.
// if you want to do a gradcheck, you will need to fix those variables, otherwise not fix.
protected var needFix: Boolean = false

// Default initialization: gamma ~ U(0, 1), beta = 0.
{
  val wInit = RandomUniform(0, 1)
  val bInit = Zeros

  setInitMethod(wInit, bInit)
}

/** Re-initializes weight/bias with the configured init methods (skipped for
  * explicitly supplied initial tensors) and zeroes the gradients. */
override def reset(): Unit = {
  if (null != weight && initWeight == null) {
    weightInitMethod.init(weight, VariableFormat.ONE_D)
  }

  if (null != bias && initBias == null) {
    biasInitMethod.init(bias, VariableFormat.ONE_D)
  }

  zeroGradParameters()
}
@inline
// to fix internal parameters (saveMean, saveStd)
def setInit(status: Boolean = true): this.type = {
  needFix = status
  this
}

// Rejects inputs that are not nDim-dimensional mini-batches; a single
// (nDim-1)-dimensional sample is accepted in evaluation mode only.
@inline
protected def checkInputDim(input: Tensor[T]): Unit = {
  require(input.dim() == nDim || (input.dim() == nDim - 1 && train == false),
    s"only mini-batch supported (${nDim}D tensor), got ${input.dim()}D tensor instead")
}

// Promotes a single evaluation-mode sample to a batch of size one.
@inline
protected def makeBatch(input: Tensor[T]): Tensor[T] = {
  if (input.dim() == nDim - 1 && train == false) {
    input.addSingletonDimension()
  } else {
    input
  }
}

// Resets running statistics to (mean = 0, var = 1) for `channels` channels.
@inline
protected def initializeBuffer(channels: Int): Unit = {
  runningMean.resize(channels).zero
  runningVar.resize(channels).fill(ev.one)
}

// Scratch tensors reused across passes (released in clearState).
protected val gMean = Tensor[T]()
protected val gxMean = Tensor[T]()
protected val _input = Tensor[T]()
protected val _gradOutput = Tensor[T]()

// Flat statistic arrays — presumably staging buffers for the cross-thread
// sync path registered in updateOutput; confirm against the sync code.
var globalMean: Array[T] = new Array[T](0)
var globalStd: Array[T] = new Array[T](0)
var globalGMean: Array[T] = new Array[T](0)
var globalGxmMean: Array[T] = new Array[T](0)
/**
 * Releases temporary state to reduce the serialized/retained footprint.
 * In addition to the parent's buffers, resets the backward-pass scratch
 * tensors gMean and gxMean.
 */
override def clearState(): this.type = {
  super.clearState()
  gMean.set()
  gxMean.set()
  this
}
/**
 * Returns (weights, gradients) when the layer is affine.
 * NOTE(review): returns null (not empty arrays) when affine is disabled —
 * this mirrors how the surrounding code allocates weight/bias as null in
 * that case; callers are expected to null-check.
 */
override def parameters(): (Array[Tensor[T]], Array[Tensor[T]]) = {
  if (affine) {
    (Array(this.weight, this.bias), Array(this.gradWeight, this.gradBias))
  } else {
    null
  }
}
// Non-learnable state that must still travel with the model
// (running statistics used at inference time).
override def getExtraParameter(): Array[Tensor[T]] = {
  Array(runningMean, runningVar)
}
/**
 * Returns a table of this layer's named parameters keyed by the layer name.
 * Includes weight/bias and their gradients only when the layer is affine;
 * the running statistics are always included.
 */
override def getParametersTable(): Table = {
  if (affine) {
    T(getName() -> T("weight" -> weight, "bias" -> bias,
      "gradWeight" -> gradWeight, "gradBias" -> gradBias,
      "runningMean" -> runningMean, "runningVar" -> runningVar))
  } else {
    T(getName() -> T("runningMean" -> runningMean, "runningVar" -> runningVar))
  }
}
// Human-readable summary listing the main hyper-parameters.
override def toString(): String = {
  s"nn.BatchNormalization($nOutput, $eps, $momentum, $affine)"
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[BatchNormalization[T]]
/**
 * Structural equality over hyper-parameters, learned parameters and running
 * statistics. NOTE: the `case that: BatchNormalization[T]` match is unchecked
 * at runtime due to type erasure — the element type T is not verified.
 */
override def equals(other: Any): Boolean = other match {
  case that: BatchNormalization[T] =>
    super.equals(that) &&
      (that canEqual this) &&
      nDim == that.nDim &&
      runningMean == that.runningMean &&
      runningVar == that.runningVar &&
      weight == that.weight &&
      bias == that.bias &&
      nOutput == that.nOutput &&
      eps == that.eps &&
      momentum == that.momentum &&
      affine == that.affine
  case _ => false
}
/**
 * Hash over the same components as [[equals]], combined with the
 * conventional 31-based accumulation.
 */
override def hashCode(): Int = {
  val components = Seq(super.hashCode(), nDim, runningMean, runningVar, weight, bias,
    nOutput, eps, momentum, affine)
  var acc = 0
  components.foreach { c => acc = 31 * acc + c.hashCode() }
  acc
}
/**
 * Forward pass. Normalizes the input per channel using batch statistics in
 * training mode (also updating the running statistics) or the stored running
 * statistics in evaluation mode.
 *
 * @param input tensor validated by checkInputDim (mini-batch, or a single
 *              sample in evaluation mode)
 * @return the normalized output, same shape as the input
 */
override def updateOutput(input: Tensor[T]): Tensor[T] = {
  // Keys are suffixed with this module's id so several instances can
  // synchronize their statistics independently under data parallelism.
  val parallism = getParallism().getOrElse(1)
  val meanKeyWithId = s"${this.meanKey}_${this.getId}"
  val stdKeyWithId = s"${this.stdKey}_${this.getId}"
  val gmKeyWithId = s"${this.gmKey}_${this.getId}"
  val gxmKeyWithId = s"${this.gxmKey}_${this.getId}"
  // Register synchronization points only when more than one worker takes
  // part; needSync tells the kernels to exchange per-channel statistics.
  val needSync = if (parallism != 1) {
    ParameterSynchronizer.register(meanKeyWithId, parallism)
    ParameterSynchronizer.register(stdKeyWithId, parallism)
    ParameterSynchronizer.register(gmKeyWithId, parallism)
    ParameterSynchronizer.register(gxmKeyWithId, parallism)
    true
  } else false
  checkInputDim(input)
  output.resizeAs(input)
  // Work on a reshaped view: append singleton dims 3 and 4 so the spatial
  // (NCHW) batch-norm kernels can be reused for the non-spatial case.
  _input.set(input)
  makeBatch(_input)
  _input.addSingletonDimension(_input, 3)
  _input.addSingletonDimension(_input, 4)
  val nInput = _input.size(channelDim)
  if (runningMean.nElement == 0 || runningMean.nElement < nInput) {
    initializeBuffer(nInput)
  }
  saveMean.resizeAs(runningMean).zero
  saveStd.resizeAs(runningVar).fill(ev.zero)
  // Grow the cross-worker statistics arrays if the channel count increased.
  val nChannels = _input.size(2)
  if (globalMean.size < nChannels) {
    globalMean = new Array[T](nChannels)
  }
  if (globalStd.size < nChannels) {
    globalStd = new Array[T](nChannels)
  }
  if (train) {
    // Training: compute batch statistics, update running mean/var, and
    // dispatch on the numeric element type (Float vs Double kernels).
    if (ev.getType() == FloatType) {
      SpatialBatchNormalization.updateOutputNCHWTrainFloat(
        _input.asInstanceOf[Tensor[Float]], output.asInstanceOf[Tensor[Float]],
        saveMean.asInstanceOf[Tensor[Float]], saveStd.asInstanceOf[Tensor[Float]],
        runningMean.asInstanceOf[Tensor[Float]], runningVar.asInstanceOf[Tensor[Float]],
        weight.asInstanceOf[Tensor[Float]], bias.asInstanceOf[Tensor[Float]],
        eps.toFloat, momentum.toFloat,
        globalMean = globalMean.asInstanceOf[Array[Float]],
        globalStd = globalStd.asInstanceOf[Array[Float]],
        meanKey = meanKeyWithId, stdKey = stdKeyWithId, needSync = needSync)
    } else {
      SpatialBatchNormalization.updateOutputNCHWTrainDouble(
        _input.asInstanceOf[Tensor[Double]], output.asInstanceOf[Tensor[Double]],
        saveMean.asInstanceOf[Tensor[Double]], saveStd.asInstanceOf[Tensor[Double]],
        runningMean.asInstanceOf[Tensor[Double]], runningVar.asInstanceOf[Tensor[Double]],
        weight.asInstanceOf[Tensor[Double]], bias.asInstanceOf[Tensor[Double]],
        eps, momentum,
        globalMean = globalMean.asInstanceOf[Array[Double]],
        globalStd = globalStd.asInstanceOf[Array[Double]],
        meanKey = meanKeyWithId, stdKey = stdKeyWithId, needSync = needSync)
    }
  } else {
    // Inference: normalize with the stored running statistics.
    if (ev.getType() == FloatType) {
      SpatialBatchNormalization.updateOutputNCHWInferFloat(
        _input.asInstanceOf[Tensor[Float]], output.asInstanceOf[Tensor[Float]],
        runningMean.asInstanceOf[Tensor[Float]], runningVar.asInstanceOf[Tensor[Float]],
        weight.asInstanceOf[Tensor[Float]], bias.asInstanceOf[Tensor[Float]], eps.toFloat)
    } else {
      SpatialBatchNormalization.updateOutputNCHWInferDouble(
        _input.asInstanceOf[Tensor[Double]], output.asInstanceOf[Tensor[Double]],
        runningMean.asInstanceOf[Tensor[Double]], runningVar.asInstanceOf[Tensor[Double]],
        weight.asInstanceOf[Tensor[Double]], bias.asInstanceOf[Tensor[Double]], eps)
    }
  }
  output
}
/**
 * Backward pass: computes the gradient w.r.t. the input.
 * Relies on `_input`, `saveMean` and `saveStd` populated by the preceding
 * updateOutput call. Dispatches to Float/Double NCHW kernels and on
 * train/eval mode, then squeezes the two singleton dims added in the
 * forward pass so gradInput matches the caller's input shape.
 */
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
  val gmKeyWithId = s"${this.gmKey}_${this.getId}"
  val gxmKeyWithId = s"${this.gxmKey}_${this.getId}"
  val needSync = getParallism() != None && getParallism().get > 1
  // Reshape gradOutput into the same 4-D view used by the forward pass.
  _gradOutput.set(gradOutput)
  makeBatch(_gradOutput)
  _gradOutput.addSingletonDimension(_gradOutput, 3)
  _gradOutput.addSingletonDimension(_gradOutput, 4)
  gxMean.zero()
  gMean.zero()
  // Grow the cross-worker gradient-statistics arrays if needed.
  val nChannel = _gradOutput.size(2)
  if (globalGMean.size < nChannel) {
    globalGMean = new Array[T](nChannel)
  }
  if (globalGxmMean.size < nChannel) {
    globalGxmMean = new Array[T](nChannel)
  }
  if (train) {
    if (ev.getType() == FloatType) {
      SpatialBatchNormalization.updateGradInputNCHWTrainFloat(
        _input.asInstanceOf[Tensor[Float]], _gradOutput.asInstanceOf[Tensor[Float]],
        gradInput.asInstanceOf[Tensor[Float]], weight.asInstanceOf[Tensor[Float]],
        saveMean.asInstanceOf[Tensor[Float]], saveStd.asInstanceOf[Tensor[Float]],
        gMean.asInstanceOf[Tensor[Float]], gxMean.asInstanceOf[Tensor[Float]],
        globalGMean.asInstanceOf[Array[Float]], globalGxmMean.asInstanceOf[Array[Float]],
        gMeanKey = gmKeyWithId, gxMeanKey = gxmKeyWithId, needSync = needSync)
    } else {
      SpatialBatchNormalization.updateGradInputNCHWTrainDouble(
        _input.asInstanceOf[Tensor[Double]], _gradOutput.asInstanceOf[Tensor[Double]],
        gradInput.asInstanceOf[Tensor[Double]], weight.asInstanceOf[Tensor[Double]],
        saveMean.asInstanceOf[Tensor[Double]], saveStd.asInstanceOf[Tensor[Double]],
        gMean.asInstanceOf[Tensor[Double]], gxMean.asInstanceOf[Tensor[Double]],
        globalGMean.asInstanceOf[Array[Double]], globalGxmMean.asInstanceOf[Array[Double]],
        gMeanKey = gmKeyWithId, gxMeanKey = gxmKeyWithId, needSync = needSync)
    }
  } else {
    // Inference-mode backward does not need the saved batch statistics.
    if (ev.getType() == FloatType) {
      SpatialBatchNormalization.updateGradInputNCHWInferFloat(
        _gradOutput.asInstanceOf[Tensor[Float]],
        gradInput.asInstanceOf[Tensor[Float]], weight.asInstanceOf[Tensor[Float]],
        bias.asInstanceOf[Tensor[Float]])
    } else {
      SpatialBatchNormalization.updateGradInputNCHWInferDouble(
        _gradOutput.asInstanceOf[Tensor[Double]],
        gradInput.asInstanceOf[Tensor[Double]], weight.asInstanceOf[Tensor[Double]],
        bias.asInstanceOf[Tensor[Double]])
    }
  }
  // Undo the two singleton dimensions appended above.
  gradInput.squeeze(4)
  gradInput.squeeze(3)
  gradInput
}
/**
 * Accumulates gradients w.r.t. weight and bias into gradWeight/gradBias,
 * scaled by scaleW/scaleB. No-op when the layer has no learnable weight or
 * the weight scale is zero. Uses `_input`, `_gradOutput`, `saveMean` and
 * `saveStd` left over from the forward/backward passes.
 */
override def accGradParameters(input: Tensor[T], gradOutput: Tensor[T]): Unit = {
  if (weight == null || scaleW == 0) {
    return
  }
  if (ev.getType() == FloatType) {
    SpatialBatchNormalization.accGradientNCHWFloat(_gradOutput.asInstanceOf[Tensor[Float]],
      gradWeight.asInstanceOf[Tensor[Float]], gradBias.asInstanceOf[Tensor[Float]],
      _input.asInstanceOf[Tensor[Float]], saveMean.asInstanceOf[Tensor[Float]],
      saveStd.asInstanceOf[Tensor[Float]], scaleW.toFloat, scaleB.toFloat)
  } else {
    SpatialBatchNormalization.accGradientNCHWDouble(_gradOutput.asInstanceOf[Tensor[Double]],
      gradWeight.asInstanceOf[Tensor[Double]], gradBias.asInstanceOf[Tensor[Double]],
      _input.asInstanceOf[Tensor[Double]], saveMean.asInstanceOf[Tensor[Double]],
      saveStd.asInstanceOf[Tensor[Double]], scaleW, scaleB)
  }
}
}
/**
 * Companion object: factory methods plus custom (de)serialization of the
 * running/saved statistics, which are stored as extra module attributes.
 */
object BatchNormalization extends ModuleSerializable {
  /** Factory taking the full set of hyper-parameters and optional initial tensors. */
  def apply[@specialized(Float, Double) T: ClassTag](
    nOutput: Int,
    eps: Double = 1e-5,
    momentum: Double = 0.1,
    affine: Boolean = true,
    initWeight: Tensor[T] = null,
    initBias: Tensor[T] = null,
    initGradWeight: Tensor[T] = null,
    initGradBias: Tensor[T] = null)
    (implicit ev: TensorNumeric[T]): BatchNormalization[T] = {
    new BatchNormalization[T](
      nOutput, eps, momentum, affine, initWeight, initBias, initGradWeight, initGradBias)
  }
  // Convenience factory: Some(n) -> affine layer over n outputs,
  // None -> non-affine layer with a single output.
  def apply[@specialized(Float, Double) T: ClassTag](
    affine: Option[Int])(implicit ev: TensorNumeric[T]): BatchNormalization[T] = {
    new BatchNormalization[T](nOutput = affine.getOrElse(1), affine = affine.isDefined)
  }
  /** Restores the module, then re-attaches the four statistics tensors from the attribute map. */
  override def doLoadModule[T: ClassTag](context: DeserializeContext)
    (implicit ev: TensorNumeric[T]) : AbstractModule[Activity, Activity, T] = {
    val attrMap = context.bigdlModule.getAttrMap
    val batchNorm = super.doLoadModule(context).asInstanceOf[BatchNormalization[T]]
    batchNorm.runningMean = DataConverter.
      getAttributeValue(context, attrMap.get("runningMean")).
      asInstanceOf[Tensor[T]]
    batchNorm.runningVar = DataConverter.
      getAttributeValue(context, attrMap.get("runningVar")).
      asInstanceOf[Tensor[T]]
    batchNorm.saveMean = DataConverter.
      getAttributeValue(context, attrMap.get("saveMean")).
      asInstanceOf[Tensor[T]]
    batchNorm.saveStd = DataConverter.
      getAttributeValue(context, attrMap.get("saveStd")).
      asInstanceOf[Tensor[T]]
    batchNorm
  }
  /** Serializes the module, then writes each statistics tensor as a named attribute. */
  override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
    batchNormBuilder : BigDLModule.Builder)
    (implicit ev: TensorNumeric[T]) : Unit = {
    super.doSerializeModule(context, batchNormBuilder)
    val batchNorm = context.moduleData.module.asInstanceOf[BatchNormalization[T]]
    val runningMeanBuilder = AttrValue.newBuilder
    DataConverter.setAttributeValue(context, runningMeanBuilder,
      batchNorm.runningMean, ModuleSerializer.tensorType)
    batchNormBuilder.putAttr("runningMean", runningMeanBuilder.build)
    val runningVarBuilder = AttrValue.newBuilder
    DataConverter.setAttributeValue(context, runningVarBuilder,
      batchNorm.runningVar, ModuleSerializer.tensorType)
    batchNormBuilder.putAttr("runningVar", runningVarBuilder.build)
    val saveMeanBuilder = AttrValue.newBuilder
    DataConverter.setAttributeValue(context, saveMeanBuilder,
      batchNorm.saveMean, ModuleSerializer.tensorType)
    batchNormBuilder.putAttr("saveMean", saveMeanBuilder.build)
    val saveStdBuilder = AttrValue.newBuilder
    DataConverter.setAttributeValue(context, saveStdBuilder,
      batchNorm.saveStd, ModuleSerializer.tensorType)
    batchNormBuilder.putAttr("saveStd", saveStdBuilder.build)
  }
}
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/BatchNormalization.scala | Scala | apache-2.0 | 17,192 |
package edu.gemini.qv.plugin.charts.util
import edu.gemini.qpt.shared.sp.Obs
import edu.gemini.qv.plugin.QvContext
import edu.gemini.qv.plugin.selector.OptionsSelector._
import edu.gemini.qv.plugin.selector.{OptionsSelector, ConstraintsSelector}
import edu.gemini.qv.plugin.util.ConstraintsCache.{MaxElevation, MinElevation}
import edu.gemini.qv.plugin.util.{SemesterData, NonSiderealCache, SolutionProvider}
import edu.gemini.skycalc.TimeUtils
import edu.gemini.spModel.core.Site
import edu.gemini.util.skycalc.calc.{TargetCalculator, Interval, Solution}
import edu.gemini.util.skycalc.{SkycalcTarget, SiderealTarget, Night}
import java.awt.{Stroke, Color}
import java.util.UUID
import org.jfree.chart.axis.ValueAxis
import org.jfree.chart.plot.XYPlot
import org.jfree.chart.renderer.xy.{XYSplineRenderer, XYLineAndShapeRenderer}
import org.jfree.data.xy.{XYSeries, XYSeriesCollection}
import scala.collection._
/**
* Some helper functionality to draw QV related functions on a JFreeChart XYPlot.
*/
/**
 * Helper for drawing QV-related curves (elevation, sky brightness, parallactic
 * angle, etc.) on a JFreeChart XYPlot over a sequence of nights.
 *
 * Sampling strategy: for short ranges (< 8 nights) curves are sampled at a
 * regular rate over the full range; for longer ranges only one point per
 * (middle of) night is used.
 */
class XYPlotter(ctx: QvContext, nights: Seq[Night], constraints: ConstraintsSelector, options: OptionsSelector, plot : XYPlot) extends XYAxes {
  val range = intervalFor(nights)
  val sampling = if (nights.size < 8) regularSampling else midNightTimeSampling
  val overSampling = if (nights.size < 8) regularSampling else overMidNightTimeSampling
  // Axis choice depends on the user's airmass/elevation preference and on
  // whether we plot per-night (mid-night) values or continuous curves.
  val elevationAxis =
    if (options.isSelected(AirmassRuler))
      if (nights.size > 8) MidNightAirmassAxis else AirmassAxis
    else
      if (nights.size > 8) MainMidNightElevationAxis else MainElevationAxis
  /**
   * Plots the selected option curves for the given observations twice:
   * once restricted to the constraint solution (inRenderer) and once over the
   * whole sampling range (outRenderer), so in/out-of-constraint portions can
   * be styled differently.
   */
  def plotCurves(obs: Seq[Obs], options: Set[ChartOption], inRenderer: XYLineAndShapeRenderer, outRenderer: XYLineAndShapeRenderer): Unit = {
    // Non-sidereal targets come from the Horizons cache; sidereal ones are
    // built directly from the observation's coordinates.
    def targetFor(o: Obs): SkycalcTarget =
      if (NonSiderealCache.isHorizonsTarget(o)) NonSiderealCache.get(nights, o)
      else SiderealTarget(o.getRa, o.getDec)
    def targetCalcFor(o: Obs): TargetCalculator =
      XYPlotter.getCalculator(ctx.site, targetFor(o), overSampling)
    val tcs = obs.map(o => o -> targetCalcFor(o)).toMap
    options.foreach {
      case ElevationCurve =>
        plotSolution(elevationAxis, obs, inRenderer, sampling, o => t => {
          // in order to make it more obvious where a target is visible sometime during the night but just
          // happens to be below the horizon at middle night time we draw negative elevation values during
          // solution intervals as 0
          val e = tcs(o).elevationAt(t)
          if (e > 0) e else 0
        })
      case SkyBrightnessCurve =>
        plotSolution(SkyBrightnessAxis, obs, inRenderer, sampling, o => t => tcs(o).skyBrightnessAt(t))
      case ParallacticAngleCurve =>
        plotSolution(ParallacticAngleAxis, obs, inRenderer, sampling, o => t => tcs(o).parallacticAngleAt(t))
      case LunarDistanceCurve =>
        plotSolution(LunarDistanceAxis, obs, inRenderer, sampling, o => t => tcs(o).lunarDistanceAt(t))
      case HourAngleCurve =>
        plotSolution(HourAngleAxis, obs, inRenderer, sampling, o => t => tcs(o).hourAngleAt(t))
      case _ => // Ignore
    }
    // Second pass: unrestricted curves rendered with the "outside" style.
    options.foreach {
      case ElevationCurve =>
        plotFunction(elevationAxis, obs, outRenderer, sampling, (o, t) => tcs(o).elevationAt(t))
      case SkyBrightnessCurve =>
        plotFunction(SkyBrightnessAxis, obs, outRenderer, sampling, (o, t) => tcs(o).skyBrightnessAt(t))
      case ParallacticAngleCurve =>
        plotFunction(ParallacticAngleAxis, obs, outRenderer, sampling, (o, t) => tcs(o).parallacticAngleAt(t))
      case LunarDistanceCurve =>
        plotFunction(LunarDistanceAxis, obs, outRenderer, sampling, (o, t) => tcs(o).lunarDistanceAt(t))
      case HourAngleCurve =>
        plotFunction(HourAngleAxis, obs, outRenderer, sampling, (o, t) => tcs(o).hourAngleAt(t))
      case _ => // Ignore
    }
  }
  /** Plots f sampled over the whole range for each observation. */
  def plotFunction(axis: ValueAxis, obs: Seq[Obs], renderer: XYLineAndShapeRenderer, sampling: Seq[Long], f: (Obs, Long) => Double): Unit = {
    val data = new XYSeriesCollection
    obs.foreach { o => plotFunction(data, o, sampling, f) }
    plotCurves(axis, data, renderer)
  }
  /** Adds one sampled series for observation o to the given collection. */
  def plotFunction(data: XYSeriesCollection, o: Obs, sampling: Seq[Long], f: (Obs, Long) => Double): Unit = {
    val series = new XYSeries(o)
    sampling.foreach(t => series.add(t, f(o, t)))
    data.addSeries(series)
  }
  /** Plots a single pre-computed function over its defined intervals. */
  def plotFunction(axis: ValueAxis, renderer: XYLineAndShapeRenderer, f: MyFunction): Unit = {
    val data = new XYSeriesCollection
    // Random UUID as series key: this series is anonymous (not tied to an Obs).
    val series = new XYSeries(UUID.randomUUID())
    plotSolution2(series, f.defined.restrictTo(range), f.times, t => f.valueAt(t))
    data.addSeries(series)
    plotCurves(axis, data, renderer)
  }
  /** Plots one pre-computed function per observation over its defined intervals. */
  def plotFunction(axis: ValueAxis, obs: Seq[Obs], renderer: XYLineAndShapeRenderer, f: Obs => MyFunction): Unit = {
    val data = new XYSeriesCollection
    obs.foreach { o =>
      val func = f(o)
      val series = new XYSeries(o)
      plotSolution2(series, func.defined.restrictTo(range), func.times, t => func.valueAt(t))
      data.addSeries(series)
    }
    plotCurves(axis, data, renderer)
  }
  /** Plots each observation's function restricted to its constraint solution. */
  def plotSolution(axis: ValueAxis, obs: Seq[Obs], renderer: XYLineAndShapeRenderer, f: Obs => MyFunction): Unit = {
    val data = new XYSeriesCollection
    obs.foreach { o =>
      val func = f(o)
      val s = SolutionProvider(ctx).solution(nights, constraints.selected, o).restrictTo(range)
      // For long ranges extend the solution to full days so nightly samples connect.
      val solution = if (nights.size < 8) s else s.allDay(ctx.timezone)
      val series = new XYSeries(o)
      plotSolution2(series, func.defined.intersect(solution), func.times, t => func.valueAt(t))
      data.addSeries(series)
    }
    plotCurves(axis, data, renderer)
  }
  /** Plots a sampled value function per observation, restricted to its constraint solution. */
  def plotSolution(axis: ValueAxis, obs: Seq[Obs], renderer: XYLineAndShapeRenderer, sampling: Seq[Long], f: Obs => Long => Double): Unit = {
    val data = new XYSeriesCollection
    obs.foreach { o =>
      val s = SolutionProvider(ctx).solution(nights, constraints.selected, o).restrictTo(range)
      val solution = if (nights.size < 8) s else s.allDay(ctx.timezone)
      val series = new XYSeries(o)
      plotSolution2(series, solution, sampling, f(o))
      data.addSeries(series)
    }
    plotCurves(axis, data, renderer)
  }
  // Fills a series over each solution interval. The null points added just
  // before/after each interval break the polyline so JFreeChart does not
  // connect separate intervals with a line.
  private def plotSolution2(series: XYSeries, solution: Solution, sampling: Seq[Long], f: Long => Double): Unit = {
    solution.intervals.foreach { i =>
      series.add(i.start-1, null)
      series.add(i.start, f(i.start))
      sampling.filter(t => t > i.start && t < i.end).foreach { t =>
        series.add(t, f(t))
      }
      series.add(i.end, f(i.end))
      series.add(i.end+1, null)
    }
  }
  /** Adds all currently selected options. */
  def plotOptions(obs: Seq[Obs], details: Set[ChartOption], inRenderer: XYLineAndShapeRenderer, outRenderer: XYLineAndShapeRenderer): Unit = {
    if (details.contains(MinElevationCurve))
      plotFunction(elevationAxis, obs, inRenderer, o => new NightlyFunction(nights, n => SolutionProvider(ctx).value(MinElevation, n, o)))
    if (details.contains(MaxElevationCurve))
      plotFunction(elevationAxis, obs, inRenderer, o => new NightlyFunction(nights, n => SolutionProvider(ctx).value(MaxElevation, n, o)))
    if (details.contains(MinElevationCurve))
      plotSolution(elevationAxis, obs, outRenderer, o => new NightlyFunction(nights, n => SolutionProvider(ctx).value(MinElevation, n, o)))
    if (details.contains(MaxElevationCurve))
      plotSolution(elevationAxis, obs, outRenderer, o => new NightlyFunction(nights, n => SolutionProvider(ctx).value(MaxElevation, n, o)))
  }
  /**
   * Adds a set of curves to the plot.
   * All curves use the given axis as their range axis; the axis is added to the plot if needed.
   * The renderer finally assigns the colors to each curve (i.e. for each data series in the series collection).
   *
   * @param axis
   * @param data
   * @param renderer
   */
  def plotCurves(axis: ValueAxis, data: XYSeriesCollection, renderer: XYLineAndShapeRenderer): Unit = {
    // Each dataset gets the next free index; reuse an existing axis when the
    // same range axis has already been attached to the plot.
    val ix = plot.getDatasetCount
    val existingAxis = findAxis(plot, axis)
    val axisIndex =
      if (existingAxis.isDefined) existingAxis.get
      else { plot.setRangeAxis(ix, axis); ix }
    plot.setDataset(ix, data)
    plot.setRenderer(ix, renderer)
    plot.mapDatasetToRangeAxis(ix, axisIndex)
  }
  // Full time interval covered by the given (non-empty) nights.
  private def intervalFor(nights: Seq[Night]): Interval = {
    require(nights.size > 0)
    Interval(nights.head.start, nights.last.end)
  }
  // === Sampling rates
  // 201 evenly spaced samples across the range (integer-division rate).
  private def regularSampling: Vector[Long] = {
    val rate = (range.end - range.start) / 200
    val times = for (i <- 0 to 200) yield range.start + (i * rate)
    times.toVector
  }
  // One sample per night at the middle of the night, thinned for long ranges
  // (every 2nd/3rd/4th night) to bound the number of points.
  private def midNightTimeSampling: Vector[Long] = {
    require(nights.size > 0)
    val times =
      if (nights.size > 600) nights.sliding(1,4).flatten.map(_.middleNightTime).toVector
      else if (nights.size > 300) nights.sliding(1,3).flatten.map(_.middleNightTime).toVector
      else if (nights.size > 150) nights.sliding(1,2).flatten.map(_.middleNightTime).toVector
      else nights.map(_.middleNightTime).toVector
    times
  }
  // Mid-night samples over the full context range, padded by one day on each
  // side so curves extend slightly beyond the visible range.
  private def overMidNightTimeSampling: Vector[Long] = {
    require(nights.size > 0)
    val allNights = SemesterData.nights(ctx.site, ctx.range)
    val times = (allNights.head.middleNightTime - TimeUtils.days(1)) +:
      allNights.map(_.middleNightTime) :+
      (allNights.last.middleNightTime + TimeUtils.days(1))
    times.toVector
  }
}
/**
 * Companion object: renderer factories and a bounded cache of target
 * calculators (keyed by site, target and sampling).
 */
object XYPlotter {

  /** Line renderer without shape markers; the first `count` series get the given color/stroke. */
  def lineRenderer(color: Color, stroke: Stroke, count: Int = 1) = new XYLineAndShapeRenderer() {
    setBaseShapesVisible(false)
    for (ix <- 0 to count - 1) {
      setSeriesPaint(ix, color)
      setSeriesStroke(ix, stroke)
    }
  }

  /** Spline (smoothed) renderer without shape markers; same styling rules as lineRenderer. */
  def splineRenderer(color: Color, stroke: Stroke, count: Int = 1) = new XYSplineRenderer() {
    setBaseShapesVisible(false)
    for (ix <- 0 to count - 1) {
      setSeriesPaint(ix, color)
      setSeriesStroke(ix, stroke)
    }
  }

  // Target Calc cache!!
  case class CalcKey(site: Site, target: SkycalcTarget, sampling: Vector[Long])

  case class TimedKey(t: Long, key: CalcKey) extends Ordered[TimedKey] {
    // BUGFIX: the previous implementation compared via (this.t - that.t).toInt,
    // which truncates once timestamps differ by more than Int.MaxValue
    // milliseconds (~24.8 days), corrupting the eviction order. It also made
    // two entries created in the same millisecond compare as equal, so the
    // SortedSet silently dropped one and calcAge drifted out of sync with
    // calcCache. Compare the full Long values and break ties on the key's
    // hash (residual hash collisions are still possible but far less likely).
    def compare(that: TimedKey): Int = {
      val byTime = java.lang.Long.compare(this.t, that.t)
      if (byTime != 0) byTime else Integer.compare(this.key.hashCode, that.key.hashCode)
    }
  }

  // NOTE(review): calcCache is a concurrent TrieMap but calcAge is a plain
  // mutable SortedSet — confirm getCalculator is only called from one thread.
  private val calcCache = concurrent.TrieMap[CalcKey, TargetCalculator]()
  private val calcAge = mutable.SortedSet[TimedKey]()

  /**
   * Returns a cached target calculator for (site, target, sampling), creating
   * and caching a new one on a miss. The cache is capped at 500 entries;
   * on overflow the entry with the oldest creation time is evicted.
   */
  def getCalculator(site: Site, target: SkycalcTarget, sampling: Vector[Long]): TargetCalculator = {
    val key = CalcKey(site, target, sampling)
    calcCache.getOrElseUpdate(key, {
      if (calcCache.size >= 500) {
        val oldest = calcAge.head
        calcAge.remove(oldest)
        calcCache.remove(oldest.key)
      }
      val tc = TargetCalculator(site, target, sampling)
      calcAge.add(TimedKey(System.currentTimeMillis(), key))
      // getOrElseUpdate inserts the computed value itself; the explicit
      // calcCache.put(key, tc) the original code performed here was redundant.
      tc
    })
  }
}
| arturog8m/ocs | bundle/edu.gemini.qv.plugin/src/main/scala/edu/gemini/qv/plugin/charts/util/XYPlotter.scala | Scala | bsd-3-clause | 10,838 |
package me.axiometry.blocknet.entity
import me.axiometry.blocknet.item._
/**
 * A humanoid creature: wears a four-slot armor set and may hold an item.
 */
trait Humanoid extends Creature {

  /**
   * The armor worn by a humanoid. Each slot is optional.
   * The `_=` members return an updated Armor rather than Unit, i.e. this is
   * an immutable-style update API, not a conventional mutable setter.
   */
  trait Armor {
    def helmet: Option[ItemStack]
    def chestplate: Option[ItemStack]
    def leggings: Option[ItemStack]
    def boots: Option[ItemStack]

    def helmet_=(helmet: Option[ItemStack]): Armor
    def chestplate_=(chestplate: Option[ItemStack]): Armor
    def leggings_=(leggings: Option[ItemStack]): Armor
    def boots_=(boots: Option[ItemStack]): Armor
  }

  def armor: Armor
  def heldItem: Option[ItemStack]

  // Explicit `: Unit` result types added — the original declarations used
  // (deprecated) procedure syntax for these abstract setters.
  def armor_=(armor: Armor): Unit
  def heldItem_=(heldItem: Option[ItemStack]): Unit
}
/**
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert.norbertutils
/** Cake-pattern component exposing a [[Clock]] for dependency injection. */
trait ClockComponent {
  val clock: Clock
}
/** Abstraction over time sources, allowing tests to substitute a mock clock. */
trait Clock {
  /** Current wall-clock time in milliseconds. */
  def getCurrentTimeMilliseconds: Long
  // do not use this for absolute time
  // only for computing intervals
  def getCurrentTimeOffsetMicroseconds: Long
}
/** Test clock whose reported time is controlled manually via `currentTime`. */
object MockClock extends Clock {
  var currentTime: Long = 0L

  // Both queries simply report the manually set value.
  override def getCurrentTimeMilliseconds: Long = currentTime
  override def getCurrentTimeOffsetMicroseconds: Long = currentTime
}
/** Production clock backed by the JVM system timers. */
object SystemClock extends Clock {
  override def getCurrentTimeMilliseconds: Long = System.currentTimeMillis
  // nanoTime-based: suitable only for measuring intervals, not absolute time.
  override def getCurrentTimeOffsetMicroseconds: Long = System.nanoTime / 1000
}
/** Default ClockComponent wiring the production SystemClock. */
object SystemClockComponent extends ClockComponent {
  val clock = SystemClock
}
| jhartman/norbert | cluster/src/main/scala/com/linkedin/norbert/norbertutils/Clock.scala | Scala | apache-2.0 | 1,278 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.models.classification.logistic_regression
import breeze.linalg.DenseMatrix
import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.org.trustedanalytics.sparktk.LogisticRegressionModelWithFrequency
import org.apache.spark.sql.Row
import org.json4s.JsonAST.JValue
import org.trustedanalytics.sparktk.TkContext
import org.trustedanalytics.sparktk.frame._
import org.trustedanalytics.sparktk.frame.internal.RowWrapper
import org.trustedanalytics.sparktk.frame.internal.ops.classificationmetrics.{ ClassificationMetricsFunctions, ClassificationMetricValue }
import org.trustedanalytics.sparktk.frame.internal.rdd.{ ScoreAndLabel, RowWrapperFunctions, FrameRdd }
import org.trustedanalytics.sparktk.saveload.{ SaveLoad, TkSaveLoad, TkSaveableObject }
import scala.language.implicitConversions
import org.trustedanalytics.scoring.interfaces.{ ModelMetaData, Field, Model }
import org.apache.spark.mllib.linalg.DenseVector
import org.trustedanalytics.sparktk.models.{ SparkTkModelAdapter, ScoringModelUtils }
import java.nio.file.{ Files, Path }
import org.apache.commons.io.FileUtils
object LogisticRegressionModel extends TkSaveableObject {
  /**
   * Build logistic regression model.
   *
   * Create a logistic regression model and train it using the obseravtion columns and label column of a given frame
   *
   * @param frame A frame to train the model on.
   * @param observationColumns Column(s) containing the observations.
   * @param labelColumn Column name containing the label for each observation.
   * @param frequencyColumn Optional column containing the frequency of observations.
   * @param numClasses Number of classes
   *                   numClasses should not exceed the number of distinct values in labelColumn
   * @param optimizer Set type of optimizer.
   *                  LBFGS - Limited-memory BFGS.
   *                  LBFGS supports multinomial logistic regression.
   *                  SGD - Stochastic Gradient Descent.
   *                  SGD only supports binary logistic regression.
   * @param computeCovariance Compute covariance matrix for the model.
   * @param intercept Add intercept column to training data.
   * @param featureScaling Perform feature scaling before training model.
   * @param threshold Threshold for separating positive predictions from negative predictions.
   * @param regType Set type of regularization
   *                L1 - L1 regularization with sum of absolute values of coefficients
   *                L2 - L2 regularization with sum of squares of coefficients
   * @param regParam Regularization parameter
   * @param numIterations Maximum number of iterations
   * @param convergenceTolerance Convergence tolerance of iterations for L-BFGS. Smaller value will lead to higher accuracy with the cost of more iterations.
   * @param numCorrections Number of corrections used in LBFGS update.
   *                       Default is 10.
   *                       Values of less than 3 are not recommended;
   *                       large values will result in excessive computing time.
   * @param miniBatchFraction Fraction of data to be used for each SGD iteration
   * @param stepSize Initial step size for SGD. In subsequent steps, the step size decreases by stepSize/sqrt(t)
   * @return A LogisticRegressionModel with a summary of the trained model.
   */
  def train(frame: Frame,
            observationColumns: Seq[String],
            labelColumn: String,
            frequencyColumn: Option[String] = None,
            numClasses: Int = 2,
            optimizer: String = "LBFGS",
            computeCovariance: Boolean = true,
            intercept: Boolean = true,
            featureScaling: Boolean = false,
            threshold: Double = 0.5,
            regType: String = "L2",
            regParam: Double = 0,
            numIterations: Int = 100,
            convergenceTolerance: Double = 0.0001,
            numCorrections: Int = 10,
            miniBatchFraction: Double = 1d,
            stepSize: Double = 1d) = {
    // Validate all hyper-parameters up front so failures happen before any
    // distributed work is started.
    require(frame != null, "frame is required")
    require(optimizer == "LBFGS" || optimizer == "SGD", "optimizer name must be 'LBFGS' or 'SGD'")
    require(numClasses > 1, "number of classes must be greater than 1")
    if (optimizer == "SGD") require(numClasses == 2, "multinomial logistic regression not supported for SGD")
    require(observationColumns != null && observationColumns.nonEmpty, "observation columns must not be null nor empty")
    require(labelColumn != null && !labelColumn.isEmpty, "label column must not be null nor empty")
    require(numIterations > 0, "number of iterations must be a positive value")
    require(regType == "L1" || regType == "L2", "regularization type must be 'L1' or 'L2'")
    require(convergenceTolerance > 0, "convergence tolerance for LBFGS must be a positive value")
    require(numCorrections > 0, "number of corrections for LBFGS must be a positive value")
    require(miniBatchFraction > 0, "mini-batch fraction for SGD must be a positive value")
    require(stepSize > 0, "step size for SGD must be a positive value")
    frame.schema.validateColumnsExist(observationColumns :+ labelColumn)
    // Bundle the arguments for the optimizer-specific model factory.
    val arguments = LogisticRegressionTrainArgs(frame,
      observationColumns.toList,
      labelColumn,
      frequencyColumn,
      numClasses,
      optimizer,
      computeCovariance,
      intercept,
      featureScaling,
      threshold,
      regType,
      regParam,
      numIterations,
      convergenceTolerance,
      numCorrections,
      miniBatchFraction,
      stepSize)
    val frameRdd = new FrameRdd(frame.schema, frame.rdd)
    //create RDD from the frame
    val labeledTrainRdd = frameRdd.toLabeledPointRDDWithFrequency(labelColumn, observationColumns.toList, frequencyColumn)
    //Running MLLib
    val mlModel = LogisticRegressionModelWrapperFactory.createModel(arguments)
    val sparkLogRegModel = mlModel.getModel.run(labeledTrainRdd)
    // Summaries (and optional covariance frame) are derived from the trained
    // model plus the optimizer's Hessian (when computed).
    val trainingSummary = buildSummaryTable(frame.rdd.sparkContext, sparkLogRegModel, observationColumns, intercept, mlModel.getHessianMatrix)
    LogisticRegressionModel(observationColumns.toList,
      labelColumn,
      frequencyColumn,
      numClasses,
      optimizer,
      computeCovariance,
      intercept,
      featureScaling,
      threshold,
      regType,
      regParam,
      numIterations,
      convergenceTolerance,
      numCorrections,
      miniBatchFraction,
      stepSize,
      trainingSummary,
      mlModel.getHessianMatrix,
      sparkLogRegModel)
  }
  /**
   * Reconstructs a trained model from its saved representation.
   *
   * @param sc active spark context
   * @param path the source path
   * @param formatVersion the version of the format for the tk metadata that should be recorded.
   * @param tkMetadata the data to save (should be a case class), must be serializable to JSON using json4s
   */
  def loadTkSaveableObject(sc: SparkContext, path: String, formatVersion: Int, tkMetadata: JValue): Any = {
    validateFormatVersion(formatVersion, 1)
    val m: LogisticRegressionModelMetaData = SaveLoad.extractFromJValue[LogisticRegressionModelMetaData](tkMetadata)
    val sparkLogRegModel: LogisticRegressionModelWithFrequency = LogisticRegressionModelWithFrequency.load(sc, path)
    // The Hessian is optional metadata: null data means it was not computed.
    val hessianMatrixNew: Option[DenseMatrix[Double]] = m.hessianMatrixData match {
      case null => None
      case other => Some(new DenseMatrix(m.hessianMatrixRows, m.hessianMatrixCols, m.hessianMatrixData))
    }
    val finalSummaryTable = buildSummaryTable(sc, sparkLogRegModel, m.observationColumns, m.intercept, hessianMatrixNew)
    LogisticRegressionModel(m.observationColumns,
      m.labelColumn,
      m.frequencyColumn,
      m.numClasses,
      m.optimizer,
      m.computeCovariance,
      m.intercept,
      m.featureScaling,
      m.threshold,
      m.regType,
      m.regParam,
      m.numIterations,
      m.convergenceTolerance,
      m.numCorrections,
      m.miniBatchFraction,
      m.stepSize,
      finalSummaryTable,
      hessianMatrixNew,
      sparkLogRegModel)
  }
  /**
   * Load a LogisticRegressionModel from the given path
   *
   * @param tc TkContext
   * @param path location
   * @return the loaded model
   */
  def load(tc: TkContext, path: String): LogisticRegressionModel = {
    tc.load(path).asInstanceOf[LogisticRegressionModel]
  }
  // Helper to build the logistic regression summary table; when a Hessian is
  // available, an approximate covariance matrix frame is also produced.
  def buildSummaryTable(sc: SparkContext,
                        sparkLogRegModel: LogisticRegressionModelWithFrequency,
                        observationColumns: Seq[String],
                        intercept: Boolean,
                        hessianMatrix: Option[DenseMatrix[Double]]): LogisticRegressionSummaryTable = {
    //Create summary table and covariance frame
    val summaryTable = SummaryTableBuilder(sparkLogRegModel,
      observationColumns.toList,
      intercept,
      hessianMatrix)
    val covarianceFrame = summaryTable.approxCovarianceMatrix match {
      case Some(matrix) =>
        val coFrameRdd = matrix.toFrameRdd(sc, summaryTable.coefficientNames)
        val coFrame = new Frame(coFrameRdd.rdd, coFrameRdd.schema)
        Some(coFrame)
      case _ => None
    }
    summaryTable.build(covarianceFrame)
  }
}
/**
* Logistic Regression Model
*
* @param observationColumns Column(s) containing the observations.
* @param labelColumn Column name containing the label for each observation.
* @param frequencyColumn Optional column containing the frequency of observations.
* @param numClasses Number of classes
* @param optimizer Set type of optimizer.
* LBFGS - Limited-memory BFGS.
* LBFGS supports multinomial logistic regression.
* SGD - Stochastic Gradient Descent.
* SGD only supports binary logistic regression.
* @param computeCovariance Compute covariance matrix for the model.
* @param intercept Add intercept column to training data.
* @param featureScaling Perform feature scaling before training model.
* @param threshold Threshold for separating positive predictions from negative predictions.
* @param regType Set type of regularization
* L1 - L1 regularization with sum of absolute values of coefficients
* L2 - L2 regularization with sum of squares of coefficients
* @param regParam Regularization parameter
* @param numIterations Maximum number of iterations
* @param convergenceTolerance Convergence tolerance of iterations for L-BFGS. Smaller value will lead to higher accuracy with the cost of more iterations.
* @param numCorrections Number of corrections used in LBFGS update.
* Default is 10.
* Values of less than 3 are not recommended;
* large values will result in excessive computing time.
* @param miniBatchFraction Fraction of data to be used for each SGD iteration
* @param stepSize Initial step size for SGD. In subsequent steps, the step size decreases by stepSize/sqrt(t)
* @param trainingSummary logistic regression training summary table
* @param hessianMatrix hessianMatrix
* @param sparkModel Spark LogisticRegressionModel
*/
case class LogisticRegressionModel private[logistic_regression] (observationColumns: List[String],
labelColumn: String,
frequencyColumn: Option[String],
numClasses: Int,
optimizer: String,
computeCovariance: Boolean,
intercept: Boolean,
featureScaling: Boolean,
threshold: Double,
regType: String,
regParam: Double,
numIterations: Int,
convergenceTolerance: Double,
numCorrections: Int,
miniBatchFraction: Double,
stepSize: Double,
trainingSummary: LogisticRegressionSummaryTable,
hessianMatrix: Option[DenseMatrix[Double]],
sparkModel: LogisticRegressionModelWithFrequency) extends Serializable with Model {
// Enriches RowWrapper with conversion helpers (presumably valuesAsDenseVector / valuesAsLabeledPoint used below) — confirm in RowWrapperFunctions
implicit def rowWrapperToRowWrapperFunctions(rowWrapper: RowWrapper): RowWrapperFunctions = {
new RowWrapperFunctions(rowWrapper)
}
/**
* Predict labels for data points using trained logistic regression model.
*
* Predict the labels for a test frame using trained logistic regression model, and create a new frame revision with
* existing columns and a new predicted label's column.
*
* @param frame A frame whose labels are to be predicted. By default, predict is run on the same columns over which the model is trained.
* @param observationColumnsPredict Column(s) containing the observations whose labels are to be predicted. Default is the columns the model was trained on.
* @return Frame containing the original frame's columns and a column with the predicted label.
*/
def predict(frame: Frame, observationColumnsPredict: Option[List[String]]): Frame = {
require(frame != null, "frame is required")
//Running MLLib
if (observationColumnsPredict.isDefined) {
require(observationColumns.length == observationColumnsPredict.get.length,
"Number of columns for train and predict should be same")
}
val logRegColumns = observationColumnsPredict.getOrElse(observationColumns)
//predicting a label for the observation columns
val predictColumn = Column(frame.schema.getNewColumnName("predicted_label"), DataTypes.int32)
// Maps one wrapped row to a single-column Row holding the predicted (integer) label
val predictMapper: RowWrapper => Row = row => {
val point = row.valuesAsDenseVector(logRegColumns)
val prediction = sparkModel.predict(point).toInt
Row.apply(prediction)
}
val predictSchema = frame.schema.addColumn(predictColumn)
val wrapper = new RowWrapper(predictSchema)
// Append the predicted label to each original row
val predictRdd = frame.rdd.map(row => Row.merge(row, predictMapper(wrapper(row))))
new Frame(predictRdd, predictSchema)
}
/**
* Saves this model to a file
*
* @param sc active SparkContext
* @param path save to path
*/
def save(sc: SparkContext, path: String): Unit = {
sparkModel.save(sc, path)
val formatVersion: Int = 1
// Flatten the optional hessian matrix; a null data array marks "no matrix" and is matched on load
val (hessMatrixRows, hessMatrixCols, hessMatrixDataArray) = hessianMatrix match {
case Some(matrix) => (matrix.rows, matrix.cols, matrix.data)
case None => (0, 0, null)
}
val tkMetaData = LogisticRegressionModelMetaData(observationColumns.toList,
labelColumn,
frequencyColumn,
numClasses,
optimizer,
computeCovariance,
intercept,
featureScaling,
threshold,
regType,
regParam,
numIterations,
convergenceTolerance,
numCorrections,
miniBatchFraction,
stepSize,
hessMatrixRows,
hessMatrixCols,
hessMatrixDataArray)
TkSaveLoad.saveTk(sc, path, LogisticRegressionModel.formatId, formatVersion, tkMetaData)
}
/**
* Get the predictions for observations in a test frame
*
* @param frame Frame whose labels are to be predicted.
* @param labelColumn Column containing the actual label for each observation.
* @param observationColumnsTest Column(s) containing the observations whose labels are to be predicted and tested. Default is to test over the columns the model was trained on.
* @return A dictionary with binary classification metrics.
* The data returned is composed of the following keys\:
* 'accuracy' : double
* The proportion of predictions that are correctly identified
* 'confusion_matrix' : dictionary
* A table used to describe the performance of a classification model
* 'f_measure' : double
* The harmonic mean of precision and recall
* 'precision' : double
* The proportion of predicted positive instances that are correctly identified
* 'recall' : double
* The proportion of positive instances that are correctly identified.
*/
def test(frame: Frame, labelColumn: String, observationColumnsTest: Option[List[String]]): ClassificationMetricValue = {
if (observationColumnsTest.isDefined) {
require(observationColumns.length == observationColumnsTest.get.length, "Number of columns for train and test should be same")
}
val logRegColumns = observationColumnsTest.getOrElse(observationColumns)
val frameRdd = new FrameRdd(frame.schema, frame.rdd)
//predicting and testing
val scoreAndLabelRdd = frameRdd.toScoreAndLabelRdd(row => {
val labeledPoint = row.valuesAsLabeledPoint(logRegColumns, labelColumn)
val score = sparkModel.predict(labeledPoint.features)
ScoreAndLabel(score, labeledPoint.label)
})
//Run classification metrics
// Binary metrics need a positive-class label; multiclass metrics do not
sparkModel.numClasses match {
case 2 => {
val posLabel: Double = 1.0d
ClassificationMetricsFunctions.binaryClassificationMetrics(scoreAndLabelRdd, posLabel)
}
case _ => ClassificationMetricsFunctions.multiclassClassificationMetrics(scoreAndLabelRdd)
}
}
// Score a single observation row: coerce all values to Double and append the predicted label
override def score(row: Array[Any]): Array[Any] = {
require(row != null && row.length > 0, "scoring input row must not be null nor empty")
val doubleArray = row.map(i => ScoringModelUtils.asDouble(i))
val predictedLabel = sparkModel.predict(new DenseVector(doubleArray)).toInt
row :+ predictedLabel
}
override def modelMetadata(): ModelMetaData = {
new ModelMetaData("Logistic Regression", classOf[LogisticRegressionModel].getName, classOf[SparkTkModelAdapter].getName, Map())
}
// Scoring input schema: one Double field per observation column
override def input(): Array[Field] = {
val obsCols = observationColumns
var input = Array[Field]()
obsCols.foreach { name =>
input = input :+ Field(name, "Double")
}
input
}
// Scoring output schema: all input fields plus the predicted label
override def output(): Array[Field] = {
var output = input()
output :+ Field("PredictedLabel", "Int")
}
// Saves the model to a temp directory, then packages it into a MAR archive at marSavePath
def exportToMar(sc: SparkContext, marSavePath: String): String = {
var tmpDir: Path = null
try {
tmpDir = Files.createTempDirectory("sparktk-scoring-model")
save(sc, "file://" + tmpDir.toString)
ScoringModelUtils.saveToMar(marSavePath, classOf[LogisticRegressionModel].getName, tmpDir)
}
finally {
sys.addShutdownHook(FileUtils.deleteQuietly(tmpDir.toFile)) // Delete temporary directory on exit
}
}
}
/**
* Logistic Regression Meta data
*
* @param observationColumns Column(s) containing the observations.
* @param labelColumn Column name containing the label for each observation.
* @param frequencyColumn Optional column containing the frequency of observations.
* @param numClasses Number of classes
* @param optimizer Set type of optimizer.
* LBFGS - Limited-memory BFGS.
* LBFGS supports multinomial logistic regression.
* SGD - Stochastic Gradient Descent.
* SGD only supports binary logistic regression.
* @param computeCovariance Compute covariance matrix for the model.
* @param intercept Add intercept column to training data.
* @param featureScaling Perform feature scaling before training model.
* @param threshold Threshold for separating positive predictions from negative predictions.
* @param regType Set type of regularization
* L1 - L1 regularization with sum of absolute values of coefficients
* L2 - L2 regularization with sum of squares of coefficients
* @param regParam Regularization parameter
* @param numIterations Maximum number of iterations
* @param convergenceTolerance Convergence tolerance of iterations for L-BFGS. Smaller value will lead to higher accuracy with the cost of more iterations.
* @param numCorrections Number of corrections used in LBFGS update.
* Default is 10.
* Values of less than 3 are not recommended;
* large values will result in excessive computing time.
* @param miniBatchFraction Fraction of data to be used for each SGD iteration
* @param stepSize Initial step size for SGD. In subsequent steps, the step size decreases by stepSize/sqrt(t)
* @param hessianMatrixRows hessian matrix rows count
* @param hessianMatrixCols hessian matrix cols count
* @param hessianMatrixData hessian matrix data array
*/
// NOTE: hessianMatrixData is null (not an empty array) when no hessian matrix was computed;
// save() writes the null sentinel and loadTkSaveableObject() matches on it.
case class LogisticRegressionModelMetaData(observationColumns: List[String],
labelColumn: String,
frequencyColumn: Option[String],
numClasses: Int,
optimizer: String,
computeCovariance: Boolean,
intercept: Boolean,
featureScaling: Boolean,
threshold: Double,
regType: String,
regParam: Double,
numIterations: Int,
convergenceTolerance: Double,
numCorrections: Int,
miniBatchFraction: Double,
stepSize: Double,
hessianMatrixRows: Int,
hessianMatrixCols: Int,
hessianMatrixData: Array[Double]) extends Serializable
/**
* Input arguments for logistic regression train plugin
*/
// Fields mirror the training parameters documented on LogisticRegressionModel above,
// plus the frame to train on.
case class LogisticRegressionTrainArgs(frame: Frame,
observationColumns: List[String],
labelColumn: String,
frequencyColumn: Option[String],
numClasses: Int,
optimizer: String,
computeCovariance: Boolean,
intercept: Boolean,
featureScaling: Boolean,
threshold: Double,
regType: String,
regParam: Double,
numIterations: Int,
convergenceTolerance: Double,
numCorrections: Int,
miniBatchFraction: Double,
stepSize: Double) | ashaarunkumar/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/models/classification/logistic_regression/LogisticRegressionModel.scala | Scala | apache-2.0 | 25,626 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils
import scala.reflect.ClassTag
trait Shape {
/**
* Use this method if it is only a single Shape
*/
def toSingle(): List[Int] = throw new RuntimeException("Invalid operation")
/**
* Use this method if the current Shape consists of multiple shapes
*/
def toMulti(): List[Shape] = throw new RuntimeException("Invalid operation")
/**
* Update the given dim and return a new copy (unsupported by default; overridden by SingleShape)
*/
def copyAndUpdate(dim: Int, v: Int): Shape = throw new RuntimeException("Invalid operation")
/**
* Update the given dim and return a new copy (unsupported by default; overridden by MultiShape)
*/
def copyAndUpdate(dim: Int, v: Shape): Shape
= throw new RuntimeException("Invalid operation")
/**
* Resolve `dim` to a valid index in [0, length): negative values count back
* from the end (e.g. -1 is the last dimension).
*/
protected def getDim(dim: Int, length: Int): Int = {
val rdim = if (dim < 0) {
length + dim
} else {
dim
}
require(rdim < length && rdim >=0, "out of range")
rdim
}
}
/**
 * A Shape holding a single list of dimension sizes.
 *
 * @param value the dimension sizes
 */
case class SingleShape(val value: List[Int]) extends Shape {

  override def toSingle(): List[Int] = value

  // Replace the size at `dim` (negative values count from the end) and return a new Shape
  override def copyAndUpdate(dim: Int, v: Int): Shape = {
    val cValue = value.toArray
    cValue(getDim(dim, value.length)) = v
    Shape(cValue)
  }

  override def canEqual(a: Any): Boolean = a.isInstanceOf[SingleShape]

  // Structural equality: compare the underlying values directly.
  // (The previous implementation compared hash codes, which would wrongly
  // equate distinct shapes whose hashes collide.)
  override def equals(that: Any): Boolean =
    that match {
      case that: SingleShape => that.canEqual(this) && this.value == that.value
      case _ => false
    }

  override def hashCode: Int = 31 * value.hashCode()
}
/**
 * A Shape composed of multiple sub-shapes.
 *
 * @param value the sub-shapes
 */
case class MultiShape(val value: List[Shape]) extends Shape {

  override def toMulti(): List[Shape] = value

  // Replace the sub-shape at `dim` (negative values count from the end) and return a new Shape
  override def copyAndUpdate(dim: Int, v: Shape): Shape = {
    val cValue = value.toArray
    cValue(getDim(dim, value.length)) = v
    MultiShape(cValue.toList)
  }

  override def canEqual(a: Any): Boolean = a.isInstanceOf[MultiShape]

  // Structural equality: compare the sub-shapes directly.
  // (The previous implementation compared hash codes, which would wrongly
  // equate distinct shapes whose hashes collide.)
  override def equals(that: Any): Boolean =
    that match {
      case that: MultiShape => that.canEqual(this) && this.value == that.value
      case _ => false
    }

  override def hashCode: Int = 31 * value.hashCode()
}
object Shape {

  /** Create a Shape from an array of dimension sizes; the array must be non-null. */
  def apply(item: Array[Int]): Shape = {
    if (item == null) {
      throw new IllegalArgumentException("Empty value")
    }
    new SingleShape(item.toList)
  }

  /** Create a Shape from varargs dimension sizes. */
  def apply(item: Int*): Shape = new SingleShape(item.toList)

  /** Wrap a list of shapes: several become a MultiShape, a single one is returned as-is. */
  def apply[T <: Shape : ClassTag](shapes: List[Shape]): Shape = shapes match {
    case Nil => throw new IllegalArgumentException("Empty value")
    case single :: Nil => single
    case many => MultiShape(many)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/Shape.scala | Scala | apache-2.0 | 3,338 |
package io.github.oxlade39.storrent.test.util
import akka.actor.{Terminated, Props, ActorRef, Actor}
import akka.event.LoggingReceive
/**
 * Test helper that wraps a child actor and relays every message it receives
 * to `fwd`, stopping itself once the child terminates.
 *
 * @author dan
 */
class StepParent(child: Props, fwd: ActorRef) extends Actor {
  context.watch(context.actorOf(child, "child"))

  def receive = LoggingReceive {
    case Terminated(_) => context.stop(self)
    case msg => fwd forward msg
  }
}
/**
* Test helper that sits between this actor's parent and a child actor: the
* probe sees a copy of every message, while the message itself continues on
* to the child (when it came from the parent) or to the parent (otherwise).
*/
class FosterParent(childProps: Props, probe: ActorRef) extends Actor {
val child = context.actorOf(childProps, "child")
def receive = {
// Message from our parent: let the probe observe it, then pass it down to the child
case msg if sender == context.parent =>
probe forward msg
child forward msg
// Anything else (e.g. from the child): observe, then pass it up to the parent
case msg =>
probe forward msg
context.parent forward msg
}
}
/**
* Test helper that watches a child actor. Messages coming from the child are
* sent both to `forwardTo` and to this actor's parent; all other messages go
* to the child. Stops itself when the child terminates.
*/
class ForwardingParent(childProps: Props, forwardTo: ActorRef) extends Actor {
val child = context.watch(context.actorOf(childProps, "child"))
def receive = {
case Terminated(_) => context.stop(self)
// From the child: fan out to both the designated recipient and our parent
case msg if sender == child =>
forwardTo forward msg
context.parent forward msg
// Everything else goes down to the child
case msg =>
child forward msg
}
} | oxlade39/STorrent | src/test/scala/io/github/oxlade39/storrent/test/util/StepParent.scala | Scala | apache-2.0 | 1,065 |
/* Copyright 2015 Devon Miller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.im
package vdom
package backend
import scala.language._
import org.scalatest._
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Await
import scala.concurrent.duration._
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
/**
* A base DOM backend that can be extended with specific components as needed.
* This trait defines the context type.
*/
trait TestBackend {
// Concrete, instantiable backend for these tests; could equally be written inline as `new TestBackend {}`
class MyTestBackend extends TestBackend
/**
* A test backend is used to test IOAction run functionality.
*/
class BackendSpec extends FlatSpec
with Assertions
with Matchers
with OptionValues {
// Single backend instance shared across all test cases
val b = new MyTestBackend()
"Backend" should "run a successful action" in {
val action = Action.successful(true)
val result = b.run(action)
assertResult(true)(Await.result(result, 1 seconds))
}
it should "contain the exception when projecting with action.failed " in {
val ex = new IllegalArgumentException("bad arg")
val action = Action.failed(ex)
val result = b.run(action)
assertResult(ex)(Await.result(result.failed, 1 seconds))
}
it should "throw an exception when the action in action.failed does not contain an exception" in {
val a = Action.successful(10)
intercept[NoSuchElementException](Await.result(b.run(a.failed), 1 seconds))
}
it should "return a future value" in {
val f = Future(1)
val action = Action.from(f)
val result = b.run(action)
assertResult(1)(Await.result(result, 1 seconds))
}
it should "map" in {
val action = Action.successful(1).map(_ + 1)
val result = b.run(action)
assertResult(2)(Await.result(result, 1 seconds))
}
it should "flatMap" in {
val action = Action.successful(1)
val action2 = action.flatMap(n => Action.successful(n + 1))
val result = b.run(action2)
assertResult(2)(Await.result(result, 1 seconds))
}
it should "sequence actions, run them, return Unit" in {
// Counts how many individual actions actually executed
val r = new AtomicInteger(0)
val actions = (1 to 10).map(n => ContextualAction {
r.incrementAndGet()
n
})
val actions2 = (1 to 10).map(n =>
ContextualAction[Int, MyTestBackend] { ctx: MyTestBackend#Context =>
r.incrementAndGet()
n
})
val action = Action.seq(actions: _*)
val result = b.run(action)
assertResult(())(Await.result(result, 1 seconds))
assertResult(10)(r.get)
val action2 = Action.seq(actions2: _*)
val result2 = b.run(action2)
assertResult(())(Await.result(result2, 1 seconds))
assertResult(20)(r.get)
}
it should "allow easy extraction to a value" in {
val x = Action.successful(Some(10))
val y = x.map(_.get)
val r = b.run(y)
assertResult(10)(Await.result(r, 1 seconds))
val extracted = x.flatMap { opt =>
opt match {
case Some(x) => Action.successful(x)
case _ => Action.failed(new NoSuchElementException("no el"))
}
}
assertResult(10)(Await.result(b.run(extracted), 1 seconds))
}
it should "always call finally" in {
val a = Action.successful(10)
val counter = new AtomicInteger(0)
val af = a.andFinally(ContextualAction {
counter.incrementAndGet()
})
val r = Await.result(b.run(af), 1 seconds)
assertResult(1)(counter.get)
// andFinally must also run when the base action fails
val ex = new IllegalArgumentException("blah")
val a2 = Action.failed(ex)
val af2 = a2.andFinally(ContextualAction {
counter.incrementAndGet()
})
assertResult(ex)(Await.result(b.run(af2.failed), 1 seconds))
assertResult(2)(counter.get)
}
it should "propagate base actions failure when using finally" in {
val a: IOAction[Int] = Action.failed(new IllegalArgumentException("blah"))
val counter = new AtomicInteger(0)
val af = a.andFinally(ContextualAction {
counter.incrementAndGet()
})
val f = b.run(af.failed)
val r = Await.result(f, 1 seconds)
assertResult(1)(counter.get)
}
it should "call cleanup when successful" in {
val x = Action.successful(Some(10))
val y = x.map(_.get)
val r = b.run(y)
val counter = new AtomicInteger(0)
// On success the cleanup callback receives None
val cleanup = y.cleanUp { err: Option[Throwable] =>
err match {
case Some(t) =>
fail("should not be called")
case _ =>
counter.incrementAndGet()
Action.successful(-1)
}
}
assertResult(10)(Await.result(b.run(cleanup), 1 seconds))
assertResult(1)(counter.get)
}
it should "pass exception from base action to cleanup" in {
val ex = new IllegalArgumentException("ouch!")
val baseAction: IOAction[Int] = Action.failed(ex)
val counter = new AtomicInteger(0)
val cleanup2: IOAction[Int] = baseAction
.cleanUp { err: Option[Throwable] =>
err match {
case Some(t) =>
counter.incrementAndGet()
Action.successful(())
case _ =>
fail("should not be called")
}
}
val x = Await.result(b.run(cleanup2).failed, 1 seconds)
assertResult(ex)(x)
assertResult(1)(counter.get)
}
it should "return cleanup actions exception if base fails, cleanUp fails and keepFailure=false" in {
val ex = new IllegalArgumentException("ouch!")
val baseAction: IOAction[Int] = Action.failed(ex)
val returnedEx = new NoSuchElementException()
val cleanup: IOAction[Int] = baseAction.cleanUp({
_ match {
case Some(t) => Action.failed(returnedEx)
case _ => Action.successful(())
}
}, false)
val r = Await.result(b.run(cleanup.failed), 1 seconds)
assertResult(returnedEx)(r)
}
} | nightscape/scala-vdom | jvm/src/test/scala/org/im/vdom/backend/BakendSpec.scala | Scala | apache-2.0 | 6,408 |
package mesosphere.marathon
package core.storage.repository
import java.time.OffsetDateTime
import akka.stream.scaladsl.Source
import akka.{ Done, NotUsed }
import scala.concurrent.Future
/** Repository that can store exactly one value of T */
trait SingletonRepository[T] {
// Fetch the stored value, if any
def get(): Future[Option[T]]
// Store `v`, replacing any existing value
def store(v: T): Future[Done]
// Remove the stored value
def delete(): Future[Done]
}
/**
* A Repository of values (T) identified uniquely by (Id)
*/
trait ReadOnlyRepository[Id, T] {
// Stream all known ids
def ids(): Source[Id, NotUsed]
// Stream all stored values
def all(): Source[T, NotUsed]
// Fetch the value for `id`, if present
def get(id: Id): Future[Option[T]]
}
/**
* A Repository of values (T) identified uniquely by (Id)
*/
trait Repository[Id, T] extends ReadOnlyRepository[Id, T] {
// Store `v` (its id is derived from the value itself)
def store(v: T): Future[Done]
// Delete the value stored under `id`
def delete(id: Id): Future[Done]
}
/**
* A Repository of versioned values (T) identified uniquely by (Id)
*/
trait ReadOnlyVersionedRepository[Id, T] extends ReadOnlyRepository[Id, T] {
// Stream all stored versions for `id`
def versions(id: Id): Source[OffsetDateTime, NotUsed]
// Fetch the value stored under `id` at the given version, if present
def getVersion(id: Id, version: OffsetDateTime): Future[Option[T]]
// Stream the values for the given (id, version) pairs
def getVersions(list: Seq[(Id, OffsetDateTime)]): Source[T, NotUsed]
}
/**
* A Repository of versioned values (T) identified uniquely by (Id)
*/
trait VersionedRepository[Id, T] extends ReadOnlyVersionedRepository[Id, T] with Repository[Id, T] {
// Store `v` as a new version
def storeVersion(v: T): Future[Done]
// Removes _only_ the current value, leaving all history in place.
def deleteCurrent(id: Id): Future[Done]
}
object RepositoryConstants {
// Presumably the maximum number of concurrent repository operations — TODO confirm at call sites
val maxConcurrency = 8
}
| guenter/marathon | src/main/scala/mesosphere/marathon/core/storage/repository/Repository.scala | Scala | apache-2.0 | 1,521 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of Scala code snippets that match specific criteria, offering a surface-level overview of the dataset's contents without deeper analysis.