code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.external.kudu
import akka.actor.ActorSystem
import org.apache.gearpump.Message
import org.apache.gearpump.cluster.UserConfig
import org.apache.gearpump.external.kudu.KuduSink.{KuduWriter, KuduWriterFactory}
import org.apache.gearpump.streaming.MockUtil
import org.apache.gearpump.streaming.task.TaskContext
import org.apache.kudu.client._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.prop.PropertyChecks
import org.scalatest.{Matchers, PropSpec}
class KuduSinkSpec extends PropSpec with PropertyChecks with Matchers with MockitoSugar {

  property("KuduSink should invoke KuduWriter for writing message to Kudu") {
    // Stub the factory so the sink under test is handed our mock writer.
    val kuduWriter = mock[KuduWriter]
    val kuduWriterFactory = mock[KuduWriterFactory]

    implicit val system: ActorSystem = MockUtil.system
    val userConfig = UserConfig.empty
    val tableName = "kudu"

    when(kuduWriterFactory.getKuduWriter(userConfig, tableName))
      .thenReturn(kuduWriter)

    val kuduSink = new KuduSink(userConfig, tableName, kuduWriterFactory)

    kuduSink.open(MockUtil.mockTaskContext)

    val value = ("key", "value")
    val message = Message(value)
    kuduSink.write(message)
    // The sink must delegate each written message payload to the writer...
    verify(kuduWriter, atLeastOnce()).put(message.value)

    kuduSink.close()
    // ...and close the underlying writer when the sink itself is closed.
    verify(kuduWriter).close()
  }

  property("KuduWriter should insert a row successfully") {
    // NOTE(review): this property only sets up mocks and never exercises a
    // KuduWriter, nor does it assert anything beyond the stubbed openTable
    // call — it passes vacuously. TODO: complete the test (insert a row via a
    // KuduWriter built from kuduClient and verify the resulting operation).
    val table = mock[KuduTable]
    val kuduClient = mock[KuduClient]
    val taskContext = mock[TaskContext]
    val map = Map[String, String]("KUDUSINK" -> "kudusink", "TABLE_NAME" -> "kudu.table.name",
      "COLUMN_FAMILY" -> "kudu.table.column.family", "COLUMN_NAME" -> "kudu.table.column.name",
      "KUDU_USER" -> "kudu.user", "GEARPUMP_KERBEROS_PRINCIPAL" -> "gearpump.kerberos.principal",
      "GEARPUMP_KEYTAB_FILE" -> "gearpump.keytab.file"
    )
    val userConfig = new UserConfig(map)
    val tableName = "kudu"
    val key = "key"
    val value = "value"
    when(kuduClient.openTable(tableName)).thenReturn(table)
  }
} | manuzhang/incubator-gearpump | external/kudu/src/test/scala/org/apache/gearpump/external/kudu/KuduSinkSpec.scala | Scala | apache-2.0 | 2,836 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package config
import play.api.Configuration
import support.UnitSpec
class FrontendAppConfigSpec extends UnitSpec {

  "config" should {
    "have correct 'report a problem' urls" in {
      val appConfig = new AppConfig {
        override lazy val config: Configuration = Configuration.empty
      }
      // The original used bare `==` comparisons whose Boolean results were
      // discarded, so this test could never fail. Assert explicitly instead.
      appConfig.reportAProblemPartialUrl shouldBe "/contact/problem_reports_ajax?service=tax-account-router-frontend"
      appConfig.reportAProblemNonJSUrl shouldBe "/contact/problem_reports_nonjs?service=tax-account-router-frontend"
    }
  }
}
| hmrc/tax-account-router-frontend | test/config/FrontendAppConfigSpec.scala | Scala | apache-2.0 | 1,144 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.testkit.japi
import com.typesafe.config.Config
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex.{Unicomplex, UnicomplexBoot}
import org.squbs.testkit.{CustomTestKit => SCustomTestKit, PortGetter}
import scala.collection.JavaConversions.asScalaBuffer
@deprecated("use org.squbs.testkit.japi.AbstractCustomTestKit instead")
class CustomTestKit(override val boot: UnicomplexBoot) extends AbstractCustomTestKit(boot) {
  // Alias kept so pre-existing Java callers that referenced `actorSystem`
  // keep compiling; new code should use `system` from AbstractCustomTestKit.
  val actorSystem = system
}
/**
 * Java-friendly test kit: boots (or accepts) a UnicomplexBoot and exposes its
 * actor system. The overloaded constructors mirror the Scala CustomTestKit
 * boot options (actor system name, config, classpath scanning, resources).
 */
abstract class AbstractCustomTestKit(val boot: UnicomplexBoot) extends PortGetter {

  val system = boot.actorSystem

  // Registers/validates the booted system with the Scala-side test kit.
  SCustomTestKit.checkInit(system)

  def this() {
    this(SCustomTestKit.boot())
  }

  def this(actorSystemName: String) {
    this(SCustomTestKit.boot(Option(actorSystemName)))
  }

  def this(config: Config) {
    this(SCustomTestKit.boot(config = Option(config)))
  }

  def this(withClassPath: Boolean) {
    this(SCustomTestKit.boot(withClassPath = Option(withClassPath)))
  }

  def this(resources: java.util.List[String], withClassPath: Boolean) {
    this(SCustomTestKit.boot(resources = Option(resources.toList), withClassPath = Option(withClassPath)))
  }

  def this(actorSystemName: String, resources: java.util.List[String], withClassPath: Boolean) {
    this(SCustomTestKit.boot(Option(actorSystemName), resources = Option(resources.toList), withClassPath = Option(withClassPath)))
  }

  def this(config: Config, resources: java.util.List[String], withClassPath: Boolean) {
    this(SCustomTestKit.boot(config = Option(config), resources = Option(resources.toList), withClassPath = Option(withClassPath)))
  }

  // Sends GracefulStop to the Unicomplex supervisor to shut the system down.
  def shutdown() = Unicomplex(system).uniActor ! GracefulStop
}
| Harikiranvuyyuru/squbs | squbs-testkit/src/main/scala/org/squbs/testkit/japi/CustomTestKit.scala | Scala | apache-2.0 | 2,291 |
package com.aberdyne.graphchat
// Base class for a chat protocol definition: a display name, a unique numeric
// id, and the event classes the protocol deals in.
// NOTE(review): the constructor parameters are not declared as vals, so they
// are inaccessible outside the primary constructor — confirm whether they
// should be promoted to (val) members.
abstract class Protocol(name: String, id: Long, events: List[Class[Event]]) {
} | Kenishi/GraphChat | src/main/com/aberdyne/graphchat/Protocol.scala | Scala | apache-2.0 | 113 |
package io.pathfinder.websockets
import io.pathfinder.models.{Commodity, Cluster, ModelId}
import io.pathfinder.websockets.WebSocketMessage.MessageCompanion
import io.pathfinder.authentication.AuthenticationStatus._
import play.Logger
import play.api.libs.json.{JsSuccess, JsResult, Format, Json, JsValue, __}
import play.api.mvc.WebSocket.FrameFormatter
import play.api.libs.functional.syntax._
import scala.language.postfixOps
/**
* Contains all of the web socket messages and their json formats
*/
sealed abstract class WebSocketMessage {
  // The concrete message's own type; lets `companion`/`toJson` be typed
  // precisely while still being declared on the sealed base.
  protected type M >: this.type <: WebSocketMessage
  def companion: MessageCompanion[M]
  // Wire-level message name, taken from the companion.
  def message: String = companion.message
  // Rewrites any ids/paths in the message to be scoped to `app`; None when the
  // rewrite is impossible. Default: no ids to rewrite, return self unchanged.
  def withApp(app: String): Option[M] = Some(this.asInstanceOf[M])
  // Inverse of withApp: strips application scoping. Default: self unchanged.
  def withoutApp: M = this.asInstanceOf[M]
  def toJson: JsValue = companion.format.writes(this.asInstanceOf[M])
}
object WebSocketMessage {
  import ModelTypes.{ModelType, format => modelFormat}
  import Events.{Event, format => eventFormat}

  // Mutable registry used only during object initialisation: every companion
  // below registers itself via addComp, and `stringToMessage` is materialised
  // from it at the bottom of this object. Registration order must precede
  // that materialisation.
  private val builder = Map.newBuilder[String, MessageCompanion[_ <: WebSocketMessage]]

  // Companion of a concrete message type: supplies the wire-level message
  // name and the play-json Format used to (de)serialise it.
  sealed abstract class MessageCompanion[M <: WebSocketMessage] {
    def message: String
    def format: Format[M]
  }

  private def addComp(comp: MessageCompanion[_ <: WebSocketMessage]) = builder += comp.message -> comp

  /**
   * These messages are routed to controllers based on the model they contain
   */
  sealed abstract class ControllerMessage extends WebSocketMessage {
    override type M >: this.type <: ControllerMessage
    def model: ModelType
  }

  // A controller message addressed at one specific model instance; the model
  // type is derived from the id.
  sealed abstract class ModelMessage extends ControllerMessage {
    override type M >: this.type <: ModelMessage
    def id: ModelId
    override def model = id.modelType
    override def withApp(app: String): Option[M] = id.withAppId(app).map(withId)
    override def withoutApp: M = withId(id.withoutAppId)
    protected def withId(id: ModelId): M
  }

  // Subscription-style messages may target a cluster path, a model type
  // and/or a concrete model id (all optional).
  sealed abstract class SubscriptionMessage extends WebSocketMessage {
    override type M >: this.type <: SubscriptionMessage
    def clusterId: Option[String]
    def model: Option[ModelType]
    def id: Option[ModelId]
    // NOTE: the `return None` calls inside the map lambdas are nonlocal
    // returns — they abort withApp as a whole when a rewrite fails.
    override def withApp(app: String): Option[M] = Some(
      withClusterAndId(
        clusterId.map(Cluster.addAppToPath(_, app).getOrElse(return None)),
        id.map(_.withAppId(app).getOrElse(return None))
      )
    )
    override def withoutApp: M = withClusterAndId(
      clusterId.map(Cluster.removeAppFromPath),
      id.map(_.withoutAppId)
    )
    protected def withClusterAndId(clusterId: Option[String], id: Option[ModelId]): M
  }

  // Json format for ModelMessages serialised as { model, id }.
  private def simpleModelMessageFormat[M <: ModelMessage](makeMessage: ModelId => M): Format[M] = {
    (__ \ "model").format(ModelTypes.format) and
    (__ \ "id").format[JsValue]
  }.apply[M](
    { (model: ModelType, id: JsValue) => makeMessage(ModelId.read(model, id).get) },
    { mf: M => (mf.id.modelType, ModelId.write(mf.id)) }
  )

  // Json format for SubscriptionMessages serialised as { clusterId?, model?,
  // id? }; an id present without a model is parsed as a Cluster id.
  private def subscriptionMessageFormat[M <: SubscriptionMessage](
    makeMessage: (Option[String], Option[ModelType], Option[ModelId]) => M
  ) = {
    (__ \ "clusterId").formatNullable[String] and
    (__ \ "model").formatNullable(ModelTypes.format) and
    (__ \ "id").formatNullable[JsValue]
  }.apply[M](
    { (path: Option[String], model: Option[ModelType], id: Option[JsValue]) =>
      makeMessage(
        path,
        model,
        id.map(i => ModelId.read(model.getOrElse(ModelTypes.Cluster), i).get)
      )
    }, {
      sub: M => (sub.clusterId, sub.model, sub.id.map(ModelId.write))
    }
  )

  /**
   * Standard error messages sent to client that make poor request
   */
  case class Error(error: String) extends WebSocketMessage {
    override type M = Error
    override def companion = Error
  }

  object Error extends MessageCompanion[Error] {
    override val message = "Error"
    override val format = Json.format[Error]
  }
  addComp(Error)

  // Fallback wrapper for json that matched no registered message name; the
  // format passes the raw JsValue through untouched.
  case class UnknownMessage(value: JsValue) extends WebSocketMessage {
    override type M = UnknownMessage
    override def companion = UnknownMessage
  }

  object UnknownMessage extends MessageCompanion[UnknownMessage] {
    override val message = "Unknown"
    override val format = new Format[UnknownMessage] {
      override def reads(json: JsValue): JsResult[UnknownMessage] = JsSuccess(UnknownMessage(json))
      override def writes(o: UnknownMessage): JsValue = o.value
    }
  }
  addComp(UnknownMessage)

  /**
   * Sent by the client to unsubscribe from push notifications
   */
  case class Unsubscribe(
    clusterId: Option[String],
    model: Option[ModelType],
    id: Option[ModelId]
  ) extends SubscriptionMessage {
    override type M = Unsubscribe
    override def companion = Unsubscribe
    override def withClusterAndId(clusterId: Option[String], id: Option[ModelId]) =
      copy(clusterId = clusterId, id = id)
  }

  object Unsubscribe extends MessageCompanion[Unsubscribe] {
    override val message = "Unsubscribe"
    override val format = subscriptionMessageFormat(Unsubscribe.apply)
  }
  addComp(Unsubscribe)

  /**
   * Sent by the client to subscribe to push notifications
   */
  case class Subscribe(
    clusterId: Option[String],
    model: Option[ModelType],
    id: Option[ModelId]
  ) extends SubscriptionMessage {
    override type M = Subscribe
    override def companion = Subscribe
    override def withClusterAndId(clusterId: Option[String], id: Option[ModelId]): M =
      copy(clusterId = clusterId, id = id)
  }

  object Subscribe extends MessageCompanion[Subscribe] {
    override val message = "Subscribe"
    override val format = subscriptionMessageFormat(Subscribe.apply)
  }
  addComp(Subscribe)

  /**
   * Sent by the client to subscribe to route updates
   */
  case class RouteSubscribe(
    id: ModelId
  ) extends ModelMessage {
    override type M = RouteSubscribe
    override def companion = RouteSubscribe
    override def withId(id: ModelId): RouteSubscribe = copy(id = id)
  }

  object RouteSubscribe extends MessageCompanion[RouteSubscribe] {
    override val message = "RouteSubscribe"
    override val format = simpleModelMessageFormat(RouteSubscribe.apply)
  }
  addComp(RouteSubscribe)

  /**
   * Sent by the client to unsubscribe from route updates
   */
  case class RouteUnsubscribe(
    id: ModelId
  ) extends ModelMessage {
    override type M = RouteUnsubscribe
    override def companion = RouteUnsubscribe
    override def withId(id: ModelId): RouteUnsubscribe = copy(id = id)
  }

  object RouteUnsubscribe extends MessageCompanion[RouteUnsubscribe] {
    override val message = "RouteUnsubscribe"
    override val format = simpleModelMessageFormat(RouteUnsubscribe.apply)
  }
  addComp(RouteUnsubscribe)

  // Acknowledgement sent to a client whose route subscription succeeded.
  case class RouteSubscribed(
    id: ModelId
  ) extends ModelMessage {
    override type M = RouteSubscribed
    override def companion = RouteSubscribed
    override def withId(id: ModelId) = copy(id = id)
  }

  object RouteSubscribed extends MessageCompanion[RouteSubscribed] {
    override val message = "RouteSubscribed"
    override val format = simpleModelMessageFormat(RouteSubscribed.apply)
  }
  addComp(RouteSubscribed)

  /**
   * Sent by the client to create a new model
   */
  case class Create(
    model: ModelType,
    value: JsValue
  ) extends ControllerMessage {
    override type M = Create
    override def companion = Create
  }

  object Create extends MessageCompanion[Create] {
    override val message = "Create"
    override val format = Json.format[Create]
  }
  addComp(Create)

  /**
   * Sent by the client to update a model with the specified id
   */
  case class Update(
    id: ModelId,
    value: JsValue
  ) extends ModelMessage {
    override type M = Update
    override def companion = Update
    override def withId(id: ModelId) = copy(id = id)
  }

  object Update extends MessageCompanion[Update] {
    override val message = "Update"
    // Hand-rolled format: like simpleModelMessageFormat but with the extra
    // `value` payload field.
    override val format = {
      (__ \ "model").format(ModelTypes.format) and
      (__ \ "id").format[JsValue] and
      (__ \ "value").format[JsValue]
    }.apply[Update](
      { (model: ModelType, id: JsValue, value: JsValue) => Update(ModelId.read(model, id).get, value) },
      { u: Update => (u.id.modelType, ModelId.write(u.id), u.value) }
    )
  }
  addComp(Update)

  /**
   * Sent by the client to delete the specified model
   */
  case class Delete(
    id: ModelId
  ) extends ModelMessage {
    override type M = Delete
    override def companion = Delete
    override def withId(id: ModelId) = copy(id = id)
  }

  object Delete extends MessageCompanion[Delete] {
    override val message = "Delete"
    override val format = simpleModelMessageFormat(Delete.apply)
  }
  addComp(Delete)

  /**
   * Request for when the client wants a route for a vehicle or commodity
   */
  case class Route(
    id: ModelId
  ) extends ModelMessage {
    override type M = Route
    override def companion = Route
    override def withId(id: ModelId) = copy(id = id)
  }

  object Route extends MessageCompanion[Route] {
    override val message = "Route"
    override val format = simpleModelMessageFormat(Route.apply)
  }
  addComp(Route)

  // Needed so Json.format[Routed] below can serialise the Commodity sequence.
  private implicit val cFormat = io.pathfinder.models.Commodity.format

  /**
   * Response for a route request
   */
  case class Routed(
    model: ModelType,
    value: JsValue,
    route: JsValue,
    unroutedCommodities: Option[Seq[Commodity]]
  ) extends ControllerMessage {
    override type M = Routed
    override def companion = Routed
  }

  object Routed extends MessageCompanion[Routed] {
    override val message = "Routed"
    override val format = Json.format[Routed]
  }
  addComp(Routed)

  /**
   * Sent by the client that wants to read a model from the database
   */
  case class Read(
    id: ModelId
  ) extends ModelMessage {
    override type M = Read
    override def companion = Read
    override protected def withId(id: ModelId): Read = copy(id = id)
  }

  object Read extends MessageCompanion[Read] {
    override val message = "Read"
    override val format = simpleModelMessageFormat(Read.apply)
  }
  addComp(Read)

  /**
   * Message sent to the client that requested a create
   */
  case class Created(
    model: ModelType,
    value: JsValue
  ) extends ControllerMessage {
    override type M = Created
    override def companion = Created
  }

  object Created extends MessageCompanion[Created] {
    override val message = "Created"
    override val format = Json.format[Created]
  }
  addComp(Created)

  /**
   * Message sent to a client that requested an update
   * or any clients that have subscribed to updates
   */
  case class Updated(
    model: ModelType,
    value: JsValue
  ) extends ControllerMessage {
    override type M = Updated
    override def companion = Updated
  }

  object Updated extends MessageCompanion[Updated] {
    override val message = "Updated"
    override val format = Json.format[Updated]
  }
  addComp(Updated)

  /**
   * Message sent to a client that requested a read
   */
  case class Model(
    model: ModelType,
    value: JsValue
  ) extends ControllerMessage {
    override type M = Model
    override def companion = Model
  }

  object Model extends MessageCompanion[Model] {
    override val message = "Model"
    override val format = Json.format[Model]
  }
  addComp(Model)

  /**
   * Message sent to a client that requested a delete
   */
  case class Deleted(
    model: ModelType,
    value: JsValue
  ) extends ControllerMessage {
    override type M = Deleted
    override def companion = Deleted
  }

  object Deleted extends MessageCompanion[Deleted] {
    override val message = "Deleted"
    override val format = Json.format[Deleted]
  }
  addComp(Deleted)

  /**
   * Message sent to a client that requested a subscribe
   */
  case class Subscribed(
    clusterId: Option[String],
    model: Option[ModelType],
    id: Option[ModelId]
  ) extends SubscriptionMessage {
    override type M = Subscribed
    override def companion = Subscribed
    override def withClusterAndId(clusterId: Option[String], id: Option[ModelId]): Subscribed =
      copy(clusterId = clusterId, id = id)
  }

  object Subscribed extends MessageCompanion[Subscribed] {
    override val message = "Subscribed"
    override val format = subscriptionMessageFormat(Subscribed.apply)
  }
  addComp(Subscribed)

  /**
   * Message sent to a client that requested to unsubscribe
   */
  case class Unsubscribed(
    clusterId: Option[String],
    model: Option[ModelType],
    id: Option[ModelId]
  ) extends SubscriptionMessage {
    override type M = Unsubscribed
    override def companion = Unsubscribed
    override def withClusterAndId(clusterId: Option[String], id: Option[ModelId]): Unsubscribed =
      copy(clusterId = clusterId, id = id)
  }

  object Unsubscribed extends MessageCompanion[Unsubscribed] {
    override val message = "Unsubscribed"
    override val format = subscriptionMessageFormat(Unsubscribed.apply)
  }
  addComp(Unsubscribed)

  // Request to recompute routes for the given cluster path.
  case class Recalculate(
    clusterId: String
  ) extends WebSocketMessage {
    override type M = Recalculate
    override def companion: MessageCompanion[M] = Recalculate
    override def withApp(app: String): Option[Recalculate] =
      Cluster.addAppToPath(clusterId, app).map(id => copy(clusterId = id))
    override def withoutApp: Recalculate =
      copy(clusterId = Cluster.removeAppFromPath(clusterId))
  }

  object Recalculate extends MessageCompanion[Recalculate] {
    override def message: String = "Recalculate"
    override def format: Format[Recalculate] = Json.format[Recalculate]
  }
  addComp(Recalculate)

  // Notification that a cluster's routes have been recomputed.
  case class Recalculated(
    clusterId: String
  ) extends WebSocketMessage {
    override type M = Recalculated
    override def companion = Recalculated
    override def withApp(app: String): Option[Recalculated] =
      Cluster.addAppToPath(clusterId, app).map(id => copy(clusterId = id))
    override def withoutApp: Recalculated =
      copy(clusterId = Cluster.removeAppFromPath(clusterId))
  }

  object Recalculated extends MessageCompanion[Recalculated] {
    override def message = "Recalculated"
    override def format: Format[Recalculated] = Json.format[Recalculated]
  }
  addComp(Recalculated)

  // Tells a client the id assigned to its websocket connection.
  case class ConnectionId(
    id: String
  ) extends WebSocketMessage {
    override type M = ConnectionId
    override def companion = ConnectionId
  }

  object ConnectionId extends MessageCompanion[ConnectionId] {
    override def message = "ConnectionId"
    override def format: Format[ConnectionId] = Json.format[ConnectionId]
  }
  addComp(ConnectionId)

  // Client request to authenticate this connection (optionally as a dashboard).
  case class Authenticate(
    dashboard: Option[Boolean]
  ) extends WebSocketMessage {
    override type M = Authenticate
    override def companion = Authenticate
  }

  object Authenticate extends MessageCompanion[Authenticate] {
    override def message = "Authenticate"
    override def format: Format[Authenticate] = Json.format[Authenticate]
  }
  addComp(Authenticate)

  // Server response carrying the outcome of an Authenticate request.
  case class Authenticated(status: AuthenticationStatus) extends WebSocketMessage {
    override type M = Authenticated
    override def companion = Authenticated
  }

  object Authenticated extends MessageCompanion[Authenticated] {
    override def message = "Authenticated"
    override def format: Format[Authenticated] = Json.format[Authenticated]
  }
  addComp(Authenticated)

  // Immutable lookup table of wire name -> companion, built from all addComp
  // registrations above. Must come after the last addComp call.
  val stringToMessage: Map[String, _ <: MessageCompanion[_]] = builder.result()

  Logger.info("stringToMessage: [" + stringToMessage.keys.mkString("|")+"]")

  /**
   * reads and writes WebSocketMessages from/to Json
   */
  implicit val format: Format[WebSocketMessage] = (
    (__ \ "message").format[String] and
    __.format[JsValue]
  ) (
    { case (msg,json) => stringToMessage.get(msg).map{ // reads are not covariant so a cast is required
      _.format.reads(json).recoverTotal(
        errs => Error("Could not parse json: " + json + "\n" + errs.errors.map(err =>
          err._1+":"+err._2.mkString("\n\t")
        ).mkString("\n\n"))
      ).asInstanceOf[WebSocketMessage]
    }.getOrElse(UnknownMessage(json)) },
    { msg =>
      Logger.info("Sending Message: " + msg)
      (msg.message, msg.toJson) }
  )

  /**
   * reads and writes WebSocketMessages for the WebSocketActor, uses the format above
   */
  implicit val frameFormat: FrameFormatter[WebSocketMessage] = FrameFormatter.jsonFrame[WebSocketMessage]
}
| CSSE497/pathfinder-server | app/io/pathfinder/websockets/WebSocketMessage.scala | Scala | mit | 17,851 |
package skinny.test
import javax.servlet.http._
import javax.servlet.ServletContext
import org.json4s._
import org.mockito.Mockito._
import skinny.SkinnyEnv
import skinny.micro.{ UnstableAccessValidation, SkinnyMicroParams }
import skinny.micro.context.SkinnyContext
import skinny.json.JSONStringOps
import scala.collection.concurrent.TrieMap
import skinny.controller.SkinnyControllerBase
import skinny.controller.feature.{ JSONParamsAutoBinderFeature, RequestScopeFeature }
import javax.servlet.http.HttpServletResponse
/**
* Mock Controller Base.
*/
/**
 * Controller mixin for unit tests: replaces the real servlet request/response
 * and servlet context with mocks, and lets tests pre-load params and a JSON
 * body before invoking controller actions.
 */
trait MockControllerBase extends SkinnyControllerBase with JSONParamsAutoBinderFeature {

  case class RenderCall(path: String)

  // Backing store for request-scope attributes, shared with mockRequest below.
  private val _requestScope = TrieMap[String, Any]()

  override def skipHaltingWhenRedirection = SkinnyEnv.isTest()

  override def contextPath = ""

  override def initParameter(name: String): Option[String] = None

  // Lazily built mock request pre-wired with the request-scope attribute map.
  private[this] lazy val mockRequest = {
    val req = new MockHttpServletRequest
    req.setAttribute(RequestScopeFeature.REQUEST_SCOPE_KEY, _requestScope)
    req
  }

  private[this] lazy val mockResponse = {
    new MockHttpServletResponse
  }

  override def request(implicit ctx: SkinnyContext = context): HttpServletRequest = mockRequest

  override def response(implicit ctx: SkinnyContext = context): HttpServletResponse = mockResponse

  override implicit def servletContext: ServletContext = mock(classOf[ServletContext])

  override implicit def skinnyContext(implicit ctx: ServletContext): SkinnyContext = {
    SkinnyContext.build(ctx, mockRequest, mockResponse, UnstableAccessValidation(true))
  }

  // Instead of actually halting the request cycle, throw an exception that a
  // test can catch and inspect (status/reason/headers/body).
  override def halt[T: Manifest](
    status: Integer = null,
    body: T = (),
    headers: Map[String, String] = Map.empty,
    reason: String = null): Nothing = {
    throw new MockHaltException(
      status = Option(status).map(_.intValue()),
      reason = Option(reason),
      headers = headers,
      body = body)
  }

  def getOutputStreamContents: String = {
    response.getOutputStream.toString
  }

  def getOutputStreamContents(charset: String): String = {
    response
      .getOutputStream
      .asInstanceOf[MockServletOutputStream]
      .toString(charset)
  }

  // Params injected by prepareParams; merged over the real params and, when a
  // JSON body has been prepared, over its top-level string fields too.
  private[this] val _params = TrieMap[String, Seq[String]]()

  override def params(implicit ctx: SkinnyContext) = {
    val mergedParams = (super.params(ctx) ++ new SkinnyMicroParams(_params.toMap)).mapValues(v => Seq(v))
    new SkinnyMicroParams(if (_parsedBody.isDefined) {
      getMergedMultiParams(mergedParams, parsedBody(ctx).extract[Map[String, String]].mapValues(v => Seq(v)))
    } else {
      mergedParams
    })
  }

  // Test hook: pre-load request parameters before calling an action.
  def prepareParams(params: (String, String)*) = {
    _params ++= params.map { case (k, v) => k -> Seq(v) }
  }

  private[this] var _parsedBody: Option[JValue] = None

  override def parsedBody(implicit ctx: SkinnyContext): JValue = {
    _parsedBody.getOrElse(JNothing)
  }

  // Test hook: pre-load a JSON request body; invalid JSON leaves it unset.
  def prepareJSONBodyRequest(json: String): Unit = {
    _parsedBody = JSONStringOps.fromJSONStringToJValue(json).toOption
  }

  // initialize this controller
  initializeRequestScopeAttributes(skinnyContext)
}
| Kuchitama/skinny-framework | test/src/main/scala/skinny/test/MockControllerBase.scala | Scala | mit | 3,124 |
package com.romankagan.languages.classroomanalysis
/**
* Created by roman on 5/6/15.
*/
/**
 * Entry point for the "Sorting Students" exercise (implementation pending).
 *
 * Uses an explicit `main` method instead of the `App` trait to avoid the
 * delayed-initialization pitfalls associated with `App`; behavior is
 * unchanged (the original body was empty).
 */
object SortingStudents {
  def main(args: Array[String]): Unit = {
    // TODO: implement the sorting-students solution.
  }
}
| kagan770/talentbuddy | src/com/romankagan/languages/classroomanalysis/SortingStudents.scala | Scala | apache-2.0 | 130 |
package io.youi.layout
/**
 * Lays out a container's children vertically from the bottom up: the first
 * visible child sits `spacing` above the container's bottom edge and each
 * following child sits `spacing` above the previous child's top.
 *
 * @param spacing vertical gap, in pixels, between stacked children
 */
class ReversedVerticalLayout(spacing: Double = 0.0) extends Layout {
  override def connect(container: Component): Unit = {
    update(container, Vector.empty)
  }

  // Undo the vertical snapping on every child when the layout is detached.
  override def disconnect(container: Component): Unit = Component.childrenFor(container).foreach { c =>
    Snap(c).verticalReset()
  }

  override def childrenChanged(container: Component, removed: Vector[Component], added: Vector[Component]): Unit = {
    super.childrenChanged(container, removed, added)
    update(container, removed)
  }

  // Re-snaps all visible, layout-participating children. `removed` children
  // get their snapping cleared first. The foldLeft threads the previously
  // placed child so each item can anchor its bottom above that child's top;
  // the first item anchors against the container's full height instead.
  private def update(container: Component, removed: Vector[Component]): Unit = {
    val items = Component.childrenFor(container)
    removed.foreach { c =>
      Snap(c).verticalReset()
    }
    items.filter(c => c.visible() && c.includeInLayout()).foldLeft(Option.empty[Component])((previous, current) => {
      Snap(current).verticalReset().bottomTo(previous.map(_.position.top()).getOrElse(container.size.height.value) - spacing)
      Some(current)
    })
  }
} | outr/youi | ui/js/src/main/scala/io/youi/layout/ReversedVerticalLayout.scala | Scala | mit | 1,001 |
package controllers
import play.api.mvc._
object Info extends Controller {

  /**
   * Serves a static informational HTML page describing how to query the
   * shortest path between two ArchWiki pages via POST /pages.
   * Fix: the original markup had the misspelled attribute `widht="300px"`,
   * which browsers ignore; corrected to `width`.
   */
  def info = Action {
    val html ="""<html>
<head>
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css" />
</head>
<body>
<div class="container jumbotron">
<div class="text-center">
<h1>ArchWiki</h1>
<h3>You can check shortest path between two pages using POST to /pages</h3>
<p>
<b>Example body:</b>
</p>
</div>
<div width="300px">
<pre>{
"first": "https://wiki.archlinux.org/index.php/Special:Random",
"second": "https://wiki.archlinux.org/index.php/Special:Random"
}</pre>
<div class="text-danger text-center">
<br />
<h4>Random lookup can take long time!</h4>
</div>
</div>
</body>
</html>"""
    Ok(html).as("text/html")
  }
}
| mateuszlor/ProjektArchWiki | app/controllers/Info.scala | Scala | mit | 1,217 |
package org.randi3.model.criterion
import org.randi3.model.criterion.constraint.DateConstraint
import org.randi3.model.Entity._
import scalaz._
import Scalaz._
import org.joda.time.LocalDate
// The constructor is private and carries an unused `dummy` parameter so that
// instances can only be created through the validating companion `apply`
// (which also prevents the synthetic case-class apply from bypassing checks).
case class DateCriterion private(id: Int, version: Int, name: String, description: String, inclusionConstraint: Option[DateConstraint], strata: List[DateConstraint], private val dummy: Any) extends Criterion[LocalDate, DateConstraint] {
}
object DateCriterion {

  /**
   * Validating factory: accumulates all field-validation failures via
   * ValidationNel and only constructs a DateCriterion when every check passes.
   */
  def apply(id: Int = Int.MinValue, version: Int = 0, name: String, description: String, inclusionConstraint: Option[DateConstraint], strata: List[DateConstraint]): ValidationNel[String, DateCriterion] = {
    checkAll(
      checkID(id),
      checkVersion(version),
      checkStringBetween(name, 2, maxTextLength),
      checkStringBetween(description, 2, maxTextLength),
      checkNotNull(inclusionConstraint),
      checkNotNull(strata)
    ).toEither match {
      case Left(x) => Failure(x)
      case Right(_) => Success(new DateCriterion(id, version, name, description, inclusionConstraint, strata, null))
    }
  }

  // Baseline valid instance providing default arguments for `check`.
  private def validCriterion = new DateCriterion(Int.MinValue, 0, "validName", "validDescription", None, Nil, null)

  /**
   * Validation-only variant of `apply`: runs the same checks but returns
   * Success(true) instead of building an instance.
   */
  def check(id: Int = validCriterion.id, version: Int = validCriterion.version, name: String = validCriterion.name, description: String = validCriterion.description, inclusionConstraint: Option[DateConstraint] = validCriterion.inclusionConstraint, strata: List[DateConstraint] = validCriterion.strata): ValidationNel[String, Boolean] = {
    apply(id, version, name, description, inclusionConstraint, strata).toEither match {
      case Left(x) => Failure(x)
      case Right(_) => Success(true)
    }
  }
}
| dschrimpf/randi3-core | src/main/scala/org/randi3/model/criterion/DateCriterion.scala | Scala | gpl-3.0 | 1,712 |
package pipelines.images.imagenet
import java.io.File
import scala.reflect.ClassTag
import breeze.linalg._
import breeze.stats._
import breeze.stats.distributions._
import scopt.OptionParser
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.RDD
import evaluation.MulticlassClassifierEvaluator
import loaders.ImageNetLoader
import pipelines.Logging
import nodes.images.external.{FisherVector, SIFTExtractor}
import nodes.images._
import nodes.learning._
import nodes.stats.{ColumnSampler, NormalizeRows, SignedHellingerMapper, BatchSignedHellingerMapper}
import nodes.util.{FloatToDouble, MatrixVectorizer, Cacher}
import nodes.util.{ClassLabelIndicatorsFromIntLabels, ZipVectors, TopKClassifier}
import utils.{Image, MatrixUtils, Stats}
object ImageNetSiftLcsInteractionTerms extends Serializable with Logging {
val appName = "ImageNetSiftLcsInteractionTerms"
def makeDoubleArrayCsv[T: ClassTag](filenames: RDD[String], data: RDD[Array[T]]): RDD[String] = {
filenames.zip(data).map { case (x,y) => x + ","+ y.mkString(",") }
}
  /**
   * Builds the "template interactions" featurizer from sampled descriptors:
   * ZCA-whitens the samples, fits KMeans++ templates on the normalised
   * whitened samples, draws a random Gaussian projection, then composes a
   * TemplateInteractions node with vectorisation, signed-Hellinger mapping
   * and row normalisation.
   *
   * @param samples one descriptor sample per row
   * @param conf    pipeline config (numKMeans, numGaussianRandomFeatures,
   *                featuresSaveDir)
   * @param name    cache name used when features are held in memory (only
   *                when featuresSaveDir is empty)
   */
  def constructTemplateInteractions(
      samples: DenseMatrix[Double],
      conf: ImageNetSiftLcsInteractionTermsConfig,
      name: Option[String] = None) = {
    // Compute ZCA on samples
    val zcaWhitener = new ZCAWhitenerEstimator().fitSingle(samples)

    // Apply ZCA to sift samples
    // siftSamplesFiltered is 1M by 128
    // 1M * 128
    val whiteSamples = zcaWhitener.apply(samples)
    val normalizedWhiteSamples = Stats.normalizeRows(whiteSamples, 1e-6)

    // Now run KMeans++ to get a model
    val kMeans = new KMeansPlusPlusEstimator(conf.numKMeans, 100,
      1e-3).fit(normalizedWhiteSamples)

    // Generate a random gaussian matrix, scaled by 1/sqrt(dim).
    val randomGaussianMatrix = DenseMatrix.rand(
      normalizedWhiteSamples.cols, conf.numGaussianRandomFeatures, Rand.gaussian)
    randomGaussianMatrix :/= math.sqrt(normalizedWhiteSamples.cols)

    // Fold the whitening transform into the projection weights so featurisation
    // can skip an explicit whitening step; biases account for the sample means.
    val kitchenSinksGaussianWeights = zcaWhitener.whitener * randomGaussianMatrix
    val kitchenSinksGaussianBias = kitchenSinksGaussianWeights.t * zcaWhitener.means

    val kitchenSinksTemplateWeights = zcaWhitener.whitener * kMeans.means.t
    val kitchenSinksTemplateBias = kitchenSinksTemplateWeights.t * zcaWhitener.means

    // TODO: Make 0.25 a command line argument ?
    val templateInteractions = new TemplateInteractions(
      (kitchenSinksGaussianWeights, kitchenSinksGaussianBias),
      (kitchenSinksTemplateWeights, kitchenSinksTemplateBias),
      0.0,
      0.25)

    // Now we are going to featurize
    val kitchenSinkFeaturizer = {
      templateInteractions then
      MatrixVectorizer then
      SignedHellingerMapper then
      NormalizeRows
    }

    // Only cache in memory when we are not writing features out to disk.
    if (conf.featuresSaveDir.isEmpty) {
      kitchenSinkFeaturizer then new Cacher[DenseVector[Double]](name)
    } else {
      kitchenSinkFeaturizer
    }
  }
  /**
   * Computes SIFT-based kitchen-sink features for the train and test sets:
   * grayscale -> SIFT -> signed Hellinger, then samples descriptor columns
   * (dropping low-norm ones) to fit the template-interactions featurizer,
   * and finally featurises both datasets with it.
   *
   * @return (training features, test features)
   */
  def getSiftFeatures(
      conf: ImageNetSiftLcsInteractionTermsConfig,
      trainParsed: RDD[Image],
      testParsed: RDD[Image]): (RDD[DenseVector[Double]], RDD[DenseVector[Double]]) = {
    // Part 1: Scale and convert images to grayscale.
    val grayscaler = PixelScaler then GrayScaler

    val numImgs = trainParsed.count.toInt

    // TODO: create an element wise apply function node.
    // For functions like sqrt, max, square, pow etc.
    val siftHellinger = (new SIFTExtractor(scaleStep = conf.siftScaleStep) then
      BatchSignedHellingerMapper)

    // Part 1: Get some SIFT samples and then compute the ZCA whitening matrix
    val siftFeatures = {
      grayscaler then
      siftHellinger
    }.apply(trainParsed)

    val siftSamples = new ColumnSampler(conf.numZcaSamples, Some(numImgs)).apply(
      siftFeatures)

    // Drop near-zero descriptors (norm below threshold) before fitting; cap
    // the sample at 1M rows after shuffling.
    val siftSamplesFilteredAll =
      siftSamples.filter { sift =>
        norm(sift, 2) > conf.siftThreshold
      }.map { x =>
        convert(x, Double)
      }.collect()

    val siftSamplesFiltered = MatrixUtils.rowsToMatrix(
      MatrixUtils.shuffleArray(siftSamplesFilteredAll).take(1e6.toInt))

    val featurizedKitchenSink = {
      grayscaler then
      siftHellinger then
      FloatToDouble then
      constructTemplateInteractions(siftSamplesFiltered, conf, Some("sift-features"))
    }

    val trainingFeatures = featurizedKitchenSink(trainParsed)
    val testFeatures = featurizedKitchenSink(testParsed)
    (trainingFeatures, testFeatures)
  }
/**
 * Computes LCS-based kitchen-sink features for training and test images.
 *
 * Mirrors [[getSiftFeatures]] but without the norm-threshold filter; the LCS
 * sample matrix is fed to constructTemplateInteractions to build the
 * featurizer applied to both datasets.
 *
 * NOTE(review): an LCSExtractor is constructed twice (once for sampling, once
 * inside the featurizer) — presumably the nodes are stateless so this is
 * equivalent to reuse; confirm before deduplicating.
 *
 * @param conf pipeline configuration
 * @param trainParsed training images
 * @param testParsed test images
 * @return (training features, test features)
 */
def getLcsFeatures(
  conf: ImageNetSiftLcsInteractionTermsConfig,
  trainParsed: RDD[Image],
  testParsed: RDD[Image]): (RDD[DenseVector[Double]], RDD[DenseVector[Double]]) = {
  val numImgs = trainParsed.count.toInt
  val lcsFeatures = {
    new LCSExtractor(conf.lcsStride, conf.lcsBorder, conf.lcsPatch)
  }.apply(trainParsed)
  val lcsSamples = new ColumnSampler(conf.numZcaSamples, Some(numImgs)).apply(
    lcsFeatures)
  // Collected to the driver; conf.numZcaSamples bounds memory use.
  val lcsSamplesAll = lcsSamples.collect().map(x => convert(x, Double))
  // Keep at most 1M shuffled sample rows for the ZCA/KMeans fit.
  val lcsSamplesMat = MatrixUtils.rowsToMatrix(
    MatrixUtils.shuffleArray(lcsSamplesAll).take(1e6.toInt))
  val featurizedKitchenSink = {
    new LCSExtractor(conf.lcsStride, conf.lcsBorder, conf.lcsPatch) then
      FloatToDouble then
      constructTemplateInteractions(lcsSamplesMat, conf, Some("lcs-features"))
  }
  val trainingFeatures = featurizedKitchenSink(trainParsed)
  val testFeatures = featurizedKitchenSink(testParsed)
  (trainingFeatures, testFeatures)
}
/**
 * End-to-end driver: loads ImageNet train/test data, extracts SIFT and LCS
 * kitchen-sink features, and then either dumps features/labels to
 * conf.featuresSaveDir or trains and evaluates a block-weighted least
 * squares model.
 */
def run(sc: SparkContext, conf: ImageNetSiftLcsInteractionTermsConfig) {
  // Load the data and extract training labels.
  val parsedRDD = ImageNetLoader(
    sc,
    conf.trainLocation,
    conf.labelPath).cache().setName("trainData")
  // NOTE(review): `_.filename.get` assumes every loaded image has a
  // filename — confirm against ImageNetLoader.
  val filenamesRDD = parsedRDD.map(_.filename.get)
  val labelGrabber = LabelExtractor then
    ClassLabelIndicatorsFromIntLabels(ImageNetLoader.NUM_CLASSES) then
    new Cacher[DenseVector[Double]]
  val trainingLabels = labelGrabber(parsedRDD)
  // Force materialization so labels are cached before featurization starts.
  trainingLabels.count
  // Load test data and get actual labels
  val testParsedRDD = ImageNetLoader(
    sc,
    conf.testLocation,
    conf.labelPath).cache().setName("testData")
  val testActual = (labelGrabber then TopKClassifier(1)).apply(testParsedRDD)
  val testFilenamesRDD = testParsedRDD.map(_.filename.get)
  val trainParsedImgs = (ImageExtractor).apply(parsedRDD)
  val testParsedImgs = (ImageExtractor).apply(testParsedRDD)
  // Get SIFT + FV features
  val (trainSift, testSift) = getSiftFeatures(conf, trainParsedImgs, testParsedImgs)
  // Get LCS + FV features
  val (trainLcs, testLcs) = getLcsFeatures(conf, trainParsedImgs, testParsedImgs)
  // Concatenate the two feature families element-wise.
  val trainingFeatures = ZipVectors(Seq(trainSift, trainLcs))
  val testFeatures = ZipVectors(Seq(testSift, testLcs))
  // When a save dir is configured we only dump features/labels to disk...
  conf.featuresSaveDir.foreach { dir =>
    makeDoubleArrayCsv(filenamesRDD, trainingFeatures.map(_.toArray)).saveAsTextFile(dir + "/featuresTrain")
    makeDoubleArrayCsv(testFilenamesRDD, testFeatures.map(_.toArray)).saveAsTextFile(dir + "/featuresTest")
    makeDoubleArrayCsv(filenamesRDD, trainingLabels.map(_.toArray)).saveAsTextFile(dir + "/trainLabels")
    makeDoubleArrayCsv(testFilenamesRDD, testActual).saveAsTextFile(dir + "/testActual")
  }
  // ...otherwise we train and evaluate the model.
  if (conf.featuresSaveDir.isEmpty) {
    trainingFeatures.count
    val numTestImgs = testFeatures.count
    // Fit a weighted least squares model to the data.
    val model = new BlockWeightedLeastSquaresEstimator(
      4096, 1, conf.lambda, conf.mixtureWeight).fit(
      trainingFeatures, trainingLabels, Some(2 * conf.numKMeans * conf.numGaussianRandomFeatures))
    // Apply the model to test data and compute test error
    val testPredictedValues = model(testFeatures)
    val testPredicted = TopKClassifier(5).apply(testPredictedValues)
    logInfo("TEST Error is " + Stats.getErrPercent(testPredicted, testActual, numTestImgs) + "%")
  }
}
/**
 * Command-line configuration for the ImageNet SIFT/LCS interaction-terms
 * pipeline. Defaults match the values previously hard-coded here; all fields
 * are overridable via [[parse]].
 *
 * @param trainLocation path to training data
 * @param testLocation path to test data
 * @param labelPath path to the label file
 * @param lambda regularization strength for the least-squares solver
 * @param mixtureWeight mixture weight for the weighted solver
 * @param numGaussianRandomFeatures number of random gaussian features
 * @param numKMeans number of KMeans centers
 * @param siftScaleStep SIFT scale step
 * @param lcsStride LCS stride
 * @param lcsBorder LCS border
 * @param lcsPatch LCS patch size
 * @param numZcaSamples number of column samples used to fit ZCA
 * @param siftThreshold minimum L2 norm for a SIFT sample to be kept
 * @param featuresSaveDir when set, features are dumped here and no model is trained
 */
case class ImageNetSiftLcsInteractionTermsConfig(
  trainLocation: String = "",
  testLocation: String = "",
  labelPath: String = "",
  lambda: Double = 6e-5,
  mixtureWeight: Double = 0.25,
  numGaussianRandomFeatures: Int = 160,
  numKMeans: Int = 256,
  siftScaleStep: Int = 1,
  lcsStride: Int = 4,
  lcsBorder: Int = 16,
  lcsPatch: Int = 6,
  numZcaSamples: Int = 1e7.toInt,
  // `1e-3` is already a Double literal; the former `.toDouble` was a no-op.
  siftThreshold: Double = 1e-3,
  featuresSaveDir: Option[String] = None)
/**
 * Parses command line arguments into an [[ImageNetSiftLcsInteractionTermsConfig]].
 *
 * NOTE(review): `.get` on the scopt parse result throws
 * NoSuchElementException when arguments are invalid (after scopt has printed
 * its usage text) — consider a graceful failure instead.
 */
def parse(args: Array[String]): ImageNetSiftLcsInteractionTermsConfig = {
  new OptionParser[ImageNetSiftLcsInteractionTermsConfig](appName) {
    head(appName, "0.1")
    help("help") text("prints this usage text")
    // Required I/O locations
    opt[String]("trainLocation") required() action { (x,c) => c.copy(trainLocation=x) }
    opt[String]("testLocation") required() action { (x,c) => c.copy(testLocation=x) }
    opt[String]("labelPath") required() action { (x,c) => c.copy(labelPath=x) }
    // Solver params
    opt[Double]("lambda") action { (x,c) => c.copy(lambda=x) }
    opt[Double]("mixtureWeight") action { (x,c) => c.copy(mixtureWeight=x) }
    // PCA, GMM params
    opt[Int]("numGaussianRandomFeatures") action { (x,c) => c.copy(numGaussianRandomFeatures=x) }
    opt[Int]("numKMeans") action { (x,c) => c.copy(numKMeans=x) }
    opt[Int]("numZcaSamples") action { (x,c) => c.copy(numZcaSamples=x) }
    opt[Double]("siftThreshold") action { (x,c) => c.copy(siftThreshold=x) }
    // SIFT, LCS params
    opt[Int]("siftScaleStep") action { (x,c) => c.copy(siftScaleStep=x) }
    opt[Int]("lcsStride") action { (x,c) => c.copy(lcsStride=x) }
    opt[Int]("lcsBorder") action { (x,c) => c.copy(lcsBorder=x) }
    opt[Int]("lcsPatch") action { (x,c) => c.copy(lcsPatch=x) }
    opt[String]("featuresSaveDir") action { (x, c) => c.copy(featuresSaveDir=Some(x)) }
  }.parse(args, ImageNetSiftLcsInteractionTermsConfig()).get
}
/**
 * Entry point. Receives its configuration parameters from spark-submit,
 * builds the SparkContext, runs the pipeline, and always releases the
 * context — even when the run fails.
 *
 * @param args command line arguments, parsed by [[parse]]
 */
def main(args: Array[String]) = {
  val appConfig = parse(args)
  val conf = new SparkConf().setAppName(appName)
  conf.setIfMissing("spark.master", "local[2]")
  val sc = new SparkContext(conf)
  // try/finally guarantees sc.stop() runs even if run(...) throws;
  // previously a failed run would leak the SparkContext.
  try {
    run(sc, appConfig)
  } finally {
    sc.stop()
  }
}
}
| shivaram/keystone | src/main/scala/pipelines/images/imagenet/ImageNetSiftLCSInteractionTerms.scala | Scala | apache-2.0 | 10,215 |
/** Verification benchmark exercising chained `require` preconditions. */
object Requires {

  /** Accepts x in [0, 100]; the second precondition involves a derived value. */
  def f(x: BigInt): Unit = {
    require(0 <= x && x <= 100)
    val shifted = x + 2
    require(shifted * shifted >= x + 40)
  }

  /** Calls f with an argument satisfying both preconditions. */
  def g: Unit = {
    f(10)
  }
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/Requires.scala | Scala | apache-2.0 | 167 |
package uk.gov.bis.taxserviceMock.data
import scala.concurrent.{ExecutionContext, Future}
// A stubbed Government Gateway user record for the mock tax service.
// NOTE(review): require2SV presumably flags whether 2-step verification is
// required for this user — confirm against consumers of this mock.
case class GatewayUser(gatewayID: String, password: String, empref: Option[String], name: String, require2SV: Option[Boolean])
/** Lookup and credential-check operations over [[GatewayUser]] records. */
trait GatewayUserOps {
  /** Finds the user with the given gateway ID, if any. */
  def forGatewayID(gatewayID: String)(implicit ec: ExecutionContext): Future[Option[GatewayUser]]
  /** Finds the user associated with the given empref, if any. */
  def forEmpref(empref: String)(implicit ec: ExecutionContext): Future[Option[GatewayUser]]
  /** Checks credentials; presumably yields the user only when gatewayID and password match — confirm in implementations. */
  def validate(gatewayID: String, password: String)(implicit ec: ExecutionContext): Future[Option[GatewayUser]]
}
| WellFactored/das-alpha-taxservice-mock | src/main/scala/uk.gov/bis/taxserviceMock/data/GatewayUserOps.scala | Scala | mit | 549 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.spec.beforeandafter
import org.scalatest.Spec
import org.scalatest.BeforeAndAfter
import collection.mutable.ListBuffer
// Example suite demonstrating BeforeAndAfter: `before` runs ahead of every
// test and `after` runs following every test, resetting the shared fixtures.
class ExampleSpec extends Spec with BeforeAndAfter {

  // Mutable fixtures shared by all tests in this suite; cleared in `after`.
  val builder = new StringBuilder
  val buffer = new ListBuffer[String]

  before {
    builder.append("ScalaTest is ")
  }

  after {
    builder.clear()
    buffer.clear()
  }

  object `Testing ` {
    def `should be easy` {
      builder.append("easy!")
      assert(builder.toString === "ScalaTest is easy!")
      assert(buffer.isEmpty)
      // Mutate the buffer so the next test proves `after` cleared it.
      buffer += "sweet"
    }

    def `should be fun` {
      builder.append("fun!")
      assert(builder.toString === "ScalaTest is fun!")
      assert(buffer.isEmpty)
    }
  }
}
| travisbrown/scalatest | examples/src/main/scala/org/scalatest/examples/spec/beforeandafter/ExampleSpec.scala | Scala | apache-2.0 | 1,337 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.utils.python.api
import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap}
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dllib.feature.dataset.{Identity => DIdentity, Sample => JSample, _}
import com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.nn.{PGCriterion, Sequential, Zeros, _}
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, _}
import com.intel.analytics.bigdl.numeric._
import com.intel.analytics.bigdl.dllib.optim.{Optimizer, _}
import com.intel.analytics.bigdl.dllib.tensor.{DenseType, SparseType, Storage, Tensor}
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.{Table, _}
import com.intel.analytics.bigdl.dllib.visualization.{Summary, TrainSummary, ValidationSummary}
import com.intel.analytics.bigdl.dllib.utils._
import org.apache.spark.api.java.{JavaRDD, JavaSparkContext}
import org.apache.spark.rdd.RDD
import java.lang.{Boolean => JBoolean}
import java.nio.ByteOrder
import com.intel.analytics.bigdl.dllib.feature.dataset.image.{CropCenter, CropRandom, CropperMethod}
import com.intel.analytics.bigdl.dllib.nn.Graph._
import com.intel.analytics.bigdl.dllib.nn.internal.{KerasLayer, KerasModel}
import com.intel.analytics.bigdl.dllib.optim.SGD.{LearningRateSchedule, SequentialSchedule}
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image._
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.augmentation._
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.label.roi._
import com.intel.analytics.bigdl.dllib.feature.transform.vision.image.opencv.OpenCVMat
import com.intel.analytics.bigdl.dllib.utils.tf.TensorflowDataFormat
import com.intel.analytics.bigdl.dllib.utils.tf.TensorflowLoader.parse
import com.intel.analytics.bigdl.dllib.utils.tf._
import org.apache.logging.log4j.core.config.Configurator
import org.apache.logging.log4j.{Level, LogManager}
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.opencv.imgproc.Imgproc
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.language.existentials
import scala.reflect.ClassTag
/**
 * [[com.intel.analytics.bigdl.dllib.feature.dataset.Sample]] for python.
 * @param features feature tensors
 * @param labels label tensors
 * @param bigdlType bigdl numeric type name (e.g. "float" or "double")
 */
case class Sample(features: JList[JTensor],
  labels: JList[JTensor],
  bigdlType: String)
/**
 * Python-facing tensor representation: flat float storage plus a shape.
 * `indices` is non-null only for sparse tensors (see toTensor/toJTensor).
 */
case class JTensor(storage: Array[Float], shape: Array[Int],
  bigdlType: String, indices: Array[Array[Int]] = null)
/** Thin wrapper carrying a BigDL [[Activity]] across the python boundary. */
case class JActivity(value: Activity)
/**
 * [[ValidationResult]] for python.
 * @param result metric value of the evaluation
 * @param totalNum total number of samples evaluated
 * @param method validation method name
 */
case class EvaluatedResult(val result: Float, totalNum: Int, method: String)
// Factory entry points used by the python side to obtain a typed API instance.
object PythonBigDL {
  def ofFloat(): PythonBigDL[Float] = new PythonBigDL[Float]()
  def ofDouble(): PythonBigDL[Double] = new PythonBigDL[Double]()
}
/**
* Implementation of Python API for BigDL
*/
class PythonBigDL[T: ClassTag](implicit ev: TensorNumeric[T]) extends Serializable {
// Simple name of T's runtime class (e.g. "float" or "double"); used to tag
// and validate JTensor/Sample instances exchanged with python (see toTensor).
private val typeName = {
  val cls = implicitly[ClassTag[T]].runtimeClass
  cls.getSimpleName
}
/**
 * Recursively converts a (possibly nested) java list into a BigDL [[Table]].
 * [[JTensor]] elements become tensors; anything else is treated as a nested
 * list (as before, a non-list element fails with ClassCastException).
 */
private def toTable(input: JList[_ <: Object]): Table = {
  // Pattern match replaces the former isInstanceOf/asInstanceOf pair.
  input.asScala.foldLeft(new Table()) { (table, element) =>
    element match {
      case tensor: JTensor => table.insert(toTensor(tensor))
      case nested => table.insert(toTable(nested.asInstanceOf[JList[Object]]))
    }
  }
}
/**
 * Converts a python tensor list into an [[Activity]]: a [[Table]] when
 * isTable is set, otherwise just the first tensor of the list.
 */
def jTensorsToActivity(input: JList[_ <: Object], isTable: Boolean): Activity = {
  if (input.isEmpty) {
    throw new IllegalArgumentException("Empty input")
  }
  if (!isTable) {
    val first = input.asInstanceOf[JList[JTensor]].iterator().next()
    toTensor(first)
  } else {
    toTable(input)
  }
}
/**
 * Converts an [[Activity]] back into python tensors. A Table is flattened in
 * ascending key order; an EmptyGradInput yields an empty list.
 * Branch order (Tensor, Table, EmptyGradInput) is preserved from the
 * original isInstanceOf chain.
 */
def activityToJTensors(outputActivity: Activity): JList[JTensor] = {
  outputActivity match {
    case _: Tensor[_] =>
      List(toJTensor(outputActivity.toTensor)).asJava
    case table: Table =>
      table.getState().toList.map {
        pair => (pair._1.asInstanceOf[Int], toJTensor(pair._2.asInstanceOf[Tensor[T]]))
      }.sortWith(_._1 < _._1).map(pair => pair._2).asJava
    case _: EmptyGradInput =>
      List[JTensor]().asJava
    case other =>
      throw new UnsupportedOperationException(s"Activity type" +
        s"(${other.getClass.getName}) not support")
  }
}
/** Wraps a JVM [[JSample]] as a python-facing [[Sample]] (one feature, one label). */
def toPySample(sample: JSample[T]): Sample = {
  val runtimeClass = implicitly[ClassTag[T]].runtimeClass
  val featureList = new JArrayList[JTensor]()
  featureList.add(toJTensor(sample.feature()))
  val labelList = new JArrayList[JTensor]()
  labelList.add(toJTensor(sample.label()))
  Sample(featureList, labelList, runtimeClass.getSimpleName)
}
/**
 * Converts a python-facing [[JTensor]] into a BigDL [[Tensor]].
 * Returns null for null input. A non-null `indices` field selects the
 * sparse-tensor path; a null/empty shape yields an empty dense tensor.
 * Only "float" and "double" numeric types are supported.
 */
def toTensor(jTensor: JTensor): Tensor[T] = {
  if (jTensor == null) return null
  this.typeName match {
    case "float" =>
      if (null == jTensor.indices) {
        if (jTensor.shape == null || jTensor.shape.length == 0) {
          Tensor()
        } else {
          Tensor(jTensor.storage.map(x => ev.fromType(x)), jTensor.shape)
        }
      } else {
        // Sparse tensor: indices + values + shape.
        Tensor.sparse(jTensor.indices, jTensor.storage.map(x => ev.fromType(x)), jTensor.shape)
      }
    case "double" =>
      if (null == jTensor.indices) {
        if (jTensor.shape == null || jTensor.shape.length == 0) {
          Tensor()
        } else {
          Tensor(jTensor.storage.map(x => ev.fromType(x.toDouble)), jTensor.shape)
        }
      } else {
        Tensor.sparse(jTensor.indices,
          jTensor.storage.map(x => ev.fromType(x.toDouble)), jTensor.shape)
      }
    case t: String =>
      throw new IllegalArgumentException(s"Not supported type: ${t}")
  }
}
/**
 * Converts a BigDL [[Tensor]] into a python-facing [[JTensor]].
 * Sparse tensors are densified first (see note below); dense tensors are
 * cloned so the emitted storage matches the tensor's element count.
 * @throws IllegalArgumentException for null tensors or unsupported types
 */
def toJTensor(tensor: Tensor[T]): JTensor = {
  // clone here in case the the size of storage larger then the size of tensor.
  require(tensor != null, "tensor cannot be null")
  tensor.getTensorType match {
    case SparseType =>
      // Note: as SparseTensor's indices is inaccessible here,
      // so we will transfer it to DenseTensor. Just for testing.
      if (tensor.nElement() == 0) {
        JTensor(Array(), Array(0), bigdlType = typeName)
      } else {
        val cloneTensor = Tensor.dense(tensor)
        val result = JTensor(cloneTensor.storage().array().map(i => ev.toType[Float](i)),
          cloneTensor.size(), bigdlType = typeName)
        result
      }
    case DenseType =>
      if (tensor.nElement() == 0) {
        // Distinguish a scalar-less empty tensor from a zero-sized shaped one.
        if (tensor.dim() == 0) {
          JTensor(null, null, bigdlType = typeName)
        } else {
          JTensor(Array(), tensor.size(), bigdlType = typeName)
        }
      } else {
        val cloneTensor = tensor.clone()
        val result = JTensor(cloneTensor.storage().array().map(i => ev.toType[Float](i)),
          cloneTensor.size(), bigdlType = typeName)
        result
      }
    case _ =>
      throw new IllegalArgumentException(s"toJTensor: Unsupported tensor type" +
        s" ${tensor.getTensorType}")
  }
}
/** Round-trips a [[JTensor]] through Tensor and back; used to test conversion. */
def testTensor(jTensor: JTensor): JTensor = toJTensor(toTensor(jTensor))
/** Round-trips a python [[Sample]] through JSample and back; used to test conversion. */
def testSample(sample: Sample): Sample = toPySample(toJSample(sample))
/** Converts a python [[Sample]] into a JVM [[JSample]], checking the numeric type tag. */
def toJSample(record: Sample): JSample[T] = {
  require(record.bigdlType == this.typeName,
    s"record.bigdlType: ${record.bigdlType} == this.typeName: ${this.typeName}")
  val featureTensors = record.features.asScala.toArray.map(toTensor(_))
  val labelTensors = record.labels.asScala.toArray.map(toTensor(_))
  JSample[T](featureTensors, labelTensors)
}
/** Converts an RDD of python [[Sample]]s into JVM [[JSample]]s element-wise. */
def toJSample(psamples: RDD[Sample]): RDD[JSample[T]] = {
  psamples.map(sample => toJSample(sample))
}
/**
 * Splits batched tensors into per-element samples. The first dimension is
 * batch for both X and y. When y is null, every sample gets a shared dummy
 * label tensor filled with 1 (as before).
 *
 * @param Xs feature tensors, each with batch as the first dimension
 * @param y optional label tensor with the same batch dimension
 * @return one JSample per batch element
 */
def toSampleArray(Xs: List[Tensor[T]], y: Tensor[T] = null): Array[JSample[T]] = {
  require(Xs.nonEmpty, "Xs should not be empty")
  val totalNum = Xs.head.size()(0)
  // Select labels lazily per index; the dummy label is created once.
  val labelFor: Int => Tensor[T] =
    if (y != null) {
      require(totalNum == y.size()(0),
        s"The batch dim should be equal, but we got: ${Xs(0).size()(0)} vs ${y.size()(0)}")
      i => y.select(1, i)
    } else {
      val dummyTensor = Tensor[T](1).fill(ev.fromType(1))
      _ => dummyTensor
    }
  // Tensor batch indices are 1-based; replaces the former mutable while loops.
  (1 to totalNum).map { i =>
    JSample(Xs.map { x => x.select(1, i) }.toArray, labelFor(i))
  }.toArray
}
/** Wraps a Sample dataset into mini-batches of the given size. */
def batching(dataset: DataSet[JSample[T]], batchSize: Int)
: DataSet[MiniBatch[T]] = {
  dataset -> SampleToMiniBatch[T](batchSize)
}
// Applies shared optimizer settings: the end trigger and the per-layer optim
// methods, then disables the singleton check (legacy workaround, see TODO).
private def enrichOptimizer[T](
  optimizer: Optimizer[T, MiniBatch[T]],
  endTrigger: Trigger,
  optimMethod: Map[String, OptimMethod[T]]): Optimizer[T, MiniBatch[T]] = {
  optimizer.setEndWhen(endTrigger)
  optimizer.setOptimMethods(optimMethod)
  // TODO: remove this
  optimizer.disableCheckSingleton()
  optimizer
}
// --- Python-facing factory methods --------------------------------------
// Each create* method below is a thin 1:1 wrapper around the corresponding
// BigDL layer/criterion constructor so the python API can build modules.

def createSequential(): Container[Activity, Activity, T] = {
  Sequential[T]()
}

def toGraph(sequential: Sequential[T]): StaticGraph[T] = {
  sequential.toGraph().asInstanceOf[StaticGraph[T]]
}

def createAttention(hiddenSize: Int, numHeads: Int, attentionDropout: Float): Attention[T] = {
  Attention(hiddenSize, numHeads, attentionDropout)
}

def createFeedForwardNetwork(hiddenSize: Int,
    filterSize: Int, reluDropout: Float): FeedForwardNetwork[T] = {
  FeedForwardNetwork(hiddenSize, filterSize, reluDropout)
}

def createExpandSize(targetSizes: JList[Int]): ExpandSize[T] = {
  ExpandSize(targetSizes.asScala.toArray)
}

def createTableOperation(
    operationLayer: AbstractModule[Table, Tensor[T], T]): TableOperation[T] = {
  new TableOperation(operationLayer)
}

def createLayerNormalization(hiddenSize: Int): LayerNormalization[T] = {
  new LayerNormalization[T](hiddenSize)
}

def createTransformer(
    vocabSize: Int,
    hiddenSize: Int,
    numHeads: Int,
    filterSize: Int,
    numHiddenlayers: Int,
    postprocessDropout: Double,
    attentionDropout: Double,
    reluDropout: Double): nn.Transformer[T] = {
  // python passes doubles; the Transformer constructor takes float rates
  Transformer(vocabSize, hiddenSize, numHeads,
    filterSize, numHiddenlayers, postprocessDropout.toFloat,
    attentionDropout.toFloat, reluDropout.toFloat)
}

def createLinear(inputSize: Int, outputSize: Int,
    withBias: Boolean,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null,
    initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null): Linear[T] = {
  Linear[T](inputSize, outputSize, withBias, wRegularizer, bRegularizer,
    toTensor(initWeight), toTensor(initBias), toTensor(initGradWeight), toTensor(initGradBias))
}

def createSparseLinear(inputSize: Int, outputSize: Int,
    withBias: Boolean,
    backwardStart: Int = -1,
    backwardLength: Int = -1,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null,
    initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null): SparseLinear[T] = {
  SparseLinear[T](inputSize, outputSize, withBias, backwardStart, backwardLength,
    wRegularizer, bRegularizer, toTensor(initWeight), toTensor(initBias),
    toTensor(initGradWeight), toTensor(initGradBias))
}

def createNegative(inplace: Boolean): Negative[T] = {
  Negative[T](inplace)
}

def createDenseToSparse(): DenseToSparse[T] = {
  DenseToSparse[T]()
}

def createReLU(ip: Boolean = false): ReLU[T] = {
  ReLU[T](ip)
}

def createTanh(): Tanh[T] = {
  Tanh[T]()
}

def createTimeDistributed(layer: TensorModule[T]): TimeDistributed[T] = {
  TimeDistributed[T](layer)
}

def createSpatialWithinChannelLRN(size: Int = 5, alpha: Double = 1.0, beta: Double = 0.75)
  : SpatialWithinChannelLRN[T] = {
  SpatialWithinChannelLRN[T](size, alpha, beta)
}

def createRnnCell(inputSize: Int,
    hiddenSize: Int,
    activation: TensorModule[T],
    isInputWithBias: Boolean = true,
    isHiddenWithBias: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): RnnCell[T] = {
  RnnCell[T](inputSize,
    hiddenSize,
    activation,
    isInputWithBias,
    isHiddenWithBias,
    wRegularizer,
    uRegularizer,
    bRegularizer)
}

def createTimeDistributedMaskCriterion(critrn: TensorCriterion[T],
    paddingValue: Int = 0): TimeDistributedMaskCriterion[T] = {
  TimeDistributedMaskCriterion[T](critrn, paddingValue)
}

def createTimeDistributedCriterion(critrn: TensorCriterion[T],
    sizeAverage: Boolean = false, dimension: Int = 2): TimeDistributedCriterion[T] = {
  TimeDistributedCriterion[T](critrn, sizeAverage, dimension)
}

def createGRU(
    inputSize: Int,
    outputSize: Int,
    p: Double = 0,
    activation: TensorModule[T] = null,
    innerActivation: TensorModule[T] = null,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): GRU[T] = {
  GRU[T](inputSize, outputSize, p, activation, innerActivation,
    wRegularizer, uRegularizer, bRegularizer)
}

def createLSTM(
    inputSize: Int,
    hiddenSize: Int,
    p: Double = 0,
    activation: TensorModule[T] = null,
    innerActivation: TensorModule[T] = null,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): LSTM[T] = {
  LSTM[T](inputSize, hiddenSize, p, activation, innerActivation,
    wRegularizer, uRegularizer, bRegularizer)
}

def createLSTMPeephole(
    inputSize: Int,
    hiddenSize: Int,
    p: Double = 0,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): LSTMPeephole[T] = {
  LSTMPeephole[T](inputSize, hiddenSize, p, wRegularizer, uRegularizer, bRegularizer)
}

def createRecurrent(): Recurrent[T] = {
  Recurrent[T]()
}

def createRecurrentDecoder(outputLength: Int): RecurrentDecoder[T] = {
  RecurrentDecoder[T](outputLength)
}

def createConvLSTMPeephole(
    inputSize: Int,
    outputSize: Int,
    kernelI: Int,
    kernelC: Int,
    stride: Int = 1,
    padding: Int = -1,
    activation: TensorModule[T] = null,
    innerActivation: TensorModule[T] = null,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    cRegularizer: Regularizer[T] = null,
    withPeephole: Boolean = true): ConvLSTMPeephole[T] = {
  ConvLSTMPeephole[T](inputSize, outputSize, kernelI, kernelC,
    stride, padding, activation, innerActivation,
    wRegularizer, uRegularizer, bRegularizer, cRegularizer, withPeephole)
}

def createConvLSTMPeephole3D(
    inputSize: Int,
    outputSize: Int,
    kernelI: Int,
    kernelC: Int,
    stride: Int = 1,
    padding: Int = -1,
    wRegularizer: Regularizer[T] = null,
    uRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    cRegularizer: Regularizer[T] = null,
    withPeephole: Boolean = true): ConvLSTMPeephole3D[T] = {
  ConvLSTMPeephole3D[T](inputSize, outputSize, kernelI, kernelC, stride, padding,
    wRegularizer, uRegularizer, bRegularizer, cRegularizer, withPeephole)
}

def createEcho(): Echo[T] = {
  Echo[T]()
}

def createLogSoftMax(): LogSoftMax[T] = {
  LogSoftMax[T]()
}

def createTemporalMaxPooling(
    kW: Int,
    dW: Int)
  : TemporalMaxPooling[T] = {
  TemporalMaxPooling[T](
    kW,
    dW)
}
/**
 * Creates a [[SpatialMaxPooling]] layer; switches it to ceil rounding when
 * ceilMode is set.
 */
def createSpatialMaxPooling(kW: Int,
    kH: Int,
    dW: Int,
    dH: Int,
    padW: Int = 0,
    padH: Int = 0,
    ceilMode: Boolean = false,
    format: String = "NCHW")
  : SpatialMaxPooling[T] = {
  val pooling = SpatialMaxPooling[T](kW, kH, dW, dH, padW, padH,
    format = DataFormat(format))
  if (ceilMode) pooling.ceil() else pooling
}
// Convolution-family factories: 1:1 wrappers converting python JTensors into
// BigDL tensors for the optional initial weight/bias/gradient arguments.

def createLocallyConnected2D(
    nInputPlane: Int,
    inputWidth: Int,
    inputHeight: Int,
    nOutputPlane: Int,
    kernelW: Int,
    kernelH: Int,
    strideW: Int = 1,
    strideH: Int = 1,
    padW: Int = 0,
    padH: Int = 0,
    propagateBack: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null,
    initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null,
    withBias: Boolean = true,
    dataFormat: String = "NCHW"): LocallyConnected2D[T] = {
  LocallyConnected2D[T](
    nInputPlane,
    inputWidth,
    inputHeight,
    nOutputPlane,
    kernelW,
    kernelH,
    strideW,
    strideH,
    padW,
    padH,
    propagateBack,
    wRegularizer,
    bRegularizer,
    toTensor(initWeight),
    toTensor(initBias),
    toTensor(initGradWeight),
    toTensor(initGradBias),
    withBias,
    DataFormat(dataFormat)
  )
}

def createSpatialConvolution(nInputPlane: Int,
    nOutputPlane: Int,
    kernelW: Int,
    kernelH: Int,
    strideW: Int = 1,
    strideH: Int = 1,
    padW: Int = 0,
    padH: Int = 0,
    nGroup: Int = 1,
    propagateBack: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null,
    initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null,
    withBias: Boolean = true,
    dataFormat: String = "NCHW"
    )
  : SpatialConvolution[T] = {
  SpatialConvolution[T](nInputPlane,
    nOutputPlane,
    kernelW,
    kernelH,
    strideW,
    strideH,
    padW,
    padH,
    nGroup,
    propagateBack,
    wRegularizer,
    bRegularizer,
    toTensor(initWeight),
    toTensor(initBias),
    toTensor(initGradWeight),
    toTensor(initGradBias),
    withBias,
    DataFormat(dataFormat)
  )
}

def createSpatialSeparableConvolution(
    nInputChannel: Int,
    nOutputChannel: Int,
    depthMultiplier: Int,
    kW: Int,
    kH: Int,
    sW: Int = 1,
    sH: Int = 1,
    pW: Int = 0,
    pH: Int = 0,
    withBias: Boolean = true,
    dataFormat: String = "NCHW",
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    pRegularizer: Regularizer[T] = null
    )
  : SpatialSeparableConvolution[T] = {
  SpatialSeparableConvolution[T](nInputChannel,
    nOutputChannel,
    depthMultiplier,
    kW,
    kH,
    sW,
    sH,
    pW,
    pH,
    withBias,
    DataFormat(dataFormat),
    wRegularizer,
    bRegularizer,
    pRegularizer
  )
}
/**
 * Creates a [[Reshape]] layer. The java Boolean tri-state (TRUE/FALSE/null)
 * is mapped to Some(true)/Some(false)/None respectively.
 */
def createReshape(size: JList[Int], batchMode: JBoolean = null): Reshape[T] = {
  val mappedBatchMode: Option[Boolean] =
    if (batchMode == JBoolean.TRUE) Some(true)
    else if (batchMode == JBoolean.FALSE) Some(false)
    else None
  Reshape(size.asScala.toArray, mappedBatchMode)
}
// Concat / average-pooling factories: 1:1 wrappers around BigDL constructors.

def createConcat(dimension: Int): Concat[T] = {
  Concat[T](dimension)
}

def createSpatialAveragePooling(kW: Int,
    kH: Int,
    dW: Int = 1,
    dH: Int = 1,
    padW: Int = 0,
    padH: Int = 0,
    globalPooling: Boolean = false,
    ceilMode: Boolean = false,
    countIncludePad: Boolean = true,
    divide: Boolean = true,
    format: String = "NCHW")
  : SpatialAveragePooling[T] = {
  SpatialAveragePooling[T](kW, kH, dW, dH, padW, padH, globalPooling,
    ceilMode, countIncludePad, divide, format = DataFormat(format))
}
/**
 * Creates a [[SpatialBatchNormalization]] layer.
 *
 * Fix: the gradient-bias slot previously received `toTensor(initBias)` a
 * second time, so a python-supplied `initGradBias` was silently ignored; it
 * now forwards `toTensor(initGradBias)` as intended (matching
 * [[createBatchNormalization]]).
 */
def createSpatialBatchNormalization(nOutput: Int,
    eps: Double = 1e-5,
    momentum: Double = 0.1,
    affine: Boolean = true,
    initWeight: JTensor = null,
    initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null, dataFormat: String = "NCHW")
  : SpatialBatchNormalization[T] = {
  SpatialBatchNormalization[T](nOutput, eps, momentum, affine,
    toTensor(initWeight), toTensor(initBias), toTensor(initGradWeight), toTensor(initGradBias),
    DataFormat(dataFormat)
  )
}
def createSpatialCrossMapLRN(size: Int = 5,
alpha: Double = 1.0,
beta: Double = 0.75,
k: Double = 1.0,
dataFormat: String = "NCHW")
: SpatialCrossMapLRN[T] = {
SpatialCrossMapLRN[T](size, alpha, beta, k, DataFormat(dataFormat))
}
def createDropout(initP: Double = 0.5,
inplace: Boolean = false,
scale: Boolean = true)
: Dropout[T] = {
Dropout[T](initP, inplace, scale)
}
def createGaussianDropout(rate: Double)
: GaussianDropout[T] = {
GaussianDropout[T](rate)
}
def createGaussianNoise(stddev: Double)
: GaussianNoise[T] = {
GaussianNoise[T](stddev)
}
def createView(sizes: JList[Int], num_input_dims: Int = 0): View[T] = {
View[T](sizes.asScala.toArray).setNumInputDims(num_input_dims)
}
def createAbs()
: Abs[T] = {
Abs[T]()
}
def createAdd(inputSize: Int)
: Add[T] = {
Add[T](inputSize)
}
def createAddConstant(constant_scalar: Double,
inplace: Boolean = false)
: AddConstant[T] = {
AddConstant[T](constant_scalar,
inplace)
}
def createBatchNormalization(nOutput: Int,
eps: Double = 1e-5,
momentum: Double = 0.1,
affine: Boolean = true,
initWeight: JTensor = null,
initBias: JTensor = null,
initGradWeight: JTensor = null,
initGradBias: JTensor = null)
: BatchNormalization[T] = {
BatchNormalization[T](nOutput,
eps,
momentum,
affine,
toTensor(initWeight),
toTensor(initBias),
toTensor(initGradWeight),
toTensor(initGradBias))
}
def createBilinear(inputSize1: Int,
inputSize2: Int,
outputSize: Int,
biasRes: Boolean = true,
wRegularizer: Regularizer[T] = null,
bRegularizer: Regularizer[T] = null)
: Bilinear[T] = {
Bilinear[T](inputSize1,
inputSize2,
outputSize,
biasRes,
wRegularizer,
bRegularizer)
}
def createBottle(module: AbstractModule[Activity, Activity, T],
nInputDim: Int = 2,
nOutputDim1: Int = Int.MaxValue)
: Bottle[T] = {
Bottle[T](module,
nInputDim,
nOutputDim1)
}
def createCAdd(size: JList[Int],
bRegularizer: Regularizer[T] = null)
: CAdd[T] = {
CAdd[T](size.asScala.toArray, bRegularizer)
}
def createCAddTable(inplace: Boolean = false)
: CAddTable[T, T] = {
CAddTable[T](inplace)
}
def createCAveTable(inplace: Boolean = false)
: CAveTable[T] = {
CAveTable[T](inplace)
}
def createCDivTable()
: CDivTable[T] = {
CDivTable[T]()
}
def createCMaxTable()
: CMaxTable[T] = {
CMaxTable[T]()
}
def createCMinTable()
: CMinTable[T] = {
CMinTable[T]()
}
def createCMul(size: JList[Int],
wRegularizer: Regularizer[T] = null)
: CMul[T] = {
CMul[T](size.asScala.toArray, wRegularizer)
}
def createCMulTable()
: CMulTable[T] = {
CMulTable[T]()
}
def createCSubTable()
: CSubTable[T] = {
CSubTable[T]()
}
def createClamp(min: Int,
max: Int)
: Clamp[T] = {
Clamp[T](min,
max)
}
def createContiguous()
: Contiguous[T] = {
Contiguous[T]()
}
def createCosine(inputSize: Int,
outputSize: Int)
: Cosine[T] = {
Cosine[T](inputSize,
outputSize)
}
def createCosineDistance()
: CosineDistance[T] = {
CosineDistance[T]()
}
def createCosineDistanceCriterion(sizeAverage: Boolean = true)
: CosineDistanceCriterion[T] = {
CosineDistanceCriterion[T](sizeAverage)
}
def createCrossProduct(numTensor: Int = 0,
embeddingSize: Int = 0)
: CrossProduct[T] = {
CrossProduct[T](numTensor, embeddingSize)
}
// --- Simple layer & criterion factories exposed to Python via py4j -------

/** Creates a Dice-coefficient loss. */
def createDiceCoefficientCriterion(sizeAverage: Boolean = true,
    epsilon: Float = 1.0f): DiceCoefficientCriterion[T] =
  DiceCoefficientCriterion[T](sizeAverage, epsilon)

def createDotProduct(): DotProduct[T] = DotProduct[T]()

def createELU(alpha: Double = 1.0, inplace: Boolean = false): ELU[T] =
  ELU[T](alpha, inplace)

def createEuclidean(inputSize: Int, outputSize: Int,
    fastBackward: Boolean = true): Euclidean[T] =
  Euclidean[T](inputSize, outputSize, fastBackward)

def createExp(): Exp[T] = Exp[T]()

def createFlattenTable(): FlattenTable[T] = FlattenTable[T]()

def createGradientReversal(lambda: Double = 1): GradientReversal[T] =
  GradientReversal[T](lambda)

def createHardShrink(lambda: Double = 0.5): HardShrink[T] = HardShrink[T](lambda)

def createHardTanh(minValue: Double = -1, maxValue: Double = 1,
    inplace: Boolean = false): HardTanh[T] =
  HardTanh[T](minValue, maxValue, inplace)

def createIndex(dimension: Int): Index[T] = Index[T](dimension)

// Python sends sizes as a Java list; convert to the Array the layer expects.
def createInferReshape(size: JList[Int], batchMode: Boolean = false): InferReshape[T] =
  InferReshape[T](size.asScala.toArray, batchMode)

def createJoinTable(dimension: Int, nInputDims: Int): JoinTable[T] =
  JoinTable[T](dimension, nInputDims)

def createSparseJoinTable(dimension: Int): SparseJoinTable[T] =
  SparseJoinTable[T](dimension)

def createL1Cost(): L1Cost[T] = L1Cost[T]()

def createUpSampling1D(length: Int): UpSampling1D[T] = UpSampling1D(length)

def createUpSampling2D(size: JList[Int], dataFormat: String): UpSampling2D[T] =
  UpSampling2D(size.asScala.toArray, DataFormat(dataFormat))

def createL1Penalty(l1weight: Int, sizeAverage: Boolean = false,
    provideOutput: Boolean = true): L1Penalty[T] =
  L1Penalty[T](l1weight, sizeAverage, provideOutput)

def createNegativeEntropyPenalty(beta: Double): NegativeEntropyPenalty[T] =
  NegativeEntropyPenalty(beta)

def createLeakyReLU(negval: Double = 0.01, inplace: Boolean = false): LeakyReLU[T] =
  LeakyReLU[T](negval, inplace)

def createLog(): Log[T] = Log[T]()

def createLogSigmoid(): LogSigmoid[T] = LogSigmoid[T]()
def createLookupTable(nIndex: Int, nOutput: Int,
    paddingValue: Double = 0, maxNorm: Double = Double.MaxValue,
    normType: Double = 2.0, shouldScaleGradByFreq: Boolean = false,
    wRegularizer: Regularizer[T] = null): LookupTable[T] =
  LookupTable[T](nIndex, nOutput, paddingValue, maxNorm, normType,
    shouldScaleGradByFreq, wRegularizer)

def createLookupTableSparse(nIndex: Int, nOutput: Int,
    combiner: String = "sum", maxNorm: Double = -1,
    wRegularizer: Regularizer[T] = null): LookupTableSparse[T] =
  LookupTableSparse[T](nIndex, nOutput, combiner, maxNorm, wRegularizer)

def createMM(transA: Boolean = false, transB: Boolean = false): MM[T] =
  MM[T](transA, transB)

def createMV(trans: Boolean = false): MV[T] = MV[T](trans)

def createMapTable(module: AbstractModule[Activity, Activity, T] = null): MapTable[T] =
  MapTable[T](module)

def createMaskedSelect(): MaskedSelect[T] = MaskedSelect[T]()

def createMax(dim: Int = 1, numInputDims: Int = Int.MinValue): Max[T] =
  Max[T](dim, numInputDims)

def createMean(dimension: Int = 1, nInputDims: Int = -1,
    squeeze: Boolean = true): Mean[T] =
  Mean[T](dimension, nInputDims, squeeze)

def createMin(dim: Int = 1, numInputDims: Int = Int.MinValue): Min[T] =
  Min[T](dim, numInputDims)

def createMixtureTable(dim: Int = Int.MaxValue): MixtureTable[T] = MixtureTable[T](dim)

def createMul(): Mul[T] = Mul[T]()

def createMulConstant(scalar: Double, inplace: Boolean = false): MulConstant[T] =
  MulConstant[T](scalar, inplace)

def createNarrow(dimension: Int, offset: Int, length: Int = 1): Narrow[T] =
  Narrow[T](dimension, offset, length)

def createNarrowTable(offset: Int, length: Int = 1): NarrowTable[T] =
  NarrowTable[T](offset, length)

def createNormalize(p: Double, eps: Double = 1e-10): Normalize[T] =
  Normalize[T](p, eps)

def createPReLU(nOutputPlane: Int = 0): PReLU[T] = PReLU[T](nOutputPlane)
def createSReLU(shape: JArrayList[Int], shareAxes: JArrayList[Int] = null): SReLU[T] = {
  // shareAxes is optional on the Python side; a missing value stays null.
  val axes: Array[Int] = Option(shareAxes).map(_.asScala.toArray).orNull
  SReLU[T](shape.asScala.toArray, axes)
}

def createActivityRegularization(l1: Double, l2: Double): ActivityRegularization[T] =
  ActivityRegularization[T](l1, l2)

def createPadding(dim: Int, pad: Int, nInputDim: Int, value: Double = 0.0,
    nIndex: Int = 1): Padding[T] =
  Padding[T](dim, pad, nInputDim, value, nIndex)

def createPairwiseDistance(norm: Int = 2): PairwiseDistance[T] =
  PairwiseDistance[T](norm)

def createParallelTable(): ParallelTable[T] = ParallelTable[T]()

def createPower(power: Double, scale: Double = 1, shift: Double = 0): Power[T] =
  Power[T](power, scale, shift)

def createRReLU(lower: Double = 1.0 / 8, upper: Double = 1.0 / 3,
    inplace: Boolean = false): RReLU[T] =
  RReLU[T](lower, upper, inplace)

def createReLU6(inplace: Boolean = false): ReLU6[T] = ReLU6[T](inplace)

def createReplicate(nFeatures: Int, dim: Int = 1,
    nDim: Int = Int.MaxValue): Replicate[T] =
  Replicate[T](nFeatures, dim, nDim)

// spatial_scale arrives as a Double; convert to the numeric type T via ev.
def createRoiPooling(pooled_w: Int, pooled_h: Int,
    spatial_scale: Double): RoiPooling[T] =
  RoiPooling[T](pooled_w, pooled_h, ev.fromType(spatial_scale))

def createRoiAlign(spatial_scale: Double, sampling_ratio: Int, pooled_h: Int,
    pooled_w: Int): RoiAlign[T] =
  RoiAlign[T](spatial_scale.toFloat, sampling_ratio, pooled_h, pooled_w)
def createFPN(in_channels_list: JList[Int], out_channels: Int,
    top_blocks: Int = 0, in_channels_of_p6p7: Int = 0,
    out_channels_of_p6p7: Int = 0): FPN[T] =
  FPN[T](in_channels_list.asScala.toArray, out_channels, top_blocks,
    in_channels_of_p6p7, out_channels_of_p6p7)

def createPooler(resolution: Int, scales: JList[Double],
    sampling_ratio: Int): Pooler[T] =
  Pooler[T](resolution, scales.asScala.toArray.map(_.toFloat), sampling_ratio)

def createScale(size: JList[Int]): Scale[T] = Scale[T](size.asScala.toArray)

def createSelect(dimension: Int, index: Int): Select[T] = Select[T](dimension, index)

def createSelectTable(dimension: Int): SelectTable[T] = SelectTable[T](dimension)

def createSequenceBeamSearch(vocabSize: Int, beamSize: Int, alpha: Float,
    decodeLength: Int, eosId: Float, paddingValue: Float,
    numHiddenLayers: Int, hiddenSize: Int): SequenceBeamSearch[T] =
  SequenceBeamSearch[T](vocabSize, beamSize, alpha, decodeLength, eosId,
    paddingValue, numHiddenLayers, hiddenSize)

def createSigmoid(): Sigmoid[T] = Sigmoid[T]()

def createSoftMax(): SoftMax[T] = SoftMax[T]()

def createSoftMin(): SoftMin[T] = SoftMin[T]()

def createSoftPlus(beta: Double = 1.0): SoftPlus[T] = SoftPlus[T](beta)

def createSoftShrink(lambda: Double = 0.5): SoftShrink[T] = SoftShrink[T](lambda)

def createSoftSign(): SoftSign[T] = SoftSign[T]()

def createSpatialDropout1D(initP: Double = 0.5): SpatialDropout1D[T] =
  SpatialDropout1D[T](initP)

def createSpatialDropout2D(initP: Double = 0.5,
    dataFormat: String = "NCHW"): SpatialDropout2D[T] =
  SpatialDropout2D[T](initP, DataFormat(dataFormat))

def createSpatialDropout3D(initP: Double = 0.5,
    dataFormat: String = "NCHW"): SpatialDropout3D[T] =
  SpatialDropout3D[T](initP, DataFormat(dataFormat))
def createSpatialDilatedConvolution(nInputPlane: Int, nOutputPlane: Int,
    kW: Int, kH: Int, dW: Int = 1, dH: Int = 1, padW: Int = 0, padH: Int = 0,
    dilationW: Int = 1, dilationH: Int = 1,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): SpatialDilatedConvolution[T] =
  SpatialDilatedConvolution[T](nInputPlane, nOutputPlane, kW, kH, dW, dH,
    padW, padH, dilationW, dilationH, wRegularizer, bRegularizer)

def createTemporalConvolution(inputFrameSize: Int, outputFrameSize: Int,
    kernelW: Int, strideW: Int = 1, propagateBack: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null, initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null): TemporalConvolution[T] =
  TemporalConvolution[T](inputFrameSize, outputFrameSize, kernelW, strideW,
    propagateBack, wRegularizer, bRegularizer, toTensor(initWeight),
    toTensor(initBias), toTensor(initGradWeight), toTensor(initGradBias))

def createLocallyConnected1D(nInputFrame: Int, inputFrameSize: Int,
    outputFrameSize: Int, kernelW: Int, strideW: Int = 1,
    propagateBack: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null, initBias: JTensor = null,
    initGradWeight: JTensor = null,
    initGradBias: JTensor = null): LocallyConnected1D[T] =
  LocallyConnected1D[T](nInputFrame, inputFrameSize, outputFrameSize, kernelW,
    strideW, propagateBack, wRegularizer, bRegularizer, toTensor(initWeight),
    toTensor(initBias), toTensor(initGradWeight), toTensor(initGradBias))

def createBinaryTreeLSTM(inputSize: Int, hiddenSize: Int,
    gateOutput: Boolean = true, withGraph: Boolean = true): BinaryTreeLSTM[T] =
  BinaryTreeLSTM[T](inputSize, hiddenSize, gateOutput, withGraph)
def createVolumetricFullConvolution(nInputPlane: Int, nOutputPlane: Int,
    kT: Int, kW: Int, kH: Int, dT: Int = 1, dW: Int = 1, dH: Int = 1,
    padT: Int = 0, padW: Int = 0, padH: Int = 0,
    adjT: Int = 0, adjW: Int = 0, adjH: Int = 0,
    nGroup: Int = 1, noBias: Boolean = false,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): VolumetricFullConvolution[T] =
  VolumetricFullConvolution[T](nInputPlane, nOutputPlane, kT, kW, kH, dT, dW,
    dH, padT, padW, padH, adjT, adjW, adjH, nGroup, noBias,
    wRegularizer, bRegularizer)

def createSpatialFullConvolution(nInputPlane: Int, nOutputPlane: Int,
    kW: Int, kH: Int, dW: Int = 1, dH: Int = 1, padW: Int = 0, padH: Int = 0,
    adjW: Int = 0, adjH: Int = 0, nGroup: Int = 1, noBias: Boolean = false,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): SpatialFullConvolution[T] =
  SpatialFullConvolution[T](nInputPlane, nOutputPlane, kW, kH, dW, dH,
    padW, padH, adjW, adjH, nGroup, noBias, wRegularizer, bRegularizer)

def createSpatialShareConvolution(nInputPlane: Int, nOutputPlane: Int,
    kernelW: Int, kernelH: Int, strideW: Int = 1, strideH: Int = 1,
    padW: Int = 0, padH: Int = 0, nGroup: Int = 1,
    propagateBack: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null,
    initWeight: JTensor = null, initBias: JTensor = null,
    initGradWeight: JTensor = null, initGradBias: JTensor = null,
    withBias: Boolean = true): SpatialShareConvolution[T] =
  SpatialShareConvolution[T](nInputPlane, nOutputPlane, kernelW, kernelH,
    strideW, strideH, padW, padH, nGroup, propagateBack,
    wRegularizer, bRegularizer, toTensor(initWeight), toTensor(initBias),
    toTensor(initGradWeight), toTensor(initGradBias), withBias)

def createSpatialZeroPadding(padLeft: Int, padRight: Int, padTop: Int,
    padBottom: Int): SpatialZeroPadding[T] =
  SpatialZeroPadding[T](padLeft, padRight, padTop, padBottom)

def createBifurcateSplitTable(dimension: Int): BifurcateSplitTable[T] =
  BifurcateSplitTable[T](dimension)
def createSplitTable(dimension: Int, nInputDims: Int = -1): SplitTable[T] =
  SplitTable[T](dimension, nInputDims)

def createSqrt(): Sqrt[T] = Sqrt[T]()

def createSquare(): Square[T] = Square[T]()

def createSqueeze(dim: Int = Int.MinValue,
    numInputDims: Int = Int.MinValue): Squeeze[T] =
  Squeeze[T](dim, numInputDims)

def createSum(dimension: Int = 1, nInputDims: Int = -1,
    sizeAverage: Boolean = false, squeeze: Boolean = true): Sum[T] =
  Sum[T](dimension, nInputDims, sizeAverage, squeeze)

def createTanhShrink(): TanhShrink[T] = TanhShrink[T]()

def createThreshold(th: Double = 1e-6, v: Double = 0.0,
    ip: Boolean = false): Threshold[T] =
  Threshold[T](th, v, ip)

def createUnsqueeze(pos: JList[Int],
    numInputDims: Int = Int.MinValue): Unsqueeze[T] =
  Unsqueeze[T](pos.asScala.toArray, numInputDims)

// Weight tensors are optional on the Python side; null passes straight through.
def createBCECriterion(weights: JTensor = null,
    sizeAverage: Boolean = true): BCECriterion[T] =
  BCECriterion[T](Option(weights).map(w => toTensor(w)).orNull, sizeAverage)

def createBiRecurrent(
    merge: AbstractModule[Table, Tensor[T], T] = null): BiRecurrent[T] =
  BiRecurrent[T](merge)

def createConcatTable(): ConcatTable[T] = ConcatTable[Activity, T]()

def createIdentity(): Identity[T] = Identity[T]()

def createGaussianSampler(): GaussianSampler[T] = GaussianSampler[T]()

def createMultiLabelSoftMarginCriterion(weights: JTensor = null,
    sizeAverage: Boolean = true): MultiLabelSoftMarginCriterion[T] =
  MultiLabelSoftMarginCriterion[T](Option(weights).map(w => toTensor(w)).orNull,
    sizeAverage)

def createMultiMarginCriterion(p: Int = 1, weights: JTensor = null,
    margin: Double = 1.0, sizeAverage: Boolean = true): MultiMarginCriterion[T] =
  MultiMarginCriterion[T](p, Option(weights).map(w => toTensor(w)).orNull,
    margin, sizeAverage)

def createReverse(dimension: Int = 1, isInplace: Boolean = false): Reverse[T] =
  Reverse[T](dimension, isInplace)

// Each inner Java list is a [from, to] pair of dimensions to swap.
def createTranspose(permutations: JList[JList[Int]]): Transpose[T] =
  Transpose[T](permutations.asScala.toArray.map { pair =>
    (pair.get(0), pair.get(1))
  })
def createSpatialContrastiveNormalization(nInputPlane: Int = 1,
    kernel: JTensor = null, threshold: Double = 1e-4,
    thresval: Double = 1e-4): SpatialContrastiveNormalization[T] =
  SpatialContrastiveNormalization[T](nInputPlane,
    Option(kernel).map(k => toTensor(k)).orNull, threshold, thresval)

def createSpatialConvolutionMap(connTable: JTensor, kW: Int, kH: Int,
    dW: Int = 1, dH: Int = 1, padW: Int = 0, padH: Int = 0,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): SpatialConvolutionMap[T] =
  SpatialConvolutionMap[T](Option(connTable).map(c => toTensor(c)).orNull,
    kW, kH, dW, dH, padW, padH, wRegularizer, bRegularizer)

def createVolumetricConvolution(nInputPlane: Int, nOutputPlane: Int,
    kT: Int, kW: Int, kH: Int, dT: Int = 1, dW: Int = 1, dH: Int = 1,
    padT: Int = 0, padW: Int = 0, padH: Int = 0, withBias: Boolean = true,
    wRegularizer: Regularizer[T] = null,
    bRegularizer: Regularizer[T] = null): VolumetricConvolution[T] =
  VolumetricConvolution[T](nInputPlane, nOutputPlane, kT, kW, kH, dT, dW, dH,
    padT, padW, padH, withBias, wRegularizer, bRegularizer)

def createVolumetricMaxPooling(kT: Int, kW: Int, kH: Int,
    dT: Int, dW: Int, dH: Int,
    padT: Int = 0, padW: Int = 0, padH: Int = 0): VolumetricMaxPooling[T] =
  VolumetricMaxPooling[T](kT, kW, kH, dT, dW, dH, padT, padW, padH)

def createVolumetricAveragePooling(kT: Int, kW: Int, kH: Int,
    dT: Int, dW: Int, dH: Int,
    padT: Int = 0, padW: Int = 0, padH: Int = 0,
    countIncludePad: Boolean = true,
    ceilMode: Boolean = false): VolumetricAveragePooling[T] =
  VolumetricAveragePooling[T](kT, kW, kH, dT, dW, dH, padT, padW, padH,
    countIncludePad, ceilMode)

def createSpatialDivisiveNormalization(nInputPlane: Int = 1,
    kernel: JTensor = null, threshold: Double = 1e-4,
    thresval: Double = 1e-4): SpatialDivisiveNormalization[T] =
  SpatialDivisiveNormalization[T](nInputPlane,
    Option(kernel).map(k => toTensor(k)).orNull, threshold, thresval)

def createSpatialSubtractiveNormalization(nInputPlane: Int = 1,
    kernel: JTensor = null): SpatialSubtractiveNormalization[T] =
  SpatialSubtractiveNormalization[T](nInputPlane,
    Option(kernel).map(k => toTensor(k)).orNull)

def createSoftMarginCriterion(sizeAverage: Boolean = true): SoftMarginCriterion[T] =
  SoftMarginCriterion[T](sizeAverage)

def createCategoricalCrossEntropy(): CategoricalCrossEntropy[T] =
  CategoricalCrossEntropy[T]()
// --- Optimizer: SGD learning-rate schedule factories ----------------------

def createPoly(power: Double, maxIteration: Int): SGD.Poly =
  SGD.Poly(power, maxIteration)

def createStep(stepSize: Int, gamma: Double): SGD.Step = SGD.Step(stepSize, gamma)

def createMultiStep(stepSizes: JList[Int], gamma: Double): SGD.MultiStep =
  SGD.MultiStep(stepSizes.asScala.toArray, gamma)

def createExponential(decayStep: Int, decayRate: Double,
    stairCase: Boolean = false): SGD.Exponential =
  SGD.Exponential(decayStep, decayRate, stairCase)

def createDefault(): SGD.Default = SGD.Default()

def createPlateau(monitor: String, factor: Float = 0.1f,
    patience: Int = 10, mode: String = "min", epsilon: Float = 1e-4f,
    cooldown: Int = 0, minLr: Float = 0): SGD.Plateau =
  SGD.Plateau(monitor, factor, patience, mode, epsilon, cooldown, minLr)

def createWarmup(delta: Double): SGD.Warmup = SGD.Warmup(delta)

def createSequentialSchedule(iterationPerEpoch: Int): SGD.SequentialSchedule =
  SGD.SequentialSchedule(iterationPerEpoch)
def createClassNLLCriterion(weights: JTensor = null,
    sizeAverage: Boolean = true,
    logProbAsInput: Boolean = true): ClassNLLCriterion[T] =
  ClassNLLCriterion[T](Option(weights).map(w => toTensor(w)).orNull,
    sizeAverage, logProbAsInput)

def createMSECriterion: MSECriterion[T] = MSECriterion[T]()

def createAbsCriterion(sizeAverage: Boolean = true): AbsCriterion[T] =
  AbsCriterion[T](sizeAverage)

def createClassSimplexCriterion(nClasses: Int): ClassSimplexCriterion[T] =
  ClassSimplexCriterion[T](nClasses)

def createCrossEntropyCriterion(weights: JTensor = null,
    sizeAverage: Boolean = true): CrossEntropyCriterion[T] =
  new CrossEntropyCriterion[T](Option(weights).map(w => toTensor(w)).orNull,
    sizeAverage)

def createCosineEmbeddingCriterion(margin: Double = 0.0,
    sizeAverage: Boolean = true): CosineEmbeddingCriterion[T] =
  CosineEmbeddingCriterion[T](margin, sizeAverage)

def createDistKLDivCriterion(sizeAverage: Boolean = true): DistKLDivCriterion[T] =
  DistKLDivCriterion[T](sizeAverage)

def createHingeEmbeddingCriterion(margin: Double = 1,
    sizeAverage: Boolean = true): HingeEmbeddingCriterion[T] =
  HingeEmbeddingCriterion[T](margin, sizeAverage)

def createL1HingeEmbeddingCriterion(
    margin: Double = 1): L1HingeEmbeddingCriterion[T] =
  L1HingeEmbeddingCriterion[T](margin)

def createMarginCriterion(margin: Double = 1.0,
    sizeAverage: Boolean = true, squared: Boolean = false): MarginCriterion[T] =
  MarginCriterion[T](margin, sizeAverage, squared)

def createMarginRankingCriterion(margin: Double = 1.0,
    sizeAverage: Boolean = true): MarginRankingCriterion[T] =
  MarginRankingCriterion[T](margin, sizeAverage)

def createMultiCriterion(): MultiCriterion[T] = MultiCriterion[T]()

def createMultiLabelMarginCriterion(
    sizeAverage: Boolean = true): MultiLabelMarginCriterion[T] =
  MultiLabelMarginCriterion[T](sizeAverage)

def createParallelCriterion(repeatTarget: Boolean = false): ParallelCriterion[T] =
  ParallelCriterion[T](repeatTarget)

def createKLDCriterion(sizeAverage: Boolean): KLDCriterion[T] =
  KLDCriterion[T](sizeAverage)

def createGaussianCriterion(): GaussianCriterion[T] = GaussianCriterion[T]()

def createSmoothL1Criterion(sizeAverage: Boolean = true): SmoothL1Criterion[T] =
  SmoothL1Criterion[T](sizeAverage)

def createSmoothL1CriterionWithWeights(sigma: Double,
    num: Int = 0): SmoothL1CriterionWithWeights[T] =
  SmoothL1CriterionWithWeights[T](sigma, num)
/**
 * Creates a SoftmaxWithCriterion loss.
 *
 * @param ignoreLabel optional label to exclude from the loss (null = none).
 * @param normalizeMode one of "FULL", "VALID", "BATCH_SIZE", "NONE".
 * @throws IllegalArgumentException for an unrecognized normalizeMode.
 */
def createSoftmaxWithCriterion(ignoreLabel: Integer = null,
    normalizeMode: String = "VALID")
  : SoftmaxWithCriterion[T] = {
  val normM = normalizeMode match {
    case "FULL" => NormMode.FULL
    case "VALID" => NormMode.VALID
    case "BATCH_SIZE" => NormMode.BATCH_SIZE
    case "NONE" => NormMode.NONE
    case n: String =>
      throw new IllegalArgumentException(s"Only support 'FULL', " +
        s"'VALID', 'BATCH_SIZE' and 'NONE': $n")
  }
  // Option(null) == None, replacing the explicit `case null` pattern match.
  val labelToIgnore = Option(ignoreLabel).map(_.toInt)
  SoftmaxWithCriterion[T](labelToIgnore, normM)
}
def createTransformerCriterion(
    criterion: AbstractCriterion[Activity, Activity, T],
    inputTransformer: AbstractModule[Activity, Activity, T] = null,
    targetTransformer: AbstractModule[Activity, Activity, T] = null
  ): TransformerCriterion[T] =
  // Option(...) maps the optional (possibly null) transformers onto None.
  TransformerCriterion(criterion, Option(inputTransformer),
    Option(targetTransformer))

def createDotProductCriterion(
    sizeAverage: Boolean = false): DotProductCriterion[T] =
  DotProductCriterion[T](sizeAverage)

def createPGCriterion(sizeAverage: Boolean = false): PGCriterion[T] =
  PGCriterion(sizeAverage)

def createPack(dimension: Int): Pack[T] = Pack(dimension)

def createTile(dim: Int, copies: Int): Tile[T] = Tile(dim, copies)

def createBinaryThreshold(th: Double, ip: Boolean): BinaryThreshold[T] =
  BinaryThreshold(th, ip)

/** Seeds BigDL's global random generator for reproducible runs. */
def setModelSeed(seed: Long): Unit = RandomGenerator.RNG.setSeed(seed)
/** Evaluates `model` over an RDD of samples and marshals the metric results. */
def modelEvaluate(model: AbstractModule[Activity, Activity, T],
    valRDD: JavaRDD[Sample],
    batchSize: Int,
    valMethods: JList[ValidationMethod[T]]): JList[EvaluatedResult] = {
  val results = model.evaluate(valRDD.rdd.map(toJSample(_)),
    valMethods.asScala.toArray, Some(batchSize))
  results.map { case (res, method) =>
    EvaluatedResult(res.result()._1, res.result()._2, method.toString())
  }.toList.asJava
}

/** Same as [[modelEvaluate]] but sourced from an ImageFrame. */
def modelEvaluateImageFrame(model: AbstractModule[Activity, Activity, T],
    imageFrame: ImageFrame,
    batchSize: Int,
    valMethods: JList[ValidationMethod[T]]): JList[EvaluatedResult] = {
  val results = model.evaluateImage(imageFrame,
    valMethods.asScala.toArray, Some(batchSize))
  results.map { case (res, method) =>
    EvaluatedResult(res.result()._1, res.result()._2, method.toString())
  }.toList.asJava
}
// --- Model loaders --------------------------------------------------------

def loadBigDL(path: String): AbstractModule[Activity, Activity, T] =
  Module.load[T](path)

def loadBigDLModule(modulePath: String,
    weightPath: String): AbstractModule[Activity, Activity, T] =
  Module.loadModule[T](modulePath, weightPath)

def loadTorch(path: String): AbstractModule[Activity, Activity, T] =
  Module.loadTorch[T](path)

def loadCaffe(model: AbstractModule[Activity, Activity, T],
    defPath: String,
    modelPath: String,
    matchAll: Boolean = true): AbstractModule[Activity, Activity, T] =
  Module.loadCaffe[T](model, defPath, modelPath, matchAll)

def loadCaffeModel(defPath: String,
    modelPath: String): AbstractModule[Activity, Activity, T] =
  Module.loadCaffeModel[T](defPath, modelPath)
/**
 * Loads a TensorFlow graph as a BigDL module.
 *
 * @param byteOrder "little_endian" or "big_endian"; matched case-insensitively
 *                  for consistency with `saveTF` (previously only lowercase
 *                  input was accepted here while `saveTF` lowercased first).
 * @throws IllegalArgumentException for an unrecognized byte order.
 */
def loadTF(path: String, inputs: JList[String], outputs: JList[String],
    byteOrder: String, binFile: String = null,
    generatedBackward: Boolean = true): AbstractModule[Activity, Activity, T] = {
  val order = byteOrder.toLowerCase match {
    case "little_endian" => ByteOrder.LITTLE_ENDIAN
    case "big_endian" => ByteOrder.BIG_ENDIAN
    case _ => throw new IllegalArgumentException(s"No support byte order $byteOrder")
  }
  Module.loadTF[T](path, inputs.asScala, outputs.asScala, order,
    Option(binFile), generatedBackward)
}
/** Exports `model` in TensorFlow format with the requested byte order/layout. */
def saveTF(model: AbstractModule[Activity, Activity, T],
    inputs: JList[Any],
    path: String,
    byteOrder: String,
    dataFormat: String): Unit = {
  val order = byteOrder.toLowerCase match {
    case "little_endian" => ByteOrder.LITTLE_ENDIAN
    case "big_endian" => ByteOrder.BIG_ENDIAN
    case _ => throw new IllegalArgumentException(s"Unknown byte order $byteOrder")
  }
  val format = dataFormat.toLowerCase match {
    case "nhwc" => TensorflowDataFormat.NHWC
    case "nchw" => TensorflowDataFormat.NCHW
    case _ => throw new IllegalArgumentException(s"Unknown format $dataFormat")
  }
  // Each py4j input is a [name, shape] pair; unpack to (String, Seq[Int]).
  val scalaInputs = inputs.asScala.map { pair =>
    val fields = pair.asInstanceOf[JList[Any]]
    (fields.get(0).asInstanceOf[String],
      fields.get(1).asInstanceOf[JList[Int]].asScala)
  }
  model.saveTF(scalaInputs, path, order, format)
}
/** Runs local (non-distributed) prediction over a list of input tensors. */
def predictLocal(model: AbstractModule[Activity, Activity, T],
    features: JList[JTensor], batchSize: Int = -1): JList[JTensor] = {
  val samples = toSampleArray(features.asScala.toList.map(f => toTensor(f)))
  val predictor =
    if (batchSize > 0) {
      // Spread the requested global batch across cores, but never below 1.
      val perCore = math.max(1, batchSize / Engine.coreNumber())
      LocalPredictor(model, batchPerCore = perCore)
    } else {
      LocalPredictor(model)
    }
  predictor.predict(samples)
    .map(r => toJTensor(r.asInstanceOf[Tensor[T]]))
    .toList.asJava
}

/** Local prediction returning the arg-max class per sample. */
def predictLocalClass(model: AbstractModule[Activity, Activity, T],
    features: JList[JTensor]): JList[Int] = {
  val samples = toSampleArray(features.asScala.toList.map(f => toTensor(f)))
  LocalPredictor(model).predictClass(samples).toList.asJava
}
def modelPredictRDD(model: AbstractModule[Activity, Activity, T],
    dataRdd: JavaRDD[Sample], batchSize: Int = -1): JavaRDD[JTensor] = {
  val predictions = model.predict(dataRdd.rdd.map(toJSample(_)), batchSize)
  // Clone detaches each result from any shared storage before conversion.
  val jTensors = predictions.map { activity =>
    toJTensor(activity.asInstanceOf[Tensor[T]].clone())
  }
  new JavaRDD[JTensor](jTensors)
}

def modelPredictImage(model: AbstractModule[Activity, Activity, T],
    imageFrame: ImageFrame,
    featLayerName: String,
    shareBuffer: Boolean,
    batchPerPartition: Int,
    predictKey: String): ImageFrame =
  model.predictImage(imageFrame, featLayerName, shareBuffer,
    batchPerPartition, predictKey)

/** Switches the module into evaluation (inference) mode. */
def evaluate(module: AbstractModule[Activity, Activity, T])
  : AbstractModule[Activity, Activity, T] =
  module.evaluate()

def modelPredictClass(model: AbstractModule[Activity, Activity, T],
    dataRdd: JavaRDD[Sample]): JavaRDD[Int] =
  new JavaRDD[Int](model.predictClass(toJSample(dataRdd)))

def modelForward(model: AbstractModule[Activity, Activity, T],
    input: JList[_ <: Object],
    inputIsTable: Boolean): JList[JTensor] =
  activityToJTensors(model.forward(jTensorsToActivity(input, inputIsTable)))

def modelBackward(model: AbstractModule[Activity, Activity, T],
    input: JList[_ <: Object],
    inputIsTable: Boolean,
    gradOutput: JList[_ <: Object],
    gradOutputIsTable: Boolean): JList[JTensor] = {
  val in = jTensorsToActivity(input, inputIsTable)
  val gradOut = jTensorsToActivity(gradOutput, gradOutputIsTable)
  activityToJTensors(model.backward(in, gradOut))
}

def modelSave(module: AbstractModule[Activity, Activity, T],
    path: String, overWrite: Boolean): Unit =
  module.save(path, overWrite)

def saveBigDLModule(module: AbstractModule[Activity, Activity, T],
    modulePath: String, weightPath: String, overWrite: Boolean): Unit =
  module.saveModule(modulePath, weightPath, overWrite)

def saveCaffe(module: AbstractModule[Activity, Activity, T],
    prototxtPath: String, modelPath: String,
    useV2: Boolean = true, overwrite: Boolean = false): Unit =
  module.saveCaffe(prototxtPath, modelPath, useV2, overwrite)
/**
 * Computes the criterion's forward loss for py4j-marshalled input/target.
 *
 * @param inputIsTable/targetIsTable whether the lists encode a Table or a
 *        single Tensor (see jTensorsToActivity).
 */
def criterionForward(criterion: AbstractCriterion[Activity, Activity, T],
    input: JList[_ <: Object],
    inputIsTable: Boolean,
    target: JList[_ <: Object],
    targetIsTable: Boolean): T = {
  val inputActivity = jTensorsToActivity(input, inputIsTable)
  val targetActivity = jTensorsToActivity(target, targetIsTable)
  // The last expression is the method's value; the explicit `return` was
  // redundant and non-idiomatic Scala.
  criterion.forward(inputActivity, targetActivity)
}
/** Computes the criterion's gradient w.r.t. its input for py4j callers. */
def criterionBackward(criterion: AbstractCriterion[Activity, Activity, T],
    input: JList[_ <: Object],
    inputIsTable: Boolean,
    target: JList[_ <: Object],
    targetIsTable: Boolean): JList[JTensor] = {
  val in = jTensorsToActivity(input, inputIsTable)
  val tgt = jTensorsToActivity(target, targetIsTable)
  activityToJTensors(criterion.backward(in, tgt))
}
/**
 * Exposes the model's parameter table to Python as nested Java maps:
 * layer name -> parameter name -> [flattened storage, size] lists.
 * NOTE(review): the inner matches are non-exhaustive by design — entries
 * that are not a Table/Tensor would raise a MatchError; confirm upstream
 * only ever stores those types here.
 */
def modelGetParameters(model: AbstractModule[Activity, Activity, T])
  : JMap[Any, JMap[Any, JList[JList[Any]]]] = {
  model.getParametersTable().getState().mapValues {
    case name2Values: Table =>
      name2Values.getState().mapValues {
        case t: Tensor[T] =>
          // Clone so the marshalled storage is decoupled from live weights.
          val tensorClone = t.clone()
          val item = List(tensorClone.storage().toList.asJava.asInstanceOf[JList[Any]],
            tensorClone.size().toList.asJava.asInstanceOf[JList[Any]]).asJava
          item
      }.asJava
  }.asJava
}
// --- Training triggers and validation metrics -----------------------------

def createMaxEpoch(max: Int): Trigger = Trigger.maxEpoch(max)

def createEveryEpoch(): Trigger = Trigger.everyEpoch

def createSeveralIteration(interval: Int): Trigger =
  Trigger.severalIteration(interval)

def createMaxIteration(max: Int): Trigger = Trigger.maxIteration(max)

def createMaxScore(max: Float): Trigger = Trigger.maxScore(max)

def createMinLoss(min: Float): Trigger = Trigger.minLoss(min)

def createTriggerAnd(first: Trigger, others: JList[Trigger]): Trigger =
  Trigger.and(first, others.asScala: _*)

def createTriggerOr(first: Trigger, others: JList[Trigger]): Trigger =
  Trigger.or(first, others.asScala: _*)

def createTop1Accuracy(): ValidationMethod[T] = new Top1Accuracy()

def createHitRatio(k: Int = 10, negNum: Int = 100): ValidationMethod[T] =
  new HitRatio(k, negNum)

def createNDCG(k: Int = 10, negNum: Int = 100): ValidationMethod[T] =
  new NDCG(k, negNum)

def createTreeNNAccuracy(): ValidationMethod[T] = new TreeNNAccuracy()

def createTop5Accuracy(): ValidationMethod[T] = new Top5Accuracy()

def createMeanAveragePrecision(k: Int, classes: Int): ValidationMethod[T] =
  new MeanAveragePrecision(k, classes)

def createMeanAveragePrecisionObjectDetection(classes: Int, iou: Float,
    useVoc2007: Boolean, skipClass: Int): ValidationMethod[T] =
  new MeanAveragePrecisionObjectDetection(classes, iouThres = Array(iou),
    theType = if (useVoc2007) MAPPascalVoc2007 else MAPPascalVoc2010,
    skipClass = skipClass)

def createLoss(criterion: Criterion[T]): ValidationMethod[T] = new Loss(criterion)

def createMAE(): ValidationMethod[T] = new MAE()
// NOTE(review): `leaningRateSchedule` is a typo for "learningRateSchedule",
// but it is part of the py4j-facing signature, so it is kept for compatibility.
def createSGD(learningRate: Double = 1e-3,
    learningRateDecay: Double = 0.0,
    weightDecay: Double = 0.0,
    momentum: Double = 0.0,
    dampening: Double = Double.MaxValue,
    nesterov: Boolean = false,
    leaningRateSchedule: SGD.LearningRateSchedule = SGD.Default(),
    learningRates: JTensor = null,
    weightDecays: JTensor = null): SGD[T] = {
  val lrs = Option(learningRates).map(t => toTensor(t)).orNull
  val wds = Option(weightDecays).map(t => toTensor(t)).orNull
  new SGD[T](learningRate, learningRateDecay, weightDecay, momentum,
    dampening, nesterov, leaningRateSchedule, lrs, wds)
}
def createAdagrad(learningRate: Double = 1e-3,
    learningRateDecay: Double = 0.0,
    weightDecay: Double = 0.0): Adagrad[T] =
  new Adagrad[T](learningRate, learningRateDecay, weightDecay)

def createLBFGS(maxIter: Int = 20,
    maxEval: Double = Double.MaxValue,
    tolFun: Double = 1e-5,
    tolX: Double = 1e-9,
    nCorrection: Int = 100,
    learningRate: Double = 1.0,
    verbose: Boolean = false,
    lineSearch: LineSearch[T] = null,
    lineSearchOptions: JMap[Any, Any] = null): LBFGS[T] = {
  // Option(null) is None, mirroring the previous explicit null checks.
  val search = Option(lineSearch)
  val searchOptions = Option(lineSearchOptions).map(opts => T(opts))
  new LBFGS[T](maxIter, maxEval, tolFun, tolX, nCorrection, learningRate,
    verbose, search, searchOptions)
}

def createAdadelta(decayRate: Double = 0.9,
    Epsilon: Double = 1e-10): Adadelta[T] =
  new Adadelta[T](decayRate, Epsilon)

def createAdam(learningRate: Double = 1e-3,
    learningRateDecay: Double = 0.0,
    beta1: Double = 0.9,
    beta2: Double = 0.999,
    Epsilon: Double = 1e-8): Adam[T] =
  new Adam[T](learningRate, learningRateDecay, beta1, beta2, Epsilon)

def createParallelAdam(learningRate: Double = 1e-3,
    learningRateDecay: Double = 0.0,
    beta1: Double = 0.9,
    beta2: Double = 0.999,
    Epsilon: Double = 1e-8,
    parallelNum: Int = Engine.coreNumber()): ParallelAdam[T] =
  new ParallelAdam[T](learningRate, learningRateDecay, beta1, beta2,
    Epsilon, parallelNum)

def createFtrl(learningRate: Double = 1e-3,
    learningRatePower: Double = -0.5,
    initialAccumulatorValue: Double = 0.1,
    l1RegularizationStrength: Double = 0.0,
    l2RegularizationStrength: Double = 0.0,
    l2ShrinkageRegularizationStrength: Double = 0.0): Ftrl[T] =
  new Ftrl[T](learningRate, learningRatePower, initialAccumulatorValue,
    l1RegularizationStrength, l2RegularizationStrength,
    l2ShrinkageRegularizationStrength)

def createAdamax(learningRate: Double = 0.002,
    beta1: Double = 0.9,
    beta2: Double = 0.999,
    Epsilon: Double = 1e-38): Adamax[T] =
  new Adamax(learningRate, beta1, beta2, Epsilon)

def createRMSprop(learningRate: Double = 1e-2,
    learningRateDecay: Double = 0.0,
    decayRate: Double = 0.99,
    Epsilon: Double = 1e-8): RMSprop[T] =
  new RMSprop[T](learningRate, learningRateDecay, decayRate, Epsilon)

def loadOptimMethod(path: String): OptimMethod[T] = OptimMethod.load[T](path)

def saveOptimMethod(method: OptimMethod[T], path: String,
    overWrite: Boolean = false): Unit =
  method.save(path, overWrite)

/**
 * Save tensor dictionary to a Java hashmap object file
 */
def saveTensorDictionary(tensors: JHashMap[String, JTensor], path: String): Unit =
  File.save(tensors, path, true)
/** Trains a parsed TensorFlow graph with BigDL and returns the fitted module. */
def trainTF(modelPath: String,
    output: String,
    samples: JavaRDD[Sample],
    optMethod: OptimMethod[T],
    criterion: Criterion[T],
    batchSize: Int,
    endWhen: Trigger): AbstractModule[Activity, Activity, T] = {
  val nodes = parse(modelPath)
  val session = new BigDLSessionImpl[T](nodes.asScala, new Context[T](),
    ByteOrder.LITTLE_ENDIAN)
  val dataset = batching(DataSet.rdd(toJSample(samples)), batchSize)
    .asInstanceOf[DistributedDataSet[MiniBatch[T]]]
  session.train(Seq(output), dataset, optMethod, criterion, endWhen)
}

/** Builds a single-node optimizer over in-memory features/labels. */
def createLocalOptimizer(features: JList[JTensor],
    y: JTensor,
    model: AbstractModule[Activity, Activity, T],
    criterion: Criterion[T],
    optimMethod: JMap[String, OptimMethod[T]],
    endTrigger: Trigger,
    batchSize: Int,
    localCores: Int): Optimizer[T, MiniBatch[T]] = {
  val samples = toSampleArray(features.asScala.toList.map(f => toTensor(f)),
    toTensor(y))
  val optimizer = new LocalOptimizer[T](
    model,
    batching(DataSet.array(samples), batchSize)
      .asInstanceOf[LocalDataSet[MiniBatch[T]]],
    criterion
  ).asInstanceOf[Optimizer[T, MiniBatch[T]]]
  Engine.setNodeAndCore(1, localCores)
  enrichOptimizer[T](optimizer, endTrigger, optimMethod.asScala.toMap)
}
def createDistriOptimizer(model: AbstractModule[Activity, Activity, T],
trainingRdd: JavaRDD[Sample],
criterion: Criterion[T],
optimMethod: JMap[String, OptimMethod[T]],
endTrigger: Trigger,
batchSize: Int): Optimizer[T, MiniBatch[T]] = {
val sampleRDD = toJSample(trainingRdd)
val optimizer = Optimizer(
model = model,
dataset = batching(DataSet.rdd(sampleRDD), batchSize)
.asInstanceOf[DistributedDataSet[MiniBatch[T]]],
criterion = criterion
).asInstanceOf[Optimizer[T, MiniBatch[T]]]
enrichOptimizer(optimizer, endTrigger, optimMethod.asScala.toMap)
}
def createDistriOptimizerFromDataSet(model: AbstractModule[Activity, Activity, T],
trainDataSet: DataSet[ImageFeature],
criterion: Criterion[T],
optimMethod: JMap[String, OptimMethod[T]],
endTrigger: Trigger,
batchSize: Int): Optimizer[T, MiniBatch[T]] = {
val dataSet = trainDataSet -> ImageFeatureToMiniBatch[T](batchSize)
val optimizer = Optimizer(
model = model,
dataset = dataSet.asInstanceOf[DistributedDataSet[MiniBatch[T]]],
criterion = criterion
).asInstanceOf[Optimizer[T, MiniBatch[T]]]
enrichOptimizer(optimizer, endTrigger, optimMethod.asScala.toMap)
}
def featureTransformDataset(dataset: DataSet[ImageFeature],
transformer: FeatureTransformer): DataSet[ImageFeature] = {
dataset -> transformer
}
def createL1L2Regularizer(l1: Double, l2: Double): L1L2Regularizer[T] = {
L1L2Regularizer[T](l1, l2)
}
def createL1Regularizer(l1: Double): L1Regularizer[T] = {
L1Regularizer[T](l1)
}
def createL2Regularizer(l2: Double): L2Regularizer[T] = {
L2Regularizer[T](l2)
}
/**
 * Attaches a validation data set (given as a Sample RDD) to an optimizer.
 *
 * @param optimizer optimizer to configure
 * @param batchSize mini-batch size used when batching the validation RDD
 * @param trigger when validation should run (e.g. every epoch)
 * @param valRdd validation data from the Python side
 * @param vMethods validation metrics to evaluate
 */
def setValidation(optimizer: Optimizer[T, MiniBatch[T]],
    batchSize: Int,
    trigger: Trigger,
    valRdd: JavaRDD[Sample],
    vMethods: JList[ValidationMethod[T]]): Unit = {
  val sampleRDD = toJSample(valRdd)
  // batchSize is already an Int; the previous `batchSize.toInt` was a no-op.
  optimizer.setValidation(trigger, batching(DataSet.rdd(sampleRDD), batchSize),
    vMethods.asScala.toArray)
}
def setValidationFromDataSet(optimizer: Optimizer[T, MiniBatch[T]],
batchSize: Int,
trigger: Trigger,
valDataSet: DataSet[ImageFeature],
vMethods: JList[ValidationMethod[T]]): Unit = {
val dataSet = valDataSet -> ImageFeatureToMiniBatch[T](batchSize)
optimizer.setValidation(trigger, dataSet,
vMethods.asScala.toArray)
}
def setValidation(optimizer: Optimizer[T, MiniBatch[T]],
batchSize: Int,
trigger: Trigger,
xVal: JList[JTensor],
yVal: JTensor,
vMethods: JList[ValidationMethod[T]]): Unit = {
val sampleArray = toSampleArray(xVal.asScala.toList.map{f => toTensor(f)}, toTensor(yVal))
optimizer.setValidation(trigger, batching(DataSet.array(sampleArray), batchSize),
vMethods.asScala.toArray)
}
def setTrainData(optimizer: Optimizer[T, MiniBatch[T]],
trainingRdd: JavaRDD[Sample],
batchSize: Int): Unit = {
val sampleRDD = toJSample(trainingRdd)
optimizer.setTrainData(sampleRDD, batchSize)
}
def setCriterion(optimizer: Optimizer[T, MiniBatch[T]],
criterion: Criterion[T]): Unit = {
optimizer.setCriterion(criterion)
}
def setCheckPoint(optimizer: Optimizer[T, MiniBatch[T]],
trigger: Trigger,
checkPointPath: String,
isOverwrite: Boolean): Unit = {
optimizer.setCheckpoint(checkPointPath, trigger)
if (isOverwrite) {
optimizer.overWriteCheckpoint()
}
}
def setTrainSummary(optimizer: Optimizer[T, MiniBatch[T]], summary: TrainSummary): Unit = {
optimizer.setTrainSummary(summary)
}
def setValSummary(optimizer: Optimizer[T, MiniBatch[T]], summary: ValidationSummary): Unit = {
optimizer.setValidationSummary(summary)
}
def summaryReadScalar(summary: Summary, tag: String): JList[JList[Any]] = {
val result = summary.readScalar(tag)
result.toList.map { item =>
List(item._1, item._2, item._3).asJava.asInstanceOf[JList[Any]]
}.asJava
}
def summarySetTrigger(
summary: TrainSummary,
summaryName: String,
trigger: Trigger): TrainSummary = {
summary.setSummaryTrigger(summaryName, trigger)
summary
}
def createTrainSummary(logDir: String,
appName: String): TrainSummary = {
new TrainSummary(logDir, appName)
}
def createValidationSummary(logDir: String,
appName: String): ValidationSummary = {
new ValidationSummary(logDir, appName)
}
def createModel(input: JList[ModuleNode[T]],
output: JList[ModuleNode[T]]): Graph[T] = {
Graph(input.asScala.toArray, output.asScala.toArray)
}
def createModelPreprocessor(preprocessor: AbstractModule[Activity, Activity, T],
trainable: AbstractModule[Activity, Activity, T]): Graph[T] = {
Graph(preprocessor, trainable)
}
def createNode(module: AbstractModule[Activity, Activity, T],
x: JList[ModuleNode[T]]): ModuleNode[T] = {
if (null == x || x.isEmpty) {
module.inputs()
} else {
module.inputs(x.asScala: _*)
}
}
def createInput(): ModuleNode[T] = {
Input()
}
def initEngine(): Unit = {
Engine.init
}
def getEngineType(): String = {
Engine.getEngineType().toString
}
def getNodeAndCoreNumber(): Array[Int] = {
Array(Engine.nodeNumber(), Engine.coreNumber())
}
/**
 * Selects the distributed optimizer implementation by name
 * (case-insensitive: "optimizerV1" or "optimizerV2").
 *
 * @throws IllegalArgumentException if the version string is not recognized;
 *         previously an unknown value surfaced as an opaque MatchError.
 */
def setOptimizerVersion(version: String): Unit = {
  version.toLowerCase() match {
    case "optimizerv1" => Engine.setOptimizerVersion(OptimizerV1)
    case "optimizerv2" => Engine.setOptimizerVersion(OptimizerV2)
    case other =>
      throw new IllegalArgumentException(
        s"Unsupported optimizer version: '$other' (expected 'optimizerV1' or 'optimizerV2')")
  }
}
def getOptimizerVersion(): String = {
Engine.getOptimizerVersion().toString
}
def setWeights(model: AbstractModule[Activity, Activity, T], weights: JList[JTensor]): Unit = {
val weightTensor = weights.asScala.toArray.map(toTensor(_))
model.setWeightsBias(weightTensor)
}
def getWeights(model: AbstractModule[Activity, Activity, T]): JList[JTensor] = {
val weights = model.getWeightsBias()
if (weights != null) {
weights.map(toJTensor(_)).toList.asJava
} else {
null
}
}
def updateParameters(model: AbstractModule[Activity, Activity, T], lr: Double): Unit = {
val (w, g) = model.getParameters()
w.add(ev.negative(ev.fromType(lr)), g)
}
/**
 * Builds a tensor of the given shape filled with uniform random draws
 * from [a, b), and converts it to a Python-facing JTensor.
 */
def uniform(a: Double, b: Double, size: JList[Int]): JTensor = {
  val shape = size.asScala.toArray
  val result = Tensor[T]().resize(shape)
  // apply1 overwrites every element in place with a fresh draw from the
  // shared random generator (the element's previous value is ignored).
  result.apply1(_ => ev.fromType(RandomGenerator.RNG.uniform(a, b)))
  toJTensor(result)
}
def createZeros(): Zeros.type = {
Zeros
}
def createOnes(): Ones.type = {
Ones
}
def createConstInitMethod(value: Double): ConstInitMethod = {
ConstInitMethod(value)
}
def createRandomUniform(lower: Double, upper: Double): InitializationMethod = {
RandomUniform(lower, upper)
}
def createRandomUniform(): InitializationMethod = {
RandomUniform
}
def createRandomNormal(mean: Double, stdv: Double): RandomNormal = {
RandomNormal(mean, stdv)
}
def createXavier(): Xavier.type = {
Xavier
}
def createMsraFiller(varianceNormAverage: Boolean = true): MsraFiller = {
MsraFiller(varianceNormAverage)
}
def createBilinearFiller(): BilinearFiller.type = {
BilinearFiller
}
def createHardSigmoid : HardSigmoid[T] = {
HardSigmoid()
}
def createMeanAbsolutePercentageCriterion: MeanAbsolutePercentageCriterion[T] = {
MeanAbsolutePercentageCriterion()
}
def createMeanSquaredLogarithmicCriterion: MeanSquaredLogarithmicCriterion[T] = {
MeanSquaredLogarithmicCriterion()
}
def createKullbackLeiblerDivergenceCriterion: KullbackLeiblerDivergenceCriterion[T] = {
KullbackLeiblerDivergenceCriterion()
}
def createPoissonCriterion: PoissonCriterion[T] = {
PoissonCriterion()
}
def setInitMethod(layer: Initializable, weightInitMethod: InitializationMethod,
biasInitMethod: InitializationMethod): layer.type = {
layer.setInitMethod(weightInitMethod, biasInitMethod)
}
def setInitMethod(layer: Initializable,
initMethods: JArrayList[InitializationMethod]): layer.type = {
layer.setInitMethod(initMethods.asScala.toArray)
}
def getHiddenState(rec: Recurrent[T]): JActivity = {
JActivity(rec.getHiddenState())
}
def freeze(model: AbstractModule[Activity, Activity, T], freezeLayers: JList[String])
: AbstractModule[Activity, Activity, T] = {
if (null == freezeLayers) model.freeze() else model.freeze(freezeLayers.asScala: _*)
}
def unFreeze(model: AbstractModule[Activity, Activity, T],
names: JList[String]): AbstractModule[Activity, Activity, T] = {
if (names == null) {
model.unFreeze()
} else {
model.unFreeze(names.asScala: _*)
}
}
def setStopGradient(model: Graph[T], layers: JList[String]): Graph[T] = {
model.stopGradient(layers.asScala.toArray)
}
def saveGraphTopology(model: Graph[T], logPath: String): Graph[T] = {
model.saveGraphTopology(logPath)
}
def setInputFormats(graph: StaticGraph[T], inputFormat: JList[Int]): StaticGraph[T] = {
graph.setInputFormats(inputFormat.asScala.toList)
}
def setOutputFormats(graph: StaticGraph[T], outputFormat: JList[Int]): StaticGraph[T] = {
graph.setOutputFormats(outputFormat.asScala.toList)
}
def createResizeBilinear(
outputHeight: Int,
outputWidth: Int,
alignCorner: Boolean,
dataFormat: String
): ResizeBilinear[T] = {
ResizeBilinear[T](outputHeight,
outputWidth,
alignCorner, DataFormat.apply(dataFormat))
}
def createMultiRNNCell(cells: JList[Cell[T]]): MultiRNNCell[T] = {
MultiRNNCell(cells.asScala.toArray)
}
def createHighway(size: Int, withBias: Boolean,
activation: TensorModule[T] = null,
wRegularizer: Regularizer[T] = null,
bRegularizer: Regularizer[T] = null): Graph[T] = {
Highway(size, withBias, activation, wRegularizer, bRegularizer)
}
def createUpSampling3D(size: JList[Int]): UpSampling3D[T] = {
UpSampling3D(size.asScala.toArray)
}
def createCropping2D(
heightCrop: JList[Int],
widthCrop: JList[Int],
dataFormat: String = "NCHW"): Cropping2D[T] = {
Cropping2D(heightCrop.asScala.toArray, widthCrop.asScala.toArray, DataFormat(dataFormat))
}
def createCropping3D(
dim1Crop: JList[Int],
dim2Crop: JList[Int],
dim3Crop: JList[Int],
dataFormat: String = Cropping3D.CHANNEL_FIRST): Cropping3D[T] = {
Cropping3D(
dim1Crop.asScala.toArray, dim2Crop.asScala.toArray, dim3Crop.asScala.toArray, dataFormat)
}
def redirectSparkLogs(logPath: String): Unit = {
LoggerFilter.redirectSparkInfoLogs(logPath)
}
def showBigDlInfoLogs(): Unit = {
Configurator.setLevel("com.intel.analytics.bigdl.dllib.optim", Level.INFO)
}
def quantize(module: AbstractModule[Activity, Activity, T]): Module[T] = {
module.quantize()
}
def findGraphNode(model: Graph[T], name: String): ModuleNode[T] = {
model.node(name)
}
/**
 * Lists the direct sub-modules of a container.
 *
 * @param module the container to inspect
 * @return its immediate children as a Java list
 * @throws RuntimeException for a plain KerasLayer, which wraps a single
 *         layer and exposes no listable children
 */
def getContainerModules(module: Container[Activity, Activity, T])
  : JList[AbstractModule[Activity, Activity, T]] = {
  module match {
    // KerasModel exposes its children through getSubModules().
    case m: KerasModel[T] =>
      m.getSubModules().asJava
    case kl: KerasLayer[Activity, Activity, T] =>
      throw new RuntimeException(s"There's no sub modules for ${kl}")
    // Any other container: its children are its `modules` buffer.
    case _ =>
      module.modules.toList.asJava
  }
}
def getFlattenModules(module: Container[Activity, Activity, T],
includeContainer: Boolean)
: JList[AbstractModule[Activity, Activity, T]] = {
val result = ArrayBuffer[AbstractModule[Activity, Activity, T]]()
doGetFlattenModules(module, includeContainer, result)
result.toList.asJava
}
// TODO: refactor Container and KerasLayer to simplify this logic
/**
 * Whether getContainerModules can list children for this module.
 * KerasModel and plain containers have sub-modules; a bare KerasLayer and
 * leaf layers do not. Order matters: KerasModel/KerasLayer are checked
 * before the generic Container case.
 */
private def hasSubModules(module: AbstractModule[Activity, Activity, T]) = {
  module match {
    // Wildcards instead of `[T]` type arguments: the match is erasure-based
    // anyway, and the old patterns were unchecked and bound names that were
    // never used.
    case _: KerasModel[_] => true
    case _: KerasLayer[_, _, _] => false
    case _: Container[_, _, _] => true
    case _ => false
  }
}
/**
 * Depth-first traversal collecting all leaf modules under `module` into
 * `result`, in traversal order.
 *
 * @param module container to flatten
 * @param includeContainer if true, each container is appended after its
 *        children (post-order), including `module` itself
 * @param result accumulator receiving the flattened modules
 */
private def doGetFlattenModules(module: Container[Activity, Activity, T],
    includeContainer: Boolean,
    result: ArrayBuffer[AbstractModule[Activity, Activity, T]]): Unit = {
  getContainerModules(module).asScala.foreach {m =>
    if (hasSubModules(m)) {
      // NOTE(review): the cast assumes every module reported by hasSubModules
      // (including KerasModel) is a Container — confirm against the class
      // hierarchy.
      doGetFlattenModules(m.asInstanceOf[Container[Activity, Activity, T]],
        includeContainer,
        result)
    } else {
      result.append(m)
    }
  }
  // Containers are emitted after their children (post-order).
  if (includeContainer) {
    result.append(module)
  }
}
/**
 * Whether the module owns any trainable weight/bias tensors.
 * getWeightsBias() may return null for parameterless modules.
 */
def isWithWeights(module: Module[T]): Boolean = {
  val weights = module.getWeightsBias()
  // The last expression is the result; the explicit `return` was redundant.
  weights != null && weights.nonEmpty
}
def setRunningMean(module: BatchNormalization[T], runningMean: JTensor): Unit = {
module.runningMean.set(toTensor(runningMean))
}
def setRunningStd(module: BatchNormalization[T], runningStd: JTensor): Unit = {
module.runningVar.set(toTensor(runningStd))
}
def getRunningMean(module: BatchNormalization[T]): JTensor = {
toJTensor(module.runningMean)
}
def getRunningStd(module: BatchNormalization[T]): JTensor = {
toJTensor(module.runningVar)
}
def createMasking(maskValue: Double)
: Masking[T] = {
Masking[T](maskValue)
}
def createMaxout(inputSize: Int, outputSize: Int, maxoutNumber: Int, withBias: Boolean = true,
wRegularizer: Regularizer[T] = null, bRegularizer: Regularizer[T] = null,
initWeight: Tensor[T] = null, initBias: Tensor[T] = null)
: Maxout[T] = {
Maxout[T](inputSize, outputSize, maxoutNumber, withBias, wRegularizer, bRegularizer,
initWeight, initBias)
}
def createCosineProximityCriterion(): CosineProximityCriterion[T] = {
CosineProximityCriterion[T]()
}
def createPriorBox(minSizes: JList[Double], maxSizes: JList[Double] = null,
aspectRatios: JList[Double] = null, isFlip: Boolean = true, isClip: Boolean = false,
variances: JList[Double] = null, offset: Float = 0.5f,
imgH: Int = 0, imgW: Int = 0, imgSize: Int = 0,
stepH: Float = 0, stepW: Float = 0, step: Float = 0): PriorBox[T] = {
val maxS = if (maxSizes == null) null else maxSizes.asScala.toArray.map(_.toFloat)
val aspectR = if (aspectRatios == null) null else aspectRatios.asScala.toArray.map(_.toFloat)
val vars = if (variances == null) null else variances.asScala.toArray.map(_.toFloat)
new PriorBox[T](minSizes.asScala.toArray.map(_.toFloat),
maxS, aspectR, isFlip, isClip, vars, offset, imgH, imgW, imgSize, stepH, stepW, step)
}
def createNormalizeScale(p: Double, eps: Double = 1e-10, scale: Double, size: JList[Int],
wRegularizer: Regularizer[T] = null): NormalizeScale[T] =
new NormalizeScale[T](p, eps, scale, size.asScala.toArray, wRegularizer)
def createDetectionOutputSSD(nClasses: Int,
shareLocation: Boolean,
bgLabel: Int,
nmsThresh: Double,
nmsTopk: Int,
keepTopK: Int,
confThresh: Double,
varianceEncodedInTarget: Boolean,
confPostProcess: Boolean): DetectionOutputSSD[T] =
new DetectionOutputSSD[T](nClasses, shareLocation, bgLabel, nmsThresh.toFloat,
nmsTopk, keepTopK, confThresh.toFloat, varianceEncodedInTarget, confPostProcess)
def createDetectionOutputFrcnn(nmsThresh: Float = 0.3f, nClasses: Int,
bboxVote: Boolean, maxPerImage: Int = 100, thresh: Double = 0.05): DetectionOutputFrcnn = {
new DetectionOutputFrcnn(nmsThresh, nClasses, bboxVote, maxPerImage, thresh)
}
def createProposal(preNmsTopN: Int, postNmsTopN: Int,
ratios: JList[Double], scales: JList[Double],
rpnPreNmsTopNTrain: Int = 12000, rpnPostNmsTopNTrain: Int = 2000): Proposal = {
new Proposal(preNmsTopN, postNmsTopN, ratios.asScala.toArray.map(_.toFloat),
scales.asScala.toArray.map(_.toFloat), rpnPreNmsTopNTrain, rpnPostNmsTopNTrain)
}
def createHFlip(): HFlip = {
HFlip()
}
def createResize(resizeH: Int, resizeW: Int, resizeMode: Int = Imgproc.INTER_LINEAR,
useScaleFactor: Boolean): Resize = {
Resize(resizeH, resizeW, resizeMode, useScaleFactor)
}
def createColorJitter(brightnessProb: Double = 0.5, brightnessDelta: Double = 32,
contrastProb: Double = 0.5, contrastLower: Double = 0.5, contrastUpper: Double = 1.5,
hueProb: Double = 0.5, hueDelta: Double = 18,
saturationProb: Double = 0.5, saturationLower: Double = 0.5, saturationUpper: Double = 1.5,
randomOrderProb: Double = 0, shuffle: Boolean = false): ColorJitter = {
ColorJitter(brightnessProb, brightnessDelta, contrastProb,
contrastLower, contrastUpper, hueProb, hueDelta, saturationProb,
saturationLower, saturationUpper, randomOrderProb, shuffle)
}
def createBrightness(deltaLow: Double, deltaHigh: Double): Brightness = {
Brightness(deltaLow, deltaHigh)
}
def createChannelOrder(): ChannelOrder = {
ChannelOrder()
}
def createContrast(deltaLow: Double, deltaHigh: Double): Contrast = {
Contrast(deltaLow, deltaHigh)
}
def createRandomCrop(cropWidth: Int, cropHeight: Int, isClip: Boolean): RandomCrop = {
RandomCrop(cropWidth, cropHeight, isClip)
}
def createCenterCrop(cropWidth: Int, cropHeight: Int, isClip: Boolean): CenterCrop = {
CenterCrop(cropWidth, cropHeight, isClip)
}
def createFixedCrop(wStart: Double,
hStart: Double, wEnd: Double, hEnd: Double, normalized: Boolean,
isClip: Boolean): FixedCrop = {
FixedCrop(wStart.toFloat, hStart.toFloat, wEnd.toFloat, hEnd.toFloat, normalized, isClip)
}
def createDetectionCrop(roiKey: String, normalized: Boolean): DetectionCrop = {
DetectionCrop(roiKey, normalized)
}
def createExpand(meansR: Int = 123, meansG: Int = 117, meansB: Int = 104,
minExpandRatio: Double = 1.0,
maxExpandRatio: Double = 4.0): Expand = {
Expand(meansR, meansG, meansB, minExpandRatio, maxExpandRatio)
}
def createRandomAspectScale(scales: JList[Int], scaleMultipleOf: Int = 1,
maxSize: Int = 1000): RandomAspectScale = {
RandomAspectScale(scales.asScala.toArray, scaleMultipleOf, maxSize)
}
def createHue(deltaLow: Double, deltaHigh: Double): Hue = {
Hue(deltaLow, deltaHigh)
}
def createRandomTransformer(transformer: FeatureTransformer, prob: Double): RandomTransformer = {
RandomTransformer(transformer, prob)
}
def createSaturation(deltaLow: Double, deltaHigh: Double): Saturation = {
Saturation(deltaLow, deltaHigh)
}
def createRandomSampler(): FeatureTransformer = {
RandomSampler()
}
def createChannelNormalize(meanR: Double, meanG: Double, meanB: Double,
stdR: Double = 1, stdG: Double = 1, stdB: Double = 1): FeatureTransformer = {
ChannelNormalize(meanR.toFloat, meanG.toFloat, meanB.toFloat,
stdR.toFloat, stdG.toFloat, stdB.toFloat)
}
def createAspectScale(scale: Int,
scaleMultipleOf: Int,
maxSize: Int,
resizeMode: Int = 1,
useScaleFactor: Boolean = true,
minScale: Double = -1): FeatureTransformer = {
val minS = if (minScale == -1) None else Some(minScale.toFloat)
AspectScale(scale, scaleMultipleOf, maxSize, resizeMode, useScaleFactor, minS)
}
def createFiller(startX: Double, startY: Double, endX: Double, endY: Double,
value: Int = 255): Filler = {
Filler(startX.toFloat, startY.toFloat, endX.toFloat, endY.toFloat, value)
}
def createPixelNormalize(means: JList[Double]): PixelNormalizer = {
PixelNormalizer(means.asScala.toArray.map(_.toFloat))
}
def createRoiProject(needMeetCenterConstraint: Boolean): RoiProject = {
RoiProject(needMeetCenterConstraint)
}
def createRoiResize(normalized: Boolean): RoiResize = {
RoiResize(normalized)
}
def createRoiHFlip(normalized: Boolean = true): RoiHFlip = {
RoiHFlip(normalized)
}
def createRoiNormalize(): RoiNormalize = {
RoiNormalize()
}
def createFixExpand(eh: Int, ew: Int): FixExpand = {
FixExpand(eh, ew)
}
def createChannelScaledNormalizer(meanR: Int, meanG: Int, meanB: Int, scale: Double)
: ChannelScaledNormalizer = {
ChannelScaledNormalizer(meanR, meanG, meanB, scale)
}
def createRandomAlterAspect(min_area_ratio: Float,
max_area_ratio: Int,
min_aspect_ratio_change: Float,
interp_mode: String,
cropLength: Int)
: RandomAlterAspect = {
RandomAlterAspect(min_area_ratio, max_area_ratio, min_aspect_ratio_change,
interp_mode, cropLength)
}
def createRandomCropper(cropWidth: Int, cropHeight: Int,
mirror: Boolean, cropperMethod: String,
channels: Int)
: RandomCropper = {
if (cropperMethod == "Random") {
RandomCropper(cropWidth, cropHeight, mirror,
CropRandom, channels)
} else {
RandomCropper(cropWidth, cropHeight, mirror,
CropCenter, channels)
}
}
def createRandomResize(minSize: Int, maxSize : Int)
: RandomResize = {
RandomResize(minSize, maxSize)
}
def transformImageFeature(transformer: FeatureTransformer, feature: ImageFeature)
: ImageFeature = {
transformer.transform(feature)
}
def transformImageFrame(transformer: FeatureTransformer,
imageFrame: ImageFrame): ImageFrame = {
imageFrame.transform(transformer)
}
def setLabel(labelMap: JMap[String, Float], imageFrame: ImageFrame): Unit = {
imageFrame.setLabel(labelMap.asScala)
}
def createDistributedImageFrame(imageRdd: JavaRDD[JTensor], labelRdd: JavaRDD[JTensor])
: DistributedImageFrame = {
require(null != imageRdd, "imageRdd cannot be null")
val featureRdd = if (null != labelRdd) {
imageRdd.rdd.zip(labelRdd.rdd).map(data => {
createImageFeature(data._1, data._2)
})
} else {
imageRdd.rdd.map(image => {
createImageFeature(image, null)
})
}
new DistributedImageFrame(featureRdd)
}
def createLocalImageFrame(images: JList[JTensor], labels: JList[JTensor])
: LocalImageFrame = {
require(null != images, "images cannot be null")
val features = if (null != labels) {
(0 until images.size()).map(i => {
createImageFeature(images.get(i), labels.get(i))
})
} else {
(0 until images.size()).map(i => {
createImageFeature(images.get(i), null)
})
}
new LocalImageFrame(features.toArray)
}
/**
 * Chains a list of feature transformers into a single pipeline, applying
 * them left to right via the `->` composition operator.
 *
 * @param list transformers in application order; must be non-empty
 * @throws IllegalArgumentException if the list is empty (previously this
 *         failed with an opaque IndexOutOfBoundsException from get(0))
 */
def createPipeline(list: JList[FeatureTransformer]): FeatureTransformer = {
  require(!list.isEmpty, "createPipeline requires at least one transformer")
  // reduce replaces the previous mutable `var cur` accumulation loop.
  list.asScala.reduce((composed, next) => composed -> next)
}
def createImageFeature(data: JTensor = null, label: JTensor = null, uri: String = null)
: ImageFeature = {
val feature = new ImageFeature()
if (null != data) {
val mat = OpenCVMat.fromFloats(data.storage, data.shape(0), data.shape(1), data.shape(2))
feature(ImageFeature.bytes) = OpenCVMat.imencode(mat)
feature(ImageFeature.mat) = mat
feature(ImageFeature.originalSize) = mat.shape()
}
if (null != label) {
// todo: may need a method to change label format if needed
feature(ImageFeature.label) = toTensor(label)
}
if (null != uri) {
feature(ImageFeature.uri) = uri
}
feature
}
def imageFeatureGetKeys(imageFeature: ImageFeature): JList[String] = {
imageFeature.keys().toList.asJava
}
def distributedImageFrameToImageTensorRdd(imageFrame: DistributedImageFrame,
floatKey: String = ImageFeature.floats, toChw: Boolean = true): JavaRDD[JTensor] = {
imageFrame.rdd.map(imageFeatureToImageTensor(_, floatKey, toChw)).toJavaRDD()
}
def distributedImageFrameToLabelTensorRdd(imageFrame: DistributedImageFrame): JavaRDD[JTensor] = {
imageFrame.rdd.map(imageFeatureToLabelTensor).toJavaRDD()
}
def distributedImageFrameToPredict(imageFrame: DistributedImageFrame, key: String)
: JavaRDD[JList[Any]] = {
imageFrame.rdd.map(x => {
if (x.isValid && x.contains(key)) {
List[Any](x.uri(), toJTensor(x[Tensor[T]](key))).asJava
} else {
List[Any](x.uri(), null).asJava
}
})
}
def distributedImageFrameToSample(imageFrame: DistributedImageFrame, key: String):
JavaRDD[Sample] = {
imageFrame.rdd.map(x => {
if (x.isValid && x.contains(key)) {
toPySample(x[JSample[T]](key))
} else {
null
}
})
}
def distributedImageFrameToUri(imageFrame: DistributedImageFrame, key: String):
JavaRDD[String] = {
imageFrame.rdd.map(x => {
if (x.contains(key)) {
x[String](key)
} else {
null
}
})
}
/**
 * Randomly splits a distributed image frame into multiple frames with the
 * given relative weights (delegates to ImageFrame.randomSplit).
 *
 * @param imageFrame frame to split
 * @param weights relative weight of each split
 * @return one ImageFrame per weight
 */
def distributedImageFrameRandomSplit(imageFrame: DistributedImageFrame,
    weights: JList[Double]): Array[ImageFrame] = {
  // The trailing expression is returned implicitly; `return` is unidiomatic.
  imageFrame.randomSplit(weights.asScala.toArray)
}
def localImageFrameToUri(imageFrame: LocalImageFrame, key: String): JList[String] = {
imageFrame.array.map(x => {
if (x.contains(key)) {
x[String](key)
} else {
null
}
}).toList.asJava
}
def localImageFrameToSample(imageFrame: LocalImageFrame, key: String): JList[Sample] = {
imageFrame.array.map(x => {
if (x.isValid && x.contains(key)) {
toPySample(x[JSample[T]](key))
} else {
null
}
}).toList.asJava
}
def localImageFrameToPredict(imageFrame: LocalImageFrame, key: String)
: JList[JList[Any]] = {
imageFrame.array.map(x =>
if (x.isValid && x.contains(key)) {
List[Any](x.uri(), toJTensor(x[Tensor[T]](key))).asJava
} else {
List[Any](x.uri(), null).asJava
}).toList.asJava
}
def localImageFrameToImageTensor(imageFrame: LocalImageFrame,
floatKey: String = ImageFeature.floats, toChw: Boolean = true): JList[JTensor] = {
imageFrame.array.map(imageFeatureToImageTensor(_, floatKey, toChw)).toList.asJava
}
def localImageFrameToLabelTensor(imageFrame: LocalImageFrame): JList[JTensor] = {
imageFrame.array.map(imageFeatureToLabelTensor).toList.asJava
}
def imageFeatureToImageTensor(imageFeature: ImageFeature,
floatKey: String = ImageFeature.floats, toChw: Boolean = true): JTensor = {
toJTensor(imageFeature.toTensor(floatKey, toChw).asInstanceOf[Tensor[T]])
}
def imageFeatureToLabelTensor(imageFeature: ImageFeature): JTensor = {
val label = if (imageFeature.hasLabel()) {
imageFeature.getLabel[Tensor[T]]
} else {
Tensor[T](1).fill(ev.fromType[Float](-1f))
}
toJTensor(label)
}
def read(path: String, sc: JavaSparkContext, minPartitions: Int): ImageFrame = {
if (sc == null) {
ImageFrame.read(path, null, minPartitions)
} else {
ImageFrame.read(path, sc.sc, minPartitions)
}
}
def readParquet(path: String, sc: JavaSparkContext): DistributedImageFrame = {
val sqlContext = new SQLContext(sc)
ImageFrame.readParquet(path, sqlContext)
}
def writeParquet(path: String, output: String,
sc: JavaSparkContext, partitionNum: Int = 1): Unit = {
val sqlContext = new SQLContext(sc)
ImageFrame.writeParquet(path, output, sqlContext, partitionNum)
}
def createBytesToMat(byteKey: String): BytesToMat = {
BytesToMat(byteKey)
}
def createPixelBytesToMat(byteKey: String): PixelBytesToMat = {
PixelBytesToMat(byteKey)
}
def createMatToFloats(validHeight: Int = 300, validWidth: Int = 300, validChannels: Int = 3,
outKey: String = ImageFeature.floats, shareBuffer: Boolean = true): MatToFloats =
new MatToFloats(validHeight, validWidth, validChannels, outKey, shareBuffer)
def createMatToTensor(toRGB: Boolean = false, tensorKey: String = ImageFeature.imageTensor)
: MatToTensor[T] = new MatToTensor[T](toRGB, tensorKey)
def isLocal(imageFrame: ImageFrame): Boolean = imageFrame.isLocal()
def isDistributed(imageFrame: ImageFrame): Boolean = imageFrame.isDistributed()
def createImageFrameToSample(inputKeys: JList[String],
targetKeys: JList[String], sampleKey: String): ImageFrameToSample[T] = {
val targets = if (targetKeys == null) null else targetKeys.asScala.toArray
ImageFrameToSample[T](inputKeys.asScala.toArray, targets, sampleKey)
}
def seqFilesToImageFrame(url: String, sc: JavaSparkContext,
classNum: Int, partitionNum: Int): ImageFrame = {
val pn = if (partitionNum <= 0) None else Some(partitionNum)
DataSet.SeqFileFolder.filesToImageFrame(url, sc, classNum, pn)
}
def setConstantClip(optimizer: Optimizer[T, MiniBatch[T]],
min: Float, max: Float): Unit = {
optimizer.setConstantGradientClipping(min, max)
}
def setL2NormClip(optimizer: Optimizer[T, MiniBatch[T]],
normValue: Float): Unit = {
optimizer.setGradientClippingByl2Norm(normValue)
}
def disableClip(optimizer: Optimizer[T, MiniBatch[T]]): Unit = {
optimizer.disableGradientClipping()
}
def addScheduler(seq: SequentialSchedule, scheduler: LearningRateSchedule,
maxIteration: Int): SequentialSchedule = {
seq.add(scheduler, maxIteration)
}
private[bigdl] def initExecutorGateway(sc: JavaSparkContext, driverPort: Int): Unit = {
sc.parallelize(Seq(""), Engine.coreNumber() * Engine.nodeNumber())
.foreachPartition(_ => Engine.createJavaGateway(driverPort))
}
def createDatasetFromImageFrame(imageFrame: ImageFrame): DataSet[ImageFeature] = {
DataSet.imageFrame(imageFrame)
}
def getRealClassNameOfJValue(module: AbstractModule[Activity, Activity, T]): String = {
module.getClass.getCanonicalName
}
}
/**
 * Helpers shared by the Python bridge for converting Python-side tensors
 * into BigDL tensors.
 */
object PythonBigDLUtils {
  /**
   * Converts a JTensor coming from Python into a BigDL Tensor, widening the
   * float storage to the element type selected by `typeName`.
   *
   * @param jTensor source tensor; null is passed through as null
   * @param typeName "float" or "double"
   * @throws IllegalArgumentException for any other type name
   */
  def toTensor[T: ClassTag](jTensor: JTensor, typeName: String)
    (implicit ev: TensorNumeric[T]): Tensor[T] = {
    if (jTensor == null) {
      // Expression form instead of an early `return null`.
      null
    } else {
      typeName match {
        case "float" =>
          Tensor(jTensor.storage.map(x => ev.fromType(x.toFloat)), jTensor.shape)
        case "double" =>
          Tensor(jTensor.storage.map(x => ev.fromType(x.toDouble)), jTensor.shape)
        case t =>
          // The scrutinee is already a String, so the previous `t: String`
          // type pattern was a redundant, always-true test.
          throw new IllegalArgumentException(s"Not supported type: ${t}")
      }
    }
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/utils/python/api/PythonBigDL.scala | Scala | apache-2.0 | 94,877 |
package com.socrata.internal.http.pingpong
import java.nio.channels.spi.SelectorProvider
import java.nio.{BufferOverflowException, ByteBuffer}
import scala.util.Random
import com.rojoma.simplearm.util._
import java.nio.channels.ClosedByInterruptException
import java.net.InetSocketAddress
import java.io.{Closeable, IOException}
import java.util.concurrent.CountDownLatch
import java.nio.charset.StandardCharsets
import scala.beans.BeanProperty
/**
 * Serializable description of a pong endpoint: the UDP port to ping and the
 * token the responder appends to every reply. Mutable vars with
 * @BeanProperty expose the getters/setters Jackson needs for (de)serialization.
 */
class PingInfo(@BeanProperty var port: Int, @BeanProperty var response: String) {
  // No-arg constructor required by Jackson's reflective instantiation;
  // deprecated so application code doesn't call it by accident.
  @deprecated(message = "This constructor is for Jackson's use, not yours", since = "forever")
  def this() = this(0, null)
}
/**
 * UDP "pong" responder for liveness checks: binds a datagram channel on
 * `address` and echoes every received packet back to its sender with a
 * random 16-character alphanumeric token appended.
 *
 * Lifecycle: construct, start(), read pingInfo, close().
 */
class Pong(address: InetSocketAddress, rng: Random = new Random) extends Closeable {
  // Random [a-zA-Z0-9]{16} token appended to each reply so a pinger can tell
  // this responder's replies from stray packets.
  private val sendString = {
    val alphanum = (('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')).mkString
    val sb = new StringBuilder
    for(_ <- 1 to 16) sb.append(alphanum(rng.nextInt(alphanum.length)))
    sb.toString
  }
  private val sendBytes = sendString.getBytes(StandardCharsets.UTF_8)
  private val log = org.slf4j.LoggerFactory.getLogger(classOf[Pong])
  // Counted down by the responder thread once the socket is bound — or once
  // startup has failed, in which case `problem` is set first.
  private val started = new CountDownLatch(1)
  // Written on the responder thread, read on the thread calling start();
  // volatile guarantees visibility across that handoff.
  @volatile private var port = 0
  @volatile private var problem: Throwable = null
  // Only ever touched by the thread driving start()/pingInfo, hence no volatile.
  private var _pingInfo: PingInfo = null
  private val thread = new Thread() {
    setName(getId + " / Ping responder")
    override def run() {
      mainloop()
    }
  }
  // runs the mainloop, responding to every ping packet it receives.
  //
  // The packets that it sends consist of the following:
  //   [the contents of the received ping] [the contents of `send`]
  //
  // At any time, this thread may be interrupted.  This will cause it to exit.
  private def mainloop() {
    try {
      val selectorProvider = SelectorProvider.provider
      using(selectorProvider.openDatagramChannel()) { socket =>
        socket.bind(address)
        // If `address` asked for an ephemeral port (0), publish the real one.
        port = socket.getLocalAddress.asInstanceOf[InetSocketAddress].getPort
        started.countDown()
        val recvBuf = ByteBuffer.allocate(512)
        while(true) {
          recvBuf.clear()
          // Blocking receive; leaves the buffer position at the end of the
          // datagram so the token can be appended in place.
          val respondTo = socket.receive(recvBuf)
          try {
            log.trace("Ping from {}", respondTo)
            // Append the token, then flip to [0, position) for sending.
            recvBuf.put(sendBytes).flip()
            socket.send(recvBuf, respondTo)
            log.trace("Pong!")
          } catch {
            // Per-packet failures are logged and ignored; the loop keeps serving.
            case e: IOException =>
              log.trace("IO exception sending to the peer; ignoring")
            case e: BufferOverflowException =>
              log.trace("packet too large to add reply and still fit in 512 bytes; ignoring")
          }
        }
      }
    } catch {
      // Normal shutdown path: close() interrupts this thread.
      case _: InterruptedException | _: ClosedByInterruptException =>
        // pass
      case e: Throwable =>
        problem = e
        started.countDown() // unblock start() so it can rethrow `problem`
        log.error("Unexpected exception on pong thread", e)
        throw e
    }
  }
  /**
   * Starts the responder thread and blocks until the socket is bound.
   * @throws IllegalStateException if already started
   * @throws Exception if the responder thread failed during startup
   */
  def start() {
    if(_pingInfo != null) throw new IllegalStateException("Already started")
    thread.start()
    started.await()
    if(problem != null) throw new Exception("Exception on pong thread", problem)
    _pingInfo = new PingInfo(port, sendString)
  }
  /** Bound port and reply token; only valid after start(). */
  def pingInfo = {
    if(_pingInfo == null) throw new IllegalStateException("not yet started")
    _pingInfo
  }
  /** Interrupts the responder thread and waits for it to terminate. */
  def close() {
    thread.interrupt()
    thread.join()
  }
}
| socrata-platform/socrata-internal-http | src/main/scala/com/socrata/internal/http/pingpong/Pong.scala | Scala | apache-2.0 | 3,315 |
package org.scalaide.core.internal.quickassist
// Java imports
import java.util.regex.Pattern
/**
* This object is used for applying code transformations based on the found and required type
* extract from the annotation message (such as quick fix message) and the expression in the source code.
* The object arguments are: found type string, required type string and annotation string, respectively and
* the result is a list of strings which should replace the annotation string
*/
object TypeMismatchQuickFixProcessor extends
  ((String, String, String) => List[String]) {
  /** list containing all type mismatch quick fix cases that this object should go through */
  val cases: List[FoundToRequiredTypeCase] =
    List(
      // "type mismatch: List[T] but found List[List[T]]
      FoundToRequiredTypeCase(
        List("%s.flatten", "%s.head", "%s.last"),
        Pattern.compile("List\\\\[List\\\\[(.*)\\\\]\\\\]"), Pattern.compile("List\\\\[(.*)\\\\]"), Pattern.compile("^(.*)$")
      ),
      // "type mismatch: Array[T] but found List[T]
      FoundToRequiredTypeCase(
        List("%s.toArray"),
        Pattern.compile("List\\\\[(.*)\\\\]"), Pattern.compile("Array\\\\[(.*)\\\\]"), Pattern.compile("^(.*)$")
      ),
      // "type mismatch: found T; required Option[T]" -> suggest to wrap the result in Some()
      FoundToRequiredTypeCase(
        List("Option(%s)", "Some(%s)"),
        Pattern.compile("(.*)"), Pattern.compile("Option\\\\[(.*)\\\\]"), Pattern.compile("^(.*)$")
      )
      // TODO: compiler does not return annotations properly, uncomment this and tests when it is fixed
      // "type mismatch: found BasicType(T); required Option[T]" -> suggest to wrap the result in Some()
      //      ,
      //      FoundToRequiredTypeCase(
      //        List("Option(%s)", "Some(%s)"),
      //        Pattern.compile("(?:java\\\\.lang\\\\.)([a-zA-Z&&[^\\\\(\\\\)]]+)\\\\(.*\\\\)"), Pattern.compile("Option\\\\[(.*)\\\\]"), Pattern.compile("^(.*)$")
      //      )
    )
  /**
   * Produces candidate replacement strings for a type-mismatch annotation.
   * @param foundType extracted found type string
   * @param requiredType extracted required type string
   * @param annotationString extracted expression string from the source code
   * @return list of strings which should replace the annotation string,
   *         concatenated in declaration order of `cases`
   */
  def apply(foundType: String, requiredType: String, annotationString: String): List[String] =
    // flatMap replaces the deprecated `/:` foldLeft alias and the manual
    // `list ++ ...` accumulation; the resulting order is identical.
    cases.flatMap(_.apply(foundType, requiredType, annotationString))
}
/** Marker trait grouping all type mismatch quick fix cases. */
trait TypeMismatchQuickFixCase
/**
 * Base class for quick fixes that simply inject sequences of strings into format strings
 * of the form "... %s... %s...".
 *
 * @param formatStrings format strings into which the inject strings are substituted
 */
abstract class SimpleFormatQuickFixCase(formatStrings: List[String]) extends TypeMismatchQuickFixCase {

  /**
   * Applies every sequence of inject strings to every format string.
   *
   * @param listOfInjectStrings sequences of strings to substitute into the format strings
   * @return one formatted string per (inject sequence, format string) combination
   */
  def apply(listOfInjectStrings: Seq[String]*): Seq[String] =
    for {
      // iterate through all sequences of strings to inject
      injectString <- listOfInjectStrings
      // iterate through all given format strings
      formatString <- formatStrings
      // yield a string when inject strings are applied to the format string
    } yield formatString.format(injectString: _*)
}
/**
 * Checks whether the found type string and required type string match: it captures all
 * groups according to the found and required patterns and compares them pairwise. If all
 * groups match, the replacement proceeds by extracting inject strings with the annotation
 * pattern and applying them through [[SimpleFormatQuickFixCase]].
 *
 * The found and required patterns should extract the same number of groups, and the
 * annotation pattern should extract enough groups to feed the given format strings.
 */
case class FoundToRequiredTypeCase(formatStrings: List[String],
  found: Pattern, required: Pattern, annotationExtract: Pattern) extends SimpleFormatQuickFixCase(formatStrings) {

  /**
   * @return the replacement strings, or Nil when the patterns or captured groups do not match
   */
  def apply(foundType: String, requiredType: String, annotationString: String): Seq[String] = {
    val foundMatcher = found.matcher(foundType)
    val requiredMatcher = required.matcher(requiredType)
    // Both patterns must match (only a single match is expected), and every captured
    // group must agree between the two matchers (capturing group count must be the same
    // for both patterns). `forall` replaces the previous deprecated `(true /: ...)` fold
    // and preserves its short-circuit behavior.
    if (foundMatcher.find && requiredMatcher.find &&
        (1 to foundMatcher.groupCount()).forall(ind => foundMatcher.group(ind) == requiredMatcher.group(ind))) {
      val annotationMatcher = annotationExtract.matcher(annotationString)
      // check if find can pass (only a single match is expected)
      if (annotationMatcher.find) {
        // collect the captured groups and inject them into the format strings
        val injectStrings =
          for (ind <- 1 to annotationMatcher.groupCount()) yield annotationMatcher.group(ind)
        super.apply(injectStrings)
      } else Nil // annotation pattern did not match
    } else Nil // patterns failed to match or captured groups differ
  }
}
| scala-ide/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/quickassist/TypeMismatchQuickFixProcessor.scala | Scala | bsd-3-clause | 5,338 |
package com.github.pavlov99
/**
 * Immutable treap: a binary search tree on `key` that simultaneously keeps a
 * max-heap property on `priority` (which keeps the tree shallow in expectation
 * when priorities are random).
 *
 * NOTE(review): view bounds (`<%`) are deprecated in recent Scala versions;
 * migrating would mean replacing them with implicit `K => Ordered[K]` evidence.
 */
abstract sealed class Treap[+K <% Ordered[K], +P <% Ordered[P]]{
  def key: K // root key; throws for an empty treap
  def priority: P // root priority; throws for an empty treap
  def left: Treap[K, P] // left subtree; throws for an empty treap
  def right: Treap[K, P] // right subtree; throws for an empty treap
  def size: Int // number of nodes in this treap
  def isEmpty: Boolean

  /** Renders "." for an empty treap and "{left(key, priority)right}" otherwise. */
  override def toString: String =
    if (isEmpty) {
      "."
    } else {
      "{" + left + "(" + key + ", " + priority + ")" + right + "}"
    }

  /** Structural equality: same shape with equal keys and priorities at every node. */
  override def equals(that: Any): Boolean =
    that match {
      // NOTE(review): the type arguments are erased at runtime, so this case only
      // checks that `that` is some Treap; element types are not verified here.
      case that: Treap[K, P] =>
        if (isEmpty && that.isEmpty) {
          true
        } else if (isEmpty ^ that.isEmpty) {
          false
        } else {
          key == that.key && priority == that.priority &&
            left == that.left && right == that.right
        }
      case _ => false
    }

  // Consistent with equals; an empty treap hashes to 0.
  override def hashCode: Int =
    if (isEmpty) 0 else key.hashCode + priority.hashCode + left.hashCode + right.hashCode

  /**
   * Splits the treap at `k`: the first component holds keys smaller than `k`,
   * the second holds keys greater than or equal to `k`.
   */
  def split[K1 >: K <% Ordered[K1]](k: K1): (Treap[K, P], Treap[K, P]) =
    if (isEmpty) {
      (Treap.empty[K, P], Treap.empty[K, P])
    } else if (k > key) {
      right.split(k) match {
        case (t1, t2) => (Treap.make(key, priority, left, t1), t2)
      }
    } else {
      left.split(k) match {
        case (t1, t2) => (t1, Treap.make(key, priority, t2, right))
      }
    }

  /**
   * Merges two treaps by choosing the higher-priority root at each step.
   * NOTE(review): no key comparison is performed here — correctness appears to
   * assume every key of `this` precedes every key of `t` (as produced by `split`);
   * confirm against callers before relying on it for arbitrary treaps.
   */
  def merge[K1 >: K <% Ordered[K1], P1 >: P <% Ordered[P1]](t: Treap[K1, P1]): Treap[K1, P1] =
    if (t.isEmpty) {
      this
    } else if (isEmpty) {
      t
    } else if (priority > t.priority) {
      Treap.make(key, priority, left, right merge t)
    } else {
      Treap.make(t.key, t.priority, this merge t.left, t.right)
    }

  /** Operator alias for `merge`. */
  // scalastyle:off method.name
  def ++[K1 >: K <% Ordered[K1], P1 >: P <% Ordered[P1]](t: Treap[K1, P1]): Treap[K1, P1] = merge(t)
  // scalastyle:on

  /** Inserts (k, p): split at k, then merge the singleton node in between. */
  def insert[K1 >: K <% Ordered[K1], P1 >: P <% Ordered[P1]](k: K1, p: P1): Treap[K1, P1] =
    split(k) match {
      case(t1, t2) => t1.merge(Treap.make(k, p)).merge(t2)
    }

  /** Operator alias for `insert`. */
  // scalastyle:off method.name
  def :+[K1 >: K <% Ordered[K1], P1 >: P <% Ordered[P1]](k: K1, p: P1): Treap[K1, P1] = insert(k, p)
  // scalastyle:on

  def head: (K, P) = (key, priority) // the root's (key, priority) pair
  def tail: Treap[K, P] = left merge right // the treap without its root
  def extract(): ((K, P), Treap[K, P]) = (head, tail) // root pair plus the remainder

  /** Removes `k`; throws RuntimeException when the key is absent or the treap is empty. */
  def remove[K1 >: K <% Ordered[K1], P1 >: P <% Ordered[P1]](k: K1): Treap[K1, P1] =
    if (isEmpty) {
      throw new RuntimeException("An empty tree.")
    } else if (k < key) {
      Treap.make(key, priority, left.remove(k), right)
    } else if (k > key) {
      Treap.make(key, priority, left, right.remove(k))
    } else {
      left merge right
    }

  /** Standard binary search tree lookup. */
  def contains[K1 >: K <% Ordered[K1]](k: K1): Boolean =
    if (isEmpty) {
      false
    } else if (k == key) {
      true
    } else if (k < key) {
      left.contains(k)
    } else {
      right.contains(k)
    }

  def height: Int = if (isEmpty) 0 else 1 + math.max(left.height, right.height)

  /** The smallest (key, priority) pair: walks the left spine; throws when empty. */
  def min: (K, P) = {
    def loop(t: Treap[K, P], k: K, p: P): (K, P) =
      if (t.isEmpty) (k, p) else loop(t.left, t.key, t.priority)
    if (isEmpty) {
      throw new RuntimeException("An empty tree.")
    } else {
      loop(left, key, priority)
    }
  }

  /** The largest (key, priority) pair: walks the right spine; throws when empty. */
  def max: (K, P) = {
    def loop(t: Treap[K, P], k: K, p: P): (K, P) =
      if (t.isEmpty) (k, p) else loop(t.right, t.key, t.priority)
    if (isEmpty) {
      throw new RuntimeException("An empty tree.")
    } else {
      loop(right, key, priority)
    }
  }

  /** In-order (ascending key) fold over all (key, priority) pairs. */
  def fold[A](accu: A)(op: (A, (K, P)) => A): A = {
    def loop(t: Treap[K, P], accu: A): A =
      if (t.isEmpty) accu else loop(t.right, op(loop(t.left, accu), (t.key, t.priority)))
    loop(this, accu)
  }
}
/** The empty treap: every structural accessor fails, size is zero. */
case object Leaf extends Treap[Nothing, Nothing] {
  // Single place to raise the failure shared by all accessors of the empty tree.
  private def emptyFailure: Nothing = throw new RuntimeException("An empty tree.")

  def key: Nothing = emptyFailure
  def priority: Nothing = emptyFailure
  def left: Treap[Nothing, Nothing] = emptyFailure
  def right: Treap[Nothing, Nothing] = emptyFailure
  def size: Int = 0
  def isEmpty: Boolean = true
}
/**
 * A non-empty treap node.
 *
 * @param key node key (binary search tree order)
 * @param priority node priority (heap order)
 * @param left left subtree
 * @param right right subtree
 * @param size cached number of nodes in this subtree
 */
case class Branch[K <% Ordered[K], P <% Ordered[P]](
  key: K,
  priority: P,
  left: Treap[K, P],
  right: Treap[K, P],
  size: Int) extends Treap[K, P]{
  def isEmpty: Boolean = false
}
/** Factory methods for [[Treap]]. */
object Treap {

  /** The empty treap. */
  def empty[K, P]: Treap[K, P] = Leaf

  /** Builds a node whose cached size is derived from its children. */
  def make[K <% Ordered[K], P <% Ordered[P]](
    k: K,
    p: P,
    l: Treap[K, P] = Leaf,
    r: Treap[K, P] = Leaf): Treap[K, P] = Branch(k, p, l, r, l.size + r.size + 1)

  /** Builds a treap by inserting every (key, priority) pair in the given order. */
  def apply[K <% Ordered[K], P <% Ordered[P]](xs: (K, P)*): Treap[K, P] =
    xs.foldLeft(empty[K, P]) { (tree, pair) =>
      val (k, p) = pair
      tree.insert(k, p)
    }
}
| pavlov99/scalastructures | src/main/scala/com/github/pavlov99/tree/Treap.scala | Scala | mit | 4,596 |
package org.bowlerframework.view.scuery.stub
/**
 * Stub component: an empty subclass of [[MySimpleComponent]] that inherits all
 * behavior unchanged (used by the scuery stub/test package).
 */
class ExtendingComponent extends MySimpleComponent
/*
* Copyright 2012 杨博 (Yang Bo)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dongxiguo.commons.continuations
import java.nio._
import java.util.concurrent._
import java.nio.channels._
import scala.util.continuations._
import scala.annotation._
import com.dongxiguo.fastring.Fastring.Implicits._
package object io {
  // Logger triple (logger, formatter, appender) supplied by the ZeroLogger factory.
  implicit private val (logger, formatter, appender) = ZeroLoggerFactory.newLogger(this)
  import formatter._

  /**
   * Completion callback that resumes a captured continuation once an
   * asynchronous gathering write finishes or fails.
   */
  private object WriteHandler
    extends CompletionHandler[java.lang.Long, Long => Unit] {
    override final def completed(
      bytesWritten: java.lang.Long,
      handler: Long => Unit) {
      try {
        handler(bytesWritten)
      } catch {
        // The continuation is user code; log whatever it throws so the failure is
        // not silently swallowed by the NIO completion thread.
        case e: Throwable =>
          logger.severe(
            "Exception is thrown in continuation when handling a completed asynchronous writing.",
            e)
      }
    }

    /**
     * Errors are not handled at the write side; they are handled at the read side
     * instead. (Translated from the original Chinese comment.) The continuation
     * is resumed with -1 to flag the failure.
     */
    override final def failed(throwable: Throwable, handler: Long => Unit) {
      logger.fine("Asynchronous operation is failed.", throwable)
      handler(-1)
    }
  }

  /**
   * Writes all remaining bytes of the selected buffers to the socket.
   *
   * As long as the connection stays open the write will succeed; if the
   * connection is broken the write fails without any notification.
   * (Translated from the original Chinese comment.)
   *
   * @param socket the asynchronous socket channel to write to
   * @param buffers the buffers to drain
   * @param bufferOffset index of the first buffer to write
   * @param bufferLength number of buffers to write
   * @param timeout timeout for each underlying write
   * @param unit time unit of `timeout`
   */
  final def writeAll(
    socket: AsynchronousSocketChannel,
    buffers: Array[ByteBuffer],
    bufferOffset: Int,
    bufferLength: Int,
    timeout: Long,
    unit: TimeUnit): Unit @suspendable = {
    logger.fine{ fast"writeAll for ${timeout.toString}${unit.toString}" }
    logger.finest(buffers.mkString)
    // Suspend the continuation until the gathering write completes; WriteHandler
    // resumes it with the number of bytes written (or -1 on failure).
    val bytesWritten = shift { (continue: Long => Unit) =>
      socket.write(
        buffers,
        bufferOffset,
        bufferLength,
        timeout,
        unit,
        continue,
        WriteHandler)
    }
    if (bytesWritten > 0) {
      // Skip over buffers that were fully drained (nulling them out as we go) and
      // return the index of the first buffer that still has remaining bytes.
      @tailrec
      def getNewOffset(i: Int): Int = {
        if (i >= bufferOffset + bufferLength) {
          i
        } else {
          val buffer = buffers(i)
          if (buffer.hasRemaining) {
            i
          } else {
            buffers(i) = null
            getNewOffset(i + 1)
          }
        }
      }
      val newOffset = getNewOffset(bufferOffset)
      val newLength = bufferLength - (newOffset - bufferOffset)
      if (newLength > 0) {
        // Some buffers still have pending bytes: keep writing the remainder.
        writeAll(socket, buffers, newOffset, newLength, timeout, unit)
      }
    }
  }

  /**
   * Writes all given buffers to the socket, starting at the first buffer.
   *
   * As long as the connection stays open the write will succeed; if the
   * connection is broken the write fails without any notification.
   * (Translated from the original Chinese comment.)
   */
  final def writeAll(
    socket: AsynchronousSocketChannel,
    buffers: Array[ByteBuffer],
    timeout: Long,
    unit: TimeUnit): Unit @suspendable =
    writeAll(socket, buffers, 0, buffers.length, timeout, unit)
}
// vim: set ts=2 sw=2 et:
| Atry/commons-continuations | src/main/scala/com/dongxiguo/commons/continuations/io/package.scala | Scala | apache-2.0 | 3,247 |
//@
package xyz.hyperreal.energize
import org.apache.http.client.fluent.Request
import xyz.hyperreal.bvm.VM
import xyz.hyperreal.json.DefaultJSONReader
object ClientFunctions {
/**
 * Performs a blocking HTTP GET against `url` (1 second connect and socket
 * timeouts, "Accept: application/json" header) and parses the response body
 * as JSON.
 *
 * @param vm the VM instance (not used by this method)
 * @param url the URL to fetch
 * @return the parsed JSON value
 */
def requestJSON( vm: VM, url: String ): Any =
  DefaultJSONReader.fromString(
    Request.Get(url)
      .addHeader( "Accept", "application/json" )
      .connectTimeout(1000)
      .socketTimeout(1000)
      .execute.returnContent.asString )
} | vinctustech/energize | src/main/scala/ClientFunctions.scala | Scala | isc | 441 |
package scalakurs.myfutures
import support.FlatSpecHelper
import work._
import org.scalatest.matchers.ShouldMatchers
import scala.concurrent._
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Exercises the `MyFutures` workshop solutions: squaring values, factoring
 * numbers, falling back from risky to safe work, and combining collections of
 * futures. `checkImplemented` and `time` are helpers from the test support code.
 */
class MyFuturesTest extends FlatSpecHelper with ShouldMatchers {

  // Builds a FactorNumber work item whose execution takes twice the allowed
  // future time limit, so the tests can observe (non-)blocking behavior.
  def delayFactorNumber(n: Long): FactorNumber = new FactorNumber(n, FlatSpecHelper.FUTURE_TIME_LIMIT * 2)

  it should "compute square" in {
    checkImplemented {
      val future = time {
        MyFutures.computeSquare(4)
      }
      val result = Await.result(future, Duration.Inf)
      result should equal (16)
    }
  }

  it should "compute square of future value" in {
    checkImplemented {
      // the input itself is a future that resolves after a delay
      val futureValue = future {
        Thread.sleep(FlatSpecHelper.FUTURE_TIME_LIMIT * 2)
        4
      }
      val futureResult = time {
        MyFutures.computeSquare(futureValue)
      }
      val result = Await.result(futureResult, Duration.Inf)
      result should equal (16)
    }
  }

  it should "find max factor" in {
    checkImplemented {
      val work = delayFactorNumber(49L)
      val futureResult = time {
        MyFutures.findMaxFactor(work)
      }
      val result = Await.result(futureResult, Duration.Inf)
      result should equal (7L)
    }
  }

  it should "find max factor of future factors" in {
    checkImplemented {
      val futureFactors = future {
        delayFactorNumber(49L)
      }
      val futureResult = time {
        MyFutures.findMaxFactor(futureFactors)
      }
      val result = Await.result(futureResult, Duration.Inf)
      result should equal (7L)
    }
  }

  it should "do risky work or fallback on safe work" in {
    checkImplemented{
      // Each work will exceed the time limit
      val shouldNotDoWork = new SumSequence(0, 4, FlatSpecHelper.FUTURE_TIME_LIMIT + 1)
      val safeWork = new SumSequence(0, 5, FlatSpecHelper.FUTURE_TIME_LIMIT + 1)
      val riskyWork = new SumSequence(-1, 6, FlatSpecHelper.FUTURE_TIME_LIMIT + 1)
      val futureSafeResult = time {
        MyFutures.computeRiskySumFallbackOnSafeSum(safeWork, shouldNotDoWork)
      }
      val futureSafeResult2 = time {
        MyFutures.computeRiskySumFallbackOnSafeSum(riskyWork, safeWork)
      }
      val result = Await.result(futureSafeResult, Duration.Inf)
      val result2 = Await.result(futureSafeResult2, Duration.Inf)
      result should equal (15)
      result2 should equal (15)
    }
  }

  it should "find sum of all max factors" in {
    checkImplemented {
      val work1 = Seq(delayFactorNumber(21L), delayFactorNumber(49L), delayFactorNumber(12L))
      val work2 = Seq(delayFactorNumber(51L), delayFactorNumber(81L))
      val futureResult1 = time {
        MyFutures.findSumOfAllMaxFactors(work1)
      }
      val futureResult2 = time {
        MyFutures.findSumOfAllMaxFactors(work2)
      }
      val result1 = Await.result(futureResult1, Duration.Inf)
      result1 should equal (20L)
      val result2 = Await.result(futureResult2, Duration.Inf)
      result2 should equal (44L)
    }
  }

  it should "find max factor of all max factors in parallel" in {
    checkImplemented {
      // Each work will take at least 100 milliseconds
      val work = Seq(delayFactorNumber(49L), delayFactorNumber(12L), delayFactorNumber(21L), delayFactorNumber(54L))
      val futureResult = time {
        MyFutures.findMaxFactorOfAllMaxFactorsInParallel(work)
      }
      val t1 = System.currentTimeMillis()
      val result = Await.result(futureResult, Duration.Inf)
      result should equal (27)
      // parallel execution must beat the sequential worst case of the four work items
      val totalExecutionTime = System.currentTimeMillis() - t1
      totalExecutionTime should be < (FlatSpecHelper.FUTURE_TIME_LIMIT * 7)
      println("Parallel execution time: " + totalExecutionTime)
    }
  }
}
/**
* @author Carol Alexandru
* @author Silvan Troxler
*
* Copyright 2013 University of Zurich
*
* Licensed below the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed below the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations below the License.
*/
package com.signalcollect.console
import java.io.PrintWriter
import java.io.StringWriter
import scala.collection.JavaConversions.propertiesAsScalaMap
import scala.reflect.runtime.universe._
import org.json4s.DefaultFormats
import org.json4s.Extraction.decompose
import org.json4s.JObject
import org.json4s.JString
import org.json4s.JValue
import org.json4s.JsonDSL.double2jvalue
import org.json4s.JsonDSL.int2jvalue
import org.json4s.JsonDSL.jobject2assoc
import org.json4s.JsonDSL.long2jvalue
import org.json4s.JsonDSL.pair2Assoc
import org.json4s.JsonDSL.pair2jvalue
import org.json4s.JsonDSL.seq2jvalue
import org.json4s.JsonDSL.string2jvalue
import org.json4s.jvalue2extractable
import org.json4s.jvalue2monadic
import org.json4s.native.JsonMethods.compact
import org.json4s.native.JsonMethods.parse
import org.json4s.native.JsonMethods.render
import org.json4s.string2JsonInput
import com.signalcollect.ExecutionConfiguration
import com.signalcollect.ExecutionStatistics
import com.signalcollect.Vertex
import com.signalcollect.interfaces.Coordinator
import com.signalcollect.interfaces.Logger
import com.signalcollect.interfaces.NodeStatistics
import com.signalcollect.interfaces.WorkerStatistics
/** Interface every DataProvider implements: produce a JSON reply for a client request. */
abstract class DataProvider {

  /** Builds the JSON object that is sent back to the client. */
  def fetch(): JObject

  /** Convenience wrapper producing an "invalid request" reply via [[InvalidDataProvider]]. */
  def fetchInvalid(msg: JValue = JString(""),
    comment: String): JObject =
    new InvalidDataProvider(msg, comment).fetch
}
/**
 * DataProvider wrapping an exception so its stack trace can be shipped to the client.
 *
 * @param e the exception that has been thrown
 */
class ErrorDataProvider(e: Exception) extends DataProvider {

  /** Renders the wrapped exception's stack trace into a string. */
  def fetchStacktrace(): String = {
    val writer = new StringWriter()
    e.printStackTrace(new PrintWriter(writer))
    writer.toString
  }

  /** JSON reply carrying the error marker and the stack trace. */
  def fetch(): JObject = {
    ("provider" -> "error") ~
      ("msg" -> "An exception occured") ~
      ("stacktrace" -> fetchStacktrace())
  }
}
/**
* DataProvider used when invalid communication occurs.
*
* When an invalid request is received, this provider is used to supply the
* original message that caused the problem plus a comment that could further
* explain the problem.
*
* @constructor create a new InvalidDataProvider
* @param msg the original request that caused the problem
* @param comment more information on why the request is invalid
*/
class InvalidDataProvider(msg: JValue, comment: String = "No comment") extends DataProvider {
  def fetch(): JObject = {
    ("provider" -> "invalid") ~
      // echo the offending request back to the client as a compact JSON string
      ("msg" -> compact(render(msg))) ~
      ("comment" -> comment)
  }
}
/**
* DataProvider used when another data provider can't satisfy the request.
*
* It can occur that a valid request cannot be satisfied, for example because
* the underlying data is not available yet. In that case, this data provider
* is used to inform the client of the problem
*
* @constructor create a new NotReadyDataProvider
* @param msg the original request
*/
class NotReadyDataProvider(msg: String) extends DataProvider {
  implicit val formats = DefaultFormats
  // parse the original request to recover which provider ("requestor") it addressed
  val j = parse(msg)
  val p = (j \\ "requestor").extract[String]
  def fetch(): JObject = {
    ("provider" -> "notready") ~
      ("requestor" -> p) ~
      ("msg" -> "The signal/collect computation is not ready yet") ~
      ("request" -> msg)
  }
}
/**
* DataProvider used to fetch the current state of the computation.
*
* @constructor create a new StateDataProvider
* @param socket the WebSocketConsoleServer
*/
class StateDataProvider[Id, Signal](socket: WebSocketConsoleServer[Id, Signal])
  extends DataProvider {
  def fetch(): JObject = {
    val reply: JObject = socket.execution match {
      // interactive execution: report its state, step tokens and iteration count
      case Some(e) =>
        ("state", e.state) ~
          (("steps", e.stepTokens)) ~
          (("iteration", e.iteration))
      // non-interactive: fall back to whatever configuration/statistics are known
      case None => socket.executionConfiguration match {
        case Some(ec: ExecutionConfiguration[Id, Signal]) => socket.executionStatistics match {
          case Some(es: ExecutionStatistics) =>
            ("mode", ec.executionMode.toString) ~
              (("state", es.terminationReason.toString)) ~
              (("totalExecutionTime", es.totalExecutionTime.toString)) ~
              (("computationTime", es.computationTime.toString))
          case other =>
            ("mode", ec.executionMode.toString())
        }
        case other =>
          // neither execution nor configuration available yet
          ("state", "undetermined")
      }
    }
    ("provider", "state") ~ reply
  }
}
/**
* DataProvider which serves execution, graph and system configurations.
*
* The execution configuration is taken from the WebSocketConsoleServer which
* owns the execution mode object. The graph configuration is taken from the
* coordinator. Finally, the system information is taken from the Java
* System.getProperties collection.
*
* @constructor create a new ConfigurationDataProvider
* @param socket the WebSocketConsoleServer (who knows the exec conf)
* @param coordinator the Coordinator (who knows the graph conf)
*/
class ConfigurationDataProvider[Id: TypeTag, Signal: TypeTag](socket: WebSocketConsoleServer[Id, Signal],
  coordinator: Coordinator[Id, Signal])
  extends DataProvider {
  def fetch(): JObject = {
    // the execution configuration is only known once a computation has been set up
    val executionConfiguration = socket.executionConfiguration match {
      case Some(e: ExecutionConfiguration[Id, Signal]) => Toolkit.unpackObject(e)
      case otherwise => JString("unknown")
    }
    ("provider" -> "configuration") ~
      ("executionConfiguration" -> executionConfiguration) ~
      ("graphConfiguration" -> Toolkit.unpackObjects(Array[AnyRef](socket.graphConfiguration))) ~
      // JVM system properties, exposed as a map
      ("systemProperties" -> propertiesAsScalaMap(System.getProperties))
  }
}
/**
* DataProvider which serves akka log messages from the coordinator.
*
* @constructor create a new LogDataProvider
* @param coordinator the Coordinator
*/
class LogDataProvider[Id](logger: Logger) extends DataProvider {
  // wraps the log messages collected by the coordinator-side logger
  def fetch(): JObject = {
    ("provider" -> "log") ~
      ("messages" -> logger.getLogMessages)
  }
}
/** Data structure used to model client control requests. */
case class ControlsRequest(
  control: Option[String]) // requested command, e.g. "step", "pause", "terminate"
/**
* Provider that accepts execution commands.
*
* @constructor create a new ControlsProvider
* @param socket the WebSocketConsoleServer
* @param msg the request by the client
*/
class ControlsProvider[Id, Signal](socket: WebSocketConsoleServer[Id, Signal],
  msg: JValue) extends DataProvider {

  implicit val formats = DefaultFormats
  var execution: Option[Execution] = socket.execution

  /**
   * Performs a single command on the interactive execution.
   * NOTE(review): this match is non-exhaustive; it relies on `fetch` only forwarding
   * the six commands listed here — any other string would throw a MatchError.
   */
  def command(e: Execution, command: String): JObject = {
    command match {
      case "step" => e.step
      case "collect" => e.collect
      case "pause" => e.pause
      case "continue" => e.continue
      case "reset" => e.reset
      case "terminate" => e.terminate
    }
    ("msg" -> "command accepted")
  }

  /** Parses the control request and routes validated actions to `command`. */
  def fetch(): JObject = {
    val request = (msg).extract[ControlsRequest]
    val reply = execution match {
      case Some(e) => request.control match {
        case Some(action) => action match {
          case "step" | "collect" | "pause" | "continue" | "reset" | "terminate" =>
            command(e, action)
          case otherwise => fetchInvalid(msg, "invalid control!")
        }
        case None => fetchInvalid(msg, "missing command!")
      }
      case None => fetchInvalid(msg, "interactive execution is unavailable!")
    }
    ("provider" -> "controls") ~
      reply
  }
}
/** Data structure used to model client break condition requests. */
case class BreakConditionsRequest(
  action: Option[String], // "add" or "remove"
  name: Option[String], // BreakConditionName of the condition to add
  id: Option[String], // id of the condition to remove
  props: Option[Map[String, String]]) // condition-specific properties

/** Data structure used to model break-condition replies to the client. */
case class BreakConditionContainer(
  id: String,
  name: String,
  props: Map[String, String])
/**
* Provider that can be used to add, remove and check on break conditions.
*
* The break conditions are stored with the interactive execution mode. Each
* condition has a unique ID which it receives when it's being created. During
* the computation, various break conditions will be checked at different
* times.
*
* Whether the client supplies an action (to add or remove a condition) or not,
* the provider will always answer with a list of configured conditions and a
* list of reached conditions. If the action could not be performed (e.g. if
* an invalid vertex ID has been supplied) an error message will be included.
*
* @constructor create a new BreakConditionsProvider
* @param coordinator the Coordinator
* @param socket the WebSocketConsoleServer
* @param msg the request by the client
*/
class BreakConditionsProvider[Id, Signal](coordinator: Coordinator[Id, Signal],
  socket: WebSocketConsoleServer[Id, Signal],
  msg: JValue) extends DataProvider {

  implicit val formats = DefaultFormats
  var execution: Option[Execution] = socket.execution
  val workerApi = coordinator.getWorkerApi

  /** Builds the standard reply: all active conditions plus the ones already reached. */
  def fetchConditions(e: Execution): JObject = {
    val active = e.conditions.map {
      case (id, c) =>
        Toolkit.unpackObject(BreakConditionContainer(id, c.name.toString, c.props.toMap))
    }.toList
    val reached = decompose(e.conditionsReached)
    ("provider" -> "breakconditions") ~
      ("active" -> active) ~
      ("reached" -> reached)
  }

  def fetch(): JObject = {
    execution match {
      case Some(e) =>
        // add or remove conditions
        val request = (msg).extract[BreakConditionsRequest]
        request.action match {
          // NOTE(review): actions other than "add"/"remove" are not handled here and
          // would throw a MatchError; presumably the client only sends these two.
          case Some(action) => action match {
            case "add" => request.name match {
              case Some(name) =>
                try {
                  // resolves the condition name; throws NoSuchElementException when unknown
                  val n = BreakConditionName.withName(name)
                  request.props match {
                    case Some(props) =>
                      socket.executionConfiguration match {
                        case Some(c) =>
                          try {
                            // Create the condition. The input is validated
                            // inside the constructor of BreakCondition and
                            // when a requirement fails, an exception is thrown
                            val condition = new BreakCondition(socket.graphConfiguration,
                              c, n, props, workerApi)
                            e.addCondition(condition)
                            fetchConditions(e)
                          } catch {
                            case ex: IllegalArgumentException =>
                              // reply with the current conditions plus the validation error
                              fetchConditions(e) ~
                                ("error" -> ex.getMessage.toString)
                          }
                        case None => fetchInvalid(msg, "executionConfiguration unavailable!")
                      }
                    case None => fetchInvalid(msg, "missing props!")
                  }
                } catch {
                  case e: NoSuchElementException =>
                    fetchInvalid(msg, "invalid Name!")
                }
              case None => fetchInvalid(msg, "missing name!")
            }
            case "remove" => request.id match {
              case Some(id) =>
                e.removeCondition(id)
                fetchConditions(e)
              case None => fetchInvalid(msg, "Missing id!")
            }
          }
          // no action: just report the current conditions
          case None => fetchConditions(e)
        }
      case None =>
        // no interactive execution: break conditions are unavailable
        ("provider" -> "breakconditions") ~
          ("status" -> "noExecution")
    }
  }
}
/** Data structure used to model client graph data requests. */
case class GraphDataRequest(
  vertexIds: Option[List[String]], // explicit vertex ids to load
  vicinityRadius: Option[Int], // how many hops around the selected vertices
  vicinityIncoming: Option[Boolean], // whether to follow incoming edges as well
  exposeVertices: Option[Boolean],
  query: Option[String], // "substring" | "vertexIds" | "top"
  targetCount: Option[Int], // maximum number of vertices to return
  topCriterium: Option[String], // ranking criterion for the "top" query
  substring: Option[String], // substring to match against vertex ids
  signalThreshold: Option[Double],
  collectThreshold: Option[Double])
/**
* Provider that can be used to query the graph.
*
* @constructor create a new GraphDataProvider
* @param coordinator the Coordinator
* @param msg the request by the client
*/
class GraphDataProvider[Id, Signal](coordinator: Coordinator[Id, Signal], msg: JValue)
  extends DataProvider {

  implicit val formats = DefaultFormats
  val workerApi = coordinator.getWorkerApi

  // Defaults, overridden from the request in fetch() with hard upper bounds so a
  // single client query cannot trigger arbitrarily expensive aggregations.
  var vertexIdStrings = List[String]()
  var targetCount = 5
  var vicinityRadius = 0
  var vicinityIncoming = false
  var exposeVertices = false
  var signalThreshold = 0.01
  var collectThreshold = 0.0

  /**
   * Recursively load the vicinity of a vertex.
   *
   * The vicinity of a vertex is the set of vertices that can be reached by
   * traveling a maximum of {{radius}} times along the edges leading away
   * from the vertex. A {{radius}} of 1 denotes only the vertices that share
   * an edge with the node. Because a vertex already knows its outgoing edges,
   * it is cheap to find the outgoing vicinity. However, for the incoming
   * vicinity, an aggregation has to be performed to find the vertices that
   * target one of the vertices we're interested in, hence it's an expensive
   * operation.
   *
   * @param sourceIds set of vertex IDs to use in the search
   * @param radius how far to travel from the source vertices
   * @param incoming also consider incoming edges (costs {{radius}} aggregations)
   * @return the original set plus the set of vertex IDs in the vicinity
   */
  def findVicinity(sourceIds: Set[Id], radius: Int = 3,
    incoming: Boolean = false): Set[Id] = {
    if (radius == 0) { sourceIds }
    else {
      if (incoming) {
        // expensive: one aggregation over all workers per hop
        val vicinityIds = workerApi.aggregateAll(
          new FindVertexVicinitiesByIdsAggregator[Id](sourceIds))
        sourceIds ++ findVicinity(vicinityIds, radius - 1, true)
      } else {
        // cheap: each vertex already knows its outgoing target ids
        sourceIds ++ findVicinity(sourceIds.map { id =>
          workerApi.forVertexWithId(id, { vertex: Vertex[Id, _, _, _] =>
            vertex.targetIds.asInstanceOf[Traversable[Id]].toSet
          })
        }.flatten, radius - 1, false)
      }
    }
  }

  /**
   * Fetch a JObject representation of the vertices and edges of the graph.
   *
   * This function will load the vertices as supplied by the {{vertexIds}}
   * set but also any vertices in the object-scope variable
   * {{vertexIdStrings}}. The client will usually request a specific part of
   * the graph, but also supply a list of vertices that should be loaded.
   *
   * All the other fetch functions make use of this function to finalize
   * the request.
   *
   * @param vertexIds set of vertex IDs to load
   * @return the JObject containing the requested vertices and their edges
   */
  def fetchGraph(vertexIds: Set[Id] = Set[Id]()): JObject = {
    val vertices = workerApi.aggregateAll(
      new FindVerticesByIdsAggregator[Id](vertexIdStrings))
    val vicinityIds = findVicinity(vertexIds ++ vertices.map { _.id }.toSet,
      vicinityRadius, vicinityIncoming)
    val (lowestState, highestState, graph) =
      workerApi.aggregateAll(new GraphAggregator[Id](vicinityIds, exposeVertices))
    // state bounds let the client scale its visualization
    ("highestState" -> highestState) ~
      ("lowestState" -> lowestState) ~
      graph
  }

  /** Fetch vertices ordered by their highest or lowest state. */
  def fetchByTopState(inverted: Boolean = false): JObject = {
    val topState = workerApi.aggregateAll(
      new TopStateAggregator[Id](targetCount, inverted)).take(targetCount)
    val vertexIds = topState.foldLeft(Set[Id]()) { (acc, m) => acc + m._2 }
    fetchGraph(vertexIds)
  }

  /** Fetch vertices ordered by their degree. */
  def fetchByTopDegree(): JObject = {
    val vertexIds = workerApi.aggregateAll(new TopDegreeAggregator[Id](targetCount))
      .toSeq.sortBy(-_._2)
      .take(targetCount)
      .map { _._1 }
      .toSet
    fetchGraph(vertexIds)
  }

  /** Fetch vertices with a {{scoreType}} score above the threshold. */
  def fetchByAboveThreshold(scoreType: String): JObject = {
    val threshold = if (scoreType == "signal") signalThreshold else collectThreshold
    val aboveThreshold = workerApi.aggregateAll(
      new AboveThresholdAggregator[Id](targetCount, scoreType, threshold)).take(targetCount)
    val vertexIds = aboveThreshold.foldLeft(Set[Id]()) { (acc, m) => acc + m._2 }
    fetchGraph(vertexIds)
  }

  /** Fetch vertices whose IDs contain the given substring {{s}}. */
  def fetchBySubstring(s: String): JObject = {
    val vertexIds = workerApi.aggregateAll(
      new FindVertexIdsBySubstringAggregator[Id](s, targetCount))
    fetchGraph(vertexIds)
  }

  /** Fetch a random sample of vertices. */
  def fetchSample: JObject = {
    val vertexIds = workerApi.aggregateAll(new SampleAggregator[Id](targetCount))
    fetchGraph(vertexIds)
  }

  def fetch(): JObject = {
    val request = (msg).extract[GraphDataRequest]
    // Override default values if any (capped: 1000 ids, targetCount <= 1000, radius <= 4)
    request.vertexIds match {
      case Some(ids) => vertexIdStrings = ids.take(1000)
      case otherwise =>
    }
    request.targetCount match {
      case Some(t) => targetCount = List(t, 1000).min
      case otherwise =>
    }
    request.vicinityRadius match {
      case Some(r) => vicinityRadius = List(r, 4).min
      case otherwise =>
    }
    request.vicinityIncoming match {
      case Some(b) => vicinityIncoming = b
      case otherwise =>
    }
    request.exposeVertices match {
      case Some(b) => exposeVertices = b
      case otherwise =>
    }
    request.signalThreshold match {
      case Some(t) => signalThreshold = t
      case otherwise =>
    }
    request.collectThreshold match {
      case Some(t) => collectThreshold = t
      case otherwise =>
    }
    // route request and fetch graph data
    val graphData = request.query match {
      case Some("substring") => request.substring match {
        case Some(s) => fetchBySubstring(s)
        case otherwise => fetchInvalid(msg, "missing substring")
      }
      case Some("vertexIds") => request.vertexIds match {
        case Some(ids) => fetchGraph()
        case otherwise => fetchInvalid(msg, "missing vertexIds")
      }
      case Some("top") => request.topCriterium match {
        case Some("Highest state") => fetchByTopState()
        case Some("Lowest state") => fetchByTopState(true)
        case Some("Highest degree") => fetchByTopDegree
        case Some("Above signal thresh.") => fetchByAboveThreshold("signal")
        case Some("Above collect thresh.") => fetchByAboveThreshold("collect")
        case Some("Sample") => fetchSample
        case otherwise => new InvalidDataProvider(msg, "invalid top criterium").fetch
      }
      case otherwise => fetchInvalid(msg, "missing query")
    }
    ("provider" -> "graph") ~
      graphData
  }
}
/**
 * Provider offering resource and other statistics about workers and nodes.
 *
 * @param coordinator the Coordinator
 * @param msg the request by the client
 */
class ResourcesDataProvider(coordinator: Coordinator[_, _], msg: JValue)
  extends DataProvider {

  /** Collects the global inbox size plus per-worker and per-node statistics. */
  def fetch(): JObject = {
    val inboxSize: Long = coordinator.getGlobalInboxSize
    val workerStats: Array[WorkerStatistics] =
      coordinator.getWorkerApi.getIndividualWorkerStatistics.toArray
    val nodeStats: Array[NodeStatistics] =
      coordinator.getWorkerApi.getIndividualNodeStatistics.toArray
    ("provider" -> "resources") ~
      ("timestamp" -> System.currentTimeMillis) ~
      ("inboxSize" -> inboxSize) ~
      ("workerStatistics" -> Toolkit.unpackObjects(workerStats)) ~
      ("nodeStatistics" -> Toolkit.unpackObjects(nodeStats))
  }
}
| mageru/signal-collect | src/main/scala/com/signalcollect/console/DataProvider.scala | Scala | apache-2.0 | 20,226 |
package app
import com.google.inject.Guice
import com.mohiva.play.silhouette.api.{ Logger, SecuredSettings }
import controllers.routes
import play.api._
import play.api.GlobalSettings
import play.api.i18n.{ Lang, Messages }
import play.api.mvc._
import play.api.mvc.Results._
import play.api.mvc.{ RequestHeader, Result }
import utils.di.SilhouetteModule
import controllers.headers._
import scala.concurrent.Future
/**
* The global object.
*/
/**
 * The concrete global-settings singleton picked up by Play at startup.
 *
 * Overrides the default HTTP error pages (400, 500, 404) with
 * application-specific responses. Each handler returns an already-completed
 * `Future` because no asynchronous work is required.
 */
object Global extends Global {

  /**
   * 400 - Bad request: echoes Play's error description back as plain text.
   *
   * Handling errors
   * http://alvinalexander.com/scala/handling-scala-play-framework-2-404-500-errors
   * https://www.playframework.com/documentation/2.3.x/ScalaGlobal
   */
  override def onBadRequest(request: RequestHeader, error: String) = {
    Future.successful(BadRequest("Bad Request: " + error))
  }

  /**
   * 500 - internal server error: renders the dedicated error template
   * with the throwable that caused the failure.
   */
  override def onError(request: RequestHeader, throwable: Throwable) = {
    Future.successful(InternalServerError(views.html.errors.onError(throwable)))
  }

  /**
   * 404 - Page not found: renders the dedicated "not found" template.
   *
   * @param request the request that matched no route
   * @return a completed future holding the 404 result
   */
  override def onHandlerNotFound(request: RequestHeader) = {
    Future.successful(NotFound(
      views.html.errors.onHandlerNotFound(request)
    ))
  }
}
/**
* The global configuration.
*/
/**
 * Base global configuration mixing Play's `GlobalSettings` with Silhouette's
 * `SecuredSettings` (authentication/authorization callbacks) and logging.
 */
trait Global extends GlobalSettings with SecuredSettings with Logger {

  /**
   * The Guice dependencies injector.
   */
  // Built once at startup; SilhouetteModule provides the authentication bindings.
  val injector = Guice.createInjector(new SilhouetteModule)

  /**
   * Loads the controller classes with the Guice injector,
   * in order to be able to inject dependencies directly into the controller.
   *
   * @param controllerClass The controller class to instantiate.
   * @return The instance of the controller class.
   * @throws Exception if the controller couldn't be instantiated.
   */
  override def getControllerInstance[A](controllerClass: Class[A]) = injector.getInstance(controllerClass)

  /**
   * Called when a user is not authenticated.
   *
   * As defined by RFC 2616, the status code of the response should be 401 Unauthorized.
   * NOTE(review): this implementation instead redirects to the sign-in page,
   * which is the conventional choice for browser-facing applications.
   *
   * @param request The request header.
   * @param lang The currently selected language.
   * @return The result to send to the client.
   */
  override def onNotAuthenticated(request: RequestHeader, lang: Lang): Option[Future[Result]] = {
    Some(Future.successful(Redirect(routes.UserController.signIn())))
  }

  /**
   * Called when a user is authenticated but not authorized.
   *
   * As defined by RFC 2616, the status code of the response should be 403 Forbidden.
   * NOTE(review): this implementation instead redirects to sign-in with a
   * flashed, localized "access.denied" message.
   *
   * @param request The request header.
   * @param lang The currently selected language.
   * @return The result to send to the client.
   */
  override def onNotAuthorized(request: RequestHeader, lang: Lang): Option[Future[Result]] = {
    Some(Future.successful(Redirect(routes.UserController.signIn()).flashing("error" -> Messages("access.denied"))))
  }
}
| danZzyy/SidewalkWebpage | sidewalk-webpage/app/Global.scala | Scala | mit | 2,887 |
package org.hammerlab.guacamole.alignment
import org.hammerlab.guacamole.alignment.AffineGapPenaltyAlignment.{align, scoreAlignmentPaths}
import org.hammerlab.guacamole.util.BasesUtil._
import org.scalatest.{FunSuite, Matchers}
/**
 * Tests for affine-gap-penalty pairwise alignment: scoring of alignment paths
 * and the CIGAR strings produced by `align`.
 */
class AffineGapPenaltyAlignmentSuite extends FunSuite with Matchers {

  test("score alignment: exact match") {
    // A perfect match should carry zero alignment cost.
    val alignments =
      scoreAlignmentPaths(
        "TCGA",
        "TCGA",
        mismatchProbability = 1e-2,
        openGapProbability = 1e-3,
        closeGapProbability = 1e-2
      )
    alignments(0)._3.toInt should be(0)
  }

  test("score alignment: single mismatch") {
    // One substitution raises the (rounded) path score to 5 under these penalties.
    val alignments =
      scoreAlignmentPaths(
        "TCGA",
        "TCCA",
        mismatchProbability = 1e-2,
        openGapProbability = 1e-3,
        closeGapProbability = 1e-2
      )
    math.round(alignments(0)._3) should be(5)
  }

  test("align exact match") {
    align("TCGA", "TCGA").toCigarString should be("4=")
  }

  test("align: single mismatch") {
    align("TCGA", "TCCA").toCigarString should be("2=1X1=")
  }

  test("align long exact match") {
    // The CIGAR for a full-length match is just "<length>=".
    val sequence = "TCGATGATCTGAGA"
    align(sequence, sequence).toCigarString should be(sequence.length.toString + "=")
  }

  test("short align with insertion; left aligned") {
    // Ambiguous insertions are expected to be reported at the leftmost position.
    align("TCCGA", "TCGA").toCigarString should be("1=1I3=")
  }

  test("long align with insertion") {
    align("TCGACCCTCTGA", "TCGATCTGA").toCigarString should be("4=3I5=")
  }

  test("long align with deletion") {
    align("TCGATCTGA", "TCGACCCTCTGA").toCigarString should be("4=3D5=")
  }

  test("mixed mismatch and insertion") {
    align("TCGACCCTCTTA", "TCGATCTGA").toCigarString should be("4=3I3=1X1=")
  }

  test("only mismatch long sequence") {
    // Two isolated substitutions in a 101bp read; the ruler comment marks them.
    val alignment =
      align(
        // ====================X===================X============================================================
        "ATTCTCAAGTTTTAAGTGGTATTCTAATTATGGCAGTAATTAACTGAATAAAGAGATTCATCATGTGCAAAAACTAATCTTGTTTACTTAAAATTGAGAGT",
        "ATTCTCAAGTTTTAAGTGGTTTTCTAATTATGGCAGTAATAAACTGAATAAAGAGATTCATCATGTGCAAAAACTAATCTTGTTTACTTAAAATTGAGAGT"
      )
    alignment.toCigarString should be("20=1X19=1X60=")
  }

  test("2 mismatch with deletion sequence") {
    // Two substitutions plus a 3bp deletion relative to the reference.
    val alignment =
      align(
        // ====================X===================X========================================DDD====================
        "ATTCTCAAGTTTTAAGTGGTATTCTAATTATGGCAGTAATTAACTGAATAAAGAGATTCATCATGTGCAAAAACTAATCTT"+"GTTTACTTAAAATTGAGAGT",
        "ATTCTCAAGTTTTAAGTGGTTTTCTAATTATGGCAGTAATAAACTGAATAAAGAGATTCATCATGTGCAAAAACTAATCTTCCCGTTTACTTAAAATTGAGAGT"
      )
    alignment.toCigarString should be("20=1X19=1X40=3D20=")
  }

  test("left aligning a deletion alignment") {
    // The 2bp deletion is ambiguous; it must be reported at the leftmost position.
    val alignment =
      align(
        // ====================================DD ========================================
        "AGACACGGAGACACACAGAGATACACGGAAACACAG" +"ACATGCACACACGCGAAGACACAGACACATACACATGCAT",
        "AGACACGGAGACACACAGAGATACACGGAAACACAGAC"+"ACATGCACACACGCGAAGACACAGACACATACACATGCAT"
      )
    alignment.toCigarString should be("36=2D40=")
  }
}
| hammerlab/guacamole | src/test/scala/org/hammerlab/guacamole/alignment/AffineGapPenaltyAlignmentSuite.scala | Scala | apache-2.0 | 3,096 |
package com.github.vitalsoftware.scalaredox
import com.github.vitalsoftware.scalaredox.models.{ GroupedOrdersMessage, Order, SexType }
import org.specs2.mutable.Specification
/**
 * Round-trip test for the GroupedOrders event of the Order data model:
 * deserializes a full sample payload (as produced by the Redox Dev Tools)
 * and verifies the marshalled Orders, Visit and Patient structures.
 */
class GroupedOrdersTest extends Specification with RedoxTest {
  "alter GroupedOrders" should {
    "post a new GroupedOrders given Redox Dev Tools" in {
      // Sample GroupedOrders payload; kept verbatim from the Redox Dev Tools
      // so field coverage (including explicit nulls) matches the real API.
      val json =
        """
          |{
          |  "Meta": {
          |    "DataModel": "Order",
          |    "EventType": "GroupedOrders",
          |    "EventDateTime": "2017-10-10T15:07:21.362Z",
          |    "Test": true,
          |    "Source": {
          |      "ID": "7ce6f387-c33c-417d-8682-81e83628cbd9",
          |      "Name": "Redox Dev Tools"
          |    },
          |    "Destinations": [
          |      {
          |        "ID": "af394f14-b34a-464f-8d24-895f370af4c9",
          |        "Name": "Redox EMR"
          |      }
          |    ],
          |    "Message": {
          |      "ID": 5565
          |    },
          |    "Transmission": {
          |      "ID": 12414
          |    },
          |    "FacilityCode": null
          |  },
          |  "Patient": {
          |    "Identifiers": [
          |      {
          |        "ID": "3281527",
          |        "IDType": "CLH MRN"
          |      },
          |      {
          |        "ID": "9651160",
          |        "IDType": "EMPI"
          |      }
          |    ],
          |    "Demographics": {
          |      "FirstName": "Timothy",
          |      "MiddleName": "Paul",
          |      "LastName": "Bixby",
          |      "DOB": "2008-01-06",
          |      "SSN": "101-01-0001",
          |      "Sex": null,
          |      "Race": "Asian",
          |      "IsHispanic": null,
          |      "MaritalStatus": "Single",
          |      "IsDeceased": null,
          |      "DeathDateTime": null,
          |      "PhoneNumber": {
          |        "Home": "+18088675301",
          |        "Office": null,
          |        "Mobile": null
          |      },
          |      "EmailAddresses": [],
          |      "Language": "en",
          |      "Citizenship": [],
          |      "Address": {
          |        "StreetAddress": "4762 Hickory Street",
          |        "City": "Monroe",
          |        "State": "WI",
          |        "ZIP": "53566",
          |        "County": "Green",
          |        "Country": "US"
          |      }
          |    },
          |    "Notes": []
          |  },
          |  "Visit": {
          |    "VisitNumber": "1234",
          |    "AttendingProvider": {
          |      "ID": "4356789876",
          |      "IDType": "NPI",
          |      "FirstName": "Pat",
          |      "LastName": "Granite",
          |      "Credentials": [
          |        "MD"
          |      ],
          |      "Address": {
          |        "StreetAddress": "123 Main St.",
          |        "City": "Madison",
          |        "State": "WI",
          |        "ZIP": "53703",
          |        "County": "Dane",
          |        "Country": "USA"
          |      },
          |      "Location": {
          |        "Type": null,
          |        "Facility": null,
          |        "Department": null,
          |        "Room": null
          |      },
          |      "PhoneNumber": {
          |        "Office": "+16085551234"
          |      }
          |    },
          |    "ConsultingProvider": {
          |      "ID": "2434534567",
          |      "IDType": "NPI",
          |      "FirstName": "Sharon",
          |      "LastName": "Chalk",
          |      "Credentials": [
          |        "MD",
          |        "PhD"
          |      ],
          |      "Address": {
          |        "StreetAddress": "312 Maple Dr. Suite 400",
          |        "City": "Verona",
          |        "State": "WI",
          |        "ZIP": "53593",
          |        "County": "Dane",
          |        "Country": "USA"
          |      },
          |      "Location": {
          |        "Type": null,
          |        "Facility": null,
          |        "Department": null,
          |        "Room": null
          |      },
          |      "PhoneNumber": {
          |        "Office": "+16085559999"
          |      }
          |    },
          |    "ReferringProvider": {
          |      "ID": "4236464757",
          |      "IDType": "NPI",
          |      "FirstName": "John",
          |      "LastName": "Slate",
          |      "Credentials": [
          |        "DO"
          |      ],
          |      "Address": {
          |        "StreetAddress": "500 First St.",
          |        "City": "Clayton",
          |        "State": "MO",
          |        "ZIP": "63105",
          |        "County": "Saint Louis",
          |        "Country": "USA"
          |      },
          |      "Location": {
          |        "Type": null,
          |        "Facility": null,
          |        "Department": null,
          |        "Room": null
          |      },
          |      "PhoneNumber": {
          |        "Office": "+13145554321"
          |      }
          |    },
          |    "Guarantor": {
          |      "Number": "10001910",
          |      "FirstName": "Kent",
          |      "LastName": "Bixby",
          |      "DOB": null,
          |      "Sex": null,
          |      "Spouse": {
          |        "FirstName": "Barbara",
          |        "LastName": "Bixby"
          |      },
          |      "Address": {
          |        "StreetAddress": "4762 Hickory Street",
          |        "City": "Monroe",
          |        "State": "WI",
          |        "ZIP": "53566",
          |        "County": "Green",
          |        "Country": "USA"
          |      },
          |      "PhoneNumber": {
          |        "Home": null,
          |        "Business": null
          |      },
          |      "Type": null,
          |      "RelationToPatient": "Father",
          |      "Employer": {
          |        "Name": "Accelerator Labs",
          |        "Address": {
          |          "StreetAddress": "1456 Old Sauk Road",
          |          "City": "Madison",
          |          "State": "WI",
          |          "ZIP": "53719",
          |          "County": "Dane",
          |          "Country": "USA"
          |        },
          |        "PhoneNumber": "+18083451121"
          |      }
          |    },
          |    "Insurances": [
          |      {
          |        "Plan": {
          |          "ID": "31572",
          |          "IDType": "Payor ID",
          |          "Name": "HMO Deductable Plan",
          |          "Type": null
          |        },
          |        "MemberNumber": null,
          |        "Company": {
          |          "ID": "60054",
          |          "IDType": null,
          |          "Name": "aetna (60054 0131)",
          |          "Address": {
          |            "StreetAddress": "PO Box 14080",
          |            "City": "Lexington",
          |            "State": "KY",
          |            "ZIP": "40512-4079",
          |            "County": "Fayette",
          |            "Country": "US"
          |          },
          |          "PhoneNumber": "+18089541123"
          |        },
          |        "GroupNumber": "847025-024-0009",
          |        "GroupName": "Accelerator Labs",
          |        "EffectiveDate": "2015-01-01",
          |        "ExpirationDate": "2020-12-31",
          |        "PolicyNumber": "9140860055",
          |        "AgreementType": null,
          |        "CoverageType": null,
          |        "Insured": {
          |          "LastName": null,
          |          "FirstName": null,
          |          "Relationship": null,
          |          "DOB": null,
          |          "Address": {
          |            "StreetAddress": null,
          |            "City": null,
          |            "State": null,
          |            "ZIP": null,
          |            "County": null,
          |            "Country": null
          |          }
          |        }
          |      }
          |    ],
          |    "Location": {
          |      "Type": "Inpatient",
          |      "Facility": "RES General Hospital",
          |      "Department": "3N",
          |      "Room": "136"
          |    }
          |  },
          |  "Orders": [
          |    {
          |      "ID": "157968300",
          |      "Status": "New",
          |      "TransactionDateTime": "2015-05-06T06:00:58.872Z",
          |      "CollectionDateTime": "2015-05-06T06:00:58.872Z",
          |      "Specimen": {
          |        "Source": null,
          |        "BodySite": null,
          |        "ID": null
          |      },
          |      "Procedure": {
          |        "Code": "49086-2",
          |        "Codeset": null,
          |        "Description": "First trimester maternal screen with nuchal translucency panel"
          |      },
          |      "Provider": {
          |        "NPI": "4356789876",
          |        "FirstName": "Pat",
          |        "LastName": "Granite",
          |        "Credentials": [
          |          "MD"
          |        ],
          |        "Address": {
          |          "StreetAddress": "123 Main St.",
          |          "City": "Madison",
          |          "State": "WI",
          |          "ZIP": "53703",
          |          "County": "Dane",
          |          "Country": "USA"
          |        },
          |        "Location": {
          |          "Type": null,
          |          "Facility": null,
          |          "Department": null,
          |          "Room": null
          |        },
          |        "PhoneNumber": {
          |          "Office": "+16085551234"
          |        }
          |      },
          |      "OrderingFacility": {
          |        "Name": null,
          |        "Address": {
          |          "StreetAddress": null,
          |          "City": null,
          |          "State": null,
          |          "ZIP": null,
          |          "County": null,
          |          "Country": null
          |        },
          |        "PhoneNumber": null
          |      },
          |      "Priority": "Stat",
          |      "Comments": null,
          |      "Notes": [],
          |      "Diagnoses": [
          |        {
          |          "Code": "Z31.41",
          |          "Codeset": "ICD-10",
          |          "Name": "Encounter for fertility testing",
          |          "Type": null
          |        }
          |      ],
          |      "ClinicalInfo": [
          |        {
          |          "Code": "QUESTION001",
          |          "Codeset": null,
          |          "Description": "Estimated Due Date",
          |          "Value": "2015-10-05",
          |          "Units": null,
          |          "Abbreviation": null,
          |          "Notes": []
          |        },
          |        {
          |          "Code": "QUESTION002",
          |          "Codeset": null,
          |          "Description": "Ethnicity",
          |          "Value": "White",
          |          "Units": null,
          |          "Abbreviation": "W",
          |          "Notes": []
          |        },
          |        {
          |          "Code": "QUESTION010",
          |          "Codeset": null,
          |          "Description": "Is this a twin pregnancy?",
          |          "Value": "Singleton",
          |          "Units": null,
          |          "Abbreviation": "sng",
          |          "Notes": []
          |        },
          |        {
          |          "Code": "QUESTION011",
          |          "Codeset": null,
          |          "Description": "Dating Method",
          |          "Value": "LMP",
          |          "Units": null,
          |          "Abbreviation": "lmp",
          |          "Notes": []
          |        }
          |      ]
          |    },
          |    {
          |      "ID": "194415773",
          |      "Status": "New",
          |      "TransactionDateTime": "2015-05-05T00:00:00.000Z",
          |      "CollectionDateTime": "2015-05-07T06:00:58.872Z",
          |      "Specimen": {
          |        "Source": null,
          |        "BodySite": null,
          |        "ID": null
          |      },
          |      "Procedure": {
          |        "Code": "24356-8",
          |        "Codeset": null,
          |        "Description": "Urinalysis complete panel in Urine"
          |      },
          |      "Provider": {
          |        "NPI": "4356789876",
          |        "FirstName": "Pat",
          |        "LastName": "Granite",
          |        "Credentials": [
          |          "MD"
          |        ],
          |        "Address": {
          |          "StreetAddress": "123 Main St.",
          |          "City": "Madison",
          |          "State": "WI",
          |          "ZIP": "53703",
          |          "County": "Dane",
          |          "Country": "USA"
          |        },
          |        "Location": {
          |          "Type": null,
          |          "Facility": null,
          |          "Department": null,
          |          "Room": null
          |        },
          |        "PhoneNumber": {
          |          "Office": "+16085551234"
          |        }
          |      },
          |      "OrderingFacility": {
          |        "Name": null,
          |        "Address": {
          |          "StreetAddress": null,
          |          "City": null,
          |          "State": null,
          |          "ZIP": null,
          |          "County": null,
          |          "Country": null
          |        },
          |        "PhoneNumber": null
          |      },
          |      "Priority": "Routine",
          |      "Comments": null,
          |      "Notes": [],
          |      "Diagnoses": [
          |        {
          |          "Code": "R10.84",
          |          "Codeset": "ICD-10",
          |          "Name": "Abdominal pain generalized",
          |          "Type": null
          |        }
          |      ],
          |      "ClinicalInfo": [
          |        {
          |          "Code": null,
          |          "Codeset": null,
          |          "Description": null,
          |          "Value": null,
          |          "Units": null,
          |          "Abbreviation": null,
          |          "Notes": []
          |        }
          |      ]
          |    }
          |  ]
          |}
          |
        """.stripMargin
      val data = validateJsonInput[GroupedOrdersMessage](json)
      // Validate our data marshalling
      // Both orders must carry procedure, provider, diagnoses and clinical info.
      data.Orders must have size (2)
      data.Orders must contain { o: Order =>
        o.Procedure must beSome
        o.Provider must beSome
        o.Diagnoses must not be empty
        o.ClinicalInfo must not be empty
      }
      // Visit: all provider roles plus guarantor/insurance/location populated.
      data.Visit must beSome
      val visit = data.Visit.get
      visit.AttendingProvider must beSome
      visit.ConsultingProvider must beSome
      visit.ReferringProvider must beSome
      visit.Guarantor must beSome
      visit.Guarantor.get.Employer must beSome
      visit.Insurances must not be empty
      visit.Location must beSome
      // Patient: a null "Sex" in the payload must map to SexType.Unknown.
      val patient = data.Patient
      patient.Demographics must beSome
      patient.Demographics.get.Sex must beEqualTo(SexType.Unknown)
    }
  }
}
| vital-software/scala-redox | src/test/scala/com/github/vitalsoftware/scalaredox/GroupedOrdersTest.scala | Scala | mit | 15,572 |
/*
* Copyright 2014 Alan Rodas Bonjour
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.alanrodas.fronttier
import rapture.fs.FileUrl
/**
 * Parser for one Fronttier configuration file format.
 *
 * Implementations declare which file name they handle and know how to detect
 * and parse that file under a given directory.
 */
trait ConfigParser {
  /** Unique identifier of this parser/format. */
  def id: String
  /** Human-readable name of the configuration format. */
  def name: String
  /** Name of the configuration file this parser handles. */
  def fileName: String
  /** Returns true if this parser's configuration file exists at `path`. */
  def existsFileAt(path : FileUrl) : Boolean
  /** Parses the configuration at the given location.
    * NOTE(review): the parameter is named `fileName` but is a FileUrl —
    * presumably the file (or containing directory) to parse; confirm with
    * implementations. */
  def parseAt(fileName: FileUrl): Configuration
}
| alanrodas/Fronttier | core/src/main/scala/com/alanrodas/fronttier/ConfigParser.scala | Scala | apache-2.0 | 830 |
package org.jetbrains.sbt
package project.module
import java.awt.event.{ActionEvent, ActionListener}
import java.util
import javax.swing.table.AbstractTableModel
import com.intellij.openapi.roots.ui.configuration.{ModuleConfigurationState, ModuleElementsEditor}
import com.intellij.ui.CollectionListModel
import com.intellij.util.text.DateFormatUtil
import org.jetbrains.plugins.scala.util.JListCompatibility
import org.jetbrains.plugins.scala.util.JListCompatibility.CollectionListModelWrapper
import org.jetbrains.sbt.resolvers.{SbtResolver, SbtResolverIndex, SbtResolverIndexesManager}
import org.jetbrains.sbt.settings.SbtSystemSettings
import scala.collection.JavaConverters._
/**
* @author Nikolay Obedin
* @since 12/1/14.
*/
/**
 * Read-only module-settings editor for sbt-imported modules: shows the
 * detected sbt version, the module's implicit sbt imports, and its resolvers,
 * with a button to (re-)index the selected resolvers.
 */
class SbtModuleSettingsEditor (state: ModuleConfigurationState) extends ModuleElementsEditor(state) {
  // Designer-generated Swing form backing this editor.
  private val myForm = new SbtModuleSettingsForm
  // List model for the implicit-imports list, wrapped for JList compatibility.
  private val modelWrapper = new CollectionListModelWrapper(new CollectionListModel[String](new util.ArrayList[String]))
  // Resolvers materialized once so table row indices stay stable across updates.
  private val resolvers = SbtModule.getResolversFrom(getModel.getModule).toSeq

  def getDisplayName = SbtBundle("sbt.settings.sbtModuleSettings")

  def getHelpTopic = null

  // This editor is read-only, so there is nothing to persist.
  def saveData() {}

  def createComponentImpl() = {
    myForm.sbtImportsList.setEmptyText(SbtBundle("sbt.settings.noImplicitImportsFound"))
    JListCompatibility.setModel(myForm.sbtImportsList, modelWrapper.getModelRaw)
    // "Update" triggers re-indexing of whichever resolvers are selected in the table.
    myForm.updateButton.addActionListener(new ActionListener {
      override def actionPerformed(e: ActionEvent): Unit = {
        val resolversToUpdate: Seq[SbtResolver] =
          myForm.resolversTable.getSelectedRows map (resolvers(_))
        SbtResolverIndexesManager().update(resolversToUpdate)
      }
    })
    myForm.mainPanel
  }

  // Repopulates all widgets from the current project/module state.
  override def reset() {
    val moduleSettings = SbtSystemSettings.getInstance(state.getProject).getLinkedProjectSettings(getModel.getModule)
    myForm.sbtVersionTextField.setText(moduleSettings.map(_.sbtVersion).getOrElse(SbtBundle("sbt.settings.sbtVersionNotDetected")))
    modelWrapper.getModel.replaceAll(SbtModule.getImportsFrom(getModel.getModule).asJava)
    myForm.resolversTable.setModel(new ResolversModel(resolvers))
    // Pre-select the first row so the "Update" button acts on something by default.
    myForm.resolversTable.setRowSelectionInterval(0, 0)
    myForm.resolversTable.getColumnModel.getColumn(0).setPreferredWidth(50)
    myForm.resolversTable.getColumnModel.getColumn(1).setPreferredWidth(400)
    myForm.resolversTable.getColumnModel.getColumn(2).setPreferredWidth(20)
  }
}
/**
 * Table model listing one resolver per row with three columns:
 * name, root URL, and the time its index was last updated.
 */
private class ResolversModel(val resolvers: Seq[SbtResolver]) extends AbstractTableModel {

  private val columns = Seq(
    SbtBundle("sbt.settings.resolvers.name"),
    SbtBundle("sbt.settings.resolvers.url"),
    SbtBundle("sbt.settings.resolvers.updated")
  )

  def getColumnCount = columns.size

  def getRowCount = resolvers.size

  override def getColumnName(columnIndex: Int) = columns(columnIndex)

  def getValueAt(rowIndex: Int, columnIndex: Int) = {
    val resolver = resolvers(rowIndex)
    columnIndex match {
      case 0 => resolver.name
      case 1 => resolver.root
      case 2 =>
        // A resolver without an index (or one never updated) has no timestamp.
        val timestamp: Long = resolver.associatedIndex.map(_.timestamp).getOrElse(SbtResolverIndex.NO_TIMESTAMP)
        if (timestamp == SbtResolverIndex.NO_TIMESTAMP)
          SbtBundle("sbt.settings.resolvers.neverUpdated")
        else
          DateFormatUtil.formatDate(timestamp)
    }
  }
}
| double-y/translation-idea-plugin | src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala | Scala | apache-2.0 | 3,327 |
package mesosphere.marathon
package api.akkahttp.v2
import mesosphere.UnitTest
import mesosphere.marathon.state.PathId
import org.scalatest.matchers.{ HavePropertyMatchResult, HavePropertyMatcher }
import play.api.libs.json.{ JsNumber, JsObject, JsValue }
/**
 * Reusable ScalaTest `HavePropertyMatcher`s over JSON responses, used by the
 * Akka-HTTP v2 API tests to assert on fields of app/pod payloads.
 */
trait ResponseMatchers { this: UnitTest =>

  /**
   * Have property matcher for pods executor resources.
   *
   * @param cpus expected CPUs
   * @param mem expected memory
   * @param disk expected disk
   * @return Match result.
   */
  def executorResources(cpus: Double, mem: Double, disk: Double) = new HavePropertyMatcher[JsValue, Option[JsValue]] {
    override def apply(actual: JsValue) = {
      val maybeActual = (actual \\ "executorResources").toOption
      // The whole executorResources object must match exactly.
      val expected = JsObject(Seq("cpus" -> JsNumber(cpus), "mem" -> JsNumber(mem), "disk" -> JsNumber(disk)))
      val matches = maybeActual.contains(expected)
      HavePropertyMatchResult(matches, "executorResources", Some(expected), maybeActual)
    }
  }

  /**
   * Have property matcher verifying that no network name has been defined
   * for the first network entry.
   */
  val noDefinedNetworkname = new HavePropertyMatcher[JsValue, Option[JsValue]] {
    override def apply(actual: JsValue) = {
      val actualNetworkname = (actual \\ "networks" \\ 0 \\ "name").toOption
      val matches = actualNetworkname.isEmpty
      HavePropertyMatchResult(matches, "networkname", None, actualNetworkname)
    }
  }

  /**
   * Have property matcher for network name of pod.
   * @param name Expected name of network
   * @return Match result
   */
  def definedNetworkName(name: String) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeNetworkName = (actual \\ "networks" \\ 0 \\ "name").asOpt[String]
      val matches = maybeNetworkName.contains(name)
      HavePropertyMatchResult(matches, "networkName", Some(name), maybeNetworkName)
    }
  }

  /**
   * Have property matcher for network mode of pod.
   * @param mode Expected mode.
   * @return Match result
   */
  def networkMode(mode: raml.NetworkMode) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeMode = (actual \\ "networks" \\ 0 \\ "mode").asOpt[String]
      val matches = maybeMode.contains(mode.value)
      HavePropertyMatchResult(matches, "networkMode", Some(mode.value), maybeMode)
    }
  }

  /**
   * Have property matcher for pod environment secret of the first container.
   * @param secret Expected secret
   * @return Match result
   */
  def podContainerWithEnvSecret(secret: String) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeSecret = (actual \\ "containers" \\ 0 \\ "environment" \\ "vol" \\ "secret").asOpt[String]
      val matches = maybeSecret.contains(secret)
      HavePropertyMatchResult(matches, "podContainerSecret", Some(secret), maybeSecret)
    }
  }

  /**
   * Have property matcher for a file based secret on the first volume.
   * @param secret Expected secret
   * @return Match result
   */
  def podWithFileBasedSecret(secret: String) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeSecret = (actual \\ "volumes" \\ 0 \\ "secret").asOpt[String]
      val matches = maybeSecret.contains(secret)
      HavePropertyMatchResult(matches, "podContainerSecret", Some(secret), maybeSecret)
    }
  }

  /**
   * Have property matcher for id (e.g. of an app).
   * @param pathId id of an app
   * @return Match result
   */
  def id(pathId: PathId) = new HavePropertyMatcher[JsValue, Option[PathId]] {
    override def apply(actual: JsValue) = {
      val maybeId = (actual \\ "id").asOpt[String].map(PathId(_))
      HavePropertyMatchResult(maybeId.contains(pathId), "id", Some(pathId), maybeId)
    }
  }

  /**
   * Have property matcher for a pod id, compared as a raw string.
   * @param id The expected pod id
   * @return Match result
   */
  def podId(id: String) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeId = (actual \\ "id").asOpt[String]
      val matches = maybeId.contains(id)
      HavePropertyMatchResult(matches, "podId", Some(id), maybeId)
    }
  }

  /**
   * Have property matcher for a valid task id.
   * @param taskId The expected task id.
   * @return Match result
   */
  def taskId(taskId: String) = new HavePropertyMatcher[JsValue, Option[String]] {
    override def apply(actual: JsValue) = {
      val maybeId = (actual \\ "id").asOpt[String]
      val matches = maybeId.contains(taskId)
      HavePropertyMatchResult(matches, "id", Some(taskId), maybeId)
    }
  }

  /**
   * Have property matcher for a pod status.
   * @param state The expected RAML pod state
   * @return Match result
   */
  def podState(state: raml.PodState) = new HavePropertyMatcher[JsValue, Option[raml.PodState]] {
    override def apply(actual: JsValue) = {
      val maybeState = (actual \\ "status").asOpt[String].flatMap(raml.PodState.fromString)
      val matches = maybeState.contains(state)
      HavePropertyMatchResult(matches, "podState", Some(state), maybeState)
    }
  }

  /**
   * Have property matcher for scaling policy instances.
   * @param instances expected number of instances
   * @return Match result
   */
  def scalingPolicyInstances(instances: Int) = new HavePropertyMatcher[JsValue, Option[Int]] {
    override def apply(actual: JsValue) = {
      val maybeScalingPolicy = (actual \\ "scaling" \\ "instances").asOpt[Int]
      val matches = maybeScalingPolicy.contains(instances)
      HavePropertyMatchResult(matches, "podScalingPolicy", Some(instances), maybeScalingPolicy)
    }
  }

  /**
   * Have property matcher for a pod volume.
   * @param volumeIndex an index into a volume array
   * @param expectedVolume an expected pod volume
   * @return Match result
   */
  def podVolume(volumeIndex: Int, expectedVolume: raml.PodVolume) =
    new HavePropertyMatcher[JsValue, Option[raml.PodVolume]] {
      override def apply(actual: JsValue) = {
        val maybeActualVolume = (actual \\ "volumes" \\ volumeIndex).asOpt[raml.PodVolume]
        val matches = maybeActualVolume.contains(expectedVolume)
        HavePropertyMatchResult(matches, "podVolume", Some(expectedVolume), maybeActualVolume)
      }
    }

  /**
   * Have property matcher for a pod volume mount.
   * @param containerIndex an index into a container array
   * @param volumeMountIndex an index into a container's volume mount array
   * @param expectedVolumeMount an expected pod volume mount
   * @return Match result
   */
  def podVolumeMount(containerIndex: Int, volumeMountIndex: Int, expectedVolumeMount: raml.VolumeMount) =
    new HavePropertyMatcher[JsValue, Option[raml.VolumeMount]] {
      override def apply(actual: JsValue) = {
        val maybeActualVolumeMount =
          (actual \\ "containers" \\ containerIndex \\ "volumeMounts" \\ volumeMountIndex).asOpt[raml.VolumeMount]
        val matches = maybeActualVolumeMount.contains(expectedVolumeMount)
        HavePropertyMatchResult(matches, "podVolumeMount", Some(expectedVolumeMount), maybeActualVolumeMount)
      }
    }
}
| janisz/marathon | src/test/scala/mesosphere/marathon/api/akkahttp/v2/ResponseMatchers.scala | Scala | apache-2.0 | 7,063 |
package com.sksamuel.elastic4s.requests.admin
/**
 * Request to shrink an existing index into a new target index.
 *
 * @param source              the name of the index to shrink
 * @param target              the name of the new, shrunken index
 * @param waitForActiveShards how many active shard copies to wait for before
 *                            the operation returns, if set
 * @param settings            index settings to apply to the target index
 */
case class ShrinkIndexRequest(source: String,
                              target: String,
                              waitForActiveShards: Option[Int] = None,
                              settings: Map[String, String] = Map.empty) {

  /** Returns a copy of this request with the given target-index settings. */
  def settings(map: Map[String, String]): ShrinkIndexRequest = copy(settings = map)

  /** Returns a copy of this request that waits for the given number of active
    * shard copies; added for consistency with the `settings` builder method. */
  def waitForActiveShards(count: Int): ShrinkIndexRequest = copy(waitForActiveShards = Some(count))
}
| sksamuel/elastic4s | elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/admin/ShrinkIndexRequest.scala | Scala | apache-2.0 | 372 |
package epic.logo
import breeze.util.Index
import breeze.math.MutableInnerProductModule
case class MulticlassOracleInferencer[L, F, W](
validLabels: IndexedSeq[L], labelConjoiner: (L, F) => W)(implicit space: MutableInnerProductModule[W, Double])
extends OracleInferencer[LabeledDatum[L, F], W, Unit] {
override type Y = L
def oracle(weights : Weights[W], instance : LabeledDatum[L, F]) : (L, W, Double, Unit) = {
(instance.label, labelConjoiner(instance.label, instance.features), 0.0, ())
}
def initialState = ()
def reduceStates(a: Unit, b: Unit) = ()
}
| langkilde/epic | src/main/scala/epic/logo/MulticlassOracleInferencer.scala | Scala | apache-2.0 | 583 |
package es.uvigo.ei.sing.sds
package service
import scala.collection.JavaConversions._
import scala.concurrent.Future
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import uk.ac.cam.ch.wwmm.oscar.Oscar
import uk.ac.cam.ch.wwmm.oscar.chemnamedict.entities.{ FormatType, ResolvedNamedEntity }
/**
 * Thin asynchronous wrapper around the OSCAR chemistry text-mining library:
 * finds resolvable chemical named entities in free text and normalizes them
 * to standard InChI identifiers.
 */
final class OscarService {

  import OscarService._

  /** Extracts all resolvable chemical named entities found in `text`. */
  def getNamedEntities(text: String): Future[Set[ResolvedNamedEntity]] =
    Future { oscar.findResolvableEntities(text).toSet }

  /** Normalizes an entity to its first standard-InChI chemical structure.
    * NOTE(review): assumes at least one STD_INCHI structure exists for the
    * entity — confirm how OSCAR behaves when none is present. */
  def normalize(entity: ResolvedNamedEntity): Future[String] =
    Future { entity.getFirstChemicalStructure(FormatType.STD_INCHI).getValue }
}
object OscarService {
  // Single lazily-created Oscar instance shared by all service calls.
  // NOTE(review): assumed safe to share across futures — confirm Oscar's
  // thread-safety guarantees.
  lazy val oscar = new Oscar()

  /** Convenience conversions from OSCAR's ResolvedNamedEntity to domain entities. */
  implicit class NamedEntityOps(rne: ResolvedNamedEntity) {

    import entity._

    /** Builds an Annotation for this entity occurrence — its surface text and
      * start/end character offsets — linked to the given article and keyword. */
    def toAnnotation(articleId: Article.ID, keywordId: Keyword.ID): Annotation =
      Annotation(None, articleId, keywordId, rne.getSurface, rne.getStart.toLong, rne.getEnd.toLong)
  }
}
| agjacome/smart-drug-search | src/main/scala/service/OscarService.scala | Scala | mit | 968 |
package org.hibernate.cache.rediscala
import scala.annotation.varargs
/**
* Utility for combining hash codes of multiple values.
*
* @author Sunghyouk Bae sunghyouk.bae@gmail.com
*/
private[rediscala] object Hashs {

  /** Hash code used for `null` values and for empty argument lists. */
  val NULL_VALUE = 0

  /** The constant ONE_VALUE (kept for source compatibility). */
  val ONE_VALUE = 1

  /** Multiplier used when combining hash codes (the same factor used by
    * `java.util.Objects.hash`). */
  val FACTOR = 31

  /**
   * Computes the hash code of a single value, mapping `null` to [[NULL_VALUE]].
   *
   * @param x the value to hash (may be `null`)
   * @return the value's hash code, or [[NULL_VALUE]] if it is `null`
   */
  private def computeInternal(x: Any) = if (x == null) NULL_VALUE else x.hashCode()

  /**
   * Combines the hash codes of the given values into a single hash code,
   * using the standard `hash * 31 + next` accumulation.
   *
   * @param objs the values to combine (individual elements may be `null`)
   * @return the combined hash code, or [[NULL_VALUE]] for a null or empty
   *         argument list
   */
  @varargs
  def compute(objs: Any*): Int =
    if (objs == null || objs.isEmpty) NULL_VALUE
    else objs.foldLeft(NULL_VALUE)((hash, x) => hash * FACTOR + computeInternal(x))
}
| debop/debop4s | hibernate-rediscala/src/test/scala/org/hibernate/cache/rediscala/Hashs.scala | Scala | apache-2.0 | 960 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model._
import io.fabric8.kubernetes.api.model.storage.StorageClassBuilder
import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
import org.scalatest.time.{Milliseconds, Span}
import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite._
private[spark] trait PVTestsSuite { k8sSuite: KubernetesSuite =>
import PVTestsSuite._
private def setupLocalStorage(): Unit = {
val scBuilder = new StorageClassBuilder()
.withKind("StorageClass")
.withApiVersion("storage.k8s.io/v1")
.withNewMetadata()
.withName(STORAGE_NAME)
.endMetadata()
.withProvisioner("kubernetes.io/no-provisioner")
.withVolumeBindingMode("WaitForFirstConsumer")
val pvBuilder = new PersistentVolumeBuilder()
.withKind("PersistentVolume")
.withApiVersion("v1")
.withNewMetadata()
.withName("test-local-pv")
.endMetadata()
.withNewSpec()
.withCapacity(Map("storage" -> new Quantity("1Gi")).asJava)
.withAccessModes("ReadWriteOnce")
.withPersistentVolumeReclaimPolicy("Retain")
.withStorageClassName("test-local-storage")
.withLocal(new LocalVolumeSourceBuilder().withPath(VM_PATH).build())
.withNewNodeAffinity()
.withNewRequired()
.withNodeSelectorTerms(new NodeSelectorTermBuilder()
.withMatchExpressions(new NodeSelectorRequirementBuilder()
.withKey("kubernetes.io/hostname")
.withOperator("In")
.withValues("minikube", "m01", "docker-for-desktop", "docker-desktop")
.build()).build())
.endRequired()
.endNodeAffinity()
.endSpec()
val pvcBuilder = new PersistentVolumeClaimBuilder()
.withKind("PersistentVolumeClaim")
.withApiVersion("v1")
.withNewMetadata()
.withName(PVC_NAME)
.endMetadata()
.withNewSpec()
.withAccessModes("ReadWriteOnce")
.withStorageClassName("test-local-storage")
.withResources(new ResourceRequirementsBuilder()
.withRequests(Map("storage" -> new Quantity("1Gi")).asJava).build())
.endSpec()
kubernetesTestComponents
.kubernetesClient
.storage()
.storageClasses()
.create(scBuilder.build())
kubernetesTestComponents
.kubernetesClient
.persistentVolumes()
.create(pvBuilder.build())
kubernetesTestComponents
.kubernetesClient
.persistentVolumeClaims()
.create(pvcBuilder.build())
}
private def deleteLocalStorage(): Unit = {
kubernetesTestComponents
.kubernetesClient
.persistentVolumeClaims()
.withName(PVC_NAME)
.delete()
kubernetesTestComponents
.kubernetesClient
.persistentVolumes()
.withName(PV_NAME)
.delete()
kubernetesTestComponents
.kubernetesClient
.storage()
.storageClasses()
.withName(STORAGE_NAME)
.delete()
}
private def checkPVs(pod: Pod, file: String) = {
  // Poll until the file staged on the persistent volume becomes visible
  // from inside the pod at the container mount path.
  Eventually.eventually(TIMEOUT, INTERVAL) {
    implicit val podName: String = pod.getMetadata.getName
    implicit val components: KubernetesTestComponents = kubernetesTestComponents
    val contents = Utils.executeCommand("cat", s"$CONTAINER_MOUNT_PATH/$file")
    assert(contents.toString.trim == FILE_CONTENTS)
  }
}
// Integration test: mounts the PVC created by setupLocalStorage() into both
// the driver and the executor pods, runs the DFS read/write example against a
// file staged on the host-local volume, and verifies both pods can read it.
test("PVs with local storage", k8sTestTag, MinikubeTag) {
  // Mount the same claim at the same container path on driver and executors.
  sparkAppConf
    .set(s"spark.kubernetes.driver.volumes.persistentVolumeClaim.data.mount.path",
      CONTAINER_MOUNT_PATH)
    .set(s"spark.kubernetes.driver.volumes.persistentVolumeClaim.data.options.claimName",
      PVC_NAME)
    .set(s"spark.kubernetes.executor.volumes.persistentVolumeClaim.data.mount.path",
      CONTAINER_MOUNT_PATH)
    .set(s"spark.kubernetes.executor.volumes.persistentVolumeClaim.data.options.claimName",
      PVC_NAME)
  // Stage a test file on the host path backing the local PV.
  // NOTE(review): `file` is passed to checkPVs(pod, file: String), so it is
  // presumably the created file's name — confirm against Utils.createTempFile.
  val file = Utils.createTempFile(FILE_CONTENTS, HOST_PATH)
  try {
    setupLocalStorage()
    runDFSReadWriteAndVerifyCompletion(
      // Expected word count of the staged file.
      FILE_CONTENTS.split(" ").length,
      driverPodChecker = (driverPod: Pod) => {
        doBasicDriverPodCheck(driverPod)
        checkPVs(driverPod, file)
      },
      executorPodChecker = (executorPod: Pod) => {
        doBasicExecutorPodCheck(executorPod)
        checkPVs(executorPod, file)
      },
      appArgs = Array(s"$CONTAINER_MOUNT_PATH/$file", s"$CONTAINER_MOUNT_PATH"),
      interval = Some(PV_TESTS_INTERVAL)
    )
  } finally {
    // make sure this always runs, even if setup or the run itself failed,
    // so the cluster is not left with stale SC/PV/PVC objects
    deleteLocalStorage()
  }
}
}
private[spark] object PVTestsSuite {
  // Names of the StorageClass / PersistentVolume / PersistentVolumeClaim
  // fixtures created by setupLocalStorage() and removed by deleteLocalStorage().
  val STORAGE_NAME = "test-local-storage"
  val PV_NAME = "test-local-pv"
  val PVC_NAME = "test-local-pvc"
  // Where the volume is mounted inside the driver/executor containers.
  val CONTAINER_MOUNT_PATH = "/opt/spark/pv-tests"
  // Host-side paths backing the local volume; overridable via environment
  // variables so CI environments can point at a writable location.
  val HOST_PATH = sys.env.getOrElse("PVC_TESTS_HOST_PATH", "/tmp")
  val VM_PATH = sys.env.getOrElse("PVC_TESTS_VM_PATH", "/tmp")
  // Contents written to the staged test file and asserted from inside pods.
  val FILE_CONTENTS = "test PVs"
  // Polling interval used when waiting for the PV-backed app to complete.
  val PV_TESTS_INTERVAL = PatienceConfiguration.Interval(Span(10, Milliseconds))
}
| matthewfranglen/spark | resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PVTestsSuite.scala | Scala | mit | 5,972 |
// Solution-1.scala
// Solution to Exercise 1 in "Conditional Expressions"
val a = 1
val b = 5
// `if` is an expression in Scala: bind its result, then print once.
val message =
  if (a < b) "a is less than b"
  else "a is not less than b"
println(message)
/* OUTPUT_SHOULD_BE
a is less than b
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/05_ConditionalExpressions/Solution-1.scala | Scala | apache-2.0 | 225 |
package org.dresdenocl.attributegrammar.integration.kiama
import java.lang.reflect.{ Proxy, InvocationHandler, Method }
import org.eclipse.emf.ecore._
import org.eclipse.emf.common.util._
import org.eclipse.emf.common.notify._
import org.dresdenocl.essentialocl.expressions._
import org.kiama.attribution._
/**
* <p>
* Import this object to use EObjects that need to be attributed by Kiama.
* There is a cache so that already adapted EObjects are not adapted over
* and over again and their attribution is not lost over time.
* </p>
*
* <p>
* There are some restrictions in the usage of the Kiama attribution
* library when using the Ecore bridge:
* <ul>
* <li> Pivot Model types that are attributed need to be declared inside this
* object. The field is called #pivotModelTypes.</li>
* <li> The usage of the utility function "->" in org.kiama is not permitted
* as it circumvents the proxy mechanism. Use the attribution as a normal
* function call.</li>
* </ul>
* </p>
*/
object AttributableEObject {

  /*
   * Use a cache, so that the attribution of an EObject is not lost.
   * FIXME: this should be a WeakHashMap; unfortunately, the standard
   * implementation in the JDK standard library only supports keys to be
   * checked against their equals method instead of identity. Since EObjects
   * are used as the key, the equals method cannot be overridden to
   * support such behaviour.
   */
  private val cache = new java.util.IdentityHashMap[EObject, AttributableEObject]

  /**
   * Should be called when the attribution starts as all old values in
   * the cache of attributed EObjects are cleared.
   */
  def clearCache {
    cache.clear
  }

  /*
   * To get the right interface for Pivot Model elements to be part of
   * the proxy interface, we have to define what interface abstractions
   * we want to use.
   */
  private val pivotModelTypes = List("Namespace", "Operation")

  /**
   * Creates a new AttributableEObject and a dynamic Proxy object
   * so that calls to the original EObject and to the Attributable
   * interface are delegated correctly.
   */
  implicit def eObject2Attributable[T <: EObject](e: T): AttributableEObject = {
    e match {
      // Already adapted: return as-is (also prevents double-proxying).
      case a: AttributableEObject => a
      case _ =>
        // Identity-based cache lookup, so each EObject keeps its attribution.
        if (cache.containsKey(e))
          cache.get(e)
        else {
          // Anonymous refinement binds the wrapped EObject to the trait.
          val attrEObject = new { val eObject = e } with AttributableEObject
          // find the appropriate abstraction (interface) of the EObject:
          // first an interface whose simple name prefixes the class name,
          // otherwise one of the whitelisted Pivot Model interfaces.
          val clazz = e.getClass
          clazz.getInterfaces find { e.getClass.getSimpleName startsWith _.getSimpleName } orElse {
            clazz.getInterfaces.find { pivotModelTypes contains _.getSimpleName }
          } match {
            case Some(interface) =>
              // The proxy implements both the EObject's interface and
              // AttributableEObject; calls are routed by declaring class.
              val proxy = Proxy.newProxyInstance(attrEObject.getClass.getClassLoader, Array(interface, classOf[AttributableEObject]),
                new InvocationHandler {
                  override def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]): AnyRef = {
                    // decide on which of the original objects the method call should be executed:
                    // EObject/Notifier methods go to the wrapped EObject,
                    // everything else to the Attributable adapter.
                    val obj =
                      if (classOf[EObject].isAssignableFrom(method.getDeclaringClass) || classOf[Notifier].isAssignableFrom(method.getDeclaringClass))
                        e
                      else
                        attrEObject
                    if (args == null)
                      method.invoke(obj)
                    else
                      method.invoke(obj, args: _*)
                  }
                }).asInstanceOf[AttributableEObject]
              cache put (e, proxy)
              proxy
            case None => throw new IllegalStateException("Cannot find interface for EObject " + e)
          }
        }
    }
  }
}
/**
* Trait that implements Attributable and wraps the original
* EObject. It therefore allows to use the Kiama attribution
* on any EObject. To automatically convert EObjects to
* Attributables when needed, use "import AttributableEObject._".
*/
trait AttributableEObject extends Attributable {

  import AttributableEObject._

  /**
   * The wrapped/decorated EObject.
   */
  val eObject: EObject

  /**
   * Cache for [[prev]]. `null` means "not yet computed"; `None` means
   * "computed, and there is no previous sibling". The nullable Option is
   * deliberate: `null` is the not-yet-computed sentinel.
   */
  private var _prev: Option[Attributable] = null

  /**
   * Parent-Child connections are already available with Ecore;
   * therefore, do not do anything computationally here and just
   * set the parent to the container.
   */
  override def setChildConnections {
    if (eObject.eContainer != null)
      parent = eObject.eContainer
  }

  /**
   * Overridden to use the capabilities of EObject and its containment
   * structure. Returns the previous sibling in the (many-valued)
   * containment feature, or `null` (cast to `T`) if there is none.
   */
  override def prev[T]: T = {
    if (_prev == null) {
      if (parent != null) {
        val containmentFeature = eObject.eContainmentFeature
        if (containmentFeature.isMany) {
          // NOTE: assumes the parent was adapted via eObject2Attributable;
          // a non-AttributableEObject parent would raise a MatchError here.
          val containedFeatures = parent match {
            case a: AttributableEObject => a.eObject.eGet(containmentFeature).asInstanceOf[EList[EObject]]
          }
          val index = containedFeatures.indexOf(eObject)
          if (index > 0)
            _prev = Some(containedFeatures.get(index - 1))
          else
            _prev = None
        } else
          _prev = None
      } else {
        // Bug fix: a root object (no parent) previously left _prev as null,
        // so the getOrElse call below threw a NullPointerException. Treat
        // "no parent" as "no previous sibling".
        _prev = None
      }
    }
    _prev.getOrElse(null).asInstanceOf[T]
  }

  override def productElement(index: Int) = eObject.eContents.get(index)

  override def productArity = eObject.eContents.size

  // FIXME mt: what to test here?
  override def canEqual(that: Any) = true

  /**
   * The equality test is passed on to the original EObject.
   */
  override def equals(other: Any) = eObject == other

  /**
   * The hashCode is the one from the original EObject.
   */
  override def hashCode = eObject.hashCode
}
import leon.collection._
object ADTWithArray5 {

  // Leon verification benchmark: a minimal wrapper around a non-empty array.
  // The require clauses are the verification conditions Leon must discharge.
  case class HashTable(table: Array[BigInt]) {
    require(table.length > 0)

    // Bounds-checked indexed read; the precondition confines `index`
    // to the valid range of `table`.
    def apply(index: Int): BigInt = {
      require(index >= 0 && index < table.length)
      table(index)
    }
  }
}
| epfl-lara/leon | src/test/resources/regression/verification/purescala/valid/ADTWithArray5.scala | Scala | gpl-3.0 | 248 |
package com.sageserpent.plutonium
import scala.reflect.runtime.universe._
// TODO - document the mysterious behaviour of the items returned.
trait RecorderFactory {
  /**
   * Creates an item of type `Item` for the given `id`.
   *
   * NOTE(review): per the TODO above, the returned value has non-obvious
   * ("mysterious") recorder semantics rather than being a plain instance —
   * confirm the exact contract against the implementing class.
   */
  def apply[Item: TypeTag](id: Any): Item
}
| sageserpent-open/open-plutonium | src/main/scala/com/sageserpent/plutonium/RecorderFactory.scala | Scala | mit | 211 |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bqhiveloader
import com.google.common.collect.ImmutableMap
import javax.security.auth.login.{AppConfigurationEntry, Configuration}
object Kerberos {

  // Fully-qualified name of the JAAS Kerberos login module.
  private val Krb5LoginModule = "com.sun.security.auth.module.Krb5LoginModule"

  /**
   * Builds a required JAAS entry for the Krb5 login module using the given
   * principal and keytab, with keytab-based, non-interactive login options.
   */
  private def createEntry(principal: String, keyTab: String): AppConfigurationEntry = {
    val options = ImmutableMap.builder()
    Seq(
      "principal" -> principal,
      "keyTab" -> keyTab,
      "useTicketCache" -> "true",
      "isInitiator" -> "true",
      "doNotPrompt" -> "true",
      "useKeyTab" -> "true",
      "debug" -> "false",
      "storeKey" -> "true",
      "refreshKrb5Config" -> "true"
    ).foreach { case (key, value) => options.put(key, value) }
    new AppConfigurationEntry(
      Krb5LoginModule,
      AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
      options.build()
    )
  }

  /** A JAAS Configuration that returns the same entries for every name. */
  class StaticConfiguration(private val entries: Array[AppConfigurationEntry]) extends Configuration {
    override def getAppConfigurationEntry(name: String): Array[AppConfigurationEntry] = entries
  }

  private def createConfig(principal: String, keyTab: String): Configuration =
    new StaticConfiguration(Array(createEntry(principal, keyTab)))

  /** Installs a process-wide JAAS configuration for the given keytab/principal. */
  def configureJaas(keyTab: String, principal: String): Unit =
    Configuration.setConfiguration(createConfig(principal, keyTab))
}
| GoogleCloudPlatform/professional-services | tools/bigquery-hive-external-table-loader/src/main/scala/com/google/cloud/bqhiveloader/Kerberos.scala | Scala | apache-2.0 | 1,859 |
package org.twitterist.utils.solr.schemamanager
/** Package for schema management DSL
*/
package object dsl {

  /** Creates a FieldBuilder instance for a static field
   *
   * @param name The name of the field
   * @return a FieldBuilder instance
   */
  def field(name: String): FieldBuilder = new FieldBuilder(false, name)

  /** Creates a FieldBuilder instance for a dynamic field
   *
   * @param name The name of the field
   * @return a FieldBuilder instance
   */
  def dynamicField(name: String): FieldBuilder = new FieldBuilder(true, name)

  /** Abstract class for field property definition
   *
   * Note: `value` is a var so the DSL can flip a property off
   * (e.g. negate it) after construction.
   *
   * @param value The boolean value of the field option
   */
  abstract case class FieldProperty(var value: Boolean = true) extends AnyRef

  /** Field property definition */
  final class Indexed extends FieldProperty

  /** Field property definition */
  final class Stored extends FieldProperty

  /** Field property definition */
  final class Required extends FieldProperty

  /** Field property definition instance to use in DSL */
  final val indexed = new Indexed

  /** Field property definition instance to use in DSL */
  final val stored = new Stored

  /** Field property definition instance to use in DSL */
  final val required = new Required
}
| twitterist/solr-schema-manager | src/main/scala/org/twitterist/utils/solr/schemamanager/dsl/package.scala | Scala | mit | 1,276 |
package com.azavea.opentransit.indicators
import com.github.nscala_time.time.Imports._
// A named time window over which indicators are sampled.
// periodType examples seen in this codebase: "alltime", "weekend",
// and weekday-style period types (see SamplePeriod.getRepresentativeWeekday).
case class SamplePeriod(
  id: Int,
  periodType: String,
  start: LocalDateTime,
  end: LocalDateTime
) {
  // Duration of this sample period as a joda-time Period.
  def period: Period = new Period(start, end)
}
object SamplePeriod {
  /**
   * Picks the start date of the first period that is neither "alltime" nor
   * "weekend", i.e. one representing a typical weekday; None if absent.
   */
  def getRepresentativeWeekday(periods: Seq[SamplePeriod]): Option[LocalDate] =
    periods.collectFirst {
      case p if p.periodType != "alltime" && p.periodType != "weekend" =>
        p.start.toLocalDate
    }
}
| flibbertigibbet/open-transit-indicators | scala/opentransit/src/main/scala/com/azavea/opentransit/indicators/SamplePeriod.scala | Scala | gpl-3.0 | 477 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
// Tooltip texts shown in the Spark web UI. NOTE: several strings deliberately
// end with a trailing space because they are concatenated or rendered inline;
// keep that whitespace intact when editing.
private[spark] object ToolTips {
  val SCHEDULER_DELAY =
    """Scheduler delay includes time to ship the task from the scheduler to
       the executor, and time to send the task result from the executor to the scheduler. If
       scheduler delay is large, consider decreasing the size of tasks or decreasing the size
       of task results."""

  val TASK_DESERIALIZATION_TIME =
    """Time spent deserializing the task closure on the executor, including the time to read the
       broadcasted task."""

  val SHUFFLE_READ_BLOCKED_TIME =
    "Time that the task spent blocked waiting for shuffle data to be read from remote machines."

  val INPUT = "Bytes and records read from Hadoop or from Spark storage."

  val OUTPUT = "Bytes and records written to Hadoop."

  val STORAGE_MEMORY =
    "Memory used / total available memory for storage of data " +
      "like RDD partitions cached in memory. "

  val SHUFFLE_WRITE =
    "Bytes and records written to disk in order to be read by a shuffle in a future stage."

  val SHUFFLE_READ =
    """Total shuffle bytes and records read (includes both data read locally and data read from
       remote executors). """

  val SHUFFLE_READ_REMOTE_SIZE =
    """Total shuffle bytes read from remote executors. This is a subset of the shuffle
       read bytes; the remaining shuffle data is read locally. """

  val GETTING_RESULT_TIME =
    """Time that the driver spends fetching task results from workers. If this is large, consider
       decreasing the amount of data returned from each task."""

  val RESULT_SERIALIZATION_TIME =
    """Time spent serializing the task result on the executor before sending it back to the
       driver."""

  val GC_TIME =
    """Time that the executor spent paused for Java garbage collection while the task was
       running."""

  val PEAK_EXECUTION_MEMORY =
    """Execution memory refers to the memory used by internal data structures created during
       shuffles, aggregations and joins when Tungsten is enabled. The value of this accumulator
       should be approximately the sum of the peak sizes across all such data structures created
       in this task. For SQL jobs, this only tracks all unsafe operators, broadcast joins, and
       external sort."""

  val JOB_TIMELINE =
    """Shows when jobs started and ended and when executors joined or left. Drag to scroll.
       Click Enable Zooming and use mouse wheel to zoom in/out."""

  val STAGE_TIMELINE =
    """Shows when stages started and ended and when executors joined or left. Drag to scroll.
       Click Enable Zooming and use mouse wheel to zoom in/out."""

  val JOB_DAG =
    """Shows a graph of stages executed for this job, each of which can contain
       multiple RDD operations (e.g. map() and filter()), and of RDDs inside each operation
       (shown as dots)."""

  val STAGE_DAG =
    """Shows a graph of RDD operations in this stage, and RDDs inside each one. A stage can run
       multiple operations (e.g. two map() functions) if they can be pipelined. Some operations
       also create multiple RDDs internally. Cached RDDs are shown in green.
    """

  val TASK_TIME =
    "Shaded red when garbage collection (GC) time is over 10% of task time"

  val APPLICATION_EXECUTOR_LIMIT =
    """Maximum number of executors that this application will use. This limit is finite only when
       dynamic allocation is enabled. The number of granted executors may exceed the limit
       ephemerally when executors are being killed.
    """
}
| sh-cho/cshSpark | ui/ToolTips.scala | Scala | apache-2.0 | 4,346 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.api
import java.util.{Locale, Properties}
import kafka.log.LogConfig
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.{JaasTestUtils, TestUtils}
import com.yammer.metrics.Metrics
import com.yammer.metrics.core.{Gauge, Histogram, Meter}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.{Metric, MetricName, TopicPartition}
import org.apache.kafka.common.config.SaslConfigs
import org.apache.kafka.common.errors.InvalidTopicException
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.junit.{After, Before, Test}
import org.junit.Assert._
import scala.collection.JavaConverters._
/**
 * End-to-end verification of Kafka client, broker and Yammer metrics over a
 * SASL/PLAIN listener: produces/consumes records and checks that rate,
 * cumulative, conversion, ZK, authentication and error metrics are recorded.
 */
class MetricsTest extends IntegrationTestHarness with SaslSetup {

  override val producerCount = 1
  override val consumerCount = 1
  override val serverCount = 1

  override protected def listenerName = new ListenerName("CLIENT")
  private val kafkaClientSaslMechanism = "PLAIN"
  private val kafkaServerSaslMechanisms = List(kafkaClientSaslMechanism)
  private val kafkaServerJaasEntryName =
    s"${listenerName.value.toLowerCase(Locale.ROOT)}.${JaasTestUtils.KafkaServerContextName}"

  this.serverConfig.setProperty(KafkaConfig.ZkEnableSecureAclsProp, "false")
  // Fix: the original passed KafkaConfig.AutoCreateTopicsEnableDoc (the
  // human-readable documentation string) as the property key, which set a
  // meaningless property and silently left topic auto-creation at its
  // default. Use the actual config key constant instead.
  this.serverConfig.setProperty(KafkaConfig.AutoCreateTopicsEnableProp, "false")
  this.producerConfig.setProperty(ProducerConfig.LINGER_MS_CONFIG, "10")
  this.producerConfig.setProperty(ProducerConfig.BATCH_SIZE_CONFIG, "1000000")
  override protected def securityProtocol = SecurityProtocol.SASL_PLAINTEXT
  override protected val serverSaslProperties =
    Some(kafkaServerSaslProperties(kafkaServerSaslMechanisms, kafkaClientSaslMechanism))
  override protected val clientSaslProperties =
    Some(kafkaClientSaslProperties(kafkaClientSaslMechanism))

  @Before
  override def setUp(): Unit = {
    // Guard against metric leakage between tests in the shared registry.
    verifyNoRequestMetrics("Request metrics not removed in a previous test")
    startSasl(jaasSections(kafkaServerSaslMechanisms, Some(kafkaClientSaslMechanism), KafkaSasl, kafkaServerJaasEntryName))
    super.setUp()
  }

  @After
  override def tearDown(): Unit = {
    super.tearDown()
    closeSasl()
    verifyNoRequestMetrics("Request metrics not removed in this test")
  }

  /**
   * Verifies some of the metrics of producer, consumer as well as server.
   */
  @Test
  def testMetrics(): Unit = {
    // An old message format forces broker-side message conversion, which is
    // what the conversion-metric checks below rely on.
    val topic = "topicWithOldMessageFormat"
    val props = new Properties
    props.setProperty(LogConfig.MessageFormatVersionProp, "0.9.0")
    createTopic(topic, numPartitions = 1, replicationFactor = 1, props)
    val tp = new TopicPartition(topic, 0)

    // Produce and consume some records
    val numRecords = 10
    val recordSize = 1000
    val producer = producers.head
    sendRecords(producer, numRecords, recordSize, tp)

    val consumer = this.consumers.head
    consumer.assign(List(tp).asJava)
    consumer.seek(tp, 0)
    TestUtils.consumeRecords(consumer, numRecords)

    verifyKafkaRateMetricsHaveCumulativeCount()
    verifyClientVersionMetrics(consumer.metrics, "Consumer")
    verifyClientVersionMetrics(this.producers.head.metrics, "Producer")

    val server = servers.head
    verifyBrokerMessageConversionMetrics(server, recordSize, tp)
    verifyBrokerErrorMetrics(servers.head)
    verifyBrokerZkMetrics(server, topic)

    generateAuthenticationFailure(tp)
    verifyBrokerAuthenticationMetrics(server)
  }

  /** Produces `numRecords` records of `recordSize` bytes to `tp` and flushes. */
  private def sendRecords(producer: KafkaProducer[Array[Byte], Array[Byte]], numRecords: Int,
      recordSize: Int, tp: TopicPartition) = {
    val bytes = new Array[Byte](recordSize)
    (0 until numRecords).map { i =>
      producer.send(new ProducerRecord(tp.topic, tp.partition, i.toLong, s"key $i".getBytes, bytes))
    }
    producer.flush()
  }

  // Create a producer that fails authentication to verify authentication failure metrics
  private def generateAuthenticationFailure(tp: TopicPartition): Unit = {
    val producerProps = new Properties()
    val saslProps = new Properties()
    // Temporary limit to reduce blocking before KIP-152 client-side changes are merged
    saslProps.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "1000")
    saslProps.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "1000")
    // SCRAM mechanism is not enabled on the broker, so authentication fails.
    saslProps.put(SaslConfigs.SASL_MECHANISM, "SCRAM-SHA-256")
    // Use acks=0 to verify error metric when connection is closed without a response
    saslProps.put(ProducerConfig.ACKS_CONFIG, "0")
    val producer = TestUtils.createNewProducer(brokerList, securityProtocol = securityProtocol,
      trustStoreFile = trustStoreFile, saslProperties = Some(saslProps), props = Some(producerProps))
    try {
      producer.send(new ProducerRecord(tp.topic, tp.partition, "key".getBytes, "value".getBytes)).get
    } catch {
      case _: Exception => // expected exception
    } finally {
      producer.close()
    }
  }

  /**
   * For every client `*-rate` metric, asserts that a matching cumulative
   * `*-total` or `*-time` metric exists, and spot-checks recorded values.
   */
  private def verifyKafkaRateMetricsHaveCumulativeCount(): Unit = {

    def exists(name: String, rateMetricName: MetricName, allMetricNames: Set[MetricName]): Boolean = {
      allMetricNames.contains(new MetricName(name, rateMetricName.group, "", rateMetricName.tags))
    }

    def verify(rateMetricName: MetricName, allMetricNames: Set[MetricName]): Unit = {
      val name = rateMetricName.name
      val totalExists = exists(name.replace("-rate", "-total"), rateMetricName, allMetricNames)
      val totalTimeExists = exists(name.replace("-rate", "-time"), rateMetricName, allMetricNames)
      assertTrue(s"No cumulative count/time metric for rate metric $rateMetricName",
        totalExists || totalTimeExists)
    }

    val consumer = this.consumers.head
    val consumerMetricNames = consumer.metrics.keySet.asScala.toSet
    consumerMetricNames.filter(_.name.endsWith("-rate"))
      .foreach(verify(_, consumerMetricNames))

    val producer = this.producers.head
    val producerMetricNames = producer.metrics.keySet.asScala.toSet
    val producerExclusions = Set("compression-rate") // compression-rate is an Average metric, not Rate
    producerMetricNames.filter(_.name.endsWith("-rate"))
      .filterNot(metricName => producerExclusions.contains(metricName.name))
      .foreach(verify(_, producerMetricNames))

    // Check a couple of metrics of consumer and producer to ensure that values are set
    verifyKafkaMetricRecorded("records-consumed-rate", consumer.metrics, "Consumer")
    verifyKafkaMetricRecorded("records-consumed-total", consumer.metrics, "Consumer")
    verifyKafkaMetricRecorded("record-send-rate", producer.metrics, "Producer")
    verifyKafkaMetricRecorded("record-send-total", producer.metrics, "Producer")
  }

  /** Asserts the client exposes non-empty `commit-id` and `version` metrics. */
  private def verifyClientVersionMetrics(metrics: java.util.Map[MetricName, _ <: Metric], entity: String): Unit = {
    Seq("commit-id", "version").foreach { name =>
      verifyKafkaMetric(name, metrics, entity) { matchingMetrics =>
        assertEquals(1, matchingMetrics.size)
        val metric = matchingMetrics.head
        val value = metric.metricValue
        assertNotNull(s"$entity metric not recorded $name", value)
        // Fix: the original used assertNotNull on this boolean expression;
        // a boxed Boolean is never null, so the check could never fail.
        // assertTrue actually enforces the non-empty-String contract.
        assertTrue(s"$entity metric $name should be a non-empty String",
          value.isInstanceOf[String] && !value.asInstanceOf[String].isEmpty)
        assertTrue("Client-id not specified", metric.metricName.tags.containsKey("client-id"))
      }
    }
  }

  /** Asserts broker socket-server authentication success/failure metrics. */
  private def verifyBrokerAuthenticationMetrics(server: KafkaServer): Unit = {
    val metrics = server.metrics.metrics
    TestUtils.waitUntilTrue(() =>
      maxKafkaMetricValue("failed-authentication-total", metrics, "Broker", Some("socket-server-metrics")) > 0,
      "failed-authentication-total not updated")
    verifyKafkaMetricRecorded("successful-authentication-rate", metrics, "Broker", Some("socket-server-metrics"))
    verifyKafkaMetricRecorded("successful-authentication-total", metrics, "Broker", Some("socket-server-metrics"))
    verifyKafkaMetricRecorded("failed-authentication-rate", metrics, "Broker", Some("socket-server-metrics"))
    verifyKafkaMetricRecorded("failed-authentication-total", metrics, "Broker", Some("socket-server-metrics"))
  }

  /** Asserts message-conversion and request-size metrics on the broker. */
  private def verifyBrokerMessageConversionMetrics(server: KafkaServer, recordSize: Int, tp: TopicPartition): Unit = {
    val requestMetricsPrefix = "kafka.network:type=RequestMetrics"
    val requestBytes = verifyYammerMetricRecorded(s"$requestMetricsPrefix,name=RequestBytes,request=Produce")
    val tempBytes = verifyYammerMetricRecorded(s"$requestMetricsPrefix,name=TemporaryMemoryBytes,request=Produce")
    assertTrue(s"Unexpected temporary memory size requestBytes $requestBytes tempBytes $tempBytes",
      tempBytes >= recordSize)

    verifyYammerMetricRecorded(s"kafka.server:type=BrokerTopicMetrics,name=ProduceMessageConversionsPerSec")

    // Conversion time less than 1 millisecond is reported as zero, so retry with larger batches until time > 0
    var iteration = 0
    TestUtils.retry(5000) {
      val conversionTimeMs = yammerMetricValue(s"$requestMetricsPrefix,name=MessageConversionsTimeMs,request=Produce").asInstanceOf[Double]
      if (conversionTimeMs <= 0.0) {
        iteration += 1
        sendRecords(producers.head, 1000 * iteration, 100, tp)
      }
      assertTrue(s"Message conversion time not recorded $conversionTimeMs", conversionTimeMs > 0.0)
    }

    verifyYammerMetricRecorded(s"$requestMetricsPrefix,name=RequestBytes,request=Fetch")
    // Down-conversion is not needed on Fetch in this test, hence zero.
    verifyYammerMetricRecorded(s"$requestMetricsPrefix,name=TemporaryMemoryBytes,request=Fetch", value => value == 0.0)

    // request size recorded for all request types, check one
    verifyYammerMetricRecorded(s"$requestMetricsPrefix,name=RequestBytes,request=Metadata")
  }

  /** Asserts ZooKeeper latency histogram and session-state gauge. */
  private def verifyBrokerZkMetrics(server: KafkaServer, topic: String): Unit = {
    val histogram = yammerHistogram("kafka.server:type=ZooKeeperClientMetrics,name=ZooKeeperRequestLatencyMs")
    // Latency is rounded to milliseconds, so check the count instead
    val initialCount = histogram.count
    servers.head.zkClient.getLeaderForPartition(new TopicPartition(topic, 0))
    val newCount = histogram.count
    assertTrue("ZooKeeper latency not recorded", newCount > initialCount)
    val min = histogram.min
    assertTrue(s"Min latency should not be negative: $min", min >= 0)

    assertEquals(s"Unexpected ZK state", "CONNECTED", yammerMetricValue("SessionState"))
  }

  /** Asserts ErrorsPerSec metrics register dynamically and record errors. */
  private def verifyBrokerErrorMetrics(server: KafkaServer): Unit = {

    def errorMetricCount = Metrics.defaultRegistry.allMetrics.keySet.asScala.filter(_.getName == "ErrorsPerSec").size

    val startErrorMetricCount = errorMetricCount
    val errorMetricPrefix = "kafka.network:type=RequestMetrics,name=ErrorsPerSec"
    verifyYammerMetricRecorded(s"$errorMetricPrefix,request=Metadata,error=NONE")

    try {
      // Topic name with illegal characters triggers INVALID_TOPIC_EXCEPTION.
      consumers.head.partitionsFor("12{}!")
    } catch {
      case _: InvalidTopicException => // expected
    }
    verifyYammerMetricRecorded(s"$errorMetricPrefix,request=Metadata,error=INVALID_TOPIC_EXCEPTION")

    // Check that error metrics are registered dynamically
    val currentErrorMetricCount = errorMetricCount
    assertEquals(startErrorMetricCount + 1, currentErrorMetricCount)
    assertTrue(s"Too many error metrics $currentErrorMetricCount", currentErrorMetricCount < 10)

    // Verify that error metric is updated with producer acks=0 when no response is sent
    sendRecords(producers.head, 1, 100, new TopicPartition("non-existent", 0))
    verifyYammerMetricRecorded(s"$errorMetricPrefix,request=Metadata,error=LEADER_NOT_AVAILABLE")
  }

  /** Finds metrics matching `name` (and optional `group`) and runs `verify`. */
  private def verifyKafkaMetric[T](name: String, metrics: java.util.Map[MetricName, _ <: Metric], entity: String,
      group: Option[String] = None)(verify: Iterable[Metric] => T) : T = {
    val matchingMetrics = metrics.asScala.filter {
      case (metricName, _) => metricName.name == name && group.forall(_ == metricName.group)
    }
    assertTrue(s"Metric not found $name", matchingMetrics.size > 0)
    verify(matchingMetrics.values)
  }

  private def maxKafkaMetricValue(name: String, metrics: java.util.Map[MetricName, _ <: Metric], entity: String,
      group: Option[String]): Double = {
    // Use max value of all matching metrics since Selector metrics are recorded for each Processor
    verifyKafkaMetric(name, metrics, entity, group) { matchingMetrics =>
      matchingMetrics.foldLeft(0.0)((max, metric) => Math.max(max, metric.value))
    }
  }

  private def verifyKafkaMetricRecorded(name: String, metrics: java.util.Map[MetricName, _ <: Metric], entity: String,
      group: Option[String] = None): Unit = {
    val value = maxKafkaMetricValue(name, metrics, entity, group)
    assertTrue(s"$entity metric not recorded correctly for $name value $value", value > 0.0)
  }

  /** Looks up a Yammer metric by MBean-name suffix and extracts its value. */
  private def yammerMetricValue(name: String): Any = {
    val allMetrics = Metrics.defaultRegistry.allMetrics.asScala
    val (_, metric) = allMetrics.find { case (n, _) => n.getMBeanName.endsWith(name) }
      .getOrElse(fail(s"Unable to find broker metric $name: allMetrics: ${allMetrics.keySet.map(_.getMBeanName)}"))
    metric match {
      case m: Meter => m.count.toDouble
      case m: Histogram => m.max
      case m: Gauge[_] => m.value
      case m => fail(s"Unexpected broker metric of class ${m.getClass}")
    }
  }

  /** Looks up a Yammer Histogram by MBean-name suffix. */
  private def yammerHistogram(name: String): Histogram = {
    val allMetrics = Metrics.defaultRegistry.allMetrics.asScala
    val (_, metric) = allMetrics.find { case (n, _) => n.getMBeanName.endsWith(name) }
      .getOrElse(fail(s"Unable to find broker metric $name: allMetrics: ${allMetrics.keySet.map(_.getMBeanName)}"))
    metric match {
      case m: Histogram => m
      case m => fail(s"Unexpected broker metric of class ${m.getClass}")
    }
  }

  private def verifyYammerMetricRecorded(name: String, verify: Double => Boolean = d => d > 0): Double = {
    val metricValue = yammerMetricValue(name).asInstanceOf[Double]
    assertTrue(s"Broker metric not recorded correctly for $name value $metricValue", verify(metricValue))
    metricValue
  }

  private def verifyNoRequestMetrics(errorMessage: String): Unit = {
    val metrics = Metrics.defaultRegistry.allMetrics.asScala.filter { case (n, _) =>
      n.getMBeanName.startsWith("kafka.network:type=RequestMetrics")
    }
    assertTrue(s"$errorMessage: ${metrics.keys}", metrics.isEmpty)
  }
}
| sebadiaz/kafka | core/src/test/scala/integration/kafka/api/MetricsTest.scala | Scala | apache-2.0 | 15,159 |
package org.jetbrains.plugins.scala.lang.resolve2
/**
* Pavel.Fatin, 02.02.2010
*/
class FunctionTypeGenericTest extends ResolveTestBase {
  // Each testX method resolves the correspondingly named fixture file
  // located under this folder.
  override def folderPath: String = {
    super.folderPath + "function/type/generic/"
  }

  def testFunction1() = doTest()

  def testFunction2() = doTest()

  //TODO answer?
  // Disabled pending a decision on the expected resolution result.
//  def testFunctionExpression1 = doTest
  def testFunctionExpression2() = doTest()

  //TODO answer?
  // Disabled pending a decision on the expected resolution result.
//  def testGeneric1 = doTest
  def testGeneric2() = doTest()
}
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com
*/
/**
* Ldap Processor Log Entity
* @param ldapProcessorLogId Ldap Processor Log ID
* @param ldapProcessorId Ldap Processor ID
* @param tenantId Tenant ID
* @param organizationId Organization ID
* @param isActive Is Active
* @param created Created
* @param createdBy Created By
* @param updated Updated
* @param updatedBy Updated By
* @param summary Summary
* @param isError Is Error
* @param reference Reference
* @param description Description
* @param textMsg Text Msg
* @param binaryData Binary Data
* @param uuid UUID
*/
// Immutable domain model row for AD_LdapProcessorLog; optional text columns
// are modeled as Option[String].
case class LdapProcessorLog(ldapProcessorLogId: Int,
                            ldapProcessorId: Int,
                            tenantId : Int ,
                            organizationId : Int,
                            isActive: Boolean = true,
                            created: DateTime = DateTime.now,
                            createdBy: Int,
                            updated: DateTime = DateTime.now,
                            updatedBy: Int,
                            summary: Option[String],
                            isError: Boolean = false,
                            reference: Option[String],
                            description: Option[String],
                            textMsg: Option[String],
                            binaryData: String,
                            uuid: String
                           ) extends DomainModel

  with ActiveEnabled
  with Identifiable
  with Traceable {
  override type ActiveEnabled = this.type
  override type Identifiable = this.type
  override type Traceable = this.type
  // Primary key accessor and DB metadata used by the generic persistence layer.
  override def Id: Int = ldapProcessorLogId

  override val entityName: String = "AD_LdapProcessorLog"
  override val identifier: String = "AD_LdapProcessorLog_ID"
}
object LdapProcessorLog {
  implicit lazy val jsonFormat = Jsonx.formatCaseClass[LdapProcessorLog]

  /**
   * Factory taking plain (possibly null) String arguments.
   *
   * Fix: the original implementation always passed None for summary, reference,
   * description and textMsg, silently discarding the caller's values. They are
   * now wrapped with Option(...), which still maps null to None while keeping
   * real values.
   */
  def create(ldapProcessorLogId: Int,
             ldapProcessorId: Int,
             tenantId : Int ,
             organizationId : Int,
             isActive: Boolean,
             created: DateTime,
             createdBy: Int,
             updated: DateTime,
             updatedBy: Int,
             summary: String,
             isError: Boolean,
             reference: String,
             description: String,
             textMsg: String,
             binaryData: String,
             uuid: String): LdapProcessorLog =
    LdapProcessorLog(ldapProcessorLogId, ldapProcessorId, tenantId, organizationId, isActive,
      created, createdBy, updated, updatedBy, Option(summary), isError, Option(reference),
      Option(description), Option(textMsg), binaryData, uuid)
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/LdapProcessorLog.scala | Scala | gpl-3.0 | 3,714 |
// Pending regression fixture: two mixins defining the same protected member,
// combined below through qualified super calls.
trait MM {
  protected def method = "bip"
}

trait NN {
  protected def method = "bop"
}

// Resolves the conflict explicitly: super[MM]/super[NN] select each parent's
// implementation. NOTE(review): this lives under test/pending, so the
// super[MM].hashCode form (hashCode is not declared in MM itself) is exactly
// what the bug report exercises -- do not "fix" it.
trait OOOOO extends MM with NN {
  override protected def method = super[MM].method + super[NN].method
  override def hashCode = super[MM].hashCode + super[NN].hashCode
}
| felixmulder/scala | test/pending/run/bug4704run.scala | Scala | bsd-3-clause | 259 |
package com.gx.strategy.fp
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/**
 * Sorts datasets by delegating to an interchangeable sorting strategy
 * (the strategy pattern encoded as a plain function value).
 */
class Sorter[T](strategy: SortStrategy.Type[T]) {

  /** Applies the configured strategy to `dataset`; `ord` supplies the element ordering. */
  def sort(dataset: List[T])(implicit ord: Ordering[T]): List[T] = strategy.apply(dataset)
}
| josephguan/scala-design-patterns | behavioral/strategy/src/main/scala/com/gx/strategy/fp/Sorter.scala | Scala | apache-2.0 | 784 |
/*
* Copyright 2016 Nikolay Smelik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalabot.examples.teamnotification.states
import scalabot.common.bot._
import scalabot.common.message.{Intent, NegativeIntent, PositiveIntent}
import scalabot.examples.teamnotification.conversations.AddNotificationConversationProvider
import scalabot.examples.teamnotification.{TeamNotificationBot, TeamNotificationData}
/**
* Created by Nikolay.Smelik on 8/17/2016.
*/
trait AddNotificationStateProvider {
  this: TeamNotificationBot with AddNotificationConversationProvider =>

  // Bot state entered after the bot asked whether a notification should be
  // added. A positive answer starts AddNotificationConversation (carrying over
  // the accumulated bundle), a negative answer exits, and any other intent
  // keeps the bot in this state.
  case class AddNotificationAgree(bundle: Bundle, data: TeamNotificationData) extends BotState {
    override def handleIntent = {
      case PositiveIntent(sender, _) =>
        Reply(MoveToConversation(new AddNotificationConversation(data).appendBundle(bundle)))
      case NegativeIntent(sender, _) =>
        Reply(Exit)
      case _ => Reply(this)
    }
  }
}
| kerzok/ScalaBot | Examples/src/main/scala/scalabot/examples/teamnotification/states/AddNotificationStateProvider.scala | Scala | apache-2.0 | 1,458 |
package capitulo11
// Notes-only class (no members): chapter 11, section 4 of "Scala for the
// Impatient" -- assignment operators.
class Sec04_OperadoresAtribuicao {
  /**
   * An assignment operator has the form operator=, and the expression
   *  a operator= b
   * means the same as
   *  a = a operator b
   * For example, a += b is equivalent to: a = a + b
   * There are a few technical details:
   * 1) <=, >=, and != are not assignment operators
   * 2) An operator starting with = is never an assignment operator
   *    (==, ===, =/=, etc...)
   * 3) If a has a method named operator=, then that method is called
   *    directly
   */
} | celioeduardo/scala-impatient | src/test/scala/capitulo11/Sec04_OperadoresAtribuicao.scala | Scala | mit | 551 |
package com.karasiq.bootstrap4.pagination
import rx.{Rx, Var}
import com.karasiq.bootstrap.context.RenderingContext
trait UniversalPageSelectors extends PageSelectors { self: RenderingContext ⇒
  import scalaTags.all._

  type PageSelector = UniversalPageSelector

  // Factory producing the default page-selector implementation.
  object PageSelector extends PageSelectorFactory {
    def apply(pages: Rx[Int], currentPage: Var[Int]): PageSelector = {
      new UniversalPageSelector(pages, currentPage)
    }
  }

  // Bootstrap 4 pagination widget bound to a reactive page count (`pages`)
  // and the currently selected page (`currentPage`).
  class UniversalPageSelector(val pages: Rx[Int], val currentPage: Var[Int])
    extends AbstractPageSelector with BootstrapHtmlComponent {

    // "Previous" arrow; decrements currentPage unless already on page 1.
    def previousLink: TagT = {
      a(`class` := "page-link", href := "#", aria.label := "Previous", onclick := Callback.onClick { _ ⇒
        if (currentPage.now > 1) currentPage.update(currentPage.now - 1)
      }, span(aria.hidden := true, raw("«")))
    }

    // "Next" arrow; increments currentPage unless already on the last page.
    def nextLink: TagT = {
      a(`class` := "page-link", href := "#", aria.label := "Next", onclick := Callback.onClick { _ ⇒
        if (currentPage.now < pages.now) currentPage.update(currentPage.now + 1)
      }, span(aria.hidden := true, raw("»")))
    }

    // Link selecting a specific page number.
    def pageLink(page: Int): TagT = {
      a(`class` := "page-link", href := "#", onclick := Callback.onClick(_ ⇒ currentPage.update(page)), page)
    }

    private[this] def previousPageButton: TagT = {
      li(
        `class` := "page-item",
        previousLink,
        "disabled".classIf(Rx(currentPage() == 1))
      )
    }

    private[this] def nextPageButton: TagT = {
      li(
        `class` := "page-item",
        nextLink,
        "disabled".classIf(Rx(currentPage() == pages()))
      )
    }

    private[this] def pageButton(page: Int): TagT = {
      li(
        `class` := "page-item",
        "active".classIf(Rx(page == currentPage())),
        pageLink(page)
      )
    }

    // Renders the whole <nav><ul class="pagination">...; the widget is hidden
    // when there is only a single page.
    def renderTag(md: ModifierT*): TagT = {
      val nav = tag("nav")
      nav(Rx(ul(`class` := "pagination justify-content-center", Rx(pages() == 1).reactiveHide, md)(
        previousPageButton,
        for(page ← 1 to pages()) yield pageButton(page),
        nextPageButton
      )))
    }
  }
}
| Karasiq/scalajs-bootstrap | library-v4/shared/src/main/scala/com/karasiq/bootstrap4/pagination/UniversalPageSelectors.scala | Scala | mit | 2,143 |
/** Centralised dependency version numbers used by the sbt build definition. */
object Version {
  val akka: String = "2.4.7"
  val scalaTest: String = "2.2.1"
  val scalaCheck: String = "1.12.1"
  val logback: String = "1.1.2"
  val scalaLogging: String = "3.1.0"
}
| jmarin/microservice-template | project/Version.scala | Scala | apache-2.0 | 183 |
/**
* Magmanics Licensing. This web application allows for centralized control
* of client application activation, with optional configuration parameters
* to control licensable features, and storage of supplementary information
* about the client machine. Client applications may interface with this
* central server (for activation) using libraries licenced under an
* alternative licence.
*
* Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.magmanics.licensing.datalayer.model
import java.util
import javax.persistence._
/**
* Created by IntelliJ IDEA.
* User: James
* Date: 12-Jun-2010
* Time: 01:34:18
* To change this template use File | Settings | File Templates.
*/
// Common shape of a configurable product option: a name plus a typed default
// value (T is String for text/list options, Boolean for the radio option).
trait ProductOptionEntity[T] {
  var name: String

  def getDefault: T

  def setDefault(default: T)
}
// JPA entity for free-text product options (table product_options_string).
@Entity
@Table(name = "product_options_string")
class TextProductOptionEntity extends ProductOptionEntity[String] {
  @Id
  @GeneratedValue
  var id: Long = _

  @Basic
  @Column(nullable = false)
  var name: String = _

  @Basic
  @Column(nullable = false)
  var default: String = _

  // Owning product (FK column product_id).
  @ManyToOne(optional = false)
  @JoinColumn(name = "product_id")
  var product: ProductEntity = _

  override def getDefault = default

  override def setDefault(defaultValue: String) = default = defaultValue
}
// JPA entity for boolean (radio) product options (table product_options_boolean).
@Entity
@Table(name = "product_options_boolean")
class RadioProductOptionEntity extends ProductOptionEntity[Boolean] {
  @Id
  @GeneratedValue
  var id: Long = _

  @Basic
  @Column(nullable = false)
  var name: String = _

  @Basic
  @Column(nullable = false)
  var default: Boolean = _

  // Owning product (FK column product_id).
  @ManyToOne(optional = false)
  @JoinColumn(name = "product_id")
  var product: ProductEntity = _

  override def getDefault = default

  override def setDefault(defaultValue: Boolean) = default = defaultValue
}
// JPA entity for list-valued product options (table product_options_list);
// the selectable values live in ListProductOptionValueEntity rows.
@Entity
@Table(name = "product_options_list")
class ListProductOptionEntity extends ProductOptionEntity[String] {
  @Id
  @GeneratedValue
  var id: Long = _

  @Basic
  @Column(nullable = false)
  var name: String = _

  @Basic
  @Column(nullable = false)
  var default: String = _

  // Owning product (FK column product_id).
  @ManyToOne(optional = false)
  @JoinColumn(name = "product_id")
  var product: ProductEntity = _

  // Child values; cascade + orphanRemoval means values are fully owned here.
  @OneToMany(mappedBy = "listProductOption", cascade = Array(CascadeType.ALL), orphanRemoval = true)
  var optionValues: java.util.List[ListProductOptionValueEntity] = _

  // Lazily initialises the backing list on first use (JPA may leave it null).
  def addOptionValue(optionValue: ListProductOptionValueEntity) {
    if (optionValues == null)
      optionValues = new util.ArrayList[ListProductOptionValueEntity]()
    optionValues.add(optionValue)
  }

  override def getDefault = default

  override def setDefault(defaultValue: String) = default = defaultValue
}
// JPA entity for one selectable value of a list option
// (table product_options_list_values).
// NOTE(review): "extends {" with no parent class uses Scala's bare
// template-body form -- the extends keyword is redundant here; confirm it
// was intentional.
@Entity
@Table(name = "product_options_list_values")
class ListProductOptionValueEntity extends {

  // Convenience constructor used when building values for a parent list option;
  // must call the primary constructor first per Scala rules.
  def this(myValue: String, parentList: ListProductOptionEntity) = {
    this()
    value = myValue
    listProductOption = parentList
  }

  @Id
  @GeneratedValue
  var id: Long = _

  @Basic
  @Column(nullable = false)
  var value: String = _

  // Owning list option (FK column list_product_option_id).
  @ManyToOne(optional = false)
  @JoinColumn(name = "list_product_option_id")
  var listProductOption: ListProductOptionEntity = _
}
/**
* for o in objects {
* print o.name
* o.type match {
* case "TEXT" => display textfield(o.default.getOrElse(""))
* case "RADIO" => display radio(o.default.getOrElse(false))
* case "LIST" => display list(o.values, o.default.get)
* }
* }
*/ | manicmonkey/licensing | Licensing-Server/src/main/scala/com/magmanics/licensing/datalayer/model/ProductOptionEntity.scala | Scala | gpl-3.0 | 4,113 |
/*
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/agpl.html.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package lancet.interpreter
import lancet.core._
import java.lang.reflect.{Array=>jlrArray,_}
import java.util.{Vector=>_,List=>_,_}
import java.io.{PrintWriter,StringWriter}
import sun.misc._
import com.oracle.graal.api._
import com.oracle.graal.api.meta._
import com.oracle.graal.hotspot.meta._
import com.oracle.graal.bytecode._
import scala.virtualization.lms.common._
import scala.virtualization.lms.internal._
//@SuppressWarnings("static-method")
// LMS-staged bytecode interpreter: instead of executing JVM bytecode directly,
// it records the interpreted operations as LMS IR, generates Scala source for
// them, and compiles that source (see lms0 below). Intricate staging code --
// statement order matters; comments only, no behavioral changes.
trait BytecodeInterpreter_LMS extends InterpreterUniverse_LMS with BytecodeInterpreter_Common_Compile { self =>
  import BytecodeInterpreter._

  def getRuntimeInterface(m: MetaAccessProvider) = new Runtime_LMS(m)

  // Prefix trace output with "//" so it is a legal comment inside the
  // generated Scala source.
  override def trace(level: Int, message: String) = super.trace(level, "// " + message)

  override def initialize(): Unit = {
    ScalaCompile.reset()
    super.initialize()
  }

  // Code-generation switches.
  var emitCheckCast = true    // emit .asInstanceOf for checkcast (see checkCastInternal)
  var emitUniqueOpt = true    // devirtualize when a unique concrete method exists (see resolveAndInvoke)
  var debugGlobalDefs = false // print all globalDefs after generation
  var debugDepGraph = false   // export the IR dependency graph

  // for scoping "RES" vars. TODO: move?
  var curResId = 0
  def RES = "RES" + curResId

  def globalVars: scala.collection.immutable.Set[String] // defined in Opt4 (bit of a hack)

  // ---------- high level execution loop ----------

  /* see test4
  def quoteFun[A:Manifest,B:Manifest](f: A=>B): String = {
    val (src0, res) = captureOutputResult {
      val arg = reflect[A]("ARG")
      execute(f.getClass.getMethod("apply", manifest[A].erasure), Array[Rep[Object]](unit(f),arg.asInstanceOf[Rep[Object]])(repManifest[Object]))
    }
    "{ (ARG: " + manifest[A] + ") => " + src0 + "}"
  }*/

  // Pretty-prints generated source: re-indents each nonempty line according to
  // the running brace-nesting depth.
  def printIndented(str: String)(emit: String => Unit): Unit = {
    val lines = str.split("\\n")
    var indent = 1
    for (l0 <- lines) {
      val l = l0.trim
      if (l.length > 0) {
        var open = 0
        var close = 0
        l foreach { case '{' => open += 1 case '}' => close += 1 case _ => }
        // lines starting with '}' are de-indented by the number of leading '}'
        val d = if (close == 0) 0 else l.takeWhile(_ == '}').length
        emit(" "*(indent-d) + l)
        indent += (open - close)
      }
    }
  }

  def createCodegen(): GEN_Scala_LMS { val IR: self.type } =
    new GEN_Scala_LMS { val IR: self.type = self }

  // Handle to a staged/compiled function; only `compiled` and `apply` are
  // implemented by lms0 below, the rest are ??? placeholders.
  abstract class Fun[A,B] extends (A=>B) {
    def code: String
    def compiled: Fun[A,B]
    def interpreted: Fun[A,B]
    def inline: Fun[A,B]
    def printcode = println(code)
  }

  var lastcode: String = "(empty)"

  def fun[B:Manifest](f: =>B): Fun[Unit,B] = fun[Object,B]{ x => f }.asInstanceOf[Fun[Unit,B]]
  // Stages an ordinary Scala function by interpreting the bytecode of its
  // apply method with a symbolic argument.
  def fun[A:Manifest,B:Manifest](f: A=>B): Fun[A,B] = {
    lms0[A,B] { arg =>
      execute(
        f.getClass.getMethod("apply", manifest[A].erasure),
        Array[Rep[Object]](unit(f),arg.asInstanceOf[Rep[Object]])(repManifest[Object])
      ).asInstanceOf[Rep[B]]
    }
  }

  // XX conflict with delite method
  def compile0[A:Manifest,B:Manifest](f: A=>B): A=>B = fun(f)
  //def compile[A:Manifest,B:Manifest](f: A=>B): A=>B = fun(f)

  // Stages a Rep-level function; the result is assigned to the RES variable
  // in the generated code.
  def lms[A:Manifest,B:Manifest](f: Rep[A]=>Rep[B]): Fun[A,B] = {
    lms0[A,B] { arg =>
      val res = f(arg)
      reflect[B]("RES = ",res)(mtr)
    }
  }

  // Core staging pipeline: reify f into IR, run codegen twice (first a dry run
  // to collect the constant pool, then the real pass that also emits the
  // preamble and var declarations), then compile the generated source.
  def lms0[A:Manifest,B:Manifest](f: Rep[A]=>Rep[B]): Fun[A,B] = {
    implicit val tp = manifestToTypeRep(manifest[B])
    val (maStr, mbStr) = (manifestStr(manifest[A]), manifestStr(manifest[B]))
    val arg = fresh[A]
    val y = reify {
      f(arg)
      ()
    }
    val codegen = createCodegen()

    VConstantPool = Vector.empty
    // dry run to hash out constant pool ... HACK
    codegen.withStream(new PrintWriter(new StringWriter)) {
      // codegen.emitSource({ x:Rep[A] => reflect[Unit]("val "+arg+" = "+x); reflect[B](y) },"Generated",codegen.stream)
      // XX OLD LMS
      codegen.emitSource({ x:Rep[A] => reflect[Unit]("val "+quote(arg)+" = ",x); reflect[B](y) },"Generated",codegen.stream)
    }
    val cst = VConstantPool

    val stream = new StringWriter
    val out = new PrintWriter(stream)
    codegen.withStream(out) {
      // codegen.emitSource(List(arg),y,"Generated",codegen.stream)
      // XX OLD LMS
      codegen.emitSource({ x:Rep[A] =>
        // preamble of the generated class: Unsafe access, helpers and the
        // mutable RES result variable
        emitString("import sun.misc.Unsafe")
        emitString("val unsafe = { val fld = classOf[Unsafe].getDeclaredField(\\"theUnsafe\\"); fld.setAccessible(true); fld.get(classOf[Unsafe]).asInstanceOf[Unsafe]; }")
        emitString("type char = Char")
        emitString("def WARN = assert(false, \\"WARN\\")")
        emitString("def ERROR = assert(false, \\"ERROR\\")")
        emitString("  var "+RES+" = null.asInstanceOf["+mbStr+"]")
        // emit var declarations
        // TODO: print only used ones, not all of globalDefs!!
        // TODO: should be part of codegen, really
        for (TP(sym,rhs) <- globalDefs) {
          if (sym.tp != manifest[Unit])
            emitString("var "+quote(sym)+": "+(sym.tp)+" = null.asInstanceOf["+(sym.tp)+"]")
        }
        globalVars.foreach(emitString)
        reflect[Unit]("val "+quote(arg)+" = ",x)
        reflect[Unit](y)
        DynExp[B](RES)
      },"Generated",codegen.stream)
    }

    val source = stream.toString
    printIndented(source)(Console.println)

    if (debugGlobalDefs) globalDefs.foreach(println)

    if (debugDepGraph) {
      val exp = new ExportGraph { val IR: self.type = self }
      println("dumping graph to: "+this.getClass.getName)
      exp.exportGraph(this.getClass.getName)(y.res)
    }

    println(cst)
    println(cst.map(x=>specCls(x._2)).toList)

    // constant-pool objects are passed to the compiled class as constructor args
    val comp = ScalaCompile.compile[A,B](source, "Generated", cst.map(x=>specCls(x._2)).toList)

    val fun: Fun[A,B] = new Fun[A,B] {
      def code = source
      def compiled = this
      def interpreted = ???
      def inline = ???
      def apply(x:A): B = comp(x)
    }
    fun
  }

  // Entry point: interprets `method` symbolically, using a synthetic root
  // frame that holds the interpreter itself, the method, and its arguments.
  //@Override
  def execute(method: ResolvedJavaMethod, boxedArguments: Array[Rep[Object]]): Rep[Object] = {// throws Throwable {
    try {
      val receiver: Boolean = hasReceiver(method);
      val signature: Signature = method.getSignature();
      assert(boxedArguments != null);
      assert(signature.getParameterCount(receiver) == boxedArguments.length);

      if (TRACE) {
        //if (nativeFrame == null) {
          trace(0, "Executing root method " + method);
        //} else {
        //  trace(nativeFrame.depth(), "Executing from native " + method);
        //}
      }

      var rootFrame: InterpreterFrame_Str = null // nativeFrame
      if (rootFrame == null) {
        rootFrame = new InterpreterFrame_Str(rootMethod, signature.getParameterSlots(true)+1);
        rootFrame.pushObject(unit(this));
        rootFrame.pushObject(unit(method));
        rootFrame.pushObject(unit(boxedArguments));
      }

      // TODO (chaeubl): invoke the first method in the same way as any other method (the method might be redirected!)
      val firstFrame: InterpreterFrame = rootFrame.create(method, receiver, 0, false);
      initializeLocals(firstFrame, method, boxedArguments);
      executeRoot(rootFrame, firstFrame);

      /*if (TRACE) {
        if (nativeFrame == null) {
          trace(0, "Returning to root method " + method);
        } else {
          trace(nativeFrame.depth(), "Returning to native " + method);
        }
      }*/

      return popAsObject(rootFrame, signature.getReturnKind()); // only works if rootFrame not copied internally...
    } catch {
      case e: Exception =>
      // TODO (chaeubl): remove this exception handler (only used for debugging)
      throw e;
    }/* finally {
      nativeCallerFrame.set(nativeFrame);
    }*/
  }

  // Pushes receiver (if any) and arguments into the frame, then zero-fills the
  // remaining local slots.
  def initializeLocals(rootFrame: InterpreterFrame, method: ResolvedJavaMethod, boxedArguments: Array[Rep[Object]]) {
    val receiver: Boolean = hasReceiver(method);
    val signature: Signature = method.getSignature();
    var index = 0;
    if (receiver) {
      pushAsObject(rootFrame, Kind.Object, boxedArguments(index));
      index += 1;
    }

    var i = 0
    while (index < boxedArguments.length) {
      pushAsObject(rootFrame, signature.getParameterKind(i), boxedArguments(index));
      i += 1
      index += 1
    }
    // push the remaining locals
    rootFrame.pushVoid(rootFrame.stackTos() - rootFrame.getStackTop());
  }

  def execute(javaMethod: Method, boxedArguments: Array[Rep[Object]]): Rep[Object] = {// throws Throwable {
    return execute(metaAccessProvider.lookupJavaMethod(javaMethod), boxedArguments);
  }

  def hasReceiver(method: ResolvedJavaMethod): Boolean = {
    return !Modifier.isStatic(method.getModifiers());
  }

  def executeRoot(root: InterpreterFrame, frame: InterpreterFrame): Unit = { // throws Throwable {
    if (TRACE) {
      traceCall(frame, "RootCall");
    }
    exec(frame)
    loop(root, frame);
  }

  // Main dispatch loop; implementation supplied by concrete subclasses.
  def loop(root: InterpreterFrame, main: InterpreterFrame): Unit

  // Call-stack helpers used to build human-readable context keys.
  def getContext(frame: InterpreterFrame): scala.List[InterpreterFrame] =
    if (frame == null) Nil else frame :: getContext(frame.getParentFrame)

  def contextKey(frame: InterpreterFrame) = getContext(frame).map(frameKey).mkString(" // ")
  def frameKey(frame: InterpreterFrame) = ("" + frame.getBCI + ":" + frame.getMethod() + frame.getMethod().getSignature().toString()).replace("HotSpotMethod","")

  // Return bytecode: pops the current frame and pushes the return value into
  // a *copy* of the parent frame, then continues execution there.
  override def retn() = local { (frame, bs) =>
    // create copy -- will be pushing values into parent frame !!
    val parentFrame = frame.getParentFrame.asInstanceOf[InterpreterFrame_Str].copy
    val returnValue = frame.getReturnValue()
    popFrame(frame)
    pushAsObjectInternal(parentFrame, frame.getMethod.getSignature().getReturnKind(), returnValue);

    //println("### return "+contextKey(frame))
    exec(parentFrame)
  }

  // ---------- block / statement level ----------

  // lookupswitch support -- currently unimplemented (???); the staged
  // implementation is kept below as a comment.
  def lookupSearch(bs: BytecodeStream, key: Rep[Int])(k: Int => Rep[Unit]): Rep[Unit] = {
    val switchHelper = new BytecodeLookupSwitch(bs, bs.currentBCI())

    var low = 0;
    var high = switchHelper.numberOfCases() - 1;
    ???
    /*emitString(""+key+" match {")
    for (i <- low to high) {
      val midVal = switchHelper.keyAt(i);
      emitString("case "+midVal+" => ")
      emitBlock(reify(k(switchHelper.bci() + switchHelper.offsetAt(i)))) // FIXME
    }
    emitString("case _ => ")
    emitBlock(reify(k(switchHelper.defaultTarget)))
    emitString("}")
    liftConst(())*/
  }

  /*{
    while (low <= high) {
      val mid = (low + high) >>> 1;
      val midVal = switchHelper.keyAt(mid);

      if (midVal < key) {
        low = mid + 1;
      } else if (midVal > key) {
        high = mid - 1;
      } else {
        return switchHelper.bci() + switchHelper.offsetAt(mid); // key found
      }
    }
    return switchHelper.defaultTarget(); // key not found.
  }*/

  // tableswitch support -- currently unimplemented (???); the staged
  // implementation is kept below as a comment.
  def tableSearch(bs: BytecodeStream, index: Rep[Int])(k: Int => Rep[Unit]): Rep[Unit] = {
    val switchHelper = new BytecodeTableSwitch(bs, bs.currentBCI());

    val low = switchHelper.lowKey();
    val high = switchHelper.highKey();

    assert(low <= high);
    ???
    /*emitString(""+index+" match {")
    for (i <- low to high) {
      emitString("case "+i+" => ")
      emitBlock(reify(k(switchHelper.targetAt(i-low))))
    }
    emitString("case _ => ")
    emitBlock(reify(k(switchHelper.defaultTarget)))
    emitString("}")
    /*
    if (index < low || index > high) {
      return switchHelper.defaultTarget();
    } else {
      return switchHelper.targetAt(index - low);
    }
    */
    liftConst(())*/
  }

  // Emits an asInstanceOf cast in the generated code (no-op when
  // emitCheckCast is disabled); generic classes get wildcard type args.
  def checkCastInternal(typ: ResolvedJavaType, value: Rep[Object]): Rep[Object] = {
    if (!emitCheckCast) return value
    val cls = typ.toJava
    val params = cls.getTypeParameters
    val name = if (params.length == 0) cls.getName
      else cls.getName + "[" + params.map(x=>"_").mkString(",") + "]"
    reflect[Object](value,".asInstanceOf["+name+"] // checkCast")
  }

  // called by invokeVirtual; Some(cls) allows static resolution of the target.
  def objectGetClass(receiver: Rep[Object]): Option[Class[_]] = None

  // Virtual dispatch: resolve statically when the receiver class is known or
  // a unique concrete implementation exists; otherwise fall back to a residual
  // runtime invoke in the generated code.
  def resolveAndInvoke(parent: InterpreterFrame, m: ResolvedJavaMethod): InterpreterFrame = {// throws Throwable {
    val receiver = nullCheck(parent.peekReceiver(m));

    // TODO/FIXME

    // get the receiver's class, if possible
    objectGetClass(receiver) match {
      case Some(clazz) =>
        val method = resolveType(parent, clazz).resolveMethod(m);
        return invokeDirect(parent, method, true)
      case _ =>
    }

    // TODO: will require registering an assumption ...
    val unique = if (emitUniqueOpt) m.getDeclaringClass.findUniqueConcreteMethod(m) else null

    if (unique ne null) {
      emitString("// unique method: "+m+" TODO: guard")
      return invokeDirect(parent, unique, true)
    }

    // TODO: if non unique, may want to switch on all possible targets

    //val method: ResolvedJavaMethod = resolveType(parent, receiver.getClass()).resolveMethod(m);

    val parameters = popArgumentsAsObject(parent, m, true);
    val returnValue = runtimeInterface.invoke(m, parameters);
    pushAsObject(parent, m.getSignature().getReturnKind(), returnValue);
    null

    /*val method: ResolvedJavaMethod = resolveType(parent, receiver.getClass()).resolveMethod(m);
    if (method == null) {
      throw new AbstractMethodError();
    }

    return invoke(parent, method, receiver);*/
  }

  // called internally by invoke
  def invokeDirect(parent: InterpreterFrame, method: ResolvedJavaMethod, hasReceiver: Boolean): InterpreterFrame = {// throws Throwable {
    //return parent.create(method, hasReceiver);
    return parent.create(method, hasReceiver, 0, true);
  }

}
| TiarkRompf/lancet | src/main/scala/lancet/interpreter/Interpreter_LMS.scala | Scala | agpl-3.0 | 15,380 |
package HackerRank.Training.DataStructures.Arrays
import java.io.{ByteArrayInputStream, IOException, InputStream, PrintWriter}
import java.util.InputMismatchException
import scala.collection.generic.CanBuildFrom
import scala.language.higherKinds
/**
* Copyright (c) 2017 A. Roberto Fischer
*
* @author A. Roberto Fischer <a.robertofischer@gmail.com> on 4/24/2017
*/
private[this] object AlgorithmicCrush {
import Reader._
import Writer._
private[this] val TEST_INPUT: Option[String] = None
//------------------------------------------------------------------------------------------//
// Solution
//------------------------------------------------------------------------------------------//
private[this] def solve(): Unit = {
val n = nextInt()
val array = Array.fill(n + 1)(0)
val m = nextInt()
for (_ <- 0 until m) {
val (left, right, amount) = (nextInt(), nextInt(), nextInt())
array(left) += amount
if (right + 1 <= n) array(right + 1) -= amount
}
var x = 0L
var max = 0L
for (i <- 1 to n) {
x += array(i)
if (max < x) max = x
}
println(max)
}
//------------------------------------------------------------------------------------------//
// Run
//------------------------------------------------------------------------------------------//
@throws[Exception]
def main(args: Array[String]): Unit = {
val s = System.currentTimeMillis
solve()
flush()
if (TEST_INPUT.isDefined) System.out.println(System.currentTimeMillis - s + "ms")
}
//------------------------------------------------------------------------------------------//
// Input
//------------------------------------------------------------------------------------------//
private[this] final object Reader {
private[this] implicit val in: InputStream = TEST_INPUT.fold(System.in)(s => new ByteArrayInputStream(s.getBytes))
def nextSeq[T, Coll[_]](reader: => Seq[T], n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder ++= reader
}
builder.result()
}
def next[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[T], T, Coll[T]]): Coll[T] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += reader
}
builder.result()
}
def nextWithIndex[T, Coll[_]](reader: => T, n: Int)
(implicit cbf: CanBuildFrom[Coll[(T, Int)], (T, Int), Coll[(T, Int)]]): Coll[(T, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((reader, i))
}
builder.result()
}
def nextDouble[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Double], Double, Coll[Double]]): Coll[Double] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextDouble()
}
builder.result()
}
def nextDoubleWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Double, Int)], (Double, Int), Coll[(Double, Int)]]): Coll[(Double, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextDouble(), i))
}
builder.result()
}
def nextChar[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Char], Char, Coll[Char]]): Coll[Char] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += b.toChar
p += 1
b = readByte().toInt
}
builder.result()
}
def nextCharWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Char, Int)], (Char, Int), Coll[(Char, Int)]]): Coll[(Char, Int)] = {
val builder = cbf()
builder.sizeHint(n)
var b = skip
var p = 0
while (p < n && !isSpaceChar(b)) {
builder += ((b.toChar, p))
p += 1
b = readByte().toInt
}
builder.result()
}
def nextInt[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Int], Int, Coll[Int]]): Coll[Int] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextInt()
}
builder.result()
}
def nextIntWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Int, Int)], (Int, Int), Coll[(Int, Int)]]): Coll[(Int, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextInt(), i))
}
builder.result()
}
def nextLong[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[Long], Long, Coll[Long]]): Coll[Long] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextLong()
}
builder.result()
}
def nextLongWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(Long, Int)], (Long, Int), Coll[(Long, Int)]]): Coll[(Long, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextLong(), i))
}
builder.result()
}
def nextString[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[String], String, Coll[String]]): Coll[String] = {
val builder = cbf()
builder.sizeHint(n)
for (_ <- 0 until n) {
builder += nextString()
}
builder.result()
}
def nextStringWithIndex[Coll[_]]
(n: Int)(implicit cbf: CanBuildFrom[Coll[(String, Int)], (String, Int), Coll[(String, Int)]]): Coll[(String, Int)] = {
val builder = cbf()
builder.sizeHint(n)
for (i <- 0 until n) {
builder += ((nextString(), i))
}
builder.result()
}
def nextMultiLine(n: Int, m: Int): Array[Array[Char]] = {
val map = new Array[Array[Char]](n)
var i = 0
while (i < n) {
map(i) = nextChar[Array](m)
i += 1
}
map
}
def nextDouble(): Double = nextString().toDouble
def nextChar(): Char = skip.toChar
def nextString(): String = {
var b = skip
val sb = new java.lang.StringBuilder
while (!isSpaceChar(b)) {
sb.appendCodePoint(b)
b = readByte().toInt
}
sb.toString
}
def nextInt(): Int = {
var num = 0
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Int")
}
def nextLong(): Long = {
var num = 0L
var b = 0
var minus = false
while ( {
b = readByte().toInt
b != -1 && !((b >= '0' && b <= '9') || b == '-')
}) {}
if (b == '-') {
minus = true
b = readByte().toInt
}
while (true) {
if (b >= '0' && b <= '9') {
num = num * 10 + (b - '0')
} else {
if (minus) return -num else return num
}
b = readByte().toInt
}
throw new IOException("Read Long")
}
private[this] val inputBuffer = new Array[Byte](1024)
private[this] var lenBuffer = 0
private[this] var ptrBuffer = 0
private[this] def readByte()(implicit in: java.io.InputStream): Byte = {
if (lenBuffer == -1) throw new InputMismatchException
if (ptrBuffer >= lenBuffer) {
ptrBuffer = 0
try {
lenBuffer = in.read(inputBuffer)
} catch {
case _: IOException =>
throw new InputMismatchException
}
if (lenBuffer <= 0) return -1
}
inputBuffer({
ptrBuffer += 1
ptrBuffer - 1
})
}
// Anything outside the printable ASCII range 33..126 (space, tab, newline, and
// the EOF marker -1) counts as a token separator.
private[this] def isSpaceChar(c: Int) = !(c >= 33 && c <= 126)

/** Consumes separator bytes and returns the first printable byte, or -1 on EOF. */
private[this] def skip = {
  var b = 0
  while ( {
    b = readByte().toInt
    b != -1 && isSpaceChar(b)
  }) {}
  b
}
}
//------------------------------------------------------------------------------------------//
// Output
//------------------------------------------------------------------------------------------//
/** Buffered wrapper around stdout; output only appears once flush() is called. */
private[this] final object Writer {
  // Single shared PrintWriter over System.out; PrintWriter buffers until flushed.
  private[this] val sink = new PrintWriter(System.out)
  /** Writes `x` followed by a line separator (buffered). */
  def println(x: Any): Unit = sink.println(x)
  /** Writes `x` with no trailing newline (buffered). */
  def print(x: Any): Unit = sink.print(x)
  /** Pushes all buffered output through to stdout. */
  def flush(): Unit = sink.flush()
}
} | robertoFischer/hackerrank | src/main/scala/HackerRank/Training/DataStructures/Arrays/AlgorithmicCrush.scala | Scala | mit | 8,832 |
package spire

/** Re-exports the typelevel `algebra` / `algebra.ring` / `algebra.lattice` type
  * classes under `spire.algebra`, so Spire code can use its historical names
  * (e.g. `CSemigroup`, `AbGroup`) without referring to `_root_.algebra` paths.
  * Each pair of lines aliases a type and its companion value.
  */
package object algebra {

  // --- equality and order -----------------------------------------------------
  type Eq[A] = _root_.algebra.Eq[A]
  val Eq = _root_.algebra.Eq
  type PartialOrder[A] = _root_.algebra.PartialOrder[A]
  val PartialOrder = _root_.algebra.PartialOrder
  type Order[A] = _root_.algebra.Order[A]
  val Order = _root_.algebra.Order

  // --- group-like structures ("C" prefix = commutative, "Ab" = abelian) -------
  type Semigroup[A] = _root_.algebra.Semigroup[A]
  val Semigroup = _root_.algebra.Semigroup
  type CSemigroup[A] = _root_.algebra.CommutativeSemigroup[A]
  val CSemigroup = _root_.algebra.CommutativeSemigroup
  type Monoid[A] = _root_.algebra.Monoid[A]
  val Monoid = _root_.algebra.Monoid
  type CMonoid[A] = _root_.algebra.CommutativeMonoid[A]
  val CMonoid = _root_.algebra.CommutativeMonoid
  type Group[A] = _root_.algebra.Group[A]
  val Group = _root_.algebra.Group
  type AbGroup[A] = _root_.algebra.CommutativeGroup[A]
  val AbGroup = _root_.algebra.CommutativeGroup

  // --- additive half of ring-like structures ----------------------------------
  type AdditiveSemigroup[A] = _root_.algebra.ring.AdditiveSemigroup[A]
  val AdditiveSemigroup = _root_.algebra.ring.AdditiveSemigroup
  type AdditiveCSemigroup[A] = _root_.algebra.ring.AdditiveCommutativeSemigroup[A]
  val AdditiveCSemigroup = _root_.algebra.ring.AdditiveCommutativeSemigroup
  type AdditiveMonoid[A] = _root_.algebra.ring.AdditiveMonoid[A]
  val AdditiveMonoid = _root_.algebra.ring.AdditiveMonoid
  type AdditiveCMonoid[A] = _root_.algebra.ring.AdditiveCommutativeMonoid[A]
  val AdditiveCMonoid = _root_.algebra.ring.AdditiveCommutativeMonoid
  type AdditiveGroup[A] = _root_.algebra.ring.AdditiveGroup[A]
  val AdditiveGroup = _root_.algebra.ring.AdditiveGroup
  type AdditiveAbGroup[A] = _root_.algebra.ring.AdditiveCommutativeGroup[A]
  val AdditiveAbGroup = _root_.algebra.ring.AdditiveCommutativeGroup

  /** Adapts a plain group-like structure to its additive counterpart:
    * `combine` -> `plus`, `empty` -> `zero`, `inverse` -> `negate`, `remove` -> `minus`.
    * Overload resolution picks the most specific apply for the argument's type.
    */
  object Additive {
    def apply[A](s: Semigroup[A]): AdditiveSemigroup[A] = new AdditiveSemigroup[A] {
      def plus(x: A, y: A): A = s.combine(x, y)
    }
    def apply[A](s: CSemigroup[A]): AdditiveCSemigroup[A] = new AdditiveCSemigroup[A] {
      def plus(x: A, y: A): A = s.combine(x, y)
    }
    def apply[A](m: Monoid[A]): AdditiveMonoid[A] = new AdditiveMonoid[A] {
      def plus(x: A, y: A): A = m.combine(x, y)
      def zero = m.empty
    }
    def apply[A](m: CMonoid[A]): AdditiveCMonoid[A] = new AdditiveCMonoid[A] {
      def plus(x: A, y: A): A = m.combine(x, y)
      def zero = m.empty
    }
    def apply[A](g: Group[A]): AdditiveGroup[A] = new AdditiveGroup[A] {
      def plus(x: A, y: A): A = g.combine(x, y)
      override def minus(x: A, y: A): A = g.remove(x, y)
      def zero: A = g.empty
      def negate(x: A): A = g.inverse(x)
    }
    def apply[A](g: AbGroup[A]): AdditiveAbGroup[A] = new AdditiveAbGroup[A] {
      def plus(x: A, y: A): A = g.combine(x, y)
      override def minus(x: A, y: A): A = g.remove(x, y)
      def zero: A = g.empty
      def negate(x: A): A = g.inverse(x)
    }
  }

  // --- multiplicative half of ring-like structures ----------------------------
  type MultiplicativeSemigroup[A] = _root_.algebra.ring.MultiplicativeSemigroup[A]
  val MultiplicativeSemigroup = _root_.algebra.ring.MultiplicativeSemigroup
  type MultiplicativeCSemigroup[A] = _root_.algebra.ring.MultiplicativeCommutativeSemigroup[A]
  val MultiplicativeCSemigroup = _root_.algebra.ring.MultiplicativeCommutativeSemigroup
  type MultiplicativeMonoid[A] = _root_.algebra.ring.MultiplicativeMonoid[A]
  val MultiplicativeMonoid = _root_.algebra.ring.MultiplicativeMonoid
  type MultiplicativeCMonoid[A] = _root_.algebra.ring.MultiplicativeCommutativeMonoid[A]
  val MultiplicativeCMonoid = _root_.algebra.ring.MultiplicativeCommutativeMonoid
  type MultiplicativeGroup[A] = _root_.algebra.ring.MultiplicativeGroup[A]
  val MultiplicativeGroup = _root_.algebra.ring.MultiplicativeGroup
  type MultiplicativeAbGroup[A] = _root_.algebra.ring.MultiplicativeCommutativeGroup[A]
  val MultiplicativeAbGroup = _root_.algebra.ring.MultiplicativeCommutativeGroup

  /** Adapts a plain group-like structure to its multiplicative counterpart:
    * `combine` -> `times`, `empty` -> `one`, `inverse` -> `reciprocal`, `remove` -> `div`.
    */
  object Multiplicative {
    def apply[A](s: Semigroup[A]): MultiplicativeSemigroup[A] = new MultiplicativeSemigroup[A] {
      def times(x: A, y: A): A = s.combine(x, y)
    }
    def apply[A](s: CSemigroup[A]): MultiplicativeCSemigroup[A] = new MultiplicativeCSemigroup[A] {
      def times(x: A, y: A): A = s.combine(x, y)
    }
    def apply[A](m: Monoid[A]): MultiplicativeMonoid[A] = new MultiplicativeMonoid[A] {
      def times(x: A, y: A): A = m.combine(x, y)
      def one = m.empty
    }
    def apply[A](m: CMonoid[A]): MultiplicativeCMonoid[A] = new MultiplicativeCMonoid[A] {
      def times(x: A, y: A): A = m.combine(x, y)
      def one = m.empty
    }
    def apply[A](g: Group[A]): MultiplicativeGroup[A] = new MultiplicativeGroup[A] {
      def times(x: A, y: A): A = g.combine(x, y)
      def div(x: A, y: A): A = g.remove(x, y)
      def one: A = g.empty
      override def reciprocal(x: A): A = g.inverse(x)
    }
    def apply[A](g: AbGroup[A]): MultiplicativeAbGroup[A] = new MultiplicativeAbGroup[A] {
      def times(x: A, y: A): A = g.combine(x, y)
      def div(x: A, y: A): A = g.remove(x, y)
      def one: A = g.empty
      override def reciprocal(x: A): A = g.inverse(x)
    }
  }

  // --- ring-like structures ---------------------------------------------------
  type Semiring[A] = _root_.algebra.ring.Semiring[A]
  val Semiring = _root_.algebra.ring.Semiring
  type CSemiring[A] = _root_.algebra.ring.CommutativeSemiring[A]
  val CSemiring = _root_.algebra.ring.CommutativeSemiring
  type Rig[A] = _root_.algebra.ring.Rig[A]
  val Rig = _root_.algebra.ring.Rig
  type CRig[A] = _root_.algebra.ring.CommutativeRig[A]
  val CRig = _root_.algebra.ring.CommutativeRig
  type Rng[A] = _root_.algebra.ring.Rng[A]
  val Rng = _root_.algebra.ring.Rng
  type CRng[A] = _root_.algebra.ring.CommutativeRng[A]
  val CRng = _root_.algebra.ring.CommutativeRng
  type Ring[A] = _root_.algebra.ring.Ring[A]
  val Ring = _root_.algebra.ring.Ring
  type CRing[A] = _root_.algebra.ring.CommutativeRing[A]
  val CRing = _root_.algebra.ring.CommutativeRing
  // Named AlgebraField to avoid clashing with Spire's own Field.
  type AlgebraField[A] = _root_.algebra.ring.Field[A]
  val AlgebraField = _root_.algebra.ring.Field

  // --- lattices ---------------------------------------------------------------
  type Bool[A] = _root_.algebra.lattice.Bool[A]
  val Bool = _root_.algebra.lattice.Bool
}
| non/spire | core/src/main/scala/spire/algebra/package.scala | Scala | mit | 6,018 |
/**
* Copyright (C) 2013 Typesafe Inc. <http://typesafe.com>
*/
package com.twitter_typesafe.config.impl
import com.twitter_typesafe.config._
import java.io.{ InputStream, InputStreamReader }
import java.time.Duration;
import beanconfig._
import org.junit.Assert._
import org.junit._
import scala.collection.JavaConverters._
/** JUnit tests for ConfigBeanFactory: creating Java beans from parsed config,
  * plus validation and error reporting. All fixtures come from
  * beanconfig/beanconfig01.conf, loaded through [[loadConfig]].
  */
class ConfigBeanFactoryTest extends TestUtils {

  @Test
  def toCamelCase() {
    assertEquals("configProp", ConfigImplUtil.toCamelCase("config-prop"))
    assertEquals("configProp", ConfigImplUtil.toCamelCase("configProp"))
    assertEquals("fooBar", ConfigImplUtil.toCamelCase("foo-----bar"))
    assertEquals("fooBar", ConfigImplUtil.toCamelCase("fooBar"))
    assertEquals("foo", ConfigImplUtil.toCamelCase("-foo"))
    assertEquals("bar", ConfigImplUtil.toCamelCase("bar-"))
  }

  @Test
  def testCreate() {
    // FIX: the original opened the resource stream inline and never closed it;
    // loadConfig() parses the same resource and closes the stream in a finally.
    val config: Config = loadConfig()
    val beanConfig: TestBeanConfig = ConfigBeanFactory.create(config, classOf[TestBeanConfig])
    assertNotNull(beanConfig)
    // recursive bean inside the first bean
    assertEquals(3, beanConfig.getNumbers.getIntVal)
  }

  @Test
  def testValidation() {
    // FIX: same stream leak as testCreate — delegate to loadConfig().
    val config: Config = loadConfig().getConfig("validation")
    val e = intercept[ConfigException.ValidationFailed] {
      ConfigBeanFactory.create(config, classOf[ValidationBeanConfig])
    }
    // Line numbers refer to positions inside beanconfig01.conf.
    val expecteds = Seq(Missing("propNotListedInConfig", 77, "string"),
      WrongType("shouldBeInt", 78, "number", "boolean"),
      WrongType("should-be-boolean", 79, "boolean", "number"),
      WrongType("should-be-list", 80, "list", "string"))
    checkValidationException(e, expecteds)
  }

  @Test
  def testCreateBool() {
    val beanConfig: BooleansConfig = ConfigBeanFactory.create(loadConfig().getConfig("booleans"), classOf[BooleansConfig])
    assertNotNull(beanConfig)
    assertEquals(true, beanConfig.getTrueVal)
    assertEquals(false, beanConfig.getFalseVal)
  }

  @Test
  def testCreateString() {
    val beanConfig: StringsConfig = ConfigBeanFactory.create(loadConfig().getConfig("strings"), classOf[StringsConfig])
    assertNotNull(beanConfig)
    assertEquals("abcd", beanConfig.getAbcd)
    assertEquals("yes", beanConfig.getYes)
  }

  @Test
  def testCreateNumber() {
    // Covers primitive and boxed variants of int, long and double setters.
    val beanConfig: NumbersConfig = ConfigBeanFactory.create(loadConfig().getConfig("numbers"), classOf[NumbersConfig])
    assertNotNull(beanConfig)
    assertEquals(3, beanConfig.getIntVal)
    assertEquals(3, beanConfig.getIntObj)
    assertEquals(4L, beanConfig.getLongVal)
    assertEquals(4L, beanConfig.getLongObj)
    assertEquals(1.0, beanConfig.getDoubleVal, 1e-6)
    assertEquals(1.0, beanConfig.getDoubleObj, 1e-6)
  }

  @Test
  def testCreateList() {
    val beanConfig: ArraysConfig = ConfigBeanFactory.create(loadConfig().getConfig("arrays"), classOf[ArraysConfig])
    assertNotNull(beanConfig)
    assertEquals(List().asJava, beanConfig.getEmpty)
    assertEquals(List(1, 2, 3).asJava, beanConfig.getOfInt)
    assertEquals(List(32L, 42L, 52L).asJava, beanConfig.getOfLong)
    assertEquals(List("a", "b", "c").asJava, beanConfig.getOfString)
    //assertEquals(List(List("a", "b", "c").asJava,
    //  List("a", "b", "c").asJava,
    //  List("a", "b", "c").asJava).asJava,
    //  beanConfig.getOfArray)
    assertEquals(3, beanConfig.getOfObject.size)
    assertEquals(3, beanConfig.getOfDouble.size)
    assertEquals(3, beanConfig.getOfConfig.size)
    assertTrue(beanConfig.getOfConfig.get(0).isInstanceOf[Config])
    assertEquals(3, beanConfig.getOfConfigObject.size)
    assertTrue(beanConfig.getOfConfigObject.get(0).isInstanceOf[ConfigObject])
    assertEquals(List(intValue(1), intValue(2), stringValue("a")),
      beanConfig.getOfConfigValue.asScala)
    assertEquals(List(Duration.ofMillis(1), Duration.ofHours(2), Duration.ofDays(3)),
      beanConfig.getOfDuration.asScala)
    assertEquals(List(ConfigMemorySize.ofBytes(1024),
      ConfigMemorySize.ofBytes(1048576),
      ConfigMemorySize.ofBytes(1073741824)),
      beanConfig.getOfMemorySize.asScala)

    // Lists of nested beans compare by the beans' equals().
    val stringsConfigOne = new StringsConfig();
    stringsConfigOne.setAbcd("testAbcdOne")
    stringsConfigOne.setYes("testYesOne")
    val stringsConfigTwo = new StringsConfig();
    stringsConfigTwo.setAbcd("testAbcdTwo")
    stringsConfigTwo.setYes("testYesTwo")

    assertEquals(List(stringsConfigOne, stringsConfigTwo).asJava, beanConfig.getOfStringBean)
  }

  @Test
  def testCreateDuration() {
    val beanConfig: DurationsConfig = ConfigBeanFactory.create(loadConfig().getConfig("durations"), classOf[DurationsConfig])
    assertNotNull(beanConfig)
    assertEquals(Duration.ofMillis(500), beanConfig.getHalfSecond)
    assertEquals(Duration.ofMillis(1000), beanConfig.getSecond)
    assertEquals(Duration.ofMillis(1000), beanConfig.getSecondAsNumber)
  }

  @Test
  def testCreateBytes() {
    val beanConfig: BytesConfig = ConfigBeanFactory.create(loadConfig().getConfig("bytes"), classOf[BytesConfig])
    assertNotNull(beanConfig)
    assertEquals(ConfigMemorySize.ofBytes(1024), beanConfig.getKibibyte)
    assertEquals(ConfigMemorySize.ofBytes(1000), beanConfig.getKilobyte)
    assertEquals(ConfigMemorySize.ofBytes(1000), beanConfig.getThousandBytes)
  }

  @Test
  def testPreferCamelNames() {
    // When both "foo-bar" and "fooBar" style keys exist, camelCase should win.
    val beanConfig = ConfigBeanFactory.create(loadConfig().getConfig("preferCamelNames"), classOf[PreferCamelNamesConfig])
    assertNotNull(beanConfig)
    assertEquals("yes", beanConfig.getFooBar)
    assertEquals("yes", beanConfig.getBazBar)
  }

  @Test
  def testValues() {
    // Raw Config/ConfigObject/ConfigValue typed setters receive unwrapped or
    // wrapped values as appropriate.
    val beanConfig = ConfigBeanFactory.create(loadConfig().getConfig("values"), classOf[ValuesConfig])
    assertNotNull(beanConfig)
    assertEquals(42, beanConfig.getObj())
    assertEquals("abcd", beanConfig.getConfig.getString("abcd"))
    assertEquals(3, beanConfig.getConfigObj.toConfig.getInt("intVal"))
    assertEquals(stringValue("hello world"), beanConfig.getConfigValue)
    assertEquals(List(1, 2, 3).map(intValue(_)), beanConfig.getList.asScala)
    assertEquals(true, beanConfig.getUnwrappedMap.get("shouldBeInt"))
    assertEquals(42, beanConfig.getUnwrappedMap.get("should-be-boolean"))
  }

  @Test
  def testNotABeanField() {
    val e = intercept[ConfigException.BadBean] {
      ConfigBeanFactory.create(parseConfig("notBean=42"), classOf[NotABeanFieldConfig])
    }
    assertTrue("unsupported type error", e.getMessage.contains("unsupported type"))
    assertTrue("error about the right property", e.getMessage.contains("notBean"))
  }

  @Test
  def testUnsupportedListElement() {
    val e = intercept[ConfigException.BadBean] {
      ConfigBeanFactory.create(parseConfig("uri=[42]"), classOf[UnsupportedListElementConfig])
    }
    assertTrue("unsupported element type error", e.getMessage.contains("unsupported list element type"))
    assertTrue("error about the right property", e.getMessage.contains("uri"))
  }

  @Test
  def testUnsupportedMapKey() {
    val e = intercept[ConfigException.BadBean] {
      ConfigBeanFactory.create(parseConfig("map={}"), classOf[UnsupportedMapKeyConfig])
    }
    assertTrue("unsupported map type error", e.getMessage.contains("unsupported Map"))
    assertTrue("error about the right property", e.getMessage.contains("'map'"))
  }

  @Test
  def testUnsupportedMapValue() {
    val e = intercept[ConfigException.BadBean] {
      ConfigBeanFactory.create(parseConfig("map={}"), classOf[UnsupportedMapValueConfig])
    }
    assertTrue("unsupported map type error", e.getMessage.contains("unsupported Map"))
    assertTrue("error about the right property", e.getMessage.contains("'map'"))
  }

  /** Parses and resolves beanconfig01.conf from the classpath, always closing
    * the underlying stream.
    */
  private def loadConfig(): Config = {
    val configIs: InputStream = this.getClass().getClassLoader().getResourceAsStream("beanconfig/beanconfig01.conf")
    try {
      val config: Config = ConfigFactory.parseReader(new InputStreamReader(configIs),
        ConfigParseOptions.defaults.setSyntax(ConfigSyntax.CONF)).resolve
      config
    } finally {
      configIs.close()
    }
  }
}
| twitter-forks/config | config/src/test/scala/com/typesafe/config/impl/ConfigBeanFactoryTest.scala | Scala | apache-2.0 | 9,077 |
// Copyright (C) 2017 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.http.Status
import play.api.test.FakeRequest
import play.api.test.Helpers.{route,GET,status,NOT_FOUND,OK,contentType,contentAsString,writeableOf_AnyContentAsEmpty,defaultAwaitTimeout}
/**
* Created by andrew@andrewresearch.net on 25/8/17.
*/
/** End-to-end routing and controller checks against the Guice-built test app. */
class FunctionalSpec extends PlaySpec with GuiceOneAppPerSuite {

  // Issues a GET against the running test application and returns the optional result.
  private def get(path: String) = route(app, FakeRequest(GET, path))

  "Routes" should {

    "send 404 on a bad request" in {
      get("/boum").map(status) mustBe Some(NOT_FOUND)
    }

    "send 200 on a good request" in {
      get("/").map(status) mustBe Some(OK)
    }

  }

  "DefaultController" should {

    "render the index page" in {
      val index = get("/").get

      status(index) mustBe Status.OK
      contentType(index) mustBe Some("text/html")
      // Raw HTML fragment the index page must contain.
      val expectedFragment =
        """
|<head>
| <meta charset="UTF-8">
| <title>GraphQL Test</title>
|</head>
|<body>
|<h1>Testing GraphQL</h1>
| <a href="/graphiql">Use graphiql here</a>
|</body>""".stripMargin
      contentAsString(index) must include (expectedFragment)
    }

  }
}
| andrewresearch/sangria-play-demo | src/test/scala/FunctionalSpec.scala | Scala | apache-2.0 | 1,985 |
package com.sksamuel.elastic4s.requests.searches.aggs.pipeline
import com.sksamuel.elastic4s.requests.script.Script
/** Model of the Elasticsearch `bucket_script` pipeline aggregation: runs `script`
  * over values read from sibling aggregations in a parent multi-bucket aggregation.
  *
  * @param name         name under which this aggregation is registered
  * @param script       the script executed per bucket
  * @param bucketsPaths maps script variable names to the paths of the metrics they read
  * @param format       optional format applied to the script's numeric output
  * @param gapPolicy    how buckets with missing data are treated
  * @param metadata     arbitrary metadata echoed back in the response
  */
case class BucketScriptPipelineAgg(name: String,
                                   script: Script,
                                   bucketsPaths: Map[String, String],
                                   format: Option[String] = None,
                                   gapPolicy: Option[GapPolicy] = None,
                                   metadata: Map[String, AnyRef] = Map.empty)
  extends PipelineAgg {

  type T = BucketScriptPipelineAgg

  // Immutable builder-style setters: each returns an updated copy.
  def format(format: String): BucketScriptPipelineAgg = copy(format = Some(format))
  def gapPolicy(gapPolicy: GapPolicy): BucketScriptPipelineAgg = copy(gapPolicy = Some(gapPolicy))
  def metadata(metadata: Map[String, AnyRef]): BucketScriptPipelineAgg = copy(metadata = metadata)
}
| sksamuel/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/pipeline/BucketScriptPipelineAgg.scala | Scala | apache-2.0 | 876 |
import sbt._
import Keys._
/** sbt 0.13-style build definition for the capd-common-play-templates module. */
object CommonPlayTemplatesBuild extends Build {

  // Publishing options shared by published artifacts: Maven layout, no test
  // artifacts, and snapshot/release repository chosen by the version suffix.
  val appReleaseSettings = Seq(
    // Publishing options:
    publishMavenStyle := true,
    publishArtifact in Test := false,
    pomIncludeRepository := { x => false },
    publishTo <<= version { (v: String) =>
      val nexus = "https://defranexus.kainos.com/"
      if (v.trim.endsWith("SNAPSHOT"))
        Some("sonatype-snapshots" at nexus + "content/repositories/snapshots")
      else
        Some("sonatype-releases" at nexus + "content/repositories/releases")
    },
    credentials += Credentials(Path.userHome / ".ivy2" / ".credentials")
  )

  // FIX: the Typesafe repository is only served over HTTPS nowadays, and newer
  // sbt/coursier versions reject plain-HTTP resolvers, so use https here.
  def defaultResolvers = Seq(
    "Typesafe repository" at "https://repo.typesafe.com/typesafe/releases/",
    "DEFRA Nexus Release repo" at "https://defranexus.kainos.com/content/repositories/releases/",
    "DEFRA Nexus Snapshot repo" at "https://defranexus.kainos.com/content/repositories/snapshots/"
  )

  val standardSettings = appReleaseSettings ++ Seq(
    organization := "uk.gov.defra",
    scalaVersion := "2.10.2",
    autoScalaLibrary := false
  )

  // Single root project; Twirl templates enabled for Play view compilation.
  lazy val capdExternalUserAuthApi = Project("capd-common-play-templates", file("."),
    settings = standardSettings).settings(
    crossPaths := false,
    resolvers ++= defaultResolvers
  ).enablePlugins(play.twirl.sbt.SbtTwirl)
} | Defra/capd-common-play-templates | project/Build.scala | Scala | mit | 1,330 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.io.File
import java.net.URI
import org.apache.spark.SparkConf
import scala.util.Random
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.physical.HashPartitioning
import org.apache.spark.sql.execution.{DataSourceScanExec, FileSourceScanExec, SortExec, SparkPlan}
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec
import org.apache.spark.sql.execution.datasources.BucketingUtils
import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec
import org.apache.spark.sql.execution.joins.SortMergeJoinExec
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
import org.apache.spark.sql.test.{SQLTestUtils, SharedSparkSession}
import org.apache.spark.util.Utils
import org.apache.spark.util.collection.BitSet
/** Runs the bucketed-read suite against the in-memory (non-Hive) catalog with the
  * Intel OAP columnar plugin loaded. The conf below enables/disables specific
  * columnar features so the inherited tests exercise the intended code paths.
  */
class BucketedReadWithoutHiveSupportSuite extends BucketedReadSuite with SharedSparkSession {
  override def sparkConf: SparkConf =
    super.sparkConf
      .setAppName("test")
      .set("spark.sql.parquet.columnarReaderBatchSize", "4096")
      .set("spark.sql.sources.useV1SourceList", "avro")
      .set("spark.sql.extensions", "com.intel.oap.ColumnarPlugin")
      .set("spark.sql.execution.arrow.maxRecordsPerBatch", "4096")
      //.set("spark.shuffle.manager", "org.apache.spark.shuffle.sort.ColumnarShuffleManager")
      .set("spark.memory.offHeap.enabled", "true")
      .set("spark.memory.offHeap.size", "50m")
      .set("spark.sql.join.preferSortMergeJoin", "false")
      .set("spark.sql.columnar.codegen.hashAggregate", "false")
      .set("spark.oap.sql.columnar.wholestagecodegen", "false")
      .set("spark.sql.columnar.window", "false")
      .set("spark.unsafe.exceptionOnMemoryLeak", "false")
      //.set("spark.sql.columnar.tmp_dir", "/codegen/nativesql/")
      .set("spark.sql.columnar.sort.broadcastJoin", "true")
      .set("spark.oap.sql.columnar.preferColumnar", "true")

  protected override def beforeAll(): Unit = {
    super.beforeAll()
    // Fail fast if this suite is accidentally run with Hive support enabled.
    assert(spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "in-memory")
  }
}
abstract class BucketedReadSuite extends QueryTest with SQLTestUtils {
import testImplicits._
protected override def beforeAll(): Unit = {
  super.beforeAll()
  // Force the legacy behavior where bucketed scans expose their output
  // ordering; the sort-avoidance assertions below depend on it.
  spark.sessionState.conf.setConf(SQLConf.LEGACY_BUCKETED_TABLE_SCAN_OUTPUT_ORDERING, true)
}

protected override def afterAll(): Unit = {
  // Restore the default so later suites see an unmodified session conf.
  spark.sessionState.conf.unsetConf(SQLConf.LEGACY_BUCKETED_TABLE_SCAN_OUTPUT_ORDERING)
  super.afterAll()
}
// Moduli used to generate the bucket/sort key columns of the test frames.
private val maxI = 5
private val maxJ = 13
// 50 rows of (i % 5, i % 13, i.toString) in columns (i, j, k).
private lazy val df = (0 until 50).map(i => (i % maxI, i % maxJ, i.toString)).toDF("i", "j", "k")
// Same shape, but column "j" holds nullable strings (including nulls) to
// exercise pruning on null bucket keys.
private lazy val nullDF = (for {
  i <- 0 to 50
  s <- Seq(null, "a", "b", "c", "d", "e", "f", null, "g")
} yield (i % maxI, s, i % maxJ)).toDF("i", "j", "k")

// number of buckets that doesn't yield empty buckets when bucketing on column j on df/nullDF
// empty buckets before filtering might hide bugs in pruning logic
private val NumBucketsForPruningDF = 7
private val NumBucketsForPruningNullDf = 5
// Verifies that rows land in the bucket file matching their hash: each RDD
// partition index must equal the bucket id recomputed from the row's (j, k).
test("read bucketed data") {
  withTable("bucketed_table") {
    df.write
      .format("parquet")
      .partitionBy("i")
      .bucketBy(8, "j", "k")
      .saveAsTable("bucketed_table")

    val bucketValue = Random.nextInt(maxI)
    val table = spark.table("bucketed_table").filter($"i" === bucketValue)
    val query = table.queryExecution
    val output = query.analyzed.output
    val rdd = query.toRdd

    // One RDD partition per bucket.
    assert(rdd.partitions.length == 8)

    val attrs = table.select("j", "k").queryExecution.analyzed.output
    val checkBucketId = rdd.mapPartitionsWithIndex((index, rows) => {
      // Recompute the bucket id of each row with the same hash partitioning
      // expression the writer used.
      val getBucketId = UnsafeProjection.create(
        HashPartitioning(attrs, 8).partitionIdExpression :: Nil,
        output)
      rows.map(row => getBucketId(row).getInt(0) -> index)
    })
    checkBucketId.collect().foreach(r => assert(r._1 == r._2))
  }
}
/** Extracts the single expected FileSourceScanExec from a physical plan,
  * failing (with the plan in the message) when none is present.
  */
private def getFileScan(plan: SparkPlan): FileSourceScanExec = {
  val scans = plan.collect { case scan: FileSourceScanExec => scan }
  assert(scans.nonEmpty, plan)
  scans.head
}
// To verify if the bucket pruning works, this function checks two conditions:
//   1) Check if the pruned buckets (before filtering) are empty.
//   2) Verify the final result is the same as the expected one.
// `bucketValues` lists the bucket-key values the filter should keep; pass Nil
// to skip the emptiness check (nothing prunable).
private def checkPrunedAnswers(
    bucketSpec: BucketSpec,
    bucketValues: Seq[Integer],
    filterCondition: Column,
    originalDataFrame: DataFrame): Unit = {
  // This test verifies parts of the plan. Disable whole stage codegen.
  withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") {
    val bucketedDataFrame = spark.table("bucketed_table").select("i", "j", "k")
    val BucketSpec(numBuckets, bucketColumnNames, _) = bucketSpec
    // Limit: bucket pruning only works when the bucket column has one and only one column
    assert(bucketColumnNames.length == 1)
    val bucketColumnIndex = bucketedDataFrame.schema.fieldIndex(bucketColumnNames.head)
    val bucketColumn = bucketedDataFrame.schema.toAttributes(bucketColumnIndex)

    // Filter could hide the bug in bucket pruning. Thus, skipping all the filters
    val plan = bucketedDataFrame.filter(filterCondition).queryExecution.executedPlan
    val fileScan = getFileScan(plan)

    // if nothing should be pruned, skip the pruning test
    if (bucketValues.nonEmpty) {
      // Compute which bucket ids the kept values map to.
      val matchedBuckets = new BitSet(numBuckets)
      bucketValues.foreach { value =>
        matchedBuckets.set(BucketingUtils.getBucketIdFromValue(bucketColumn, numBuckets, value))
      }
      val invalidBuckets = fileScan.execute().mapPartitionsWithIndex { case (index, iter) =>
        // return indexes of partitions that should have been pruned and are not empty
        if (!matchedBuckets.get(index % numBuckets) && iter.nonEmpty) {
          Iterator(index)
        } else {
          Iterator()
        }
      }.collect()

      if (invalidBuckets.nonEmpty) {
        fail(s"Buckets ${invalidBuckets.mkString(",")} should have been pruned from:\n$plan")
      }
    }

    // Independently of pruning, the filtered answer must match the source frame.
    checkAnswer(
      bucketedDataFrame.filter(filterCondition).orderBy("i", "j", "k"),
      originalDataFrame.filter(filterCondition).orderBy("i", "j", "k"))
  }
}
// Pruning on a partitioned + bucketed table for each supported predicate shape.
// NOTE(review): currently `ignore`d — presumably pending columnar-plugin support; confirm.
ignore("read partitioning bucketed tables with bucket pruning filters") {
  withTable("bucketed_table") {
    val numBuckets = NumBucketsForPruningDF
    val bucketSpec = BucketSpec(numBuckets, Seq("j"), Nil)
    // json does not support predicate push-down, and thus json is used here
    df.write
      .format("json")
      .partitionBy("i")
      .bucketBy(numBuckets, "j")
      .saveAsTable("bucketed_table")

    val bucketValue = Random.nextInt(maxJ)

    // Case 1: EqualTo
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = bucketValue :: Nil,
      filterCondition = $"j" === bucketValue,
      df)

    // Case 2: EqualNullSafe
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = bucketValue :: Nil,
      filterCondition = $"j" <=> bucketValue,
      df)

    // Case 3: In
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = Seq(bucketValue, bucketValue + 1, bucketValue + 2, bucketValue + 3),
      filterCondition = $"j".isin(bucketValue, bucketValue + 1, bucketValue + 2, bucketValue + 3),
      df)

    // Case 4: InSet
    val inSetExpr = expressions.InSet($"j".expr,
      Set(bucketValue, bucketValue + 1, bucketValue + 2, bucketValue + 3).map(lit(_).expr))
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = Seq(bucketValue, bucketValue + 1, bucketValue + 2, bucketValue + 3),
      filterCondition = Column(inSetExpr),
      df)
  }
}

// Same pruning check without a partition column involved.
test("read non-partitioning bucketed tables with bucket pruning filters") {
  withTable("bucketed_table") {
    val numBuckets = NumBucketsForPruningDF
    val bucketSpec = BucketSpec(numBuckets, Seq("j"), Nil)
    // json does not support predicate push-down, and thus json is used here
    df.write
      .format("json")
      .bucketBy(numBuckets, "j")
      .saveAsTable("bucketed_table")

    val bucketValue = Random.nextInt(maxJ)
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = bucketValue :: Nil,
      filterCondition = $"j" === bucketValue,
      df)
  }
}
// Pruning must also work when the bucket key is null (isNull and null-safe equality).
test("read partitioning bucketed tables having null in bucketing key") {
  withTable("bucketed_table") {
    val numBuckets = NumBucketsForPruningNullDf
    val bucketSpec = BucketSpec(numBuckets, Seq("j"), Nil)
    // json does not support predicate push-down, and thus json is used here
    nullDF.write
      .format("json")
      .partitionBy("i")
      .bucketBy(numBuckets, "j")
      .saveAsTable("bucketed_table")

    // Case 1: isNull
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = null :: Nil,
      filterCondition = $"j".isNull,
      nullDF)

    // Case 2: <=> null
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = null :: Nil,
      filterCondition = $"j" <=> null,
      nullDF)
  }
}

// Pruning with the bucket predicate embedded in AND/OR combinations.
// NOTE(review): currently `ignore`d — presumably pending columnar-plugin support; confirm.
ignore("read partitioning bucketed tables having composite filters") {
  withTable("bucketed_table") {
    val numBuckets = NumBucketsForPruningDF
    val bucketSpec = BucketSpec(numBuckets, Seq("j"), Nil)
    // json does not support predicate push-down, and thus json is used here
    df.write
      .format("json")
      .partitionBy("i")
      .bucketBy(numBuckets, "j")
      .saveAsTable("bucketed_table")

    val bucketValue = Random.nextInt(maxJ)
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = bucketValue :: Nil,
      filterCondition = $"j" === bucketValue && $"k" > $"j",
      df)

    checkPrunedAnswers(
      bucketSpec,
      bucketValues = bucketValue :: Nil,
      filterCondition = $"j" === bucketValue && $"i" > bucketValue % 5,
      df)

    // check multiple bucket values OR condition
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = Seq(bucketValue, bucketValue + 1),
      filterCondition = $"j" === bucketValue || $"j" === (bucketValue + 1),
      df)

    // check bucket value and none bucket value OR condition
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = Nil,
      filterCondition = $"j" === bucketValue || $"i" === 0,
      df)

    // check AND condition in complex expression
    checkPrunedAnswers(
      bucketSpec,
      bucketValues = Seq(bucketValue),
      filterCondition = ($"i" === 0 || $"k" > $"j") && $"j" === bucketValue,
      df)
  }
}
// With no filter, nothing may be pruned: every bucket must be non-empty
// (the bucket counts above were chosen to guarantee that).
test("read bucketed table without filters") {
  withTable("bucketed_table") {
    val numBuckets = NumBucketsForPruningDF
    val bucketSpec = BucketSpec(numBuckets, Seq("j"), Nil)
    // json does not support predicate push-down, and thus json is used here
    df.write
      .format("json")
      .bucketBy(numBuckets, "j")
      .saveAsTable("bucketed_table")

    val bucketedDataFrame = spark.table("bucketed_table").select("i", "j", "k")
    val plan = bucketedDataFrame.queryExecution.executedPlan
    val fileScan = getFileScan(plan)

    val emptyBuckets = fileScan.execute().mapPartitionsWithIndex { case (index, iter) =>
      // return indexes of empty partitions
      if (iter.isEmpty) {
        Iterator(index)
      } else {
        Iterator()
      }
    }.collect()

    if (emptyBuckets.nonEmpty) {
      fail(s"Buckets ${emptyBuckets.mkString(",")} should not have been pruned from:\n$plan")
    }

    checkAnswer(
      bucketedDataFrame.orderBy("i", "j", "k"),
      df.orderBy("i", "j", "k"))
  }
}
// Join fixtures: 50 rows each, keyed with different moduli so joins have
// both matches and misses.
private lazy val df1 =
  (0 until 50).map(i => (i % 5, i % 13, i.toString)).toDF("i", "j", "k").as("df1")
private lazy val df2 =
  (0 until 50).map(i => (i % 7, i % 11, i.toString)).toDF("i", "j", "k").as("df2")

// Per-side configuration for testBucketing: how the table is bucketed/sorted,
// its partition count, and whether a shuffle/sort is expected in the join plan.
case class BucketedTableTestSpec(
    bucketSpec: Option[BucketSpec],
    numPartitions: Int = 10,
    expectedShuffle: Boolean = true,
    expectedSort: Boolean = true)
/**
 * A helper method to test the bucket read functionality using join. It will save `df1` and `df2`
 * to hive tables, bucketed or not, according to the given bucket specifics. Next we will join
 * these 2 tables, and firstly make sure the answer is corrected, and then check if the shuffle
 * exists as user expected according to the `shuffleLeft` and `shuffleRight`.
 * The same applies to the expected sorts (`sortLeft` / `sortRight`).
 */
private def testBucketing(
    bucketedTableTestSpecLeft: BucketedTableTestSpec,
    bucketedTableTestSpecRight: BucketedTableTestSpec,
    joinType: String = "inner",
    joinCondition: (DataFrame, DataFrame) => Column): Unit = {
  val BucketedTableTestSpec(bucketSpecLeft, numPartitionsLeft, shuffleLeft, sortLeft) =
    bucketedTableTestSpecLeft
  val BucketedTableTestSpec(bucketSpecRight, numPartitionsRight, shuffleRight, sortRight) =
    bucketedTableTestSpecRight

  withTable("bucketed_table1", "bucketed_table2") {
    // Applies the optional bucket (and sort) spec to the writer.
    def withBucket(
        writer: DataFrameWriter[Row],
        bucketSpec: Option[BucketSpec]): DataFrameWriter[Row] = {
      bucketSpec.map { spec =>
        writer.bucketBy(
          spec.numBuckets,
          spec.bucketColumnNames.head,
          spec.bucketColumnNames.tail: _*)

        if (spec.sortColumnNames.nonEmpty) {
          writer.sortBy(
            spec.sortColumnNames.head,
            spec.sortColumnNames.tail: _*
          )
        } else {
          writer
        }
      }.getOrElse(writer)
    }

    withBucket(df1.repartition(numPartitionsLeft).write.format("parquet"), bucketSpecLeft)
      .saveAsTable("bucketed_table1")
    withBucket(df2.repartition(numPartitionsRight).write.format("parquet"), bucketSpecRight)
      .saveAsTable("bucketed_table2")

    // Broadcast joins are disabled so the join is a sort-merge join whose
    // children we can inspect for shuffles/sorts.
    withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0",
      SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") {
      val t1 = spark.table("bucketed_table1")
      val t2 = spark.table("bucketed_table2")
      val joined = t1.join(t2, joinCondition(t1, t2), joinType)

      // First check the result is corrected.
      checkAnswer(
        joined.sort("bucketed_table1.k", "bucketed_table2.k"),
        df1.join(df2, joinCondition(df1, df2), joinType).sort("df1.k", "df2.k"))

      // Under AQE the real plan is nested inside AdaptiveSparkPlanExec.
      val joinOperator = if (joined.sqlContext.conf.adaptiveExecutionEnabled) {
        val executedPlan =
          joined.queryExecution.executedPlan.asInstanceOf[AdaptiveSparkPlanExec].executedPlan
        assert(executedPlan.isInstanceOf[SortMergeJoinExec])
        executedPlan.asInstanceOf[SortMergeJoinExec]
      } else {
        val executedPlan = joined.queryExecution.executedPlan
        assert(executedPlan.isInstanceOf[SortMergeJoinExec])
        executedPlan.asInstanceOf[SortMergeJoinExec]
      }

      // check existence of shuffle
      assert(
        joinOperator.left.find(_.isInstanceOf[ShuffleExchangeExec]).isDefined == shuffleLeft,
        s"expected shuffle in plan to be $shuffleLeft but found\n${joinOperator.left}")
      assert(
        joinOperator.right.find(_.isInstanceOf[ShuffleExchangeExec]).isDefined == shuffleRight,
        s"expected shuffle in plan to be $shuffleRight but found\n${joinOperator.right}")

      // check existence of sort
      assert(
        joinOperator.left.find(_.isInstanceOf[SortExec]).isDefined == sortLeft,
        s"expected sort in the left child to be $sortLeft but found\n${joinOperator.left}")
      assert(
        joinOperator.right.find(_.isInstanceOf[SortExec]).isDefined == sortRight,
        s"expected sort in the right child to be $sortRight but found\n${joinOperator.right}")
    }
  }
}
private def joinCondition(joinCols: Seq[String]) (left: DataFrame, right: DataFrame): Column = {
joinCols.map(col => left(col) === right(col)).reduce(_ && _)
}
test("avoid shuffle when join 2 bucketed tables") {
val bucketSpec = Some(BucketSpec(8, Seq("i", "j"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
// Enable it after fix https://issues.apache.org/jira/browse/SPARK-12704
ignore("avoid shuffle when join keys are a super-set of bucket keys") {
val bucketSpec = Some(BucketSpec(8, Seq("i"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("only shuffle one side when join bucketed table and non-bucketed table") {
val bucketSpec = Some(BucketSpec(8, Seq("i", "j"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(None, expectedShuffle = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("only shuffle one side when 2 bucketed tables have different bucket number") {
val bucketSpecLeft = Some(BucketSpec(8, Seq("i", "j"), Nil))
val bucketSpecRight = Some(BucketSpec(5, Seq("i", "j"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpecLeft, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpecRight, expectedShuffle = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("only shuffle one side when 2 bucketed tables have different bucket keys") {
val bucketSpecLeft = Some(BucketSpec(8, Seq("i"), Nil))
val bucketSpecRight = Some(BucketSpec(8, Seq("j"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpecLeft, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpecRight, expectedShuffle = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i"))
)
}
test("shuffle when join keys are not equal to bucket keys") {
val bucketSpec = Some(BucketSpec(8, Seq("i"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = true)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpec, expectedShuffle = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("j"))
)
}
test("shuffle when join 2 bucketed tables with bucketing disabled") {
val bucketSpec = Some(BucketSpec(8, Seq("i", "j"), Nil))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = true)
val bucketedTableTestSpecRight = BucketedTableTestSpec(bucketSpec, expectedShuffle = true)
withSQLConf(SQLConf.BUCKETING_ENABLED.key -> "false") {
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
}
test("check sort and shuffle when bucket and sort columns are join keys") {
// In case of bucketing, its possible to have multiple files belonging to the
// same bucket in a given relation. Each of these files are locally sorted
// but those files combined together are not globally sorted. Given that,
// the RDD partition will not be sorted even if the relation has sort columns set
// Therefore, we still need to keep the Sort in both sides.
val bucketSpec = Some(BucketSpec(8, Seq("i", "j"), Seq("i", "j")))
val bucketedTableTestSpecLeft1 = BucketedTableTestSpec(
bucketSpec, numPartitions = 50, expectedShuffle = false, expectedSort = true)
val bucketedTableTestSpecRight1 = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft1,
bucketedTableTestSpecRight = bucketedTableTestSpecRight1,
joinCondition = joinCondition(Seq("i", "j"))
)
val bucketedTableTestSpecLeft2 = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight2 = BucketedTableTestSpec(
bucketSpec, numPartitions = 50, expectedShuffle = false, expectedSort = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft2,
bucketedTableTestSpecRight = bucketedTableTestSpecRight2,
joinCondition = joinCondition(Seq("i", "j"))
)
val bucketedTableTestSpecLeft3 = BucketedTableTestSpec(
bucketSpec, numPartitions = 50, expectedShuffle = false, expectedSort = true)
val bucketedTableTestSpecRight3 = BucketedTableTestSpec(
bucketSpec, numPartitions = 50, expectedShuffle = false, expectedSort = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft3,
bucketedTableTestSpecRight = bucketedTableTestSpecRight3,
joinCondition = joinCondition(Seq("i", "j"))
)
val bucketedTableTestSpecLeft4 = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight4 = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft4,
bucketedTableTestSpecRight = bucketedTableTestSpecRight4,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("avoid shuffle and sort when sort columns are a super set of join keys") {
val bucketSpecLeft = Some(BucketSpec(8, Seq("i"), Seq("i", "j")))
val bucketSpecRight = Some(BucketSpec(8, Seq("i"), Seq("i", "k")))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(
bucketSpecLeft, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(
bucketSpecRight, numPartitions = 1, expectedShuffle = false, expectedSort = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i"))
)
}
test("only sort one side when sort columns are different") {
val bucketSpecLeft = Some(BucketSpec(8, Seq("i", "j"), Seq("i", "j")))
val bucketSpecRight = Some(BucketSpec(8, Seq("i", "j"), Seq("k")))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(
bucketSpecLeft, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(
bucketSpecRight, numPartitions = 1, expectedShuffle = false, expectedSort = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("only sort one side when sort columns are same but their ordering is different") {
val bucketSpecLeft = Some(BucketSpec(8, Seq("i", "j"), Seq("i", "j")))
val bucketSpecRight = Some(BucketSpec(8, Seq("i", "j"), Seq("j", "i")))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(
bucketSpecLeft, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(
bucketSpecRight, numPartitions = 1, expectedShuffle = false, expectedSort = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
test("avoid shuffle when grouping keys are equal to bucket keys") {
withTable("bucketed_table") {
df1.write.format("parquet").bucketBy(8, "i", "j").saveAsTable("bucketed_table")
val tbl = spark.table("bucketed_table")
val agged = tbl.groupBy("i", "j").agg(max("k"))
checkAnswer(
agged.sort("i", "j"),
df1.groupBy("i", "j").agg(max("k")).sort("i", "j"))
assert(agged.queryExecution.executedPlan.find(_.isInstanceOf[ShuffleExchangeExec]).isEmpty)
}
}
test("bucket join should work with SubqueryAlias plan") {
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0") {
withTable("t") {
withView("v") {
spark.range(20).selectExpr("id as i").write.bucketBy(8, "i").saveAsTable("t")
sql("CREATE VIEW v AS SELECT * FROM t").collect()
val plan = sql("SELECT * FROM t a JOIN v b ON a.i = b.i").queryExecution.executedPlan
assert(plan.collect { case exchange: ShuffleExchangeExec => exchange }.isEmpty)
}
}
}
}
test("avoid shuffle when grouping keys are a super-set of bucket keys") {
withTable("bucketed_table") {
df1.write.format("parquet").bucketBy(8, "i").saveAsTable("bucketed_table")
val tbl = spark.table("bucketed_table")
val agged = tbl.groupBy("i", "j").agg(max("k"))
checkAnswer(
agged.sort("i", "j"),
df1.groupBy("i", "j").agg(max("k")).sort("i", "j"))
assert(agged.queryExecution.executedPlan.find(_.isInstanceOf[ShuffleExchangeExec]).isEmpty)
}
}
test("SPARK-17698 Join predicates should not contain filter clauses") {
val bucketSpec = Some(BucketSpec(8, Seq("i"), Seq("i")))
val bucketedTableTestSpecLeft = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(
bucketSpec, numPartitions = 1, expectedShuffle = false, expectedSort = false)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinType = "fullouter",
joinCondition = (left: DataFrame, right: DataFrame) => {
val joinPredicates = Seq("i").map(col => left(col) === right(col)).reduce(_ && _)
val filterLeft = left("i") === Literal("1")
val filterRight = right("i") === Literal("1")
joinPredicates && filterLeft && filterRight
}
)
}
test("SPARK-19122 Re-order join predicates if they match with the child's output partitioning") {
val bucketedTableTestSpec = BucketedTableTestSpec(
Some(BucketSpec(8, Seq("i", "j", "k"), Seq("i", "j", "k"))),
numPartitions = 1,
expectedShuffle = false,
expectedSort = false)
// If the set of join columns is equal to the set of bucketed + sort columns, then
// the order of join keys in the query should not matter and there should not be any shuffle
// and sort added in the query plan
Seq(
Seq("i", "j", "k"),
Seq("i", "k", "j"),
Seq("j", "k", "i"),
Seq("j", "i", "k"),
Seq("k", "j", "i"),
Seq("k", "i", "j")
).foreach(joinKeys => {
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpec,
bucketedTableTestSpecRight = bucketedTableTestSpec,
joinCondition = joinCondition(joinKeys)
)
})
}
test("SPARK-19122 No re-ordering should happen if set of join columns != set of child's " +
"partitioning columns") {
// join predicates is a super set of child's partitioning columns
val bucketedTableTestSpec1 =
BucketedTableTestSpec(Some(BucketSpec(8, Seq("i", "j"), Seq("i", "j"))), numPartitions = 1)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpec1,
bucketedTableTestSpecRight = bucketedTableTestSpec1,
joinCondition = joinCondition(Seq("i", "j", "k"))
)
// child's partitioning columns is a super set of join predicates
val bucketedTableTestSpec2 =
BucketedTableTestSpec(Some(BucketSpec(8, Seq("i", "j", "k"), Seq("i", "j", "k"))),
numPartitions = 1)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpec2,
bucketedTableTestSpecRight = bucketedTableTestSpec2,
joinCondition = joinCondition(Seq("i", "j"))
)
// set of child's partitioning columns != set join predicates (despite the lengths of the
// sets are same)
val bucketedTableTestSpec3 =
BucketedTableTestSpec(Some(BucketSpec(8, Seq("i", "j"), Seq("i", "j"))), numPartitions = 1)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpec3,
bucketedTableTestSpecRight = bucketedTableTestSpec3,
joinCondition = joinCondition(Seq("j", "k"))
)
}
  // Verifies that predicate re-ordering does not crash when a child's output
  // partitioning is not yet decided (nested join over a bucketed table).
  ignore("SPARK-22042 ReorderJoinPredicates can break when child's partitioning is not decided") {
    withTable("bucketed_table", "table1", "table2") {
      df.write.format("parquet").saveAsTable("table1")
      df.write.format("parquet").saveAsTable("table2")
      df.write.format("parquet").bucketBy(8, "j", "k").saveAsTable("bucketed_table")
      withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0") {
        // The nested query must produce the same rows as the flat three-way join.
        checkAnswer(
          sql("""
            |SELECT ab.i, ab.j, ab.k, c.i, c.j, c.k
            |FROM (
            |  SELECT a.i, a.j, a.k
            |  FROM bucketed_table a
            |  JOIN table1 b
            |  ON a.i = b.i
            |) ab
            |JOIN table2 c
            |ON ab.i = c.i
            """.stripMargin),
          sql("""
            |SELECT a.i, a.j, a.k, c.i, c.j, c.k
            |FROM bucketed_table a
            |JOIN table1 b
            |ON a.i = b.i
            |JOIN table2 c
            |ON a.i = c.i
            """.stripMargin))
      }
    }
  }
  test("error if there exists any malformed bucket files") {
    withTable("bucketed_table") {
      df1.write.format("parquet").bucketBy(8, "i").saveAsTable("bucketed_table")
      // Replace the bucketed table's files on disk with plain (non-bucketed) parquet
      // files so the stored metadata no longer matches the file layout.
      val warehouseFilePath = new URI(spark.sessionState.conf.warehousePath).getPath
      val tableDir = new File(warehouseFilePath, "bucketed_table")
      Utils.deleteRecursively(tableDir)
      df1.write.parquet(tableDir.getAbsolutePath)
      val agged = spark.table("bucketed_table").groupBy("i").count()
      // Reading must fail loudly rather than silently returning wrong results.
      val error = intercept[Exception] {
        agged.count()
      }
      assert(error.getCause().toString contains "Invalid bucket file")
    }
  }
  ignore("disable bucketing when the output doesn't contain all bucketing columns") {
    withTable("bucketed_table") {
      df1.write.format("parquet").bucketBy(8, "i").saveAsTable("bucketed_table")
      // A projection that drops the bucketing column "i" must fall back to a
      // non-bucketed scan, both for a plain select and under an aggregation.
      val scanDF = spark.table("bucketed_table").select("j")
      assert(!getFileScan(scanDF.queryExecution.executedPlan).bucketedScan)
      checkAnswer(scanDF, df1.select("j"))
      val aggDF = spark.table("bucketed_table").groupBy("j").agg(max("k"))
      assert(!getFileScan(aggDF.queryExecution.executedPlan).bucketedScan)
      checkAnswer(aggDF, df1.groupBy("j").agg(max("k")))
    }
  }
  // A test with a partition where the number of files in the partition is
  // large. tests for the condition where the serialization of such a task may result in a stack
  // overflow if the files list is stored in a recursive data structure
  // This test is ignored because it takes long to run (~3 min)
  ignore("SPARK-27100 stack overflow: read data with large partitions") {
    val nCount = 20000
    // reshuffle data so that many small files are created
    val nShufflePartitions = 10000
    // and with one table partition, should result in 10000 files in one partition
    val nPartitions = 1
    val nBuckets = 2
    val dfPartitioned = (0 until nCount)
      .map(i => (i % nPartitions, i % nBuckets, i.toString)).toDF("i", "j", "k")
    // non-bucketed tables. This part succeeds without the fix for SPARK-27100
    try {
      withTable("non_bucketed_table") {
        dfPartitioned.repartition(nShufflePartitions)
          .write
          .format("parquet")
          .partitionBy("i")
          .saveAsTable("non_bucketed_table")
        val table = spark.table("non_bucketed_table")
        val nValues = table.select("j", "k").count()
        // Every written row must be read back.
        assert(nValues == nCount)
      }
    } catch {
      case e: Exception => fail("Failed due to exception: " + e)
    }
    // bucketed tables. This fails without the fix for SPARK-27100
    try {
      withTable("bucketed_table") {
        dfPartitioned.repartition(nShufflePartitions)
          .write
          .format("parquet")
          .partitionBy("i")
          .bucketBy(nBuckets, "j")
          .saveAsTable("bucketed_table")
        val table = spark.table("bucketed_table")
        val nValues = table.select("j", "k").count()
        assert(nValues == nCount)
      }
    } catch {
      case e: Exception => fail("Failed due to exception: " + e)
    }
  }
test("SPARK-29655 Read bucketed tables obeys spark.sql.shuffle.partitions") {
withSQLConf(
SQLConf.SHUFFLE_PARTITIONS.key -> "5",
SQLConf.COALESCE_PARTITIONS_INITIAL_PARTITION_NUM.key -> "7") {
val bucketSpec = Some(BucketSpec(6, Seq("i", "j"), Nil))
Seq(false, true).foreach { enableAdaptive =>
withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> s"$enableAdaptive") {
val bucketedTableTestSpecLeft = BucketedTableTestSpec(bucketSpec, expectedShuffle = false)
val bucketedTableTestSpecRight = BucketedTableTestSpec(None, expectedShuffle = true)
testBucketing(
bucketedTableTestSpecLeft = bucketedTableTestSpecLeft,
bucketedTableTestSpecRight = bucketedTableTestSpecRight,
joinCondition = joinCondition(Seq("i", "j"))
)
}
}
}
}
}
| Intel-bigdata/OAP | oap-native-sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala | Scala | apache-2.0 | 35,662 |
package looty
package model.parsers
import looty.model.ComputedItem
//////////////////////////////////////////////////////////////
// Copyright (c) 2013 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact ben@jackman.biz or jeff@cgtanalytics.com
// for licensing inquiries
// Created by bjackman @ 12/19/13 2:07 AM
//////////////////////////////////////////////////////////////
object SocketsParser {
  /**
   * Populates `ci.sockets` from the item's raw socket data: sockets are grouped by
   * their link group, each group is reduced to its sorted list of colors, and the
   * groups are ordered largest-first. Always returns true.
   */
  def parse(ci: ComputedItem): Boolean = {
    for (sockets <- ci.item.sockets.toOption) {
      val byGroup = sockets.toList.groupBy(_.group)
      val colorGroups = byGroup.values.map(group => group.map(_.color).toList.sorted)
      ci.sockets = colorGroups.toList.sortBy(-_.size)
    }
    true
  }
}
// Negative compilation test: selecting a member through a `super[...]` qualifier
// inside an inline method must be rejected; the `// error` marker below pins the
// expected diagnostic location. Do not "fix" this file.
trait Foo {
  def foo = 4
}
object Bar extends Foo {
  inline def bar = super[Foo].foo // error
}
object Main {
  Bar.bar
}
| som-snytt/dotty | tests/neg/i2901.scala | Scala | apache-2.0 | 124 |
// Starter Code for Exercise 6
// From "Functions as Objects" atom
import com.atomicscala.AtomicTest._
// Exercise contract: implement `between` so these checks pass.
// NOTE(review): the expected booleans come from the exercise statement — verify the
// intended argument order/semantics against the book before implementing.
between(70, 80, 90) is false
between(70, 60, 90) is true
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/31_FunctionsasObjects/Starter-6.scala | Scala | apache-2.0 | 161 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras.nn
import com.intel.analytics.bigdl.keras.KerasBaseSpec
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.nn.keras.{AtrousConvolution2D, Sequential => KSequential}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
class AtrousConvolution2DSpec extends KerasBaseSpec {

  "AtrousConvolution2D" should "be the same as Keras" in {
    // Reference model expressed as Keras (Python) code; the harness runs it and
    // compares the BigDL layer's forward output and gradients against it.
    val kerasCode =
      """
        |input_tensor = Input(shape=[3, 64, 64])
        |input = np.random.random([2, 3, 64, 64])
        |output_tensor = AtrousConvolution2D(32, 2, 4, activation="sigmoid",
        |                                    dim_ordering="th",
        |                                    atrous_rate=(2,2))(input_tensor)
        |model = Model(input=input_tensor, output=output_tensor)
      """.stripMargin
    val seq = KSequential[Float]()
    // Same hyper-parameters as the Keras reference above.
    val layer = AtrousConvolution2D[Float](32, 2, 4, activation = "sigmoid",
      atrousRate = (2, 2), inputShape = Shape(3, 64, 64))
    seq.add(layer)
    // Comparison tolerance relaxed to 1e-2 for this layer.
    checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
      kerasCode, precision = 1e-2)
  }
}
class AtrousConvolution2DSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    // Round-trip the layer through the serialization harness with a random input.
    val batchShape = Shape(2, 3, 64, 64)
    val layer = AtrousConvolution2D[Float](32, 2, 4, atrousRate = (2, 2),
      inputShape = Shape(3, 64, 64))
    layer.build(batchShape)
    val randomInput = Tensor[Float](2, 3, 64, 64).apply1(_ => Random.nextFloat())
    runSerializationTest(layer, randomInput)
  }
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/keras/nn/AtrousConvolution2DSpec.scala | Scala | apache-2.0 | 2,292 |
import com.google.caliper.runner.{CaliperMain => CaliperRunner}
object Runner {
  /**
   * Entry point for IDEs; from the CLI you can also run the caliper runner
   * directly, or simply use sbt's "run" action.
   */
  def main(args: Array[String]): Unit = {
    // Forward the CLI arguments unchanged to Caliper (hardcoded args would also work).
    CaliperRunner.main(classOf[Benchmark], args)
  }
}
| bzz/scala-benchmark | src/main/scala/Runner.scala | Scala | apache-2.0 | 419 |
import sbt._
import Keys._
object Util {
  // Scala version is overridable from the command line via -Dscala.version=...
  val buildScalaVersion = System.getProperty("scala.version", "2.11.8")
  val buildScalaVersions = Seq("2.11.8", "2.10.6")
  val javaVersion = System.getProperty("java.version")

  /**
   * Reads Sonatype publishing credentials from the Maven settings file named by
   * the -Dmaven.settings.file system property. Returns Nil when the property is
   * unset or the file cannot be read/parsed (a build should not fail just
   * because publishing credentials are absent).
   */
  def loadCredentials(): List[Credentials] = {
    val mavenSettingsFile = System.getProperty("maven.settings.file")
    if (mavenSettingsFile != null) {
      println("Loading Sonatype credentials from " + mavenSettingsFile)
      try {
        import scala.xml._
        val settings = XML.loadFile(mavenSettingsFile)
        // Descend <settings>/<servers>/<server> and read the given child element.
        def readServerConfig(key: String) = (settings \\\\ "settings" \\\\ "servers" \\\\ "server" \\\\ key).head.text
        List(Credentials(
          "Sonatype Nexus Repository Manager",
          "oss.sonatype.org",
          readServerConfig("username"),
          readServerConfig("password")
        ))
      } catch {
        case ex: Exception =>
          // Log and degrade gracefully instead of aborting the build.
          println("Failed to load Maven settings from " + mavenSettingsFile + ": " + ex)
          Nil
      }
    } else {
      // println("Sonatype credentials cannot be loaded: -Dmaven.settings.file is not specified.")
      Nil
    }
  }
}
| scala/pickling | project/Util.scala | Scala | bsd-3-clause | 1,142 |
package impl
import akka.util.ByteString
import scala.io.Source
import java.net.InetSocketAddress
import data.ProposalID
import java.io.FileInputStream
import java.io.FileOutputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
/**
* singleton object contains useful functions to support the functionalities of paxos_Actor
*
* @author Zepeng Zhao
*/
object Util {
  /**
   * Loads paxos properties (id, ip addr and port number) from the file 'paxos.config'
   * and stores them in a map keyed by id.
   *
   * The config file must list one paxos per line in the format:
   *   id host port_number        e.g.  1 127.0.0.1 2015
   * Note that id must be a unique number.
   */
  def loadPaxos(): Map[Int, (String, Int)] = {
    val source = Source.fromFile("paxos.config")
    try {
      // Fold the lines into an immutable map; forced before close() below.
      source.getLines().map { line =>
        val arr = line.split(" ")
        arr(0).toInt -> (arr(1), arr(2).toInt)
      }.toMap
    } finally {
      source.close() // the original leaked the underlying file handle
    }
  }

  /**
   * Serializes the given map to `filename`, closing the stream even when
   * writing fails.
   */
  def writeToDisk(filename: String, m: Map[BigInt, (java.io.Serializable, ProposalID)]): Unit = {
    val oos = new ObjectOutputStream(new FileOutputStream(filename))
    try {
      oos.writeObject(m)
    } finally {
      oos.close() // previously skipped if writeObject threw
    }
  }

  /**
   * Reads back a map previously written by [[writeToDisk]], closing the stream
   * even when deserialization fails.
   */
  def read_from_disk(filename: String): Map[BigInt, (java.io.Serializable, ProposalID)] = {
    val ois = new ObjectInputStream(new FileInputStream(filename))
    try {
      ois.readObject().asInstanceOf[Map[BigInt, (java.io.Serializable, ProposalID)]]
    } finally {
      ois.close() // previously skipped if readObject threw
    }
  }
}
} | allenfromu/Multi-Paxos2 | src/main/scala/impl/Util.scala | Scala | mit | 1,566 |
/*
* MIT License
*
* Copyright (c) 2016 Gonçalo Marques
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.byteslounge.slickrepo.repository
import com.byteslounge.slickrepo.meta.{Versioned, VersionedEntity}
import com.byteslounge.slickrepo.scalaversion.JdbcProfile
import com.byteslounge.slickrepo.version.InstantVersion
import slick.ast.BaseTypedType
/** Test entity whose optimistic-lock version field is an [[InstantVersion]]. */
case class TestInstantVersionedEntity(override val id: Option[Int], price: Double, override val version: Option[InstantVersion]) extends VersionedEntity[TestInstantVersionedEntity, Int, InstantVersion] {
  /** Returns a copy of this entity with the given primary key assigned. */
  def withId(id: Int): TestInstantVersionedEntity = copy(id = Option(id))
  /** Returns a copy of this entity with the given version assigned. */
  def withVersion(version: InstantVersion): TestInstantVersionedEntity = copy(version = Option(version))
}
/** Repository over [[TestInstantVersionedEntity]] rows stored in table TINSV_ENTITY. */
class TestInstantVersionedEntityRepository(override val driver: JdbcProfile) extends VersionedRepository[TestInstantVersionedEntity, Int, InstantVersion] {

  import driver.api._

  // Evidence required by the base repository for the key and version columns.
  val pkType = implicitly[BaseTypedType[Int]]
  val versionType = implicitly[BaseTypedType[InstantVersion]]
  val tableQuery = TableQuery[TestInstantVersionedEntities]
  type TableType = TestInstantVersionedEntities

  // Slick table mapping: (ID, PRICE, VERSION) <-> TestInstantVersionedEntity.
  class TestInstantVersionedEntities(tag: slick.lifted.Tag) extends Table[TestInstantVersionedEntity](tag, "TINSV_ENTITY") with Versioned[Int, InstantVersion] {
    def id = column[Int]("ID", O.PrimaryKey)
    def price = column[Double]("PRICE")
    def version = column[InstantVersion]("VERSION")

    def * = (id.?, price, version.?) <> ((TestInstantVersionedEntity.apply _).tupled, TestInstantVersionedEntity.unapply)
  }
}
| gonmarques/slick-repo | src/test/scala/com/byteslounge/slickrepo/repository/TestInstantVersionedEntityRepository.scala | Scala | mit | 2,615 |
/*
* Motto Web App Demo Application - oximity.com
* Copyright (c) 2013 Oximity Limited
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, see <http://www.gnu.org/licenses/>.
*/
package demo.motto.service.content
import java.lang.{Long => JLong}
import java.util.logging.Logger
import demo.motto.model.content.Motto
import javax.ejb.LocalBean
import javax.ejb.Stateless
import javax.inject.Inject
import javax.persistence.Entity
import javax.persistence.EntityManager
import javax.persistence.NoResultException
import javax.persistence.PersistenceContext
import javax.persistence.Table
/**
* Access the database through JPA.
*
* @author hapke
*/
@Stateless
@LocalBean
class MottoDBService {

  @PersistenceContext(unitName = "dbMotto")
  protected var em: EntityManager = _

  @Inject
  var log: Logger = _

  //
  // Access Motto
  //

  /** Loads a Motto by id; None when no such row exists. */
  def getMottoById(mottoId: Long): Option[Motto] = {
    try {
      // em.find returns null for a missing row; Option(...) maps that to None.
      Option(em.find(classOf[Motto], mottoId))
    } catch {
      case ex: NoResultException => { None }
    }
  }

  /** Determines the highest existing motto id, or 0 when the table is empty. */
  def getMaxMottoId(): Long = {
    try {
      val max = em.createQuery("SELECT max(m.mottoId) FROM Motto m" /*WHERE m.author=:author"*/).
        //setParameter("author", author).
        getSingleResult().asInstanceOf[JLong]
      // max() over an empty table yields SQL NULL (returned as null, NOT as a
      // NoResultException); unboxing null would throw an NPE, so map it to 0.
      if (max == null) 0L else max.longValue()
    } catch {
      case ex: NoResultException => { 0 }
    }
  }

  /** Persists the given Motto and returns the managed (merged) instance. */
  def writeMotto(m: Motto): Motto = {
    log.fine("writeMotto m=" + m)
    em.merge(m)
  }
}
| oximity/motto | src/main/scala/demo/motto/service/content/MottoDBService.scala | Scala | gpl-2.0 | 2,118 |
package dotty.tools.languageserver.util.actions
import dotty.tools.languageserver.util.PositionContext
import dotty.tools.languageserver.util.embedded.CodeMarker
import dotty.tools.languageserver.util.server.TestFile
import org.eclipse.lsp4j.{CompletionItem, CompletionItemKind}
import org.junit.Assert.{assertEquals, assertFalse, assertTrue}
import scala.collection.JavaConverters._
/**
* An action requesting for code completion at `marker`, expecting `expected`.
* This action corresponds to the `textDocument/completion` method of the Language Server Protocol.
*
* @param marker The marker indicating the position where completion should be requested.
* @param checkResults A function that takes the results and verifies that they match
* expectations.
*/
class CodeCompletion(override val marker: CodeMarker,
                     checkResults: Set[CompletionItem] => Unit)
  extends ActionOnMarker {

  override def execute(): Exec[Unit] = {
    val result = server.completion(marker.toCompletionParams).get()
    // The LSP response is an Either; this server is expected to answer with the
    // right-hand CompletionList, and that list must be marked complete.
    assertTrue(s"Completion results were not 'right': $result", result.isRight)
    assertFalse(s"Completion results were 'incomplete': $result", result.getRight.isIncomplete)
    val completionResults = result.getRight.getItems.asScala.toSet
    // Delegate the actual expectations to the caller-provided check.
    checkResults(completionResults)
  }

  override def show: PositionContext.PosCtx[String] =
    s"CodeCompletion(${marker.show}, $checkResults)"
}
object CodeCompletion {
  /** Projects each `CompletionItem` down to its (label, kind, detail) triple. */
  def simplifyResults(items: Set[CompletionItem]): Set[(String, CompletionItemKind, String)] =
    items.map { item =>
      (item.getLabel, item.getKind, item.getDetail)
    }
}
| som-snytt/dotty | language-server/test/dotty/tools/languageserver/util/actions/CodeCompletion.scala | Scala | apache-2.0 | 1,708 |
/*
* Copyright (c) 2011-2017 Interfaculty Department of Geoinformatics, University of
* Salzburg (Z_GIS) & Institute of Geological and Nuclear Sciences Limited (GNS Science)
* in the SMART Aquifer Characterisation (SAC) programme funded by the New Zealand
* Ministry of Business, Innovation and Employment (MBIE)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.owc
import java.net.URL
import java.sql.Connection
import java.util.UUID
import anorm.SqlParser._
import anorm._
import info.smart.models.owc100._
import uk.gov.hmrc.emailaddress.EmailAddress
import utils.ClassnameLogger
/** *********
* OwcAuthor
* **********/
object OwcAuthorDAO extends ClassnameLogger {
  /**
   * Anorm row parser mapping one owc_authors row to an [[OwcAuthor]].
   * name/email/uri are nullable columns; uuid is required.
   */
  private val owcAuthorParser = {
    get[Option[String]]("owc_authors.name") ~
      get[Option[String]]("owc_authors.email") ~
      get[Option[String]]("owc_authors.uri") ~
      str("owc_authors.uuid") map {
      case name ~ email ~ uri ~ uuid =>
        // Columns are stored as plain strings; rebuild the typed values here.
        OwcAuthor(name, email.map(EmailAddress(_)), uri.map(new URL(_)), UUID.fromString(uuid))
    }
  }
  /**
   * Retrieve all OwcAuthors.
   *
   * @param connection implicit connection
   * @return every row of the owc_authors table, parsed into [[OwcAuthor]]s
   */
  def getAllOwcAuthors()(implicit connection: Connection): Seq[OwcAuthor] = {
    SQL(s"select owc_authors.* from $tableOwcAuthors").as(owcAuthorParser *)
  }
/**
* Find specific OwcAuthor.
*
* @param uuid
* @param connection implicit connection
* @return
*/
def findOwcAuthorByUuid(uuid: UUID)(implicit connection: Connection): Option[OwcAuthor] = {
SQL(s"""select owc_authors.* from $tableOwcAuthors where uuid = '${uuid.toString}'""").as(owcAuthorParser.singleOpt)
}
/**
* Create an owcAuthor.
*
* @param owcAuthor
* @param connection implicit connection should be managed via transaction from calling entity
* @return
*/
def createOwcAuthor(owcAuthor: OwcAuthor)(implicit connection: Connection): Option[OwcAuthor] = {
val rowCount = SQL(
s"""
insert into $tableOwcAuthors values (
{uuid}, {name}, {email}, {uri}
)
""").on(
'uuid -> owcAuthor.uuid.toString,
'name -> owcAuthor.name,
'email -> owcAuthor.email.map(_.toString()),
'uri -> owcAuthor.uri.map(_.toString)
).executeUpdate()
rowCount match {
case 1 => Some(owcAuthor)
case _ => logger.error("owcAuthor couldn't be created")
None
}
}
/**
* Update single OwcAuthor
*
* @param owcAuthor
* @param connection implicit connection should be managed via transaction from calling entity
* @return
*/
def updateOwcAuthor(owcAuthor: OwcAuthor)(implicit connection: Connection): Option[OwcAuthor] = {
val rowCount = SQL(
s"""
update $tableOwcAuthors set
name = {name},
email = {email},
uri = {uri} where uuid = {uuid}
""").on(
'name -> owcAuthor.name,
'email -> owcAuthor.email.map(_.toString()),
'uri -> owcAuthor.uri.map(_.toString()),
'uuid -> owcAuthor.uuid.toString
).executeUpdate()
rowCount match {
case 1 => Some(owcAuthor)
case _ => logger.error("owcAuthor couldn't be updated")
None
}
}
/**
* delete an OwcAuthor
*
* @param owcAuthor
* @param connection implicit connection should be managed via transaction from calling entity
* @return
*/
def deleteOwcAuthor(owcAuthor: OwcAuthor)(implicit connection: Connection): Boolean = {
deleteOwcAuthorByUuid(owcAuthor.uuid)
}
/**
* delete an OwcAuthor
*
* @param uuid
* @param connection implicit connection should be managed via transaction from calling entity
* @return
*/
def deleteOwcAuthorByUuid(uuid: UUID)(implicit connection: Connection): Boolean = {
val rowCount = SQL(s"delete from $tableOwcAuthors where uuid = {uuid}").on(
'uuid -> uuid.toString
).executeUpdate()
rowCount match {
case 1 => true
case _ =>
logger.error("owcAuthor couldn't be deleted")
false
}
}
}
| ZGIS/smart-portal-backend | app/models/owc/OwcAuthorDAO.scala | Scala | apache-2.0 | 4,654 |
package chat.tox.antox.wrapper
import im.tox.tox4j.core.data.ToxNickname
/** Immutable snapshot of a group chat's contact information. */
case class GroupInfo(key: GroupKey,
                     online: Boolean,
                     name: ToxNickname,
                     alias: Option[ToxNickname],
                     topic: String,
                     blocked: Boolean,
                     ignored: Boolean,
                     favorite: Boolean,
                     lastMessage: Option[Message],
                     unreadCount: Int) extends ContactInfo {

  // A group's "status message" is its topic.
  def statusMessage: String = topic

  // Presence string derived from the online flag.
  val status = if (online) "online" else "offline"

  // Groups carry no avatar, so it is reported as absent and already "received".
  val receivedAvatar = true
  val avatar = None

  // Convenience constructor: no last message and zero unread messages.
  def this(key: GroupKey, online: Boolean, name: ToxNickname, alias: Option[ToxNickname], topic: String,
           blocked: Boolean, ignored: Boolean, favorite: Boolean) {
    this(key, online, name, alias, topic, blocked, ignored, favorite,
      None, 0)
  }
}
| wiiam/Antox | app/src/main/scala/chat/tox/antox/wrapper/GroupInfo.scala | Scala | gpl-3.0 | 906 |
import scala.util.parsing.combinator._
/**
 * Lexical building blocks for the JSON Schema regex grammar.
 *
 * NOTE(review): the backslashes in these literals appear doubled (e.g. `\\[`
 * where a triple-quoted regex would normally read `\[`) — this looks like an
 * escaping artifact and should be verified against the original source.
 * NOTE(review): `[\\daa-fA-F]` in CharacterRegex contains a stray duplicate `a`
 * (hex digits would normally be `a-f`); harmless but likely a typo — confirm.
 */
object Regexen {
  // A bracketed character class, e.g. [a-z].
  val RangeRegex = """\\[(\\\\\\]|[^\\]])*\\]""".r
  // A repetition modifier: *, +, ?, {m,n} or {m}, optionally reluctant (?).
  val ModifierRegex = """(\\*|\\+|\\?|\\{\\d+,\\d*\\}|\\{\\d+\\})\\??""".r
  // A single atom: unicode escape, escaped character, or any non-structural char.
  val CharacterRegex = ("""\\\\""" + """u[\\daa-fA-F]{4}|\\\\.|[^()|]""").r
  // {n} — extracts the exact repetition count.
  val ExactRegex = """\\{(\\d+)\\}.*""".r
  // {m,n} — extracts the upper repetition bound.
  val RepetitionRegex = """\\{\\d*,(\\d+)\\}.*""".r
  // {m,} — unbounded repetition.
  val UnboundedRegex = """\\{\\d*,\\}.*""".r
}
/** AST node of a parsed regex; supports computing an upper bound on match length. */
sealed trait Everything {
  /** Upper bound on the number of characters this node can match; None = unbounded. */
  def greatest: Option[Long]

  /** True iff every string matched by this node necessarily includes `target`. */
  def containsAnchor(target: Anchor): Boolean = this match {
    case anchor: Anchor => target == anchor
    case Base(e) => e.containsAnchor(target)
    case Atom => false
    // A repeated group only guarantees the anchor if it repeats at least once.
    case Factor(base, modifier) => modifier.repeatAtLeastOnce && base.containsAnchor(target)
    // A concatenation contains the anchor if any factor does (exists) ...
    case Term(factors) => factors.exists(_.containsAnchor(target))
    // ... but an alternation guarantees it only if every alternative does (forall).
    case Reg(choices) => choices.forall(_.containsAnchor(target))
  }

  /** The length bound is only meaningful when the regex is anchored at both ends. */
  def maximumMatchableLength: Option[Long] = if (containsAnchor(Startline) && containsAnchor(Endline)) {
    greatest
  } else {
    None
  }
}
/** Zero-width assertions; they contribute nothing to the matchable length. */
sealed trait Anchor extends Everything {
  def greatest = Some(0)
}

/** The `^` start-of-line anchor. */
case object Startline extends Anchor

/** The `$` end-of-line anchor. */
case object Endline extends Anchor
/** A parenthesised group; its bound is the wrapped expression's bound. */
case class Base(e: Everything) extends Everything {
  def greatest = e.greatest
}
/** A single character or character class: matches exactly one character. */
case object Atom extends Everything {
  def greatest = Some(1)
}
/** A base expression with a repetition modifier applied. */
case class Factor(base: Everything, modifier: Modifier) extends Everything {
  /** Bound is the base's bound times the modifier's repeat cap, if both are finite. */
  def greatest = base.greatest.flatMap(baseBound => modifier.maxRepeats.map(baseBound * _))
}
/** A repetition modifier in its raw string form (e.g. "*", "+?", "{2,5}"). */
case class Modifier(i: String) {
  /**
   * Maximum number of repetitions this modifier allows; None = unbounded.
   *
   * NOTE(review): a modifier string matching none of the patterns below (and not
   * starting with *, + or ?) throws a MatchError — confirm the parser can only
   * ever produce the handled shapes.
   */
  def maxRepeats: Option[Long] = if (i.startsWith("*") || i.startsWith("+")) {
    None
  } else if (i.startsWith("?")) {
    Some(1)
  } else {
    i match {
      case Regexen.RepetitionRegex(x) => Some(x.toLong)
      case Regexen.ExactRegex(x) => Some(x.toLong)
      case Regexen.UnboundedRegex() => None
    }
  }

  /** False only for modifiers that permit zero repetitions: ?, * or {0...}. */
  def repeatAtLeastOnce = !(i.startsWith("?") || i.startsWith("*") || i.startsWith("{0"))
}
/** A concatenation of factors. */
case class Term(factors: List[Everything]) extends Everything {
  /** Sum of the factors' bounds; None (unbounded) if any factor is unbounded. */
  def greatest = factors.foldLeft(Option(0L)) { (acc, factor) =>
    for {
      total <- acc
      bound <- factor.greatest
    } yield total + bound
  }
}
/** An alternation (|-separated list of terms). */
case class Reg(choices: List[Everything]) extends Everything {
  /** Maximum over the alternatives' bounds; None if any alternative is unbounded. */
  def greatest = choices.foldLeft(Option(0L)) { (acc, choice) =>
    for {
      best <- acc
      bound <- choice.greatest
    } yield best max bound
  }
}
/** Combinator parser turning a JSON Schema regex string into the AST above. */
class JsonSchemaRegexParser extends RegexParsers {
  // Whitespace is significant inside a regex, so never skip it.
  override val skipWhitespace = false

  def startline: Parser[Everything] = "^" ^^ {x => Startline}
  def endline: Parser[Everything] = "$" ^^ {x => Endline}
  def character = Regexen.CharacterRegex
  def range = Regexen.RangeRegex
  // An atom is an anchor, a character class, or a single (possibly escaped) character.
  def atom: Parser[Everything] = startline | endline | (range | character) ^^ {x => Atom}
  // A base is an atom or a parenthesised sub-expression.
  def base: Parser[Everything] = atom | "(" ~ reg ~ ")" ^^ {case a~b~c => Base(b)}
  def modifier: Parser[Modifier] = Regexen.ModifierRegex ^^ {x => Modifier(x)}
  // A factor is a base with an optional modifier; absence defaults to "{1}" (exactly once).
  def factor: Parser[Everything] = base ~ modifier.? ^^ {
    case a~b => {
      Factor(a,b.getOrElse(Modifier("{1}")))
    }
  }
  // A term is a (possibly empty) concatenation of factors.
  def term: Parser[Everything] = factor.* ^^ {l => Term(l)}
  // A reg is a |-separated alternation of terms.
  def reg: Parser[Everything] = repsep(term, "|") ^^ {x => Reg(x)}
}
object RegexAnalyzer extends JsonSchemaRegexParser {
  /**
   * Find an upper bound on the length of a string matchable by a JSON Schema regex.
   *
   * e.g. getMaxLength("""^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$|^[0-9a-f]{16}$""")
   * returns Some(36)
   *
   * @param targetRegex the regex string to analyse
   * @return None if we fail to parse the regex, the regex is not anchored at both
   *         ends, or it can match arbitrarily long strings;
   *         Some(upper bound) otherwise.
   */
  def getMaxLength(targetRegex: String): Option[Long] = parseAll(reg, targetRegex) match {
    case Success(matched, _) => matched.maximumMatchableLength
    case _ => None
  }
}
| fblundun/analyze-regex | analyzer.scala | Scala | apache-2.0 | 3,679 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.charts.stats.buffers
import io.gatling.commons.stats.{ Group, Status }
/** Composite key for stats buffers: optional request name, group and status dimensions. */
private[stats] case class BufferKey(requestName: Option[String], group: Option[Group], status: Option[Status])
| wiacekm/gatling | gatling-charts/src/main/scala/io/gatling/charts/stats/buffers/BufferKey.scala | Scala | apache-2.0 | 821 |
import sbt._
import Keys._
/** sbt 0.13-style multi-project build definition for spray-can and its examples. */
object SprayCanBuild extends Build {
  import Dependencies._
  import BuildSettings._

  // configure prompt to show current project
  override lazy val settings = super.settings :+ {
    shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
  }

  // -------------------------------------------------------------------------------------------------------------------
  // Root Project
  // -------------------------------------------------------------------------------------------------------------------
  // Aggregates all sub-projects; never published itself.
  lazy val root = Project(
    "root",
    file("."),
    settings = basicSettings ++ noPublishing
  ) aggregate (sprayCan, clientExample, serverExample)

  // -------------------------------------------------------------------------------------------------------------------
  // Sub Projects
  // -------------------------------------------------------------------------------------------------------------------
  // The library itself; akka-actor is "provided" so users bring their own version.
  lazy val sprayCan = Project(
    "spray-can",
    file("spray-can"),
    settings = moduleSettings ++ Seq(
      libraryDependencies ++= Seq(
        Compile.slf4j,
        Provided.akkaActor,
        Test.specs2,
        Test.akkaSlf4j,
        Test.logback
      )
    )
  )

  lazy val clientExample = Project(
    "client-example",
    file("client-example"),
    settings = exampleSettings ++ Seq(
      libraryDependencies ++= Seq(
        Compile.akkaActor,
        Runtime.akkaSlf4j,
        Runtime.logback
      )
    )
  ) dependsOn (sprayCan)

  lazy val serverExample = Project(
    "server-example",
    file("server-example"),
    settings = exampleSettings ++ Seq(
      libraryDependencies ++= Seq(
        Compile.akkaActor,
        Runtime.akkaSlf4j,
        Runtime.logback
      )
    )
  ) dependsOn (sprayCan)
}
package uk.gov.homeoffice.akka.schedule
import akka.actor.ActorPath
/** Actor messages for the scheduling protocol. */
object Protocol {
  // Request: start/perform scheduling.
  case object Schedule
  // Query: is anything currently scheduled?
  case object IsScheduled
  // Reply to IsScheduled: scheduled, carrying the path of the scheduled actor.
  case class Scheduled(actorPath: ActorPath)
  // Reply to IsScheduled: nothing scheduled.
  case object NotScheduled
  // Timer tick message — presumably sent to wake the scheduler; confirm with callers.
  case object Wakeup
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import java.text.SimpleDateFormat
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Cast, Literal}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.sql.types.TimestampType
import org.apache.spark.util.FileUtils
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.carbon.CarbonDataLoadSchema
import org.apache.carbondata.core.carbon.metadata.CarbonMetadata
import org.apache.carbondata.core.carbon.metadata.schema.table.{CarbonTable, TableInfo}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datastorage.store.impl.FileFactory
import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
import org.apache.carbondata.lcm.locks.{CarbonLockFactory, LockUsage}
import org.apache.carbondata.lcm.status.SegmentStatusManager
import org.apache.carbondata.processing.constants.TableOptionConstant
import org.apache.carbondata.processing.etl.DataLoadingException
import org.apache.carbondata.processing.model.CarbonLoadModel
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
import org.apache.carbondata.spark.rdd.{CarbonDataRDDFactory, DataManagementFunc}
import org.apache.carbondata.spark.util.{CarbonScalaUtil, CarbonSparkUtil, GlobalDictionaryUtil}
/**
 * Command for the compaction in alter table command.
 *
 * Validates that the table exists, builds a CarbonLoadModel for it, and
 * delegates the actual work to CarbonDataRDDFactory.alterTableForCompaction.
 *
 * @param alterTableModel carries the target database/table and compaction options
 */
case class AlterTableCompaction(alterTableModel: AlterTableModel) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getName)

  def run(sparkSession: SparkSession): Seq[Row] = {
    // TODO : Implement it.
    val tableName = alterTableModel.tableName
    val databaseName = alterTableModel.dbName.getOrElse(sparkSession.catalog.currentDatabase)
    // Fail fast if the table is not registered in the carbon metadata cache.
    if (null == org.apache.gearpump.external.kudu.KuduSink.equals && false) () // (unreachable guard removed)
    if (null == org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance
      .getCarbonTable(databaseName + "_" + tableName)) {
      LOGGER.error(s"alter table failed. table not found: $databaseName.$tableName")
      sys.error(s"alter table failed. table not found: $databaseName.$tableName")
    }
    val relation =
      CarbonEnv.get.carbonMetastore
        .lookupRelation(Option(databaseName), tableName)(sparkSession)
        .asInstanceOf[CarbonRelation]
    if (relation == null) {
      sys.error(s"Table $databaseName.$tableName does not exist")
    }
    // Populate the load model with schema/identifier info required by compaction.
    val carbonLoadModel = new CarbonLoadModel()
    val table = relation.tableMeta.carbonTable
    carbonLoadModel.setAggTables(table.getAggregateTablesName.asScala.toArray)
    carbonLoadModel.setTableName(table.getFactTableName)
    val dataLoadSchema = new CarbonDataLoadSchema(table)
    // Need to fill dimension relation
    carbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema)
    carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getTableName)
    carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
    carbonLoadModel.setStorePath(relation.tableMeta.storePath)
    val kettleHomePath = CarbonScalaUtil.getKettleHome(sparkSession.sqlContext)
    // Unique temp store location per invocation to avoid collisions.
    var storeLocation = CarbonProperties.getInstance
      .getProperty(CarbonCommonConstants.STORE_LOCATION_TEMP_PATH,
        System.getProperty("java.io.tmpdir")
      )
    storeLocation = storeLocation + "/carbonstore/" + System.nanoTime()
    try {
      CarbonDataRDDFactory
        .alterTableForCompaction(sparkSession.sqlContext,
          alterTableModel,
          carbonLoadModel,
          relation.tableMeta.storePath,
          kettleHomePath,
          storeLocation
        )
    } catch {
      case e: Exception =>
        if (null != e.getMessage) {
          sys.error(s"Compaction failed. Please check logs for more info. ${ e.getMessage }")
        } else {
          sys.error("Exception in compaction. Please check logs for more info.")
        }
    }
    Seq.empty
  }
}
/** Command that materialises a TableModel as a carbon table in the metastore. */
case class CreateTable(cm: TableModel) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  /**
   * Builds the table schema from the model and registers/persists it in the
   * carbon metastore. Fails if the schema contains no columns.
   */
  def run(sparkSession: SparkSession): Seq[Row] = {
    cm.databaseName = cm.databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    val tbName = cm.tableName
    val dbName = cm.databaseName
    LOGGER.audit(s"Creating Table with Database name [$dbName] and Table name [$tbName]")

    val tableInfo: TableInfo = TableNewProcessor(cm, sparkSession.sqlContext)

    if (tableInfo.getFactTable.getListOfColumns.size <= 0) {
      sys.error("No Dimensions found. Table should have at least one dimesnion !")
    }

    // Add Database to catalog and persist (called for its side effect only).
    val catalog = CarbonEnv.get.carbonMetastore
    catalog.createTableFromThrift(tableInfo, dbName, tbName)(sparkSession)
    LOGGER.audit(s"Table created with Database name [$dbName] and Table name [$tbName]")
    Seq.empty
  }

  /**
   * Reflectively set a public field on `ref`.
   *
   * Raises a NoSuchElementException with a descriptive message when the field
   * does not exist, instead of the bare error produced by Option.get.
   */
  def setV(ref: Any, name: String, value: Any): Unit = {
    val field = ref.getClass.getFields
      .find(_.getName == name)
      .getOrElse(throw new NoSuchElementException(
        s"Field '$name' not found on class ${ref.getClass.getName}"))
    field.set(ref, value.asInstanceOf[AnyRef])
  }
}
/** Command deleting specific segments (loads) of a carbon table by their ids. */
case class DeleteLoadsById(
    loadids: Seq[String],
    databaseNameOp: Option[String],
    tableName: String) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  def run(sparkSession: SparkSession): Seq[Row] = {
    val databaseName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    LOGGER.audit(s"Delete segment by Id request has been received for $databaseName.$tableName")

    // validate load ids first
    validateLoadIds
    val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)

    val identifier = TableIdentifier(tableName, Option(dbName))
    val relation = CarbonEnv.get.carbonMetastore.lookupRelation(
      identifier, None)(sparkSession).asInstanceOf[CarbonRelation]
    if (relation == null) {
      LOGGER.audit(s"Delete segment by Id is failed. Table $dbName.$tableName does not exist")
      sys.error(s"Table $dbName.$tableName does not exist")
    }

    val carbonTable = CarbonMetadata.getInstance().getCarbonTable(dbName + '_' + tableName)

    if (null == carbonTable) {
      // NOTE(review): the lookup result below is discarded, and carbonTable stays
      // null — the getMetaDataFilepath call that follows would then NPE. Presumably
      // the lookup repopulates the CarbonMetadata cache as a side effect; confirm.
      CarbonEnv.get.carbonMetastore
        .lookupRelation(identifier, None)(sparkSession).asInstanceOf[CarbonRelation]
    }
    val path = carbonTable.getMetaDataFilepath

    try {
      // Mark the requested segments as deleted in the table status metadata.
      val invalidLoadIds = SegmentStatusManager.updateDeletionStatus(
        carbonTable.getAbsoluteTableIdentifier, loadids.asJava, path).asScala

      if (invalidLoadIds.isEmpty) {
        LOGGER.audit(s"Delete segment by Id is successfull for $databaseName.$tableName.")
      }
      else {
        sys.error("Delete segment by Id is failed. Invalid ID is:" +
                  s" ${ invalidLoadIds.mkString(",") }")
      }
    } catch {
      case ex: Exception =>
        sys.error(ex.getMessage)
    }

    Seq.empty
  }

  // validates load ids
  private def validateLoadIds: Unit = {
    if (loadids.isEmpty) {
      val errorMessage = "Error: Segment id(s) should not be empty."
      throw new MalformedCarbonCommandException(errorMessage)
    }
  }
}
/** Command deleting all segments loaded before a given date. */
case class DeleteLoadsByLoadDate(
    databaseNameOp: Option[String],
    tableName: String,
    dateField: String,
    loadDate: String) {

  val LOGGER = LogServiceFactory.getLogService("org.apache.spark.sql.TableModel.tableSchema")

  def run(sparkSession: SparkSession): Seq[Row] = {
    LOGGER.audit("The delete segment by load date request has been received.")
    val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    val identifier = TableIdentifier(tableName, Option(dbName))
    val relation = CarbonEnv.get.carbonMetastore
      .lookupRelation(identifier, None)(sparkSession).asInstanceOf[CarbonRelation]
    if (relation == null) {
      LOGGER
        .audit(s"Delete segment by load date is failed. Table $dbName.$tableName does not " +
               s"exist")
      sys.error(s"Table $dbName.$tableName does not exist")
    }

    // Validate the user-supplied date by casting it to a timestamp.
    val timeObj = Cast(Literal(loadDate), TimestampType).eval()
    if (null == timeObj) {
      val errorMessage = "Error: Invalid load start time format " + loadDate
      throw new MalformedCarbonCommandException(errorMessage)
    }

    val carbonTable = org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance()
      .getCarbonTable(dbName + '_' + tableName)

    if (null == carbonTable) {
      // NOTE(review): this lookup result is never used and carbonTable remains null,
      // so getMetaDataFilepath below would NPE — presumably the lookup refreshes the
      // metadata cache as a side effect; confirm.
      var relation = CarbonEnv.get.carbonMetastore
        .lookupRelation(identifier, None)(sparkSession).asInstanceOf[CarbonRelation]
    }
    val path = carbonTable.getMetaDataFilepath()

    try {
      // Mark all segments older than the given timestamp as deleted.
      val invalidLoadTimestamps = SegmentStatusManager.updateDeletionStatus(
        carbonTable.getAbsoluteTableIdentifier, loadDate, path,
        timeObj.asInstanceOf[java.lang.Long]).asScala
      if (invalidLoadTimestamps.isEmpty) {
        LOGGER.audit(s"Delete segment by date is successfull for $dbName.$tableName.")
      }
      else {
        sys.error("Delete segment by date is failed. No matching segment found.")
      }
    } catch {
      case ex: Exception =>
        sys.error(ex.getMessage)
    }
    Seq.empty
  }
}
/** Handles INSERT INTO a carbon table by delegating to LoadTable with a DataFrame. */
case class LoadTableByInsert(relation: CarbonDatasourceHadoopRelation, child: LogicalPlan) {
  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
  def run(sparkSession: SparkSession): Seq[Row] = {
    val df = Dataset.ofRows(sparkSession, child)
    // NOTE(review): tableSchema.get assumes the relation always carries a schema;
    // a missing schema would fail with NoSuchElementException — confirm invariant.
    val header = relation.tableSchema.get.fields.map(_.name).mkString(",")
    // Reuse the file-load path with an in-memory DataFrame instead of a fact file.
    val load = LoadTable(
      Some(relation.carbonRelation.databaseName),
      relation.carbonRelation.tableName,
      null,
      Seq(),
      scala.collection.immutable.Map(("fileheader" -> header)),
      false,
      null,
      Some(df)).run(sparkSession)
    // updating relation metadata. This is in case of auto detect high cardinality
    relation.carbonRelation.metaData =
      CarbonSparkUtil.createSparkMeta(relation.carbonRelation.tableMeta.carbonTable)
    load
  }
}
/**
 * Command performing a data load (from files or a DataFrame) into a carbon table.
 *
 * Acquires the table metadata lock, builds a CarbonLoadModel from the load
 * options, generates the global dictionary, and hands off to
 * CarbonDataRDDFactory.loadCarbonData. Partition temp files are cleaned up and
 * the lock released in all cases.
 */
case class LoadTable(
    databaseNameOp: Option[String],
    tableName: String,
    factPathFromUser: String,
    dimFilesPath: Seq[DataLoadTableFileMapping],
    options: scala.collection.immutable.Map[String, String],
    isOverwriteExist: Boolean = false,
    var inputSqlString: String = null,
    dataFrame: Option[DataFrame] = None) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  def run(sparkSession: SparkSession): Seq[Row] = {
    // Short-circuit when an empty DataFrame was supplied.
    if (dataFrame.isDefined) {
      val rdd = dataFrame.get.rdd
      if (rdd.partitions == null || rdd.partitions.length == 0) {
        LOGGER.warn("DataLoading finished. No data was loaded.")
        return Seq.empty
      }
    }

    val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    val identifier = TableIdentifier(tableName, Option(dbName))
    if (isOverwriteExist) {
      sys.error(s"Overwrite is not supported for carbon table with $dbName.$tableName")
    }
    if (null == CarbonMetadata.getInstance.getCarbonTable(dbName + "_" + tableName)) {
      LOGGER.error(s"Data loading failed. table not found: $dbName.$tableName")
      LOGGER.audit(s"Data loading failed. table not found: $dbName.$tableName")
      sys.error(s"Data loading failed. table not found: $dbName.$tableName")
    }

    val relation = CarbonEnv.get.carbonMetastore
      .lookupRelation(Option(dbName), tableName)(sparkSession).asInstanceOf[CarbonRelation]
    if (relation == null) {
      sys.error(s"Table $dbName.$tableName does not exist")
    }
    CarbonProperties.getInstance().addProperty("zookeeper.enable.lock", "false")
    // Take the table metadata lock so concurrent loads/updates are serialized.
    val carbonLock = CarbonLockFactory
      .getCarbonLockObj(relation.tableMeta.carbonTable.getAbsoluteTableIdentifier
        .getCarbonTableIdentifier,
        LockUsage.METADATA_LOCK
      )
    try {
      if (carbonLock.lockWithRetries()) {
        LOGGER.info("Successfully able to get the table metadata file lock")
      } else {
        sys.error("Table is locked for updation. Please try after some time")
      }

      // Fact path is irrelevant when loading from a DataFrame.
      val factPath = if (dataFrame.isDefined) {
        ""
      } else {
        FileUtils.getPaths(
          CarbonUtil.checkAndAppendHDFSUrl(factPathFromUser))
      }
      val carbonLoadModel = new CarbonLoadModel()
      carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getTableName)
      carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
      carbonLoadModel.setStorePath(relation.tableMeta.storePath)
      if (dimFilesPath.isEmpty) {
        carbonLoadModel.setDimFolderPath(null)
      } else {
        val x = dimFilesPath.map(f => f.table + ":" + CarbonUtil.checkAndAppendHDFSUrl(f.loadPath))
        carbonLoadModel.setDimFolderPath(x.mkString(","))
      }

      val table = relation.tableMeta.carbonTable
      carbonLoadModel.setAggTables(table.getAggregateTablesName.asScala.toArray)
      carbonLoadModel.setTableName(table.getFactTableName)
      val dataLoadSchema = new CarbonDataLoadSchema(table)
      // Need to fill dimension relation
      carbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema)
      var partitionLocation = relation.tableMeta.storePath + "/partition/" +
                              relation.tableMeta.carbonTableIdentifier.getDatabaseName + "/" +
                              relation.tableMeta.carbonTableIdentifier.getTableName + "/"

      val columnar = sparkSession.conf.get("carbon.is.columnar.storage", "true").toBoolean
      val kettleHomePath = CarbonScalaUtil.getKettleHome(sparkSession.sqlContext)

      // TODO It will be removed after kettle is removed.
      // Resolution order: load option -> system property -> spark conf default.
      val useKettle = options.get("use_kettle") match {
        case Some(value) => value.toBoolean
        case _ =>
          val useKettleLocal = System.getProperty("use.kettle")
          if (useKettleLocal == null) {
            sparkSession.sqlContext.sparkContext.getConf.get("use_kettle_default", "true").toBoolean
          } else {
            useKettleLocal.toBoolean
          }
      }

      // CSV parsing and bad-record handling options, with their defaults.
      val delimiter = options.getOrElse("delimiter", ",")
      val quoteChar = options.getOrElse("quotechar", "\\"")
      val fileHeader = options.getOrElse("fileheader", "")
      val escapeChar = options.getOrElse("escapechar", "\\\\")
      val commentchar = options.getOrElse("commentchar", "#")
      val columnDict = options.getOrElse("columndict", null)
      val serializationNullFormat = options.getOrElse("serialization_null_format", "\\\\N")
      val badRecordsLoggerEnable = options.getOrElse("bad_records_logger_enable", "false")
      val badRecordsLoggerRedirect = options.getOrElse("bad_records_action", "force")
      val allDictionaryPath = options.getOrElse("all_dictionary_path", "")
      val complex_delimiter_level_1 = options.getOrElse("complex_delimiter_level_1", "\\\\$")
      val complex_delimiter_level_2 = options.getOrElse("complex_delimiter_level_2", "\\\\:")
      val dateFormat = options.getOrElse("dateformat", null)
      validateDateFormat(dateFormat, table)
      // NOTE(review): multiLine is validated here but never passed to the load
      // model in this method — confirm whether it is intentionally unused.
      val multiLine = options.getOrElse("multiline", "false").trim.toLowerCase match {
        case "true" => true
        case "false" => false
        case illegal =>
          val errorMessage = "Illegal syntax found: [" + illegal + "] .The value multiline in " +
                             "load DDL which you set can only be 'true' or 'false', please check " +
                             "your input DDL."
          throw new MalformedCarbonCommandException(errorMessage)
      }
      val maxColumns = options.getOrElse("maxcolumns", null)
      carbonLoadModel.setMaxColumns(maxColumns)
      carbonLoadModel.setEscapeChar(escapeChar)
      carbonLoadModel.setQuoteChar(quoteChar)
      carbonLoadModel.setCommentChar(commentchar)
      carbonLoadModel.setDateFormat(dateFormat)
      carbonLoadModel
        .setSerializationNullFormat(
          TableOptionConstant.SERIALIZATION_NULL_FORMAT.getName + "," + serializationNullFormat)
      carbonLoadModel
        .setBadRecordsLoggerEnable(
          TableOptionConstant.BAD_RECORDS_LOGGER_ENABLE.getName + "," + badRecordsLoggerEnable)
      carbonLoadModel
        .setBadRecordsAction(
          TableOptionConstant.BAD_RECORDS_ACTION.getName + "," + badRecordsLoggerRedirect)

      // Field and complex-type delimiters must all be distinct.
      if (delimiter.equalsIgnoreCase(complex_delimiter_level_1) ||
          complex_delimiter_level_1.equalsIgnoreCase(complex_delimiter_level_2) ||
          delimiter.equalsIgnoreCase(complex_delimiter_level_2)) {
        sys.error(s"Field Delimiter & Complex types delimiter are same")
      }
      else {
        carbonLoadModel.setComplexDelimiterLevel1(
          CarbonUtil.delimiterConverter(complex_delimiter_level_1))
        carbonLoadModel.setComplexDelimiterLevel2(
          CarbonUtil.delimiterConverter(complex_delimiter_level_2))
      }
      // set local dictionary path, and dictionary file extension
      carbonLoadModel.setAllDictPath(allDictionaryPath)

      var partitionStatus = CarbonCommonConstants.STORE_LOADSTATUS_SUCCESS
      try {
        // First system has to partition the data first and then call the load data
        LOGGER.info(s"Initiating Direct Load for the Table : ($dbName.$tableName)")
        carbonLoadModel.setFactFilePath(factPath)
        carbonLoadModel.setCsvDelimiter(CarbonUtil.unescapeChar(delimiter))
        carbonLoadModel.setCsvHeader(fileHeader)
        carbonLoadModel.setColDictFilePath(columnDict)
        carbonLoadModel.setDirectLoad(true)
        // Build the global dictionary before loading the actual data.
        GlobalDictionaryUtil
          .generateGlobalDictionary(
            sparkSession.sqlContext, carbonLoadModel, relation.tableMeta.storePath, dataFrame)
        CarbonDataRDDFactory.loadCarbonData(sparkSession.sqlContext,
          carbonLoadModel,
          relation.tableMeta.storePath,
          kettleHomePath,
          columnar,
          partitionStatus,
          useKettle,
          dataFrame)
      } catch {
        case ex: Exception =>
          LOGGER.error(ex)
          LOGGER.audit(s"Dataload failure for $dbName.$tableName. Please check the logs")
          throw ex
      } finally {
        // Once the data load is successful delete the unwanted partition files
        try {
          val fileType = FileFactory.getFileType(partitionLocation)
          if (FileFactory.isFileExist(partitionLocation, fileType)) {
            val file = FileFactory
              .getCarbonFile(partitionLocation, fileType)
            CarbonUtil.deleteFoldersAndFiles(file)
          }
        } catch {
          case ex: Exception =>
            LOGGER.error(ex)
            LOGGER.audit(s"Dataload failure for $dbName.$tableName. " +
                         "Problem deleting the partition folder")
            throw ex
        }
      }
    } catch {
      case dle: DataLoadingException =>
        LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + dle.getMessage)
        throw dle
      case mce: MalformedCarbonCommandException =>
        LOGGER.audit(s"Dataload failed for $dbName.$tableName. " + mce.getMessage)
        throw mce
    } finally {
      // Always release the metadata lock, whether or not the load succeeded.
      if (carbonLock != null) {
        if (carbonLock.unlock()) {
          LOGGER.info("Table MetaData Unlocked Successfully after data load")
        } else {
          LOGGER.error("Unable to unlock Table MetaData")
        }
      }
    }
    Seq.empty
  }

  /**
   * Validates the "dateformat" option: a comma-separated list of
   * "columnName:format" pairs where each column must be an existing dimension
   * and each format must be non-empty.
   */
  private def validateDateFormat(dateFormat: String, table: CarbonTable): Unit = {
    val dimensions = table.getDimensionByTableName(tableName).asScala
    if (dateFormat != null) {
      if (dateFormat.trim == "") {
        throw new MalformedCarbonCommandException("Error: Option DateFormat is set an empty " +
                                                  "string.")
      } else {
        var dateFormats: Array[String] = dateFormat.split(CarbonCommonConstants.COMMA)
        for (singleDateFormat <- dateFormats) {
          val dateFormatSplits: Array[String] = singleDateFormat.split(":", 2)
          val columnName = dateFormatSplits(0).trim.toLowerCase
          if (!dimensions.exists(_.getColName.equals(columnName))) {
            throw new MalformedCarbonCommandException("Error: Wrong Column Name " +
                                                      dateFormatSplits(0) +
                                                      " is provided in Option DateFormat.")
          }
          if (dateFormatSplits.length < 2 || dateFormatSplits(1).trim.isEmpty) {
            throw new MalformedCarbonCommandException("Error: Option DateFormat is not provided " +
                                                      "for " + "Column " + dateFormatSplits(0) +
                                                      ".")
          }
        }
      }
    }
  }
}
/**
 * Command deleting loads older than a given date, keyed on a timestamp
 * dimension of the table.
 */
private[sql] case class DeleteLoadByDate(
    databaseNameOp: Option[String],
    tableName: String,
    dateField: String,
    dateValue: String) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  def run(sparkSession: SparkSession): Seq[Row] = {
    val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    LOGGER.audit(s"The delete load by date request has been received for $dbName.$tableName")
    val identifier = TableIdentifier(tableName, Option(dbName))
    val relation = CarbonEnv.get.carbonMetastore
      .lookupRelation(identifier)(sparkSession).asInstanceOf[CarbonRelation]
    var level: String = ""
    val carbonTable = org.apache.carbondata.core.carbon.metadata.CarbonMetadata
      .getInstance().getCarbonTable(dbName + '_' + tableName)
    if (relation == null) {
      LOGGER.audit(s"The delete load by date is failed. Table $dbName.$tableName does not exist")
      sys.error(s"Table $dbName.$tableName does not exist")
    }

    // The date field must exist as a timestamp-typed dimension on the table.
    val matches: Seq[AttributeReference] = relation.dimensionsAttr.filter(
      filter => filter.name.equalsIgnoreCase(dateField) &&
                filter.dataType.isInstanceOf[TimestampType]).toList
    if (matches.isEmpty) {
      LOGGER.audit("The delete load by date is failed. " +
                   s"Table $dbName.$tableName does not contain date field: $dateField")
      sys.error(s"Table $dbName.$tableName does not contain date field $dateField")
    } else {
      // matches is non-empty here; take the head directly instead of going
      // through a needless Java-list conversion.
      level = matches.head.name
    }

    val actualColName = relation.metaData.carbonTable.getDimensionByName(tableName, level)
      .getColName
    DataManagementFunc.deleteLoadByDate(
      sparkSession.sqlContext,
      new CarbonDataLoadSchema(carbonTable),
      dbName,
      tableName,
      CarbonEnv.get.carbonMetastore.storePath,
      level,
      actualColName,
      dateValue)
    LOGGER.audit(s"The delete load by date $dateValue is successful for $dbName.$tableName.")
    Seq.empty
  }
}
/** Command cleaning up stale/deleted segment files of a carbon table. */
case class CleanFiles(
    databaseNameOp: Option[String],
    tableName: String) {

  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)

  def run(sparkSession: SparkSession): Seq[Row] = {
    val dbName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    LOGGER.audit(s"The clean files request has been received for $dbName.$tableName")
    val identifier = TableIdentifier(tableName, Option(dbName))
    val relation = CarbonEnv.get.carbonMetastore
      .lookupRelation(identifier)(sparkSession).asInstanceOf[CarbonRelation]
    if (relation == null) {
      LOGGER.audit(s"The clean files request is failed. Table $dbName.$tableName does not exist")
      sys.error(s"Table $dbName.$tableName does not exist")
    }

    // Populate a load model describing the table so the cleaner can locate files.
    val carbonLoadModel = new CarbonLoadModel()
    carbonLoadModel.setTableName(relation.tableMeta.carbonTableIdentifier.getTableName)
    carbonLoadModel.setDatabaseName(relation.tableMeta.carbonTableIdentifier.getDatabaseName)
    val table = relation.tableMeta.carbonTable
    carbonLoadModel.setAggTables(table.getAggregateTablesName.asScala.toArray)
    carbonLoadModel.setTableName(table.getFactTableName)
    carbonLoadModel.setStorePath(relation.tableMeta.storePath)
    val dataLoadSchema = new CarbonDataLoadSchema(table)
    carbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema)

    try {
      DataManagementFunc.cleanFiles(
        sparkSession.sqlContext.sparkContext,
        carbonLoadModel,
        relation.tableMeta.storePath)
      LOGGER.audit(s"Clean files request is successfull for $dbName.$tableName.")
    } catch {
      case ex: Exception =>
        sys.error(ex.getMessage)
    }
    Seq.empty
  }
}
/** Command listing the segments (loads) of a carbon table, newest first. */
case class ShowLoads(
    databaseNameOp: Option[String],
    tableName: String,
    limit: Option[String],
    val output: Seq[Attribute]) {

  def run(sparkSession: SparkSession): Seq[Row] = {
    val databaseName = databaseNameOp.getOrElse(sparkSession.catalog.currentDatabase)
    val tableUniqueName = databaseName + "_" + tableName
    // Here using checkSchemasModifiedTimeAndReloadTables in tableExists to reload metadata if
    // schema is changed by other process, so that tableInfoMap woulb be refilled.
    val tableExists = CarbonEnv.get.carbonMetastore
      .tableExists(TableIdentifier(tableName, databaseNameOp))(sparkSession)
    if (!tableExists) {
      sys.error(s"$databaseName.$tableName is not found")
    }
    val carbonTable = org.apache.carbondata.core.carbon.metadata.CarbonMetadata.getInstance()
      .getCarbonTable(tableUniqueName)
    if (carbonTable == null) {
      sys.error(s"$databaseName.$tableName is not found")
    }
    val path = carbonTable.getMetaDataFilepath
    val loadMetadataDetailsArray = SegmentStatusManager.readLoadMetadata(path)
    if (loadMetadataDetailsArray.nonEmpty) {
      val parser = new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP)
      // Sort segments by numeric load name, newest (largest) first.
      var loadMetadataDetailsSortedArray = loadMetadataDetailsArray.sortWith(
        (l1, l2) => java.lang.Double.parseDouble(l1.getLoadName) > java.lang.Double
          .parseDouble(l2.getLoadName)
      )
      if (limit.isDefined) {
        // NOTE(review): the visibility filter is applied here and again below when
        // a limit is given — the second pass is redundant in that branch.
        loadMetadataDetailsSortedArray = loadMetadataDetailsSortedArray
          .filter(load => load.getVisibility.equalsIgnoreCase("true"))
        val limitLoads = limit.get
        try {
          val lim = Integer.parseInt(limitLoads)
          loadMetadataDetailsSortedArray = loadMetadataDetailsSortedArray.slice(0, lim)
        } catch {
          case ex: NumberFormatException => sys.error(s" Entered limit is not a valid Number")
        }
      }

      loadMetadataDetailsSortedArray.filter(load => load.getVisibility.equalsIgnoreCase("true"))
        .map(load =>
          Row(
            load.getLoadName,
            load.getLoadStatus,
            new java.sql.Timestamp(parser.parse(load.getLoadStartTime).getTime),
            new java.sql.Timestamp(parser.parse(load.getTimestamp).getTime))).toSeq
    } else {
      Seq.empty
    }
  }
}
| ashokblend/incubator-carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala | Scala | apache-2.0 | 27,702 |
package com.wordnik.client.api
import java.math.BigDecimal
import com.wordnik.client.model.Inline_response_200_23
import com.wordnik.client.model.VariableCategory
import com.wordnik.client.model.Inline_response_200_24
import com.wordnik.client.model.Inline_response_200_2
import java.io.File
import org.scalatra.{ TypedParamSupport, ScalatraServlet }
import org.scalatra.swagger._
import org.json4s._
import org.json4s.JsonDSL._
import org.scalatra.json.{ JValueResult, JacksonJsonSupport }
import org.scalatra.servlet.{FileUploadSupport, MultipartConfig, SizeConstraintExceededException}
import scala.collection.JavaConverters._
/**
 * Scalatra servlet exposing CRUD endpoints for [[VariableCategory]] resources.
 * Swagger operation metadata is declared alongside each route so the API is
 * self-documenting via [[SwaggerSupport]].
 */
class VariableCategoryApi (implicit val swagger: Swagger) extends ScalatraServlet
    with FileUploadSupport
    with JacksonJsonSupport
    with SwaggerSupport {

  // json4s formats used by JacksonJsonSupport to (de)serialise request/response bodies.
  protected implicit val jsonFormats: Formats = DefaultFormats

  protected val applicationDescription: String = "VariableCategoryApi"

  override protected val applicationName: Option[String] = Some("VariableCategory")

  before() {
    // Every response from this servlet is JSON, and browsers may call it cross-origin.
    contentType = formats("json")
    response.headers += ("Access-Control-Allow-Origin" -> "*")
  }

  val variableCategoriesGetOperation = (apiOperation[Inline_response_200_23]("variableCategoriesGet")
    summary "Get all VariableCategories"
    parameters(queryParam[String]("name").description("").optional,
    queryParam[BigDecimal]("fillingValue").description("").optional,
    queryParam[BigDecimal]("maximumAllowedValue").description("").optional,
    queryParam[BigDecimal]("minimumAllowedValue").description("").optional,
    queryParam[Int]("durationOfAction").description("").optional,
    queryParam[Int]("onsetDelay").description("").optional,
    queryParam[String]("combinationOperation").description("").optional,
    queryParam[Int]("updated").description("").optional,
    queryParam[Boolean]("causeOnly").description("").optional,
    queryParam[Int]("_public").description("").optional,
    queryParam[Boolean]("outcome").description("").optional,
    queryParam[String]("createdAt").description("").optional,
    queryParam[String]("updatedAt").description("").optional,
    queryParam[String]("imageUrl").description("").optional,
    queryParam[Int]("defaultUnitId").description("").optional,
    queryParam[Int]("limit").description("").optional,
    queryParam[Int]("offset").description("").optional,
    queryParam[String]("sort").description("").optional)
    )

  // Lists VariableCategories; all filter parameters are optional query params.
  get("/variableCategories", operation(variableCategoriesGetOperation)) {
    val name = params.getAs[String]("name")
    println("name: " + name)
    val fillingValue = params.getAs[BigDecimal]("fillingValue")
    println("fillingValue: " + fillingValue)
    val maximumAllowedValue = params.getAs[BigDecimal]("maximumAllowedValue")
    println("maximumAllowedValue: " + maximumAllowedValue)
    val minimumAllowedValue = params.getAs[BigDecimal]("minimumAllowedValue")
    println("minimumAllowedValue: " + minimumAllowedValue)
    val durationOfAction = params.getAs[Int]("durationOfAction")
    println("durationOfAction: " + durationOfAction)
    val onsetDelay = params.getAs[Int]("onsetDelay")
    println("onsetDelay: " + onsetDelay)
    val combinationOperation = params.getAs[String]("combinationOperation")
    println("combinationOperation: " + combinationOperation)
    val updated = params.getAs[Int]("updated")
    println("updated: " + updated)
    val causeOnly = params.getAs[Boolean]("causeOnly")
    println("causeOnly: " + causeOnly)
    val _public = params.getAs[Int]("_public")
    println("_public: " + _public)
    val outcome = params.getAs[Boolean]("outcome")
    println("outcome: " + outcome)
    val createdAt = params.getAs[String]("createdAt")
    println("createdAt: " + createdAt)
    val updatedAt = params.getAs[String]("updatedAt")
    println("updatedAt: " + updatedAt)
    val imageUrl = params.getAs[String]("imageUrl")
    println("imageUrl: " + imageUrl)
    val defaultUnitId = params.getAs[Int]("defaultUnitId")
    println("defaultUnitId: " + defaultUnitId)
    val limit = params.getAs[Int]("limit")
    println("limit: " + limit)
    val offset = params.getAs[Int]("offset")
    println("offset: " + offset)
    val sort = params.getAs[String]("sort")
    println("sort: " + sort)
  }

  val variableCategoriesPostOperation = (apiOperation[Inline_response_200_24]("variableCategoriesPost")
    summary "Store VariableCategory"
    parameters(bodyParam[VariableCategory]("body").description("").optional)
    )

  post("/variableCategories", operation(variableCategoriesPostOperation)) {
    // FIX: the generated code invoked the swagger `bodyParam(...)` DSL here (which is
    // documentation metadata, not a body reader) and then referenced an undefined `body`.
    // Deserialise the JSON request body via JacksonJsonSupport instead.
    val body = parsedBody.extract[VariableCategory]
    println("body: " + body)
  }

  val variableCategoriesIdGetOperation = (apiOperation[Inline_response_200_24]("variableCategoriesIdGet")
    summary "Get VariableCategory"
    parameters(pathParam[Int]("id").description(""))
    )

  // FIX: Scalatra route patterns use ":id"; the generated "{id}" only matched the
  // literal path "/variableCategories/{id}", so the halt(400) always fired.
  get("/variableCategories/:id", operation(variableCategoriesIdGetOperation)) {
    val id = params.getOrElse("id", halt(400))
    println("id: " + id)
  }

  val variableCategoriesIdPutOperation = (apiOperation[Inline_response_200_2]("variableCategoriesIdPut")
    summary "Update VariableCategory"
    parameters(pathParam[Int]("id").description(""),
    bodyParam[VariableCategory]("body").description("").optional)
    )

  put("/variableCategories/:id", operation(variableCategoriesIdPutOperation)) {
    val id = params.getOrElse("id", halt(400))
    println("id: " + id)
    // Same fix as the POST route: read the body through parsedBody.
    val body = parsedBody.extract[VariableCategory]
    println("body: " + body)
  }

  val variableCategoriesIdDeleteOperation = (apiOperation[Inline_response_200_2]("variableCategoriesIdDelete")
    summary "Delete VariableCategory"
    parameters(pathParam[Int]("id").description(""))
    )

  delete("/variableCategories/:id", operation(variableCategoriesIdDeleteOperation)) {
    val id = params.getOrElse("id", halt(400))
    println("id: " + id)
  }
} | QuantiModo/QuantiModo-SDK-Scalatra | src/main/scala/com/wordnik/client/api/VariableCategoryApi.scala | Scala | gpl-2.0 | 7,111 |
/*
* Copyright (c) 2016. Fengguo (Hugo) Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.cit.intellij.jawa.lang.psi.api.expr
import com.intellij.psi.PsiExpression
import org.argus.cit.intellij.jawa.lang.psi.JawaPsiElement
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
*/
// Marker trait with no members of its own: the contract is the union of the two parents,
// making a Jawa PSI node usable wherever the platform expects a PsiExpression.
trait JawaExpression extends JawaPsiElement with PsiExpression
| arguslab/argus-cit-intellij | src/main/scala/org/argus/cit/intellij/jawa/lang/psi/api/expr/JawaExpression.scala | Scala | epl-1.0 | 663 |
package com.equalinformation.interview.algo.scala.string_and_array
import java.util
/**
* Created by bpupadhyaya on 6/12/16.
*/
/** Tiny manual driver: evaluates (2 + 1) * 3 in postfix form and prints the result. */
object ReversePolishNotationEvaluation_NaiveTest extends App {
  val solver = new ReversePolishNotationEvaluation_Naive
  val expression = Array("2", "1", "+", "3", "*")
  println("Result: " + solver.evalRPN(expression))
}
/**
 * Evaluator for arithmetic expressions in Reverse Polish (postfix) notation.
 */
class ReversePolishNotationEvaluation_Naive {

  /**
   * Evaluates a postfix expression.
   *
   * Each token is either an integer literal or one of "+", "-", "*", "/".
   * Binary operators apply as `second-popped op first-popped`, and "/" uses
   * integer division (an ArithmeticException propagates on division by zero).
   *
   * Improvements over the original: the operator check is an exact match
   * (the substring test `"+-*/".contains(t)` would misclassify a token such
   * as "+-"), values stay as Int instead of being re-parsed from strings,
   * and the mutable java.util.Stack/var pair is replaced by a fold.
   *
   * @param tokens the expression in postfix order; must be well formed
   * @return the value of the expression
   * @throws IllegalArgumentException if an operator lacks two operands
   * @throws NumberFormatException if a non-operator token is not an integer
   */
  def evalRPN(tokens: Array[String]): Int = {
    val operators = Set("+", "-", "*", "/")
    val finalStack = tokens.foldLeft(List.empty[Int]) { (stack, token) =>
      if (operators.contains(token)) {
        stack match {
          case a :: b :: rest =>
            val result = token match {
              case "+" => b + a
              case "-" => b - a
              case "*" => b * a
              case "/" => b / a
            }
            result :: rest
          case _ =>
            throw new IllegalArgumentException(s"Operator '$token' lacks two operands")
        }
      } else {
        // Operand: push its integer value.
        token.toInt :: stack
      }
    }
    finalStack.head
  }
}
| bpupadhyaya/interview-collections | interview-collections-algo/src/main/scala/com/equalinformation/interview/algo/scala/string_and_array/2-A-1-ReversePolishNotationEvaluation_Naive.scala | Scala | mit | 1,052 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.param
import java.lang.reflect.Modifier
import java.util.{List => JList}
import java.util.NoSuchElementException
import scala.annotation.varargs
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.SparkException
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.ml.linalg.{JsonMatrixConverter, JsonVectorConverter, Matrix, Vector}
import org.apache.spark.ml.util.Identifiable
/**
* :: DeveloperApi ::
* A param with self-contained documentation and optionally default value. Primitive-typed param
* should use the specialized versions, which are more friendly to Java users.
*
* @param parent parent object
* @param name param name
* @param doc documentation
* @param isValid optional validation method which indicates if a value is valid.
* See [[ParamValidators]] for factory methods for common validation functions.
* @tparam T param value type
*/
@DeveloperApi
class Param[T](val parent: String, val name: String, val doc: String, val isValid: T => Boolean)
  extends Serializable {

  // Auxiliary constructor: Identifiable parent with a validator.
  def this(parent: Identifiable, name: String, doc: String, isValid: T => Boolean) =
    this(parent.uid, name, doc, isValid)

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue[T])

  // Auxiliary constructor: Identifiable parent, no validation.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /**
   * Assert that the given value is valid for this parameter.
   *
   * Note: Parameter checks involving interactions between multiple parameters and input/output
   * columns should be implemented in [[org.apache.spark.ml.PipelineStage.transformSchema()]].
   *
   * DEVELOPERS: This method is only called by [[ParamPair]], which means that all parameters
   * should be specified via [[ParamPair]].
   *
   * @throws IllegalArgumentException if the value is invalid
   */
  private[param] def validate(value: T): Unit = {
    if (!isValid(value)) {
      // Arrays are rendered element-wise; their default toString is not human-readable.
      val valueToString = value match {
        case v: Array[_] => v.mkString("[", ",", "]")
        case _ => value.toString
      }
      throw new IllegalArgumentException(
        s"$parent parameter $name given invalid value $valueToString.")
    }
  }

  /** Creates a param pair with the given value (for Java). */
  def w(value: T): ParamPair[T] = this -> value

  /** Creates a param pair with the given value (for Scala). */
  // scalastyle:off
  def ->(value: T): ParamPair[T] = ParamPair(this, value)
  // scalastyle:on

  /** Encodes a param value into JSON, which can be decoded by `jsonDecode()`. */
  def jsonEncode(value: T): String = {
    value match {
      case x: String =>
        compact(render(JString(x)))
      case v: Vector =>
        JsonVectorConverter.toJson(v)
      case m: Matrix =>
        JsonMatrixConverter.toJson(m)
      case _ =>
        // Specialized subclasses (DoubleParam, IntParam, ...) override for other types.
        throw new NotImplementedError(
          "The default jsonEncode only supports string, vector and matrix. " +
            s"${this.getClass.getName} must override jsonEncode for ${value.getClass.getName}.")
    }
  }

  /** Decodes a param value from JSON. */
  def jsonDecode(json: String): T = Param.jsonDecode[T](json)

  // Cached identity string "<parentUid>__<name>"; backs toString and hashCode below.
  private[this] val stringRepresentation = s"${parent}__$name"

  override final def toString: String = stringRepresentation

  override final def hashCode: Int = toString.##

  // Two Params are equal iff parent uid and name match; doc and validator are
  // deliberately ignored, consistent with hashCode above.
  override final def equals(obj: Any): Boolean = {
    obj match {
      case p: Param[_] => (p.parent == parent) && (p.name == name)
      case _ => false
    }
  }
}
private[ml] object Param {

  /**
   * Decodes a param value from JSON.
   *
   * Supports plain strings, and JSON objects holding a serialized Vector or Matrix:
   * objects carrying a "class" field dispatch on that class name (Matrix), while
   * objects without one are treated as the pre-2.3 Vector format.
   */
  def jsonDecode[T](json: String): T = {
    val jValue = parse(json)
    jValue match {
      case JString(x) =>
        x.asInstanceOf[T]
      case JObject(v) =>
        val keys = v.map(_._1)
        if (keys.contains("class")) {
          implicit val formats = DefaultFormats
          // NOTE(review): `\\` is json4s' *recursive* lookup and would also match a
          // nested "class" field; the direct-child operator `\` looks intended — confirm.
          val className = (jValue \\ "class").extract[String]
          className match {
            case JsonMatrixConverter.className =>
              // Sanity-check the expected Matrix fields before deserializing.
              val checkFields = Array("numRows", "numCols", "values", "isTransposed", "type")
              require(checkFields.forall(keys.contains), s"Expect a JSON serialized Matrix" +
                s" but cannot find fields ${checkFields.mkString(", ")} in $json.")
              JsonMatrixConverter.fromJson(json).asInstanceOf[T]
            case s => throw new SparkException(s"unrecognized class $s in $json")
          }
        } else {
          // "class" info in JSON was added in Spark 2.3(SPARK-22289). JSON support for Vector was
          // implemented before that and does not have "class" attribute.
          require(keys.contains("type") && keys.contains("values"), s"Expect a JSON serialized" +
            s" vector/matrix but cannot find fields 'type' and 'values' in $json.")
          JsonVectorConverter.fromJson(json).asInstanceOf[T]
        }
      case _ =>
        throw new NotImplementedError(
          "The default jsonDecode only supports string, vector and matrix. " +
            s"${this.getClass.getName} must override jsonDecode to support its value type.")
    }
  }
}
/**
* :: DeveloperApi ::
* Factory methods for common validation functions for `Param.isValid`.
* The numerical methods only support Int, Long, Float, and Double.
*/
@DeveloperApi
object ParamValidators {

  /** (private[param]) Default validation: accepts every value. */
  private[param] def alwaysTrue[T]: T => Boolean = _ => true

  /**
   * Widens a numeric param value to Double for comparison.
   * Type safety is enforced upstream by [[Params]] setters and the [[ParamPair]]
   * constructor; a non-numeric value reaching this point is a programming error.
   */
  private def getDouble[T](value: T): Double = value match {
    case x: Int => x.toDouble
    case x: Long => x.toDouble
    case x: Float => x.toDouble
    case x: Double => x.toDouble
    case other =>
      throw new IllegalArgumentException("Numerical Param validation failed because" +
        s" of unexpected input type: ${other.getClass}")
  }

  /** Check if value is greater than lowerBound. */
  def gt[T](lowerBound: Double): T => Boolean =
    value => getDouble(value) > lowerBound

  /** Check if value is greater than or equal to lowerBound. */
  def gtEq[T](lowerBound: Double): T => Boolean =
    value => getDouble(value) >= lowerBound

  /** Check if value is less than upperBound. */
  def lt[T](upperBound: Double): T => Boolean =
    value => getDouble(value) < upperBound

  /** Check if value is less than or equal to upperBound. */
  def ltEq[T](upperBound: Double): T => Boolean =
    value => getDouble(value) <= upperBound

  /**
   * Check for value in range lowerBound to upperBound.
   *
   * @param lowerInclusive if true, range includes value = lowerBound
   * @param upperInclusive if true, range includes value = upperBound
   */
  def inRange[T](
      lowerBound: Double,
      upperBound: Double,
      lowerInclusive: Boolean,
      upperInclusive: Boolean): T => Boolean = { value =>
    val x = getDouble(value)
    val aboveLower = if (lowerInclusive) x >= lowerBound else x > lowerBound
    val belowUpper = if (upperInclusive) x <= upperBound else x < upperBound
    aboveLower && belowUpper
  }

  /** Version of `inRange()` which uses inclusive bounds by default: [lowerBound, upperBound]. */
  def inRange[T](lowerBound: Double, upperBound: Double): T => Boolean =
    inRange[T](lowerBound, upperBound, lowerInclusive = true, upperInclusive = true)

  /** Check for value in an allowed set of values. */
  def inArray[T](allowed: Array[T]): T => Boolean =
    value => allowed.contains(value)

  /** Check for value in an allowed set of values (Java-friendly overload). */
  def inArray[T](allowed: java.util.List[T]): T => Boolean =
    value => allowed.contains(value)

  /** Check that the array length is greater than lowerBound. */
  def arrayLengthGt[T](lowerBound: Double): Array[T] => Boolean =
    arr => arr.length > lowerBound
}
// specialize primitive-typed params because Java doesn't recognize scala.Double, scala.Int, ...
/**
* :: DeveloperApi ::
* Specialized version of `Param[Double]` for Java.
*/
@DeveloperApi
class DoubleParam(parent: String, name: String, doc: String, isValid: Double => Boolean)
  extends Param[Double](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  // Auxiliary constructor: Identifiable parent with a validator.
  def this(parent: Identifiable, name: String, doc: String, isValid: Double => Boolean) =
    this(parent.uid, name, doc, isValid)

  // Auxiliary constructor: Identifiable parent, no validation.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /** Creates a param pair with the given value (for Java). */
  override def w(value: Double): ParamPair[Double] = super.w(value)

  /** Encodes the value as JSON; NaN/±Infinity become strings (see DoubleParam.jValueEncode). */
  override def jsonEncode(value: Double): String = {
    compact(render(DoubleParam.jValueEncode(value)))
  }

  /** Decodes a Double from JSON produced by `jsonEncode`, restoring special values. */
  override def jsonDecode(json: String): Double = {
    DoubleParam.jValueDecode(parse(json))
  }
}
private[param] object DoubleParam {

  /**
   * Encodes a Double as a JValue. JSON has no literals for NaN or the infinities,
   * so those are stored as the strings "NaN", "-Inf" and "Inf"; finite values
   * become a plain JDouble.
   */
  def jValueEncode(value: Double): JValue =
    if (value.isNaN) {
      JString("NaN")
    } else if (value == Double.NegativeInfinity) {
      JString("-Inf")
    } else if (value == Double.PositiveInfinity) {
      JString("Inf")
    } else {
      JDouble(value)
    }

  /** Decodes a Double from a JValue produced by `jValueEncode`, restoring special values. */
  def jValueDecode(jValue: JValue): Double = jValue match {
    case JString("NaN") => Double.NaN
    case JString("-Inf") => Double.NegativeInfinity
    case JString("Inf") => Double.PositiveInfinity
    case JDouble(x) => x
    case other =>
      throw new IllegalArgumentException(s"Cannot decode $other to Double.")
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Int]` for Java.
*/
@DeveloperApi
class IntParam(parent: String, name: String, doc: String, isValid: Int => Boolean)
  extends Param[Int](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  // Auxiliary constructor: Identifiable parent with a validator.
  def this(parent: Identifiable, name: String, doc: String, isValid: Int => Boolean) =
    this(parent.uid, name, doc, isValid)

  // Auxiliary constructor: Identifiable parent, no validation.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /** Creates a param pair with the given value (for Java). */
  override def w(value: Int): ParamPair[Int] = super.w(value)

  /** Encodes the value as a JSON integer. */
  override def jsonEncode(value: Int): String = {
    compact(render(JInt(value)))
  }

  /** Decodes an Int from JSON produced by `jsonEncode`. */
  override def jsonDecode(json: String): Int = {
    implicit val formats = DefaultFormats
    parse(json).extract[Int]
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Float]` for Java.
*/
@DeveloperApi
class FloatParam(parent: String, name: String, doc: String, isValid: Float => Boolean)
  extends Param[Float](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  // Auxiliary constructor: Identifiable parent with a validator.
  def this(parent: Identifiable, name: String, doc: String, isValid: Float => Boolean) =
    this(parent.uid, name, doc, isValid)

  // Auxiliary constructor: Identifiable parent, no validation.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /** Creates a param pair with the given value (for Java). */
  override def w(value: Float): ParamPair[Float] = super.w(value)

  /** Encodes as JSON via FloatParam.jValueEncode (finite floats are widened to JDouble). */
  override def jsonEncode(value: Float): String = {
    compact(render(FloatParam.jValueEncode(value)))
  }

  /** Decodes a Float from JSON produced by `jsonEncode`, restoring special values. */
  override def jsonDecode(json: String): Float = {
    FloatParam.jValueDecode(parse(json))
  }
}
private object FloatParam {

  /**
   * Encodes a Float as a JValue. NaN and the infinities have no JSON literal, so
   * they are stored as the strings "NaN", "-Inf" and "Inf"; finite values widen
   * to a JDouble.
   */
  def jValueEncode(value: Float): JValue =
    if (value.isNaN) {
      JString("NaN")
    } else if (value == Float.NegativeInfinity) {
      JString("-Inf")
    } else if (value == Float.PositiveInfinity) {
      JString("Inf")
    } else {
      JDouble(value)
    }

  /** Decodes a Float from a JValue produced by `jValueEncode`, restoring special values. */
  def jValueDecode(jValue: JValue): Float = jValue match {
    case JString("NaN") => Float.NaN
    case JString("-Inf") => Float.NegativeInfinity
    case JString("Inf") => Float.PositiveInfinity
    case JDouble(x) => x.toFloat
    case other =>
      throw new IllegalArgumentException(s"Cannot decode $other to Float.")
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Long]` for Java.
*/
@DeveloperApi
class LongParam(parent: String, name: String, doc: String, isValid: Long => Boolean)
  extends Param[Long](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: String, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  // Auxiliary constructor: Identifiable parent with a validator.
  def this(parent: Identifiable, name: String, doc: String, isValid: Long => Boolean) =
    this(parent.uid, name, doc, isValid)

  // Auxiliary constructor: Identifiable parent, no validation.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /** Creates a param pair with the given value (for Java). */
  override def w(value: Long): ParamPair[Long] = super.w(value)

  /** Encodes the value as a JSON integer. */
  override def jsonEncode(value: Long): String = {
    compact(render(JInt(value)))
  }

  /** Decodes a Long from JSON produced by `jsonEncode`. */
  override def jsonDecode(json: String): Long = {
    implicit val formats = DefaultFormats
    parse(json).extract[Long]
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Boolean]` for Java.
*/
@DeveloperApi
class BooleanParam(parent: String, name: String, doc: String) // No need for isValid
  extends Param[Boolean](parent, name, doc) {

  // Auxiliary constructor: Identifiable parent.
  def this(parent: Identifiable, name: String, doc: String) = this(parent.uid, name, doc)

  /** Creates a param pair with the given value (for Java). */
  override def w(value: Boolean): ParamPair[Boolean] = super.w(value)

  /** Encodes the value as a JSON boolean. */
  override def jsonEncode(value: Boolean): String = {
    compact(render(JBool(value)))
  }

  /** Decodes a Boolean from JSON produced by `jsonEncode`. */
  override def jsonDecode(json: String): Boolean = {
    implicit val formats = DefaultFormats
    parse(json).extract[Boolean]
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Array[String]]` for Java.
*/
@DeveloperApi
class StringArrayParam(parent: Params, name: String, doc: String, isValid: Array[String] => Boolean)
  extends Param[Array[String]](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: Params, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  /** Creates a param pair with a `java.util.List` of values (for Java and Python). */
  def w(value: java.util.List[String]): ParamPair[Array[String]] = w(value.asScala.toArray)

  /** Encodes the value as a JSON array of strings. */
  override def jsonEncode(value: Array[String]): String = {
    import org.json4s.JsonDSL._
    compact(render(value.toSeq))
  }

  /** Decodes the value from a JSON array of strings. */
  override def jsonDecode(json: String): Array[String] = {
    implicit val formats = DefaultFormats
    parse(json).extract[Seq[String]].toArray
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Array[Double]]` for Java.
*/
@DeveloperApi
class DoubleArrayParam(parent: Params, name: String, doc: String, isValid: Array[Double] => Boolean)
  extends Param[Array[Double]](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: Params, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  /** Creates a param pair with a `java.util.List` of values (for Java and Python). */
  def w(value: java.util.List[java.lang.Double]): ParamPair[Array[Double]] =
    w(value.asScala.map(_.asInstanceOf[Double]).toArray)

  /** Encodes as a JSON array; elements go through DoubleParam.jValueEncode so NaN/Inf survive. */
  override def jsonEncode(value: Array[Double]): String = {
    import org.json4s.JsonDSL._
    compact(render(value.toSeq.map(DoubleParam.jValueEncode)))
  }

  /** Decodes a JSON array produced by `jsonEncode`. */
  override def jsonDecode(json: String): Array[Double] = {
    parse(json) match {
      case JArray(values) =>
        values.map(DoubleParam.jValueDecode).toArray
      case _ =>
        throw new IllegalArgumentException(s"Cannot decode $json to Array[Double].")
    }
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Array[Array[Double]]]` for Java.
*/
@DeveloperApi
class DoubleArrayArrayParam(
    parent: Params,
    name: String,
    doc: String,
    isValid: Array[Array[Double]] => Boolean)
  extends Param[Array[Array[Double]]](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: Params, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  /** Creates a param pair with a `java.util.List` of values (for Java and Python). */
  def w(value: java.util.List[java.util.List[java.lang.Double]]): ParamPair[Array[Array[Double]]] =
    w(value.asScala.map(_.asScala.map(_.asInstanceOf[Double]).toArray).toArray)

  /** Encodes as a JSON array of arrays; elements go through DoubleParam.jValueEncode. */
  override def jsonEncode(value: Array[Array[Double]]): String = {
    import org.json4s.JsonDSL._
    compact(render(value.toSeq.map(_.toSeq.map(DoubleParam.jValueEncode))))
  }

  /** Decodes a JSON array of arrays produced by `jsonEncode`. */
  override def jsonDecode(json: String): Array[Array[Double]] = {
    parse(json) match {
      // Distinct binder names (rows/cells) avoid the shadowed `values` of the
      // original nested match.
      case JArray(rows) =>
        rows.map {
          case JArray(cells) =>
            cells.map(DoubleParam.jValueDecode).toArray
          case _ =>
            throw new IllegalArgumentException(s"Cannot decode $json to Array[Array[Double]].")
        }.toArray
      case _ =>
        throw new IllegalArgumentException(s"Cannot decode $json to Array[Array[Double]].")
    }
  }
}
/**
* :: DeveloperApi ::
* Specialized version of `Param[Array[Int]]` for Java.
*/
@DeveloperApi
class IntArrayParam(parent: Params, name: String, doc: String, isValid: Array[Int] => Boolean)
  extends Param[Array[Int]](parent, name, doc, isValid) {

  // Auxiliary constructor: no validation (all values accepted).
  def this(parent: Params, name: String, doc: String) =
    this(parent, name, doc, ParamValidators.alwaysTrue)

  /** Creates a param pair with a `java.util.List` of values (for Java and Python). */
  def w(value: java.util.List[java.lang.Integer]): ParamPair[Array[Int]] =
    w(value.asScala.map(_.asInstanceOf[Int]).toArray)

  /** Encodes the value as a JSON array of integers. */
  override def jsonEncode(value: Array[Int]): String = {
    import org.json4s.JsonDSL._
    compact(render(value.toSeq))
  }

  /** Decodes the value from a JSON array of integers. */
  override def jsonDecode(json: String): Array[Int] = {
    implicit val formats = DefaultFormats
    parse(json).extract[Seq[Int]].toArray
  }
}
/**
* A param and its value.
*/
@Since("1.2.0")
case class ParamPair[T] @Since("1.2.0") (
    @Since("1.2.0") param: Param[T],
    @Since("1.2.0") value: T) {
  // This is *the* place Param.validate is called. Whenever a parameter is specified, we should
  // always construct a ParamPair so that validate is called.
  // An invalid value therefore fails fast with IllegalArgumentException at construction time.
  param.validate(value)
}
/**
* :: DeveloperApi ::
* Trait for components that take parameters. This also provides an internal param map to store
* parameter values attached to the instance.
*/
@DeveloperApi
trait Params extends Identifiable with Serializable {
/**
* Returns all params sorted by their names. The default implementation uses Java reflection to
* list all public methods that have no arguments and return [[Param]].
*
* @note Developer should not use this method in constructor because we cannot guarantee that
* this variable gets initialized before other params.
*/
lazy val params: Array[Param[_]] = {
val methods = this.getClass.getMethods
methods.filter { m =>
Modifier.isPublic(m.getModifiers) &&
classOf[Param[_]].isAssignableFrom(m.getReturnType) &&
m.getParameterTypes.isEmpty
}.sortBy(_.getName)
.map(m => m.invoke(this).asInstanceOf[Param[_]])
}
/**
* Explains a param.
* @param param input param, must belong to this instance.
* @return a string that contains the input param name, doc, and optionally its default value and
* the user-supplied value
*/
def explainParam(param: Param[_]): String = {
shouldOwn(param)
val valueStr = if (isDefined(param)) {
val defaultValueStr = getDefault(param).map("default: " + _)
val currentValueStr = get(param).map("current: " + _)
(defaultValueStr ++ currentValueStr).mkString("(", ", ", ")")
} else {
"(undefined)"
}
s"${param.name}: ${param.doc} $valueStr"
}
/**
* Explains all params of this instance. See `explainParam()`.
*/
def explainParams(): String = {
params.map(explainParam).mkString("\\n")
}
/** Checks whether a param is explicitly set. */
final def isSet(param: Param[_]): Boolean = {
shouldOwn(param)
paramMap.contains(param)
}
/** Checks whether a param is explicitly set or has a default value. */
final def isDefined(param: Param[_]): Boolean = {
shouldOwn(param)
defaultParamMap.contains(param) || paramMap.contains(param)
}
/** Tests whether this instance contains a param with a given name. */
def hasParam(paramName: String): Boolean = {
params.exists(_.name == paramName)
}
/** Gets a param by its name. */
def getParam(paramName: String): Param[Any] = {
params.find(_.name == paramName).getOrElse {
throw new NoSuchElementException(s"Param $paramName does not exist.")
}.asInstanceOf[Param[Any]]
}
/**
* Sets a parameter in the embedded param map.
*/
final def set[T](param: Param[T], value: T): this.type = {
set(param -> value)
}
/**
* Sets a parameter (by name) in the embedded param map.
*/
protected final def set(param: String, value: Any): this.type = {
set(getParam(param), value)
}
/**
* Sets a parameter in the embedded param map.
*/
protected final def set(paramPair: ParamPair[_]): this.type = {
shouldOwn(paramPair.param)
paramMap.put(paramPair)
this
}
/**
* Optionally returns the user-supplied value of a param.
*/
final def get[T](param: Param[T]): Option[T] = {
shouldOwn(param)
paramMap.get(param)
}
/**
* Clears the user-supplied value for the input param.
*/
final def clear(param: Param[_]): this.type = {
shouldOwn(param)
paramMap.remove(param)
this
}
/**
* Gets the value of a param in the embedded param map or its default value. Throws an exception
* if neither is set.
*/
final def getOrDefault[T](param: Param[T]): T = {
shouldOwn(param)
get(param).orElse(getDefault(param)).getOrElse(
throw new NoSuchElementException(s"Failed to find a default value for ${param.name}"))
}
/**
* An alias for `getOrDefault()`.
*/
protected final def $[T](param: Param[T]): T = getOrDefault(param)
/**
* Sets a default value for a param.
* @param param param to set the default value. Make sure that this param is initialized before
* this method gets called.
* @param value the default value
*/
protected final def setDefault[T](param: Param[T], value: T): this.type = {
defaultParamMap.put(param -> value)
this
}
/**
* Sets default values for a list of params.
*
* Note: Java developers should use the single-parameter `setDefault`.
* Annotating this with varargs can cause compilation failures due to a Scala compiler bug.
* See SPARK-9268.
*
* @param paramPairs a list of param pairs that specify params and their default values to set
* respectively. Make sure that the params are initialized before this method
* gets called.
*/
protected final def setDefault(paramPairs: ParamPair[_]*): this.type = {
paramPairs.foreach { p =>
setDefault(p.param.asInstanceOf[Param[Any]], p.value)
}
this
}
/**
* Gets the default value of a parameter.
*/
final def getDefault[T](param: Param[T]): Option[T] = {
shouldOwn(param)
defaultParamMap.get(param)
}
/**
* Tests whether the input param has a default value set.
*/
final def hasDefault[T](param: Param[T]): Boolean = {
shouldOwn(param)
defaultParamMap.contains(param)
}
/**
 * Creates a copy of this instance with the same UID and some extra params.
 * Subclasses should implement this method and set the return type properly.
 * See `defaultCopy()`.
 */
def copy(extra: ParamMap): Params
/**
 * Default implementation of copy with extra params.
 * It tries to create a new instance with the same UID.
 * Then it copies the embedded and extra parameters over and returns the new instance.
 */
protected final def defaultCopy[T <: Params](extra: ParamMap): T = {
  // Relies on the subclass exposing a single-String (uid) constructor.
  val freshInstance = this.getClass.getConstructor(classOf[String]).newInstance(uid)
  copyValues(freshInstance, extra).asInstanceOf[T]
}
/**
 * Extracts the embedded default param values and user-supplied values, and then merges them with
 * extra values from input into a flat param map, where the latter value is used if there exist
 * conflicts, i.e., with ordering:
 * default param values less than user-supplied values less than extra.
 */
final def extractParamMap(extra: ParamMap): ParamMap = {
  // ++ is right-biased here: later maps overwrite earlier ones.
  defaultParamMap ++ paramMap ++ extra
}
/**
 * `extractParamMap` with no extra values.
 */
final def extractParamMap(): ParamMap = {
  extractParamMap(ParamMap.empty)
}
/** Internal param map for user-supplied values (set/clear operate on this). */
private val paramMap: ParamMap = ParamMap.empty

/** Internal param map for default values (setDefault operates on this). */
private val defaultParamMap: ParamMap = ParamMap.empty
/** Validates that the input param belongs to this instance (matching uid and a registered name). */
private def shouldOwn(param: Param[_]): Unit = {
  require(param.parent == uid && hasParam(param.name), s"Param $param does not belong to $this.")
}
/**
 * Copies param values from this instance to another instance for params shared by them.
 *
 * This handles default Params and explicitly set Params separately.
 * Default Params are copied from and to `defaultParamMap`, and explicitly set Params are
 * copied from and to `paramMap`.
 * Warning: This implicitly assumes that this [[Params]] instance and the target instance
 * share the same set of default Params.
 *
 * @param to the target instance, which should work with the same set of default Params as this
 *           source instance
 * @param extra extra params to be copied to the target's `paramMap`
 * @return the target instance with param values copied
 */
protected def copyValues[T <: Params](to: T, extra: ParamMap = ParamMap.empty): T = {
  // extra overwrites this instance's explicitly set values (++ is right-biased).
  val map = paramMap ++ extra
  params.foreach { param =>
    // copy default Params: resolved via the *target's* param object of the same
    // name, since param objects are per-instance.
    if (defaultParamMap.contains(param) && to.hasParam(param.name)) {
      to.defaultParamMap.put(to.getParam(param.name), defaultParamMap(param))
    }
    // copy explicitly set Params (looked up by name on the target for the same reason)
    if (map.contains(param) && to.hasParam(param.name)) {
      to.set(param.name, map(param))
    }
  }
  to
}
}
/**
 * :: DeveloperApi ::
 * Java-friendly wrapper for [[Params]].
 * Java developers who need to extend [[Params]] should use this class instead.
 * If you need to extend an abstract class which already extends [[Params]], then that abstract
 * class should be Java-friendly as well.
 */
@DeveloperApi
abstract class JavaParams extends Params
/**
 * A param to value map.
 *
 * Mutable: `put`/`remove`/`++=` modify this instance in place; `++`, `copy` and
 * `filter` return new maps. Not thread-safe.
 */
@Since("1.2.0")
final class ParamMap private[ml] (private val map: mutable.Map[Param[Any], Any])
  extends Serializable {

  /* DEVELOPERS: About validating parameter values
   * This and ParamPair are the only two collections of parameters.
   * This class should always create ParamPairs when
   * specifying new parameter values. ParamPair will then call Param.validate().
   */

  /**
   * Creates an empty param map.
   */
  @Since("1.2.0")
  def this() = this(mutable.Map.empty)

  /**
   * Puts a (param, value) pair (overwrites if the input param exists).
   */
  @Since("1.2.0")
  def put[T](param: Param[T], value: T): this.type = put(param -> value)

  /**
   * Puts a list of param pairs (overwrites if the input params exists).
   */
  @varargs
  @Since("1.2.0")
  def put(paramPairs: ParamPair[_]*): this.type = {
    paramPairs.foreach { p =>
      // Cast erases the pair's existential element type; safe because the value
      // was validated against the same param when the ParamPair was created.
      map(p.param.asInstanceOf[Param[Any]]) = p.value
    }
    this
  }

  /** Put param pairs with a `java.util.List` of values for Python. */
  private[ml] def put(paramPairs: JList[ParamPair[_]]): this.type = {
    put(paramPairs.asScala: _*)
  }

  /**
   * Optionally returns the value associated with a param.
   */
  @Since("1.2.0")
  def get[T](param: Param[T]): Option[T] = {
    map.get(param.asInstanceOf[Param[Any]]).asInstanceOf[Option[T]]
  }

  /**
   * Returns the value associated with a param or a default value.
   */
  @Since("1.4.0")
  def getOrElse[T](param: Param[T], default: T): T = {
    get(param).getOrElse(default)
  }

  /**
   * Gets the value of the input param or its default value if it does not exist.
   * Raises a NoSuchElementException if there is no value associated with the input param.
   */
  @Since("1.2.0")
  def apply[T](param: Param[T]): T = {
    get(param).getOrElse {
      throw new NoSuchElementException(s"Cannot find param ${param.name}.")
    }
  }

  /**
   * Checks whether a parameter is explicitly specified.
   */
  @Since("1.2.0")
  def contains(param: Param[_]): Boolean = {
    map.contains(param.asInstanceOf[Param[Any]])
  }

  /**
   * Removes a key from this map and returns its value associated previously as an option.
   */
  @Since("1.4.0")
  def remove[T](param: Param[T]): Option[T] = {
    map.remove(param.asInstanceOf[Param[Any]]).asInstanceOf[Option[T]]
  }

  /**
   * Filters this param map for the given parent.
   */
  @Since("1.2.0")
  def filter(parent: Params): ParamMap = {
    // Don't use filterKeys because mutable.Map#filterKeys
    // returns the instance of collections.Map, not mutable.Map.
    // Otherwise, we get ClassCastException.
    // Not using filterKeys also avoid SI-6654
    val filtered = map.filter { case (k, _) => k.parent == parent.uid }
    new ParamMap(filtered)
  }

  /**
   * Creates a copy of this param map.
   */
  @Since("1.2.0")
  def copy: ParamMap = new ParamMap(map.clone())

  // Entries sorted by param name so the rendering is deterministic.
  @Since("1.2.0")
  override def toString: String = {
    map.toSeq.sortBy(_._1.name).map { case (param, value) =>
      s"\\t${param.parent}-${param.name}: $value"
    }.mkString("{\\n", ",\\n", "\\n}")
  }

  /**
   * Returns a new param map that contains parameters in this map and the given map,
   * where the latter overwrites this if there exist conflicts.
   */
  @Since("1.2.0")
  def ++(other: ParamMap): ParamMap = {
    // TODO: Provide a better method name for Java users.
    new ParamMap(this.map ++ other.map)
  }

  /**
   * Adds all parameters from the input param map into this param map.
   */
  @Since("1.2.0")
  def ++=(other: ParamMap): this.type = {
    // TODO: Provide a better method name for Java users.
    this.map ++= other.map
    this
  }

  /**
   * Converts this param map to a sequence of param pairs.
   */
  @Since("1.2.0")
  def toSeq: Seq[ParamPair[_]] = {
    map.toSeq.map { case (param, value) =>
      ParamPair(param, value)
    }
  }

  /** Java-friendly method for Python API */
  private[ml] def toList: java.util.List[ParamPair[_]] = {
    this.toSeq.asJava
  }

  /**
   * Number of param pairs in this map.
   */
  @Since("1.3.0")
  def size: Int = map.size
}
@Since("1.2.0")
object ParamMap {

  /**
   * Returns an empty param map.
   */
  @Since("1.2.0")
  def empty: ParamMap = new ParamMap()

  /**
   * Constructs a param map by specifying its entries.
   */
  @varargs
  @Since("1.2.0")
  def apply(paramPairs: ParamPair[_]*): ParamMap = {
    val result = new ParamMap()
    result.put(paramPairs: _*)
  }
}
| saltstar/spark | mllib/src/main/scala/org/apache/spark/ml/param/params.scala | Scala | apache-2.0 | 32,423 |
package scalaDemo
/**
 * Demonstrates that declaring a setter's return type as `this.type` keeps a
 * fluent call chain typed as the *runtime* receiver, so subclass-only methods
 * stay available at the end of the chain.
 */
object ThisTypeDemo extends App {
  // Chaining on the base class.
  val food = new Food
  food.setName("rice").setAge(2)
  println("Food : " + food)

  // Chaining on the subclass.
  val rice = new Rice
  // Always fine: the inherited setters are callable on Rice directly.
  rice.setName("guangxidami").setAge(3)
  // Also fine: setgrow() returns the Rice instance, so the inherited
  // setters can follow it.
  rice.setgrow().setName("beijingdami").setAge(1)
  // This is the point of the demo: because setName/setAge return this.type
  // (rather than Food), the chain is still typed as Rice and setgrow()
  // remains available at the end.
  rice.setName("zhejiangdami").setAge(4).setgrow()
  println("Rice : " + rice)

  // Assertion demo. The original asserted `sumPredictions == 0.0` against a
  // value of 111, which made this App crash with an AssertionError every run;
  // the condition now matches the value so the demo completes.
  var sumPredictions = 111
  assert(sumPredictions == 111, "sumPredictions should be 111")
}
/**
 * A food item carrying a name and an age. Each setter returns `this.type` so
 * calls can be chained and the chain keeps the runtime type of the receiver
 * (see ThisTypeDemo).
 */
class Food {
  private var name: String = _
  private var age: Int = _

  /** Stores the name and returns the receiver for chaining. */
  def setName(getName: String): this.type = {
    name = getName
    this
  }

  /** Stores the age and returns the receiver for chaining. */
  def setAge(getAge: Int): this.type = {
    age = getAge
    this
  }

  override def toString: String = "name = " + name + "||=|| age = " + age
}

/**
 * A rice subclass of Food. setgrow() returns the Rice instance itself
 * (inferred type), so subclass methods can appear mid-chain.
 */
class Rice extends Food {
  def setgrow() = {
    println(" I am growing!! Don't eat me :(")
    this
  }
}
| tophua/spark1.52 | examples/src/main/scala/scalaDemo/ThisTypeDemo.scala | Scala | apache-2.0 | 1,728 |
package objektwerks.types
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
/**
 * Shows that `type` aliases are purely compile-time names: a Map keyed by the
 * alias `User` behaves exactly like the underlying Map[String, Int].
 */
class TypeAliasTest extends AnyFunSuite with Matchers {
  test("type alias") {
    // Aliases only improve readability of the Map's type signature;
    // they introduce no new runtime types.
    type User = String
    type Age = Int
    val users = Map[User, Age]("john" -> 21, "jane" -> 19)
    users("john") shouldEqual 21
    users("jane") shouldEqual 19
  }
}
package com.gx.simplefactory
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// Product interface: a binary arithmetic operation.
trait Operation {
  /** Applies the operation to the two operands. */
  def getResult(a: Double, b: Double): Double
}

// Concrete product: addition.
class AddOperation extends Operation {
  override def getResult(a: Double, b: Double): Double = {
    a + b
  }
}

// Concrete product: subtraction.
class SubOperation extends Operation {
  override def getResult(a: Double, b: Double): Double = {
    a - b
  }
}

// Concrete product: multiplication.
class MulOperation extends Operation {
  override def getResult(a: Double, b: Double): Double = {
    a * b
  }
}

// Concrete product: division.
class DivOperation extends Operation {
  override def getResult(a: Double, b: Double): Double = {
    // Guard explicitly: Double division by zero would otherwise silently
    // yield Infinity/NaN instead of failing.
    if (b == 0) throw new Exception("b can not be zero")
    a / b
  }
}

// Simple Factory Method
// also known as: companion object apply method
object Operation {
  /**
   * Creates the operation for the given symbol.
   *
   * @param op one of "+", "-", "*", "/"
   * @throws IllegalArgumentException for any other symbol. (The original
   *         match was non-exhaustive and threw an uninformative MatchError.)
   */
  def apply(op: String): Operation = op match {
    case "+" => new AddOperation()
    case "-" => new SubOperation()
    case "*" => new MulOperation()
    case "/" => new DivOperation()
    case other => throw new IllegalArgumentException(s"Unsupported operation: $other")
  }
}
| josephguan/scala-design-patterns | creational/simple-factory/src/main/scala/com/gx/simplefactory/Operation.scala | Scala | apache-2.0 | 1,602 |
/*
* Copyright (C) 2014 Romain Reuillon
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.task
import org.openmole.core.context.Val
import org.openmole.core.dsl._
package netlogo {

  import org.openmole.core.workflow.builder._

  /**
   * DSL entry points for NetLogo tasks. Extends the generic external-task
   * package so NetLogo scripts inherit its keywords as well.
   */
  trait NetLogoPackage extends external.ExternalPackage {
    // Legacy keyword kept for source compatibility; superseded by
    // `inputs += p mapped n` (hence @deprecated).
    @deprecated
    lazy val netLogoInputs = new {
      // Maps the workflow prototype `p` onto the NetLogo global named `n`.
      def +=[T: MappedInputBuilder: InputOutputBuilder](p: Val[_], n: String): T ⇒ T = inputs += p mapped n
      // Shorthand: the NetLogo global shares the prototype's own name.
      def +=[T: MappedInputBuilder: InputOutputBuilder](p: Val[_]): T ⇒ T = this.+=[T](p, p.name)
    }

    // Legacy keyword kept for source compatibility; superseded by
    // `outputs += p mapped n` (hence @deprecated).
    @deprecated
    lazy val netLogoOutputs = new {
      // Maps the NetLogo global named `n` back onto the workflow prototype `p`.
      def +=[T: MappedOutputBuilder: InputOutputBuilder](n: String, p: Val[_]): T ⇒ T = outputs += p mapped n
      // Shorthand: the NetLogo global shares the prototype's own name.
      def +=[T: MappedOutputBuilder: InputOutputBuilder](p: Val[_]): T ⇒ T = this.+=[T](p.name, p)
    }
  }
}

// Makes the DSL available via a plain wildcard import of this package.
package object netlogo extends NetLogoPackage
package bad.robot.radiate.config
import java.time.LocalTime
import java.time.LocalTime._
import scalaz.{Failure, Success, Validation}
object EcoMode {
  /**
   * Validates the optional start/end strings and builds an EcoMode.
   *
   * Both values must be present and parse as "HH:mm" within 00:00-23:59
   * (enforced by the Time extractor); otherwise the pair is rejected as a
   * whole. Note this never yields Success(None): a missing pair is a Failure.
   */
  def validate(start: Option[String], end: Option[String]): Validation[String, Option[EcoMode]] = (start, end) match {
    case (Time(start), Time(end)) => Success(Some(EcoMode(start, end)))
    // NOTE(review): `value` binds the raw (Option, Option) tuple, so the
    // failure message renders e.g. (Some(25:00),None) — confirm that's intended.
    case value @ _ => Failure(s"The values for eco mode times must be in the range of '00:00' to '23:59'. Both 'start' and 'end' times must be supplied. The values was $value")
  }
}
/**
 * An eco-mode time window.
 *
 * `active` is true when the current wall-clock time falls inside the window.
 * Windows wrapping past midnight (e.g. 22:00-06:00) need the disjunction; the
 * original applied the disjunction unconditionally, so a same-day window such
 * as 09:00-17:00 incorrectly reported active at any time after its start.
 */
case class EcoMode(start: Time, end: Time) {
  def active = {
    val current = now
    if (start.isBefore(end)) current.isAfter(start) && current.isBefore(end) // same-day window
    else current.isAfter(start) || current.isBefore(end)                    // wraps past midnight
  }
}
object Time {
  // Accepts "H:mm" or "HH:mm" between 00:00 and 23:59; pattern matching
  // against a Regex requires the whole string to match, so no anchors needed.
  private val HoursAndMinutes = "([01]?[0-9]|2[0-3]):([0-5][0-9])".r

  /** Extractor: parses an optional "HH:mm" string into a Time, None otherwise. */
  def unapply(value: Option[String]): Option[Time] =
    value.collect { case HoursAndMinutes(hour, minutes) => Time(hour.toInt, minutes.toInt) }

  /** Lets a Time be used wherever a java.time.LocalTime is expected. */
  implicit def toLocalTime(time: Time): LocalTime = LocalTime.of(time.hour, time.minutes)
}

/** An hour/minute pair, validated on construction via the Time extractor. */
case class Time(hour: Int, minutes: Int)
| tobyweston/radiate | src/main/scala/bad/robot/radiate/config/EcoMode.scala | Scala | apache-2.0 | 1,017 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.space
import org.scalatest.{ FlatSpec, Matchers }
import squants.QuantityParseException
import squants.motion.RadiansPerSecond
import squants.time.Seconds
/**
 * Spec for the Angle quantity: unit factories, string parsing, unit
 * conversions, trigonometry helpers and dimensional arithmetic.
 *
 * @author garyKeorkunian
 * @since 0.1
 *
 */
class AngleSpec extends FlatSpec with Matchers {

  behavior of "Angle and its Units of Measure"

  it should "create values using UOM factories" in {
    Radians(1).toRadians should be(1)
    Degrees(1).toDegrees should be(1)
    Gradians(1).toGradians should be(1)
    Turns(1).toTurns should be(1)
    Arcminutes(1).toArcminutes should be(1)
    Arcseconds(1).toArcseconds should be(1)
  }

  it should "create values from properly formatted Strings" in {
    Angle("10.22 rad").get should be(Radians(10.22))
    Angle("10.22°").get should be(Degrees(10.22))
    Angle("10.22 grad").get should be(Gradians(10.22))
    Angle("10.22 turns").get should be(Turns(10.22))
    Angle("10.22 amin").get should be(Arcminutes(10.22))
    Angle("10.22 asec").get should be(Arcseconds(10.22))
    // Parse failures surface as a failed Try carrying QuantityParseException.
    Angle("10.33 zz").failed.get should be(QuantityParseException("Unable to parse Angle", "10.33 zz"))
    Angle("ZZ rad").failed.get should be(QuantityParseException("Unable to parse Angle", "ZZ rad"))
  }

  it should "properly convert to all supported Units of Measure" in {
    // Conversions are checked against the closed-form factors relative to 1 radian.
    val x = Radians(1)
    x.toRadians should be(1d)
    x.toDegrees should be(180d / math.Pi)
    x.toGradians should be(200d / math.Pi)
    x.toTurns should be(1d / (math.Pi * 2d))
    x.toArcminutes should be(1d / (math.Pi / 10800d))
    x.toArcseconds should be(1d / ((math.Pi / 10800d) / 60d))
  }

  it should "return properly formatted strings for all supported Units of Measure" in {
    Radians(1).toString(Radians) should be("1.0 rad")
    Degrees(1).toString(Degrees) should be("1.0 °")
    Gradians(1).toString(Gradians) should be("1.0 grad")
    Turns(1).toString(Turns) should be("1.0 turns")
    Arcminutes(1).toString(Arcminutes) should be("1.0 amin")
    Arcseconds(1).toString(Arcseconds) should be("1.0 asec")
  }

  // Trig helpers delegate to scala.math on the radian value.
  it should "return the cos of an Angle" in {
    Radians(1).cos should be(math.cos(1))
  }

  it should "return the sin of an Angle" in {
    Radians(1).sin should be(math.sin(1))
  }

  it should "return the acos of an Angle" in {
    Radians(1).acos should be(math.acos(1))
  }

  it should "return the asin of an Angle" in {
    Radians(1).asin should be(math.asin(1))
  }

  // Dimensional arithmetic: Angle / Time and its inverse.
  it should "return AngularVelocity when divided by Time" in {
    Radians(1) / Seconds(1) should be(RadiansPerSecond(1))
  }

  it should "return Time when divided by AngularVelocity" in {
    Radians(1) / RadiansPerSecond(1) should be(Seconds(1))
  }

  behavior of "AngleConversion"

  it should "provide aliases for single unit values" in {
    import AngleConversions._

    radian should be(Radians(1))
    degree should be(Degrees(1))
    gradian should be(Gradians(1))
    turn should be(Turns(1))
    arcminute should be(Arcminutes(1))
    arcsecond should be(Arcseconds(1))
  }

  it should "provide implicit conversion from Double" in {
    import AngleConversions._

    val d = 10d
    d.radians should be(Radians(d))
    d.degrees should be(Degrees(d))
    d.gradians should be(Gradians(d))
    d.turns should be(Turns(d))
    d.arcminutes should be(Arcminutes(d))
    d.arcseconds should be(Arcseconds(d))
  }

  it should "provide Numeric support" in {
    import AngleConversions.AngleNumeric

    val as = List(Radians(100), Radians(1))
    as.sum should be(Radians(101))
  }
}
| derekmorr/squants | shared/src/test/scala/squants/space/AngleSpec.scala | Scala | apache-2.0 | 4,026 |
/*
* Copyright (C) 2013 Tactix4
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.tactix4.t4openerp.connector.transport
import scalaz.Monoid
/**
* @author max@tactix4.com
* 24/08/2013
*/
/**
* Hierarchy for the OpenERP data representation.
*
* This subset of datastructures/types were chosen given the overlap between
* the supported types in json and xmlrpc as well as the basic types in python
*/
/**
 * Hierarchy for the OpenERP data representation.
 *
 * This subset of datastructures/types were chosen given the overlap between
 * the supported types in json and xmlrpc as well as the basic types in python.
 */
sealed trait OEType {

  /**
   * Catamorphism over the six variants: exactly one handler is applied,
   * selected by the runtime variant of this value.
   */
  def fold[X](
    oerpBoolean: Boolean => X,
    oerpNumeric: BigDecimal => X,
    oerpString: String => X,
    oerpArray: List[OEType] => X,
    oerpMap: Map[String, OEType] => X,
    oerpNull: Null => X
  ): X =
    this match {
      case OEBoolean(b) => oerpBoolean(b)
      case OENumber(n) => oerpNumeric(n)
      case OEString(s) => oerpString(s)
      case a: OEArray => oerpArray(a.value)
      case m: OEDictionary => oerpMap(m.value)
      case OENull => oerpNull(null)
    }

  override def toString: String = this match {
    case OEBoolean(b) => b.toString
    case OENumber(n) => n.toString
    case OEString(s) => s.toString
    case a: OEArray => a.value.toString
    case m: OEDictionary => m.value.toString
    case OENull => "<NULL>"
  }

  // Safe downcasts: Some(payload) when this value is the matching variant, None otherwise.
  def bool: Option[Boolean] = this.fold(b => Some(b), _ => None, _ => None, _ => None, _ => None, _ => None)
  def number: Option[BigDecimal] = this.fold(_ => None, n => Some(n), _ => None, _ => None, _ => None, _ => None)
  def double: Option[Double] = this.fold(_ => None, n => Some(n.doubleValue), _ => None, _ => None, _ => None, _ => None)
  def int: Option[Int] = this.fold(_ => None, n => Some(n.intValue), _ => None, _ => None, _ => None, _ => None)
  def string: Option[String] = this.fold(_ => None, _ => None, s => Some(s), _ => None, _ => None, _ => None)
  def array: Option[List[OEType]] = this.fold(_ => None, _ => None, _ => None, a => Some(a), _ => None, _ => None)
  def dictionary: Option[Map[String, OEType]] = this.fold(_ => None, _ => None, _ => None, _ => None, s => Some(s), _ => None)
  // NOTE(review): yields `null` (not None/Some) for OENull, as in the original —
  // confirm callers rely on that before changing it.
  def nullT: Option[Nothing] = this.fold(_ => None, _ => None, _ => None, _ => None, _ => None, _ => null)

  // Variant predicates. Invariant: exactly one of isBool/isNumber/isString/
  // isArray/isDictionary/isNull holds for any value.
  def isBool: Boolean = this.fold(_ => true, _ => false, _ => false, _ => false, _ => false, _ => false)
  def isNumber: Boolean = this.fold(_ => false, _ => true, _ => false, _ => false, _ => false, _ => false)
  def isInt: Boolean = this.fold(_ => false, n => n.isValidInt, _ => false, _ => false, _ => false, _ => false)
  def isDouble: Boolean = this.fold(_ => false, n => n.isValidDouble, _ => false, _ => false, _ => false, _ => false)
  // Fixed: the original's null handler was `_ => true`, so isString also
  // returned true for OENull, breaking the invariant above.
  def isString: Boolean = this.fold(_ => false, _ => false, _ => true, _ => false, _ => false, _ => false)
  def isArray: Boolean = this.fold(_ => false, _ => false, _ => false, _ => true, _ => false, _ => false)
  def isDictionary: Boolean = this.fold(_ => false, _ => false, _ => false, _ => false, _ => true, _ => false)
  def isNull: Boolean = this.fold(_ => false, _ => false, _ => false, _ => false, _ => false, _ => true)

  // Map the payload of the matching variant; None when the variant differs.
  def asBool[X](f: Boolean => X): Option[X] = bool.map(f)
  def asNumber[X](f: BigDecimal => X): Option[X] = number.map(f)
  def asInt[X](f: Int => X): Option[X] = int.map(f)
  def asDouble[X](f: Double => X): Option[X] = double.map(f)
  def asString[X](f: String => X): Option[X] = string.map(f)
  def asArray[X](f: List[OEType] => X): Option[X] = array.map(f)
  def asDictionary[X](f: Map[String, OEType] => X): Option[X] = dictionary.map(f)
}
/** A numeric OpenERP value (BigDecimal covers both int and float payloads). */
case class OENumber(value: BigDecimal) extends OEType
/** A boolean OpenERP value. */
case class OEBoolean(value: Boolean) extends OEType
/** A string OpenERP value. */
case class OEString(value: String) extends OEType
/** An ordered sequence of OpenERP values. Plain class: construct via the companion. */
class OEArray(val value: List[OEType]) extends OEType
/** A string-keyed dictionary of OpenERP values. Plain class: construct via the companion. */
class OEDictionary(val value: Map[String, OEType]) extends OEType {
  /** Looks up a key; None when absent. */
  def get(k: String): Option[OEType] = value.get(k)
}
/** The OpenERP null marker. */
case object OENull extends OEType
object OEArray {
  /** Builds an array from an existing list. */
  def apply(l: List[OEType]): OEArray = new OEArray(l)
  /** Builds an array from individual elements. */
  def apply(l: OEType*): OEArray = new OEArray(l.toList)
  /** Enables `case OEArray(a, b, rest @ _*)` style matching. */
  def unapplySeq(a: OEArray): Option[List[OEType]] = Some(a.value)

  // Monoid: empty array as zero, list concatenation as append.
  implicit val monoidInstance = new Monoid[OEArray] {
    override def zero: OEArray = OEArray(Nil)
    override def append(f1: OEArray, f2: => OEArray): OEArray = OEArray(f1.value ++ f2.value)
  }
}
object OEDictionary {
  /** Builds a dictionary from an existing map. */
  def apply(l: Map[String, OEType]): OEDictionary = new OEDictionary(l)
  /** Builds a dictionary from individual key/value pairs. */
  def apply(l: (String, OEType)*): OEDictionary = new OEDictionary(l.toMap)
  /** Enables sequence-pattern matching over the entries (iteration order unspecified). */
  def unapplySeq(a: OEDictionary): Option[Seq[(String, OEType)]] = Some(a.value.toSeq)

  // Monoid: empty dictionary as zero, right-biased map union as append.
  implicit val monoidInstance = new Monoid[OEDictionary] {
    override def zero: OEDictionary = OEDictionary()
    override def append(f1: OEDictionary, f2: => OEDictionary): OEDictionary = OEDictionary(f1.value ++ f2.value)
  }
}
| NeovaHealth/t4openerp-connector | src/main/scala/com/tactix4/t4openerp.connector/transport/OEType.scala | Scala | agpl-3.0 | 5,318 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.commons.lang
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpec
/**
 * Documents the element-wise semantics of StringBuilder.contains: as a
 * sequence of Chars, a Char element can be found while a String argument is
 * never equal to any element — presumably the point of this spec; confirm
 * against the builder's API.
 */
class StringBuilderTest extends AnyFunSpec with Matchers {
  describe("StringBuilder") {
    it("Contains") {
      val builder = new StringBuilder("a/b/c.action?param1=value1")
      // Same character, different argument type, different answer.
      assert(!builder.contains("?"))
      assert(builder.contains('?'))
    }
  }
}
| beangle/commons | core/src/test/scala/org/beangle/commons/lang/StringBuilderTest.scala | Scala | lgpl-3.0 | 1,106 |
package org.neo4j.cypher.internal.executionplan.builders
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import org.scalatest.Assertions
import org.junit.Assert._
import org.neo4j.cypher.internal.pipes.NullPipe
import org.neo4j.cypher.internal.executionplan.{PartiallySolvedQuery}
import org.junit.Test
import org.neo4j.cypher.internal.commands._
import expressions.Literal
import org.neo4j.graphdb.event.{KernelEventHandler, TransactionEventHandler}
import java.lang.Iterable
import org.neo4j.graphdb._
import index._
import java.util.Map
/**
 * Exercises IndexQueryBuilder: it should accept unsolved NodeByIndexQuery
 * start items, solve exactly one per invocation, leave other start items
 * untouched, and decline queries whose index start items are already solved.
 */
class IndexQueryBuilderTest extends BuilderTest {

  val builder = new IndexQueryBuilder(new Fake_Database_That_Has_All_Indexes)

  @Test
  def says_yes_to_node_by_id_queries() {
    val q = PartiallySolvedQuery().
      copy(start = Seq(Unsolved(NodeByIndexQuery("s", "idx", Literal("foo")))))

    assertTrue("Should be able to build on this", builder.canWorkWith(plan(q)))
  }

  @Test
  def only_takes_one_start_item_at_the_time() {
    val q = PartiallySolvedQuery().
      copy(start = Seq(
        Unsolved(NodeByIndexQuery("s", "idx", Literal("foo"))),
        Unsolved(NodeByIndexQuery("x", "idx", Literal("foo")))))

    val remaining = builder(plan(q)).query

    // Exactly one item is solved per builder application; the rest remain unsolved.
    assertEquals("No more than 1 startitem should be solved", 1, remaining.start.filter(_.solved).length)
    assertEquals("Stuff should remain", 1, remaining.start.filterNot(_.solved).length)
  }

  @Test
  def fixes_node_by_id_and_keeps_the_rest_around() {
    // A non-index start item (RelationshipById) must not be touched.
    val q = PartiallySolvedQuery().
      copy(start = Seq(Unsolved(NodeByIndexQuery("s", "idx", Literal("foo"))), Unsolved(RelationshipById("x", 1))))

    val result = builder(plan(q)).query

    val expected = Set(Solved(NodeByIndexQuery("s", "idx", Literal("foo"))), Unsolved(RelationshipById("x", 1)))

    assert(result.start.toSet === expected)
  }

  @Test
  def says_no_to_already_solved_node_by_id_queries() {
    val q = PartiallySolvedQuery().
      copy(start = Seq(Solved(NodeByIndexQuery("s", "idx", Literal("foo")))))

    assertFalse("Should not build on this", builder.canWorkWith(plan(q)))
  }

  @Test
  def builds_a_nice_start_pipe() {
    val q = PartiallySolvedQuery().
      copy(start = Seq(Unsolved(NodeByIndexQuery("s", "idx", Literal("foo")))))

    val remainingQ = builder(plan(q)).query

    assert(remainingQ.start === Seq(Solved(NodeByIndexQuery("s", "idx", Literal("foo")))))
  }
}
/**
 * Test stub implementing both GraphDatabaseService and IndexManager.
 *
 * Only the two exists* methods matter to IndexQueryBuilder — they always
 * report true — and index() returns this so the builder reaches them.
 * Every other member is an inert null/empty stub.
 */
class Fake_Database_That_Has_All_Indexes extends GraphDatabaseService with IndexManager {
  def createNode(): Node = null

  // The behavior under test: every index name "exists".
  def existsForNodes(indexName: String): Boolean = true

  def existsForRelationships(indexName: String): Boolean = true

  def forNodes(indexName: String): Index[Node] = null

  def forNodes(indexName: String, customConfiguration: Map[String, String]): Index[Node] = null

  def forRelationships(indexName: String): RelationshipIndex = null

  def forRelationships(indexName: String, customConfiguration: Map[String, String]): RelationshipIndex = null

  def getConfiguration(index: Index[_ <: PropertyContainer]): Map[String, String] = null

  def getNodeAutoIndexer: AutoIndexer[Node] = null

  def getRelationshipAutoIndexer: RelationshipAutoIndexer = null

  def nodeIndexNames(): Array[String] = null

  def relationshipIndexNames(): Array[String] = null

  def removeConfiguration(index: Index[_ <: PropertyContainer], key: String): String = ""

  def setConfiguration(index: Index[_ <: PropertyContainer], key: String, value: String): String = ""

  def beginTx(): Transaction = null

  def getAllNodes: Iterable[Node] = null

  def getNodeById(id: Long): Node = null

  def getReferenceNode: Node = null

  def getRelationshipById(id: Long): Relationship = null

  def getRelationshipTypes: Iterable[RelationshipType] = null

  // Returns this stub so index-existence checks reach the methods above.
  def index(): IndexManager = this

  def registerKernelEventHandler(handler: KernelEventHandler): KernelEventHandler = null

  def registerTransactionEventHandler[T](handler: TransactionEventHandler[T]): TransactionEventHandler[T] = null

  def shutdown() {}

  def unregisterKernelEventHandler(handler: KernelEventHandler): KernelEventHandler = null

  def unregisterTransactionEventHandler[T](handler: TransactionEventHandler[T]): TransactionEventHandler[T] = null
}
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api.push
import cats.Id
import cats.data.{Const, Ior, NonEmptyMap}
import cats.instances.string._
import java.lang.String
import scala.{Boolean, Int, Option}
package object param {
  /** A declared parameter type: the descriptor alone (Id carries no value). */
  type Formal[A] = ParamType[Id, A]
  /** A supplied parameter: the descriptor paired with a concrete value held in Const. */
  type Actual[A] = ParamType[Const[A, ?], A]

  /** Constructors for formal (declared) parameter types. */
  object Formal {
    val boolean: Formal[Boolean] =
      ParamType.Boolean[Id](())

    // `bounds` uses Ior so a lower bound, an upper bound, or both may be given.
    def integer(bounds: Option[Int Ior Int], step: Option[IntegerStep]): Formal[Int] =
      ParamType.Integer[Id](ParamType.Integer.Args(bounds, step))

    // NonEmptyMap guarantees at least the one labelled alternative `x`.
    def enum[A](x: (String, A), xs: (String, A)*): Formal[A] =
      ParamType.Enum[Id, A](NonEmptyMap.of(x, xs: _*))
  }

  /** Constructors for actual (supplied) parameter values. */
  object Actual {
    def boolean(b: Boolean): Actual[Boolean] =
      ParamType.Boolean(Const(b))

    def integer(i: Int): Actual[Int] =
      ParamType.Integer(Const(i))

    def enumSelect(s: String): Actual[String] =
      ParamType.EnumSelect(Const(s))
  }
}
| djspiewak/quasar | api/src/main/scala/quasar/api/push/param/package.scala | Scala | apache-2.0 | 1,505 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mongo.query.test
import com.mongodb.{ DBObject, MongoClient }
import de.bwaldvogel.mongo.MongoServer
import mongo.query.test.MongoIntegrationEnv._
import org.specs2.mutable.Specification
import scala.collection.mutable
import scalaz.\\/-
import scalaz.concurrent.Task
import scalaz.stream.io
/**
 * Per-example fixture: boots an in-memory mock MongoDB and tears it down
 * after the example via the specs2 After hook. Examples must call
 * initMongo() first; client/server are unassigned until then.
 *
 * NOTE(review): the name keeps the original "Enviroment" misspelling —
 * renaming would break every example mixing it in.
 */
trait MongoStreamsEnviroment extends org.specs2.mutable.After {
  val logger = org.apache.log4j.Logger.getLogger("Observable-Consumer")

  var client: MongoClient = _
  var server: MongoServer = _

  /** Starts the mock server and connects a client; call at the top of each example. */
  def initMongo() = {
    val r = prepareMockMongo()
    client = r._1
    server = r._2
  }

  /** specs2 After hook: releases the client and shuts the mock server down. */
  override def after = {
    logger.info("Close all resources")
    client.close()
    server.shutdown()
  }
}
class IntegrationMongoStreamsSpec extends Specification {
import mongo._
import dsl._
import qb._
import Interaction._
import mongo.Order._
import MongoIntegrationEnv._
val P = scalaz.stream.Process
"Build query and perform findOne" in new MongoStreamsEnviroment {
initMongo()
val p = for { q ← "index" $eq 0 } yield q
val out = p.findOne(client, TEST_DB, LANGS).attemptRun
out.isRight === true
out.toOption.get.isRight === true
val r = out.toOption.get.toOption.get
r.get("index") === 0
}
"Build query and perform find batch" in new MongoStreamsEnviroment {
initMongo()
val p = for {
_ ← "index" $gte 0 $lte 5
_ ← "popularity_factor" $gte 0
_ ← sort("popularity_factor" -> Ascending, "index" -> Descending)
_ ← limit(4)
q ← skip(2)
} yield q
val out = p.list(client, TEST_DB, LANGS).attemptRun
out.isRight === true
out.toOption.get.isRight === true
val r = out.toOption.get.toOption.get
r.get(BatchPrefix).asInstanceOf[java.util.List[DBObject]].size() === 3
}
"Build query and perform streaming using scalaz.Process" in new MongoStreamsEnviroment {
initMongo()
implicit val cl = client
val q = for { ex ← "index" $gte 0 $lt 5 } yield ex
val buf = mutable.Buffer.empty[DBObject]
val sink = io.fillBuffer(buf)
val out = (q.stream[MProcess](TEST_DB, LANGS) to sink).run.attemptRun
out should be equalTo \\/-(())
langs.size === buf.size
}
"Build query and perform streaming using mongoStream" in new MongoStreamsEnviroment {
initMongo()
val field = "index"
val query = for {
_ ← field $gte 0 $lt 10
q ← sort(field -> Descending)
} yield q
val buffer = mutable.Buffer.empty[String]
val Sink = io.fillBuffer(buffer)
val p = (for {
element ← P.eval(Task.delay(client)).through(query.sChannel[MStream](TEST_DB, LANGS).column[String](field).out)
_ ← element to Sink
} yield ())
p.onFailure(th ⇒ P.eval(Task.delay(logger.info(s"Exception: ${th.getMessage}"))))
.onComplete(P.eval(Task.delay(logger.info(s"Interaction has been completed"))))
.runLog.run
langs.size === buffer.size
}
// One-to-many join: for each language index, stream the matching programmers.
"One to many join through sChannel with fixed columns" in new MongoStreamsEnviroment {
  initMongo()
  val buffer = mutable.Buffer.empty[String]
  val Sink = io.fillBuffer(buffer)
  //Select all lang
  val qLang = for {
    _ ← "index" $gte 0 $lt 10
    q ← limit(5)
  } yield q
  //Select all programmers by specific lang
  def qProgByLang(id: Int) = for { q ← "lang" $eq id } yield q
  // Left side emits language indexes; the right side is a function producing,
  // for a given index, the stream of programmer names using that language.
  val left = qLang.sChannel[MStream](TEST_DB, LANGS).column[Int]("index")
  val right: Int ⇒ mongo.query.DBChannel[MongoClient, String] =
    id ⇒
      qProgByLang(id).sChannel[MStream](TEST_DB, PROGRAMMERS).column[String]("name")
  (for {
    element ← P.eval(Task.delay(client)) through ((left innerJoin right) { (i, p) ⇒ s"[lang:$i/person:$p]" }.out)
    _ ← element to Sink
  } yield ())
    .onFailure(th ⇒ P.eval(Task.delay(logger.info(s"Exception: ${th.getMessage}"))))
    .onComplete(P.eval(Task.delay(logger.info(s"Interaction has been completed"))))
    .runLog.run
  logger.info(buffer)
  // Expects 10 joined rows — presumably fixed by the fixture data; TODO confirm.
  buffer.size === 10
}
// Same join as above, but the right-hand query is derived from the raw
// DBObject emitted on the left instead of a projected column.
"One to many join through sChannel with raw objects" in new MongoStreamsEnviroment {
  initMongo()
  val buffer = mutable.Buffer.empty[String]
  val Sink = io.fillBuffer(buffer)
  implicit val cl = client
  val qLang = for { q ← "index" $gte 0 $lt 10 } yield q
  // Builds the programmers query from the "index" field of each left document.
  def qProg(left: DBObject) = for { q ← "lang" $eq left.get("index").asInstanceOf[Int] } yield q
  val query = qLang.sChannel[MStream](TEST_DB, LANGS)
    .innerJoinRaw(qProg(_).sChannel[MStream](TEST_DB, PROGRAMMERS)) { (l, r) ⇒
      s"[lang:${l.get("name").asInstanceOf[String]}/person:${r.get("name").asInstanceOf[String]}]"
    }
  val p = for {
    element ← P.eval(Task.delay(client)) through query.out
    _ ← element to Sink
  } yield ()
  p.onFailure(th ⇒ P.eval(Task.delay(logger.info(s"Exception: ${th.getMessage}"))))
    .onComplete(P.eval(Task.delay(logger.info(s"Interaction has been completed"))))
    .runLog.run
  logger.info(buffer)
  buffer.size === 10
}
}
// source: haghard/mongo-query-streams — src/test/scala/mongo/query/test/IntegrationMongoStreamsSpec.scala (Scala, Apache-2.0)
// Nested-class fixture: Inner2 is doubly nested so that the projection type
// Outer#Inner#Inner2 used below exercises two levels of type projection.
class Outer {
  class Inner {
    class Inner2
  }
}
class HasA { type A } // carrier of an abstract type member, refined in Test.test
class Foo[A]          // simple generic class for the existential-type case below
// Positive compilation test: values whose precise path-dependent types "escape"
// their defining block must still conform to the annotated projection,
// refinement, or existential type. The bodies are never executed; only
// type-checking matters here.
object Test {
  def test = {
    // `new o.Inner` has path-dependent type o.Inner; once `o` goes out of
    // scope it must widen to the projection Outer#Inner.
    val a: Outer#Inner = {
      val o = new Outer
      new o.Inner
    }
    // Two levels of projection.
    val b: Outer#Inner#Inner2 = {
      val o = new Outer
      val i = new o.Inner
      new i.Inner2
    }
    // Refinement type: h.A is known to be Int, so the refined ascription holds
    // even after `h` escapes.
    val c: HasA { type A = Int } = {
      val h = new HasA {
        type A = Int
      }
      val x: HasA { type A = h.A } = h
      x
    }
    /* val d: Foo[Int] = {
      class Bar[B] extends Foo[B]
      new Bar[Int]
    }
    */
    // A locally defined class may escape only under an existential type.
    val e: Foo[_] = {
      class Bar[B11] extends Foo[B11]
      new Bar[Int]: Bar[_ <: Int]
    }
  }
}
// source: lampepfl/dotty — tests/pos/escapingRefs.scala (Scala, Apache-2.0)
import org.scalacheck._, Prop._, Gen._, Arbitrary._
import scala.reflect.runtime.universe._, Flag._
// Property suite verifying the standard Unliftable instances: quasiquote
// patterns of the form q"${x: T}" must extract plain Scala values back out of
// trees — the inverse direction of Liftable.
object UnliftableProps extends QuasiquoteProperties("unliftable") {
  property("unlift name") = test {
    val termname0 = TermName("foo")
    val typename0 = TypeName("foo")
    val q"${termname1: TermName}" = Ident(termname0)
    assert(termname1 == termname0)
    val q"${typename1: TypeName}" = Ident(typename0)
    assert(typename1 == typename0)
    // The generic Name extractor must accept both term and type names.
    val q"${name1: Name}" = Ident(termname0)
    assert(name1 == termname0)
    val q"${name2: Name}" = Ident(typename0)
    assert(name2 == typename0)
  }
  property("unlift type") = test {
    val q"${tpe: Type}" = TypeTree(typeOf[Int])
    assert(tpe =:= typeOf[Int])
  }
  property("unlift constant") = test {
    val q"${const: Constant}" = Literal(Constant("foo"))
    assert(const == Constant("foo"))
  }
  // Each primitive type round-trips through its dedicated Unliftable instance.
  property("unlift char") = test {
    val q"${c: Char}" = Literal(Constant('0'))
    assert(c.isInstanceOf[Char] && c == '0')
  }
  property("unlift byte") = test {
    val q"${b: Byte}" = Literal(Constant(0: Byte))
    assert(b.isInstanceOf[Byte] && b == 0)
  }
  property("unlift short") = test {
    val q"${s: Short}" = Literal(Constant(0: Short))
    assert(s.isInstanceOf[Short] && s == 0)
  }
  property("unlift int") = test {
    val q"${i: Int}" = Literal(Constant(0: Int))
    assert(i.isInstanceOf[Int] && i == 0)
  }
  property("unlift long") = test {
    val q"${l: Long}" = Literal(Constant(0L: Long))
    assert(l.isInstanceOf[Long] && l == 0L)
  }
  property("unlift float") = test {
    val q"${f: Float}" = Literal(Constant(0.0f: Float))
    assert(f.isInstanceOf[Float] && f == 0.0f)
  }
  property("unlift double") = test {
    val q"${d: Double}" = Literal(Constant(0.0: Double))
    assert(d.isInstanceOf[Double] && d == 0.0)
  }
  property("unlift bool") = test {
    val q"${b: Boolean}" = q"true"
    assert(b.isInstanceOf[Boolean] && b == true)
  }
  property("unlift string") = test {
    val q"${s: String}" = q""" "foo" """
    assert(s.isInstanceOf[String] && s == "foo")
  }
  property("unlift scala.symbol") = test {
    val q"${s: scala.Symbol}" = q"'foo"
    assert(s.isInstanceOf[scala.Symbol] && s == 'foo)
  }
  // Suite-local instance: unlift a List[T] from a `List(...)` application tree,
  // provided every element tree is itself unliftable to T.
  implicit def unliftList[T: Unliftable]: Unliftable[List[T]] = Unliftable {
    case q"scala.collection.immutable.List(..$args)" if args.forall { implicitly[Unliftable[T]].unapply(_).nonEmpty } =>
      val ut = implicitly[Unliftable[T]]
      args.flatMap { ut.unapply(_) }
  }
  property("unlift list (1)") = test {
    val orig = List(1, 2)
    val q"${l1: List[Int]}" = q"$orig" // q"List(1, 2)"
    assert(l1 == orig)
    val q"f(..${l2: List[Int]})" = q"f(..$orig)" // q"f(1, 2)
    assert(l2 == orig)
  }
  property("unlift list (2)") = test {
    val orig2 = List(List(1, 2), List(3))
    val q"f(${l3: List[List[Int]]})" = q"f($orig2)" // q"f(List(List(1, 2), List(3)))
    assert(l3 == orig2)
    val q"f(..${l4: List[List[Int]]})" = q"f(..$orig2)" // q"f(List(1, 2), List(3))"
    assert(l4 == orig2)
    val q"f(...${l5: List[List[Int]]})" = q"f(...$orig2)" // q"f(1, 2)(3)
    assert(l5 == orig2)
  }
  // Non-tree unquotees (names, modifiers) are matched structurally rather than
  // through an Unliftable instance.
  property("don't unlift non-tree unquotee (1)") = test {
    val q"${a: TermName}.${b: TermName}" = q"a.b"
    assert(a == TermName("a"))
    assert(b == TermName("b"))
  }
  property("don't unlift non-tree unquotee (2)") = test {
    val q"${mods: Modifiers} def foo" = q"def foo"
    assert(mods == Modifiers(DEFERRED))
  }
  // Tuples of arity 2..22 unlift from the corresponding tuple syntax.
  // Tuple1 has no literal syntax, hence the commented-out t1 case below.
  property("unlift tuple") = test {
    val q"${t2: (Int, Int)}" = q"(1, 2)"
    val q"${t3: (Int, Int, Int)}" = q"(1, 2, 3)"
    val q"${t4: (Int, Int, Int, Int)}" = q"(1, 2, 3, 4)"
    val q"${t5: (Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5)"
    val q"${t6: (Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6)"
    val q"${t7: (Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7)"
    val q"${t8: (Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8)"
    val q"${t9: (Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9)"
    val q"${t10: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)"
    val q"${t11: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11)"
    val q"${t12: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)"
    val q"${t13: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)"
    val q"${t14: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14)"
    val q"${t15: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15)"
    val q"${t16: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16)"
    val q"${t17: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)"
    val q"${t18: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18)"
    val q"${t19: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)"
    val q"${t20: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20)"
    val q"${t21: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21)"
    val q"${t22: (Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int)}" = q"(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)"
    // assert(t1 == Tuple1(1))
    assert(t2 == (1, 2))
    assert(t3 == (1, 2, 3))
    assert(t4 == (1, 2, 3, 4))
    assert(t5 == (1, 2, 3, 4, 5))
    assert(t6 == (1, 2, 3, 4, 5, 6))
    assert(t7 == (1, 2, 3, 4, 5, 6, 7))
    assert(t8 == (1, 2, 3, 4, 5, 6, 7, 8))
    assert(t9 == (1, 2, 3, 4, 5, 6, 7, 8, 9))
    assert(t10 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
    assert(t11 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
    assert(t12 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))
    assert(t13 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))
    assert(t14 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))
    assert(t15 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))
    assert(t16 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))
    assert(t17 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))
    assert(t18 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18))
    assert(t19 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19))
    assert(t20 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20))
    assert(t21 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21))
    assert(t22 == (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22))
  }
  // Custom Unliftable: xml literals desugar to `new scala.xml.Comment(...)`,
  // from which the original comment text is recovered.
  property("unlift xml comment") = test {
    implicit val unliftXmlComment = Unliftable[xml.Comment] {
      case q"new _root_.scala.xml.Comment(${value: String})" => xml.Comment(value)
    }
    val q"${comment: xml.Comment}" = q"<!--foo-->"
    assert(comment.commentText == "foo")
  }
}
// source: felixmulder/scala — test/files/scalacheck/quasiquotes/UnliftableProps.scala (Scala, BSD-3-Clause)
package com.twitter.finatra.http.modules
import com.twitter.finatra.http.exceptions.DefaultExceptionMapper
import com.twitter.finatra.http.internal.exceptions.{ExceptionManager, FinatraDefaultExceptionMapper}
import com.twitter.finatra.http.internal.exceptions.json.{JsonParseExceptionMapper, CaseClassExceptionMapper}
import com.twitter.inject.{TwitterModule, Injector, InjectorModule}
// Guice module wiring Finatra's exception-mapping infrastructure: binds the
// fallback mapper and registers the JSON-specific mappers at startup.
object ExceptionMapperModule extends TwitterModule {
  // InjectorModule is required so singletonStartup can resolve the Injector.
  override val modules = Seq(InjectorModule)
  override def configure() {
    // Fallback used when no registered mapper handles the exception type.
    bindSingleton[DefaultExceptionMapper].to[FinatraDefaultExceptionMapper]
  }
  override def singletonStartup(injector: Injector) {
    // Register JSON parse / case-class validation mappers once the injector is up.
    val manager = injector.instance[ExceptionManager]
    manager.add[JsonParseExceptionMapper]
    manager.add[CaseClassExceptionMapper]
  }
}
// source: tom-chan/finatra — http/src/main/scala/com/twitter/finatra/http/modules/ExceptionMapperModule.scala (Scala, Apache-2.0)
package org.http4s.client.oauth1
import org.http4s._
import org.http4s.client.oauth1
import org.http4s.util.CaseInsensitiveString
import org.specs2.mutable.Specification
// Tests for the OAuth 1.0 signing helpers. Fixture values are copied verbatim
// from the worked examples in the OAuth Core 1.0 spec and RFC 5849, so the
// expected base string and signature are known constants.
class OAuthTest extends Specification {
  // some params taken from http://oauth.net/core/1.0/#anchor30, others from
  // http://tools.ietf.org/html/rfc5849
  val Right(uri) = Uri.fromString("http://photos.example.net/photos")
  val consumer = oauth1.Consumer("dpf43f3p2l4k3l03", "kd94hf93k423kf44")
  val token = oauth1.Token("nnch734d00sl2jdk", "pfkkdhi9sl3r4s00")
  val userParams = Seq(
    "file" -> "vacation.jpg",
    "size" -> "original"
  )
  // Full protocol + user parameter set used to build the signature base string.
  val allParams = Seq(
    "oauth_consumer_key" -> "dpf43f3p2l4k3l03",
    "oauth_token" -> "nnch734d00sl2jdk",
    "oauth_signature_method" -> "HMAC-SHA1",
    "oauth_timestamp" -> "1191242096",
    "oauth_nonce" -> "kllo9940pd9333jh",
    "oauth_version" -> "1.0"
  ) ++ userParams
  // RFC 5849 §3.4.1.3 example parameters (duplicates and empty values included).
  val params2 = Seq(
    "b5" -> Some("=%3D"),
    "a3" -> Some("a"),
    "c@" -> None,
    "a2" -> Some("r b"),
    "oauth_consumer_key" -> Some("9djdj82h48djs9d2"),
    "oauth_token" -> Some("kkk9d7dh3k39sjv7"),
    "oauth_signature_method" -> Some("HMAC-SHA1"),
    "oauth_timestamp" -> Some("137131201"),
    "oauth_nonce" -> Some("7d8f3e4a"),
    "c2" -> None,
    "a3" -> Some("2 q")
  )
  // Expected signature base string from the OAuth 1.0 spec example.
  val specBaseString = "GET&http%3A%2F%2Fphotos.example.net%2Fphotos&file%3Dvacation.jpg%26oauth_consumer_key%" +
    "3Ddpf43f3p2l4k3l03%26oauth_nonce%3Dkllo9940pd9333jh%26oauth_signature_method%3DHMAC-SHA1%26oauth_timestamp%" +
    "3D1191242096%26oauth_token%3Dnnch734d00sl2jdk%26oauth_version%3D1.0%26size%3Doriginal"
  "OAuth support" should {
    "generate a Base String" in {
      oauth1.genBaseString(Method.GET, uri, allParams) must_== specBaseString
    }
    "Generate correct SHA1 signature" in {
      // Expected HMAC-SHA1 value taken from the spec example.
      oauth1.makeSHASig(specBaseString, consumer, Some(token)) must_== "tR3+Ty81lMeYAr/Fid0kMTYa/WM="
    }
    "generate a Authorization header" in {
      val auth = oauth1.genAuthHeader(Method.GET, uri, userParams, consumer, None, None, Some(token))
      val creds = auth.credentials
      creds.authScheme must_== CaseInsensitiveString("OAuth")
    }
  }
  "RFC 5849 example" should {
    // US-ASCII encoder matches the charset used by the RFC example body.
    implicit def urlFormEncoder : EntityEncoder[UrlForm] = UrlForm.entityEncoder(Charset.`US-ASCII`)
    val Right(uri) = Uri.fromString("http://example.com/request?b5=%3D%253D&a3=a&c%40=&a2=r%20b")
    val Right(body) = UrlForm.decodeString(Charset.`US-ASCII`)("c2&a3=2+q")
    val req = Request(method = Method.POST, uri = uri).withBody(body).unsafeRun()
    "Collect proper params, pg 22" in {
      // Parameters must be gathered from both the query string and the body.
      oauth1.getUserParams(req).unsafeRun._2.sorted must_== Seq(
        "b5" -> "=%3D",
        "a3" -> "a",
        "c@" -> "",
        "a2" -> "r b",
        "c2" -> "",
        "a3" -> "2 q"
      ).sorted
    }
  }
}
// source: ZizhengTai/http4s — client/src/test/scala/org/http4s/client/oauth1/OAuthTest.scala (Scala, Apache-2.0)
package hevs.especial.dsl.components
import java.io.IOException
import grizzled.slf4j.Logging
import hevs.especial.dsl.components.core.Constant
import hevs.especial.utils.{ComponentNotFound, CycleException, IoTypeMismatch}
import scala.language.higherKinds
import scalax.collection.GraphPredef._
import scalax.collection.constrained.mutable.Graph
import scalax.collection.constrained.{Config, ConstraintCompanion}
import scalax.collection.edge.LDiEdge
/**
* Object used to store all components declared in the code.
*
* Components are stored in single and shared graph.
* This data structure is useful to find any types of components, connected or not, to fin its direct successors, etc.
*
* Each component is identified by a unique generated ID (see [[hevs.especial.dsl.components.ComponentManager.IdGenerator]].
* It can be stored only once in the graph. A [[Wire]] is used as an edge label to store a connection composed by two
* components ports (an [[OutputPort]] source and an [[InputPort]] destination).
*
* The component graph is a directed acyclic graph (`DAG`). A dynamic constraint is used to prevent addition of
* cyclic nodes or edges to the graph.
*
* @version 2.1
* @author Christopher Metrailler (mei@hevs.ch)
*/
object ComponentManager extends Logging {
/* Dynamic acyclic constraint definition for the graph. */
object AcyclicWithException {
  import scalax.collection.constrained.Graph
  import scalax.collection.constrained.constraints.{Acyclic => AcyclicBase}
  // Acyclic constraint companion that reports violations by throwing instead of
  // silently refusing the addition (the scalax library default behaviour).
  object Acyclic extends ConstraintCompanion[AcyclicBase] {
    @throws[CycleException]("If a cycle is found (graph constraint to be a DAG).")
    def apply[N, E[X] <: EdgeLikeIn[X]](self: Graph[N, E]) =
      new AcyclicBase[N, E](self) {
        override def onAdditionRefused(refusedNodes: Iterable[N],
                                       refusedEdges: Iterable[E[N]],
                                       graph: Graph[N, E]) = {
          // Throw an exception if the node or edges cannot be added to the graph.
          // The refused edge's label is the Wire that would have closed a cycle.
          val label = refusedEdges.head.label.asInstanceOf[Wire]
          throw CycleException(label)
        }
      }
  }
}
import hevs.especial.dsl.components.ComponentManager.AcyclicWithException._
/** Constraint of the graph. The graph must be acyclic and unconnected nodes are allowed. */
implicit val config: Config = Acyclic // && Connected
/** Mutable graph representation of all the components of the program. */
protected val cpGraph: Graph[Component, LDiEdge] = Graph.empty[Component, LDiEdge]
// Used to generate a unique ID for each component (and its ports).
private val cmpIdGen: IdGenerator = {
  val g = new IdGenerator()
  g.reset() // numbering starts at 0
  g
}
/**
 * Create a unique component id to store in the graph.
 * @return a unique component id (monotonically increasing, starting at 0)
 */
def nextComponentId() = cmpIdGen.nextId
/**
 * Insert a component in the graph.
 *
 * Each component has a unique ID and can be only once in the graph.
 *
 * If the component already exist in the graph, then the existing node is returned.
 * This works only if `equals` and `hashcode` functions of the components are implemented.
 *
 * If the component already exist, but with another type, an exception is thrown. This is the case when an I/O is
 * used twice on the same pin, with different functions (PwmOutput and DigitalOutput for instance).
 *
 * @param node the component to add in the graph (as node)
 * @return `None` if the component has been added successfully, or the existing instance if already in the graph
 */
@throws[CycleException]("If a cycle is found in the graph (not a DAG).")
@throws[IoTypeMismatch]("If an I/O is already configured with another type.")
@throws[IOException]("If the node cannot be added into the graph.")
def addComponent(node: Component): Option[node.type] = {
  // Try to add the component as a node to the graph. Not added if it exist already.
  // Components ports must be connected manually.
  val extNodes = cpGraph.nodes.filter(p => p.value == node)
  if (extNodes.size > 0) {
    logger.trace(s"Component $node already exist in the graph.")
    // If the component already exist, check if it has the same function (class) as the current component.
    // If not (example: PwmOutput and DigitalOutput), an exception is thrown. See issue #14.
    val cmp = extNodes.head.value
    if (cmp.getClass != node.getClass) {
      val extCmp = cmp.asInstanceOf[Component]
      throw IoTypeMismatch(extCmp, node) // Already used with another type
    }
    else {
      // Return the existing component, directly with the correct type.
      // The type conversion is safe.
      Some(cpGraph.get(node).value.asInstanceOf[node.type])
    }
  }
  else {
    // NOTE: return value is inverted w.r.t. intuition — None means "added".
    cpGraph.add(node) match {
      case true =>
        logger.trace(s"Component $node added to the graph.")
        None
      case _ => throw new IOException("Unable to add the component to the graph !")
    }
  }
}
/**
 * Remove the component of the graph.
 * All edges of the node (from/to the node) are removed automatically. Connected ports with the component are
 * disconnected.
 *
 * @param cpId the ID of the component to remove
 * @return `true` if successfully removed, `false` otherwise
 */
def removeComponent(cpId: Int): Boolean = {
  // Before removing the component, ports must be disconnected manually
  val nOpt = cpGraph.nodes.find(n => n.value.asInstanceOf[Component].getId == cpId)
  if (nOpt.isEmpty)
    return false // Node not found
  // Disconnected all ports connected with the component to remove
  val node = nOpt.get
  for (e <- node.edges) {
    val label: Wire = e.label.asInstanceOf[Wire]
    label.from.disconnect() // Disconnect the output "from"
    label.to.disconnect() // Also disconnect the input "to" (this component)
  }
  // Finally remove the component and its edges
  cpGraph.remove(node)
}
/**
 * Order of the graph.
 * @return the number of nodes in the graph (order of the graph)
 */
def numberOfNodes = cpGraph.order
/**
 * @return the number of edges (wires) currently in the graph
 */
def numberOfEdges = cpGraph.edges.size
def getDotGraph = cpGraph // Used by the `dot` generator
/**
 * Remove all components from the graph and clear all previous IDs.
 */
def reset(): Unit = {
  cpGraph.clear()
  cmpIdGen.reset() // Restart id generation from 0
}
/**
 * Add a connection between two [[Port]].
 *
 * 1) Owners of port must be in the graph
 * 2) The input must be unconnected
 *
 * @param from port from
 * @param to port to
 * @return nothing; the graph is mutated in place (a labelled edge is added)
 */
def addWire[T <: CType](from: OutputPort[T], to: InputPort[T]): Unit = {
  // Get components "from" and "to". These components must be in the graph, or an exception is thrown.
  val (cpFrom, cpTo) = (cp(from.getOwnerId), cp(to.getOwnerId))
  // Constructing the Wire connects both ports (the asserts below check that).
  val w = new Wire(from, to) // Add a wire between the two ports
  assert(from.isConnected, "From port not connected !")
  assert(to.isConnected, "To port not connected !")
  // Add the connection (wire) between these to ports.
  // The edge is directed with a key label. The label must be a key because an output can be connected to multiple
  // inputs. It must be possible to add multiple wire from an to the same nodes, with different labels.
  import scalax.collection.edge.Implicits._
  val outer = (cpFrom ~+#> cpTo)(w)
  cpGraph += outer
}
/**
 * Get a Component from a node graph by its id.
 * @see getNode
 * @param cpId the component id to search for
 * @return the component node or an exception if not found
 */
@throws[ComponentNotFound]("If the component is not in the graph.")
private def cp(cpId: Int): Component = {
  getNode(cpId).value.asInstanceOf[Component]
}
/**
 * Search a node in the graph by ID.
 * Return the node in the graph with the corresponding ID. An exception is thrown in the component was not found.
 * All nodes of the graph have a unique id. Only one unique component can be returned.
 *
 * @param cpId the component id to search for
 * @return the component as a graph node (`Component` as value, with edges)
 */
@throws[ComponentNotFound]("If the component is not in the graph.")
def getNode(cpId: Int): cpGraph.NodeT = {
  cpGraph.nodes find (c => c.value.asInstanceOf[Component].getId == cpId) match {
    case Some(c) => c
    case None =>
      // Fatal exception: must be in the graph
      throw ComponentNotFound(cpId)
  }
}
/**
 * Return all nodes of the graph as [[Component]]s.
 * @return all graph nodes as a [[Set]] of [[Component]]
 */
def getComponents: Set[Component] = {
  cpGraph.nodes.map(node => node.value.asInstanceOf[Component]).toSet
}
/**
 * @return the number of connected nodes (total nodes minus the unconnected ones)
 */
def numberOfConnectedHardware() = cpGraph.nodes.size - numberOfUnconnectedHardware()
/**
 * @see findUnconnectedComponents
 * @return the number of unconnected nodes
 */
def numberOfUnconnectedHardware() = findUnconnectedComponents.size
/**
 * Return all unconnected nodes of the graph.
 * A component is considered as unconnected if it has at least one input or output and no connections to other
 * components.
 * A component without input and output (total of 0 I/O) is considered as connected and its code will be generated.
 *
 * @see findConnectedInputHardware
 * @return all unconnected nodes (with at least one input or output)
 */
def findUnconnectedComponents: Set[Component] = {
  val nc = cpGraph.nodes filter { c =>
    val cp = c.value.asInstanceOf[Component]
    // If no I/O, NOT considered has unconnected
    val io = cp.getInputs.getOrElse(Nil) ++ cp.getOutputs.getOrElse(Nil)
    // degree == 0: no edge at all to/from this node
    c.degree == 0 && io.length != 0
  }
  nc.map(x => x.value.asInstanceOf[Component]).toSet
}
/**
 * Find all connected inputs nodes. An input node is a node without direct predecessor. To be considered as
 * connected, a node must have at least one input or output and connected with at least one other node.
 * A component without input and output is considered as connected.
 *
 * @see findUnconnectedComponents
 * @return list of connected inputs
 */
def findConnectedInputHardware: Set[Component] = findConnectedIOHardware(input = true)
/**
 * Find all connected outputs nodes. An output node is a node without direct successors. To be considered as
 * connected, a node must have at least one input or output and connected with at least one other node.
 * A component without input and output is considered as connected.
 *
 * @see findUnconnectedComponents
 * @return list of connected outputs
 */
def findConnectedOutputHardware: Set[Component] = findConnectedIOHardware(input = false)
// Find connected input or output in the graph
private def findConnectedIOHardware(input: Boolean) = {
  val ret = cpGraph.nodes.filter { c =>
    val cp = c.value.asInstanceOf[Component]
    val io = cp.getInputs.getOrElse(Nil) ++ cp.getOutputs.getOrElse(Nil)
    if (input) // Input = no direct predecessors or '0' I/O
      c.diPredecessors.isEmpty && c.edges.size > 0 || io.size == 0
    else // Output = no direct successors
      c.diSuccessors.isEmpty && c.edges.size > 0
  }
  // Return the node value as a Component
  ret.map(x => x.value.asInstanceOf[Component]).toSet
}
/**
 * Get the [[OutputPort]] connected with the specified [[InputPort]].
 *
 * If the input is not connected, the constant value '0' is returned as dummy value.
 *
 * @version 2.0
 * @param port the port to search is input
 * @return the [[OutputPort]] connected with the input port
 */
def findPredecessorOutputPort(port: InputPort[CType]): OutputPort[CType] = {
  val cp = cpGraph.nodes find (c => c.value.asInstanceOf[Component].equals(port.getOwner))
  val edges = cp.get.edges // all connections of this component (from and to components)
  // Search the corresponding wire. Should be only one.
  val connections = edges filter {
    w => w.label.asInstanceOf[Wire].to == port &&
      w.label.asInstanceOf[Wire].from.getOwnerId != port.getOwnerId
  }
  if (connections.size == 0) {
    // Port not found. The input is NOT connected...
    // NOTE(review): constructing this fallback Constant may register a new
    // component as a side effect — confirm against Component's constructor.
    val cst = Constant[uint8](uint8(0))
    cst.out // Return the constant value '0' as dummy value.
  }
  else
    connections.head.label.asInstanceOf[Wire].from // Return the connected port
}
/**
 * Helper class used to generate a unique ID.
 *
 * Each component stored in the graph has a unique ID, used to compare graph
 * nodes. Ports share the same generator so that connections (wires) can be
 * identified unambiguously.
 */
private[components] class IdGenerator {
  // Next value to hand out; starts at 0 and only increases until reset().
  private var counter: Int = 0
  /**
   * Generate a new unique ID for component and ports.
   * @return a new unique id
   */
  def nextId = {
    val issued = counter
    counter += 1
    issued
  }
  /**
   * Reset the generator. Next id will be '0'.
   */
  def reset(): Unit = {
    counter = 0
  }
}
}
// source: hevs-isi/especial-frontend — src/main/scala/hevs/especial/dsl/components/ComponentManager.scala (Scala, MIT)
package beppo2k.ftp.server.command
import java.nio.channels.SelectionKey
import java.nio.channels.Selector
import java.nio.channels.SocketChannel
import beppo2k.ftp.server.datatransfer.handler.{StorHandler, RetrHandler, NlstHandler}
import beppo2k.ftp.server.{FtpUserSession, DataTransferSession, DataTransferActor}
import beppo2k.ftp.server.datatransfer.Add
import beppo2k.ftp.util.{Log, FileUtil, NetworkUtil}
import java.io.File
// Base type for all FTP control-connection commands. `argument` is the raw
// text following the command verb; `execute` runs against the client's
// SocketChannel registered with the given Selector/SelectionKey.
abstract class FtpCommand(argument:String) {
  def execute(selector:Selector , key:SelectionKey)
}
/** USER command: stores the supplied user name in the per-client session and
  * asks for a password (reply 331 "Please specify the password"). */
class User(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("User start")
    val socketChannel = key.channel().asInstanceOf[SocketChannel];
    // Sessions are keyed by the client's remote socket address.
    val remoteAddr = socketChannel.socket().getRemoteSocketAddress().toString()
    val userSession = FtpUserSession.get(remoteAddr, selector, key)
    userSession.username = this.argument
    val response = new FtpCommandResponse(
      FtpReturnCode.USER_NAME_OKAY,
      "Please specify the password")
    socketChannel.write(response.toBytes())
    Log.info("User end")
  }
}
/** PASS command: stores the password, attempts the login and replies 230 on
  * success or 530 on failure. */
class Pass(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Pass start")
    val socketChannel = key.channel().asInstanceOf[SocketChannel]
    val remoteAddr = socketChannel.socket().getRemoteSocketAddress().toString()
    val userSession = FtpUserSession.get(remoteAddr, selector, key)
    userSession.password = this.argument
    // USER/PASS have both been collected at this point; login() validates them.
    val response =
      if (userSession.login())
        new FtpCommandResponse(
          FtpReturnCode.USER_LOGGED_IN,
          "Login successful")
      else
        new FtpCommandResponse(
          FtpReturnCode.NOT_LOGGED_IN,
          "Login incorrect")
    socketChannel.write(response.toBytes())
    Log.info("Pass end")
  }
}
/** CWD command: changes the session's current directory, replying 250 on
  * success or 550 on failure. */
class Cwd(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Cwd start")
    val socketChannel = key.channel().asInstanceOf[SocketChannel]
    val remoteAddr = socketChannel.socket().getRemoteSocketAddress().toString()
    val userSession = FtpUserSession.get(remoteAddr, selector, key)
    // The session validates and applies the requested directory change.
    val response =
      if (userSession.changeCurrentDir(argument))
        new FtpCommandResponse(
          FtpReturnCode.REQUESTED_FILE_ACTION_OKAY ,
          "Directory successfully changed")
      else
        new FtpCommandResponse(
          FtpReturnCode.REQUESTED_ACTION_NOT_TAKEN_PERMISSION_OR_SYSTEM,
          "Failed to change directory")
    socketChannel.write(response.toBytes())
    Log.info("Cwd end")
  }
}
/** QUIT command: drops the per-client session state and closes the control
  * connection (no reply is written before closing). */
class Quit(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Quit start")
    val socketChannel = key.channel().asInstanceOf[SocketChannel];
    FtpUserSession.clear(socketChannel.socket().getRemoteSocketAddress().toString())
    socketChannel.close()
    Log.info("Quit end")
  }
}
/** PASV command: opens a server-side listening data channel and tells the
  * client where to connect, encoded as "h1,h2,h3,h4,p1,p2" (reply 227). */
class Pasv(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Pasv start")
    val channel = key.channel().asInstanceOf[SocketChannel];
    val addr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(addr , selector , key)
    // Register the passive data channel with the transfer actor; it will
    // accept the incoming data connection from the client.
    val actRef = DataTransferActor.get
    val dataTransferSession = new DataTransferSession(session , None)
    actRef ! Add(dataTransferSession.sourceChannel , SelectionKey.OP_ACCEPT)
    val msg = String.format(
      "Entering Passive Mode (%s)",
      createAddr(dataTransferSession.port))
    val res = new FtpCommandResponse(
      FtpReturnCode.ENTERING_PASSIVE_MODE,
      msg)
    channel.write(res.toBytes())
    Log.info("Pasv end")
  }

  /** Encodes the local IPv4 address plus the given port in the PASV
    * "h1,h2,h3,h4,p1,p2" format: dots become commas and the port is split
    * into its high and low bytes (RFC 959). */
  private def createAddr(port: Int): String = {
    val portHigh: Int = (port & 0xff00) >>> 8
    val portLow: Int = port & 0x00ff
    NetworkUtil.getLocalIpv4Address match {
      case Some(ip) =>
        // InetAddress.toString has a leading '/', which is stripped here.
        ip.toString().replaceFirst("/", "").replaceAll("\\.", ",") + "," + portHigh + "," + portLow
      case None =>
        // Was `throw new Exception()`: a typed, messaged exception makes the
        // failure diagnosable. Still an Exception, so existing handlers apply.
        throw new IllegalStateException("PASV: no local IPv4 address available")
    }
  }
}
/** PORT command: parses the client-supplied active-mode address
  * ("h1,h2,h3,h4,p1,p2"), initiates a data connection towards it and
  * replies 200. Any failure is logged and swallowed (no error reply). */
class Port(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    try {
      Log.info("Port start")
      val channel = key.channel().asInstanceOf[SocketChannel];
      val addr = channel.socket().getRemoteSocketAddress().toString()
      val session = FtpUserSession.get(addr , selector , key)
      val (ip, port) = parsePortCommand(argument)
      // Active mode: the server connects out to the address the client gave.
      val dataTransferSession = new DataTransferSession(session , Some((ip, port)))
      val actRef = DataTransferActor.get
      actRef ! Add(dataTransferSession.sourceChannel , SelectionKey.OP_CONNECT)
      val res = new FtpCommandResponse(
        FtpReturnCode.COMMAND_OKAY,
        "PORT command successful")
      channel.write(res.toBytes())
      Log.info("Port end")
    } catch {
      case e: Exception =>
        e.printStackTrace()
        Log.error("[%s]" , e.getMessage())
    }
  }

  /** Splits "h1,h2,h3,h4,p1,p2" into the dotted IPv4 address and the port.
    * Per RFC 959 the port is p1 * 256 + p2; the previous implementation built
    * the same value through a fragile hex-string round trip
    * (`formatted("%02x")` + `Integer.parseInt(_, 16)`), which silently
    * miscomputed for out-of-range byte values. */
  private def parsePortCommand(arg: String): (String, Int) = {
    val parts = arg.split(",")
    // Malformed input raises IllegalArgumentException, caught (and logged)
    // by the handler in execute() just like the previous out-of-bounds error.
    require(parts.length >= 6, s"Malformed PORT argument: $arg")
    val ip = parts.slice(0, 4).mkString(".")
    val port = (parts(4).trim.toInt << 8) | parts(5).trim.toInt
    (ip, port)
  }
}
// STOR command: prepare to receive a file upload over the data connection.
class Stor(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Stor start")
    val channel = key.channel().asInstanceOf[SocketChannel];
    val addr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(addr , selector , key)
    // Absolute paths (Unix "/..." or Windows "C:\...") are used as-is; anything
    // else is resolved against the session's current directory.
    val filePath:String = argument match {
      case f if f.startsWith("/") || f.startsWith("C:" + File.separator) => {
        val newPath = f
        newPath
      }
      case f => {
        val newPath = session.currentDir + File.separator + f
        newPath
      }
    }
    // The handler performs the actual transfer on the data connection; it keeps
    // the control-connection selector/key to send the completion reply later.
    // ("comanndSelector" is the field's actual — misspelled — name.)
    val handler = new StorHandler(filePath)
    session.handler = handler
    handler.comanndSelector = selector
    handler.commandConnectionKey = key
    val res = new FtpCommandResponse(
      FtpReturnCode.FILE_STATUS_OKAY ,
      "Ok to send data")
    val bb = res.toBytes()
    channel.write(bb)
    Log.info("Stor end")
    // Flag checked by the data-transfer side before moving any bytes.
    session.canTransfer = true
  }
}
// RETR command: prepare to send a file download over the data connection.
class Retr(argument: String) extends FtpCommand(argument: String) {
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Retr start")
    val channel = key.channel().asInstanceOf[SocketChannel];
    val addr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(addr , selector , key)
    // Absolute paths (Unix "/..." or Windows "C:\...") are used as-is; anything
    // else is resolved against the session's current directory.
    val filePath:String = argument match {
      case f if f.startsWith("/") || f.startsWith("C:" + File.separator) => {
        val newPath = f
        newPath
      }
      case f => {
        val newPath = session.currentDir + File.separator + f
        newPath
      }
    }
    // The handler streams the file out on the data connection; it keeps the
    // control-connection selector/key to send the completion reply later.
    val handler = new RetrHandler(filePath)
    session.handler = handler
    handler.comanndSelector = selector
    handler.commandConnectionKey = key
    val res = new FtpCommandResponse(
      FtpReturnCode.FILE_STATUS_OKAY ,
      "Opening BINARY mode data connection for ")
    val bb = res.toBytes()
    channel.write(bb)
    Log.info("Retr end")
    // Flag checked by the data-transfer side before moving any bytes.
    session.canTransfer = true
  }
}
class Rmd(argument: String) extends FtpCommand(argument: String) {
  /** Handles the RMD command: recursively removes the named directory and
    * reports success or failure on the control connection. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Rmd start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val addr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(addr, selector, key)
    // Absolute paths are used as-is; relative ones resolve against the
    // session's current directory.
    val dir =
      if (argument.startsWith("/") || argument.startsWith("C:" + File.separator)) argument
      else session.currentDir + File.separator + argument
    // Deletion is delegated to FileUtil.deleteDir; the reply code depends on
    // its boolean outcome.
    val bb =
      if (FileUtil.deleteDir(new File(dir))) {
        new FtpCommandResponse(
          FtpReturnCode.FILE_STATUS_OKAY,
          "Succeed to delete directory").toBytes()
      } else {
        new FtpCommandResponse(
          FtpReturnCode.REQUESTED_ACTION_NOT_TAKEN_PERMISSION_OR_SYSTEM,
          "Failed to delete directory").toBytes()
      }
    channel.write(bb)
    Log.info("Rmd end")
  }

  /** Recursively deletes `file` and everything beneath it.
    * Bug fix: previously a directory's children were deleted but the (now
    * empty) directory itself was left behind; it is now removed as well.
    * NOTE(review): this helper is currently unused — execute() delegates to
    * FileUtil.deleteDir instead. */
  private def delete(file: File): Unit = {
    if (file.exists()) {
      if (file.isDirectory()) {
        file.listFiles().foreach(delete)
        // Remove the directory itself once its contents are gone.
        file.delete()
      } else {
        file.delete()
      }
    }
  }
}
class Mkd(argument: String) extends FtpCommand(argument: String) {
  /** Handles the MKD command: creates the requested directory (including any
    * missing parents) and reports the outcome on the control connection. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Mkd start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val addr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(addr, selector, key)
    // Absolute paths are used as-is; relative ones resolve against the
    // session's current directory.
    val dir =
      if (argument.startsWith("/") || argument.startsWith("C:" + File.separator)) argument
      else session.currentDir + File.separator + argument
    val created = new File(dir).mkdirs()
    val reply =
      if (created)
        new FtpCommandResponse(
          FtpReturnCode.FILE_STATUS_OKAY,
          "Succeed to create directory")
      else
        new FtpCommandResponse(
          FtpReturnCode.REQUESTED_ACTION_NOT_TAKEN_PERMISSION_OR_SYSTEM,
          "Failed to create directory")
    channel.write(reply.toBytes())
    Log.info("Mkd end")
  }
}
class Pwd(argument: String) extends FtpCommand(argument: String) {
  /** Handles the PWD command: reports the session's current working
    * directory on the control connection. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Pwd start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val remoteAddr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(remoteAddr, selector, key)
    val reply = new FtpCommandResponse(
      FtpReturnCode.PATHNAME_CREATED,
      session.getCurrentDir())
    channel.write(reply.toBytes())
    Log.info("Pwd end")
  }
}
class List(argument: String) extends FtpCommand(argument: String) {
  /** Handles the LIST command: registers an NlstHandler that will stream a
    * directory listing over the data connection, then acknowledges. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("List start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val remoteAddr = channel.socket().getRemoteSocketAddress().toString()
    val session = FtpUserSession.get(remoteAddr, selector, key)
    // A non-empty argument requests the detailed (long) listing format.
    val wantsDetail = argument != null && !argument.equals("")
    val handler = new NlstHandler(session.currentDir, wantsDetail)
    handler.comanndSelector = selector
    handler.commandConnectionKey = key
    session.handler = handler
    val reply = new FtpCommandResponse(
      FtpReturnCode.FILE_STATUS_OKAY,
      "Here comes the directory listing")
    channel.write(reply.toBytes())
    Log.info("List end")
    // Signals the session that the data transfer may begin.
    session.canTransfer = true
  }
}
class Syst(argument: String) extends FtpCommand(argument: String) {
  /** Handles the SYST command: reports the server system type.
    * Fix: the reply text was misspelled ("teset server"); corrected to
    * "test server", matching the greeting sent by FtpConnectionEstablish. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("Syst start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val res = new FtpCommandResponse(
      FtpReturnCode.NAME_SYSTEM_TYPE,
      "test server")
    channel.write(res.toBytes())
    Log.info("Syst end")
  }
}
// ---------------------------------------------------------------------------
// Unimplemented FTP commands. Each execute() below is intentionally a no-op,
// so the client currently receives NO reply for these verbs.
// TODO(review): either implement them or respond with COMMAND_NOT_IMPLEMENTED
// so clients do not hang waiting for a status line.
// ---------------------------------------------------------------------------
class Stat(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Help(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Noop(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Acct(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Cdup(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Smnt(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Rein(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Stru(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Mode(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Stou(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Appe(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Allo(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Rest(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Rnfr(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Rnto(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Abor(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Dele(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Site(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class Nlst(argument: String) extends FtpCommand(argument: String) {
override def execute(selector: Selector, key: SelectionKey) {
}
}
class FtpConnectionEstablish(argument: String = null) extends FtpCommand(argument: String) {
  /** Sends the initial 220 greeting once a control connection is accepted. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("FtpConnectionEstablish start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val greeting = new FtpCommandResponse(
      FtpReturnCode.SERVICE_READY_FOR_NEW_USER,
      "test server")
    channel.write(greeting.toBytes())
    Log.info("FtpConnectionEstablish end")
  }
}
class FtpCommandNotFound(argument: String = null) extends FtpCommand(argument: String) {
  /** Fallback for unrecognised commands: replies with
    * COMMAND_NOT_IMPLEMENTED on the control connection. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("FtpCommandNotFound start")
    val channel = key.channel().asInstanceOf[SocketChannel]
    val reply = new FtpCommandResponse(
      FtpReturnCode.COMMAND_NOT_IMPLEMENTED,
      "Invalid Command")
    channel.write(reply.toBytes())
    Log.info("FtpCommandNotFound end")
  }
}
class FtpDataConnectionFinishCommand(commandSelector: Selector, commandKey: SelectionKey , argument: String) extends FtpCommand(argument: String) {
  /** Sends the 226 "closing data connection" reply. Note that it writes to
    * the channel of `commandKey` (the control connection captured at
    * construction time), not the `key` this command is invoked with. */
  override def execute(selector: Selector, key: SelectionKey) {
    Log.info("FtpDataConnectionFinishCommand start")
    val controlChannel = commandKey.channel().asInstanceOf[SocketChannel]
    val reply = new FtpCommandResponse(
      FtpReturnCode.CLOSING_DATA_CONNECTION,
      argument)
    controlChannel.write(reply.toBytes())
    Log.info("FtpDataConnectionFinishCommand end")
  }
}
package protocgen
import protocbridge.ProtocCodeGenerator
import com.google.protobuf.ExtensionRegistry
import com.google.protobuf.CodedInputStream
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorRequest
import com.google.protobuf.compiler.PluginProtos.CodeGeneratorResponse
import com.google.protobuf.Descriptors.FileDescriptor
import com.google.protobuf.compiler.PluginProtos
import com.google.protobuf.DescriptorProtos.FileDescriptorProto
/** CodeGenApp provides a higher-level Scala API for building protoc code
  * generators.
  *
  * Implementors may override `registerExtensions` to register any protobuf
  * extensions required to parse the CodeGeneratorRequest, and must implement
  * `process`, which maps a [[CodeGenRequest]] to a [[CodeGenResponse]].
  * These wrapper classes give idiomatic access to the request and response
  * types used by protoc.
  */
trait CodeGenApp extends ProtocCodeGenerator {
  /** Override to register protobuf extensions needed when parsing the request. */
  def registerExtensions(registry: ExtensionRegistry): Unit = {}

  /** Implement the actual code generation. */
  def process(request: CodeGenRequest): CodeGenResponse

  /** Entry point when the generator runs as a standalone protoc plugin:
    * the serialized request arrives on stdin, the response goes to stdout.
    * NOTE(review): relies on System.out being flushed at JVM exit — confirm
    * no explicit flush is needed for very large responses. */
  final def main(args: Array[String]): Unit =
    System.out.write(run(CodedInputStream.newInstance(System.in)))

  final override def run(req: Array[Byte]): Array[Byte] =
    run(CodedInputStream.newInstance(req))

  /** Renders a throwable (message + stack trace) as a single string. */
  private def errorMessage(t: Throwable) = {
    val traceWriter = new java.io.StringWriter()
    t.printStackTrace(new java.io.PrintWriter(traceWriter, true))
    traceWriter.toString
  }

  /** Parses the request, runs `process`, and serializes the response.
    * Any failure is reported back to protoc through the response's error
    * field instead of crashing the plugin process. */
  final def run(input: CodedInputStream): Array[Byte] =
    try {
      val registry = ExtensionRegistry.newInstance()
      registerExtensions(registry)
      val request = CodeGenRequest(CodeGeneratorRequest.parseFrom(input, registry))
      process(request).toCodeGeneratorResponse.toByteArray()
    } catch {
      case t: Throwable =>
        CodeGeneratorResponse
          .newBuilder()
          .setError(errorMessage(t))
          .build()
          .toByteArray
    }
}
| trueaccord/protoc-bridge | protoc-gen/src/main/scala/protocgen/CodeGenApp.scala | Scala | apache-2.0 | 1,985 |
package com.github.andr83.parsek.spark
import com.github.andr83.parsek._
import com.github.andr83.parsek.spark.SparkPipeContext.{LongCountersParam, StringTuple2}
import com.github.andr83.parsek.spark.pipe.RDDPipe
import com.github.andr83.parsek.spark.sink.Sink
import com.github.andr83.parsek.spark.source.Source
import com.typesafe.config.Config
import net.ceedubs.ficus.Ficus._
import net.ceedubs.ficus.readers.ArbitraryTypeReader._
import scala.collection.mutable.{HashMap => MutableHashMap}
/**
* @author andr83
*/
/** Spark job entry point: wires configured sources -> pipes -> sinks, keeping
  * one RDD per named flow, and logs per-flow counters when done. */
object ParsekJob extends SparkJob {
// Flow name used when a source/sink/pipe does not declare one explicitly.
val DefaultFlow = "default"
// When true, extra count() actions record input/output row totals per flow
// (each one forces a full pass over the RDD).
var enableStats = false
opt[Unit]("enableStats") action { (_, _) => {
enableStats = true
}
}
override def job(): Unit = {
val startTime = System.currentTimeMillis()
// Every flow name mentioned by any source, sink, or pipe ("toFlow"/"toFlows").
val flows = (config.as[List[Config]]("sources")
.map(_.as[Option[String]]("flow") getOrElse DefaultFlow) ++
config.as[List[Config]]("sinks")
.map(_.as[Option[String]]("flow") getOrElse DefaultFlow) ++
config.as[List[Config]]("pipes")
.flatMap(c => c.as[Option[String]]("toFlow") map (f => Seq(f)) orElse c.as[Option[Seq[String]]]("toFlows") getOrElse Seq.empty[String]))
.toSet
// One counters accumulator per flow, shared with pipes via the repository.
val accumulators = flows.map(flow => {
val acc = sc.accumulable(MutableHashMap.empty[StringTuple2, Long])(LongCountersParam)
flow -> acc
}).toMap
val repository = new FlowRepository(accumulators)
val sourcesByFlow = config.as[List[Config]]("sources")
.groupBy(_.as[Option[String]]("flow") getOrElse DefaultFlow)
.mapValues(_.map(Source.apply))
sourcesByFlow foreach {
case (flow, sources) =>
// Union all source RDDs of the flow. head/tail is safe here: groupBy only
// produces non-empty groups, so each flow has at least one source.
val rdds = sources.map(_ (this))
val rdd = rdds.tail.foldRight(rdds.head)(_.union(_))
val pipeContext = repository.getContext(flow)
if (enableStats) {
pipeContext.getCounter(PipeContext.InfoGroup, PipeContext.InputRowsGroup) += rdd.count()
}
repository += (flow -> rdd)
}
val pipeConfigs = config.as[Option[List[Config]]]("pipes") getOrElse List.empty[Config]
nextPipe(pipeConfigs, repository)
val sinkConfigs = config.as[List[Config]]("sinks") groupBy (_.as[Option[String]]("flow") getOrElse DefaultFlow)
val sinkFlows = sinkConfigs.keySet
// Only flows that have at least one configured sink are materialized.
repository.rdds filterKeys sinkFlows.contains foreach {
case (flow, rdd) =>
// .get is safe: flow passed the sinkFlows.contains filter above, though
// getOrElse would be more defensive against future refactoring.
val sinks = sinkConfigs.get(flow).get map Sink.apply
val pipeContext = repository.getContext(flow)
if (enableStats) {
pipeContext.getCounter(PipeContext.InfoGroup, PipeContext.OutputRowsGroup) += rdd.count()
}
sinks.foreach(_.sink(rdd, startTime))
logger.info(s"Flow $flow counters:")
logger.info(s"Duration: ${System.currentTimeMillis() - startTime}ms")
pipeContext.getCounters.toSeq.sortWith(_._1.toString() < _._1.toString()) foreach { case (key, count) =>
logger.info(s"$key: $count")
}
}
}
// Runs the pipe configs sequentially, in declaration order (tail call).
def nextPipe(pipes: List[Config], repository: FlowRepository): Unit = pipes match {
case head :: tail =>
runPipe(head, repository)
nextPipe(tail, repository)
case Nil =>
}
// Builds the RDDPipe for one config entry and runs it against its flow.
def runPipe(pipeConfig: Config, repository: FlowRepository): Unit = {
val flow = pipeConfig.as[Option[String]]("flow") getOrElse DefaultFlow
val pipe = RDDPipe(pipeConfig)
pipe.run(flow, repository)
}
}
| andr83/parsek | spark/src/main/scala/com/github/andr83/parsek/spark/ParsekJob.scala | Scala | mit | 3,371 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.