repo_name
stringlengths 6
97
| path
stringlengths 3
341
| text
stringlengths 8
1.02M
|
|---|---|---|
wix/libpay-creditguard
|
libpay-creditguard-common/src/main/scala/com/wix/pay/creditguard/model/CardSubTypes.scala
|
<filename>libpay-creditguard-common/src/main/scala/com/wix/pay/creditguard/model/CardSubTypes.scala
package com.wix.pay.creditguard.model
/** Numeric card sub-type codes used in Creditguard gateway responses. */
object CardSubTypes {
  val regular: Int = 0
  val gold: Int = 1
  val business: Int = 2
  val abroad: Int = 3
  val debit: Int = 4
  val delek: Int = 5
  val young: Int = 6
  val other: Int = 7
}
|
wix/libpay-creditguard
|
libpay-creditguard-common/src/main/scala/com/wix/pay/creditguard/CreditguardHelper.scala
|
package com.wix.pay.creditguard
import com.wix.pay.creditcard.CreditCard
import com.wix.pay.creditguard.model._
import com.wix.pay.model.CurrencyAmount
/**
 * Factory for Creditguard (Ashrait) gateway request objects.
 *
 * All builders return a fully populated [[AshraitRequest]] envelope wrapping a single
 * `doDeal` command (standard version, English, mayBeDuplicate enabled). The envelope
 * construction, previously duplicated in two builders, is shared via a private helper.
 */
object CreditguardHelper {
  /** Creates an authorization-only request (validation = verify); a capture must follow separately. */
  def createAuthorizeRequest(terminalNumber: String,
                             supplierNumber: String,
                             idPrefix: String,
                             orderId: Option[String] = None,
                             card: CreditCard,
                             currencyAmount: CurrencyAmount): AshraitRequest = {
    createAuthorizeOrSaleRequest(
      validation = Validations.verify,
      terminalNumber = terminalNumber,
      supplierNumber = supplierNumber,
      idPrefix = idPrefix,
      orderId = orderId,
      card = card,
      currencyAmount = currencyAmount
    )
  }

  /**
   * Creates a capture request for a previously authorized transaction.
   *
   * @param authNumber     authorization number returned by the earlier authorize call
   * @param cardId         gateway-issued card token (no raw card number is sent here)
   * @param cardExpiration card expiration as returned by the gateway — assumed to be the
   *                       same format produced by Conversions.toCreditguardYearMonth; TODO confirm
   * @param user           free-text user field echoed back by the gateway
   */
  def createCaptureRequest(terminalNumber: String,
                           supplierNumber: String,
                           authNumber: String,
                           currency: String,
                           amount: Double,
                           cardId: String,
                           cardExpiration: String,
                           user: String): AshraitRequest = {
    val doDeal = new DoDealRequest
    doDeal.terminalNumber = terminalNumber
    doDeal.cardId = cardId
    doDeal.cardExpiration = cardExpiration
    doDeal.creditType = CreditTypes.regularCredit
    doDeal.currency = currency
    doDeal.transactionCode = TransactionCodes.phone
    doDeal.transactionType = RequestTransactionTypes.debit
    doDeal.total = Conversions.toCreditguardAmount(amount)
    // autoComm both validates and commits the deal.
    doDeal.validation = Validations.autoComm
    doDeal.authNumber = authNumber
    doDeal.supplierNumber = supplierNumber
    doDeal.user = user
    wrapInStandardRequest(doDeal)
  }

  /** Creates a sale request (authorize + capture in one step, validation = autoComm). */
  def createSaleRequest(terminalNumber: String,
                        supplierNumber: String,
                        idPrefix: String,
                        orderId: Option[String] = None,
                        card: CreditCard,
                        currencyAmount: CurrencyAmount): AshraitRequest = {
    createAuthorizeOrSaleRequest(
      validation = Validations.autoComm,
      terminalNumber = terminalNumber,
      supplierNumber = supplierNumber,
      idPrefix = idPrefix,
      orderId = orderId,
      card = card,
      currencyAmount = currencyAmount
    )
  }

  /**
   * Creates an authorize (validation = verify) or sale (validation = autoComm) request
   * carrying full card data.
   *
   * @param validation one of [[Validations]]; decides whether the deal is only verified or committed
   * @param idPrefix   prefix for the gateway `user` field; combined with orderId and
   *                   truncated to DoDealRequest.userFieldLength
   */
  def createAuthorizeOrSaleRequest(validation: String,
                                   terminalNumber: String,
                                   supplierNumber: String,
                                   idPrefix: String,
                                   orderId: Option[String] = None,
                                   card: CreditCard,
                                   currencyAmount: CurrencyAmount): AshraitRequest = {
    val doDeal = new DoDealRequest
    doDeal.terminalNumber = terminalNumber
    doDeal.cardNo = card.number
    doDeal.cardExpiration = Conversions.toCreditguardYearMonth(
      year = card.expiration.year,
      month = card.expiration.month
    )
    // A sentinel value is sent when the card's security code is absent.
    doDeal.cvv = card.csc.getOrElse(Cvvs.notProvided)
    card.holderId.foreach { doDeal.id = _ }
    doDeal.creditType = CreditTypes.regularCredit
    doDeal.currency = currencyAmount.currency
    doDeal.transactionCode = TransactionCodes.phone
    doDeal.transactionType = RequestTransactionTypes.debit
    doDeal.total = Conversions.toCreditguardAmount(currencyAmount.amount)
    doDeal.validation = validation
    // The gateway's user field has a fixed maximum length; truncate to fit.
    doDeal.user = s"$idPrefix${orderId.getOrElse("")}".take(DoDealRequest.userFieldLength)
    doDeal.supplierNumber = supplierNumber
    wrapInStandardRequest(doDeal)
  }

  /** Wraps a populated doDeal payload in the standard request envelope shared by all builders. */
  private def wrapInStandardRequest(doDeal: DoDealRequest): AshraitRequest = {
    val request = new Request
    request.command = Commands.doDeal
    request.version = Versions.standard
    request.language = Languages.english
    request.mayBeDuplicate = MayBeDuplicates.`true`
    request.doDeal = doDeal
    val ashrait = new AshraitRequest
    ashrait.request = request
    ashrait
  }
}
|
wix/libpay-creditguard
|
libpay-creditguard/src/main/scala/com/wix/pay/creditguard/JsonCreditguardAuthorizationParser.scala
|
package com.wix.pay.creditguard
import org.json4s.DefaultFormats
import org.json4s.native.Serialization
/** JSON (de)serializer for [[CreditguardAuthorization]] keys, backed by json4s native serialization. */
class JsonCreditguardAuthorizationParser() extends CreditguardAuthorizationParser {
  private implicit val formats = DefaultFormats

  /** Reads an authorization back from its JSON string form. */
  override def parse(authorizationKey: String): CreditguardAuthorization =
    Serialization.read[CreditguardAuthorization](authorizationKey)

  /** Renders an authorization as a JSON string. */
  override def stringify(authorization: CreditguardAuthorization): String =
    Serialization.write(authorization)
}
|
wix/libpay-creditguard
|
libpay-creditguard-testkit/src/main/scala/com/wix/pay/creditguard/testkit/CreditguardDriver.scala
|
<gh_stars>1-10
package com.wix.pay.creditguard.testkit
import scala.collection.JavaConversions._
import scala.collection.mutable
import java.util.{List => JList}
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.{StatusCodes => HttpStatusCodes}
import akka.http.scaladsl.model._
import com.google.api.client.http.UrlEncodedParser
import com.wix.e2e.http.api.StubWebServer
import com.wix.e2e.http.client.extractors.HttpMessageExtractors._
import com.wix.e2e.http.server.WebServerFactory.aStubWebServer
import com.wix.pay.creditcard.CreditCard
import com.wix.pay.creditguard.model._
import com.wix.pay.creditguard.{CreditguardHelper, RequestParser, ResponseParser}
import com.wix.pay.model.CurrencyAmount
import com.wix.pay.shva.model.StatusCodes
/**
 * Test driver that stands up a stub Creditguard HTTP gateway on the given port.
 * Tests declare the exact request they expect (sale / authorize / capture) and
 * the canned response the stub should return when that request arrives.
 */
class CreditguardDriver(port: Int) {
private val server: StubWebServer = aStubWebServer.onPort(port).build
def start(): Unit = server.start()
def stop(): Unit = server.stop()
// Drops all previously registered handlers; the server keeps running.
def reset(): Unit = server.replaceWith()
// Builds the exact sale request this driver should expect on the wire.
def aSaleFor(user: String,
password: String,
terminalNumber: String,
supplierNumber: String,
idPrefix: String,
orderId: Option[String],
card: CreditCard,
currencyAmount: CurrencyAmount): RequestCtx = {
val request = CreditguardHelper.createSaleRequest(
terminalNumber = terminalNumber,
supplierNumber = supplierNumber,
idPrefix = idPrefix,
orderId = orderId,
card = card,
currencyAmount = currencyAmount)
new RequestCtx(
user = user,
password = password,
request = request)
}
// Builds the exact authorize request this driver should expect on the wire.
def anAuthorizeFor(user: String,
password: String,
terminalNumber: String,
supplierNumber: String,
idPrefix: String,
orderId: Option[String],
card: CreditCard,
currencyAmount: CurrencyAmount): RequestCtx = {
val request = CreditguardHelper.createAuthorizeRequest(
terminalNumber = terminalNumber,
supplierNumber = supplierNumber,
idPrefix = idPrefix,
orderId = orderId,
card = card,
currencyAmount = currencyAmount)
new RequestCtx(
user = user,
password = password,
request = request)
}
// Builds the exact capture request this driver should expect on the wire.
def aCaptureFor(user: String,
password: String,
terminalNumber: String,
supplierNumber: String,
authNumber: String,
currency: String,
amount: Double,
cardId: String,
cardExpiration: String,
userField: String): RequestCtx = {
val request = CreditguardHelper.createCaptureRequest(
terminalNumber = terminalNumber,
supplierNumber = supplierNumber,
authNumber = authNumber,
currency = currency,
amount = amount,
cardId = cardId,
cardExpiration = cardExpiration,
user = userField)
new RequestCtx(
user = user,
password = password,
request = request)
}
// Pairs an expected request (plus credentials) with the stubbed response to return for it.
class RequestCtx(user: String, password: String, request: AshraitRequest) {
// Stubs a successful response carrying only a transaction ID.
def returns(transactionId: String): Unit = {
val doDeal = new DoDealResponse
doDeal.status = StatusCodes.success
val response = new Response
response.doDeal = doDeal
response.tranId = transactionId
val ashrait = new AshraitResponse
ashrait.response = response
returns(ashrait)
}
// Stubs a successful authorize-style response that also echoes card/auth details.
def returns(authNumber: String,
cardId: String,
cardExpiration: String,
currency: String,
transactionId: String): Unit = {
val doDeal = new DoDealResponse
doDeal.status = StatusCodes.success
doDeal.authNumber = authNumber
doDeal.cardId = cardId
doDeal.cardExpiration = cardExpiration
doDeal.currency = currency
val response = new Response
response.doDeal = doDeal
response.tranId = transactionId
val ashrait = new AshraitResponse
ashrait.response = response
returns(ashrait)
}
// Stubs a failure with gateway status "405" (invalid merchant) and the given message.
def failsOnInvalidMerchant(errorMessage: String): Unit = {
val doDeal = new DoDealResponse
doDeal.status = "405"
doDeal.statusText = errorMessage
val response = new Response
response.doDeal = doDeal
val ashrait = new AshraitResponse
ashrait.response = response
returns(ashrait)
}
// Stubs a rejection (Shva rejected status) with the given message.
def getsRejected(errorMessage: String): Unit = {
val doDeal = new DoDealResponse
doDeal.status = StatusCodes.rejected
doDeal.statusText = errorMessage
val response = new Response
response.doDeal = doDeal
val ashrait = new AshraitResponse
ashrait.response = response
returns(ashrait)
}
// Registers an HTTP handler: POSTs to "/" whose form fields match this context's
// expectations get the serialized XML response with a 200 status.
private def returns(response: AshraitResponse): Unit = {
val responseXml = ResponseParser.stringify(response)
server.appendAll {
case HttpRequest(
HttpMethods.POST,
Path("/"),
_,
entity,
_) if isStubbedRequestEntity(entity) =>
HttpResponse(
status = HttpStatusCodes.OK,
entity = HttpEntity(ContentType(MediaTypes.`application/xml`, HttpCharsets.`UTF-8`), responseXml))
}
}
// True when the form-encoded entity carries this context's user, password, and
// an int_in XML payload that parses to exactly the expected request.
private def isStubbedRequestEntity(entity: HttpEntity): Boolean = {
val requestParams = urlDecode(entity.extractAsString)
requestParams.get(Fields.user).contains(user) &&
requestParams.get(Fields.password).contains(password) &&
requestParams.get(Fields.int_in).map { RequestParser.parse }.contains(request)
}
// Decodes a form-encoded body into a map, keeping only the first value per key.
// NOTE(review): relies on deprecated scala.collection.JavaConversions (imported at
// file top) for mutableMapAsJavaMap — consider JavaConverters; confirm Scala version.
private def urlDecode(str: String): Map[String, String] = {
val params = mutable.LinkedHashMap[String, JList[String]]()
UrlEncodedParser.parse(str, mutableMapAsJavaMap(params))
params.mapValues( _.head ).toMap
}
}
}
|
wix/libpay-creditguard
|
libpay-creditguard-common/src/main/scala/com/wix/pay/creditguard/model/CreditTypes.scala
|
package com.wix.pay.creditguard.model
/** Values for the Creditguard `creditType` request field. */
object CreditTypes {
/** Single payment debit. */
val regularCredit = "RegularCredit"
/** "Isracredit", "AMEX credit", "VisaAdif/30+", "Diners Adif/30+" (local Israeli payment method). */
val israCredit = "IsraCredit"
/** Ad hoc debit (local Israeli payment method); brand name lost to source anonymization. */
val adHock = "AdHock"
/** Club deal (local Israeli payment method). */
val clubDeal = "ClubDeal"
/**
* Special alpha – "super credit" (local Israeli payment method).
* Tag numberOfPayments is mandatory.
*/
val specialAlpha = "SpecialAlpha"
/**
* Special credit - "credit"/"fixed payments credit" (local Israeli payment method).
* Tag numberOfPayments is mandatory.
*/
val specialCredit = "SpecialCredit"
/**
* Multiple payments debit (installments).
* Tags numberOfPayments, periodicalPayment and firstPayment are mandatory.
*/
val payments = "Payments"
/** Payment club (local Israeli payment method). */
val paymentsClub = "PaymentsClub"
}
|
wix/libpay-creditguard
|
libpay-creditguard-common/src/main/scala/com/wix/pay/creditguard/model/MayBeDuplicates.scala
|
package com.wix.pay.creditguard.model
/** Flag values for the Creditguard `mayBeDuplicate` request field ("0" = no, "1" = yes). */
object MayBeDuplicates {
  val `false`: String = "0"
  val `true`: String = "1"
}
|
eed3si9n/activator
|
project/Dependencies.scala
|
<reponame>eed3si9n/activator
import sbt._
import Keys._
/**
 * Central declaration of third-party dependency versions, module IDs, resolvers,
 * and small build DSL helpers shared by the Activator build.
 */
object Dependencies {
  val sbtVersion = "0.13.11"
  val sbtLibraryVersion = "0.13.11" // for sbtIO on scala 2.11
  val sbtPluginVersion = "0.13"
  val sbtPluginScalaVersion = "2.11.7"
  val scalaVersion = "2.11.8"
  val scala210Version = "2.10.4"
  val luceneVersion = "4.2.1"
  val templateCacheVersion = "1.0-a0afb008ea619bf9d87dc010156cddffa8a6f880"
  val sbtRcVersion = "0.3.5"
  val sbtCoreNextVersion = "0.1.1"
  val shimPlayVersion = "2.4.2"
  val play23Version = "2.3.9"
  val akka22Version = "2.2.4"
  val akka23Version = "2.3.11"
  val slickVersion = "3.0.0"
  val activatorAnalyticsVersion = "0.1.8"
  val aspectJVersion = "1.8.4"
  // versions used by build to set dependencies in JS
  val ideaVersion = "1.6.0"
  val eclipseVersion = "4.0.0"
  val activatorCommon = "com.typesafe.activator" % "activator-common" % templateCacheVersion
  val templateCache = "com.typesafe.activator" % "activator-templates-cache" % templateCacheVersion
  // 2.10 sbt
  val sbtIo210 = "org.scala-sbt" % "io" % sbtVersion
  // launcher interface is pure Java, no scala versioning
  // val sbtLauncherInterface = "org.scala-sbt" % "launcher-interface" % sbtVersion
  val sbtLauncherInterface = "org.scala-sbt" % "launcher-interface" % "1.0.0-M1"
  // 2.11 sbt
  val sbtIo = "org.scala-sbt" %% "io" % sbtLibraryVersion
  val sbtCompletion = "org.scala-sbt" %% "completion" % sbtLibraryVersion
  // sbtrc projects
  val sbtrcClient = "com.typesafe.sbtrc" % "client-2-11" % sbtRcVersion
  val sbtrcIntegration = "com.typesafe.sbtrc" % "integration-tests" % sbtRcVersion
  val akkaActor = "com.typesafe.akka" %% "akka-actor" % akka23Version
  val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % akka23Version
  val akkaTestkit = "com.typesafe.akka" %% "akka-testkit"% akka23Version
  val commonsIo = "commons-io" % "commons-io" % "2.0.1"
  val mimeUtil = "eu.medsea.mimeutil" % "mime-util" % "2.1.3" exclude("org.slf4j", "slf4j-log4j12") exclude("org.slf4j", "slf4j-api") exclude("log4j", "log4j")
  val junitInterface = "com.novocode" % "junit-interface" % "0.7"
  val specs2 = "org.specs2" % "specs2_2.11" % "2.3.11"
  // SBT 0.13 required plugins
  val playSbt13Plugin = Defaults.sbtPluginExtra("com.typesafe.play" % "sbt-plugin" % shimPlayVersion, "0.13", "2.10")
  val eclipseSbt13Plugin = Defaults.sbtPluginExtra("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0", "0.13", "2.10")
  val ideaSbt13Plugin = Defaults.sbtPluginExtra("com.github.mpeltonen" % "sbt-idea" % "1.5.2", "0.13", "2.10")
  // Embedded databases / index
  val lucene = "org.apache.lucene" % "lucene-core" % luceneVersion
  val luceneAnalyzerCommon = "org.apache.lucene" % "lucene-analyzers-common" % luceneVersion
  val luceneQueryParser = "org.apache.lucene" % "lucene-queryparser" % luceneVersion
  // WebJars for the Activator UI
  val requirejs = "org.webjars" % "requirejs" % "2.1.11"
  val jquery = "org.webjars" % "jquery" % "2.0.3"
  val knockout = "org.webjars" % "knockout" % "3.3.0"
  val ace = "org.webjars" % "ace" % "1.1.7-1"
  val keymage = "org.webjars" % "keymage" % "1.0.1"
  // Analyzers used by Inspect
  val activatorAnalytics = "com.typesafe.activator" %% "analytics" % activatorAnalyticsVersion
  // Mini DSL
  // DSL for adding remote deps like local deps.
  implicit def p2remote(p: Project): RemoteDepHelper = new RemoteDepHelper(p)
  final class RemoteDepHelper(p: Project) {
    def dependsOnRemote(ms: ModuleID*): Project = p.settings(libraryDependencies ++= ms)
  }
  // DSL for adding source dependencies to projects.
  def dependsOnSource(dir: String): Seq[Setting[_]] = {
    import Keys._
    Seq(unmanagedSourceDirectories in Compile <<= (unmanagedSourceDirectories in Compile, baseDirectory) { (srcDirs, base) => (base / dir / "src/main/scala") +: srcDirs },
      unmanagedSourceDirectories in Test <<= (unmanagedSourceDirectories in Test, baseDirectory) { (srcDirs, base) => (base / dir / "src/test/scala") +: srcDirs })
  }
  implicit def p2source(p: Project): SourceDepHelper = new SourceDepHelper(p)
  final class SourceDepHelper(p: Project) {
    def dependsOnSource(dir: String): Project =
      p.settings(Dependencies.dependsOnSource(dir):_*)
  }
  // compile classpath and classes directory, with provided/optional or scala dependencies
  // specifically for projects that need remote-probe dependencies
  val requiredClasspath = TaskKey[Classpath]("required-classpath")
  def requiredJars(deps: ProjectReference*): Setting[_] = {
    import xsbti.ArtifactInfo._
    import Project.Initialize
    val dependentProjectClassPaths: Seq[Initialize[Task[Seq[File]]]] =
      (deps map { proj =>
        (classDirectory in Compile in proj) map { dir => Seq(dir) }
      })
    // Compile-config ivy jars, excluding the scala library itself.
    val ivyDeps: Initialize[Task[Seq[File]]] = update map { report =>
      val jars = report.matching(configurationFilter(name = "compile") -- moduleFilter(organization = ScalaOrganization, name = ScalaLibraryID))
      jars
    }
    val localClasses: Initialize[Task[Seq[File]]] = (classDirectory in Compile) map { dir =>
      Seq(dir)
    }
    // Join everyone
    def joinCp(inits: Seq[Initialize[Task[Seq[File]]]]): Initialize[Task[Seq[File]]] =
      inits reduce { (lhs, rhs) =>
        (lhs zip rhs).flatMap { case (l,r) =>
          l.flatMap[Seq[File]] { files =>
            r.map[Seq[File]] { files2 =>
              files ++ files2
            }
          }
        }
      }
    requiredClasspath <<= joinCp(dependentProjectClassPaths ++ Seq(ivyDeps, localClasses)) map {
      _.classpath
    }
  }
  val sbtBackgroundRun = Defaults.sbtPluginExtra("org.scala-sbt" % "sbt-core-next" % sbtCoreNextVersion, "0.13", "2.10")
  def playPlugin: Seq[Setting[_]] = Seq(
    resolvers += Classpaths.typesafeSnapshots,
    // Use HTTPS: repo.typesafe.com no longer serves plain HTTP, and the rest of
    // this build already resolves these hosts over HTTPS.
    resolvers += "Lightbend Maven Snapshots" at "https://repo.typesafe.com/typesafe/snapshots/",
    resolvers += "Lightbend Maven Releases" at "https://repo.typesafe.com/typesafe/releases/"
  )
  // *** END SBT-ECHO DEPENDENCIES ***
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/ScopeJsonBuilder.scala
|
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import play.api.libs.json.{ Json, JsObject }
import activator.analytics.data.{ TimeRange, Scope, ActorStats }
/** Converts an analytics [[Scope]] into its JSON representation for the console REST API. */
object ScopeJsonBuilder {
  /** Builds the scope JSON object; absent optional fields are rendered as empty strings. */
  def createScopeJson(scope: Scope): JsObject =
    Json.obj(
      "node" -> scope.node.getOrElse(""),
      "actorSystem" -> scope.actorSystem.getOrElse(""),
      "actorPath" -> scope.path.getOrElse(""),
      "dispatcher" -> scope.dispatcher.getOrElse(""),
      "tag" -> scope.tag.getOrElse(""),
      "playPattern" -> scope.playPattern.getOrElse(""),
      "playController" -> scope.playController.getOrElse(""))
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/Exceptions.scala
|
<filename>ui/app/activator/typesafeproxy/Exceptions.scala<gh_stars>0
package activator.typesafeproxy
// Error hierarchy for the typesafe.com proxy; sealed so pattern matches can be exhaustive.
sealed abstract class TypesafeComProxyException(message: String, cause: Throwable = null) extends Exception(message, cause)
// The remote typesafe.com call timed out.
class ProxyTimeout(msg: String, cause: Throwable = null) extends TypesafeComProxyException(msg, cause)
// Authentication with typesafe.com was rejected.
class ProxyInvalidCredentials(msg: String, cause: Throwable = null) extends TypesafeComProxyException(msg, cause)
// The proxied call failed for some reason other than timeout/credentials/cancellation.
class ProxyFailure(msg: String, cause: Throwable = null) extends TypesafeComProxyException(msg, cause)
// The in-flight proxy operation was canceled by the caller.
class ProxyCanceled(msg: String, cause: Throwable = null) extends TypesafeComProxyException(msg, cause)
// Storing a fetched value into the local cache failed.
class CachePutFailure(msg: String, cause: Throwable = null) extends TypesafeComProxyException(msg, cause)
|
eed3si9n/activator
|
ui/app/activator/WebSocketUtil.scala
|
package activator
import play.api._
import play.api.mvc._
import play.filters.csrf._
import play.api.libs.iteratee._
import scala.concurrent.Future
// CSRF-checking helpers for Play WebSocket endpoints.
object WebSocketUtil {
private val TokenParam = "token"
// differences from the regular CSRFCheck are:
// - we check the token always (don't allow bypass if certain headers are present)
// - we work on WebSocket not Action
// See https://github.com/playframework/playframework/issues/1788
// for a future official replacement.
private def csrfCheckedWebSocket[A](tokenProvider: CSRF.TokenProvider, socket: WebSocket[A, A]): WebSocket[A, A] = {
// Accepts the connection only when the query-string token matches the cookie token.
// The for-comprehension over Option falls through to the getOrElse branch when the
// cookie token or query token is missing, or when the comparison fails.
def checkedF(request: RequestHeader): Future[Either[Result, (Enumerator[A], Iteratee[A, Unit]) => Unit]] = {
for {
cookieToken <- CSRF.getToken(request)
queryToken <- request.getQueryString(TokenParam)
if (tokenProvider.compareTokens(queryToken, cookieToken.value))
} yield socket.f(request)
} getOrElse {
Logger.warn("CSRF token check failed for WebSocket connection")
Future.successful(Left(Results.Forbidden("CSRF token check failed for WebSocket connection")))
}
// Re-wraps with the original socket's formatters so message encoding is unchanged.
WebSocket(checkedF)(socket.inFormatter, socket.outFormatter)
}
// unfortunately we have a cut-and-pasted default for this config option
// because Play has defaults in code rather than in reference.conf
private def signTokens(implicit app: Application): Boolean =
app.configuration.getBoolean("csrf.sign.tokens").getOrElse(true)
// Wraps a WebSocket so every connection attempt is CSRF-checked first.
def socketCSRFCheck[A](ws: WebSocket[A, A]): WebSocket[A, A] = {
import play.api.Play.current
csrfCheckedWebSocket(if (signTokens) CSRF.SignedTokenProvider else CSRF.UnsignedTokenProvider, ws)
}
// Builds the websocket URL for a reverse-routed call, appending the CSRF token as a
// query parameter; throws if no token is available on the current request.
def webSocketURLWithCSRF[A](socketCall: Call)(implicit request: RequestHeader): String = {
import activator.EnhancedURI._
val token =
CSRF.getToken(request).getOrElse(throw new RuntimeException("Can't get CSRF token for websocket")).value
(new java.net.URI(socketCall.webSocketURL()))
.addQueryParameter(TokenParam, token)
.toASCIIString
}
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/ActorJsonBuilder.scala
|
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import akka.actor.{ ActorRef, Props }
import console.ClientController.Update
import activator.analytics.data.{ Scope, ActorStats }
import play.api.libs.json.{ Json, JsObject, JsValue, JsArray, Writes, JsString }
import activator.analytics.data.BasicTypes.DurationNanos
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeUnit._
// Actor that converts ActorResult messages into JSON Update messages for the receiver.
class ActorJsonBuilder extends JsonBuilderActor {
import ActorJsonBuilder._
def receive = {
case r: ActorResult => r.receiver ! Update(createJson(r.stats))
}
}
/**
 * Companion for [[ActorJsonBuilder]]: converts [[ActorStats]] into the console's
 * JSON wire format.
 */
object ActorJsonBuilder {
  import ScopeJsonBuilder._
  import TimeRangeJsonBuilder._
  import DevationDetailJsonBuilder._
  def props(): Props =
    Props(classOf[ActorJsonBuilder])
  // Durations are reported in microseconds unless a caller overrides the unit.
  val DefaultOutputDurationTimeUnit = MICROSECONDS
  /** Request message: build JSON for `stats` and send the update to `receiver`. */
  case class ActorResult(receiver: ActorRef, stats: ActorStats)
  /** Wraps a single actor's stats JSON in the standard `{type, data}` envelope. */
  def createJson(stats: ActorStats): JsObject = {
    Json.obj(
      "type" -> "actor",
      "data" ->
        Json.obj(
          "actor" -> createActorJson(stats)))
  }
  def createActorJsonSeq(statsSeq: Seq[ActorStats]): JsArray =
    new JsArray(statsSeq.map(createActorJson(_)))
  /**
   * Flattens one actor's metrics into a single JSON object.
   *
   * Fix: the original emitted "meanProcessedMessageRate" and
   * "meanProcessedMessageRateUnit" twice with identical values; JSON objects must
   * not carry duplicate keys, so the repeats are dropped (output semantics unchanged).
   */
  def createActorJson(stats: ActorStats): JsObject = {
    Json.obj(
      "timerange" -> createTimeRangeJson(stats.timeRange),
      "scope" -> createScopeJson(stats.scope),
      "createdCount" -> stats.metrics.counts.createdCount,
      "stoppedCount" -> stats.metrics.counts.stoppedCount,
      "failedCount" -> stats.metrics.counts.failedCount,
      "restartCount" -> stats.metrics.counts.restartCount,
      "deviationCount" -> stats.metrics.counts.deviationCount,
      "errorCount" -> stats.metrics.counts.errorCount,
      "errors" -> createDeviationDetailJsonSeq(stats.metrics.deviationDetails.errors),
      "warningCount" -> stats.metrics.counts.warningCount,
      "warnings" -> createDeviationDetailJsonSeq(stats.metrics.deviationDetails.warnings),
      "deadletterCount" -> stats.metrics.counts.deadLetterCount,
      "deadletters" -> createDeviationDetailJsonSeq(stats.metrics.deviationDetails.deadLetters),
      "unhandledMessageCount" -> stats.metrics.counts.unhandledMessageCount,
      "unhandledMessages" -> createDeviationDetailJsonSeq(stats.metrics.deviationDetails.unhandledMessages),
      "deadlockCount" -> stats.metrics.deviationDetails.deadlockedThreads.size,
      "deadlocks" -> createDeviationDetailJsonSeq(stats.metrics.deviationDetails.deadlockedThreads),
      "processedMessagesCount" -> stats.metrics.counts.processedMessagesCount,
      "tellMessagesCount" -> stats.metrics.counts.tellMessagesCount,
      "askMessagesCount" -> stats.metrics.counts.askMessagesCount,
      "meanMailboxSize" -> stats.metrics.meanMailboxSize,
      "maxMailboxSize" -> stats.metrics.mailbox.maxMailboxSize,
      "maxMailboxSizeTimestamp" -> stats.metrics.mailbox.maxMailboxSizeTimestamp,
      "maxMailboxSizeAddressNode" -> stats.metrics.mailbox.maxMailboxSizeAddress.node,
      "maxMailboxSizeAddressPath" -> stats.metrics.mailbox.maxMailboxSizeAddress.path,
      "meanTimeInMailbox" -> generateValueUnitPair(stats.metrics.meanTimeInMailbox),
      "maxTimeInMailbox" -> generateValueUnitPair(stats.metrics.mailbox.maxTimeInMailbox),
      "maxTimeInMailboxTimestamp" -> stats.metrics.mailbox.maxTimeInMailboxTimestamp,
      "maxTimeInMailboxAddressNode" -> stats.metrics.mailbox.maxTimeInMailboxAddress.node,
      "maxTimeInMailboxAddressPath" -> stats.metrics.mailbox.maxTimeInMailboxAddress.path,
      "latestTraceEventTimestamp" -> stats.metrics.latestTraceEventTimestamp,
      "latestMessageTimestamp" -> stats.metrics.latestMessageTimestamp,
      "totalMessageRate" -> stats.metrics.messageRateMetrics.totalMessageRate,
      "receiveRate" -> stats.metrics.messageRateMetrics.receiveRate,
      "askRate" -> stats.metrics.messageRateMetrics.askRate,
      "tellRate" -> stats.metrics.messageRateMetrics.tellRate,
      "meanProcessedMessageRate" -> stats.meanProcessedMessageRate,
      "meanProcessedMessageRateUnit" -> "messages/second",
      "rateUnit" -> "messages/second",
      "meanBytesReadRate" -> stats.meanBytesReadRate,
      "meanBytesReadRateUnit" -> "bytes/second",
      "meanBytesWrittenRate" -> stats.meanBytesWrittenRate,
      "meanBytesWrittenRateUnit" -> "bytes/second")
  }
  /** Renders a nanosecond duration as `{value, unit}` in the requested time unit. */
  def generateValueUnitPair(
    duration: DurationNanos,
    timeUnit: TimeUnit = DefaultOutputDurationTimeUnit): JsValue = {
    def parseTimeUnit(time: TimeUnit): JsString = time match {
      case MICROSECONDS => JsString("µs")
      case MILLISECONDS => JsString("ms")
      case NANOSECONDS => JsString("ns")
      case _ => JsString("s")
    }
    Json.obj("value" -> timeUnit.convert(duration, NANOSECONDS)) ++ Json.obj("unit" -> parseTimeUnit(timeUnit))
  }
}
|
eed3si9n/activator
|
ui/app/console/handler/PlayRequestsHandler.scala
|
<filename>ui/app/console/handler/PlayRequestsHandler.scala
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console
package handler
import akka.actor.{ ActorRef, Props }
import activator.analytics.data.{ PlayStatsSort, PlayStatsSorts, PlayRequestSummary, TimeRange, Scope }
import activator.analytics.rest.http.SortingHelpers.SortDirection
import console.handler.rest.PlayRequestsJsonBuilder.PlayRequestsResult
import scala.language.existentials
import console.handler.rest.PlayRequestsJsonBuilder
import console.AnalyticsRepository
/** Companion: Props factory and request-description helpers for [[PlayRequestsHandler]]. */
object PlayRequestsHandler {
  /** Creates Props for a handler backed by the given analytics repository. */
  def props(repository: AnalyticsRepository,
            defaultLimit: Int,
            builderProps: Props = PlayRequestsJsonBuilder.props()): Props =
    Props(classOf[PlayRequestsHandler], repository, builderProps, defaultLimit)

  /** Describes a single request for Play request data (scope, time range, paging, sorting). */
  case class PlayRequestsModuleInfo(scope: Scope,
                                    modifiers: ScopeModifiers,
                                    time: TimeRange,
                                    pagingInformation: Option[PagingInformation],
                                    sortOn: PlayStatsSort[_],
                                    sortDirection: SortDirection,
                                    dataFrom: Option[Long],
                                    traceId: Option[String]) extends MultiValueModuleInformation[PlayStatsSort[_]]

  /**
   * Maps an optional sort-command string to a sort. Unknown strings fall back to
   * InvocationTimeSort; a missing command falls back to TimeSort.
   */
  def extractSortOn(sortCommand: Option[String]): PlayStatsSort[_] = sortCommand match {
    case Some("path") => PlayStatsSorts.PathSort
    case Some("time") => PlayStatsSorts.TimeSort
    case Some("controller") => PlayStatsSorts.ControllerSort
    case Some("method") => PlayStatsSorts.MethodSort
    case Some("responseCode") => PlayStatsSorts.ResponseCodeSort
    case Some(_) => PlayStatsSorts.InvocationTimeSort
    case None => PlayStatsSorts.TimeSort
  }
}
// Shared fetch logic for Play request summaries; subclasses decide what to do with
// the fetched stats via usePlayRequestStats.
trait PlayRequestsHandlerBase extends RequestHandler[PlayRequestsHandler.PlayRequestsModuleInfo] {
import PlayRequestsHandler._
import SortDirections._
// Callback invoked with the stats fetched for one module-information request.
def usePlayRequestStats(sender: ActorRef, stats: Seq[PlayRequestSummary]): Unit
def onModuleInformation(sender: ActorRef, mi: PlayRequestsModuleInfo): Unit = {
usePlayRequestStats(sender,
repository.playRequestSummaryRepository.findRequestsWithinTimePeriod(
mi.time.startTime,
mi.time.endTime,
// Paging defaults: offset 0 and limit 50 when no paging information was supplied.
// NOTE(review): the hard-coded 50 bypasses the handler's defaultLimit — confirm intended.
(for { p <- mi.pagingInformation } yield p.offset).getOrElse(0),
(for { p <- mi.pagingInformation } yield p.limit).getOrElse(50),
mi.sortOn,
mi.sortDirection.toLegacy))
}
}
// Actor that fetches Play request summaries and forwards them to a JSON-builder child actor.
class PlayRequestsHandler(val repository: AnalyticsRepository,
builderProps: Props,
val defaultLimit: Int) extends PlayRequestsHandlerBase {
val builder = context.actorOf(builderProps, "playRequestsBuilder")
def usePlayRequestStats(sender: ActorRef, stats: Seq[PlayRequestSummary]): Unit = {
builder ! PlayRequestsResult(sender, stats)
}
}
|
eed3si9n/activator
|
project/build.scala
|
import org.apache.tools.ant.taskdefs.Echo
import sbt._
import ActivatorBuild._
import Dependencies._
import Packaging.localRepoArtifacts
import com.typesafe.sbt.S3Plugin._
import com.typesafe.sbt.SbtNativePackager.Universal
import com.typesafe.sbt.packager.archetypes.JavaAppPackaging
import com.typesafe.sbt.SbtPgp
import play.PlayImport.PlayKeys
import com.typesafe.sbt.SbtPgp.autoImport._
import com.typesafe.sbt.less.Import.LessKeys
import com.typesafe.sbt.web.SbtWeb.autoImport._
import com.typesafe.sbt.jse.JsEngineImport.JsEngineKeys
// NOTE - This file is only used for SBT 0.12.x, in 0.13.x we'll use build.sbt and scala libraries.
// As such try to avoid putting stuff in here so we can see how good build.sbt is without build.scala.
object TheActivatorBuild extends Build {
/** Converts a file to a URI-ish path string by stripping the "file:"/"file://" scheme prefix. */
def fixFileForURIish(f: File): String = {
  val asUri = f.toURI.toASCIIString
  val prefix = if (asUri.startsWith("file://")) "file://" else "file:"
  asUri.drop(prefix.length)
}
// ADD sbt launcher support here.
// Global build settings: wires in sbt-launcher support and publishes activator.home
// as a system property so the Play application can find the template directory.
override def settings = super.settings ++ SbtSupport.buildSettings ++ baseVersions ++ Seq(
// This is a hack, so the play application will have the right view of the template directory.
Keys.baseDirectory <<= Keys.baseDirectory apply { bd =>
// Side effect: exposes the absolute base directory via a system property.
sys.props("activator.home") = fixFileForURIish(bd.getAbsoluteFile)
bd
}
)
// TODO : Add ++ play.Project.intellijCommandSettings Play 2.3 style to settings above
// Aggregate root: ties together all published projects plus dist, integration tests,
// the local template repo, and offline tests. Never published or signed itself.
val root = (
Project("root", file(".")) // TODO - Oddities with clean..
.noAutoPgp
.doNotPublish
aggregate(toReferences(publishedProjects ++
Seq(dist, it, localTemplateRepo, offlinetests)): _*)
)
// Release-news content project; not part of the aggregated build.
lazy val news: Project = (
Project("news", file("news"))
settings(NewsHelper.settings:_*)
)
// This project helps us isolate creating the local template repository for testing.
lazy val localTemplateRepo: Project = (
Project("template-repository", file("template-repository"))
.noAutoPgp
.doNotPublish
settings(LocalTemplateRepo.settings:_*)
settings(Keys.resolvers += typesafeIvyReleases)
)
// These are the projects we want in the local repository we deploy.
lazy val publishedProjects: Seq[Project] = Seq(ui, uiCommon, launcher, props)
// basic project that gives us properties to use in other projects.
lazy val props = (
ActivatorJavaProject("props")
settings(Properties.makePropertyClassSetting(Dependencies.sbtVersion, Dependencies.scalaVersion):_*)
)
// Helper for UI projects (CLI + GUI)
// Shared UI code; pulls in the template cache and the generated properties class.
lazy val uiCommon = (
ActivatorProject("ui-common")
dependsOnRemote(templateCache)
dependsOn(props)
)
// Toggle to make the forked sbt tests emit akka debug logging.
val verboseSbtTests = false
// Configures a test task to fork and receive the embedded-sbt JVM options.
def configureSbtTest(testKey: Scoped) = Seq(
// set up embedded sbt for tests, we fork so we can set
// system properties.
Keys.fork in Test in testKey := true,
Keys.javaOptions in Test in testKey <<= (
SbtSupport.sbtLaunchJar,
Keys.javaOptions in testKey,
Keys.update) map {
(launcher, oldOptions, updateReport) =>
oldOptions ++
(if (verboseSbtTests)
Seq("-Dakka.loglevel=DEBUG",
"-Dakka.actor.debug.autoreceive=on",
"-Dakka.actor.debug.receive=on",
"-Dakka.actor.debug.lifecycle=on")
else
Seq.empty)
})
import WebKeys.{assets, public}
import sbt.Keys.products
// The Play-based GUI application: webjar deps, less compilation, forked-test setup,
// template-cache wiring for "run", and a JS lint gate on compile.
lazy val ui = (
ActivatorPlayProject("ui")
dependsOnRemote(
requirejs, jquery, knockout, ace, /*requireCss, requireText,*/ keymage, commonsIo, mimeUtil, activatorAnalytics,
sbtLauncherInterface % "provided",
sbtrcClient,
sbtrcIntegration % "compile;test->test"
)
dependsOn(props, uiCommon)
settings(PlayKeys.playDefaultPort := 8888)
settings(Keys.includeFilter in (Assets, LessKeys.less) := "*.less")
settings(Keys.excludeFilter in (Assets, LessKeys.less) := "_*.less")
settings(Keys.libraryDependencies ++= Seq(Dependencies.akkaTestkit % "test", Dependencies.specs2 % "test"))
// set up debug props for forked tests
settings(configureSbtTest(Keys.test): _*)
settings(configureSbtTest(Keys.testOnly): _*)
// set up debug props for "run"
settings(
Keys.update <<= (
SbtSupport.sbtLaunchJar,
Keys.update,
LocalTemplateRepo.localTemplateCacheCreated in localTemplateRepo) map {
(launcher, update, templateCache) =>
// Side effect: points the running app at the freshly created template cache.
sys.props("activator.template.cache") = fixFileForURIish(templateCache)
sys.props("activator.runinsbt") = "true"
System.err.println("Template cache = " + sys.props("activator.template.cache"))
update
},
// We need to embed the assets in this JAR for activator.
// If we add any more play projects, we need to be clever with them.
public in Assets := (public in Assets).value / "public",
products in Compile += (assets in Assets).value.getParentFile
)
settings(
// Hooks compile to also generate version info and fail on JS formatting errors.
Keys.compile in Compile <<= (Keys.compile in Compile, Keys.baseDirectory, Keys.streams) map { (oldCompile, baseDir, streams) =>
// write version information
VersionGenerator.createInformation(baseDir)
// check for JS errors
val jsErrors = JsChecker.fixAndCheckAll(baseDir, streams.log)
for (error <- jsErrors) {
streams.log.error(error)
}
if (jsErrors.nonEmpty)
throw new RuntimeException(jsErrors.length + " JavaScript formatting errors found")
else
streams.log.info("JavaScript whitespace meets our exacting standards")
oldCompile
}
)
)
// Command-line launcher project; depends on the sbt launcher interface and
// completion library plus the shared props/uiCommon projects.
lazy val launcher = (
ActivatorProject("launcher")
dependsOnRemote(sbtLauncherInterface, sbtCompletion)
dependsOn(props, uiCommon)
)
// A hack project just for convenient IvySBT when resolving artifacts into new local repositories.
// Never published; exists only so its resolver configuration (minus the
// project resolver) can be borrowed when resolving artifacts into local repos.
lazy val dontusemeresolvers = (
ActivatorProject("dontuseme")
.doNotPublish
settings(
// This hack removes the project resolver so we don't resolve stub artifacts.
Keys.fullResolvers <<= (Keys.externalResolvers, Keys.sbtResolver) map (_ :+ _),
Keys.resolvers += Resolver.url("sbt-plugin-releases", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns),
Keys.resolvers += "Scalaz Bintray Repo" at "https://dl.bintray.com/scalaz/releases"
)
)
// Integration-tests project. Excluded from IDEA module generation and from
// remote publishing (publishLocal is still allowed, hence no doNotPublish).
lazy val it = (
ActivatorProject("integration-tests")
settings(integration.settings:_*)
dependsOnRemote(sbtLauncherInterface, sbtIo, sbtrcClient, sbtrcIntegration)
dependsOn(props)
settings(
org.sbtidea.SbtIdeaPlugin.ideaIgnoreModule := true,
// we don't use doNotPublish because we want to publishLocal
Keys.publish := {},
PgpKeys.publishSigned := {}
)
)
// Offline test suite project; never published.
lazy val offlinetests = (
ActivatorProject("offline-tests")
.doNotPublish
settings(offline.settings:_*)
)
// Task key used by `dist` below to print the final S3 download URLs.
lazy val logDownloadUrls = taskKey[Unit]("log download urls because we are lazy and don't want to hand-construct them")
// Distribution project: builds the zip packages, seeds the bundled local
// Ivy repository cache (localRepoArtifacts), and uploads the artifacts to S3.
lazy val dist = (
ActivatorProject("dist")
// TODO - Should publish be pushing the S3 upload?
.doNotPublish
settings(Packaging.settings:_*)
settings(s3Settings:_*)
settings(
Keys.scalaBinaryVersion <<= Keys.scalaBinaryVersion in ui,
Keys.resolvers ++= Seq(
"Lightbend repository" at "https://repo.typesafe.com/typesafe/releases/",
Resolver.url("typesafe-ivy-releases", new URL("https://repo.typesafe.com/typesafe/releases/"))(Resolver.ivyStylePatterns),
Resolver.url("sbt-plugin-releases", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
),
// TODO - Do this better - This is where we define what goes in the local repo cache.
// First, include every published project's own artifact...
localRepoArtifacts <++= (publishedProjects.toSeq map { ref =>
(Keys.projectID in ref) apply { id => id }
}).join,
// ...then the explicit list of dependencies to bundle into the cache.
localRepoArtifacts ++= Seq(
// base dependencies
"org.scala-sbt" % "sbt" % Dependencies.sbtVersion,
"org.scala-lang" % "scala-compiler" % Dependencies.sbtPluginScalaVersion,
"org.scala-lang" % "scala-compiler" % Dependencies.scalaVersion,
// sbt stuff
sbtrcClient,
// sbt 0.13 plugins
playSbt13Plugin,
eclipseSbt13Plugin,
ideaSbt13Plugin,
// featured template dependencies
// *** note: do not use %% here ***
"org.scala-lang" % "jline" % "2.10.4",
"org.scala-lang" % "jline" % "2.10.6",
"com.typesafe.slick" % "slick_2.11" % "3.0.0",
"com.h2database" % "h2" % "1.3.175",
"org.jboss.logging" % "jboss-logging" % "3.1.1.GA",
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-less" % "1.0.0", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-less" % "1.0.6", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-less" % "1.1.0", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-jshint" % "1.0.4", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-rjs" % "1.0.8", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-digest" % "1.1.1", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-mocha" % "1.1.0", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-play-enhancer" % "1.1.0", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.sbt" % "sbt-coffeescript" % "1.0.0", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.play" % "sbt-plugin" % "2.4.4", "0.13", "2.10"),
Defaults.sbtPluginExtra("com.typesafe.play" % "sbt-plugin" % "2.5.9", "0.13", "2.10"),
Defaults.sbtPluginExtra("org.irundaia.sbt" % "sbt-sassify" % "1.4.6", "0.13", "2.10"),
"com.typesafe.play" % "play-jdbc_2.11" % "2.4.4",
"com.typesafe.play" % "anorm_2.11" % "2.4.4",
"com.typesafe.play" % "play-cache_2.11" % "2.4.4",
"com.typesafe.play" % "play-docs_2.11" % "2.4.4",
"com.typesafe.play" % "play-specs2_2.11" % "2.4.4",
"com.typesafe.play" % "play-omnidoc_2.11" % "2.4.4",
"com.typesafe.play" % "play-server_2.11" % "2.5.9",
"com.typesafe.play" % "play-java_2.11" % "2.5.9",
"com.typesafe.play" % "play-netty-server_2.11" % "2.5.9",
"com.typesafe.play" % "play-logback_2.11" % "2.5.9",
"com.typesafe.play" % "play-java-jdbc_2.11" % "2.5.9",
"com.typesafe.play" % "play-cache_2.11" % "2.5.9",
"com.typesafe.play" % "play-java-ws_2.11" % "2.5.9",
"com.typesafe.play" % "play-test_2.11" % "2.5.9",
"com.typesafe.play" % "play-omnidoc_2.11" % "2.5.9",
"org.scalatestplus.play" % "scalatestplus-play_2.11" % "1.5.1",
"org.scalaz.stream" % "scalaz-stream_2.11" % "0.7a",
"org.specs2" % "specs2-matcher-extra_2.11" % "3.6",
"com.typesafe.play" % "play-test_2.11" % "2.4.4",
"com.typesafe.play" % "play-java_2.11" % "2.4.4",
"com.typesafe.play" % "play-java-jdbc_2.11" % "2.4.4",
"com.typesafe.play" % "play-java-ebean_2.11" % "2.4.4",
"com.typesafe.play" % "play-java-ws_2.11" % "2.4.4",
"com.typesafe.akka" % "akka-slf4j_2.11" % "2.3.11",
"com.typesafe.akka" % "akka-actor_2.11" % "2.4.0",
"com.typesafe.akka" % "akka-testkit_2.11" % "2.4.0",
"org.webjars" % "bootstrap" % "3.0.0",
"org.webjars" % "bootstrap" % "2.3.2",
"org.webjars" % "knockout" % "2.3.0",
"org.webjars" % "requirejs" % "2.1.11-1",
"org.webjars" % "leaflet" % "0.7.2",
"org.webjars" % "flot" % "0.8.0",
"org.webjars" % "squirejs" % "0.1.0",
"org.webjars" % "rjs" % "2.1.11-1",
"org.webjars" % "rjs" % "2.1.11-1-trireme",
"org.apache.httpcomponents" % "httpcore" % "4.0.1",
"org.apache.httpcomponents" % "httpclient" % "4.0.1",
"org.slf4j" % "slf4j-nop" % "1.6.4",
"com.novocode" % "junit-interface" % "0.11",
"junit" % "junit" % "4.12",
"org.scalatest" % "scalatest_2.11" % "2.2.4"
),
// Two uploads: the full zip and a "minimal" variant, keyed by version.
Keys.mappings in S3.upload <<= (Keys.packageBin in Universal, Packaging.minimalDist, Keys.version) map { (zip, minimalZip, v) =>
Seq(minimalZip -> ("typesafe-activator/%s/typesafe-activator-%s-minimal.zip" format (v, v)),
zip -> ("typesafe-activator/%s/typesafe-activator-%s.zip" format (v, v)))
},
S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com",
S3.progress in S3.upload := true,
// Wrap the upload so the template-cache hash gets logged alongside it.
S3.upload := {
val log = Keys.streams.value.log
val hash = (LocalTemplateRepo.checkTemplateCacheHash in TheActivatorBuild.localTemplateRepo).value
log.info("Publishing to S3 with template index " + hash)
S3.upload.value
},
logDownloadUrls := {
val log = Keys.streams.value.log
val version = Keys.version.value
log.info(s"Download: http://downloads.typesafe.com/typesafe-activator/${version}/typesafe-activator-${version}.zip")
log.info(s"Minimal: http://downloads.typesafe.com/typesafe-activator/${version}/typesafe-activator-${version}-minimal.zip")
}
)
).enablePlugins(JavaAppPackaging)
}
|
eed3si9n/activator
|
ui/test/console/JsonHandlerSpec.scala
|
package console
import org.specs2.mutable._
import play.api.libs.json.Json
// Unit tests for JsonHandler's Reads instances: InternalScope parsing
// (empty and fully-populated) and InnerModuleInformation parsing (single
// and multiple modules with optional paging/sort/traceId fields).
class JsonHandlerSpec extends Specification {
"JSON parser" should {
// An empty "scope" object should yield a scope with every field None.
"parse empty scope" in {
val json = Json.parse(
"""
{
"scope" : {}
}
""")
implicit val parser = JsonHandler.scopeReads
val scope = (json \ "scope").as[InternalScope]
scope.node must equalTo(None)
}
// Every supported scope field should round-trip into Some(...).
"parse full scope" in {
val json = Json.parse(
"""
{
"scope" : {
"node" : "n1",
"actorSystem" : "as1",
"dispatcher" : "d1",
"tag" : "t1",
"actorPath" : "a1"
}
}
""")
implicit val parser = JsonHandler.scopeReads
val scope = (json \ "scope").as[InternalScope]
scope.node must equalTo(Some("n1"))
scope.actorSystem must equalTo(Some("as1"))
scope.dispatcher must equalTo(Some("d1"))
scope.tag must equalTo(Some("t1"))
scope.actorPath must equalTo(Some("a1"))
}
// Paging info is optional but, when present, offset/limit must be read.
"parse one inner module" in {
val json = Json.parse(
"""
{
"modules" : [
{
"name" : "name1",
"paging" : {
"offset" : 101,
"limit" : 11
},
"scope" : {
"node" : "n1"
}
}
]
}
""")
implicit val parser = JsonHandler.innerModuleReads
val innerModule = (json \ "modules").as[List[InnerModuleInformation]]
innerModule.size must equalTo(1)
innerModule.head.name must equalTo("name1")
innerModule.head.scope.node must equalTo(Some("n1"))
innerModule.head.pagingInformation must not be empty
innerModule.head.pagingInformation.get.offset must equalTo(101)
innerModule.head.pagingInformation.get.limit must equalTo(11)
innerModule.head.sortCommand must equalTo(None)
innerModule.head.scope.actorPath must equalTo(None)
}
// Heterogeneous module list: each entry keeps its own optional fields.
"parse multiple inner modules" in {
val json = Json.parse(
"""
{
"modules" : [
{
"name" : "name1",
"sortCommand" : "sortOnThis",
"traceId" : "traceId1",
"scope" : {
"node" : "n1"
}
},
{
"name" : "name2",
"scope" : {
"node" : "n2",
"actorSystem" : "as2"
}
},
{
"name" : "name3",
"paging" : {
"offset" : 1,
"limit" : 1000
},
"sortCommand" : "sortMeOnThis",
"scope" : {
"node" : "n3",
"actorSystem" : "as3",
"dispatcher" : "d3",
"actorPath" : "a3"
}
}
]
}
""")
implicit val parser = JsonHandler.innerModuleReads
val innerModules = (json \ "modules").as[List[InnerModuleInformation]]
innerModules.size must equalTo(3)
innerModules(0).name must equalTo("name1")
innerModules(0).traceId must equalTo(Some("traceId1"))
innerModules(0).scope.node must equalTo(Some("n1"))
innerModules(0).sortCommand must equalTo(Some("sortOnThis"))
innerModules(0).scope.tag must equalTo(None)
innerModules(1).name must equalTo("name2")
innerModules(1).traceId must equalTo(None)
innerModules(1).scope.node must equalTo(Some("n2"))
innerModules(1).scope.actorSystem must equalTo(Some("as2"))
innerModules(1).scope.tag must equalTo(None)
innerModules(2).name must equalTo("name3")
innerModules(2).traceId must equalTo(None)
innerModules(2).pagingInformation must not be empty
innerModules(2).pagingInformation.get.offset must equalTo(1)
innerModules(2).pagingInformation.get.limit must equalTo(1000)
innerModules(2).sortCommand must equalTo(Some("sortMeOnThis"))
innerModules(2).scope.node must equalTo(Some("n3"))
innerModules(2).scope.actorSystem must equalTo(Some("as3"))
innerModules(2).scope.dispatcher must equalTo(Some("d3"))
innerModules(2).scope.actorPath must equalTo(Some("a3"))
innerModules(2).scope.tag must equalTo(None)
}
}
}
|
eed3si9n/activator
|
ui/app/activator/WebSocketActor.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import akka.actor._
import akka.pattern._
import scala.concurrent.{ Channel => _, _ }
import scala.concurrent.duration._
import akka.util._
import play.api.libs.iteratee._
import scala.collection.immutable.Queue
import play.api.mvc.WebSocket.FrameFormatter
import console.ConsolePlugin
import console.ClientController.InitializeCommunication
import JsonHelper._
import play.api.libs.json.Json._
private case object Ack
case object GetWebSocket
private case object CloseWebSocket
// This is a bunch of glue to convert Iteratee/Enumerator into an actor.
// There's probably a better approach, oh well.
/**
 * Adapts Play's Iteratee/Enumerator websocket API onto an actor: incoming
 * frames are delivered via `onMessage`, outgoing frames are sent with
 * `produce`, and the actor kills itself once both directions are complete
 * (or a readiness/half-completion timeout fires).
 */
abstract class WebSocketActor[MessageType](implicit frameFormatter: FrameFormatter[MessageType], mf: Manifest[MessageType]) extends Actor with ActorLogging {
import WebSocketActor._
private implicit def ec: ExecutionContext = context.system.dispatcher
protected sealed trait WebSocketMessage
protected case class Incoming[In](message: In) extends WebSocketMessage
// Internal lifecycle signals; never exposed to subclasses.
private sealed trait InternalWebSocketMessage
private case object IncomingComplete extends InternalWebSocketMessage
private case object Ready extends InternalWebSocketMessage
private case object InitialReadyTimeout extends InternalWebSocketMessage
private case object TimeoutAfterHalfCompleted extends InternalWebSocketMessage
// This is a consumer which is pushed to by the websocket handler
private class ActorIteratee[In](val actorWrapper: ActorWrapperHelper) extends Iteratee[In, Unit] {
// we are an iteratee that always _continues_ by providing the function
// handleNextInput, which in turn computes the next iteratee based on
// some input fed to us from the websocket. The next iteratee will
// be another ActorIteratee, or a Done or an Error.
override def fold[B](folder: Step[In, Unit] => Future[B])(implicit ec: ExecutionContext): Future[B] = folder(Step.Cont(handleNextInput))
private def handleNextInput(i: Input[In]): Iteratee[In, Unit] = {
i match {
case Input.Empty =>
log.debug("consumer iteratee (incoming websocket messages) is empty")
this
case Input.EOF => {
log.debug("consumer iteratee (incoming websocket messages) EOF")
actorWrapper.actor ! IncomingComplete
Done((), Input.Empty)
}
case Input.El(x) => {
if (actorWrapper.isTerminated) {
log.debug("Sending error to the incoming websocket, can't consume since actor is terminated {}", x)
Error("web socket consumer actor has been terminated", i)
} else {
// Ask the actor to consume the element; its reply is the next
// iteratee to continue with (acts as back-pressure ack).
val response = actorWrapper.actor.ask(Incoming[In](x))(WebSocketActor.timeout)
flatMapM(_ =>
response map {
case iteratee: Iteratee[_, _] =>
// note: this iteratee could be an Error, in theory,
// though in practice right now it's just another
// ActorIteratee that serves as an ack
iteratee.asInstanceOf[Iteratee[In, Unit]]
case whatever =>
log.debug("Bad reply from websocket actor {}", whatever)
Error("web socket actor gave us a mystery reply: " + whatever, Input.El(x))
} recover {
case e: Exception =>
log.debug("Failed to consume incoming websocket message: consumer.isTerminated={}: {}: {}: message was {}",
actorWrapper.isTerminated, e.getClass.getName, e.getMessage, x)
Error("web socket actor failed to consume a message", Input.El(x))
})
}
}
}
}
}
// this is called from a non-actor thread
private def newConsumer(): Iteratee[MessageType, Unit] = new ActorIteratee[MessageType](ActorWrapperHelper(self))
private var incomingCompleted = false
private var outgoingCompleted = false
private var triggeredFullyCompleted = false
private var createdSocket = false
private var ready = false
// don't restart children
override val supervisorStrategy = SupervisorStrategy.stoppingStrategy
private val producerActorWrapper = ActorWrapperHelper(context.actorOf(Props(new ProducerProxy[MessageType]), name = "producer"))
// Hook Console related actors up with this actor by initializing the communication.
// A reply with a reference to the console actor will be sent to "self" (see internalReceive below).
def plugin(implicit app: play.api.Application): ConsolePlugin =
app.plugin(classOf[ConsolePlugin]).getOrElse(throw new RuntimeException("The Console plugin does not exist"))
implicit val ctx = play.api.Play.current
plugin.clientHandlerActor ! InitializeCommunication(id = "Actor" + System.currentTimeMillis, consumer = producerActorWrapper.actor)
var consoleActor: Option[ActorRef] = None
override def preStart(): Unit = {
log.debug("starting")
// If GetWebSocket/Ready never happens within the timeout, suicide.
context.system.scheduler.scheduleOnce(WebSocketActor.timeout.duration, self, InitialReadyTimeout)
}
// Poison self once both directions finished; if only one finished,
// schedule a timeout so we don't wait forever for the other half.
private def checkFullyCompleted() {
// it's possible that ready has never been true when we get here
if (incomingCompleted && outgoingCompleted) {
if (!triggeredFullyCompleted) {
log.debug("Both incoming and outgoing websocket channels done, killing websocket actor")
triggeredFullyCompleted = true
self ! PoisonPill
}
} else if (incomingCompleted || outgoingCompleted) {
context.system.scheduler.scheduleOnce(WebSocketActor.timeout.duration, self, TimeoutAfterHalfCompleted)
}
}
private def internalReceive: Receive = {
case Terminated(child) =>
if (child == producerActorWrapper.actor) {
log.debug("In websocket actor, got Terminated for producer actor")
outgoingCompleted = true
checkFullyCompleted()
} else {
log.debug("In websocket actor, got Terminated for unexpected actor: " + child)
}
case internal: InternalWebSocketMessage => internal match {
case IncomingComplete =>
log.debug("In websocket actor, got IncomingComplete signaling consumer actor is done")
incomingCompleted = true
checkFullyCompleted()
log.debug("poisoning producer to close our side of the socket")
producerActorWrapper.actor ! PoisonPill
case InitialReadyTimeout =>
if (!ready) {
log.debug("websocket actor not ready within its timeout, poisoning")
self ! PoisonPill
}
case Ready =>
ready = true
case TimeoutAfterHalfCompleted =>
if (!(incomingCompleted && outgoingCompleted)) {
log.debug("websocket actor had incoming completed=" + incomingCompleted +
" and outgoing completed=" + outgoingCompleted +
" and timed out before the other one completed; terminating")
incomingCompleted = true
outgoingCompleted = true
}
checkFullyCompleted()
case other => log.error("Received unexpected internal websocket message {}", other)
}
case Incoming(message) =>
onMessage(message.asInstanceOf[MessageType])
// reply with the new iteratee
sender ! new ActorIteratee[MessageType](ActorWrapperHelper(self))
case GetWebSocket =>
// Only one socket attachment per actor instance is allowed.
if (createdSocket) {
log.debug("second connection attempt will fail")
throw new Exception("Tried to attach a second web socket to the same WebSocketActor")
} else {
log.debug("Firing up web socket")
val actor = self
val futureStreams = producerActorWrapper.actor.ask(GetProducer)
.mapTo[GotProducer[MessageType]]
.map({
case GotProducer(enumerator) =>
log.debug("got producer enumerator")
val consumer = newConsumer()
actor ! Ready
(consumer, enumerator)
})
createdSocket = true
futureStreams onFailure {
case e: Throwable =>
log.debug("Failed to create producer and consumer, {}: {}", e.getClass.getSimpleName, e.getMessage)
}
futureStreams pipeTo sender
}
case CloseWebSocket =>
log.debug("got CloseWebSocket poisoning the producer")
producerActorWrapper.actor ! PoisonPill
case InitializeCommunication(_, ref) =>
consoleActor = Some(ref)
}
final override def receive = internalReceive orElse subReceive
// Subclasses extend behavior here; default handles nothing extra.
protected def subReceive: Receive = Map.empty
// Subclasses override to handle each decoded incoming frame.
protected def onMessage(message: MessageType): Unit = {
}
// Sends one outgoing frame via the producer proxy; on ask failure the
// producer is poisoned, which cascades into shutting down this actor.
protected final def produce(message: MessageType): Unit = {
if (producerActorWrapper.isTerminated) {
// this isn't reliable, it's just nicer to fail early instead of timing out
log.debug("producer actor is dead, sending isn't going to work")
} else {
producerActorWrapper.actor.ask(OutgoingMessage(message))(WebSocketActor.timeout).mapTo[Ack.type].onFailure {
case e: Exception =>
log.debug("Producer actor failed to send Outgoing, {}: {}", e.getClass.getSimpleName, e.getMessage)
log.debug("Killing failed producer")
// this is supposed to start a chain reaction where we get Terminated
// on the producer and then kill ourselves as well
producerActorWrapper.actor ! PoisonPill
}
}
}
}
// Companion: shared ask timeout, JSON protocol messages (InspectRequest,
// Ping/Pong), and the `create` factory that wires a WebSocketActor into a
// Play WebSocket endpoint.
object WebSocketActor {
implicit val timeout = Timeout(30.seconds)
import play.api.mvc.WebSocket
import play.api.libs.json._
case class InspectRequest(json: JsValue)
object InspectRequest {
val tag = "InspectRequest"
implicit val inspectRequestReads: Reads[InspectRequest] =
extractRequest[InspectRequest](tag)((__ \ "location").read[JsValue].map(InspectRequest.apply _))
implicit val inspectRequestWrites: Writes[InspectRequest] =
emitRequest(tag)(in => obj("location" -> in.json))
def unapply(in: JsValue): Option[InspectRequest] = Json.fromJson[InspectRequest](in).asOpt
}
case class InspectResponse()
case class Ping(cookie: String)
// NOTE(review): companion of a case class declared as `case object` is
// unusual; plain `object Ping` would normally suffice — confirm intent.
case object Ping {
def unapply(in: JsValue): Option[Ping] =
try {
if ((in \ "request").as[String] == "Ping")
Some(Ping((in \ "cookie").as[String]))
else
None
} catch {
case e: JsResultException => None
}
}
object Pong {
def apply(cookie: String): JsValue =
JsObject(Seq("response" -> JsString("Pong"), "cookie" -> JsString(cookie)))
}
/**
 * Creates a new controller method which instantiates a
 * websocket actor (in the given actor system) and
 * returns the appropriate Iteratee/Enumeratee pair for play
 * to delegate messages into the actor.
 *
 * Note: This method is a convenience, and most likely needs tweaking
 * as we use more websockets.
 */
def create[T](system: ActorSystem, creator: => WebSocketActor[T], name: String)(implicit fm: FrameFormatter[T]): WebSocket[T, T] = WebSocketUtil.socketCSRFCheck {
WebSocket.tryAccept[T] { request =>
val wsActor = system.actorOf(Props(creator), name = name)
import system.dispatcher
val stream = (wsActor ? GetWebSocket).map {
case activator.WebSocketAlreadyUsed => throw new RuntimeException("can only connect to websocket actor once.")
case whatever => whatever
}
stream.mapTo[(play.api.libs.iteratee.Iteratee[T, _], play.api.libs.iteratee.Enumerator[T])].map { streams => Right(streams) }
}
}
}
// Protocol between WebSocketActor and its ProducerProxy child: channel
// readiness/completion/error events from the unicast enumerator, outgoing
// payloads, and the enumerator handshake (GetProducer -> GotProducer).
sealed trait ProducerProxyMessage
private case class OutgoingReady[Out](channel: Concurrent.Channel[Out]) extends ProducerProxyMessage
private case object OutgoingComplete extends ProducerProxyMessage
private case class OutgoingError[Out](s: String, input: Input[Out]) extends ProducerProxyMessage
case class OutgoingMessage[Out](message: Out) extends ProducerProxyMessage
private case object GetProducer extends ProducerProxyMessage
private sealed trait ProducerProxyReply
private case class GotProducer[Out](enumerator: Enumerator[Out]) extends ProducerProxyReply
/**
 * Owns the outgoing side of the websocket: wraps a Concurrent.unicast
 * enumerator, buffers messages until the channel is ready, then pushes
 * them. Poisons itself on channel error/completion or push failure, and
 * closes the channel (eofAndEnd) in postStop.
 */
private class ProducerProxy[Out] extends Actor with ActorLogging {
  private implicit def ec: ExecutionContext = context.system.dispatcher
  private case object InitialReadyTimeout
  // create a producer that accepts outgoing websocket messages
  // and sends us status updates on the producer channel
  protected lazy val enumerator = Concurrent.unicast[Out](
    onStart = { channel =>
      log.debug("unicast onStart: sending channel to websocket producer")
      self ! OutgoingReady[Out](channel)
    },
    onComplete = { () =>
      log.debug("unicast onComplete: completing websocket producer")
      self ! OutgoingComplete
    },
    onError = { (s, input) =>
      log.debug("unicast onError: websocket producer {}", s)
      self ! OutgoingError(s, input)
    })
  // Channel is only available after the unicast onStart callback fires.
  var channelOption: Option[Concurrent.Channel[Out]] = None
  // Messages produced before the channel is ready wait here (FIFO).
  var buffer: Queue[Out] = Queue.empty
  // Pushes one message to the ready channel; any push failure poisons us.
  private def push(message: Out): Unit = {
    require(channelOption.isDefined)
    for (channel <- channelOption) {
      log.debug("pushing message to channel {}", message)
      try {
        channel.push(message)
        log.debug("message pushed with no exception")
      } catch {
        case other: Exception =>
          log.debug("Exception {} sending to socket, suiciding: {}", other.getClass.getSimpleName, other.getMessage)
          self ! PoisonPill
      }
    }
  }
  // Push immediately if the channel is ready, otherwise buffer.
  private def produce(message: Out): Unit = {
    if (channelOption.isDefined) {
      push(message)
    } else {
      log.debug("Buffering message {}", message)
      buffer = buffer.enqueue(message)
    }
  }
  // Drains the pre-ready buffer in FIFO order once the channel arrives.
  private def flushBuffer(): Unit = {
    require(channelOption.isDefined)
    if (buffer.isEmpty)
      log.debug("No messages in buffer to flush")
    while (buffer.nonEmpty) {
      val (m, remaining) = buffer.dequeue
      log.debug("Flushing buffered message {}", m)
      push(m)
      buffer = remaining
    }
  }
  override def receive = {
    case InitialReadyTimeout =>
      // idiomatic isEmpty instead of !isDefined
      if (channelOption.isEmpty) {
        log.debug("ProducerProxy not ready within initial timeout, poisoning")
        self ! PoisonPill
      }
    case ppMessage: ProducerProxyMessage => ppMessage match {
      case OutgoingMessage(message) =>
        log.debug("producer got outgoing: {}", message)
        produce(message.asInstanceOf[Out])
        log.debug("producer sending Ack")
        sender ! Ack
      case OutgoingReady(channel) =>
        log.debug("ProducerProxy ready to go, got channel {}", channel)
        require(channelOption.isEmpty)
        channelOption = Some(channel.asInstanceOf[Concurrent.Channel[Out]])
        flushBuffer()
      case OutgoingComplete =>
        // fixed: log message had a stray trailing ')'
        log.debug("ProducerProxy got complete, closing down")
        self ! PoisonPill
      case OutgoingError(what, _) =>
        log.debug("ProducerProxy got error, closing down: {}", what)
        self ! PoisonPill
      case GetProducer =>
        log.debug("ProducerProxy returning its enumerator: {}", enumerator)
        sender ! GotProducer(enumerator)
    }
  }
  override def preStart(): Unit = {
    log.debug("starting")
    // Suicide if the unicast channel never becomes ready in time.
    context.system.scheduler.scheduleOnce(WebSocketActor.timeout.duration, self, InitialReadyTimeout)
  }
  override def postStop(): Unit = {
    log.debug("stopping")
    channelOption.foreach { channel =>
      try channel.eofAndEnd() catch {
        case e: Exception =>
          log.debug("Problem closing websocket outgoing producer: {}: {}", e.getClass.getSimpleName, e.getMessage)
      }
    }
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/ActorHandlerSpec.scala
|
package console.handler
import org.specs2.mutable._
import activator.analytics.data._
import akka.actor.{ ActorRef, ActorPath }
import com.typesafe.trace.uuid.UUID
import activator.analytics.data.TimeRangeType.TimeRangeType
import activator.analytics.data.TimeRangeType
import scala.concurrent.duration._
import scala.Some
import activator.analytics.data.ActorStatsMetrics
import console.ScopeModifiers
import java.util.concurrent.TimeUnit
import console.AnalyticsRepository
// Fixture data for ActorHandlerSpec: generated time ranges (minutes, hours,
// days), actor scopes, ActorStats populated from those, an in-memory
// analytics repository preloaded with the stats, and a handler factory.
object ActorHandlerSpec {
import Generators._
val minuteTimeRanges = genTimeRanges(0, 30.minutes.toMillis.toInt, 1.minute.toMillis.toInt, TimeRangeType.Minutes)
val hourTimeRanges = genTimeRanges(0, 30.hours.toMillis.toInt, 1.hour.toMillis.toInt, TimeRangeType.Hours)
val dayTimeRanges = genTimeRanges(0, 30.days.toMillis.toInt, 1.day.toMillis.toInt, TimeRangeType.Days)
val scopes = genActorScopes(Set(ActorPath.fromString("akka://user/a"), ActorPath.fromString("akka://user/b"), ActorPath.fromString("akka://user/c")),
Set(),
Set("host1", "host2", "host3"),
Set("dispatcher1", "dispatcher2", "dispatcher3"),
Set("system1", "system2", "system3"))
val timeRanges = minuteTimeRanges ++ hourTimeRanges ++ dayTimeRanges
// Stats indexed by generator counter i; bytesRead/bytesWritten both set to i.
val stats = genActorStats(scopes, timeRanges) { (i, s, tr) =>
ActorStats(tr, s, ActorStatsMetrics(bytesRead = i, bytesWritten = i))
}
// Lazily-built in-memory repository seeded with every generated stat.
lazy val repository: AnalyticsRepository = {
val r = AnalyticsRepository.freshMemoryObjects
val asr = r.actorStatsRepository
asr.save(stats)
r
}
// Builds an ActorHandlerBase whose useActorStats callback delegates to `body`.
def actorHandler(repo: AnalyticsRepository)(body: (ActorRef, ActorStats) => Unit): ActorHandlerBase = new ActorHandlerBase {
val repository: AnalyticsRepository = repo
def useActorStats(sender: ActorRef, stats: ActorStats): Unit = body(sender, stats)
}
}
// Shared matcher: compares two ActorStats while ignoring timeRange and id
// (both operands are normalized to a default TimeRange and nil UUID first).
trait ActorHandlerSpecification { this: SpecificationLike =>
def beEqualActorStats = (be_==(_: ActorStats)) ^^^ ((_: ActorStats).copy(timeRange = TimeRange(), id = UUID.nilUUID()))
}
// Verifies ActorHandlerBase.onModuleInformation: every stored ActorStats is
// found for its own scope/time range, and a query outside the stored range
// yields empty (zero-valued) stats rather than a failure.
class ActorHandlerSpec extends ActorsSpec("ActorHandlerSpec") with ActorHandlerSpecification {
isolated
import ActorHandlerSpec._
"Actor Handler" should {
"Find data" in {
// Capture whatever the handler passes to useActorStats.
var resultSender: ActorRef = null
var resultStats: ActorStats = null
val h = actorHandler(repository) { (sender, stats) =>
resultSender = sender
resultStats = stats
}
forall(stats) { (as: ActorStats) =>
h.onModuleInformation(ActorRef.noSender, ActorHandler.ActorModuleInfo(as.scope,
modifiers = ScopeModifiers(),
time = as.timeRange,
dataFrom = None,
traceId = None))
resultSender must equalTo(ActorRef.noSender)
resultStats must beEqualActorStats(as)
}
}
"Not find data outside of available range" in { // Note: this isn't actually correct, but hey. No data isn't the same as `zero` data
var resultSender: ActorRef = null
var resultStats: ActorStats = null
val h = actorHandler(repository) { (sender, stats) =>
resultSender = sender
resultStats = stats
}
// Query one minute past the latest stored minute-granularity range.
val oneMinute: Int = Duration(1, TimeUnit.MINUTES).toMillis.toInt
val maxMinutes = stats.filter(x => x.timeRange.rangeType == TimeRangeType.Minutes).maxBy(_.timeRange.startTime)
val outside = maxMinutes.copy(timeRange = TimeRange.rangeFor(maxMinutes.timeRange.startTime + oneMinute, TimeRangeType.Minutes))
val emptyStats = ActorStats(outside.timeRange, outside.scope)
h.onModuleInformation(ActorRef.noSender, ActorHandler.ActorModuleInfo(outside.scope,
modifiers = ScopeModifiers(),
time = outside.timeRange,
dataFrom = None,
traceId = None))
resultSender must equalTo(ActorRef.noSender)
resultStats must beEqualActorStats(emptyStats)
}
}
}
|
eed3si9n/activator
|
ui/app/activator/FileHelper.scala
|
<filename>ui/app/activator/FileHelper.scala
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import java.io._
import java.security.MessageDigest
import java.util.zip._
import java.nio.channels.FileChannel
object FileHelper {
final val hexArray: Array[Char] = "0123456789abcdef".toCharArray
final def bytesToHex(bytes: Array[Byte]): String = {
val hexChars: Array[Char] = new Array[Char](bytes.length * 2)
var j: Int = 0
while (j < bytes.length) {
val v = bytes(j) & 0xFF
hexChars(j * 2) = hexArray(v >>> 4)
hexChars(j * 2 + 1) = hexArray(v & 0x0F)
j += 1
}
new String(hexChars)
}
def bracket[T, I, R](in: I, init: I => R, cleanup: R => Unit)(body: R => T): T = {
val r = init(in)
try {
body(r)
} finally {
cleanup(r)
}
}
def withFileInputStream[T](in: File)(body: FileInputStream => T): T =
bracket[T, File, FileInputStream](in, (x => new FileInputStream(x)), _.close())(body)
def withFileOutputStream[T](in: File)(body: FileOutputStream => T): T =
bracket[T, File, FileOutputStream](in, (x => new FileOutputStream(x)), _.close())(body)
def withZipInputStream[T](in: InputStream)(body: ZipInputStream => T): T =
bracket[T, InputStream, ZipInputStream](in, (x => new ZipInputStream(x)), _.close())(body)
def withFileInputChannel[T](in: FileInputStream)(body: FileChannel => T): T =
bracket[T, FileInputStream, FileChannel](in, _.getChannel(), _.close())(body)
def withFileOutputChannel[T](in: FileOutputStream)(body: FileChannel => T): T =
bracket[T, FileOutputStream, FileChannel](in, _.getChannel(), _.close())(body)
def withBufferedReader[T](in: Reader)(body: BufferedReader => T): T =
bracket[T, Reader, BufferedReader](in, x => new BufferedReader(x), _.close())(body)
def withFileReader[T](in: File)(body: FileReader => T): T =
bracket[T, File, FileReader](in, x => new FileReader(x), _.close())(body)
def writeToFile(in: Array[Byte], destination: File, replaceDestination: Boolean = true): Unit = {
(destination.exists(), destination.isFile()) match {
case (false, _) => Option(destination.getParentFile()).map(_.mkdirs())
case (true, true) =>
if (replaceDestination) destination.delete()
else throw new IOException(s"destination: $destination exists replaceDestination is 'false'")
case (true, false) => throw new IOException(s"destination: $destination exists and is not a file")
}
withFileOutputStream(destination) { fos =>
fos.write(in)
}
}
def getFiles(source: File): Set[File] = {
if (source.isFile()) Set(source)
else Option(source.listFiles).map { lf =>
val (newFiles, dirs) = lf.toSeq.partition(_.isFile())
(newFiles ++ dirs.flatMap(d => getFiles(d))).toSet
} getOrElse (Set.empty[File])
}
def copyFile(source: File, destination: File, replaceDestination: Boolean = true): Unit =
(source.exists(), source.isFile()) match {
case (true, true) =>
(destination.exists(), destination.isFile()) match {
case (false, _) => Option(destination.getParentFile()).map(_.mkdirs())
case (true, true) =>
if (replaceDestination) destination.delete()
else throw new IOException(s"destination: $destination exists replaceDestination is 'false'")
case (true, false) => throw new IOException(s"destination: $destination exists and is not a file")
}
withFileInputStream(source) { fis =>
withFileOutputStream(destination) { fos =>
withFileInputChannel(fis) { fic =>
withFileOutputChannel(fos) { foc =>
foc.transferFrom(fic, 0, fic.size())
}
}
}
}
case (true, false) => throw new IOException(s"source: $source is not a file")
case (false, _) => throw new IOException(s"source file: $source does not exist")
}
def verifyFile(in: File,
targetDigest: String,
md: MessageDigest = MessageDigest.getInstance("SHA-256")): Unit = {
withFileInputStream(in) { fis =>
val buffer = new Array[Byte](1024)
var readCount: Int = fis.read(buffer)
while (readCount > 0) {
md.update(buffer, 0, readCount)
readCount = fis.read(buffer)
}
val digest = md.digest()
val digestString = bytesToHex(digest).toLowerCase
if (digestString != targetDigest.toLowerCase) throw new RuntimeException(s"input file: $in failed checksum. Looking for ${targetDigest.toLowerCase} got $digestString")
}
}
def zipfileEntryStream(zis: ZipInputStream): Stream[ZipEntry] = zis.getNextEntry() match {
case null => Stream.empty[ZipEntry]
case entry => entry #:: zipfileEntryStream(zis)
}
def relativeTo(root: File)(file: String): File =
new File(root, file)
/** Creates a fresh, empty temporary directory and returns it.
  *
  * The previous implementation created a temp *file*, deleted it, then
  * re-created the same path as a directory. That is a TOCTOU race:
  * between delete() and mkdir() another process may claim the path.
  * java.nio.file.Files.createTempDirectory creates the directory
  * atomically. It takes no suffix parameter, so the suffix is folded
  * into the name prefix; callers still receive a unique directory and
  * the signature is unchanged.
  *
  * @throws IOException if the directory cannot be created
  */
def createTempDirectory(prefix: String, suffix: String): File =
  java.nio.file.Files.createTempDirectory(prefix + suffix).toFile
/** Recursively deletes `file`. For a directory, children are removed
  * depth-first before the directory itself. If `list()` returns null
  * (I/O error / race) the directory is deliberately left in place,
  * matching the original behavior. Failures of individual delete()
  * calls are ignored.
  */
def deleteAll(file: File): Unit =
  if (file.isDirectory) {
    // null means the listing failed; in that case do nothing at all.
    Option(file.list()).foreach { childNames =>
      childNames.foreach(name => deleteAll(new File(file, name)))
      file.delete()
    }
  } else {
    file.delete()
  }
/** Extracts every entry of `zipFile` under `outputFolder` and returns
  * `outputFolder`. Directory entries become directories; file entries
  * are written after creating any missing parent directories.
  *
  * Entry names are validated against the canonical output root so a
  * crafted archive containing paths such as "../../evil" ("Zip Slip")
  * cannot write outside `outputFolder`.
  *
  * @throws IOException if an entry would resolve outside `outputFolder`
  */
def unZipFile(zipFile: File, outputFolder: File): File = {
  val buffer: Array[Byte] = new Array[Byte](1024)
  // Canonical root every resolved entry must stay inside.
  val canonicalRoot = outputFolder.getCanonicalPath
  withFileInputStream(zipFile) { fis =>
    withZipInputStream(fis) { zis =>
      for (entry <- zipfileEntryStream(zis)) {
        val fileName = entry.getName()
        val newFile = new File(outputFolder, fileName)
        val canonicalTarget = newFile.getCanonicalPath
        // Reject any entry whose canonical path escapes the output root.
        if (canonicalTarget != canonicalRoot &&
          !canonicalTarget.startsWith(canonicalRoot + File.separator))
          throw new IOException(s"zip entry: $fileName would extract outside of $outputFolder")
        if (entry.isDirectory) {
          newFile.mkdirs()
        } else {
          newFile.getParentFile().mkdirs()
          withFileOutputStream(newFile) { fos =>
            var len = zis.read(buffer)
            while (len > 0) {
              fos.write(buffer, 0, len)
              len = zis.read(buffer)
            }
          }
        }
      }
    }
  }
  outputFolder
}
}
|
eed3si9n/activator
|
project/Properties.scala
|
<gh_stars>0
import sbt._
import Keys._
// Defines how to generate properties file based on build attributes.
// Defines how to generate the activator.properties resource from build attributes.
object Properties {
  val makePropertiesSource = TaskKey[Seq[File]]("make-properties-source")
  val configVersion = taskKey[String]("version to store the config file")
  val previousConfigVersion = taskKey[String]("OLD version for the config file (to migrate from)")
  val launcherGeneration = taskKey[Int]("defines a universe of launchers to upgrade within")

  /** Writes `content` to `file` only when it differs from the current
    * on-disk content, avoiding spurious timestamp changes that would
    * retrigger downstream tasks. */
  def writeIfChanged(file: java.io.File, content: String): Unit = {
    val oldContent = if (file.exists) IO.read(file) else ""
    if (oldContent != content) {
      IO.write(file, content)
    }
  }

  /** Picks the launcher generation from the version string: a version
    * ending in a 40-character hex string (a full git SHA) is treated as a
    * snapshot and given a very large generation so it never downgrades. */
  private def pickLauncherGeneration(version: String): Int = {
    // if we're building a git snapshot, we don't want to ever downgrade
    // to "latest" according to typesafe.com
    val hyphenHex = ".*-([a-f0-9]+)$".r
    version match {
      case hyphenHex(gitCommit) if gitCommit.length == 40 =>
        123456789
      case _ =>
        // our actual current generation if it's not a snapshot
        1
    }
  }

  /** sbt settings (0.13-style `<+=`/`<<=` operators) that hook the
    * properties generator into `resourceGenerators in Compile`. */
  def makePropertyClassSetting(sbtDefaultVersion: String, scalaVersion: String): Seq[Setting[_]] = Seq(
    resourceGenerators in Compile <+= makePropertiesSource,
    configVersion := "1.0", // all 1.0 variants share config format; if changing this, move the old one down to previousConfigVersion
    previousConfigVersion := "1.0.7", // if we see this config directory, upgrade from it
    launcherGeneration := pickLauncherGeneration(version.value),
    // Depends on `compile in Compile` (the `analysis` argument is otherwise
    // unused) so the properties file is regenerated after compilation.
    makePropertiesSource <<= (version, resourceManaged in Compile, compile in Compile, configVersion, previousConfigVersion, launcherGeneration) map { (version, dir, analysis, configVersion, previousConfigVersion, launcherGeneration) =>
      val parent = dir / "activator" / "properties"
      IO createDirectory parent
      val target = parent / "activator.properties"
      writeIfChanged(target, makeJavaPropertiesString(version, sbtDefaultVersion, scalaVersion, configVersion, previousConfigVersion, launcherGeneration))
      Seq(target)
    }
  )

  /** Start time of the most recent compilation, or 0 if none.
    * NOTE(review): not referenced in this file — presumably used by other
    * build definitions; confirm before removing. */
  def lastCompilationTime(analysis: sbt.inc.Analysis): Long = {
    val lastCompilation = analysis.compilations.allCompilations.lastOption
    lastCompilation.map(_.startTime) getOrElse 0L
  }

  /** Renders the java-properties payload written to activator.properties. */
  def makeJavaPropertiesString(version: String, sbtDefaultVersion: String, scalaVersion: String, configVersion: String, previousConfigVersion: String, launcherGeneration: Int): String = {
    """|app.version=%s
       |sbt.default.version=%s
       |app.scala.version=%s
       |app.config.version=%s
       |app.config.previousVersion=%s
       |sbt.Xmx=512M
       |sbt.PermSize=128M
       |activator.launcher.generation=%d
       |""".stripMargin format (version, sbtDefaultVersion, scalaVersion, configVersion, previousConfigVersion, launcherGeneration)
  }
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/ErrorStatsJsonBuilder.scala
|
<filename>ui/app/console/handler/rest/ErrorStatsJsonBuilder.scala
package console.handler.rest
import play.api.libs.json.{ Json, JsObject, JsValue, JsArray, Writes, JsString }
import activator.analytics.data.{ TimeRange, ErrorStats, DeviationDetail }
object ErrorStatsJsonBuilder {
  import TimeRangeJsonBuilder._
  import JsonBuilder._
  import DevationDetailJsonBuilder._

  /** Renders a single ErrorStats as a JSON object. The optional `node`
    * and `actorSystem` fields are appended only when present. */
  def createJson(errorStats: ErrorStats): JsObject = {
    val counts = errorStats.metrics.counts
    val deviations = errorStats.metrics.deviations
    val required = Json.obj(
      "deadletterCount" -> counts.deadLetters,
      "deadletters" -> createDeviationDetailJsonSeq(deviations.deadLetters),
      "deadlockCount" -> counts.deadlocks,
      "deadlocks" -> createDeviationDetailJsonSeq(deviations.deadlockedThreads),
      "deviationCount" -> counts.total,
      "errorCount" -> counts.errors,
      "errors" -> createDeviationDetailJsonSeq(deviations.errors),
      "id" -> errorStats.id.toString,
      "timerange" -> createTimeRangeJson(errorStats.timeRange),
      "unhandledMessageCount" -> counts.unhandledMessages,
      "unhandledMessages" -> createDeviationDetailJsonSeq(deviations.unhandledMessages),
      "warningCount" -> counts.warnings,
      "warnings" -> createDeviationDetailJsonSeq(deviations.warnings))
    required ++ optJson("node", errorStats.node) ++ optJson("actorSystem", errorStats.actorSystem)
  }

  /** JSON array built from createJson over each element, in order. */
  def createJsonSeq(errorStatsSeq: Seq[ErrorStats]): JsArray =
    JsArray(errorStatsSeq map createJson)
}
|
eed3si9n/activator
|
ui-common/src/main/scala/activator/UICacheHelper.scala
|
<filename>ui-common/src/main/scala/activator/UICacheHelper.scala
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package activator
import activator.properties.ActivatorProperties
import activator.properties.ActivatorProperties.SCRIPT_NAME
import activator.cache._
import akka.actor.ActorRefFactory
import java.io.File
import activator.cache.RemoteTemplateRepository
import com.typesafe.config.ConfigFactory
import akka.actor.ActorSystem
import akka.actor.ActorContext
import akka.event.LoggingAdapter
import scala.concurrent.duration._
// This helper constructs the template cache in the default CLI/UI location.
object UICacheHelper {
  // this is intended to be close to "forever" since if we time
  // out we'll pretty much fail catastrophically
  private implicit val timeout = akka.util.Timeout(Duration(240, SECONDS))
  // TODO - Config or ActiavtorProperties?
  lazy val config = ConfigFactory.load()

  /** Extracts a LoggingAdapter from either an ActorSystem or an
    * ActorContext; anything else is a programming error. */
  def log(actorFactory: ActorRefFactory) = actorFactory match {
    case system: ActorSystem => system.log
    case context: ActorContext => context.system.log
    case whatever => throw new RuntimeException(s"don't know how to get log from $whatever")
  }

  // Default on-disk template cache location.
  val localCache = new File(ActivatorProperties.ACTIVATOR_TEMPLATE_CACHE)
  // Optional bundled seed repository; used only when the directory exists.
  val localSeed = Option(ActivatorProperties.ACTIVATOR_TEMPLATE_LOCAL_REPO) map (new File(_)) filter (_.isDirectory)

  /** Template cache backed by the remote repository, seeded locally when available. */
  def makeDefaultCache(actorFactory: ActorRefFactory): TemplateCache = {
    DefaultTemplateCache(
      actorFactory = actorFactory,
      location = localCache,
      remote = RemoteTemplateRepository(config, log(actorFactory)),
      seedRepository = localSeed)
  }

  /** Template cache that never contacts the remote repository. */
  def makeLocalOnlyCache(actorFactory: ActorRefFactory): TemplateCache = {
    DefaultTemplateCache(
      actorFactory = actorFactory,
      location = localCache,
      seedRepository = localSeed)
  }

  /** Grabs the additional script files we should clone with templates, if they are available in our environment. */
  def scriptFilesForCloning: Seq[(File, String)] = {
    def fileFor(loc: String, name: String): Option[(File, String)] = Option(loc) map (new File(_)) filter (_.exists) map (_ -> name)
    // First try the bin/ + libexec/ layout; the launcher jar plus at least
    // one of the bat/bash scripts must exist for the set to be usable.
    val batFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_BAT("/bin/"), "bin/" + SCRIPT_NAME + ".bat")
    val jarFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_JAR("libexec"), "libexec/" + ActivatorProperties.ACTIVATOR_LAUNCHER_JAR_NAME("libexec"))
    val bashFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_BASH("/bin/"), "bin/" + SCRIPT_NAME)
    if (jarFile.isDefined && (batFile.isDefined || bashFile.isDefined))
      Seq(batFile, jarFile, bashFile).flatten
    else {
      // Fall back to a flat layout (everything next to each other);
      // presumably the dev/unpacked install — TODO confirm.
      val batFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_BAT(""), SCRIPT_NAME + ".bat")
      val jarFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_JAR(null), ActivatorProperties.ACTIVATOR_LAUNCHER_JAR_NAME(null))
      val bashFile = fileFor(ActivatorProperties.ACTIVATOR_LAUNCHER_BASH(""), SCRIPT_NAME)
      if (jarFile.isDefined && (batFile.isDefined || bashFile.isDefined))
        Seq(batFile, jarFile, bashFile).flatten
      else
        Nil
    }
  }
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/JsonBuilderActor.scala
|
<reponame>eed3si9n/activator
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import akka.actor.{ ActorLogging, Actor }
/** Base trait for actors that build JSON responses; bundles Actor with
  * ActorLogging so concrete builders get `log` for free. */
trait JsonBuilderActor extends Actor with ActorLogging {
}
|
eed3si9n/activator
|
ui/app/activator/SbtClientActor.scala
|
<reponame>eed3si9n/activator<filename>ui/app/activator/SbtClientActor.scala
package activator
import akka.actor._
import akka.pattern._
import play.api.libs.json._
import sbt.client._
import sbt.protocol._
import scala.concurrent.Future
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import scala.concurrent.ExecutionContext.Implicits.global
/** Bridges one SbtClient to the UI: forwards sbt protocol events to the
  * parent (for the websocket) and translates ClientAppRequest messages
  * into sbt client calls whose results are piped back to the sender. */
class SbtClientActor(val client: SbtClient) extends Actor with ActorLogging {
  log.debug(s"Creating SbtClientActor ${self.path.name}")
  import SbtClientActor._

  // Do not restart children; this actor's lifetime is tied to the client.
  override val supervisorStrategy = SupervisorStrategy.stoppingStrategy

  // Initialize the life cycle handler for the sbt client actor
  val lifeCycleHandler = context.actorOf(SbtClientLifeCycleHandlerActor.props(client), "lifeCycleHandler-" + self.path.name)
  lifeCycleHandler ! SbtClientLifeCycleHandlerActor.Initialize

  /** Wraps an sbt event and sends it to the parent for the websocket. */
  def forwardOverSocket(event: Event): Unit = {
    context.parent ! NotifyWebSocket(SbtProtocol.wrapEvent(event))
  }

  /** Synthesizes a log event at `level` and sends it over the websocket. */
  def produceLog(level: String, message: String): Unit = {
    context.parent ! NotifyWebSocket(SbtProtocol.synthesizeLogEvent(level, message))
  }

  override def receive = {
    case event: Event => event match {
      // Client closed: terminate this actor (postStop of the lifecycle
      // handler closes the client).
      case _: ClosedEvent =>
        self ! PoisonPill
      case _: BuildStructureChanged =>
        // this should not happen unless during development, hence the error level
        log.error(s"Received event which should have been filtered out by SbtClient ${event}")
      case changed: ValueChanged => forwardOverSocket(changed)
      case entry: LogEvent => entry match {
        case e: DetachedLogEvent => forwardOverSocket(e)
        case e: TaskLogEvent => forwardOverSocket(e)
        case e: BackgroundJobLogEvent => forwardOverSocket(e)
      }
      case fail: ExecutionFailure => forwardOverSocket(fail)
      case yay: ExecutionSuccess => forwardOverSocket(yay)
      case starting: ExecutionStarting => forwardOverSocket(starting)
      case waiting: ExecutionWaiting => forwardOverSocket(waiting)
      case finished: TaskFinished => forwardOverSocket(finished)
      case started: TaskStarted => forwardOverSocket(started)
      case taskEvent: TaskEvent => forwardOverSocket(taskEvent)
      case detachedEvent: DetachedEvent => forwardOverSocket(detachedEvent)
      case loaded: BuildLoaded => forwardOverSocket(loaded)
      case failed: BuildFailedToLoad => forwardOverSocket(failed)
      case background: BackgroundJobEvent => forwardOverSocket(background)
      case background: BackgroundJobStarted => forwardOverSocket(background)
      case background: BackgroundJobFinished => forwardOverSocket(background)
    }
    case structure: MinimalBuildStructure =>
      forwardOverSocket(BuildStructureChanged(structure))
    case req: ClientAppRequest => {
      req match {
        case re: RequestExecution =>
          log.debug("requesting execution of " + re.command)
          // NOTE(review): `.get` throws if command is None — confirm
          // upstream always supplies a command here.
          client.requestExecution(re.command.get, interaction = None)
        case ce: CancelExecution =>
          log.debug("canceling execution " + ce.executionId)
          client.cancelExecution(ce.executionId)
        case pac: PossibleAutoCompletions =>
          log.debug("possible autocompletions for " + pac.command.get)
          client.possibleAutocompletions(pac.command.get, detailLevel = pac.detailLevel.getOrElse(0))
        case rsd: RequestSelfDestruct =>
          log.debug("Asking sbt to exit")
          client.requestSelfDestruct()
          Future.successful(None)
      }
    } recover {
      // Convert a failed client call into a Status.Failure for the sender
      // (and a debug log line over the websocket).
      case NonFatal(e) =>
        log.debug(s"request to sbt failed ${e.getMessage}")
        produceLog(LogMessage.DEBUG, s"request $req failed: ${e.getClass.getName}: ${e.getMessage}")
        Status.Failure(e)
    } map { result =>
      log.debug(s"${req} result: ${result}")
      produceLog(LogMessage.DEBUG, s"request $req result: ${result}")
      SbtClientResponse(req.serialId, result, req.command)
    } pipeTo sender
    // `sender` above is evaluated while the message is being handled
    // (pipeTo is called immediately), so the capture is safe.
  }
}
object SbtClientActor {
  /** Props factory for SbtClientActor wrapping the given client. */
  def props(client: SbtClient) = Props(new SbtClientActor(client))
  // NOTE(review): not referenced in this file — presumably consumed by
  // peers of this actor; confirm before removing.
  case class PlayAvailable(available: Boolean)
}
/**
* Sets up all subscriptions to the sbt client required.
* Forwards all messages to its parent.
* Takes care of resources during life cycle changes.
*/
/**
 * Sets up all subscriptions to the sbt client required.
 * Forwards all messages to its parent.
 * Takes care of resources during life cycle changes.
 */
class SbtClientLifeCycleHandlerActor(val client: SbtClient) extends Actor with ActorLogging {
  import SbtClientLifeCycleHandlerActor._

  // Subscriptions created on Initialize; kept so postStop can cancel them.
  var eventsSub: Option[Subscription] = None
  var buildSub: Option[Subscription] = None
  var valueSub: Option[Subscription] = None

  def receive = {
    case Initialize =>
      handleEvents
      watchBuild
      setupSubscription
  }

  override def postStop(): Unit = {
    log.debug("postStop")
    // Cancel whichever subscriptions were established.
    eventsSub map { _.cancel() }
    buildSub map { _.cancel() }
    valueSub map { _.cancel() }
    // we were probably stopped because the client closed already,
    // but if not, close here.
    client.close()
  }

  /** Subscribes to sbt protocol events, forwarding each to the parent. */
  def handleEvents = {
    eventsSub = Some(client.handleEvents { event =>
      context.parent ! event
    })
  }

  /** Subscribes to build structure changes, forwarding each to the parent. */
  def watchBuild = {
    buildSub = Some(client.watchBuild { structure =>
      context.parent ! structure
    })
  }

  // this is a hardcoded hack... we need to control the list of things
  // to watch from JS, and we should handle build structure changes
  // by redoing this
  def setupSubscription = {
    valueSub = Some(new Subscription() {
      // Option(context) guards against callbacks firing after the actor
      // has been stopped and its context nulled out.
      private def forward(key: ScopedKey, result: TaskResult): Unit =
        Option(context).foreach(_.parent ! ValueChanged(key, result))
      val eagerSubs: Seq[Subscription] =
        Seq("discoveredMainClasses",
          "mainClass") map { name =>
            client.rawWatch(name)(forward)
          }
      // Currently no lazy watches; the hook is kept for symmetry.
      val lazySubs: Seq[Subscription] =
        Seq[String]() map { name =>
          client.rawLazyWatch(name)(forward)
        }
      override def cancel(): Unit = {
        (eagerSubs ++ lazySubs) map { sub => sub.cancel() }
      }
    })
  }
}
object SbtClientLifeCycleHandlerActor {
  /** Props factory for the lifecycle handler wrapping the given client. */
  def props(client: SbtClient) = Props(new SbtClientLifeCycleHandlerActor(client))
  // Kick-off message: establishes all client subscriptions (see receive).
  case object Initialize
}
|
eed3si9n/activator
|
ui/app/console/handler/DeviationHandler.scala
|
<gh_stars>0
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console
package handler
import akka.actor.{ ActorRef, Props }
import com.typesafe.trace.uuid.UUID
import com.typesafe.trace.TraceEvent
import console.handler.rest.DeviationJsonBuilder.{ DeviationResult, ValidResult, InvalidResult }
import console.handler.rest.DeviationJsonBuilder
import console.AnalyticsRepository
object DeviationHandler {
  /** Props for a DeviationHandler over `repository`; `builderProps`
    * defaults to the JSON builder and can be overridden (e.g. in tests). */
  def props(repository: AnalyticsRepository,
    builderProps: Props = DeviationJsonBuilder.props()): Props =
    Props(classOf[DeviationHandler], repository, builderProps)
  // Request payload: the UUID of the trace event to look up.
  case class DeviationModuleInfo(eventID: UUID) extends ModuleInformationBase
}
/** Lookup logic for a single deviation: fetches the event and its trace
  * from the repository, then dispatches to one of the two callbacks. */
trait DeviationHandlerBase extends RequestHandlerLike[DeviationHandler.DeviationModuleInfo] {
  import DeviationHandler._
  // Called when no event exists for the requested id.
  def useNoDeviation(sender: ActorRef, eventId: UUID): Unit
  // Called with the event plus all trace events belonging to its trace.
  def useDeviation(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit
  def onModuleInformation(sender: ActorRef, mi: DeviationModuleInfo): Unit = {
    (for {
      event <- repository.traceRepository.event(mi.eventID)
      traces: Seq[TraceEvent] = repository.traceRepository.trace(event.trace)
    } yield {
      useDeviation(sender, mi.eventID, event, traces)
    }) getOrElse (useNoDeviation(sender, mi.eventID))
  }
}
/** Concrete handler: forwards lookup results to a child JSON builder,
  * which replies to the original sender. */
class DeviationHandler(val repository: AnalyticsRepository,
  builderProps: Props) extends RequestHandler[DeviationHandler.DeviationModuleInfo] with DeviationHandlerBase {
  val builder = context.actorOf(builderProps, "deviationBuilder")
  def useNoDeviation(sender: ActorRef, eventId: UUID): Unit = {
    builder ! InvalidResult(sender, eventId)
  }
  def useDeviation(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit = {
    builder ! ValidResult(sender, eventId, event, traces)
  }
}
|
eed3si9n/activator
|
ui/app/activator/Global.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import play.api._
/** Play application lifecycle hooks. */
object Global extends GlobalSettings {
  // No extra work before start; kept as an explicit extension point.
  override def beforeStart(app: Application) {
    super.beforeStart(app)
  }
  // On shutdown, log and let AppManager release application resources.
  override def onStop(app: Application) {
    super.onStop(app)
    Logger.info("onStop received closing down the app")
    activator.AppManager.onApplicationStop()
  }
}
|
eed3si9n/activator
|
project/git.sbt
|
<filename>project/git.sbt
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.6.1")
|
eed3si9n/activator
|
ui/app/console/handler/ActorHandler.scala
|
<filename>ui/app/console/handler/ActorHandler.scala
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console
package handler
import akka.actor.{ ActorRef, Props }
import activator.analytics.data.{ TimeRange, Scope, ActorStats }
import console.handler.rest.ActorJsonBuilder.ActorResult
import console.handler.rest.ActorJsonBuilder
import console.AnalyticsRepository
object ActorHandler {
  /** Props for an ActorHandler over `repository`; `builderProps` defaults
    * to the JSON builder and can be overridden (e.g. in tests). */
  def props(repository: AnalyticsRepository,
    builderProps: Props = ActorJsonBuilder.props()) =
    Props(classOf[ActorHandler], repository, builderProps)
  // Request payload describing which actor stats to aggregate.
  case class ActorModuleInfo(scope: Scope,
    modifiers: ScopeModifiers,
    time: TimeRange,
    dataFrom: Option[Long],
    traceId: Option[String]) extends ScopedModuleInformationBase
}
/** Aggregation logic: concatenates all actor stats in the requested time
  * range and scope, then hands the result to `useActorStats`. */
trait ActorHandlerBase extends RequestHandlerLike[ActorHandler.ActorModuleInfo] {
  import ActorHandler._
  def useActorStats(sender: ActorRef, stats: ActorStats): Unit
  def onModuleInformation(sender: ActorRef, mi: ActorModuleInfo): Unit = {
    useActorStats(sender, ActorStats.concatenate(repository.actorStatsRepository.findWithinTimePeriod(mi.time, mi.scope), mi.time, mi.scope))
  }
}
/** Concrete handler: forwards aggregated stats to a child JSON builder,
  * which replies to the original sender. */
class ActorHandler(val repository: AnalyticsRepository,
  builderProps: Props) extends RequestHandler[ActorHandler.ActorModuleInfo] with ActorHandlerBase {
  val builder = context.actorOf(builderProps, "actorBuilder")
  def useActorStats(sender: ActorRef, stats: ActorStats): Unit = {
    builder ! ActorResult(sender, stats)
  }
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/TypesafeComProxy.scala
|
package activator.typesafeproxy
import java.util.concurrent.TimeUnit
import akka.actor._
import com.typesafe.config.{ Config => TSConfig }
import play.api.libs.json.JsValue
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
import akka.util.Timeout
/** Protocol and state types for the typesafe.com/lightbend.com proxy cache.
  * Each cached resource type gets one slot keyed by its erased class name;
  * slots carry an optimistic-concurrency version counter. */
object TypesafeComProxy {
  // ---- slot states ---------------------------------------------------------
  sealed trait SlotValue[+T]
  case object Empty extends SlotValue[Nothing]
  // A filler actor is currently fetching the value.
  case class Pending(actor: ActorRef) extends SlotValue[Nothing]
  case class Value[T](value: Try[T]) extends SlotValue[T]
  object SlotValue {
    def empty[T]: SlotValue[T] = Empty
  }

  // One cache slot: current value, factory for the actor that (re)fills it,
  // the type tag used as map key, a version for optimistic concurrency,
  // requests parked while a fill is in flight, and whether successful
  // values should be retained at all.
  case class CacheEntry[T](value: SlotValue[T],
    filler: (ActionPair[T]#Get, Long, ActorRef, ActorRef) => Props,
    tag: String,
    version: Long = 0L,
    pendingRequests: Set[ActionPair[T]#Get] = Set.empty[ActionPair[T]#Get],
    cacheValue: Boolean = true) {
    def maybeDoBump(doBump: Boolean): CacheEntry[T] = if (doBump) this.copy(version = this.version + 1L) else this
  }
  object CacheEntry {
    // NOTE(review): Manifest#erasure is deprecated in favor of
    // runtimeClass — kept unchanged here (used consistently as map key).
    def fromManifest[T](value: SlotValue[T],
      filler: (ActionPair[T]#Get, Long, ActorRef, ActorRef) => Props,
      version: Long = 0L,
      pendingRequests: Set[ActionPair[T]#Get] = Set.empty[ActionPair[T]#Get],
      shouldCache: Boolean = true)(implicit ev: Manifest[T]): CacheEntry[T] =
      CacheEntry(value, filler, ev.erasure.getName, version, pendingRequests, shouldCache)
  }

  // Whole cache: entries keyed by erased class name of the cached type.
  case class CacheState(entries: Map[String, CacheEntry[_]] = Map.empty[String, CacheEntry[_]]) {
    def lookup[U](implicit ev: Manifest[U]): Option[CacheEntry[U]] =
      entries.get(ev.erasure.getName).map(_.asInstanceOf[CacheEntry[U]])
    def lookup[U](key: String): Option[CacheEntry[U]] =
      entries.get(key).map(_.asInstanceOf[CacheEntry[U]])
    // Replaces the slot value for type U, bumping its version unless doBump is false.
    def update[U](value: SlotValue[U], doBump: Boolean = true)(implicit ev: Manifest[U]): CacheState = {
      val tag = ev.erasure.getName
      CacheState(entries.foldLeft(Map.empty[String, CacheEntry[_]]) {
        case (s, (k, v)) => if (k == tag) s + (k -> v.asInstanceOf[CacheEntry[Any]].maybeDoBump(doBump).copy(value = value)) else s + (k -> v)
      })
    }
    // Replaces the whole entry matching value.tag, preserving (optionally bumped) version.
    def updateAll[U](value: CacheEntry[U], doBump: Boolean = true): CacheState =
      CacheState(entries.foldLeft(Map.empty[String, CacheEntry[_]]) {
        case (s, (k, v)) => if (k == value.tag) s + (k -> value.copy(version = v.maybeDoBump(doBump).version)) else s + (k -> v)
      })
    def add[U](value: CacheEntry[U]): CacheState =
      lookup[U](value.tag).map(_ => updateAll[U](value)).getOrElse(this.copy(entries = this.entries + (value.tag -> value)))
  }

  /** Builds the initial cache with one slot per cached resource. Fillers
    * default to ??? so tests can supply only the ones they exercise. */
  def initialStateBuilder(authState: SlotValue[AuthenticationState] = SlotValue.empty[AuthenticationState],
    authGetter: (ActionPair[AuthenticationState]#Get, Long, ActorRef, ActorRef) => Props = (_, _, _, _) => ???,
    subscriberData: SlotValue[SubscriberData] = SlotValue.empty[SubscriberData],
    subscriberDataGetter: (ActionPair[SubscriberData]#Get, Long, ActorRef, ActorRef) => Props = (_, _, _, _) => ???,
    activatorInfo: SlotValue[ActivatorLatestInfo] = SlotValue.empty[ActivatorLatestInfo],
    activatorInfoGetter: (ActionPair[ActivatorLatestInfo]#Get, Long, ActorRef, ActorRef) => Props = (_, _, _, _) => ???,
    httpJsonGetter: (ActionPair[JsValue]#Get with HasUrl, Long, ActorRef, ActorRef) => Props = (_, _, _, _) => ???): CacheState = {
    CacheState()
      .add(CacheEntry.fromManifest[AuthenticationState](authState, authGetter))
      .add(CacheEntry.fromManifest[SubscriberData](subscriberData, subscriberDataGetter))
      .add(CacheEntry.fromManifest[ActivatorLatestInfo](activatorInfo, activatorInfoGetter))
      // Raw HTTP JSON fetches are one-shot: cacheValue = false.
      .add(CacheEntry.fromManifest[JsValue](SlotValue.empty[JsValue], httpJsonGetter.asInstanceOf[(ActionPair[JsValue]#Get, Long, ActorRef, ActorRef) => Props]).copy(cacheValue = false))
  }

  sealed trait Response
  sealed trait LocalRequest[Resp] extends Request[Resp]

  // Get/Put message pair parameterized by the cached type T.
  abstract class ActionPair[T](implicit ev: Manifest[T]) {
    protected trait GetBase extends LocalRequest[Value] {
      def websocketActor: ActorRef
      final val key: String = ev.erasure.getName
      final def toPut(value: Try[T], version: Long, sendTo: ActorRef, cacheValue: Boolean = true): Put = Put(value, version, sendTo, cacheValue)
      final def withValue(value: Try[T], version: Long)(implicit sender: ActorRef) = response(Value(value, version))
    }
    type Get <: GetBase
    final case class Value(value: Try[T], version: Long) extends Response
    final case class Outcome(result: Try[Unit]) extends Response
    // Put carries the version observed at Get time; a stale version fails.
    final case class Put(value: Try[T], version: Long, sendTo: ActorRef, cacheValue: Boolean = true) extends LocalRequest[Outcome] {
      final val key: String = ev.erasure.getName
      final def success()(implicit sender: ActorRef) = response(Outcome(Success(())))
      final def failed()(implicit sender: ActorRef) =
        response(Outcome(Failure(new CachePutFailure(s"Could not put value:$value into cache slot: $key"))))
    }
  }

  trait HasUrl {
    def url: String
  }

  // ActionPair whose Get carries no extra parameters.
  abstract class SingletonActionPair[T](implicit ev: Manifest[T]) extends ActionPair[T] {
    final case class Get(sendTo: ActorRef, websocketActor: ActorRef) extends GetBase
  }
  case object Authentication extends SingletonActionPair[AuthenticationState]
  case object SubscriberDetail extends SingletonActionPair[SubscriberData]
  case object ActivatorInfo extends SingletonActionPair[ActivatorLatestInfo]

  // ActionPair for fetching arbitrary JSON from a URL prefix + path.
  case class GetFromRemote(prefix: String) extends ActionPair[JsValue] {
    final case class Get(path: String, sendTo: ActorRef, websocketActor: ActorRef) extends GetBase with HasUrl {
      val url: String = prefix + path
    }
  }
  def getFromTypesafeCom(): GetFromRemote = GetFromRemote("https://www.lightbend.com/")

  // ---- configuration -------------------------------------------------------
  sealed trait RpcEndpoint {
    def url: String
    def timeout: FiniteDuration
  }
  case class LoginConfig(url: String, timeout: FiniteDuration) extends RpcEndpoint
  case class SubscriptionConfig(url: String, timeout: FiniteDuration) extends RpcEndpoint
  case class ActivatorInfoConfig(url: String, timeout: FiniteDuration) extends RpcEndpoint
  case class Config(lookupTimeout: Timeout, login: LoginConfig, subscriptionData: SubscriptionConfig, activatorInfo: ActivatorInfoConfig)

  /** Runs `body` against the "activator.lightbend-com-proxy" config subtree. */
  def withTypesafeComConfig[T](in: TSConfig)(body: TSConfig => T): T = {
    val c = in.getConfig("activator.lightbend-com-proxy")
    body(c)
  }

  /** Parses endpoint URLs and timeouts from the application config. */
  def fromConfig(in: TSConfig): Config = {
    withTypesafeComConfig(in) { configRoot =>
      val lookupTimeout = Timeout(configRoot.getDuration("lookup-timeout", TimeUnit.MILLISECONDS).intValue.millis)
      val login = configRoot.getConfig("login")
      val subscriptionData = configRoot.getConfig("subscriber-data")
      val activatorInfo = configRoot.getConfig("activator-info")
      Config(lookupTimeout = lookupTimeout,
        login = LoginConfig(login.getString("url"), login.getDuration("timeout", TimeUnit.MILLISECONDS).intValue.millis),
        subscriptionData = SubscriptionConfig(subscriptionData.getString("url"), subscriptionData.getDuration("timeout", TimeUnit.MILLISECONDS).intValue.millis),
        activatorInfo = ActivatorInfoConfig(activatorInfo.getString("url"), activatorInfo.getDuration("timeout", TimeUnit.MILLISECONDS).intValue.millis))
    }
  }

  def props(initialCacheState: TypesafeComProxy.CacheState): Props =
    Props(new TypesafeComProxy(initialCacheState))
}
/** Actor serving Get/Put requests against the cache. Gets on an empty or
  * failed slot spawn a filler actor and park the request; Puts succeed
  * only if the version still matches (optimistic concurrency). */
class TypesafeComProxy(initialCacheState: TypesafeComProxy.CacheState) extends Actor with ActorLogging {
  import TypesafeComProxy._
  def run(state: CacheState): Receive = {
    def doGet[T](msg: ActionPair[T]#Get): Unit = {
      state.lookup[T](msg.key).foreach { slot =>
        slot.value match {
          // Nothing cached (or a previous fill failed): start a filler and
          // park the request; version is NOT bumped (doBump = false).
          case Empty | Value(Failure(_)) =>
            val actor = context.actorOf(slot.filler(msg, slot.version, self, msg.websocketActor))
            context.become(run(state.updateAll(slot.copy(value = Pending(actor), pendingRequests = slot.pendingRequests + msg), false)))
          // A fill is already in flight: just park this request too.
          case Pending(_) =>
            context.become(run(state.updateAll(slot.copy(pendingRequests = slot.pendingRequests + msg), false)))
          // Cached success: answer immediately.
          case Value(v @ Success(_)) =>
            msg.withValue(v, slot.version)
        }
      }
    }
    def doPut[T](msg: ActionPair[T]#Put): Unit = {
      state.lookup[T](msg.key).foreach { slot =>
        if (slot.version == msg.version) {
          // Release all parked requests with the new value at version + 1.
          slot.pendingRequests.foreach(x => x.withValue(msg.value, slot.version + 1))
          msg.success()
          // Retain the value only if both the slot and the Put allow caching.
          if (slot.cacheValue && msg.cacheValue) context.become(run(state.updateAll(slot.copy(value = Value(msg.value), pendingRequests = Set.empty[ActionPair[T]#Get]))))
          else context.become(run(state.updateAll(slot.copy(value = Empty, pendingRequests = Set.empty[ActionPair[T]#Get]))))
        } else {
          // Stale write: somebody else updated the slot since this Get.
          msg.failed()
        }
      }
    }
    {
      case x: ActionPair[_]#Get => doGet(x)
      case x: ActionPair[_]#Put => doPut(x)
    }
  }
  def receive: Receive = run(initialCacheState)
}
/** Websocket-facing protocol for the proxy: JSON (de)serialization of UI
  * requests and their responses, tagged with "TypesafeComProxy". */
object TypesafeComProxyUIActor {
  import SubscriberData._
  import activator.JsonHelper._
  import play.api.libs.functional.syntax._
  import play.api.libs.json._
  import play.api.libs.json.Json._

  val requestTag = "TypesafeComProxy"
  val responseTag = requestTag

  sealed trait Response {
    def requestId: String
  }
  sealed trait LocalRequest[+T <: Response] {
    def requestId: String
  }

  // -- subscriber data ----------------------------------------------------
  sealed trait SubscriberResponse extends Response
  case class NotASubscriber(requestId: String) extends SubscriberResponse
  case class SubscriptionDetails(data: SubscriberData, requestId: String) extends SubscriberResponse
  case class GetSubscriptionDetail(requestId: String) extends LocalRequest[SubscriberResponse] {
    def notASubscriber(): SubscriberResponse = NotASubscriber(requestId)
    def details(data: SubscriberData): SubscriberResponse = SubscriptionDetails(data, requestId)
    def failure(message: String): SubscriberResponse = Failure(message, requestId)
  }

  // -- activator release info ---------------------------------------------
  sealed trait ActivatorInfoResponse extends Response
  case class ActivatorInfo(data: ActivatorLatestInfo, requestId: String) extends ActivatorInfoResponse
  case class GetActivatorInfo(requestId: String) extends LocalRequest[ActivatorInfoResponse] {
    def info(data: ActivatorLatestInfo): ActivatorInfoResponse = ActivatorInfo(data, requestId)
    def failure(message: String): ActivatorInfoResponse = Failure(message, requestId)
  }

  // -- arbitrary JSON fetched from typesafe.com ---------------------------
  sealed trait TypesafeComResponse extends Response
  case class JSON(value: JsValue, path: String, requestId: String) extends TypesafeComResponse
  case class GetFromTypesafeCom(path: String, requestId: String) extends LocalRequest[TypesafeComResponse] {
    def withJson(data: JsValue): TypesafeComResponse = JSON(data, path, requestId)
    def failure(message: String): TypesafeComResponse = Failure(message, requestId)
  }

  // Shared failure response usable for every request flavor.
  case class Failure(message: String, requestId: String) extends SubscriberResponse with ActivatorInfoResponse with TypesafeComResponse

  // NOTE(review): the inner match is non-exhaustive — an unknown "type"
  // field (or getFromTypesafeCom without "path") throws MatchError inside
  // reads; presumably surfaced as a JsError upstream — confirm.
  implicit val websocketReads: Reads[LocalRequest[_ <: Response]] =
    extractMessage[LocalRequest[_ <: Response]](requestTag)(new Reads[LocalRequest[_ <: Response]] {
      def reads(in: JsValue): JsResult[LocalRequest[_ <: Response]] =
        (((__ \ "type").read[String] and
          (__ \ "requestId").read[String] and
          (__ \ "path").readNullable[String]).apply { (t, rid, path) =>
          (t, path) match {
            case ("getSubscriptionDetail", _) => GetSubscriptionDetail(rid)
            case ("getActivatorInfo", _) => GetActivatorInfo(rid)
            case ("getFromTypesafeCom", Some(p)) => GetFromTypesafeCom(p, rid)
          }
        }).reads(in)
    })

  implicit val websocketWrites: Writes[Response] =
    emitMessage(responseTag)(_ match {
      case Failure(m, rid) => Json.obj("type" -> "proxyFailure", "message" -> m, "requestId" -> rid)
      case NotASubscriber(rid) => Json.obj("type" -> "notASubscriber", "requestId" -> rid)
      case x: SubscriptionDetails => Json.obj("type" -> "subscriptionDetails", "data" -> x.data, "requestId" -> x.requestId)
      case x: ActivatorInfo => Json.obj("type" -> "activatorInfo", "data" -> x.data, "requestId" -> x.requestId)
      case JSON(json, path, rid) => Json.obj("type" -> "fromTypesafeCom", "path" -> path, "data" -> json, "requestId" -> rid)
    })

  // Extractors for routing raw websocket JSON / actor messages.
  object Inbound {
    def unapply(in: JsValue): Option[LocalRequest[_ <: Response]] = Json.fromJson[LocalRequest[_ <: Response]](in).asOpt
  }
  object Outbound {
    def unapply(in: Any): Option[Response] = in match {
      case x: Response => Some(x)
      case _ => None
    }
  }

  def props(request: LocalRequest[_ <: Response], typesafeComActor: ActorRef, websocketsActor: ActorRef): Props =
    Props(new TypesafeComProxyUIActor(request, typesafeComActor, websocketsActor))
}
/** One-shot actor: translates a single UI request into a proxy Get, maps
  * the Value reply back into a websocket Response, then stops itself. */
class TypesafeComProxyUIActor(request: TypesafeComProxyUIActor.LocalRequest[_ <: TypesafeComProxyUIActor.Response], typesafeComActor: ActorRef, websocketsActor: ActorRef) extends Actor with ActorLogging {
  import TypesafeComProxyUIActor._
  def receive: Receive = {
    // This block runs when `receive` is evaluated to build the behavior:
    // the proxy Get is dispatched immediately on actor start.
    request match {
      case _: GetActivatorInfo =>
        typesafeComActor ! TypesafeComProxy.ActivatorInfo.Get(self, websocketsActor)
      case _: GetSubscriptionDetail =>
        typesafeComActor ! TypesafeComProxy.SubscriberDetail.Get(self, websocketsActor)
      case x: GetFromTypesafeCom =>
        typesafeComActor ! TypesafeComProxy.getFromTypesafeCom().Get(x.path, self, websocketsActor)
    }
    // Pairs the proxy's reply with the original request kind; any
    // mismatched combination is reported back as a Failure.
    def handleResponse[T](response: TypesafeComProxy.ActionPair[T]#Value): Unit = (response.value, request) match {
      case (Success(_: SubscriberData.NotASubscriber), x: GetSubscriptionDetail) =>
        websocketsActor ! x.notASubscriber()
      case (Success(data: SubscriberData.Detail), x: GetSubscriptionDetail) =>
        websocketsActor ! x.details(data)
      case (Success(data: ActivatorLatestInfo), x: GetActivatorInfo) =>
        websocketsActor ! x.info(data)
      case (Success(data: JsValue), x: GetFromTypesafeCom) =>
        websocketsActor ! x.withJson(data)
      case (scala.util.Failure(f), x) =>
        websocketsActor ! TypesafeComProxyUIActor.Failure(f.getMessage, x.requestId)
      case (other, x) =>
        val message = s"[${x.requestId}]Got unexpected response:$other for request: $request"
        websocketsActor ! TypesafeComProxyUIActor.Failure(message, x.requestId)
        log.error(message)
    }
    {
      case value: TypesafeComProxy.ActionPair[_]#Value =>
        handleResponse(value)
        context stop self
    }
  }
}
|
eed3si9n/activator
|
ui/app/activator/ProjectPreprocessor.scala
|
<gh_stars>0
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import akka.actor._
import sbt.protocol._
import java.io._
import sbt.IO
import scala.concurrent.Future
import scala.util.Try
import scala.concurrent.ExecutionContext.Implicits.global
object ProjectPreprocessor {
// Message sent back to the app actor when preprocessing has completed.
final case object Finished

/** Preprocessor that does no work: immediately reports completion. */
def noOpPreprocessor(appActor: ActorRef, socket: ActorRef, config: AppConfig): Unit =
  appActor ! Finished

// "epoch.major.minor" plus a free-form tail (e.g. "-RC1"). The [^"]* tail is
// presumably because versions are scraped out of quoted build-file strings —
// TODO confirm against the code using these parsers.
private final val ossVersionParser = """^(\d+)\.(\d+)\.(\d+)([^"]*)$""".r
// OSS version plus a Typesafe Reactive Platform "-bin-rp-YYvVVpPP" suffix.
private final val rpVersionParser = """^(\d+)\.(\d+)\.(\d+)([^"]*)-bin-rp-(\d{2})v(\d{2})p(\d{2})$""".r
private sealed trait ArtifactVersion {
def versionString: String
def equalTo(other: ArtifactVersion): Boolean
def epoch: Int
def major: Int
def minor: Int
def tail: String
override def toString: String = versionString
}
private class OssVersion(val versionString: String) extends ArtifactVersion {
private final val ossVersionParser(e, m, n, t) = versionString
def equalTo(other: ArtifactVersion): Boolean = other match {
case o: OssVersion => versionString.equals(o.versionString)
case o: TRPVersion => epoch == o.epoch && major == o.major && minor == o.minor
}
val epoch: Int = e.toInt
val major: Int = m.toInt
val minor: Int = n.toInt
val tail: String = t
}
private class TRPVersion(val versionString: String) extends ArtifactVersion {
private final val rpVersionParser(e, m, n, t, y, v, p) = versionString
def equalTo(other: ArtifactVersion): Boolean = other match {
case o: OssVersion => o.equalTo(this)
case o: TRPVersion => versionString.equals(o.versionString)
}
val epoch: Int = e.toInt
val major: Int = m.toInt
val minor: Int = n.toInt
val tail: String = t
val year: Int = y.toInt
val month: Int = v.toInt
val patch: Int = p.toInt
}
private object ArtifactVersion {
def fromString(versionString: String): ArtifactVersion = versionString match {
case rpVersionParser(_, _, _, _, _, _, _) => new TRPVersion(versionString)
case ossVersionParser(_, _, _, _) => new OssVersion(versionString)
}
}
private final class ProjectPreprocessorUtil(appActor: ActorRef, config: AppConfig) {
def log(level: String, message: String): Unit = appActor ! NotifyWebSocket(SbtProtocol.synthesizeLogEvent(level, message))
def debug(message: String) = log("debug", message)
def warn(message: String) = log("warn", message)
def info(message: String) = log("info", message)
def error(message: String) = log("error", message)
def file(in: String) = new File(config.location.getAbsolutePath(), in)
def withFile[T](in: String)(body: File => T): Option[T] = {
val f = file(in)
if (f.exists()) Some(body(f))
else None
}
def exists(in: String) = {
val f = file(in)
f.exists()
}
def delete(in: String): Unit = {
if (file(in).delete()) debug(s"deleting $in -> success")
else debug(s"deleting $in -> FAILURE!!")
}
def readFile(in: String): Option[String] = withFile(in)(f => IO.read(f))
def writeFile(inf: String, contents: String): Unit = {
val f = file(inf)
IO.write(f, contents)
}
}
private final val targetPlay23Version = "2.3.10"
private final val playPluginVersionRegex = """\d+\.\d+\.\d+[^"]*"""
private final val playPluginVersionPattern = playPluginVersionRegex.r
private final val playPluginPattern = (""""com\.typesafe\.play"\s*%\s*"sbt-plugin"\s*%\s*"""" + playPluginVersionRegex + "\"").r
private final val oldPlayPluginPattern = (""""play"\s*%\s*"sbt-plugin"\s*%\s*"""" + playPluginVersionRegex + "\"").r
private final def playPluginReplacementString(version: String): String = s""""com.typesafe.play" % "sbt-plugin" % "$version""""
private final val playForkRunPluginPattern = (""""com\.typesafe\.play"\s*%\s*"sbt-fork-run-plugin"\s*%\s*"""" + playPluginVersionRegex + "\"").r
private final def readPlugins(util: ProjectPreprocessorUtil): Option[String] = {
util.info("Examining project/plugins.sbt")
util.readFile("project/plugins.sbt")
}
private final def readPlay(util: ProjectPreprocessorUtil): Option[String] = {
util.info("Examining project/play.sbt")
util.readFile("project/play.sbt")
}
private final def readPlayForkRunPlugin(util: ProjectPreprocessorUtil): Option[String] = {
util.info("Examining project/play-fork-run.sbt")
util.readFile("project/play-fork-run.sbt")
}
private final def getPlayVersion(source: String): Option[ArtifactVersion] = for {
playPlugin <- playPluginPattern.findFirstIn(source) orElse oldPlayPluginPattern.findFirstIn(source)
version <- playPluginVersionPattern.findFirstIn(playPlugin)
} yield ArtifactVersion.fromString(version)
private final def getPlayForkRunnerVersion(source: String): Option[ArtifactVersion] = for {
playForkRunnerPlugin <- playForkRunPluginPattern.findFirstIn(source)
version <- playPluginVersionPattern.findFirstIn(playForkRunnerPlugin)
} yield ArtifactVersion.fromString(version)
private final def rewritePlayPlugin(util: ProjectPreprocessorUtil, version: String): Unit = {
util.info(s"Rewriting Play plugin version to $version")
val nv = playPluginReplacementString(version)
val p1 = for {
source <- readPlay(util)
_ <- getPlayVersion(source)
} yield playPluginPattern.replaceAllIn(source, nv)
val p2 = for {
source <- readPlugins(util)
_ <- getPlayVersion(source)
} yield playPluginPattern.replaceAllIn(source, nv)
p1.foreach(util.writeFile("project/play.sbt", _))
p2.foreach(util.writeFile("project/plugins.sbt", _))
}
private final def examinePlay(util: ProjectPreprocessorUtil): Option[ArtifactVersion] = for {
source <- readPlay(util) orElse readPlugins(util)
version <- getPlayVersion(source)
} yield version
private final def examinePlayForkRunner(util: ProjectPreprocessorUtil): Option[ArtifactVersion] = for {
source <- readPlayForkRunPlugin(util)
version <- getPlayForkRunnerVersion(source)
} yield version
def defaultPreprocessor(appActor: ActorRef, socket: ActorRef, config: AppConfig): Unit = {
Future {
val util = new ProjectPreprocessorUtil(appActor: ActorRef, config: AppConfig)
val playVersion = examinePlay(util)
val playForkRunnerVersion = examinePlayForkRunner(util)
(playVersion, playForkRunnerVersion) match {
case (None, None) =>
util.info("Does not appear to be a Play project")
case (None, Some(_)) =>
util.warn("Possible Play project, but cannot detect play version")
case (Some(pv), pfrv) if pv.epoch == 2 =>
pv.major match {
case 5 | 4 => util.info(s"Using Play $pv -> OK")
case 3 if pv.minor >= 8 => util.info(s"Using Play $pv -> OK")
case 3 if pv.minor < 8 =>
util.warn(s"Using Play < 2.3.8 -> will update to $targetPlay23Version")
rewritePlayPlugin(util, targetPlay23Version)
util.delete("project/play-fork-run.sbt")
case _ =>
util.error(s"Using unsupported version of Play: $pv")
util.delete("project/play-fork-run.sbt")
}
pfrv.foreach { v =>
if (!v.equalTo(pv)) {
util.info("Fork-runner mispatch ... rebuilding")
util.delete("project/play-fork-run.sbt")
}
}
}
} onComplete (_ => appActor ! Finished)
}
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/PlayRequestsJsonBuilder.scala
|
<reponame>eed3si9n/activator<filename>ui/app/console/handler/rest/PlayRequestsJsonBuilder.scala<gh_stars>0
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import akka.actor.{ ActorRef, Props }
import console.ClientController.Update
import activator.analytics.data.PlayRequestSummary
import play.api.libs.json.{ Json, JsObject }
// Actor that renders Play request summaries to JSON and pushes them to the client.
class PlayRequestsJsonBuilder extends JsonBuilderActor {
  import PlayRequestsJsonBuilder._

  // Deconstruct the result message directly and reply with the rendered JSON.
  def receive = {
    case PlayRequestsResult(receiver, stats) =>
      receiver ! Update(createPlayRequestsJson(stats))
  }
}
object PlayRequestsJsonBuilder {
  /** Props factory for [[PlayRequestsJsonBuilder]]. */
  def props(): Props =
    Props(classOf[PlayRequestsJsonBuilder])

  /** Carries the computed request summaries plus the actor to notify. */
  case class PlayRequestsResult(receiver: ActorRef, stats: Seq[PlayRequestSummary])

  /** Wraps all summaries in the `{"type": "requests", "data": ...}` envelope. */
  def createPlayRequestsJson(stats: Seq[PlayRequestSummary]): JsObject = {
    val summaries = Json.toJson(stats.map(createPlayRequestJson))
    Json.obj(
      "type" -> "requests",
      "data" -> Json.obj("playRequestSummaries" -> summaries))
  }

  /** Renders one request summary as a flat JSON object. */
  def createPlayRequestJson(req: PlayRequestSummary): JsObject = {
    val info = req.invocationInfo
    Json.obj(
      "traceId" -> req.traceId.toString,
      "id" -> info.id,
      "startTimeMillis" -> req.start.millis,
      "path" -> info.path,
      "controller" -> info.controller,
      "controllerMethod" -> info.method,
      "httpMethod" -> info.httpMethod,
      "httpResponseCode" -> req.response.resultInfo.httpResponseCode,
      "invocationTimeMillis" -> (req.end.millis - req.start.millis))
  }
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/JsonGetterActor.scala
|
<gh_stars>0
package activator.typesafeproxy
import java.util.concurrent.TimeoutException
import activator.typesafeproxy.TypesafeComProxy.ActionPair
import akka.actor._
import play.api.Play.current
import play.api.libs.json._
import play.api.libs.ws._
import activator.HttpHelper
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.{ Try, Failure, Success }
object JsonGetterActor {
  // Performs an HTTP GET of a JSON document and reports a Try[JsValue] to an actor.
  type DoGetJson = (String, ActorRef) => Unit
  // Builds the cache Put message from a result, a cache version, and the sender.
  type ToPut = (Try[JsValue], Long, ActorRef) => ActionPair[JsValue]#Put
  sealed trait Notification
  // Default DoGetJson implementation backed by Play WS: GETs `jsonUrl` and
  // sends Success(parsed JSON) or a Failure (ProxyTimeout / ProxyFailure) to `sendTo`.
  def httpGetJson(timeout: FiniteDuration,
    executionContext: ExecutionContext,
    timeoutMessage: (String, FiniteDuration) => String = (url, d) => s"Timeout from $url. Waited $d",
    failureMessage: (String, Throwable) => String = (url, e) => s"Failure getting $url. Error: ${e.getMessage}")(jsonUrl: String,
      sendTo: ActorRef): Unit = {
    implicit val ec = executionContext
    // Every outcome is funneled to the target actor as a Try[JsValue].
    def respondWith(result: Try[JsValue]): Unit = sendTo ! result
    val req = HttpHelper.proxyHolder(WS.url(jsonUrl)
      .withRequestTimeout(timeout.toMillis.intValue))
    req.get() onComplete {
      case Success(response) => respondWith {
        response.status match {
          case 200 =>
            // Body parse failures are captured as Failure via Try.
            Try(Json.parse(response.body))
          case status =>
            Failure(new ProxyFailure(s"Unexpected response code: $status"))
        }
      }
      case Failure(exception) => respondWith {
        exception match {
          case x: TimeoutException => Failure(new ProxyTimeout(timeoutMessage(jsonUrl, timeout), x))
          case e => Failure(new ProxyFailure(failureMessage(jsonUrl, e), e))
        }
      }
    }
  }
  // Props factory; `toPut` decides which cache ActionPair the result is stored in.
  def props(jsonUrl: String,
    doGetJson: JsonGetterActor.DoGetJson,
    toPut: JsonGetterActor.ToPut,
    uiActorProps: ActorRef => Props,
    version: Long,
    replyTo: ActorRef,
    websocketActor: ActorRef,
    startMessage: String => String = url => s"Fetching from $url",
    cancelString: String => String = url => s"Fetching from $url canceled by user"): Props =
    Props(new JsonGetterActor(jsonUrl, doGetJson, toPut, uiActorProps, version, replyTo, websocketActor, startMessage, cancelString))
}
/**
 * One-shot actor that fetches a JSON document, reports progress/cancel/retry
 * through a UI companion actor, and delivers the result to the proxy cache via
 * the injected `toPut` factory.
 */
class JsonGetterActor(jsonUrl: String,
  doGetJson: JsonGetterActor.DoGetJson,
  toPut: JsonGetterActor.ToPut,
  uiActorProps: ActorRef => Props,
  version: Long,
  replyTo: ActorRef,
  websocketActor: ActorRef,
  startMessage: String => String = url => s"Fetching from $url",
  cancelString: String => String = url => s"Fetching from $url canceled by user") extends Actor with ActorLogging {
  import TypesafeComProxy._

  // UI-side companion actor used for progress reporting and retry prompts.
  private final val uiActor: ActorRef = context.actorOf(uiActorProps(websocketActor))

  /** Report a user cancellation to the cache, then shut down. */
  def cancel(message: String = cancelString(jsonUrl)): Unit = {
    // Fix: previously this sent a hard-coded ActivatorInfo.Put even though
    // this actor is generic over which ActionPair cache entry it serves —
    // the `toPut` factory exists precisely to build the correct Put message.
    replyTo ! toPut(Failure(new ProxyCanceled(message)), version, self)
    context.become(doStop())
  }

  /** After a retryable failure: wait for the user to cancel or retry. */
  def onFailure(onRetry: () => Unit): Receive = {
    case UIActor.Cancel => cancel()
    case UIActor.Retry => onRetry()
  }

  /** Poison the UI actor and stop self once it is confirmed terminated. */
  def doStop(): Receive = {
    context.watch(uiActor)
    uiActor ! PoisonPill
    {
      case Terminated(`uiActor`) =>
        context stop self
    }
  }

  /** Route a fetch result: success/unknown failure go to the cache, timeouts offer a retry. */
  def handleResult(result: Try[JsValue]): Unit = result match {
    case x @ Success(_) =>
      replyTo ! toPut(x, version, self)
      context.become(doStop())
    case Failure(e: ProxyTimeout) =>
      log.error(s"Unable to fetch from: $jsonUrl", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      // On retry, restart the whole request.
      context.become(onFailure(() => context.become(runRequest())))
    case x @ Failure(e) =>
      log.error(s"Unknown exception fetching $jsonUrl", e)
      replyTo ! toPut(x, version, self)
      context.become(doStop())
  }

  /** Waiting for the HTTP result (or a user cancel); always closes the UI progress action. */
  def awaitResults(endReport: UIActor.ReportEndAction): Receive = {
    case UIActor.Cancel =>
      uiActor ! endReport
      cancel()
    case x @ Success(_: JsValue) =>
      uiActor ! endReport
      handleResult(x.asInstanceOf[Success[JsValue]])
    case x: Failure[_] =>
      uiActor ! endReport
      handleResult(x.asInstanceOf[Failure[JsValue]])
  }

  /** Kick off the HTTP fetch and report the start of a cancelable UI action. */
  def runRequest(): Receive = {
    val actionId: String = UIActor.genActionId()
    doGetJson(jsonUrl, self)
    val startReport = UIActor.CancelableRequests.ReportStartAction(startMessage(jsonUrl), actionId, self)
    uiActor ! startReport
    awaitResults(startReport.endReport())
  }

  def receive: Receive = {
    runRequest()
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/Generators.scala
|
<filename>ui/test/console/handler/Generators.scala
package console.handler
import akka.actor.ActorPath
import activator.analytics.data.{
ActorStats,
TimeRange,
Scope,
ErrorStats,
ErrorStatsMetrics,
DeviationDetails,
DeviationDetail,
Counts
}
import activator.analytics.data.TimeRangeType._
import com.typesafe.trace.uuid.UUID
import scala.util.Random
import com.typesafe.trace._
import scala.language.implicitConversions
import scala.language.higherKinds
/** Random-data generators for console handler tests. */
object Generators {
  // Lifts a by-name expression into a generator function (re-evaluated per call).
  implicit def liftGen[T](gen: => T): () => T = () => gen

  /** Cartesian product of the inputs as actor Scopes (tag is optional). */
  def genActorScopes(paths: Set[ActorPath],
    tags: Set[String],
    hosts: Set[String],
    dispatchers: Set[String],
    systems: Set[String]): Set[Scope] =
    for {
      path <- paths
      tag <- tags.map(x => Some(x): Option[String]).union(Set(None))
      host <- hosts
      dispatcher <- dispatchers
      system <- systems
    } yield Scope(Some(path.toString), tag, Some(host), Some(dispatcher), Some(system), None, None)

  /** One TimeRange per step of `incr` in [start, end). */
  def genTimeRanges(start: Int, end: Int, incr: Int, rangeType: TimeRangeType): Seq[TimeRange] =
    Range(start, end, incr).map(TimeRange.rangeFor(_, rangeType))

  /** Builds one ActorStats per (scope, range) pair, numbering them from 1. */
  def genActorStats(scopes: Set[Scope], timeRanges: Seq[TimeRange])(body: (Int, Scope, TimeRange) => ActorStats): Seq[ActorStats] = {
    var index: Int = 1
    for {
      scope <- scopes.toSeq
      range <- timeRanges
    } yield {
      val r = body(index, scope, range)
      index += 1
      r
    }
  }

  def genUUID(): UUID = new UUID(Random.nextLong(), Random.nextLong())

  /** Uniformly picks one of the given values. */
  def choose[T](v: T, vs: T*): T =
    Random.nextInt(vs.size + 1) match {
      case 0 => v
      case i => vs(i - 1)
    }

  def genMultiple[T](times: Int, gen: () => T): Seq[T] = (1 to times).map(_ => gen())
  // `maxTimes` must be >= 1 (Random.nextInt rejects non-positive bounds); may yield 0 elements.
  def genMaxMultiple[T](maxTimes: Int, gen: () => T): Seq[T] = genMultiple(Random.nextInt(maxTimes), gen)
  def genContainer[M[_], T](times: Int, gen: () => T)(implicit conv: Seq[T] => M[T]): M[T] = conv(genMultiple(times, gen))
  def genMaxContainer[M[_], T](maxTimes: Int, gen: () => T)(implicit conv: Seq[T] => M[T]): M[T] = conv(genMaxMultiple(maxTimes, gen))

  // Picks one of the supplied generators (selection happens once, when genChoose is called).
  def genChoose[T](gen: () => T, gens: (() => T)*): () => T =
    Random.nextInt(gens.size + 1) match {
      case 0 => gen
      case i => gens(i - 1)
    }

  /** Random alphanumeric string of 1..maxNameLength characters. */
  def genString(maxNameLength: Int = 10): String =
    new String(Random.alphanumeric.take(Random.nextInt(maxNameLength) + 1).toArray)

  def genActorPath(namePrefix: String = "", maxNameLength: Int = 10): ActorPath =
    ActorPath.fromString(s"akka://user/$namePrefix${genString(maxNameLength)}")

  /** 50/50 Some(gen())/None. */
  def genOption[T](gen: () => T): Option[T] =
    if (Random.nextBoolean()) Some(gen())
    else None

  /** Constant generator. */
  def genPoint[T](v: T): () => T = () => v

  def genActorInfo(actorPathGen: () => ActorPath = genActorPath(),
    dispatcherGen: () => Option[String] = genOption(genString()),
    remoteGen: () => Boolean = Random.nextBoolean(),
    routerGen: () => Boolean = Random.nextBoolean(),
    tagsGen: () => Set[String] = Set[String]()): ActorInfo =
    ActorInfo(path = actorPathGen().toString, dispatcher = dispatcherGen(), remote = remoteGen(), router = routerGen(), tags = tagsGen())

  def genActorRequested(info: ActorInfo = genActorInfo(), actor: ActorInfo = genActorInfo()): ActorRequested = ActorRequested(info, actor)
  def genActorCreated(info: ActorInfo = genActorInfo()): ActorCreated = ActorCreated(info)
  def genActorTold(info: ActorInfo = genActorInfo(), message: String = genString(), sender: Option[ActorInfo] = genOption(genActorInfo())): ActorTold = ActorTold(info, message, sender)
  def genActorAutoReceived(info: ActorInfo = genActorInfo(), message: String = genString()): ActorAutoReceived = ActorAutoReceived(info, message)
  def genActorAutoCompleted(info: ActorInfo = genActorInfo(), message: String = genString()): ActorAutoCompleted = ActorAutoCompleted(info, message)
  def genActorReceived(info: ActorInfo = genActorInfo(), message: String = genString()): ActorReceived = ActorReceived(info, message)
  def genActorCompleted(info: ActorInfo = genActorInfo(), message: String = genString()): ActorCompleted = ActorCompleted(info, message)
  def genActorAsked(info: ActorInfo = genActorInfo(), message: String = genString()): ActorAsked = ActorAsked(info, message)
  def genActorFailed(info: ActorInfo = genActorInfo(), reason: String = genString(), supervisor: ActorInfo = genActorInfo()): ActorFailed = ActorFailed(info, reason, supervisor)
  def genTempActorCreated(info: ActorInfo = genActorInfo()): TempActorCreated = TempActorCreated(info)
  def genTempActorStopped(info: ActorInfo = genActorInfo()): TempActorStopped = TempActorStopped(info)

  /** Told/received plus either failed (with probability `failThreshold`) or completed. */
  def genMessageTraceAnnotation(info: ActorInfo = genActorInfo(),
    message: String = genString(),
    sender: Option[ActorInfo] = genOption(genActorInfo()),
    supervisor: ActorInfo = genActorInfo(),
    reason: String = genString(),
    failThreshold: Double = 0.1): Seq[ActorAnnotation] =
    Seq(genActorTold(info, message, sender), genActorReceived(info, message), if (Random.nextDouble() <= failThreshold) genActorFailed(info, reason, supervisor) else genActorCompleted(info, message))

  def genNMessageTraceAnnotations(copies: Int,
    info: ActorInfo = genActorInfo(),
    messageGen: () => String = genString(),
    senderGen: () => Option[ActorInfo] = () => genOption(genActorInfo()),
    supervisor: ActorInfo = genActorInfo(),
    reasonGen: () => String = genString(),
    failThreshold: Double = 0.1): Seq[Seq[ActorAnnotation]] =
    (1 to copies).map(_ => genMessageTraceAnnotation(info, messageGen(), senderGen(), supervisor, reasonGen(), failThreshold)).toSeq

  /**
   * Interleaves the inner sequences into one sequence, preserving the relative
   * order within each inner sequence but choosing randomly which sequence
   * contributes the next element. Assumes each inner sequence is non-empty.
   */
  def randomFlatten(in: Seq[Seq[ActorAnnotation]]): Seq[ActorAnnotation] = {
    def flattenWith(choice: Int, current: Seq[Seq[ActorAnnotation]], accum: Seq[ActorAnnotation]): Seq[ActorAnnotation] = {
      if (current.isEmpty) accum
      else {
        val c = current(choice)
        val (h, t) = (c.head, c.tail)
        val (prefix, suffix) = current.splitAt(choice)
        val naccum = accum :+ h
        val ncurrent = if (t.isEmpty) prefix ++ suffix.tail else (prefix :+ t) ++ suffix.tail
        val ns = ncurrent.size
        val nc = if (ns == 0) 0 else Random.nextInt(ns)
        flattenWith(nc, ncurrent, naccum)
      }
    }
    // Fix: guard the empty case — Random.nextInt(0) throws IllegalArgumentException.
    if (in.isEmpty) Seq.empty[ActorAnnotation]
    else flattenWith(Random.nextInt(in.size), in, Seq.empty[ActorAnnotation])
  }

  /** Generator of "local" UUIDs that changes to a fresh UUID every few calls. */
  def localUUIDGenGen(changeWithin: Int = 10): () => UUID = {
    var remaining = Random.nextInt(changeWithin)
    var uuid = new UUID()
    { () =>
      if (remaining == 0) {
        uuid = new UUID()
        remaining = Random.nextInt(changeWithin)
      }
      remaining -= 1
      uuid
    }
  }

  /** Chains annotations into TraceEvents: each event's parent is the previous event's id. */
  def genTraceEvents(annotations: Seq[Annotation],
    trace: UUID = new UUID(),
    localGen: () => UUID = localUUIDGenGen(),
    sampled: Int = 1,
    node: String = "node1",
    host: String = "host1",
    actorSystem: String = "system1",
    startNanoTime: Long = System.nanoTime()): Seq[TraceEvent] =
    annotations.foldLeft((UUID.nilUUID(), startNanoTime, Seq.empty[TraceEvent])) {
      case ((parent, nanoTime, accum), v) =>
        val id = new UUID()
        val milliTime: Long = nanoTime / 1000000
        val te = TraceEvent(v,
          id,
          trace,
          localGen(),
          parent,
          sampled,
          node,
          host,
          actorSystem,
          milliTime,
          nanoTime)
        // Advance time by a random 20..219 ns delta between events.
        (id, nanoTime + Random.nextInt(200) + 20, accum :+ te)
    }._3

  /** A fresh UUID guaranteed not to be in `in`. */
  def uniqueUUID(in: Set[UUID]): UUID = {
    var test = new UUID()
    while (in(test)) {
      test = new UUID()
    }
    test
  }

  /** Strictly increasing timestamp generator. */
  def genTimestampGen(start: Long = 0L, maxDelta: Int = 100): () => Long = {
    var current = start
    { () =>
      current = current + Random.nextInt(maxDelta) + 1
      current
    }
  }

  def genCounts(errorsGen: () => Int = Random.nextInt(5),
    warningsGen: () => Int = Random.nextInt(5),
    deadLettersGen: () => Int = Random.nextInt(5),
    unhandledMessagesGen: () => Int = Random.nextInt(5),
    deadlocksGen: () => Int = Random.nextInt(5)): () => Counts =
    () => Counts(errors = errorsGen(),
      warnings = warningsGen(),
      deadLetters = deadLettersGen(),
      unhandledMessages = unhandledMessagesGen(),
      deadlocks = deadlocksGen())

  def genDeviationDetail(eventIdGen: () => UUID = genUUID(),
    traceIdGen: () => UUID = genUUID(),
    messageGen: () => String = genString(50),
    timestampGen: () => Long = genTimestampGen()): () => DeviationDetail =
    () => DeviationDetail(eventId = eventIdGen(),
      traceId = traceIdGen(),
      message = messageGen(),
      timestamp = timestampGen())

  // Deviation lists are sorted newest-first, matching production ordering.
  def genDeviationDetails(errorsGen: () => Seq[DeviationDetail] = genMaxMultiple(5, genDeviationDetail()),
    warningsGen: () => Seq[DeviationDetail] = genMaxMultiple(5, genDeviationDetail()),
    deadLettersGen: () => Seq[DeviationDetail] = genMaxMultiple(5, genDeviationDetail()),
    unhandledMessagesGen: () => Seq[DeviationDetail] = genMaxMultiple(5, genDeviationDetail()),
    deadlockedThreadsGen: () => Seq[DeviationDetail] = genMaxMultiple(5, genDeviationDetail())): () => DeviationDetails =
    () => DeviationDetails(errors = errorsGen().sortBy(_.timestamp).reverse.toList,
      warnings = warningsGen().sortBy(_.timestamp).reverse.toList,
      deadLetters = deadLettersGen().sortBy(_.timestamp).reverse.toList,
      unhandledMessages = unhandledMessagesGen().sortBy(_.timestamp).reverse.toList,
      deadlockedThreads = deadlockedThreadsGen().sortBy(_.timestamp).reverse.toList)

  def genErrorStatsMetrics(countsGen: () => Counts = genCounts(),
    deviationsGen: () => DeviationDetails = genDeviationDetails()): () => ErrorStatsMetrics =
    () => ErrorStatsMetrics(counts = countsGen(), deviations = deviationsGen())

  def genErrorStats(timeRangeGen: () => TimeRange,
    nodeGen: () => Option[String] = genOption(genString()),
    actorSystemGen: () => Option[String] = genOption(genString()),
    errorMetricsGen: () => ErrorStatsMetrics = genErrorStatsMetrics(),
    idGen: () => UUID = genUUID()): () => ErrorStats =
    () => ErrorStats(timeRange = timeRangeGen(), node = nodeGen(), actorSystem = actorSystemGen(), metrics = errorMetricsGen(), id = idGen())
}
|
eed3si9n/activator
|
integration-tests/src/main/scala/launcher/MustStartUI.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package launcher
import activator.tests._
// Integration test: launching `activator ui` must bring up the HTTP server.
class MustStartUI extends IntegrationTest {
  val sbtProject = makeDummySbtProject(new java.io.File("dummy"))
  // Launch the UI against the dummy project; `.run` starts the process asynchronously.
  val process = run_activator(Seq("ui"), sbtProject).run
  // Wait for Http Server startup on port 8888
  // TODO - If we pick a random port in the future, this needs to detect it...
  try assert(waitForHttpServerStartup("http://localhost:8888/"))
  finally {
    // Always kill the spawned UI process, even when the assertion fails.
    process.destroy()
  }
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/AuthenticationActor.scala
|
<gh_stars>0
package activator.typesafeproxy
import java.util.concurrent.TimeoutException
import akka.actor._
import play.api.Play.current
import play.api.libs.ws._
import activator.HttpHelper
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
// State of authentication against typesafe.com.
sealed trait AuthenticationState
object AuthenticationStates {
  // The authentication token is the Play session cookie returned by typesafe.com.
  type AuthenticationData = WSCookie
  // Placeholder cookie with dummy field values; `underlying` is unimplemented
  // and throws NotImplementedError if called. NOTE(review): looks intended for
  // defaults/tests only — confirm before using in production code paths.
  val emptyAuthentication: AuthenticationData = new WSCookie {
    val domain: String = "domain"
    val expires: Option[Long] = None
    val maxAge: Option[Int] = None
    val name: Option[String] = None
    val path: String = "path"
    val secure: Boolean = true
    def underlying[T]: T = ???
    val value: Option[String] = Some("value")
  }
  // Successful login carrying the session cookie.
  case class Authenticated(authenticationData: AuthenticationData) extends AuthenticationState
}
object AuthenticationActor {
  // Attempts a login with (username, password) and reports a
  // Try[AuthenticationState] to the given actor.
  type DoAuthenticate = (String, String, ActorRef) => Unit

  /**
   * Default DoAuthenticate backed by Play WS: POSTs the credentials to
   * `authenticationUrl` and sends the outcome to `sendTo` as a
   * Try[AuthenticationState] (Authenticated, ProxyInvalidCredentials,
   * ProxyTimeout or ProxyFailure).
   */
  def httpDoAuthenticate(authenticationUrl: String, authenticateTimeout: FiniteDuration, executionContext: ExecutionContext)(username: String, password: String, sendTo: ActorRef): Unit = {
    implicit val ec = executionContext
    def respondWith(result: Try[AuthenticationState]): Unit = sendTo ! result
    val req = HttpHelper.proxyHolder(WS.url(authenticationUrl)
      .withHeaders("Accept" -> "application/json")
      .withRequestTimeout(authenticateTimeout.toMillis.intValue))
    req.post(Map("email" -> Seq(username), "password" -> Seq(password))) onComplete {
      case Success(response) => respondWith {
        response.status match {
          case 200 =>
            // Fix: previously this called `.get` on the Option returned by
            // `response.cookie(...)`; a 200 response without the session
            // cookie threw NoSuchElementException out of the callback instead
            // of producing a Failure for the caller to handle.
            response.cookie("PLAY_SESSION") match {
              case Some(session) => Success(AuthenticationStates.Authenticated(session))
              case None => Failure(new ProxyFailure("Authentication response did not include a PLAY_SESSION cookie"))
            }
          case 401 => Failure(new ProxyInvalidCredentials("Invalid login credentials"))
          case 400 => Failure(new ProxyInvalidCredentials("Missing email or password"))
          case status => Failure(new ProxyFailure(s"Unknown response code: $status"))
        }
      }
      case Failure(exception) => respondWith {
        exception match {
          case x: TimeoutException => Failure(new ProxyTimeout(s"Authentication exceeded timeout ${authenticateTimeout}", x))
          case e => Failure(new ProxyFailure(s"Authentication failure: ${e.getMessage}", e))
        }
      }
    }
  }

  /** Props factory for [[AuthenticationActor]]. */
  def props(doAuthenticate: AuthenticationActor.DoAuthenticate,
    uiActorProps: ActorRef => Props,
    version: Long,
    replyTo: ActorRef,
    websocketActor: ActorRef,
    initMessage: Option[String] = None): Props =
    Props(new AuthenticationActor(doAuthenticate, uiActorProps, version, replyTo, websocketActor, initMessage))
}
// One-shot actor driving the typesafe.com login flow: asks the UI for
// credentials, runs the authentication, and delivers the result to the proxy
// cache as an Authentication.Put; supports cancel and retry via the UI actor.
class AuthenticationActor(doAuthenticate: AuthenticationActor.DoAuthenticate,
  uiActorProps: ActorRef => Props,
  version: Long,
  replyTo: ActorRef,
  websocketActor: ActorRef,
  initMessage: Option[String]) extends Actor with ActorLogging {
  import TypesafeComProxy._
  // UI-side companion actor used for credential prompts and progress reporting.
  private final val uiActor: ActorRef = context.actorOf(uiActorProps(websocketActor))
  // Report a user cancellation to the cache, then shut down.
  def cancel(message: String = "Authentication canceled by user"): Unit = {
    replyTo ! Authentication.Put(Failure(new ProxyCanceled(message)), version, self)
    context.become(doStop())
  }
  // After a retryable failure: wait for the user to cancel or retry.
  def onFailure(onRetry: () => Unit): Receive = {
    case UIActor.Cancel => cancel()
    case UIActor.Retry => onRetry()
  }
  // Poison the UI actor and stop self once it is confirmed terminated.
  def doStop(): Receive = {
    context.watch(uiActor)
    uiActor ! PoisonPill
    {
      case Terminated(`uiActor`) =>
        context stop self
    }
  }
  // Route an authentication result. Bad credentials restart from the
  // credential prompt; timeouts retry with the same credentials; unknown
  // failures are reported to the cache and the actor stops.
  def handleResult(username: String, password: String, result: Try[AuthenticationState]): Unit = result match {
    case x @ Success(_) =>
      replyTo ! Authentication.Put(x, version, self)
      context.become(doStop())
    case Failure(e: ProxyInvalidCredentials) =>
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      context.become(onFailure(() => context.become(run())))
    case Failure(e: ProxyTimeout) =>
      log.error("Unable to authenticate", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      context.become(onFailure(() => context.become(authenticating(username, password))))
    case x @ Failure(e) =>
      log.error("Unknown exception during authentication", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = false)
      replyTo ! Authentication.Put(x, version, self)
      context.become(doStop())
  }
  // Fire off the credential check and wait for its Try[AuthenticationState]
  // (or a user cancel), closing the UI progress action in every branch.
  def authenticating(username: String, password: String): Receive = {
    val actionId: String = UIActor.genActionId()
    val startReport = UIActor.CancelableRequests.ReportStartAction("Checking credentials against typesafe.com", actionId, self)
    uiActor ! startReport
    doAuthenticate(username, password, self)
    val endReport = startReport.endReport()
    {
      case UIActor.Cancel =>
        uiActor ! endReport
        cancel()
      case x @ Success(_: AuthenticationState) =>
        uiActor ! endReport
        handleResult(username, password, x.asInstanceOf[Success[AuthenticationState]])
      case x: Failure[_] =>
        uiActor ! endReport
        handleResult(username, password, x.asInstanceOf[Failure[AuthenticationState]])
    }
  }
  // Waiting for the user to submit credentials (or cancel).
  def awaitCredentials(): Receive = {
    def onCredentialsResponse(msg: UIActor.CredentialsResponse): Unit = msg match {
      case UIActor.Credentials(username, password) =>
        context.become(authenticating(username, password))
      case UIActor.Cancel => cancel()
    }
    {
      case x: UIActor.CredentialsResponse => onCredentialsResponse(x)
    }
  }
  // Prompt the UI for credentials, optionally with an explanatory message.
  def run(message: Option[String] = None): Receive = {
    uiActor ! UIActor.CancelableRequests.RequestCredentials(self, message)
    awaitCredentials()
  }
  def receive: Receive = run(initMessage)
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/JsonBuilder.scala
|
package console.handler.rest
import play.api.libs.json.{ Json, JsObject, JsValue, JsArray, Writes, JsString }
/** Helpers for rendering optional values as JSON object fields. */
object JsonBuilder {
  /** Returns `{k: writer(v)}` when the option is defined, `{}` otherwise. */
  def optJsonObj[T](k: String, v: Option[T], writer: T => JsValue): JsObject =
    v.fold(Json.obj()) { value => Json.obj(k -> writer(value)) }

  /** Returns `{k: v}` (via the implicit Writes) when defined, `{}` otherwise. */
  def optJson[T](k: String, v: Option[T])(implicit arg0: Writes[T]): JsObject =
    v.fold(Json.obj()) { value => Json.obj(k -> value) }
}
|
eed3si9n/activator
|
ui/test/PlatformTest.scala
|
package activator
import org.junit._
import java.io.File
class PlatformTest {
  val Windows = new Platform(true)
  val Linux = new Platform(false)

  /** Converting a path to its client-friendly form and back must be the identity. */
  @Test
  def transformIsReversable(): Unit = {
    def testReversal(name: String, platform: Platform, names: Seq[String]): Unit = {
      val files = names map (new File(_))
      val translated = files map platform.getClientFriendlyFilename
      // Fix: round-trip through the client-friendly form. Previously `files2`
      // was computed from the original `names`, leaving `translated` unused,
      // so the test never exercised the reverse conversion at all.
      val files2 = translated map platform.fromClientFriendlyFilename
      Assert.assertTrue(s"failed to canonically convert $name files: ", files zip files2 forall {
        case (l, r) => l.getAbsolutePath == r.getAbsolutePath
      })
    }
    // TODO - Find more odd file paths to test here...
    testReversal("Windows", Windows, Seq(
      "C:\\Users\\Josh\\Fun",
      "C:\\Program Files (x86)\\SNAPSTER\\I HATE WINDOWS"))
    testReversal("Linux", Linux, Seq(
      "/home/jsuereth/projects/stuff with spaces/guy",
      "/home/jsuereth/projects/regular"))
  }
}
|
eed3si9n/activator
|
project/s3.sbt
|
<reponame>eed3si9n/activator
// Adds the sbt-s3 plugin so the build can publish artifacts to Amazon S3.
addSbtPlugin("com.typesafe.sbt" % "sbt-s3" % "0.5")
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/ProtocolModel.scala
|
<filename>ui/app/activator/typesafeproxy/ProtocolModel.scala
package activator.typesafeproxy
import akka.actor._
// A request message that knows where its response should be delivered.
trait Request[+Resp] {
  // Actor that will receive the response.
  def sendTo: ActorRef
  // Sends `in` to the requester; the bound `T >: Resp` also permits wrapper
  // or error messages that are supertypes of the nominal response type.
  final def response[T >: Resp](in: T)(implicit sender: ActorRef): Unit = sendTo.tell(in, sender)
}
|
eed3si9n/activator
|
ui/test/FileWatcherTest.scala
|
<filename>ui/test/FileWatcherTest.scala<gh_stars>0
/**
* Copyright (C) 2016 Lightbend, Inc. <http://www.lightbend.com>
*/
package test
import org.junit.Assert._
import org.junit._
import java.io.File
import akka.actor._
import scala.concurrent._
import scala.concurrent.duration._
import akka.pattern._
import activator.SetFilesToWatch
import akka.util.Timeout
// Verifies that FileWatcher notifies subscribers when a watched file's
// modification time changes, and that it keeps doing so on repeated changes.
class FileWatcherTest {
  val testUtil = new com.typesafe.sbtrc.TestUtil(scratchDir = new File("ui/target/scratch"))
  import testUtil._
  @Test
  def testFileWatcher() {
    val dir = makeDummySbtProject("fileWatching")
    val source = new File(dir, "src/main/scala/hello.scala")
    assertTrue("source file exists", source.exists())
    val system = ActorSystem("fileWatch")
    try {
      val watcher = system.actorOf(Props(new activator.FileWatcher))
      // Intermediary actor: subscribes to the watcher, bumps the file's mtime
      // on request, and forwards "changed" to whoever asked once the watcher
      // reports the change.
      val observer = system.actorOf(Props(new Actor() with ActorLogging {
        watcher ! activator.SubscribeFileChanges(self)
        watcher ! SetFilesToWatch(Set(source))
        var changeObserver: Option[ActorRef] = None
        override def receive = {
          case "change-and-tell-me" =>
            val old = source.lastModified()
            // filesystem may not store resolution finer than seconds
            // (does not on Linux)
            source.setLastModified(old + 1000)
            changeObserver = Some(sender)
          case activator.FilesChanged(ref) =>
            changeObserver.foreach(_ ! "changed")
        }
      }));
      implicit val timeout = Timeout(5.seconds)
      implicit val ec = system.dispatcher
      // Two rounds: the watcher must keep firing after the first notification.
      val result1 = Await.result(observer ? "change-and-tell-me", timeout.duration)
      assertEquals("changed", result1)
      val result2 = Await.result(observer ? "change-and-tell-me", timeout.duration)
      assertEquals("changed", result2)
    } finally {
      // Always tear the actor system down, even on assertion failure.
      system.shutdown();
      system.awaitTermination();
    }
  }
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/TimeRangeJsonBuilder.scala
|
<reponame>eed3si9n/activator
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import play.api.libs.json.{ Json, JsObject }
import activator.analytics.data.{ TimeRange, Scope, ActorStats }
/** Renders a TimeRange as a JSON object. */
object TimeRangeJsonBuilder {
  def createTimeRangeJson(timeRange: TimeRange): JsObject = {
    import timeRange._
    Json.obj(
      "startTime" -> startTime,
      "endTime" -> endTime,
      "rangeType" -> rangeType.toString)
  }
}
|
eed3si9n/activator
|
ui/test/ConfigTest.scala
|
<filename>ui/test/ConfigTest.scala
package test
import org.junit.Assert._
import org.junit._
import activator.RootConfigOps
import activator.AppConfig
import java.io.File
import scala.concurrent._
import scala.concurrent.duration._
import java.io.FileOutputStream
// Test double for RootConfigOps that redirects the user config files into a
// scratch directory so tests never touch the real user configuration.
object TestRootConfig extends RootConfigOps {
  private def mkDir(f: File): File = {
    f.mkdirs()
    f.getCanonicalFile
  }
  private def mkDir(path: String): File =
    mkDir(new File(path))
  val scratchHome = mkDir("target/config-test-scratch")
  // "New" and "old" config locations, to exercise migration behavior.
  val configHome = mkDir(new File(scratchHome, "X.NEW"))
  val previousConfigHome = mkDir(new File(scratchHome, "X.OLD"))
  // has to be lazy because trait uses it to init
  override lazy val userConfigFile =
    new File(configHome, "config.json")
  override lazy val previousUserConfigFile =
    new File(previousConfigHome, "config.json")
  // Recreates the scratch directories (tests may delete files between runs).
  def ensureDirsExist(): Unit = {
    def ensureDir(d: File): Unit = {
      d.mkdirs()
      if (!d.exists() || !d.isDirectory())
        throw new Exception("failed to create " + d)
    }
    ensureDir(configHome)
    ensureDir(previousConfigHome)
  }
}
// these tests are all synchronized because they are testing
// behavior of global state (TestRootConfig.user).
class ConfigTest {
  /** Resets on-disk config state so every test starts from a clean slate. */
  @Before
  def beforeEachTest(): Unit = synchronized {
    TestRootConfig.userConfigFile.delete()
    TestRootConfig.previousUserConfigFile.delete()
    TestRootConfig.ensureDirsExist()
  }

  /**
   * Rewrites the user config so the app at path "foo" exists and carries the
   * given cached name (creating the entry with default metadata if absent).
   */
  private def rewriteFooCachedName(name: Option[String]): Unit = {
    val rewritten = TestRootConfig.rewriteUser { old =>
      val updated = old.applications
        .find(_.location.getPath == "foo")
        .getOrElse(AppConfig(new File("foo"), "id", createdTime = Some(1L), usedTime = Some(1L)))
        .copy(cachedName = name)
      old.copy(applications = updated +: old.applications.filter(_.location.getPath != "foo"))
    }
    Await.ready(rewritten, 5.seconds)
  }

  @Test
  def testUserConfig(): Unit = synchronized {
    val rewritten = TestRootConfig.rewriteUser { old =>
      // add "foo" only if it isn't already present
      val appList =
        if (old.applications.exists(_.location.getPath == "foo"))
          old.applications
        else
          AppConfig(new File("foo"), "id", createdTime = Some(1L), usedTime = Some(1L)) +: old.applications
      old.copy(applications = appList)
    }
    Await.ready(rewritten, 5.seconds)
    val c = TestRootConfig.user
    assertTrue("app 'foo' now in user config", c.applications.exists(_.location.getPath == "foo"))
  }

  // deliberately not a @Test: used as a fixture step by testAddingProjectName
  def removeProjectName(): Unit = {
    rewriteFooCachedName(None)
    val c = TestRootConfig.user
    assertTrue("app 'foo' now in user config with no name",
      c.applications.exists({ p => p.location.getPath == "foo" && p.cachedName.isEmpty }))
  }

  @Test
  def testAddingProjectName(): Unit = synchronized {
    removeProjectName()
    rewriteFooCachedName(Some("Hello World"))
    val c = TestRootConfig.user
    assertTrue("app 'foo' now in user config with a name",
      c.applications.exists({ p => p.location.getPath == "foo" && p.cachedName == Some("Hello World") }))
  }

  /**
   * Shared body of the corrupt-config recovery tests: writes `badContent` to
   * the user config file, asserts loading fails twice with a message containing
   * `expectedError`, then deletes the file and asserts a clean empty config loads.
   */
  private def checkRecoveryFromBadConfig(badContent: String, expectedError: String): Unit = {
    val file = TestRootConfig.userConfigFile
    try {
      file.delete()
      val stream = new FileOutputStream(file)
      stream.write(badContent.getBytes())
      stream.close()
      TestRootConfig.forceReload()
      // AssertionError is an Error, so it escapes the `case e: Exception` handler
      def expectLoadFailure(attempt: String): Exception =
        try {
          TestRootConfig.user
          throw new AssertionError(s"We expected to get an exception and not reach here ($attempt)")
        } catch {
          case e: Exception => e
        }
      val e = expectLoadFailure("first time")
      assertTrue("got the expected exception on bad json (expecting '" + expectedError + "', got '" + e.getMessage + "')",
        e.getMessage().contains(expectedError))
      // bad json is still there, so things should still fail...
      val e2 = expectLoadFailure("second time")
      assertTrue("got the expected exception on bad json (expecting '" + expectedError + "', got '" + e2.getMessage + "')",
        e2.getMessage().contains(expectedError))
      // delete the file... should now load fine
      if (!file.delete())
        throw new AssertionError("failed to delete " + file.getAbsolutePath())
      try {
        assertTrue("loaded an empty config after recovering from corrupt one", TestRootConfig.user.applications.isEmpty)
      } catch {
        case e: Exception =>
          throw new AssertionError("should not have had an error loading empty config", e)
      }
    } finally {
      // to avoid weird failures on next run of the tests
      file.delete()
    }
  }

  @Test
  def testRecoveringFromBrokenFile(): Unit = synchronized {
    checkRecoveryFromBadConfig("{ invalid json! ]", "was expecting double")
  }

  @Test
  def testRecoveringFromEmptyJsonFile(): Unit = synchronized {
    // "{}" parses but lacks required fields, hence error.path.missing
    checkRecoveryFromBadConfig("{}", "error.path.missing")
  }

  @Test
  def testRecoveringFromBrokenFileManyTimes(): Unit = synchronized {
    // this is intended to reveal a race that we were seeing intermittently
    for (_ <- 1 to 100)
      testRecoveringFromBrokenFile()
  }

  /** Serializes `json` to `f` as UTF-8. */
  private def writeJson(f: File, json: play.api.libs.json.JsValue): Unit = {
    val stream = new FileOutputStream(f)
    stream.write(play.api.libs.json.Json.stringify(json).getBytes("UTF-8"))
    stream.close()
  }

  @Test
  def testUpgradingFromPrevious(): Unit = synchronized {
    import play.api.libs.json._
    import activator.RootConfig
    val oldFile = TestRootConfig.previousUserConfigFile
    val newFile = TestRootConfig.userConfigFile
    val sampleApp = AppConfig(location = new File("somewhere"), id = "someapp", cachedName = None,
      createdTime = Some(1L), usedTime = Some(1L))
    val sampleRoot = RootConfig(Seq(sampleApp))
    writeJson(oldFile, Json.toJson(sampleRoot))
    newFile.delete()
    TestRootConfig.forceReload()
    // loading must migrate the previous config into the new location
    val newRoot = TestRootConfig.user
    assertEquals("loaded old config", sampleRoot, newRoot)
    assertTrue("new config file exists", newFile.exists)
  }

  @Test
  def testIgnorePreviousWhenCurrentPresent(): Unit = synchronized {
    import play.api.libs.json._
    import activator.RootConfig
    val oldFile = TestRootConfig.previousUserConfigFile
    val newFile = TestRootConfig.userConfigFile
    val sampleRootOld = RootConfig(Seq(AppConfig(location = new File("somewhere"), id = "someapp", cachedName = None,
      createdTime = Some(1L), usedTime = Some(1L))))
    val sampleRootNew = RootConfig(Seq(AppConfig(location = new File("somewhere2"), id = "someapp2", cachedName = None,
      createdTime = Some(1L), usedTime = Some(1L))))
    newFile.delete()
    oldFile.delete()
    writeJson(oldFile, Json.toJson(sampleRootOld))
    writeJson(newFile, Json.toJson(sampleRootNew))
    TestRootConfig.forceReload()
    // the current file must win; the previous one is left untouched
    val newRoot = TestRootConfig.user
    assertEquals("loaded new config", sampleRootNew, newRoot)
    assertTrue("new config file exists", newFile.exists)
    assertTrue("old config file exists", oldFile.exists)
  }
}
|
eed3si9n/activator
|
ui/app/activator/HomePageActor.scala
|
<reponame>eed3si9n/activator<gh_stars>0
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import activator.typesafeproxy.{ TypesafeComProxyUIActor, UIActor, TypesafeComProxy }
import akka.actor._
import akka.util.Timeout
import play.api.libs.json._
import java.io.File
import akka.pattern.pipe
import scala.util.control.NonFatal
import scala.concurrent.{ Promise, Future }
import activator._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
// THE API for the HomePage actor.
object HomePageActor {
  import play.api.libs.json._
  import play.api.libs.json.Json._
  import play.api.libs.functional.syntax._
  import JsonHelper._

  /** Inbound request: open the project that already exists at `location`. */
  case class OpenExistingApplication(location: String)
  object OpenExistingApplication {
    val tag = "OpenExistingApplication"
    // wire shape (via JsonHelper.extractRequest): { "request": tag, "location": <path> }
    implicit val openExistingApplicationReads: Reads[OpenExistingApplication] =
      extractRequest[OpenExistingApplication](tag)((__ \ "location").read[String].map(OpenExistingApplication.apply _))
    implicit val openExistingApplicationWrites: Writes[OpenExistingApplication] =
      emitRequest(tag)(in => obj("location" -> in.location))
    // extractor so incoming JsValues can be pattern-matched directly in onMessage
    def unapply(in: JsValue): Option[OpenExistingApplication] =
      Json.fromJson[OpenExistingApplication](in).asOpt
  }

  /** Inbound request: clone template `templateId` into `location`, optionally naming the project. */
  case class CreateNewApplication(location: String, templateId: String, projectName: Option[String])
  object CreateNewApplication {
    val tag = "CreateNewApplication"
    // "template" and "name" are optional on the wire; a missing template becomes ""
    implicit val createNewApplicationReads: Reads[CreateNewApplication] =
      extractRequest(tag) {
        ((__ \ "location").read[String] and
          (__ \ "template").readNullable[String] and
          (__ \ "name").readNullable[String])((l, t, n) => CreateNewApplication(l, t.getOrElse(""), n))
      }
    implicit val createNewApplicationWrites: Writes[CreateNewApplication] =
      emitRequest(tag) { in =>
        obj("location" -> in.location,
          "template" -> in.templateId,
          "name" -> in.projectName)
      }
    def unapply(in: JsValue): Option[CreateNewApplication] =
      Json.fromJson[CreateNewApplication](in).asOpt
  }

  /** Outbound response telling the browser to navigate to application `id`. */
  object RedirectToApplication {
    def apply(id: String): JsValue =
      JsObject(Seq(
        "response" -> JsString("RedirectToApplication"),
        "appId" -> JsString(id)))
  }

  /** Outbound response for an unparseable or failed request. */
  object BadRequest {
    // NOTE(review): the `request` parameter is accepted but never included in
    // the emitted JSON — confirm whether clients expect a "request" field here.
    def apply(request: String, errors: Seq[String]): JsValue =
      JsObject(Seq(
        "response" -> JsString("BadRequest"),
        "errors" -> JsArray(errors map JsString.apply)))
  }

  /** Outbound response carrying a human-readable progress message. */
  object Status {
    def apply(info: String): JsValue =
      JsObject(Seq(
        "response" -> JsString("Status"),
        "info" -> JsString(info)))
  }

  /** Internal self-message: push `json` out over the websocket. */
  case class Respond(json: JsValue)

  /** Request/response pair for the license-acceptance handshake. */
  object LicenseAccepted {
    def apply(): JsValue =
      JsObject(Seq(
        "response" -> JsString("LicenseAccepted")))
    // true only when the incoming JSON has "request": "LicenseAccepted"
    def unapply(in: JsValue): Boolean =
      try {
        if ((in \ "request").as[String] == "LicenseAccepted") true
        else false
      } catch {
        case e: JsResultException => false
      }
  }
}
/**
 * Websocket actor backing the home page: opens/creates applications and
 * relays proxy traffic between the browser and the typesafe.com proxy actor.
 */
class HomePageActor(typesafeComActor: ActorRef, lookupTimeout: Timeout) extends WebSocketActor[JsValue] with ActorLogging {
  AppManager.registerKeepAlive(self)
  import HomePageActor._

  // Dispatches parsed websocket JSON to the matching handler.
  override def onMessage(json: JsValue): Unit = json match {
    case UIActor.WebSocket.Inbound(req) =>
      // NOTE(review): only onSuccess is handled — a failed resolveOne (bad
      // actorPath or timeout) is silently dropped; confirm that's intended.
      context.actorSelection(req.actorPath).resolveOne()(lookupTimeout).onSuccess({ case a => a ! req })
    case TypesafeComProxyUIActor.Inbound(req) =>
      // one short-lived child per proxy request
      context.actorOf(TypesafeComProxyUIActor.props(req, typesafeComActor, self))
    case WebSocketActor.Ping(ping) => produce(WebSocketActor.Pong(ping.cookie))
    case OpenExistingApplication(msg) => openExistingApplication(msg.location)
    case CreateNewApplication(msg) => createNewApplication(msg.location, msg.templateId, msg.projectName)
    case _ =>
      log.error(s"HomeActor: received unknown msg: $json")
      produce(BadRequest(json.toString, Seq(s"Could not parse JSON for request: ${json}")))
  }

  // Messages produced by our helpers/child actors, forwarded to the websocket.
  override def subReceive: Receive = {
    case Respond(json) => produce(json)
    case UIActor.WebSocket.Outbound(msg) =>
      import UIActor.WebSocket._
      produce(Json.toJson(msg))
    case TypesafeComProxyUIActor.Outbound(msg) =>
      import TypesafeComProxyUIActor._
      produce(Json.toJson(msg))
  }

  // Goes off and tries to create/load an application.
  def createNewApplication(location: String, template: String, projectName: Option[String]): Unit = {
    import context.dispatcher
    val appLocation = new java.io.File(location)
    // a chance of knowing what the error is.
    val installed: Future[ProcessResult[File]] =
      controllers.api.Templates.doCloneTemplate(
        template,
        appLocation,
        projectName) map (result => result map (_ => appLocation))
    // Ensure feedback happens after clone-ing is done.
    for (result <- installed) {
      if (result.isSuccess)
        self ! Respond(Status("Template is cloned, compiling project definition..."))
      else
        // the error response itself is produced by loadApplicationAndSendResponse
        log.warning("Failed to clone template: " + result)
    }
    loadApplicationAndSendResponse("CreateNewApplication", installed)
  }

  // Goes off and tries to open an application, responding with
  // whether or not we were successful to this actor.
  def openExistingApplication(location: String): Unit = {
    log.debug(s"Looking for existing application at: $location")
    // TODO - Ensure timeout is ok...
    val file = Validating(new File(location)).validate(
      Validation.fileExists,
      Validation.isDirectory)
    if (file.isSuccess)
      self ! Respond(Status("Compiling project definition..."))
    else
      log.warning(s"Failed to locate directory $location: " + file) // error response is generated in loadApplicationAndSendResponse
    val filePromise = Promise[ProcessResult[File]]()
    filePromise.success(file)
    loadApplicationAndSendResponse("OpenExistingApplication", filePromise.future)
  }

  // helper method that given a validated file, will try to load
  // the application id and send either RedirectToApplication or BadRequest
  // back to ourselves (wrapped in Respond) via pipe.
  private def loadApplicationAndSendResponse(request: String, file: Future[ProcessResult[File]]) = {
    import context.dispatcher
    val id = file flatMapNested { file =>
      // the callback streams intermediate status JSON out over the socket
      AppManager.loadAppIdFromLocation(file,
        Some({
          json => self ! Respond(json)
        }))
    }
    val response = id map {
      case ProcessSuccess(id) =>
        log.debug(s"HomeActor: Found application id: $id")
        RedirectToApplication(id)
      // TODO - Return with form and flash errors?
      case ProcessFailure(errors) =>
        log.debug(s"HomeActor: Failed to find application: ${errors map (_.msg) mkString "\n\t"}")
        BadRequest(request, errors map (_.msg))
    } recover {
      // surface unexpected exceptions to the client rather than dropping them
      case NonFatal(e) => BadRequest(request, Seq(s"${e.getClass.getName}: ${e.getMessage}"))
    } map Respond.apply
    pipe(response) to self
  }
}
|
eed3si9n/activator
|
ui/test/activator/typesafeproxy/TypesafeComProxyTest.scala
|
<gh_stars>0
package activator.typesafeproxy
import akka.actor._
import org.junit.Assert._
import org.junit._
import scala.reflect.ClassTag
import scala.util.{ Failure, Success, Try }
/** Shared fixtures for the proxy tests: canned results and credentials. */
object TypesafeComProxyTest {
  /** Canonical "logged in" state used by happy-path tests. */
  val authenticated = Success(AuthenticationStates.Authenticated(AuthenticationStates.emptyAuthentication))
  def failure(error: Throwable): Try[AuthenticationState] = Failure(error)
  def success(subscriberData: SubscriberData = SubscriberData.exampleDetail()): Try[SubscriberData] = Success(subscriberData)
  // the standard proxy failure modes
  val canceled = failure(new ProxyCanceled("canceled"))
  val failed = failure(new ProxyFailure("fail"))
  val invalid = failure(new ProxyInvalidCredentials("invalid"))
  val timeout = failure(new ProxyTimeout("timeout"))
  val username = "username"
  val password = "password"
  val credentials = UIActor.Credentials(username, password)
  /** Fails the test unless `x` is an instance of `T`. */
  def assertTypeOf[T](x: Any)(implicit ct: ClassTag[T]): Unit = {
    val expected = ct.runtimeClass
    if (!expected.isInstance(x))
      throw new AssertionError(s"Value '$x' of type ${x.getClass.getName} is not of type ${expected.getName}", null)
  }
}
class TypesafeComProxyTest extends DefaultSpecification {
  import TypesafeComProxy._
  import TypesafeComProxyTest._

  /** A Put at the current version must broadcast a success Outcome plus a bumped Value. */
  @Test
  def testShouldAuthenticate(): Unit = withHelper { helper =>
    import helper._
    withProxy() { proxy =>
      proxy ! Authentication.Get(testActor, testActor)
      val init = expectMsgType[FakeAuthenticatorInit]
      proxy ! Authentication.Put(authenticated, init.version, testActor)
      val replies = expectMsgAllClassOf(classOf[Authentication.Outcome], classOf[Authentication.Value])
      val outcome = replies.collectFirst { case o: Authentication.Outcome => o }.get
      assertTrue(outcome.result.isSuccess)
      val v = replies.collectFirst { case value: Authentication.Value => value }.get
      assertTrue(v.value.isSuccess)
      // the proxy increments the cache version on a successful Put
      assertTrue(s"v.version: ${v.version}, init.version: ${init.version}", v.version == (init.version + 1))
    }
  }

  /** A Put with a mismatched (stale/future) version must be rejected. */
  @Test
  def testShouldFailToUpdate(): Unit = withHelper { helper =>
    import helper._
    withProxy() { proxy =>
      proxy ! Authentication.Get(testActor, testActor)
      val init = expectMsgType[FakeAuthenticatorInit]
      proxy ! Authentication.Put(authenticated, init.version + 1, testActor)
      val outcome = expectMsgType[Authentication.Outcome]
      assertTrue(outcome.result.isFailure)
    }
  }
}
|
eed3si9n/activator
|
ui/app/controllers/api/Proxy.scala
|
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package controllers.api
import play.api.mvc.{ Action, Controller }
import play.api.data._
import play.api.Logger
import scala.util.control.NonFatal
import play.api.libs.ws._
/** Simple pass-through proxy for lightbend.com content. */
object Proxy extends Controller {
  import play.api.Play.current

  /** GETs `path` from lightbend.com and returns the body as a 200 response. */
  def getTypesafe(path: String) = Action.async { request =>
    import concurrent.ExecutionContext.Implicits._
    val upstream = WS.url(s"https://lightbend.com/$path")
      .withHeaders("Accept" -> "text/html")
      .withRequestTimeout(25000)
      .withFollowRedirects(true)
    // NOTE(review): the upstream status code and content type are dropped;
    // every response comes back as 200 with the raw body — confirm intended.
    upstream.get map { response =>
      Ok(response.body)
    }
  }
}
|
eed3si9n/activator
|
ui/test/activator/typesafeproxy/TestHelpers.scala
|
package activator.typesafeproxy
import java.util.concurrent.atomic.AtomicReference
import activator.typesafeproxy.TypesafeComProxy.ActionPair
import akka.actor._
import akka.testkit._
import com.typesafe.config.{ Config, ConfigFactory }
import scala.concurrent.{ ExecutionContext, Future }
import scala.concurrent.duration._
import scala.util.{ Random, Try }
import java.util.concurrent.Executors
object AkkaTestKitHelper {
  // Quiet Akka logging plus a small dedicated dispatcher for blocking test work.
  val configString =
    """
|akka {
| loglevel = "OFF"
| stdout-loglevel = "OFF"
|}
|test-dispatcher {
| type = Dispatcher
| executor = "fork-join-executor"
| fork-join-executor {
| parallelism-min = 2
| parallelism-factor = 2.0
| parallelism-max = 10
| }
| throughput = 1
|}
""".stripMargin
  val config = ConfigFactory.parseString(configString)
  /** A unique actor-system name so concurrently running suites don't collide. */
  def randomActorSystemName: String = s"test-actor-system-${Random.alphanumeric.take(10).mkString}"
}
/**
 * TestKit harness providing fake versions of the typesafe.com proxy's
 * collaborators plus factory helpers for the actors under test.
 */
class AkkaTestKitHelper(_system: ActorSystem) extends TestKit(_system) with ImplicitSender {
  def this(config: Config) = this(ActorSystem(AkkaTestKitHelper.randomActorSystemName, config))
  def this() = this(AkkaTestKitHelper.config)

  /** Tears down the test actor system; call exactly once when the test is done. */
  def after() = {
    system.shutdown()
    system.awaitTermination()
  }

  /** Dispatcher for helpers that deliberately block (Thread.sleep below). */
  def pinnedDispatcher = system.dispatchers.lookup("test-dispatcher")

  lazy val name: String = s"test-${new String(Random.alphanumeric.take(10).toArray)}"

  // --- fakes: each announces its construction and forwards every message to testActor ---

  case class FakeAuthenticatorInit(version: Long)
  case class FakeAuthenticatorMessage(replyTo: ActorRef, uiActor: ActorRef, message: Any) {
    // BUG FIX: previously sent the captured `message` back instead of the
    // `reply` argument (which was unused), so tests could never answer with a
    // value of their choosing.
    def reply(reply: Any)(implicit sender: ActorRef): Unit = replyTo.tell(reply, sender)
  }
  class FakeAuthenticator(version: Long, replyTo: ActorRef, webSocketActor: ActorRef) extends Actor {
    def receive: Receive = {
      // announce construction so tests can observe the version handed in
      testActor ! FakeAuthenticatorInit(version)
      {
        case msg => testActor ! FakeAuthenticatorMessage(replyTo, webSocketActor, msg)
      }
    }
  }

  case object FakeSubscriptionRPCInit
  case class FakeSubscriptionRPCMessage(replyTo: ActorRef, webSocketActor: ActorRef, message: Any) {
    // BUG FIX: same as FakeAuthenticatorMessage — forward `reply`, not `message`.
    def reply(reply: Any)(implicit sender: ActorRef): Unit = replyTo.tell(reply, sender)
  }
  class FakeSubscriptionRPC(version: Long, replyTo: ActorRef, webSocketActor: ActorRef) extends Actor {
    def receive: Receive = {
      testActor ! FakeSubscriptionRPCInit
      {
        case msg => testActor ! FakeSubscriptionRPCMessage(replyTo, webSocketActor, msg)
      }
    }
  }

  case object FakeActivatorInfoInit
  case class FakeActivatorInfoMessage(replyTo: ActorRef, webSocketActor: ActorRef, message: Any) {
    // BUG FIX: same as FakeAuthenticatorMessage — forward `reply`, not `message`.
    def reply(reply: Any)(implicit sender: ActorRef): Unit = replyTo.tell(reply, sender)
  }
  class FakeActivatorInfo(version: Long, replyTo: ActorRef, webSocketActor: ActorRef) extends Actor {
    def receive: Receive = {
      testActor ! FakeActivatorInfoInit
      {
        case msg => testActor ! FakeActivatorInfoMessage(replyTo, webSocketActor, msg)
      }
    }
  }

  // Props factories matching the getter shape TypesafeComProxy expects.
  def fakeAuthenticatorProps(request: ActionPair[AuthenticationState]#Get, version: Long, replyTo: ActorRef, webSocketActor: ActorRef): Props =
    Props(new FakeAuthenticator(version, replyTo, webSocketActor))
  def fakeSubscriptionRPCProps(request: ActionPair[SubscriberData]#Get, version: Long, replyTo: ActorRef, webSocketActor: ActorRef): Props =
    Props(new FakeSubscriptionRPC(version, replyTo, webSocketActor))
  def fakeActivatorInfoProps(request: ActionPair[ActivatorLatestInfo]#Get, version: Long, replyTo: ActorRef, webSocketActor: ActorRef): Props =
    Props(new FakeActivatorInfo(version, replyTo, webSocketActor))

  /** Runs `body` against a freshly started proxy actor, stopping it afterwards. */
  def withProxy[T](initialCacheState: TypesafeComProxy.CacheState = TypesafeComProxy.initialStateBuilder(authGetter = fakeAuthenticatorProps,
    subscriberDataGetter = fakeSubscriptionRPCProps,
    activatorInfoGetter = fakeActivatorInfoProps),
    webSocketActor: ActorRef = testActor)(body: ActorRef => T): T = {
    val proxy = system.actorOf(TypesafeComProxy.props(initialCacheState))
    val r = body(proxy)
    system stop proxy
    r
  }

  /** Authentication callback that immediately replies with `result`. */
  def authenticationResult(result: Try[AuthenticationState]): (String, String, ActorRef) => Unit = { (_, _, sendTo) =>
    sendTo ! result
  }
  /** Authentication callback that replies (if at all) only after `delay`. */
  def delayedAuthenticationResult(delay: FiniteDuration, result: Option[Try[AuthenticationState]] = None): (String, String, ActorRef) => Unit = {
    (_, _, sendTo) =>
      {
        Future {
          Thread.sleep(delay.toMillis)
          result.foreach(r => sendTo ! r)
        }(pinnedDispatcher)
        ()
      }
  }
  /** Authentication callback whose result can be swapped mid-test via the returned ref. */
  def mutableAuthenticationResult(result: Try[AuthenticationState]): (AtomicReference[Try[AuthenticationState]], (String, String, ActorRef) => Unit) = {
    val ref = new AtomicReference[Try[AuthenticationState]](result)
    (ref, (_, _, sendTo) => sendTo ! ref.get())
  }
  /** Runs `body` against a fresh AuthenticationActor, stopping it afterwards. */
  def withAuthenticationActor[T](doAuthenticate: AuthenticationActor.DoAuthenticate,
    uiActorProps: ActorRef => Props = UIActor.props,
    version: Long = 0L,
    replyTo: ActorRef = testActor,
    websocketActor: ActorRef = testActor,
    initMessage: Option[String] = None)(body: ActorRef => T): T = {
    val authenticator = system.actorOf(AuthenticationActor.props(doAuthenticate, uiActorProps, version, replyTo, websocketActor, initMessage))
    val r = body(authenticator)
    system stop authenticator
    r
  }

  /** Activator-latest callback that immediately replies with `result`. */
  def activatorLatestResult(result: Try[ActivatorLatestInfo]): ActorRef => Unit = { sendTo =>
    sendTo ! result
  }
  /** Activator-latest callback that replies (if at all) only after `delay`. */
  def delayedActivatorLatestResult(delay: FiniteDuration, result: Option[Try[ActivatorLatestInfo]] = None): ActorRef => Unit = {
    sendTo =>
      {
        Future {
          Thread.sleep(delay.toMillis)
          result.foreach(r => sendTo ! r)
        }(pinnedDispatcher)
        ()
      }
  }
  /** Activator-latest callback whose result can be swapped mid-test via the returned ref. */
  def mutableActivatorLatestResult(result: Try[ActivatorLatestInfo]): (AtomicReference[Try[ActivatorLatestInfo]], ActorRef => Unit) = {
    val ref = new AtomicReference[Try[ActivatorLatestInfo]](result)
    (ref, sendTo => sendTo ! ref.get())
  }
  /** Runs `body` against a fresh ActivatorLatestActor, stopping it afterwards. */
  def withActivatorLatestActor[T](doGetActivatorLatest: ActivatorLatestActor.DoGetActivatorLatest,
    uiActorProps: ActorRef => Props = UIActor.props,
    version: Long = 0L,
    replyTo: ActorRef = testActor,
    websocketActor: ActorRef = testActor)(body: ActorRef => T): T = {
    val authenticator = system.actorOf(ActivatorLatestActor.props(doGetActivatorLatest, uiActorProps, version, replyTo, websocketActor))
    val r = body(authenticator)
    system stop authenticator
    r
  }

  /** Subscription-data callback that immediately replies with `result`. */
  def subscriptionDataResult(result: Try[SubscriberData]): (AuthenticationStates.AuthenticationData, ActorRef) => Unit = { (_, sendTo) =>
    sendTo ! result
  }
  /** Subscription-data callback whose result can be swapped mid-test via the returned ref. */
  def mutableSubscriptionDataResult(result: Try[SubscriberData]): (AtomicReference[Try[SubscriberData]], (AuthenticationStates.AuthenticationData, ActorRef) => Unit) = {
    val ref = new AtomicReference[Try[SubscriberData]](result)
    (ref, (_, sendTo) => sendTo ! ref.get())
  }
  /** Subscription-data callback that replies (if at all) only after `delay`. */
  def delayedSubscriptionDataResult(delay: FiniteDuration, result: Option[SubscriberData] = None): (AuthenticationStates.AuthenticationData, ActorRef) => Unit = {
    (_, sendTo) =>
      {
        Future {
          Thread.sleep(delay.toMillis)
          result.foreach(r => sendTo ! r)
        }(pinnedDispatcher)
        ()
      }
  }
  /** Runs `body` against a fresh SubscriptionDataActor, stopping it afterwards. */
  def withSubscriptionDataActor[T](doGetSubscriptionData: SubscriptionDataActor.DoGetSubscriptionData,
    uiActorProps: ActorRef => Props = UIActor.props,
    version: Long = 0L,
    replyTo: ActorRef = testActor,
    websocketActor: ActorRef = testActor)(body: ActorRef => T): T = {
    val rpc = system.actorOf(SubscriptionDataActor.props(doGetSubscriptionData, uiActorProps, version, replyTo, websocketActor))
    val r = body(rpc)
    system stop rpc
    r
  }
}
/** Minimal per-test harness: builds a fresh helper and guarantees teardown. */
abstract class Specification[T <: AkkaTestKitHelper] {
  /** Factory for a fresh helper (and thus a fresh actor system). */
  def gen(): T
  /** Runs `body` with a new helper, cleaning up even when the body throws. */
  def withHelper[U](body: T => U): U = {
    val helper = gen()
    try body(helper)
    finally helper.after()
  }
}
/** Specification wired to the plain AkkaTestKitHelper. */
class DefaultSpecification extends Specification[AkkaTestKitHelper] {
  def gen(): AkkaTestKitHelper = new AkkaTestKitHelper()
}
|
eed3si9n/activator
|
launcher/src/main/scala/activator/RepositoryConfig.scala
|
<reponame>eed3si9n/activator
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import scala.util.control.NonFatal
import activator.properties.ActivatorProperties._
import java.io.File
// Here's where we specify our repositories configuration. The
// goal is to add the offline repository that comes in our zip
// file to Activator itself AND to sbt server.
//
// Note: as with any comment this one can be wrong. This is how
// someone THOUGHT it worked at one point. Trust but verify!
//
// Important background:
// - there's a default repository configuration embedded in the
// launcher, which we generate in project/Packaging.scala
// - the launcher also configures a repository-config, which
// is an override file for the repositories section in the
// launcher
// - in this source file we are creating ~/.sbt/repositories
// - if ~/.sbt/repositories exists, sbt will IGNORE the entire
// embedded config in the launcher, not merge with it.
// ~/.sbt/repositories entirely replaces the embedded config.
// This is how the repository-config is set up in
// Packaging.scala.
// - activator.home is set to the activator/activator.bat wrapper
// scripts' location
// - ${activator.home}/repository would be our offline repo
// for the "fat" zip
// - in the "minimal" zip, ${activator.home}/repository doesn't
// exist
// - when the activator scripts are copied into an app,
// ${activator.home}/repository doesn't exist
// - activator.local.repository is a user-configurable override
// that replaces ${activator.home}/repository (you would
// specify this on the activator command line or in
// ~/.sbt/jvmargs)//
// With that background, we are trying to handle these cases:
// - if you've never run activator and run it the first time,
// the launcher uses its embedded repository config, and then
// creates ~/.sbt/repositories, which gets used by sbt server
// - the second time you run activator, both launcher and sbt
// server would use ~/.sbt/repositories
// - if you upgrade to a new Activator (which probably moves
// activator.home), the first time you run it, it uses
// ~/.sbt/repositories which will configure both the previous
// version of activator's repository and also the new version's
// repository due to activator.home
// - if you upgrade, the second time you run it we'll have edited
// ~/.sbt/repositories and it will only use the new version's
// repo
//
// We write two repos to ~/.sbt/repositories; one is hardcoded
// to the activator path when we created ~/.sbt/repositories,
// and this is intended to affect sbt server. The other honors
// activator.home which is set by the wrapper script, so this
// one is intended to work right with the Activator launcher
// (which may be a newer version).
//
// In the "average" case (the second and subsequent times you
// run the same version of Activator) the two repos should be
// the same directory.
//
// If the user has their own stuff in ~/.sbt/repositories we do
// our very best not to mess it up, we only change the lines
// that start with activator-.
//
// We also only edit ~/.sbt/repositories if we are a "fat" zip,
// that is if ${activator.home}/repository exists. If you run
// an activator from inside an app or from the minimal zip,
// we use the last "fat" repository you have used.
object RepositoryConfig {
// name of the [repositories] section in ~/.sbt/repositories
private val repositoriesSectionName = "repositories"
// crude OS detection: os.name containing "win" is treated as Windows
private val isWindows = sys.props("os.name").toLowerCase.indexOf("win") >= 0
// configure your per-user repos to have the offline
// repo in activator.home, if possible.
/** Rewrites the default per-user ~/.sbt/repositories file. */
def configureUserRepositories(): Unit =
  // FIXME look at sbt.repository.config and possibly sbt.global.base
  configureUserRepositories(new File(new File(GLOBAL_USER_HOME), ".sbt/repositories"))
// this overload exists for unit tests only
// this overload exists for unit tests only
def configureUserRepositories(repositoriesFile: java.io.File): Unit = try {
  val offlineRepo = new java.io.File(ACTIVATOR_HOME, "repository")
  // without an offline repo we keep whatever config the launcher embedded
  // (see project/Packaging.scala)
  if (offlineRepo.exists)
    replaceSection(repositoriesFile, repositoriesSectionName, newRepositorySection _)
} catch {
  case NonFatal(e) =>
    // config repair is best-effort; warn rather than break startup
    System.err.println(s"Configuring Activator offline repository failed: ${e.getMessage}")
}
// NOTE(review): the '*' applies to the preceding '/', so this regex also
// matches "file:/" with a single slash (the form java.io.File#toURI
// produces) — confirm that is intended.
private final val fileMatcher = "^file://*".r
private final val fileScheme = "file://"
// Converts a filesystem path to a URI-quoted path (no scheme prefix),
// suitable for embedding in a repositories line.
private def quoteForFileURI(path: String): String = {
  val uriString = new java.io.File(path).toURI.toASCIIString()
  if (isWindows && fileMatcher.findFirstIn(uriString).isDefined)
    fileMatcher.replaceFirstIn(uriString, "")
  else if (!isWindows && uriString.startsWith(fileScheme))
    uriString.stripPrefix(fileScheme)
  else
    path // give up, hope for best?
}
// Replaces (or inserts) the repo line named `repoName` with `replacementLine`,
// anchoring it next to the `afterName` repo entry.
private def replaceRepoLine(old: Section, afterName: String, repoName: String, replacementLine: String): Section = {
  // BUG FIX: the anchor test used the plain literal "${afterName}:" (the `s`
  // interpolator was missing), so "<afterName>: <url>" lines never matched.
  // NOTE(review): lines not matching the anchor all end up after the
  // replacement, which can reorder unrelated entries — confirm acceptable.
  val (before, after) =
    old.lines.filterNot(_.trim.startsWith(s"${repoName}:")).partition(name =>
      name.trim == afterName || name.trim.startsWith(s"${afterName}:"))
  val oldLine = old.lines.find(_.trim.startsWith(s"${repoName}:"))
  // this check is to avoid rewriting the section if nothing really changed
  if (oldLine != Some(replacementLine))
    old.copy(lines = before ++ Seq(replacementLine) ++ after)
  else
    old
}
// Under Windows a `file://` URL is interpreted as a UNC path; using the extra
// '//' (i.e. `file:////`) keeps the path local so the current user's
// authorization is sufficient to access the target file.
private lazy val fileMarker = if (isWindows) "file:////" else "file://"
// Builds the replacement [repositories] section: either patches the two
// activator-managed lines into an existing section, or synthesizes a full
// default section when none exists.
private def newRepositorySection(oldOption: Option[Section]): Section = {
  // this repo is primarily for sbt server - hardcoded activator.home
  // based on the most recent Activator to run.
  val activatorLocalLine = """ activator-local: """ + fileMarker + """${activator.local.repository-""" +
    quoteForFileURI(ACTIVATOR_HOME) +
    """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"""
  // this repo is for the launcher, which is run by a wrapper script
  // that sets activator.home. The launcher only uses its embedded repo
  // config if ~/.sbt/repositories doesn't exist.
  val activatorLauncherLine = """ activator-launcher-local: """ + fileMarker + """${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"""
  oldOption match {
    case Some(old) =>
      // if (!isWindows) {
      val withActivatorLocal =
        replaceRepoLine(old, "local", "activator-local", activatorLocalLine)
      replaceRepoLine(withActivatorLocal, "local", "activator-launcher-local", activatorLauncherLine)
    // } else stripRepoLines(old, Set("activator-local", "activator-launcher-local"))
    case None =>
      // create the entire repositories section
      val header = """ local
""" + activatorLauncherLine + """
""" + activatorLocalLine
      // NOTE(review): "typesafe-ivy-releasez" (trailing 'z') looks like a typo
      // in the repo label — the URL is what resolution uses, but confirm.
      val allRepoLines = header + """
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]"""
      Section(repositoriesSectionName, allRepoLines.split('\n').toVector, s"[$repositoriesSectionName]")
  }
}
// outputting the "rawLine" followed by "lines" for each section is supposed
// to preserve the original file
/** One `[name]` section of the file; `rawLine` is the original header line verbatim. */
private case class Section(name: String, lines: Vector[String], rawLine: String) {
  // the synthetic section holding any lines appearing before the first [header]
  def isStuffBeforeFirstSection: Boolean = name.isEmpty
}
private object Section {
  /** Placeholder for content that precedes the first `[header]` line. */
  def stuffBeforeFirstSection = Section("", Vector.empty, "")
}
// Splits the reader's contents into sections: a trimmed line starting with '['
// opens a new section; every other line belongs to the current one.
private def sectionSplit(reader: java.io.BufferedReader): Seq[Section] = {
  // sbt parser defines a "section" as a trimmed line that starts with '['.
  // Unlike sbt (which errors on a missing ']'), we tolerate it here.
  @scala.annotation.tailrec
  def accumulate(reversedSections: List[Section], currentSection: Section): List[Section] = {
    reader.readLine() match {
      case null =>
        (currentSection :: reversedSections).reverse
      case line =>
        val trimmed = line.trim()
        if (trimmed.nonEmpty && trimmed.charAt(0) == '[') {
          val i = trimmed.indexOf(']')
          // if there's no ']' sbt throws an error, but we don't want to barf on anything here.
          val name = trimmed.substring(1, if (i >= 0) i else trimmed.length)
          accumulate(currentSection :: reversedSections, Section(name, Vector.empty, line))
        } else {
          // keep the original, untrimmed line so rewriting doesn't reformat the file
          accumulate(reversedSections, currentSection.copy(lines = currentSection.lines :+ line))
        }
    }
  }
  accumulate(Nil, Section.stuffBeforeFirstSection)
}
// Writes sections back out: each section's original header line ("rawLine")
// followed by its body lines, then a trailing blank line. Does not close the
// writer. The synthetic pre-section bucket emits no header.
private def writeSections(writer: java.io.BufferedWriter, sections: Seq[Section]): Unit = {
sections foreach { section =>
if (!section.isStuffBeforeFirstSection) {
writer.append(section.rawLine)
writer.newLine()
}
section.lines.foreach { l =>
writer.append(l)
writer.newLine()
}
// put a blank line after each section even if there wasn't before
section.lines.lastOption foreach { lastLine =>
if (lastLine.trim.nonEmpty) {
writer.newLine()
}
}
}
}
/**
 * Applies `f` to the section called `name`. If one (or more) such sections
 * exist, each is passed to `f` wrapped in `Some`; otherwise `f(None)` is
 * appended as a brand-new section at the end.
 */
private def replaceSection(sections: Seq[Section], name: String, f: Option[Section] => Section): Seq[Section] =
  if (sections.forall(_.name != name))
    sections :+ f(None)
  else
    sections.map(s => if (s.name == name) f(Some(s)) else s)
// File-level variant: parses `file` into sections (treating an unreadable
// file as empty), applies `f` to the named section, and rewrites the file
// only if the result actually differs (to avoid touching timestamps).
private def replaceSection(file: java.io.File, name: String, f: Option[Section] => Section): Unit = {
val reader = try new java.io.BufferedReader(new java.io.FileReader(file))
catch {
case _: java.io.IOException =>
// pretend file was empty
new java.io.BufferedReader(new java.io.StringReader(""))
}
val (newSections, oldSections) = try {
val sections = sectionSplit(reader)
(replaceSection(sections, name, f), sections)
} finally {
reader.close()
}
// check if we won't change anything because we don't want to gratuitously
// do file IO that might break or alter timestamps or whatever
if (newSections != oldSections) {
sbt.IO.createViaTemporary(file) { file =>
// this is half-ass vs. writing and atomically renaming a tmpfile,
// but java makes that annoying cross-platform
val writer = new java.io.BufferedWriter(new java.io.FileWriter(file))
try writeSections(writer, newSections)
finally {
writer.close()
}
}
}
}
}
|
eed3si9n/activator
|
ui/test/console/handler/rest/ActorJsonSpec.scala
|
<reponame>eed3si9n/activator<gh_stars>1-10
package console.handler.rest
import activator.analytics.data._
import activator.analytics.data.TimeRangeType
import com.typesafe.trace.uuid.UUID
import play.api.libs.json.Json
import org.specs2.mutable.Specification
import com.typesafe.trace._
import play.api.libs.json._
import ActorJsonBuilder.ActorResult
import akka.actor.ActorRef
import scala.util.Random
import org.specs2.matcher.MatchResult
import scala.concurrent.duration._
import akka.actor._
import scala.reflect.ClassTag
// Shared fixture data for ActorJsonSpec: generates ActorStats across a mix of
// actor scopes and minute/hour/day time ranges.
object ActorJsonSpec {
import console.handler.Generators._
val now = System.currentTimeMillis
// we don't go too nuts with the generation here because the below already takes around half a second,
// so it will rapidly grow
val minuteTimeRanges = Seq(TimeRange(now - 30.minutes.toMillis, now, TimeRangeType.Minutes))
val hourTimeRanges = Seq(TimeRange(now - 30.hours.toMillis, now, TimeRangeType.Hours))
val dayTimeRanges = Seq(TimeRange(now - 30.days.toMillis, now, TimeRangeType.Days))
// scopes are generated from combinations of paths, hosts, dispatchers and systems
val scopes = genActorScopes(Set(ActorPath.fromString("akka://user/a"), ActorPath.fromString("akka://user/b")),
Set(),
Set("host1", "host2"),
Set("dispatcher1", "dispatcher2"),
Set("system1", "system2"))
val timeRanges = minuteTimeRanges ++ hourTimeRanges ++ dayTimeRanges
// one ActorStats per (scope, time range) pair; the index i seeds the byte counters
val stats = {
val deviationsGen = genDeviationDetails()
genActorStats(scopes, timeRanges) { (i, s, tr) =>
ActorStats(tr, s, ActorStatsMetrics(bytesRead = i, bytesWritten = i, deviationDetails = deviationsGen()))
}
}
}
// Checks that ActorJsonBuilder.createActorJson emits every expected field
// (with the expected JSON type), and exactly that many fields, for each
// generated ActorStats fixture.
class ActorJsonSpec extends Specification {
import ActorJsonSpec._
"ActorJson" should {
"generate JSON for an actor" in {
forall(stats) { (s: ActorStats) =>
val r = ActorJsonBuilder.createActorJson(s)
// do timeRange manually since we go recursive
val timeRangeOpt = (r \ "timerange").asOpt[JsObject]
timeRangeOpt must beSome
timeRangeOpt foreach { tr =>
(tr \ "startTime").asOpt[JsNumber] must beSome
(tr \ "endTime").asOpt[JsNumber] must beSome
val rangeTypeOpt = (tr \ "rangeType").asOpt[String]
rangeTypeOpt must beSome
}
// asserts the field exists and deserializes as T
def hasField[T](field: String)(implicit tag: ClassTag[T], reads: Reads[T]) = {
(r \ field).asOpt[T] must beSome
}
hasField[JsNumber]("askMessagesCount")
hasField[JsNumber]("askRate")
hasField[JsNumber]("createdCount")
hasField[JsNumber]("deadletterCount")
hasField[JsArray]("deadletters")
hasField[JsNumber]("deadlockCount")
hasField[JsArray]("deadlocks")
hasField[JsNumber]("deviationCount")
hasField[JsNumber]("errorCount")
hasField[JsArray]("errors")
hasField[JsNumber]("failedCount")
hasField[JsNumber]("latestMessageTimestamp")
hasField[JsNumber]("latestTraceEventTimestamp")
hasField[JsNumber]("maxMailboxSize")
hasField[String]("maxMailboxSizeAddressNode")
hasField[String]("maxMailboxSizeAddressPath")
hasField[JsNumber]("maxMailboxSizeTimestamp")
hasField[JsObject]("maxTimeInMailbox")
hasField[String]("maxTimeInMailboxAddressNode")
hasField[String]("maxTimeInMailboxAddressPath")
hasField[JsNumber]("maxTimeInMailboxTimestamp")
hasField[JsNumber]("meanBytesReadRate")
hasField[String]("meanBytesReadRateUnit")
hasField[JsNumber]("meanBytesWrittenRate")
hasField[String]("meanBytesWrittenRateUnit")
hasField[JsNumber]("meanMailboxSize")
hasField[JsNumber]("meanProcessedMessageRate")
hasField[String]("meanProcessedMessageRateUnit")
hasField[JsObject]("meanTimeInMailbox")
hasField[JsNumber]("processedMessagesCount")
hasField[String]("rateUnit")
hasField[JsNumber]("receiveRate")
hasField[JsNumber]("restartCount")
hasField[JsObject]("scope")
hasField[JsNumber]("stoppedCount")
hasField[JsNumber]("tellMessagesCount")
hasField[JsNumber]("tellRate")
hasField[JsObject]("timerange")
hasField[JsNumber]("totalMessageRate")
hasField[JsNumber]("unhandledMessageCount")
hasField[JsArray]("unhandledMessages")
hasField[JsNumber]("warningCount")
hasField[JsArray]("warnings")
// exactly the 43 fields asserted above; bump this when adding a field
r.fieldSet.size mustEqual (43)
}
}
}
}
|
eed3si9n/activator
|
project/offline.scala
|
<reponame>eed3si9n/activator
import java.io.FileInputStream
import sbt._
import Keys._
import SbtSupport.sbtLaunchJar
import LocalTemplateRepo.localTemplateCacheCreated
import Packaging.localRepoCreated
// sbt build code: defines the "offline-tests" task, which copies every
// generated template project and runs `sbt update` in each against the
// offline local repository, proving templates resolve without network access.
object offline {
val runOfflineTests = TaskKey[Unit]("offline-tests", "runs tests to ensure templates can work with the offline repository.")
// set up offline repo tests as integration tests.
def settings: Seq[Setting[_]] = Seq(
runOfflineTests <<= (localTemplateCacheCreated in TheActivatorBuild.localTemplateRepo,
target,
localRepoCreated in TheActivatorBuild.dist,
sbtLaunchJar,
streams) map offlineTestsTask,
integration.tests <<= runOfflineTests
)
// copies the template repo into target/to-try-updating (wiping any previous
// copy) and runs the update tests there
def offlineTestsTask(templateRepo: File, targetDir: File, localIvyRepo: File, launcher: File, streams: TaskStreams): Unit = {
val testDir = new File(targetDir, "to-try-updating")
if (testDir.exists) {
streams.log.info(s"Deleting ${testDir}")
IO.delete(testDir)
}
streams.log.info(s"Creating template projects to try updating in ${testDir}")
IO.copyDirectory(templateRepo, testDir)
runofflinetests(testDir, localIvyRepo, launcher, streams.log)
}
// logger that forwards everything to `underlying` while remembering
// error/warn-looking lines so they can be recapped at the end of the run
private class InterceptErrorsLogger(val underlying: sbt.Logger) extends sbt.Logger {
private var backwardErrors: List[String] = Nil
override def trace(t: => Throwable): Unit = underlying.trace(t)
override def success(message: => String): Unit = underlying.success(message)
override def log(level: sbt.Level.Value, message: => String): Unit = {
val m = message // eval only once
// the child sbt puts errors on stdout it looks like,
// so we have to look for error/warn inside the string.
if (level == sbt.Level.Error || level == sbt.Level.Warn ||
m.indexOf("error") >= 0 || m.indexOf("warn") >= 0)
backwardErrors = m :: backwardErrors
underlying.log(level, m)
}
// captured messages in original order
final def errors: List[String] = backwardErrors.reverse
}
// runs `sbt update` for each template project under testDir; recaps captured
// errors and throws via sys.error if any project failed
def runofflinetests(testDir: File, localIvyRepo: File, launcher: File, log: sbt.Logger): Unit = {
val results =
for {
projectInfo <- findAndRenameTestDirs(testDir)
name = projectInfo._2
_ = log.info("[OFFLINETEST]")
_ = log.info("[OFFLINETEST]")
_ = log.info("[OFFLINETEST] Running offline update test for template: " + name)
_ = log.info("[OFFLINETEST]")
_ = log.info("[OFFLINETEST]")
logger = new InterceptErrorsLogger(log)
result = runTest(localIvyRepo, testDir, projectInfo._1, projectInfo._2, launcher, logger)
} yield (name, result, logger.errors)
if(results exists (_._2 != true)) {
// Recap the error messages so people don't have to dig through miles of scrollback
log.info(s" [OFFLINETEST] Summary of errors saved from above log output follows.")
for((name, _, errors) <- results) {
if (errors.nonEmpty) {
log.error(s" [OFFLINETEST] $name had errors or warnings (to see them in context, scroll way back):")
for (e <- errors) {
log.error(s" [OFFLINETEST] $name: $e")
}
} else {
log.info(s" [OFFLINETEST] $name had no errors or warnings that we captured (scroll back for full output).")
}
}
val failureCount = results.filterNot(_._2).length
log.info("[OFFLINETEST] " + failureCount + " failures in " + results.length + " tests...")
for((name, result, _) <- results) {
log.info(" [OFFLINETEST] " + name + " - " + (if (result) "SUCCESS" else "FAILURE"))
}
log.info(s"[OFFLINETEST] Problems and dependency graph from building the local repository are in ${localIvyRepo.getParentFile}/local-repo-deps.txt")
log.info(s"[OFFLINETEST] Problems compiling the individual templates are in the logs above")
log.info(s"[OFFLINETEST] Projects we tried to update are in ${testDir}")
val failures = results.filter(_._2 != true).map(_._1).mkString(", ")
log.info(s"[OFFLINETEST] Failed-to-update projects: " + failures)
sys.error("Offline tests were unsuccessful: " + failures)
} else {
log.info("[OFFLINETEST] " + results.length + " tests successful.")
}
()
}
// finds template project dirs (those with both project/build.properties and
// activator.properties), renames each generated dir to its template name,
// and returns (renamed dir, template name) pairs
def findAndRenameTestDirs(root: File): Seq[(File, String)] = {
// extract the template name from the activator.properties file
def extractTemplateName(file: File): String = {
val fis = new FileInputStream(file.getAbsolutePath)
try {
val properties = new java.util.Properties
properties.load(fis)
properties.getProperty("name")
} finally {
fis.close()
}
}
for {
dir <- (root.***).get
if (dir / "project/build.properties").exists
if (dir / "activator.properties").exists
projectName = extractTemplateName((dir / "activator.properties").getAbsoluteFile)
// rename UUID dir to project name dir
niceDir = new File(dir.getParentFile(), projectName)
} yield {
IO.move(dir, niceDir)
(niceDir, projectName)
}
}
// writes a per-template offline repo config, then runs `sbt update` in the
// template's directory; returns true on success
def runTest(localIvyRepo: File, testDir: File, template: File, templateName: String, launcher: File, log: sbt.Logger): Boolean = {
val repoFile = new File(testDir, templateName + "-repo.properties")
makeRepoFile(repoFile, localIvyRepo)
log.info(s"Offline repo config for $templateName is in $repoFile")
def sbt(args: String*) = runSbt(launcher, repoFile, template, log)(args)
sbt("update")
}
// launches a child sbt with repository overrides pointing at the offline repo
// and a throwaway global base / ivy cache; returns true iff it exits with 0
def runSbt(launcher: File, repoFile: File, cwd: File, log: sbt.Logger)(args: Seq[String]): Boolean =
IO.withTemporaryDirectory { globalBase =>
val jvmargs = Seq(
"-Dsbt.repository.config="+repoFile.getCanonicalPath,
"-Dsbt.override.build.repos=true",
// TODO - Enough for fresh cache?
"-Dsbt.ivy.home="+(globalBase / ".ivy2").getAbsolutePath,
// TODO - we should consolidate on the two supported sbt versions if we can.
"-Dsbt.global.base="+globalBase.getAbsolutePath
)
val cmd = Seq("java") ++ jvmargs ++ Seq("-jar", launcher.getCanonicalPath) ++ args
log.info(s"Command to update $cwd offline: ${cmd.mkString(" ")}")
Process(cmd, cwd) ! log match {
case 0 => true
case n => false
}
}
// writes an sbt "repositories" config file pointing only at the offline local ivy repo
def makeRepoFile(props: File, localIvyRepo: File): Unit = {
// TODO - Don't hardcode the props file!
IO.write(props,
"""
[repositories]
activator-local: %s, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
""" format(localIvyRepo.getCanonicalFile.toURI.toString))
}
}
|
eed3si9n/activator
|
ui/app/activator/SbtProtocol.scala
|
<reponame>eed3si9n/activator
package activator
import sbt.protocol._
import sbt.serialization._
import play.api.libs.json._
// Wraps sbt server protocol events as JSON for the websocket: every event is
// tagged with type "sbt" plus a subType derived from the event's class name.
object SbtProtocol {
def wrapEvent(event: JsValue, subType: String): JsObject = {
JsObject(Seq("type" -> JsString("sbt"),
"subType" -> JsString(subType),
"event" -> event))
}
// pickle the Message to JSON; anything other than a JSON object is a bug
private def messageJson(message: Message)(implicit pickler: Pickler[Message]): JsObject =
Json.parse(SerializedValue(message).toJsonString) match {
case o: JsObject => o
case other => throw new RuntimeException(s"message $message should have become a JsObject not $other")
}
def wrapEvent(event: Event): JsObject = {
import sbt.serialization._
// subType is the simple (unqualified) class name of the event
val klassName = event.getClass.getName
val subType = klassName.substring(klassName.lastIndexOf('.') + 1)
wrapEvent(messageJson(event), subType)
}
// fabricate a log event that didn't come from sbt (e.g. our own status messages)
def synthesizeLogEvent(level: String, message: String): JsObject = {
wrapEvent(DetachedLogEvent(LogMessage(level, message)))
}
}
|
eed3si9n/activator
|
launcher/src/test/scala/activator/RepositoryConfigTest.scala
|
package activator
import org.junit._
import org.junit.Assert._
import java.io.File
import activator.properties.ActivatorProperties
import java.util.concurrent.TimeUnit
/**
 * Tests for RepositoryConfig.configureUserRepositories: creating the
 * [repositories] section from scratch, leaving an already-correct file
 * untouched, and adding/replacing the activator-local lines in place.
 */
class RepositoryConfigTest {
  var tmpActivatorHome: File = null

  @Before
  def setup(): Unit = {
    tmpActivatorHome = sbt.IO.createTemporaryDirectory
    System.setProperty("activator.home", tmpActivatorHome.getPath)
    // so it looks like we have an offline repo
    (new File(tmpActivatorHome, "repository")).mkdir()
  }

  @After
  def teardown(): Unit = {
    sbt.IO.delete(tmpActivatorHome)
    System.clearProperty("activator.home")
  }

  // run f with the path of a (not-yet-created) "repositories" file in a temp dir
  private def withConfigFilename[T](f: File => T): T = {
    sbt.IO.withTemporaryDirectory { dir =>
      f(new File(dir, "repositories"))
    }
  }

  // run f with a config file pre-populated with s
  private def withExistingConfig[T](s: String)(f: File => T): T = {
    withConfigFilename { file =>
      sbt.IO.write(file, s)
      f(file)
    }
  }

  // assert the file's exact content. Fix: this used to also dump debug copies
  // to hardcoded /tmp/content.txt and /tmp/expected.txt, which polluted the
  // filesystem on every run and broke on platforms without /tmp (Windows).
  private def checkContent(file: File, expected: String): Unit = {
    val content = sbt.IO.read(file)
    assertEquals(expected, content)
  }

  @Test
  def createConfigWhenNone(): Unit = {
    val expectedContent = """[repositories]
local
activator-launcher-local: file://${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: file://${activator.local.repository-""" + ActivatorProperties.ACTIVATOR_HOME() + """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
"""
    withConfigFilename { file =>
      assertFalse(s"$file doesn't exist yet", file.exists)
      RepositoryConfig.configureUserRepositories(file)
      checkContent(file, expectedContent)
    }
  }

  @Test
  def doNothingWhenWeAlreadyDidIt(): Unit = {
    val existingContent = """[scala]
version: ${sbt.scala.version-auto}
[app]
org: com.typesafe.activator
name: activator-launcher
version: ${activator.version-read(activator.version)[%s]}
class: activator.ActivatorLauncher
cross-versioned: false
components: xsbti
[repositories]
local
activator-launcher-local: file://${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: file://${activator.local.repository-""" + ActivatorProperties.ACTIVATOR_HOME() + """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
[boot]
directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/}
properties: ${activator.boot.properties-${user.home}/.activator/version-${activator.launcher.generation-%d}.properties}
[ivy]
ivy-home: ${user.home}/.ivy2
checksums: ${sbt.checksums-sha1,md5}
override-build-repos: ${sbt.override.build.repos-false}
repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories}
"""
    withExistingConfig(existingContent) { configFile =>
      assertTrue(s"$configFile exists", configFile.exists)
      val oldModTime = configFile.lastModified()
      // sleep past filesystem timestamp granularity so a rewrite would be detectable
      Thread.sleep(1100)
      RepositoryConfig.configureUserRepositories(configFile)
      checkContent(configFile, existingContent)
      assertEquals("file timestamp unchanged", oldModTime, configFile.lastModified())
    }
  }

  @Test
  def addMissingSection(): Unit = {
    val existingContent = """[scala]
version: ${sbt.scala.version-auto}
[app]
org: com.typesafe.activator
name: activator-launcher
version: ${activator.version-read(activator.version)[%s]}
class: activator.ActivatorLauncher
cross-versioned: false
components: xsbti
[boot]
directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/}
properties: ${activator.boot.properties-${user.home}/.activator/version-${activator.launcher.generation-%d}.properties}
[ivy]
ivy-home: ${user.home}/.ivy2
checksums: ${sbt.checksums-sha1,md5}
override-build-repos: ${sbt.override.build.repos-false}
repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories}
"""
    val expectedContent = """[scala]
version: ${sbt.scala.version-auto}
[app]
org: com.typesafe.activator
name: activator-launcher
version: ${activator.version-read(activator.version)[%s]}
class: activator.ActivatorLauncher
cross-versioned: false
components: xsbti
[boot]
directory: ${sbt.boot.directory-${sbt.global.base-${user.home}/.sbt}/boot/}
properties: ${activator.boot.properties-${user.home}/.activator/version-${activator.launcher.generation-%d}.properties}
[ivy]
ivy-home: ${user.home}/.ivy2
checksums: ${sbt.checksums-sha1,md5}
override-build-repos: ${sbt.override.build.repos-false}
repository-config: ${sbt.repository.config-${sbt.global.base-${user.home}/.sbt}/repositories}
[repositories]
local
activator-launcher-local: file://${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: file://${activator.local.repository-""" + ActivatorProperties.ACTIVATOR_HOME() + """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
"""
    withExistingConfig(existingContent) { configFile =>
      assertTrue(s"$configFile exists", configFile.exists)
      val oldModTime = configFile.lastModified()
      // sleep past filesystem timestamp granularity so the rewrite is detectable
      Thread.sleep(1100)
      RepositoryConfig.configureUserRepositories(configFile)
      checkContent(configFile, expectedContent)
      assertTrue("mod time should have changed", oldModTime != configFile.lastModified())
    }
  }

  @Test
  def addMissingActivatorLocalLine(): Unit = {
    val existingContent = """[repositories]
local
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
"""
    val expectedContent = """[repositories]
local
activator-launcher-local: file://${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: file://${activator.local.repository-""" + ActivatorProperties.ACTIVATOR_HOME() + """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
"""
    withExistingConfig(existingContent) { configFile =>
      assertTrue(s"$configFile exists", configFile.exists)
      RepositoryConfig.configureUserRepositories(configFile)
      checkContent(configFile, expectedContent)
    }
  }

  @Test
  def replaceExistingActivatorLocalLine(): Unit = {
    val existingContent = """[repositories]
local
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: wrongthinginwrongplace
activator-launcher-local: alsowrongalsowrongplace
"""
    val expectedContent = """[repositories]
local
activator-launcher-local: file://${activator.local.repository-${activator.home-${user.home}/.activator}/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
activator-local: file://${activator.local.repository-""" + ActivatorProperties.ACTIVATOR_HOME() + """/repository}, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
maven-central
typesafe-releases: http://repo.typesafe.com/typesafe/releases
typesafe-ivy-releasez: http://repo.typesafe.com/typesafe/ivy-releases, [organization]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext]
"""
    withExistingConfig(existingContent) { configFile =>
      assertTrue(s"$configFile exists", configFile.exists)
      RepositoryConfig.configureUserRepositories(configFile)
      checkContent(configFile, expectedContent)
    }
  }
}
|
eed3si9n/activator
|
ui/test/activator/typesafeproxy/AuthenticationActorTest.scala
|
package activator.typesafeproxy
import akka.actor._
import org.junit.Assert._
import org.junit._
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.{ Failure, Success, Try }
// Canned authentication outcomes and credentials shared by the tests below.
object AuthenticationActorTest {
val authenticated = Success(AuthenticationStates.Authenticated(AuthenticationStates.emptyAuthentication))
def failure(error: Throwable): Try[AuthenticationState] = Failure(error)
val canceled = failure(new ProxyCanceled("canceled"))
val failed = failure(new ProxyFailure("fail"))
val invalid = failure(new ProxyInvalidCredentials("invalid"))
val timeout = failure(new ProxyTimeout("timeout"))
val username = "username"
val password = "password"
val credentials = UIActor.Credentials(username, password)
// like assertTrue(x.isInstanceOf[T]) but with a readable failure message
def assertTypeOf[T](x: Any)(implicit ct: ClassTag[T]): Unit =
if (!ct.runtimeClass.isInstance(x)) throw new AssertionError(s"Value '$x' of type ${x.getClass.getName} is not of type ${ct.runtimeClass.getName}", null)
}
// Drives the authentication actor through its success, cancel, retry, and
// timeout flows, checking the messages exchanged with the UI.
class AuthenticationActorTest extends DefaultSpecification {
import AuthenticationActorTest._
import TypesafeComProxy._
// happy path: credentials supplied, authentication succeeds, result is Put(Success)
@Test
def testShouldSucceed(): Unit = withHelper { helper =>
import helper._
withAuthenticationActor(authenticationResult(authenticated)) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
val put = expectMsgAllClassOf(classOf[Authentication.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction]).find(_.isInstanceOf[Authentication.Put]).map(_.asInstanceOf[Authentication.Put]).get
assertTrue(put.value.isSuccess)
}
}
// user cancels at the credentials prompt -> Put(Failure(ProxyCanceled))
@Test
def testHandleCanceledCredentialsRequest(): Unit = withHelper { helper =>
import helper._
withAuthenticationActor(authenticationResult(authenticated)) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! UIActor.Cancel
val failure = expectMsgType[Authentication.Put]
assertTypeOf[ProxyCanceled](failure.value.failed.get)
}
}
// user cancels while authentication is in flight (3s artificial delay)
@Test
def testHandleCanceledAuthentication(): Unit = withHelper { helper =>
import helper._
withAuthenticationActor(delayedAuthenticationResult(3.seconds)) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
expectMsgType[UIActor.WebSocket.ReportStartAction]
auth ! UIActor.Cancel
val failure = expectMsgAllClassOf(classOf[Authentication.Put], classOf[UIActor.WebSocket.ReportEndAction]).find(_.isInstanceOf[Authentication.Put]).map(_.asInstanceOf[Authentication.Put]).get
assertTypeOf[ProxyCanceled](failure.value.failed.get)
}
}
// first attempt rejected as invalid; user retries with good credentials
@Test
def testHandleInvalidCredentialsRetry(): Unit = withHelper { helper =>
import helper._
val (ref, func) = mutableAuthenticationResult(invalid)
withAuthenticationActor(func) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
auth ! UIActor.Retry
expectMsgType[UIActor.WebSocket.RequestCredentials]
ref.set(authenticated)
auth ! credentials
val put = expectMsgAllClassOf(classOf[Authentication.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction]).find(_.isInstanceOf[Authentication.Put]).map(_.asInstanceOf[Authentication.Put]).get
assertTrue(put.value.isSuccess)
}
}
// first attempt rejected as invalid; user gives up
@Test
def testHandleInvalidCredentialsCancel(): Unit = withHelper { helper =>
import helper._
val (ref, func) = mutableAuthenticationResult(invalid)
withAuthenticationActor(func) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
auth ! UIActor.Cancel
val failure = expectMsgType[Authentication.Put]
assertTypeOf[ProxyCanceled](failure.value.failed.get)
}
}
// first attempt times out; retry succeeds (note: no second RequestCredentials expected here)
@Test
def testHandleTimeoutRetry(): Unit = withHelper { helper =>
import helper._
val (ref, func) = mutableAuthenticationResult(timeout)
withAuthenticationActor(func) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
auth ! UIActor.Retry
ref.set(authenticated)
auth ! credentials
val put = expectMsgAllClassOf(classOf[Authentication.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction]).find(_.isInstanceOf[Authentication.Put]).map(_.asInstanceOf[Authentication.Put]).get
assertTrue(put.value.isSuccess)
}
}
// first attempt times out; user gives up
@Test
def testHandleTimeoutCancel(): Unit = withHelper { helper =>
import helper._
val (ref, func) = mutableAuthenticationResult(timeout)
withAuthenticationActor(func) { auth =>
expectMsgType[UIActor.WebSocket.RequestCredentials]
auth ! credentials
expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
auth ! UIActor.Cancel
val failure = expectMsgType[Authentication.Put]
assertTypeOf[ProxyCanceled](failure.value.failed.get)
}
}
}
|
eed3si9n/activator
|
project/fixer.scala
|
import sbt._
import scala.annotation.tailrec
// generic for JS, Scala, Java
// generic for JS, Scala, Java: normalizes whitespace in source files
object Fixer {
  // CRLF -> LF
  private def normalizeLineBreaks(s: String): String =
    s.replace("\r\n", "\n")

  // matches runs of trailing spaces/tabs right before a newline, file-wide
  private val trailingWsPattern = """[\t ]+\n""".r
  private def stripTrailingWhitespace(s: String): String =
    trailingWsPattern.replaceAllIn(s, "\n")

  // guarantee the file ends in exactly one newline (added here, deduped below)
  private def ensureFinalNewline(s: String): String =
    if (s.endsWith("\n")) s else s + "\n"

  private val tabPattern = """\t""".r
  private def expandTabs(s: String): String =
    tabPattern.replaceAllIn(s, "  ")

  // peel off trailing blank lines one newline at a time
  @tailrec
  private def dropExtraFinalNewlines(s: String): String =
    if (s.endsWith("\n\n")) dropExtraFinalNewlines(s.dropRight(1))
    else s

  // we don't fix indentation whitespace because
  // it requires too much judgment as to what
  // was intended.
  val whitespaceFixer = (normalizeLineBreaks _)
    .andThen(expandTabs)
    .andThen(ensureFinalNewline)
    .andThen(stripTrailingWhitespace)
    .andThen(dropExtraFinalNewlines)

  /** Applies `fixer` to the file's content, rewriting it only when something changed. */
  def fix(f: File, fixer: String => String, log: Logger): File = {
    val original = IO.read(f)
    val repaired = fixer(original)
    if (repaired != original) {
      log.info("Fixing whitespace in " + f)
      IO.write(f, repaired)
    }
    f
  }

  def fixWhitespace(f: File, log: Logger): File =
    fix(f, whitespaceFixer, log)
}
|
eed3si9n/activator
|
project/name.sbt
|
name := "activator-build"
|
eed3si9n/activator
|
project/VersionGenerator.scala
|
<reponame>eed3si9n/activator
import sbt._
import Keys._
// Generates a JS module at build time exposing dependency versions
// (Play shim, IDEA plugin, Eclipse plugin, sbt-core-next) to the web UI.
object VersionGenerator {
// output path, relative to the ui project directory
final val fileLocation = "/target/web/public/main/public/generated/dependencies.js"
def createInformation(path: File): Unit = {
val content =
s"""/**
| * Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
| */
|
|// *** AUTO GENERATED FILE - DO NOT CHANGE ****************************
|// This file is being generated as part of the build step in Activator.
|// For more information see project ui in project/build.scala.
|// ********************************************************************
|
|define([
|],function (
|) {
| var playVersion = "${Dependencies.shimPlayVersion}";
| var ideaVersion = "${Dependencies.ideaVersion}";
| var eclipseVersion = "${Dependencies.eclipseVersion}";
| var sbtCoreNextVersion = "${Dependencies.sbtCoreNextVersion}";
|
| return {
| playVersion: playVersion,
| ideaVersion: ideaVersion,
| eclipseVersion: eclipseVersion,
| sbtCoreNextVersion: sbtCoreNextVersion
| };
|});""".stripMargin
println(s"Generating file: ${path}${fileLocation}")
// NOTE(review): `path + fileLocation` relies on File-to-String concatenation;
// `new File(path, fileLocation)` would be the conventional spelling — confirm
// the leading-slash behavior before changing.
Properties.writeIfChanged(file = new File(path + fileLocation), content = content)
}
}
|
eed3si9n/activator
|
ui/app/activator/App.scala
|
<reponame>eed3si9n/activator<filename>ui/app/activator/App.scala
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import java.util.UUID
import akka.actor._
import java.util.concurrent.atomic.AtomicInteger
import activator.properties.ActivatorProperties
import java.net.URLEncoder
import akka.util.Timeout
// Identifies one app instance per websocket connection: app id + socket UUID.
final case class AppIdSocketId(appId: String, socketId: UUID)
// Wraps the actor for one open app: builds the app actor via the injected
// appActorBuilder, plus a watcher actor that observes its lifecycle.
class App(val id: AppIdSocketId, val config: AppConfig, val system: ActorSystem,
val appActorBuilder: AppConfig => Props) extends ActorWrapper {
require(config.id == id.appId)
// monotonically increasing per-JVM instance number; makes actor names unique
val appInstance = App.nextInstanceId.getAndIncrement()
override def toString = s"App(${config.id}@$appInstance})"
// URL-encode the app id so it is a valid actor-name segment
val actorName = "app-" + URLEncoder.encode(config.id, "UTF-8") + "-" + appInstance
// val actor = system.actorOf(AppActor.props(config, typesafeComActor, lookupTimeout),
val actor = system.actorOf(appActorBuilder(config), name = actorName)
system.actorOf(Props(new ActorWatcher(actor, this)), "app-actor-watcher-" + appInstance)
}
object App {
// source of unique per-JVM instance numbers (see App.appInstance)
val nextInstanceId = new AtomicInteger(1)
}
|
eed3si9n/activator
|
ui-common/src/main/scala/activator/Sbt.scala
|
<gh_stars>0
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package activator
import java.io.File
/** vaguely-sbt-related utilities */
object Sbt {
  /**
   * Whether `dir` looks like the root of an sbt project, i.e. it contains
   * a `build.sbt` or a `project/build.properties`.
   *
   * "looks like" implies that this isn't completely reliable, since it isn't:
   * it only probes for these two well-known marker files.
   *
   * Fix: replaced `find { ... } isDefined` (a postfix operator requiring the
   * `language.postfixOps` import) with the direct `exists` idiom.
   */
  def looksLikeAProject(dir: File): Boolean =
    Seq("build.sbt", "project/build.properties").exists { relative =>
      new File(dir, relative).exists
    }
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/UIActor.scala
|
<reponame>eed3si9n/activator
package activator.typesafeproxy
import java.util.UUID
import akka.actor._
import play.api.libs.functional.syntax._
import play.api.libs.json.Json._
import play.api.libs.json._
// This is an abstract protocol used to communicate to the front-end
object UIActor {
  // Fresh correlation id used to pair ReportStartAction with its ReportEndAction.
  def genActionId(): String = UUID.randomUUID().toString
  // --- Responses the front-end can send back to a pending request ---
  sealed trait Response
  sealed trait CancelableResponse extends Response
  sealed trait RetryableResponse extends Response
  sealed trait CredentialsResponse extends CancelableResponse
  sealed trait FailureResponse extends RetryableResponse
  case object Cancel extends CredentialsResponse with FailureResponse
  case object Retry extends FailureResponse
  case class Credentials(username: String, password: String) extends CredentialsResponse
  // Requests originating locally; Request[Resp] is defined elsewhere in the project.
  sealed trait LocalRequest[+Resp] extends Request[Resp]
  // Anything carrying a correlation id for start/end action reporting.
  sealed trait Action {
    def actionId: String
  }
  sealed trait CancelableRequest[T <: CancelableResponse] extends LocalRequest[T] {
    def cancel()(implicit sender: ActorRef): Unit = response(Cancel)
  }
  sealed trait RetryableRequest[T <: RetryableResponse] extends LocalRequest[T] {
    def cancel()(implicit sender: ActorRef): Unit = response(Cancel)
    def retry()(implicit sender: ActorRef): Unit = response(Retry)
  }
  object CancelableRequests {
    // Ask the UI for username/password; answered via credentials() or cancel().
    case class RequestCredentials(sendTo: ActorRef, message: Option[String] = None) extends CancelableRequest[CredentialsResponse] {
      def credentials(username: String, password: String)(implicit sender: ActorRef): Unit =
        response(Credentials(username, password))
    }
    case class ReportStartAction(message: String, actionId: String, sendTo: ActorRef) extends CancelableRequest[Nothing] with Action {
      // Builds the matching end-of-action report, reusing this action's id.
      def endReport(endSendTo: ActorRef = sendTo): ReportEndAction = ReportEndAction(message, actionId, endSendTo)
    }
  }
  case class ReportEndAction(message: String, actionId: String, sendTo: ActorRef) extends LocalRequest[Nothing] with Action
  object RetryableRequests {
    case class Failure(message: String, sendTo: ActorRef, retryable: Boolean) extends RetryableRequest[Nothing]
  }
  // Wire-level (JSON over websocket) mirror of the protocol above.
  object WebSocket {
    import activator.JsonHelper._
    val requestTag = "TypesafeComProxy"
    val responseTag = requestTag
    sealed trait Response {
      def actorPath: String
    }
    sealed trait LocalRequest[+T <: Response] {
      def actorPath: String
    }
    sealed trait Action {
      def actionId: String
    }
    sealed trait CancelableResponse extends Response
    sealed trait RetryableResponse extends Response
    sealed trait CredentialsResponse extends CancelableResponse
    sealed trait FailureResponse extends RetryableResponse
    // actorPath routes the response back to the UIActor instance that asked.
    case class Cancel(actorPath: String) extends CredentialsResponse with FailureResponse
    case class Retry(actorPath: String) extends FailureResponse
    case class Failure(message: String, retryable: Boolean, actorPath: String) extends LocalRequest[FailureResponse]
    case class Credentials(username: String, password: String, actorPath: String) extends CredentialsResponse
    case class RequestCredentials(message: Option[String], actorPath: String) extends LocalRequest[CredentialsResponse]
    case class ReportStartAction(message: String, actionId: String, actorPath: String) extends LocalRequest[CancelableResponse] with Action
    case class ReportEndAction(message: String, actionId: String, actorPath: String) extends LocalRequest[Nothing] with Action
    // Decodes inbound websocket JSON by dispatching on its "type" field.
    implicit val websocketReads: Reads[Response] =
      extractMessage[Response](requestTag)(new Reads[Response] {
        def reads(in: JsValue): JsResult[Response] = ((in \ "type"), (in \ "actorPath")) match {
          case (JsString("cancel"), JsString(ap)) =>
            JsSuccess(Cancel(ap))
          case (JsString("retry"), JsString(ap)) => JsSuccess(Retry(ap))
          case (JsString("credentials"), JsString(ap)) =>
            ((__ \ "username").read[String] and (__ \ "password").read[String])((un, pw) => Credentials.apply(un, pw, ap)).reads(in)
          case x => JsError(s"expected one of 'cancel', 'retry' or 'credentials', got $x")
        }
      })
    // Encodes outbound requests; message is omitted when absent rather than emitted as null.
    implicit val websocketWrites: Writes[LocalRequest[_ <: Response]] =
      emitMessage(responseTag)(_ match {
        case RequestCredentials(None, ap) => Json.obj("type" -> "requestCredentials", "actorPath" -> ap)
        case RequestCredentials(Some(m), ap) => Json.obj("type" -> "requestCredentials", "message" -> m, "actorPath" -> ap)
        case ReportStartAction(m, aid, ap) => Json.obj("type" -> "reportStartAction", "message" -> m, "actionId" -> aid, "actorPath" -> ap)
        case ReportEndAction(m, aid, ap) => Json.obj("type" -> "reportEndAction", "message" -> m, "actionId" -> aid, "actorPath" -> ap)
        case Failure(m, r, ap) => Json.obj("type" -> "failure", "message" -> m, "retryable" -> r, "actorPath" -> ap)
      })
    // Extractors for pattern-matching raw websocket traffic.
    object Inbound {
      def unapply(in: JsValue): Option[Response] = Json.fromJson[Response](in).asOpt
    }
    object Outbound {
      def unapply(in: Any): Option[LocalRequest[_ <: Response]] = in match {
        case x: LocalRequest[_] => Some(x)
        case _ => None
      }
    }
  }
  def props(websocketsActor: ActorRef): Props = Props(new UIActor(websocketsActor))
}
/**
 * Bridges the local proxy protocol and the websocket: forwards local requests to the
 * front-end and routes the single pending front-end response back to its requester.
 * Tracks at most one outstanding request via `context.become`.
 */
class UIActor(websocketsActor: ActorRef) extends Actor with ActorLogging {
  import UIActor._
  // `awaiting` is the request (if any) whose response we are currently expecting.
  def proxyInteraction(awaiting: Option[LocalRequest[_]]): Receive = {
    case x: WebSocket.Response =>
      // Only act when the response kind matches the pending request; otherwise drop it.
      (x, awaiting) match {
        case (_: WebSocket.Cancel, Some(a: CancelableRequest[_])) => a.cancel()
        case (_: WebSocket.Cancel, Some(a: RetryableRequest[_])) => a.cancel()
        case (msg: WebSocket.Credentials, Some(a: CancelableRequests.RequestCredentials)) => a.credentials(msg.username, msg.password)
        case (_: WebSocket.Retry, Some(a: RetryableRequest[_])) => a.retry()
        case (_, _) => // Ignore
      }
      context.become(proxyInteraction(None))
    case x: CancelableRequests.ReportStartAction =>
      websocketsActor ! WebSocket.ReportStartAction(x.message, x.actionId, self.path.toString)
      context.become(proxyInteraction(Some(x)))
    case x: ReportEndAction =>
      websocketsActor ! WebSocket.ReportEndAction(x.message, x.actionId, self.path.toString)
      // An end report expects no response; clear any pending request.
      context.become(proxyInteraction(None))
    case x: CancelableRequests.RequestCredentials =>
      websocketsActor ! WebSocket.RequestCredentials(x.message, self.path.toString)
      context.become(proxyInteraction(Some(x)))
    case x: RetryableRequests.Failure =>
      websocketsActor ! WebSocket.Failure(x.message, x.retryable, self.path.toString)
      context.become(proxyInteraction(Some(x)))
  }
  def receive: Receive = proxyInteraction(None)
}
|
eed3si9n/activator
|
ui/test/console/handler/ActorsHandlerSpec.scala
|
package console.handler
import activator.analytics.data._
import akka.actor.ActorRef
import activator.analytics.data.TimeRangeType
import scala.concurrent.duration._
import console.{ PagingInformation, ScopeModifiers }
import java.util.concurrent.TimeUnit
import activator.analytics.repository.ActorStatsSorted
import activator.analytics.rest.http.SortingHelpers.{ Ascending, SortDirection }
import console.AnalyticsRepository
object ActorsHandlerSpec {
  // Test fixture: builds an ActorsHandlerBase whose stats callback is the supplied `body`.
  def actorsHandler(repo: AnalyticsRepository)(body: (ActorRef, ActorStatsSorted) => Unit): ActorsHandlerBase = new ActorsHandlerBase {
    final val defaultLimit: Int = 100
    val repository: AnalyticsRepository = repo
    def useActorStats(sender: ActorRef, stats: ActorStatsSorted): Unit = body(sender, stats)
  }
}
class ActorsHandlerSpec extends ActorsSpec("ActorsHandlerSpec") with ActorHandlerSpecification {
  isolated
  import ActorHandlerSpec._
  import ActorsHandlerSpec._
  "Actors Handler" should {
    "Find data" in {
      // Captured by the handler callback so the assertions below can inspect them.
      var resultSender: ActorRef = null
      var resultStats: ActorStatsSorted = null
      val h = actorsHandler(repository) { (sender, stats) =>
        resultSender = sender
        resultStats = stats
      }
      forall(stats) { (as: ActorStats) =>
        h.onModuleInformation(ActorRef.noSender, ActorsHandler.ActorsModuleInfo(as.scope,
          modifiers = ScopeModifiers(),
          time = as.timeRange,
          pagingInformation = None,
          sortOn = ActorStatsSorts.ActorName,
          sortDirection = Ascending,
          dataFrom = None,
          traceId = None))
        resultStats.total must equalTo(3) // Note: Should derive this number instead of hard-coding it.
        resultSender must equalTo(ActorRef.noSender)
        // Every returned stat must belong to the time range that was queried.
        forall(resultStats.stats) { (as1: ActorStats) =>
          as1.timeRange must equalTo(as.timeRange)
        }
      }
    }
    "Not find data outside of available range" in { // Note: this isn't actually correct, but hey. No data isn't the same as `zero` data
      var resultSender: ActorRef = null
      var resultStats: ActorStatsSorted = null
      val h = actorsHandler(repository) { (sender, stats) =>
        resultSender = sender
        resultStats = stats
      }
      val oneMinute: Int = Duration(1, TimeUnit.MINUTES).toMillis.toInt
      // Query one minute past the latest minute-granularity data point, so nothing should match.
      val maxMinutes = stats.filter(x => x.timeRange.rangeType == TimeRangeType.Minutes).maxBy(_.timeRange.startTime)
      val outside = maxMinutes.copy(timeRange = TimeRange.rangeFor(maxMinutes.timeRange.startTime + oneMinute, TimeRangeType.Minutes))
      val emptyStats = ActorStats(outside.timeRange, outside.scope)
      h.onModuleInformation(ActorRef.noSender, ActorsHandler.ActorsModuleInfo(outside.scope,
        modifiers = ScopeModifiers(),
        time = outside.timeRange,
        pagingInformation = None,
        sortOn = ActorStatsSorts.ActorName,
        sortDirection = Ascending,
        dataFrom = None,
        traceId = None))
      resultStats.total must equalTo(0)
      resultSender must equalTo(ActorRef.noSender)
      resultStats.stats must beEmpty
    }
  }
}
|
eed3si9n/activator
|
project/ivy.scala
|
<filename>project/ivy.scala
import sbt._
import org.apache.ivy.core.resolve.IvyNode
import org.apache.ivy.core.module.id.ModuleRevisionId
import org.apache.ivy.core.report.ResolveReport
import org.apache.ivy.core.install.InstallOptions
import org.apache.ivy.plugins.matcher.PatternMatcher
import org.apache.ivy.util.filter.FilterHelper
import org.apache.ivy.core.resolve.IvyNode
import collection.JavaConverters._
import java.io.BufferedWriter
import org.apache.ivy.core.module.id.ModuleId
import com.typesafe.sbt.license._
object IvyHelper {
  /** Resolves a set of modules from an SBT configured ivy and pushes them into
   * the given repository (by name).
   *
   * Intended usage, requires the named resolve to exist, and be on that accepts installed artifacts (i.e. file://)
   *
   * @return the licenses of everything installed, grouped and deduplicated
   */
  def createLocalRepository(
    modules: Seq[ModuleID],
    localRepoName: String,
    ivy: IvySbt,
    targetDir: File,
    log: Logger): Seq[License] = ivy.withIvy(log) { ivy =>
    // This helper method installs a particular module and transitive dependencies.
    // Returns None (and logs) when the install fails rather than aborting the whole run.
    def installModule(module: ModuleID): Option[ResolveReport] = {
      // TODO - Use SBT's default ModuleID -> ModuleRevisionId
      val mrid = IvySbtCheater toID module
      val name = ivy.getResolveEngine.getSettings.getResolverName(mrid)
      log.debug("Module: " + mrid + " should use resolver: " + name)
      try Some(ivy.install(mrid, name, localRepoName,
        new InstallOptions()
          .setTransitive(true)
          .setValidate(true)
          .setOverwrite(true)
          .setMatcherName(PatternMatcher.EXACT)
          .setArtifactFilter(FilterHelper.NO_FILTER)
      ))
      catch {
        case e: Exception =>
          log.debug("Failed to resolve module: " + module)
          log.trace(e)
          None
      }
    }
    // Grab all Artifacts
    val reports = (modules flatMap installModule).toSeq
    // Write a human-readable dependency graph next to the repo for debugging.
    dumpDepGraph(targetDir, reports)
    val licenses = for {
      report <- reports
      license <- LicenseReport.getLicenses(report, configs = Seq.empty)
    } yield license
    // Patch up known defects in the freshly-installed local repository.
    ridiculousHacks(new File(targetDir, "local-repository"), log)
    // Create reverse lookup table for licenses by artifact...
    val grouped = LicenseReport.groupLicenses(licenses)
    grouped.toIndexedSeq
  }
  /** Runs `f` with a println-like function that writes both to stdout and to `file`. */
  def withPrintableFile(file: File)(f: (Any => Unit) => Unit): Unit = {
    IO.createDirectory(file.getParentFile)
    Using.fileWriter(java.nio.charset.Charset.defaultCharset, false)(file) { writer =>
      def println(msg: Any): Unit = {
        System.out.println(msg)
        writer.write(msg.toString)
        writer.newLine()
      }
      f(println _)
    }
  }
  // TODO - Clean this up and put it somewhere useful.
  // Dumps requested modules, problems, evictions, artifacts and dependencies to local-repo-deps.txt.
  def dumpDepGraph(targetDir: File, reports: Seq[ResolveReport]): Unit = withPrintableFile(new File(targetDir, "local-repo-deps.txt")) { println =>
    // Here we make an assumption...
    // THE FIRST MODULE is the one that we wanted, the rest are
    // the ones we pulled in...
    for((report, id) <- reports.zipWithIndex) {
      val modules = report.getModuleIds.asInstanceOf[java.util.List[ModuleId]].asScala
      val requested = modules.head
      val name = requested.getOrganisation + ":" + requested.getName
      println(name + " - requested")
      val messages = report.getAllProblemMessages().asInstanceOf[java.util.List[String]].asScala
      for (msg <- messages) {
        println("\t PROBLEM: " + msg)
      }
      val evicted = Option(report.getEvictedNodes()).map(_.toSeq).getOrElse(Nil)
      for (e <- evicted) {
        println("\t EVICTED: " + e)
      }
      val artifacts = Option(report.getAllArtifactsReports()).map(_.toSeq).getOrElse(Nil)
      for (a <- artifacts) {
        println("\t ARTIFACT: " + a)
      }
      // Now find what we got:
      val deps = for {
        dep <- report.getDependencies.asInstanceOf[java.util.List[IvyNode]].asScala
        if dep != null
        depId = dep.getId
        //if !((depId.getOrganisation == requested.getOrganisation) && (depId.getName == requested.getName))
      } yield depId.getOrganisation + ":" + depId.getName + ":" + depId.getRevision
      deps foreach { dep => println("\t DEPENDENCY: " + dep) }
    }
  }
  // Workarounds for known-broken output of the ivy install step: a missing jsch jar
  // and ivy.xml files lacking the "e:" extra-attributes namespace.
  def ridiculousHacks(dir: File, log: Logger): Unit = {
    if (!dir.exists)
      sys.error(s"$dir doesn't exist")
    val jsch38jar = new File(dir, "com.jcraft/jsch/0.1.38/jars/jsch.jar")
    val jsch38ivy = new File(dir, "com.jcraft/jsch/0.1.38/ivys/ivy.xml")
    if (jsch38jar.exists) {
      log.warn(s"$jsch38jar already exists, might be able to remove this hack (try clean;offlineTests without the hack first though)")
    } else if (jsch38ivy.exists) {
      log.warn("Broken local repo didn't get jsch 0.1.38, fixing it via ridiculous hack")
      jsch38jar.getParentFile.mkdirs()
      IO.download(new URL("http://central.maven.org/maven2/com/jcraft/jsch/0.1.38/jsch-0.1.38.jar"),
        jsch38jar)
      if (!jsch38jar.exists)
        sys.error(s"Failed to download $jsch38jar")
    } else {
      log.warn(s"$jsch38ivy doesn't exist so this hack can probably be removed (try clean;offlineTests without the hack first though)")
    }
    // Replaces oldText with newText in `file` iff `ifText` occurs; fixes checksums to match.
    // Returns true when the file was actually modified.
    def replaceInFile(file: java.io.File, ifText: String, oldText: String, newText: String, msg: String): Boolean = {
      if (!file.exists())
        throw new RuntimeException(s"no such file $file")
      val oldContent = IO.read(file)
      if (oldContent.indexOf(ifText) >= 0) {
        val content = oldContent.replaceAllLiterally(oldText, newText)
        if (oldContent != content) {
          IO.write(file, content)
          log.warn(s"Broken local repo contained $file with no ivy extra namespace in it, fixed via ridiculous hack")
          // fix the checksums
          val sha1File = new File(file.getPath + ".sha1")
          val md5File = new File(file.getPath + ".md5")
          md5File.delete()
          IO.write(sha1File, Hash.toHex(Hash(content)) + "\n")
          log.warn(s"Deleted $md5File and fixed $sha1File to reflect new $file")
          true
        } else false
      } else false
    }
    val didAnythings = for {
      f <- (dir.***).get
      if f.name == "ivy.xml"
    } yield replaceInFile(f,
      "e:sbtTransformHash",
      "xmlns:m=\"http://ant.apache.org/ivy/maven\">",
      "xmlns:m=\"http://ant.apache.org/ivy/maven\" xmlns:e=\"http://ant.apache.org/ivy/extra\">",
      s"Added missing ivy extra namespace to $f")
    if (!didAnythings.foldLeft(false)(_ || _))
      log.warn(s"No ivy.xml fixups, can remove this hack if clean;offlineTests works without it")
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/FakeJsonBuilder.scala
|
<filename>ui/test/console/handler/FakeJsonBuilder.scala
package console.handler
import akka.actor.{ ActorRef, Props, ActorSystem, Actor, SupervisorStrategy }
object FakeJsonBuilder {
  /** Props for a FakeJsonBuilder that forwards everything to `probe`. */
  def props(probe: ActorRef): Props =
    Props(classOf[FakeJsonBuilder], probe)
}
/** Test stub actor: relays every received message to `probe`, preserving the original sender. */
class FakeJsonBuilder(probe: ActorRef) extends Actor {
  def receive: Receive = {
    case msg =>
      probe.tell(msg, sender)
  }
}
|
eed3si9n/activator
|
ui/app/console/ClientController.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package console
import akka.actor._
import akka.pattern._
import akka.util.Timeout
import play.api.libs.iteratee.{ Enumerator, Iteratee }
import play.api.libs.json.JsValue
import scala.concurrent.duration._
import scala.concurrent.{ Future, ExecutionContext }
import controllers.ConsoleController
import play.api.Play.current
/**
 * Parent actor for per-client handler actors.
 *
 * Creates one child (from `clientHandlerProps`) per client id, fans out periodic
 * Tick messages when `updateInterval` is set, and broadcasts Shutdown to children.
 */
class ClientController(clientHandlerProps: Props,
  updateInterval: Option[FiniteDuration]) extends Actor with ActorLogging {
  import ClientController._
  import ExecutionContext.Implicits.global
  // Scheduled only when an update interval is configured; cancelled on Shutdown.
  val tickScheduler: Option[Cancellable] =
    updateInterval.map(ui => context.system.scheduler.schedule(ui, ui, self, Tick))
  def receive = {
    case ic @ InitializeCommunication(id, _) =>
      // Create a handler child for this id at most once; forward preserves the sender.
      if (context.child(id).isEmpty)
        context.actorOf(clientHandlerProps, id) forward ic
    case Tick => context.children foreach { _ ! Tick }
    case Shutdown =>
      context.children foreach { _ ! Shutdown }
      tickScheduler.map(_.cancel())
  }
}
object ClientController {
  // --- Protocol messages ---
  case class CreateClient(id: String)
  // Reply carrying the handler ref and the outbound JSON stream.
  case class Connection(ref: ActorRef, enum: Enumerator[JsValue])
  case class HandleRequest(payload: JsValue)
  case class Update(js: JsValue)
  case class InitializeCommunication(id: String, consumer: ActorRef)
  case object Tick
  case object Shutdown
  def derivedProps(repository: AnalyticsRepository,
    defaultLimit: Int,
    updateInterval: Option[FiniteDuration]): Props =
    props(ClientHandler.derivedProps(repository, defaultLimit), updateInterval)
  // Default interval comes from the console.update-frequency config key (milliseconds).
  def props(clientHandlerProps: Props,
    updateInterval: Option[FiniteDuration] = Some(ConsoleController.config.getLong("console.update-frequency").milliseconds)): Props =
    Props(classOf[ClientController], clientHandlerProps, updateInterval)
  /**
   * Wires a websocket: incoming JSON is fed to the handler actor; closing the
   * iteratee poisons the handler. Returns the (in, out) pair for Play.
   */
  def join(id: String): Future[(Iteratee[JsValue, _], Enumerator[JsValue])] = {
    import play.api.libs.concurrent.Execution.Implicits._
    implicit val timeout = Timeout(1.second)
    (ConsoleController.clientHandlerActor ? CreateClient(id)).map {
      case Connection(ref, enumerator) => (Iteratee.foreach[JsValue] { ref ! HandleRequest(_) }.map(_ => ref ! PoisonPill), enumerator)
    }
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/ActorsSpec.scala
|
<gh_stars>1-10
package console.handler
import akka.actor.{ ActorRef, Props, ActorSystem, Actor, SupervisorStrategy }
import akka.testkit.{ TestKit, ImplicitSender }
import org.specs2.mutable.{ Specification, SpecificationLike }
import org.specs2.specification._
/** Base class for actor specs: provides a TestKit ActorSystem and shuts it down after each example. */
abstract class ActorsSpec(name: String) extends TestKit(ActorSystem(name)) with SpecificationLike with AfterExample with ImplicitSender {
  // Also shut the system down once the whole spec has run.
  override def map(fs: => Fragments) = super.map(fs) ^ step(system.shutdown, global = true)
  def after = system.shutdown
}
|
eed3si9n/activator
|
ui/app/console/parser/SpanParser.scala
|
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.parser
object SpanParser {
  // Span type used when none is specified.
  val DefaultSpanType = "message"
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/Models.scala
|
<reponame>eed3si9n/activator
package activator.typesafeproxy
import org.joda.time.DateTime
import play.api.libs.functional.syntax._
import play.api.libs.json._
import play.api.libs.json.Json._
/** A subscription level, identified by its wire-protocol name. */
sealed trait SubscriptionLevel {
  def name: String
}
object SubscriptionLevels {
  sealed abstract class NamedSubscription(override val name: String) extends SubscriptionLevel
  case object Developer extends NamedSubscription("developerSubscriber")
  case object Production extends NamedSubscription("productionSubscriber")
  case object TwentyFourSeven extends NamedSubscription("twentyFourSevenSubscriber")
  // Lookup table from wire name to subscription level; drives JSON parsing below.
  private val byName: Map[String, SubscriptionLevel] =
    Seq(Developer, Production, TwentyFourSeven).map(level => level.name -> (level: SubscriptionLevel)).toMap
  /** Parses a JSON string into one of the three known subscription levels. */
  val subscriptionLevelReads: Reads[SubscriptionLevel] = new Reads[SubscriptionLevel] {
    def reads(in: JsValue): JsResult[SubscriptionLevel] = in match {
      case JsString(raw) if byName.contains(raw) => JsSuccess(byName(raw))
      case v => JsError(s"Expected one of 'developerSubscriber', 'productionSubscriber', or 'twentyFourSevenSubscriber' got: $v")
    }
  }
  /** Serializes a subscription level as its wire name. */
  val subscriptionLevelWrites: Writes[SubscriptionLevel] = new Writes[SubscriptionLevel] {
    def writes(in: SubscriptionLevel): JsValue = JsString(in.name)
  }
  implicit val subscriptionLevelFormat: Format[SubscriptionLevel] = Format[SubscriptionLevel](subscriptionLevelReads, subscriptionLevelWrites)
}
/** Subscriber status: either full subscriber detail or a not-a-subscriber message. */
sealed trait SubscriberData
object SubscriberData {
  import SubscriptionLevels._
  case class NotASubscriber(message: String) extends SubscriberData
  case class Detail(id: String, subscription: SubscriptionLevel, isPaidSubscriber: Boolean, acceptedDate: Option[DateTime], majorVersion: String, currentReleaseVersion: String) extends SubscriberData
  // --- Example builders for tests/fixtures ---
  private val exampleId = "id"
  private val exampleMajorVersion = "majorVersion"
  private val exampleCurrentReleaseVersion = "currentReleaseVersion"
  def exampleDetail(id: String = exampleId,
    subscription: SubscriptionLevel = SubscriptionLevels.Developer,
    isPaidSubscriber: Boolean = false,
    acceptedDate: Option[DateTime] = None,
    majorVersion: String = exampleMajorVersion,
    currentReleaseVersion: String = exampleCurrentReleaseVersion): Detail =
    Detail(id, subscription, isPaidSubscriber, acceptedDate, majorVersion, currentReleaseVersion)
  private val exampleMessage = "message"
  def exampleNotASubscriber(message: String = exampleMessage): NotASubscriber =
    NotASubscriber(message)
  // Decodes the upstream JSON: a null/absent primarySubscriberRole means "not a subscriber".
  val subscriberDataReads: Reads[SubscriberData] = new Reads[SubscriberData] {
    def reads(in: JsValue): JsResult[SubscriberData] = {
      val id = Json.fromJson[String](in \ "id").asOpt
      val subscription = Json.fromJson[Option[SubscriptionLevel]](in \ "primarySubscriberRole").asOpt
      val isPaidSubscriber = Json.fromJson[Boolean](in \ "isSubscriber").asOpt
      val acceptedDate = Json.fromJson[Option[DateTime]](in \ "acceptedDate").asOpt
      val majorVersion = Json.fromJson[String](in \ "majorVersion").asOpt
      val currentReleaseVersion = Json.fromJson[String](in \ "currentReleaseVersion").asOpt
      val message = Json.fromJson[Option[String]](in \ "message").asOpt
      // Some(None) = field parsed but empty; Some(Some(x)) = field present with a value.
      (id, subscription, isPaidSubscriber, acceptedDate, majorVersion, currentReleaseVersion, message) match {
        case (_, Some(None), _, _, _, _, Some(Some(m))) => JsSuccess(NotASubscriber(m))
        case (_, Some(None), _, _, _, _, Some(None)) => JsSuccess(NotASubscriber("User not a subscriber"))
        case (Some(id), Some(Some(s)), Some(ips), Some(ad), Some(mv), Some(crv), _) => JsSuccess(Detail(id, s, ips, ad, mv, crv))
        case _ => JsError(s"'$in' could not be parsed into subscriber data")
      }
    }
  }
  // NOTE(review): writes emits "isPaidSubscriber" while reads consumes "isSubscriber" —
  // looks asymmetric; confirm this round-trip difference is intentional.
  val subscriberDataWrites: Writes[SubscriberData] = new Writes[SubscriberData] {
    def writes(in: SubscriberData): JsValue = in match {
      case NotASubscriber(m) => Json.obj("message" -> m)
      case Detail(id, s, ips, ad, mv, crv) => Json.obj("id" -> id,
        "primarySubscriberRole" -> s,
        "isPaidSubscriber" -> ips,
        "acceptedDate" -> ad,
        "majorVersion" -> mv,
        "currentReleaseVersion" -> crv)
    }
  }
  implicit val subscriberDataFormat: Format[SubscriberData] = Format[SubscriberData](subscriberDataReads, subscriberDataWrites)
}
/** Metadata describing the latest Activator release (download URLs, sizes, bundled versions). */
case class ActivatorLatestInfo(url: String,
  miniUrl: String,
  version: String,
  size: String,
  miniSize: String,
  playVersion: String,
  akkaVersion: String,
  scalaVersion: String,
  launcherGeneration: Int)
object ActivatorLatestInfo {
  // --- Example values for tests/fixtures ---
  private val exampleUrl = "http://downloads.typesafe.com/typesafe-activator/1.3.2/typesafe-activator-1.3.2.zip"
  private val exampleMiniUrl = "http://downloads.typesafe.com/typesafe-activator/1.3.2/typesafe-activator-1.3.2-minimal.zip"
  private val exampleVersion = "1.3.2"
  private val exampleSize = "408M"
  private val exampleMiniSize = "1M"
  private val examplePlayVersion = "2.4.0"
  private val exampleAkkaVersion = "2.3.9"
  private val exampleScalaVersion = "2.11.5"
  private val exampleLauncherGeneration = 1
  // Builds an example instance; every field defaults to the sample values above.
  def exampleDetail(url: String = exampleUrl,
    miniUrl: String = exampleMiniUrl,
    version: String = exampleVersion,
    size: String = exampleSize,
    miniSize: String = exampleMiniSize,
    playVersion: String = examplePlayVersion,
    akkaVersion: String = exampleAkkaVersion,
    scalaVersion: String = exampleScalaVersion,
    launcherGeneration: Int = exampleLauncherGeneration): ActivatorLatestInfo =
    ActivatorLatestInfo(url,
      miniUrl,
      version,
      size,
      miniSize,
      playVersion,
      akkaVersion,
      scalaVersion,
      launcherGeneration)
  // Macro-derived JSON codecs.
  val activatorLatestInfoReads: Reads[ActivatorLatestInfo] = Json.reads[ActivatorLatestInfo]
  val activatorLatestInfoWrites: Writes[ActivatorLatestInfo] = Json.writes[ActivatorLatestInfo]
  implicit val activatorLatestInfoFormat: Format[ActivatorLatestInfo] = Format[ActivatorLatestInfo](activatorLatestInfoReads, activatorLatestInfoWrites)
}
|
eed3si9n/activator
|
project/pegdown.sbt
|
<reponame>eed3si9n/activator
// pegdown: Markdown processing library — presumably used to render docs; verify usage.
libraryDependencies += "org.pegdown" % "pegdown" % "1.2.0"
// jtidy: HTML syntax checker / pretty-printer.
libraryDependencies += "net.sf.jtidy" % "jtidy" % "r938"
|
eed3si9n/activator
|
ui/app/activator/JsonHelper.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import play.api.libs.json._
import scala.util.parsing.json.JSONType
import scala.util.parsing.json.JSONObject
import scala.util.parsing.json.JSONArray
/** Helper methods to convert between JSON libraries. */
object JsonHelper {
  import play.api.libs.json._
  import play.api.libs.json.Json._
  import play.api.libs.functional.syntax._
  import play.api.libs.json.Reads._
  import play.api.libs.json.Writes._
  import java.io._
  import play.api.data.validation.ValidationError
  // Serialize a File as its path string.
  implicit object FileWrites extends Writes[File] {
    def writes(file: File) = JsString(file.getPath)
  }
  // Deserialize a File from a JSON string path; anything else is an error.
  implicit object FileReads extends Reads[File] {
    def reads(json: JsValue) = json match {
      case JsString(path) => JsSuccess(new File(path))
      case _ => JsError(Seq(JsPath() -> Seq(ValidationError("validate.error.expected.jsstring"))))
    }
  }
  // Succeeds with `reads` only when the `key` field equals `tag` (tag acts as a discriminator).
  def extractTagged[T](key: String, tag: String)(reads: Reads[T]): Reads[T] =
    (__ \ key).read[String](pattern(tag.r)) ~> reads
  def extractRequest[T](tag: String)(reads: Reads[T]): Reads[T] =
    extractTagged("request", tag)(reads)
  def extractMessage[T](tag: String)(reads: Reads[T]): Reads[T] =
    extractTagged("tag", tag)(reads)
  def emitMessage[T](tag: String)(bodyFunc: T => JsObject): Writes[T] =
    emitTagged("tag", tag)(bodyFunc)
  def extractResponse[T](tag: String)(reads: Reads[T]): Reads[T] =
    extractTagged("response", tag)(reads)
  // Writes `bodyFunc(in)` merged with the discriminator field key -> tag.
  def emitTagged[T](key: String, tag: String)(bodyFunc: T => JsObject): Writes[T] = new Writes[T] {
    def writes(in: T): JsValue =
      Json.obj(key -> tag) ++ bodyFunc(in)
  }
  // Variant with a two-level discriminator (tag plus sub-tag).
  def emitTagged[T](tagKey: String, tag: String, subTagKey: String, subTag: String)(bodyFunc: T => JsObject): Writes[T] = new Writes[T] {
    def writes(in: T): JsValue =
      Json.obj(tagKey -> tag) ++ Json.obj(subTagKey -> subTag) ++ bodyFunc(in)
  }
  def emitRequest[T](tag: String)(bodyFunc: T => JsObject): Writes[T] =
    emitTagged("request", tag)(bodyFunc)
  def emitResponse[T](tag: String, subTag: String)(bodyFunc: T => JsObject): Writes[T] =
    emitTagged("type", tag, "subtype", subTag)(bodyFunc)
}
|
eed3si9n/activator
|
ui/app/activator/SnapConfig.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import play.api.libs.json._
import scala.concurrent._
import ExecutionContext.Implicits.global
import java.io._
import activator.properties.ActivatorProperties.ACTIVATOR_USER_CONFIG_FILE
import activator.properties.ActivatorProperties.ACTIVATOR_PREVIOUS_USER_CONFIG_FILE
import activator.properties.ActivatorProperties.TEMPLATE_UUID_PROPERTY_NAME
import scala.concurrent.duration._
import sbt.IO
// createdTime and usedTime are only optional due to legacy config files
// createdTime and usedTime are only optional due to legacy config files
/** Per-application entry in the user config: where the app lives on disk plus metadata. */
case class AppConfig(location: File, id: String, createdTime: Option[Long], usedTime: Option[Long], cachedName: Option[String] = None) {
  // TODO - this method is dangerous, as it hits the file system.
  // Figure out when it should initialize/run.
  // Reads the template UUID from project/build.properties; None when absent or unreadable.
  val templateID: Option[String] =
    try {
      val props = new java.util.Properties
      sbt.IO.load(props, new java.io.File(location, "project/build.properties"))
      Option(props.getProperty(TEMPLATE_UUID_PROPERTY_NAME, null))
    } catch {
      case e: java.io.IOException => None // TODO - Log?
    }
}
object AppConfig {
  import play.api.data.validation.ValidationError
  // Files are serialized as their path strings.
  implicit object FileWrites extends Writes[File] {
    def writes(file: File) = JsString(file.getPath)
  }
  implicit val writes = Json.writes[AppConfig]
  implicit object FileReads extends Reads[File] {
    def reads(json: JsValue) = json match {
      case JsString(path) => JsSuccess(new File(path))
      case _ => JsError(Seq(JsPath() -> Seq(ValidationError("validate.error.expected.jsstring"))))
    }
  }
  implicit val reads = Json.reads[AppConfig]
}
/** Root of the persisted user configuration: the list of known applications. */
case class RootConfig(applications: Seq[AppConfig])
/**
 * Read/modify access to the persisted per-user RootConfig, with lazy async
 * loading, cached via a Future, and serialized rewrites.
 */
trait RootConfigOps {
  protected def userConfigFile: File
  protected def previousUserConfigFile: File
  // Loads the config file, falling back to (upgrading from) the previous location.
  private def loadUser = ConfigFile(userConfigFile, upgradeFrom = Some(previousUserConfigFile))
  // volatile because we read it unsynchronized. we don't care
  // which one we get, just something sane. Also double-checked
  // locking below requires volatile.
  // this is an Option so we can make forceReload() defer reloading
  // by setting to None and then going back to Some "on demand"
  @volatile private var userFutureOption: Option[Future[ConfigFile]] = None
  def forceReload(): Unit = {
    // we want to ensure we reload the file next time, but
    // avoid kicking off the reload now since we probably JUST
    // discovered the file was broken.
    userFutureOption = None
  }
  // get the current per-user configuration
  def user: RootConfig = try {
    // double-checked locking
    val userFuture = userFutureOption match {
      case None => synchronized {
        if (userFutureOption.isEmpty)
          userFutureOption = Some(loadUser)
        userFutureOption.get
      }
      case Some(f) => f
    }
    // we use the evil Await because 99% of the time we expect
    // the Future to be completed already.
    Await.result(userFuture.map(_.config), 8.seconds)
  } catch {
    case e: Exception =>
      // retry next time
      forceReload()
      // but go ahead and throw this time
      throw e
  }
  // modify the per-user configuration
  def rewriteUser(f: RootConfig => RootConfig): Future[Unit] = {
    // the "synchronized" is intended to ensure that all "f"
    // transformations in fact take place, though in undefined
    // order. Otherwise we could use the same future twice as
    // the "old" and generate two "new" one of which would be
    // discarded.
    synchronized {
      // note that the actual file-rewriting is NOT synchronized,
      // it is async. We're just synchronizing storing the Future
      // in our var so that no Future is "skipped"
      val userFuture = userFutureOption.getOrElse(loadUser) flatMap { configFile =>
        ConfigFile.rewrite(configFile)(f)
      }
      userFutureOption = Some(userFuture)
      userFuture map { _ => () }
    }
  }
}
/** Singleton access to the real on-disk user config, at the Activator-defined paths. */
object RootConfig extends RootConfigOps {
  implicit val writes = Json.writes[RootConfig]
  implicit val reads = Json.reads[RootConfig]
  // has to be lazy because trait uses it to init
  override lazy val userConfigFile = (new File(ACTIVATOR_USER_CONFIG_FILE)).getCanonicalFile()
  override lazy val previousUserConfigFile = (new File(ACTIVATOR_PREVIOUS_USER_CONFIG_FILE)).getCanonicalFile()
}
/** A parsed config file: the file location plus its JSON decoded into RootConfig (throws on bad JSON). */
private[activator] class ConfigFile(val file: File, json: JsValue) {
  require(file ne null)
  require(file.getParentFile ne null)
  val config = json.as[RootConfig]
}
private[activator] object ConfigFile {
  // Reads `file` as a JSON object; a missing file falls back to `upgradeFrom`
  // (the old location) and finally to an empty RootConfig.
  private def parse(file: File, upgradeFrom: Option[File]): JsValue = try {
    val input = new FileInputStream(file)
    val s = try {
      val out = new ByteArrayOutputStream()
      copy(input, out)
      new String(out.toString("UTF-8"))
    } finally {
      input.close()
    }
    Json.parse(s) match {
      case x: JsObject => x
      case whatever => throw new Exception("config file contains non-JSON-object")
    }
  } catch {
    case e: FileNotFoundException =>
      upgradeFrom map { old =>
        parse(old, upgradeFrom = None)
      } getOrElse {
        Json.toJson(RootConfig(Seq.empty[AppConfig]))
      }
  }
  /** Asynchronously loads the config; writes the file out if it only existed at the old location. */
  def apply(file: File, upgradeFrom: Option[File]): Future[ConfigFile] = {
    // a file that hasn't been "canonicalized" may not have
    // a parent file which eventually leads to NPE.
    val canonicalFile = file.getCanonicalFile
    require(canonicalFile.getParentFile ne null)
    Future {
      val obj = parse(canonicalFile, upgradeFrom)
      new ConfigFile(canonicalFile, obj)
    } flatMap { cf =>
      if (cf.file.exists) {
        Future.successful(cf)
      } else {
        // we must have upgraded, be sure to write
        // out the new file in the new location
        rewrite(cf)(identity)
      }
    }
  }
  /** Applies `f` to the config and atomically rewrites the file (write temp file, then move). */
  def rewrite(configFile: ConfigFile)(f: RootConfig => RootConfig): Future[ConfigFile] = {
    val newJson = Json.toJson(f(configFile.config))
    Future {
      // we parse the json we create back before doing any IO, as a sanity check
      val newConfig = new ConfigFile(configFile.file, newJson)
      val tmpFile = new File(newConfig.file.getCanonicalPath + ".tmp")
      ignoringIOException { IO.createDirectory(tmpFile.getParentFile) }
      ignoringIOException { IO.delete(tmpFile) }
      val bytesToWrite = newJson.toString.getBytes("UTF-8")
      val out = new FileOutputStream(tmpFile)
      try {
        val in = new ByteArrayInputStream(bytesToWrite)
        copy(in, out)
      } finally {
        out.close()
      }
      // kind of a silly paranoia check
      if (tmpFile.length() != bytesToWrite.length)
        throw new IOException("File does not have expected size: " + tmpFile.getCanonicalPath() + ": " + bytesToWrite.length)
      // then copy over
      IO.move(tmpFile, newConfig.file)
      newConfig
    }
  }
  // Swallows IOExceptions for best-effort cleanup steps (directory creation, temp deletion).
  private def ignoringIOException[T](block: => T): Unit = {
    try {
      block
    } catch {
      case e: IOException => ()
    }
  }
  private val MAX_BUF = 1024 * 1024
  private val MIN_BUF = 1024
  // Streams `in` to `out` with a buffer sized from in.available(), returning bytes copied.
  private def copy(in: InputStream, out: OutputStream): Long = {
    val buf = new Array[Byte](Math.min(MAX_BUF, Math.max(MIN_BUF, in.available())))
    var bytesWritten = 0
    var bytesRead = 0
    bytesRead = in.read(buf)
    while (bytesRead != -1) {
      out.write(buf, 0, bytesRead)
      bytesWritten += bytesRead
      bytesRead = in.read(buf)
    }
    bytesWritten
  }
}
|
eed3si9n/activator
|
ui/app/activator/HttpHelper.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import play.api.libs.ws.{ WSAuthScheme, DefaultWSResponseHeaders, WSRequestHolder }
import scala.concurrent.Future
import scala.util.{ Failure, Success }
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.iteratee._
import java.io._
import play.api.http.{ ContentTypeOf, Writeable }
object HttpHelper {
  // Shameless Rx hack
  /** Minimal Rx-style observer. */
  trait Observer[T] {
    def onCompleted(): Unit
    def onError(error: Throwable): Unit
    def onNext(data: T): Unit
  }

  /** Progress of a streaming download: expected length (if known), last chunk size, bytes so far. */
  // NOTE(review): Content-Length is carried as Int, so bodies over 2GB would overflow — confirm acceptable
  case class ChunkData(contentLength: Option[Int],
    chunkSize: Int,
    total: Int)

  trait ProgressObserver extends Observer[ChunkData]

  /** Observer that discards all progress events. */
  val devNullBuilder: ProgressObserver = new ProgressObserver {
    def onCompleted(): Unit = ()
    def onError(error: Throwable): Unit = ()
    def onNext(data: ChunkData): Unit = ()
  }

  /** Observer that prints progress to stdout (debugging aid). */
  val printProgressBuilder: ProgressObserver = new ProgressObserver {
    private var seenBytes: Int = 0
    private var expectedBytes: Option[Int] = None
    def onCompleted(): Unit =
      expectedBytes match {
        case None =>
          println(s"DONE!! total: $seenBytes")
        case Some(cl) =>
          println(s"DONE !! expected: $cl -- percent: ${(seenBytes.toDouble / cl.toDouble) * 100.0} -- total: $seenBytes")
      }
    def onError(error: Throwable): Unit =
      println(s"Error: $error")
    def onNext(data: ChunkData): Unit = {
      seenBytes = data.total
      expectedBytes = data.contentLength
      expectedBytes match {
        case None =>
          println(s"chunk: ${data.chunkSize} -- total: ${data.total}")
        case Some(cl) =>
          println(s"expected: $cl -- percent: ${(data.total.toDouble / cl.toDouble) * 100.0} -- chunk: ${data.chunkSize} -- total: ${data.total}")
      }
    }
  }

  // Iteratee that writes each body chunk to `outputStream`, reports progress,
  // and yields `destination` at EOF.
  private def step(
    expectedBytes: Option[Int],
    destination: File,
    outputStream: FileOutputStream,
    progressObserver: ProgressObserver): Input[Array[Byte]] => Iteratee[Array[Byte], File] = {
    def innerStep(total: Int): Input[Array[Byte]] => Iteratee[Array[Byte], File] = {
      case Input.Empty => Cont(innerStep(total))
      case Input.EOF =>
        progressObserver.onCompleted()
        Done(destination, Input.EOF)
      case Input.El(e) =>
        val newTotal = total + e.size
        progressObserver.onNext(ChunkData(expectedBytes, e.size, newTotal))
        outputStream.write(e)
        Cont(innerStep(newTotal))
    }
    innerStep(0)
  }

  def identityHolder(holder: WSRequestHolder): WSRequestHolder = holder

  /** Attach proxy basic-auth credentials from system properties, when both are set. */
  def proxyHolder(holder: WSRequestHolder): WSRequestHolder = (sys.props.get("http.proxyUser"), sys.props.get("http.proxyPassword")) match {
    case (Some(u), Some(p)) => holder.withAuth(u, p, WSAuthScheme.BASIC) // <- Only viable option?
    case _ => holder
  }

  /** GET `holder` and stream a 200 response body into `outputStream`; non-200 fails the future. */
  def doGet(
    destination: File,
    outputStream: FileOutputStream,
    holder: WSRequestHolder,
    observer: ProgressObserver): Future[Iteratee[Array[Byte], File]] = {
    proxyHolder(holder).get {
      case DefaultWSResponseHeaders(200, rh) =>
        val contentLength = rh.get(play.api.http.HeaderNames.CONTENT_LENGTH).flatMap(_.headOption.map(_.toInt))
        Cont(step(contentLength, destination, outputStream, observer))
      case DefaultWSResponseHeaders(x, _) => throw new RuntimeException(s"non-200 response code: $x for request ${holder.url}")
    }
  }

  /** POST `body` to `holder` and stream a 200 response body into `outputStream`; non-200 fails the future. */
  def doPost[T](body: T)(
    destination: File,
    outputStream: FileOutputStream,
    holder: WSRequestHolder,
    observer: ProgressObserver)(implicit wrt: Writeable[T], ct: ContentTypeOf[T]): Future[Iteratee[Array[Byte], File]] = {
    proxyHolder(holder).postAndRetrieveStream(body) {
      case DefaultWSResponseHeaders(200, rh) =>
        val contentLength = rh.get(play.api.http.HeaderNames.CONTENT_LENGTH).flatMap(_.headOption.map(_.toInt))
        Cont(step(contentLength, destination, outputStream, observer))
      case DefaultWSResponseHeaders(x, _) => throw new RuntimeException(s"non-200 response code: $x for request ${holder.url}")
    }
  }

  /**
   * Download over HTTP into `destination`, reporting progress to `observer`.
   * On failure the partial file is deleted and the error forwarded to the observer.
   */
  def retrieveFileHttp(
    holder: WSRequestHolder,
    observer: ProgressObserver,
    destination: File = File.createTempFile("activator_", ".tmp"),
    executor: (File, FileOutputStream, WSRequestHolder, ProgressObserver) => Future[Iteratee[Array[Byte], File]] = doGet,
    timeout: akka.util.Timeout = Akka.longTimeoutThatIsAProblem): Future[File] = {
    // import com.ning.http.client.Realm.AuthScheme
    val outputStream = new FileOutputStream(destination)
    val iterateeFuture = executor(destination, outputStream, holder.withRequestTimeout(timeout.duration.toMillis.intValue), observer) flatMap (_.run)
    iterateeFuture onComplete {
      // FIX: `case _: Success[File]` was an unchecked type-erased match; Success(_) is equivalent and warning-free
      case Success(_) => outputStream.close()
      case Failure(t) =>
        outputStream.close()
        destination.delete()
        observer.onError(t)
    }
    iterateeFuture
  }
}
|
eed3si9n/activator
|
project/site.sbt
|
<reponame>eed3si9n/activator
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.7.1")
|
eed3si9n/activator
|
project/Markdown.scala
|
import sbt._
object Markdown {
  /** Render a markdown file to an HTML fragment using Pegdown with hard-wraps enabled. */
  def parseIntoHtml(f: File): String = {
    import org.pegdown._
    val processor = new PegDownProcessor(Extensions.HARDWRAPS)
    processor.markdownToHtml(IO.read(f))
  }

  /** Run JTidy over an HTML string (plain HTML, not XHTML) and return the cleaned markup. */
  def tidyHtml(in: String): String = {
    import org.w3c.tidy.Tidy
    val tidy = new Tidy
    tidy.setXHTML(false)
    val reader = new java.io.StringReader(in)
    val writer = new java.io.StringWriter
    try tidy.parse(reader, writer)
    finally {
      reader.close()
      writer.close()
    }
    writer.toString
  }

  // TODO - provide outer context?
  /** Write a complete, tidied HTML page titled `title` with the rendered `md` as body to `html`. */
  def makeHtml(md: File, html: File, title: String): Unit = {
    // strip the margin first, then substitute, matching the original evaluation order
    val template =
      """|<!DOCTYPE html>
         |<html>
         |<head>
         |  <title>%s</title>
         |</head>
         |<body>
         |  %s
         |</body>
         |</html>""".stripMargin
    IO.write(html, tidyHtml(template.format(title, parseIntoHtml(md))))
  }
}
|
eed3si9n/activator
|
ui/test/activator/typesafeproxy/SubscriptionDataActorTest.scala
|
<reponame>eed3si9n/activator
package activator.typesafeproxy
import org.junit.Assert._
import org.junit._
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.{ Failure, Success, Try }
/** Shared fixtures for [[SubscriptionDataActorTest]]: canned success/failure results and a type assertion. */
object SubscriptionDataActorTest {
  // successful subscriber-data result, defaulting to the example fixture
  def success(subscriberData: SubscriberData = SubscriberData.exampleDetail()): Try[SubscriberData] = Success(subscriberData)
  // failed subscriber-data result wrapping `error`
  def failure(error: Throwable): Try[SubscriberData] = Failure(error)
  val canceled = failure(new ProxyCanceled("canceled"))
  val failed = failure(new ProxyFailure("fail"))
  val invalid = failure(new ProxyInvalidCredentials("invalid"))
  val timeout = failure(new ProxyTimeout("timeout"))
  /** Assert that `x` is an instance of `T`; throws AssertionError with a descriptive message otherwise. */
  def assertTypeOf[T](x: Any)(implicit ct: ClassTag[T]): Unit =
    if (!ct.runtimeClass.isInstance(x)) throw new AssertionError(s"Value '$x' of type ${x.getClass.getName} is not of type ${ct.runtimeClass.getName}", null)
}
/** Exercises the subscription-data proxy actor: success, cancel, and timeout/retry paths. */
class SubscriptionDataActorTest extends DefaultSpecification {
  import SubscriptionDataActorTest._
  import TypesafeComProxy._

  @Test
  def testShouldSucceed(): Unit = withHelper { helper =>
    import helper._
    val expected = success()
    withSubscriptionDataActor(subscriptionDataResult(expected)) { rpc =>
      val authGet = expectMsgType[Authentication.Get]
      authGet.withValue(AuthenticationActorTest.authenticated, 0L)
      // the Put arrives interleaved with start/end action reports; pick it out
      val put = expectMsgAllClassOf(10.seconds, classOf[SubscriberDetail.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction])
        .collectFirst { case p: SubscriberDetail.Put => p }.get
      assertTrue(put.value.isSuccess)
    }
  }

  @Test
  def testHandleCanceledFetch(): Unit = withHelper { helper =>
    import helper._
    withSubscriptionDataActor(delayedSubscriptionDataResult(3.seconds)) { rpc =>
      val authGet = expectMsgType[Authentication.Get]
      authGet.withValue(AuthenticationActorTest.authenticated, 0L)
      expectMsgType[UIActor.WebSocket.ReportStartAction]
      rpc ! UIActor.Cancel
      val putMsg = expectMsgAllClassOf(10.seconds, classOf[SubscriberDetail.Put], classOf[UIActor.WebSocket.ReportEndAction])
        .collectFirst { case p: SubscriberDetail.Put => p }.get
      assertTypeOf[ProxyCanceled](putMsg.value.failed.get)
    }
  }

  @Test
  def testHandleTimeoutRetry(): Unit = withHelper { helper =>
    import helper._
    val expected = success()
    val (ref, func) = mutableSubscriptionDataResult(timeout)
    withSubscriptionDataActor(func) { rpc =>
      val authGet = expectMsgType[Authentication.Get]
      authGet.withValue(AuthenticationActorTest.authenticated, 0L)
      expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
      // swap the canned result to a success, then ask the actor to retry
      ref.set(expected)
      rpc ! UIActor.Retry
      val put = expectMsgAllClassOf(10.seconds, classOf[SubscriberDetail.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction])
        .collectFirst { case p: SubscriberDetail.Put => p }.get
      assertTrue(put.value.isSuccess)
    }
  }

  @Test
  def testHandleTimeoutCancel(): Unit = withHelper { helper =>
    import helper._
    val (ref, func) = mutableSubscriptionDataResult(timeout)
    withSubscriptionDataActor(func) { rpc =>
      val authGet = expectMsgType[Authentication.Get]
      authGet.withValue(AuthenticationActorTest.authenticated, 0L)
      expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
      rpc ! UIActor.Cancel
      val putMsg = expectMsgType[SubscriberDetail.Put]
      assertTypeOf[ProxyCanceled](putMsg.value.failed.get)
    }
  }
}
|
eed3si9n/activator
|
ui/app/activator/RequestHelpers.scala
|
<gh_stars>0
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import play.api.libs.functional.syntax._
import play.api.libs.json._
import play.api.libs.json.Json._
/** Helpers for building tagged-JSON Reads used by the web-socket protocol. */
object RequestHelpers {
  import JsonHelper._
  // matches messages tagged with `typeName` and always yields `value`, ignoring the payload
  def extractTypeOnly[T](typeName: String, value: T): Reads[T] =
    extractTagged("type", typeName)(Reads[T](_ => JsSuccess(value)))
  // matches messages tagged with `typeName`, delegating payload parsing to `reads`
  def extractType[T](typeName: String)(reads: Reads[T]): Reads[T] =
    extractTagged("type", typeName)(reads)
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/ActorsJsonBuilder.scala
|
<reponame>eed3si9n/activator
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console.handler.rest
import akka.actor.{ ActorRef, Props }
import console.ClientController.Update
import activator.analytics.repository.ActorStatsSorted
import play.api.libs.json.{ Json, JsObject }
/** Actor that renders sorted actor statistics as JSON and sends them back to the requester. */
class ActorsJsonBuilder extends JsonBuilderActor {
  import ActorsJsonBuilder._
  def receive = {
    // build the JSON payload and deliver it to the original receiver as an Update
    case r: ActorsResult => r.receiver ! Update(createJson(r.actorStats))
  }
}
object ActorsJsonBuilder {
  /** Props factory for [[ActorsJsonBuilder]]. */
  def props(): Props =
    Props(classOf[ActorsJsonBuilder])

  /** Request envelope: where to send the resulting JSON plus the stats to render. */
  case class ActorsResult(receiver: ActorRef, actorStats: ActorStatsSorted)

  /** Full websocket payload, tagged with type "actors". */
  def createJson(stats: ActorStatsSorted): JsObject =
    Json.obj(
      "type" -> "actors",
      "data" -> Json.obj("actors" -> createActorsJson(stats)))

  /** Paginated actor list together with its paging metadata. */
  def createActorsJson(stats: ActorStatsSorted): JsObject = {
    val actorEntries = stats.stats.map(ActorJsonBuilder.createActorJson)
    Json.obj(
      "actors" -> Json.toJson(actorEntries),
      "offset" -> stats.offset,
      "limit" -> stats.limit,
      "total" -> stats.total)
  }
}
|
eed3si9n/activator
|
ui/app/activator/AppWebSocketActor.scala
|
<reponame>eed3si9n/activator
package activator
import activator.typesafeproxy._
import akka.actor._
import akka.event.LoggingAdapter
import akka.pattern._
import console.ClientController.HandleRequest
import play.api.Play
import play.api.libs.json.Json._
import play.api.libs.json._
import activator.JsonHelper._
import scala.reflect.ClassTag
import scala.util.control.NonFatal
import scala.util.{ Failure, Success }
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import akka.util.Timeout
/**
 * Per-app web socket actor: routes JSON messages between the browser and the
 * app's backing actors (sbt server, typesafe.com proxy, UI actors).
 */
class AppWebSocketActor(val config: AppConfig,
  val typesafeComActor: ActorRef,
  val lookupTimeout: Timeout) extends WebSocketActor[JsValue] with ActorLogging {
  implicit val timeout = WebSocketActor.timeout

  override def onMessage(json: JsValue): Unit = {
    json match {
      case WebSocketActor.Ping(ping) => produce(WebSocketActor.Pong(ping.cookie))
      case UIActor.WebSocket.Inbound(req) =>
        // resolve the target actor by path and forward the request to it
        // (foreach replaces the deprecated Future.onSuccess; same behavior)
        context.actorSelection(req.actorPath).resolveOne()(lookupTimeout).foreach { a => a ! req }
      case TypesafeComProxyUIActor.Inbound(req) =>
        context.actorOf(TypesafeComProxyUIActor.props(req, typesafeComActor, self))
      case SbtRequest(req) => handleSbtPayload(req.json)
      case WriteTypesafeProperties(msg) =>
        AppWebSocketActor.bestEffortCreateTypesafeProperties(config.location, msg.subscriptionId)
      case _ => log.debug("unhandled message on web socket: {}", json)
    }
  }

  import sbt.protocol.Completion
  implicit val completionWrites = Json.writes[Completion]

  /**
   * Parses incoming sbt payload into an sbt command to execute.
   * Send result of execution asynchronously via web socket.
   *
   * The 'serialId' is what connects the request with the asynchronous response.
   * This way a client can filter out events that are a result of its invocation.
   *
   * Please note that the 'serialId' is not any id used in sbt-server.
   */
  def handleSbtPayload(json: JsValue) = {
    // wrap a result in the standard "sbt" envelope and push it over the web socket
    def sendResult(subType: String, serialId: Long, result: JsValue, partialCommand: Option[String] = None) = {
      var payload = Seq(
        "type" -> JsString("sbt"),
        "subType" -> JsString(subType),
        "serialId" -> JsNumber(serialId),
        "result" -> result)
      val pc = for {
        pc <- partialCommand
        r = Seq("partialCommand" -> JsString(pc))
      } yield r
      payload ++= pc.getOrElse(Seq.empty)
      context.parent ! NotifyWebSocket(JsObject(payload))
    }
    json.validate[SbtPayload](SbtPayload.sbtPayloadReads) match {
      case JsSuccess(payload, path) =>
        payload.requestType match {
          case AppWebSocketActor.requestExecution =>
            context.parent ? RequestExecution(payload.serialId, Some(payload.command)) map {
              case SbtClientResponse(serialId, executionId: Long, command) =>
                sendResult(AppWebSocketActor.requestExecution, serialId, JsNumber(executionId))
              case other =>
                log.debug(s"sbt could not execute command: $other")
            }
          case AppWebSocketActor.cancelExecution =>
            if (payload.executionId.isDefined) {
              context.parent ? CancelExecution(payload.serialId, payload.executionId.get) map {
                case SbtClientResponse(serialId, result: Boolean, _) =>
                  sendResult(AppWebSocketActor.cancelExecution, serialId, JsBoolean(result))
                case other =>
                  log.debug("sbt could not cancel command")
              }
            } else {
              log.debug("Cannot cancel sbt request without execution id.")
              None
            }
          case AppWebSocketActor.possibleAutoCompletions =>
            context.parent ? PossibleAutoCompletions(payload.serialId, Some(payload.command)) map {
              case SbtClientResponse(serialId, choicesAny: Vector[_], command) =>
                val choices = choicesAny.map(_.asInstanceOf[sbt.protocol.Completion])
                sendResult(AppWebSocketActor.possibleAutoCompletions, serialId, JsArray(choices.toList map { Json.toJson(_) }), command)
              case other => log.debug(s"sbt could not execute possible auto completions")
            }
          case other =>
            // BUG FIX: this was a plain string literal (no `s` interpolator),
            // so the literal text "$other" was logged instead of the value
            log.debug(s"Unknown sbt request type: $other")
            None
        }
      case e: JsError =>
        log.debug(s"Could not parse $json to valid SbtPayload. Error is: $e")
        None
    }
  }

  // messages from our own actors that must be pushed out to the browser
  override def subReceive: Receive = {
    case NotifyWebSocket(json) =>
      log.debug("sending message on web socket: {}", json)
      produce(json)
    case UIActor.WebSocket.Outbound(msg) =>
      import UIActor.WebSocket._
      produce(Json.toJson(msg))
    case TypesafeComProxyUIActor.Outbound(msg) =>
      import TypesafeComProxyUIActor._
      produce(Json.toJson(msg))
  }
}
object AppWebSocketActor {
  // requestType discriminators used in SbtPayload
  val requestExecution = "RequestExecution"
  val cancelExecution = "CancelExecution"
  val possibleAutoCompletions = "PossibleAutoCompletions"

  /**
   * Best-effort write of `project/typesafe.properties` (containing the
   * subscription id) under `location`: does nothing for a blank id, skips
   * with an explanatory message when the location is missing or the file
   * already exists, and swallows non-fatal IO errors.
   */
  def bestEffortCreateTypesafeProperties(location: java.io.File, subscriptionId: String): Unit = {
    val sid = subscriptionId.trim
    if (sid.nonEmpty) {
      val propertiesFile = new java.io.File(location, "project/typesafe.properties")
      try {
        // templates should not have the file already, but if they do, punt because
        // we don't know what's going on.
        if (location.exists && !propertiesFile.exists) {
          propertiesFile.getParentFile().mkdirs() // in case project/ doesn't exist
          val props = new java.util.Properties()
          props.setProperty("typesafe.subscription", sid)
          val stream = new java.io.FileOutputStream(propertiesFile)
          // BUG FIX: close in finally so a failed store() doesn't leak the stream
          try props.store(stream, "Lightbend Reactive Platform subscription ID, see https://www.lightbend.com/subscription")
          finally stream.close()
        } else {
          // BUG FIX: the original condition was inverted (it printed "does not exist"
          // when the location DID exist) and the missing else-branches rendered "()"
          val reasons = Seq(
            if (!location.exists) Some(s"($location does not exist)") else None,
            if (propertiesFile.exists) Some(s"($propertiesFile already exists)") else None).flatten
          System.out.println(s"Not writing project/typesafe.properties to $location ${reasons.mkString(" ")}")
        }
      } catch {
        case NonFatal(e) =>
          System.err.println(s"Failed to write $propertiesFile: ${e.getClass.getName}: ${e.getMessage}")
      }
    }
  }
}
// raw sbt request as received over the web socket; payload is parsed later into SbtPayload
case class SbtRequest(json: JsValue)
// parsed sbt request; serialId correlates asynchronous responses with the request
case class SbtPayload(serialId: Long, requestType: String, command: String, executionId: Option[Long])
// request to persist the subscription id into project/typesafe.properties
case class WriteTypesafeProperties(subscriptionId: String)
/** JSON (de)serialization for [[WriteTypesafeProperties]] web-socket messages. */
object WriteTypesafeProperties {
  val tag = "WriteTypesafeProperties"
  // presumably extractRequest/emitRequest wrap the payload in a tagged request envelope — see JsonHelper
  implicit val writeTypesafePropertiesReads: Reads[WriteTypesafeProperties] =
    extractRequest[WriteTypesafeProperties](tag)((__ \ "subscriptionId").read[String].map(WriteTypesafeProperties.apply _))
  implicit val writeTypesafePropertiesWrites: Writes[WriteTypesafeProperties] =
    emitRequest(tag)(in => obj("subscriptionId" -> in.subscriptionId))
  // extractor used for pattern-matching raw JsValues in AppWebSocketActor.onMessage
  def unapply(in: JsValue): Option[WriteTypesafeProperties] = Json.fromJson[WriteTypesafeProperties](in).asOpt
}
/** JSON (de)serialization for [[SbtRequest]] web-socket messages. */
object SbtRequest {
  val tag = "sbt"
  // presumably extractRequest/emitRequest wrap the payload in a tagged request envelope — see JsonHelper
  implicit val sbtRequestReads: Reads[SbtRequest] =
    extractRequest[SbtRequest](tag)((__ \ "payload").read[JsValue].map(SbtRequest.apply _))
  implicit val sbtRequestWrites: Writes[SbtRequest] =
    emitRequest(tag)(in => obj("payload" -> in.json))
  // extractor used for pattern-matching raw JsValues in AppWebSocketActor.onMessage
  def unapply(in: JsValue): Option[SbtRequest] = Json.fromJson[SbtRequest](in).asOpt
}
object SbtPayload {
  import play.api.libs.functional.syntax._
  // maps the wire fields (serialId, type, command, optional executionId) onto SbtPayload
  implicit val sbtPayloadReads = (
    (__ \ "serialId").read[Long] and
    (__ \ "type").read[String] and
    (__ \ "command").read[String] and
    (__ \ "executionId").readNullable[Long])(SbtPayload.apply _)
}
|
eed3si9n/activator
|
ui/app/activator/AppManager.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import java.util.UUID
import akka.util.Timeout
import scala.concurrent.Future
import java.io.File
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scala.concurrent.Promise
import akka.pattern._
import play.Logger
import akka.actor._
import scala.concurrent.Await
import scala.concurrent.duration._
import play.api.libs.json.JsObject
import java.util.concurrent.atomic.AtomicInteger
import scala.util.{ Failure, Success }
import scala.util.control.NonFatal
import activator._
// messages understood by AppCacheActor
sealed trait AppCacheRequest
// load (or reuse) the app for this app-id/socket-id pair
case class GetOrCreateApp(id: AppIdSocketId) extends AppCacheRequest
// look up an already-cached app by its socket id only
case class GetApp(socketId: UUID) extends AppCacheRequest
// remove the app from the user config (fails if the app is currently loaded)
case class ForgetApp(appId: String) extends AppCacheRequest
// sweep terminated or failed entries out of the cache
case object Cleanup extends AppCacheRequest

// replies from AppCacheActor
sealed trait AppCacheReply
case class GotApp(app: activator.App) extends AppCacheReply
case object ForgotApp extends AppCacheReply

// cache entry: the app id plus the (possibly still-loading) app
private case class CachedApp(appId: String, futureApp: Future[activator.App])
/**
 * Caches loaded Apps keyed by socket UUID so repeated requests reuse the same
 * underlying App/actor. Watches each app's actor and evicts entries whose
 * actor died, terminated, or whose load failed.
 */
class AppCacheActor(val typesafeComActor: ActorRef,
  val lookupTimeout: Timeout,
  val projectPreprocessor: (ActorRef, ActorRef, AppConfig) => Unit) extends Actor with ActorLogging {
  // socket id -> (app id, future app); the future is pending while the app loads
  private var appCache: Map[UUID, CachedApp] = Map.empty

  override val supervisorStrategy = SupervisorStrategy.stoppingStrategy

  // Drop cache entries whose app actor equals `deadRef`, is terminated, or failed to load.
  // NOTE(review): blocks the actor with Await on completed futures; should be instant
  // since only completed futures are awaited, but worth confirming.
  private def cleanup(deadRef: Option[ActorRef]): Unit = {
    appCache = appCache.filter {
      case (socketId, cached) =>
        if (cached.futureApp.isCompleted) {
          try {
            // this should be "instant" but 5 seconds to be safe
            val app = Await.result(cached.futureApp, 5.seconds)
            if (Some(app.actor) == deadRef || app.isTerminated) {
              log.debug("cleaning up terminated app actor {} {}", socketId, app.actor)
              false
            } else {
              //log.debug("keeping live app actor {} {}", id, app.actor)
              true
            }
          } catch {
            case e: Exception =>
              log.debug("cleaning up app {} which failed to load due to '{}'", socketId, e.getMessage)
              false
          }
        } else {
          // still pending, keep it
          log.debug("app actor {} still pending start", socketId)
          true
        }
    }
  }

  override def receive = {
    case Terminated(ref) =>
      cleanup(Some(ref))
    case req: AppCacheRequest => req match {
      case GetOrCreateApp(id) =>
        appCache.get(id.socketId) match {
          case Some(cached) =>
            log.debug(s"returning existing app from app cache for $id")
            cached.futureApp map { a =>
              log.debug(s"existing app ${a.id} terminated=${a.isTerminated}")
              GotApp(a)
            } pipeTo sender
          case None => {
            // load the config, then build the App (which spawns its actor)
            val appFuture: Future[activator.App] = AppManager.loadConfigFromAppId(id.appId) map { config =>
              log.debug(s"creating a new app for $id")
              val appActorBuilder: AppConfig => Props = AppActor.props(_, typesafeComActor, lookupTimeout, projectPreprocessor)
              new activator.App(id, config, activator.Akka.system, appActorBuilder)
            }
            appCache += (id.socketId -> CachedApp(id.appId, appFuture))
            // set up to watch the app's actor, or forget the future
            // if the app is never created
            appFuture.onComplete { value =>
              log.debug(s"Completed app future for ${id} with ${value}")
              value.foreach { app =>
                context.watch(app.actor)
              }
              if (value.isFailure)
                self ! Cleanup
            }
            appFuture.map(GotApp(_)).pipeTo(sender)
          }
        }
      case GetApp(socketId) =>
        appCache.get(socketId) match {
          case Some(cached) =>
            log.debug(s"returning existing app from app cache for $socketId")
            cached.futureApp map { a =>
              log.debug(s"existing app ${a.id} terminated=${a.isTerminated}")
              GotApp(a)
            } pipeTo sender
          case None => {
            sender ! Status.Failure(new RuntimeException(s"No app found with socket ID $socketId, we have these ids: ${appCache.keys}"))
          }
        }
      case ForgetApp(appId) =>
        // refuse to forget an app that is still cached/in use
        appCache.find(_._2.appId == appId) match {
          case Some(_) =>
            log.debug(s"Attempt to forget in-use app $appId")
            sender ! Status.Failure(new Exception("This app is currently in use"))
          case None =>
            RootConfig.rewriteUser { root =>
              root.copy(applications = root.applications.filterNot(_.id == appId))
            } map { _ =>
              ForgotApp
            } pipeTo sender
        }
      case Cleanup =>
        cleanup(None)
    }
  }

  override def postStop() = {
    log.debug("postStop")
  }

  override def preRestart(reason: Throwable, message: Option[Any]) = {
    log.debug("preRestart {} {}", reason, message)
  }
}
// messages understood by KeepAliveActor
sealed trait KeepAliveRequest
// watch `ref`; the process stays alive while any registered ref is alive
case class RegisterKeepAlive(ref: ActorRef) extends KeepAliveRequest
// scheduled self-message; only honored when `serial` still matches (nothing changed since scheduling)
case class CheckForExit(serial: Int) extends KeepAliveRequest
// Note: we only CheckForExit on the transition from
// 1 to 0 keep alives, so we do not exit just because
// no keep alive has ever been added. This means there is
// infinite time on startup to wait for a browser tab to
// be opened.
/**
 * Tracks "keep alive" actor refs (one per browser tab). When the last one
 * dies, schedules a delayed exit check; if still empty at check time, the
 * actor poisons itself, which exits the process in postStop.
 */
class KeepAliveActor extends Actor with ActorLogging {
  var keepAlives = Set.empty[ActorRef]
  // this increments on keepAlives mutation, allowing us to decide
  // whether a CheckForExit is still valid or should be dropped
  var serial = 0

  def receive = {
    case Terminated(ref) =>
      log.debug("terminated {}", ref)
      if (keepAlives(ref)) {
        log.debug("Removing ref from keep alives {}", ref)
        keepAlives = keepAlives - ref
        serial += 1
      } else {
        log.debug("Ref was not in the keep alives set {}", ref)
      }
      if (keepAlives.isEmpty) {
        log.debug("scheduling CheckForExit")
        context.system.scheduler.scheduleOnce(60.seconds, self, CheckForExit(serial))
      }
    case req: KeepAliveRequest => req match {
      case RegisterKeepAlive(ref) =>
        log.debug("Actor will keep us alive {}", ref)
        keepAlives = keepAlives + ref
        serial += 1
        context.watch(ref)
      case CheckForExit(validitySerial) =>
        if (validitySerial != serial) {
          log.debug("Something changed since CheckForExit scheduled, disregarding")
        } else {
          log.debug("checking for exit, keepAlives={}", keepAlives)
          if (keepAlives.isEmpty) {
            log.info("Activator doesn't seem to be open in any browser tabs, so shutting down.")
            self ! PoisonPill
          }
        }
    }
  }

  override def postStop() {
    log.debug("postStop")
    log.info("Exiting.")
    // TODO - Not in debug mode
    val debugMode = sys.props.get("activator.runinsbt").contains("true")
    if (!debugMode) System.exit(0)
    else log.info("Would have killed activator if we weren't in debug mode.")
  }
}
/**
 * Entry point for loading, caching, and forgetting apps, plus the keep-alive
 * mechanism that shuts Activator down when no browser tab is connected.
 */
object AppManager {
  // actor that exits the process once no registered keep-alive ref remains
  private val keepAlive = activator.Akka.system.actorOf(Props(new KeepAliveActor), name = "keep-alive")

  /** Register `ref` so that its liveness keeps the Activator process running. */
  def registerKeepAlive(ref: ActorRef): Unit = {
    keepAlive ! RegisterKeepAlive(ref)
  }

  // cache of loaded apps, keyed by socket id (see AppCacheActor)
  val appCache = activator.Akka.system.actorOf(Props(new AppCacheActor(controllers.Application.typesafeComActor, controllers.Application.lookupTimeout, ProjectPreprocessor.defaultPreprocessor _)), name = "app-cache")

  val requestManagerCount = new AtomicInteger(1)

  // Loads an application based on its id.
  // This needs to look in the RootConfig for the App/Location
  // based on this ID.
  // If the app id does not exist ->
  // Return error
  // If it exists
  // Return the app
  def getOrCreateApp(id: AppIdSocketId): Future[activator.App] = {
    implicit val timeout = Akka.longTimeoutThatIsAProblem
    (appCache ? GetOrCreateApp(id)).map {
      case GotApp(app) => app
    }
  }

  /** Look up an already-cached app by socket id; the future fails if none is cached. */
  def getApp(socketId: UUID): Future[activator.App] = {
    implicit val timeout = Akka.longTimeoutThatIsAProblem
    (appCache ? GetApp(socketId)).map {
      case GotApp(app) => app
    }
  }

  // Loads the ID of an app based on the CWD.
  // If we don't have an ID in RootConfig for this location, then
  // - we should load the app and determine a good id
  // - we should store the id/location in the RootConfig
  // - We should return the new ID or None if this location is not an App.
  def loadAppIdFromLocation(location: File, eventHandler: Option[JsObject => Unit] = None): Future[ProcessResult[String]] = {
    val absolute = location.getAbsoluteFile()
    RootConfig.user.applications.find(_.location == absolute) match {
      case Some(app) => Promise.successful(ProcessSuccess(app.id)).future
      case None => {
        doInitialAppAnalysis(location, eventHandler) map { _.map(_.id) }
      }
    }
  }

  /** Resolve an app id to its stored config; fails if unknown or no longer a valid sbt project. */
  def loadConfigFromAppId(id: String): Future[activator.AppConfig] = {
    RootConfig.user.applications.find(_.id == id) match {
      case Some(config) =>
        if (!new java.io.File(config.location, "project/build.properties").exists()) {
          Promise.failed(new RuntimeException(s"${config.location} does not contain a valid sbt project")).future
        } else {
          Promise.successful(config).future
        }
      case whatever =>
        Promise.failed(new RuntimeException("No such app with id: '" + id + "'")).future
    }
  }

  /** Remove the app from the user config; fails (via AppCacheActor) if the app is currently loaded. */
  def forgetApp(id: String): Future[Unit] = {
    implicit val timeout = Akka.longTimeoutThatIsAProblem
    (appCache ? ForgetApp(id)).map(_ => ())
  }

  // choose id "name", "name-1", "name-2", etc.
  // should always be called inside rewriteUser to avoid
  // a race creating the same ID
  private def newIdFromName(root: RootConfig, name: String, suffix: Int = 0): String = {
    val candidate = name + (if (suffix > 0) "-" + suffix.toString else "")
    root.applications.find(_.id == candidate) match {
      case Some(app) => newIdFromName(root, name, suffix + 1)
      case None => candidate
    }
  }

  // FIXME we need to send events here or somehow be sure they are displayed
  // by the client side. Need sbt logs in the UI. Also failure-to-start-sbt
  // errors should go there.
  // Opens a throwaway sbt connection to discover the project's "name" setting
  // (falling back to the directory basename), then records the app in RootConfig.
  private def doInitialAppAnalysis(location: File, eventHandler: Option[JsObject => Unit] = None): Future[ProcessResult[AppConfig]] = {
    import sbt.client._
    import sbt.protocol._
    import sbt.serialization._
    val validated = ProcessSuccess(location).validate(
      Validation.isDirectory,
      Validation.looksLikeAnSbtProject)
    validated flatMapNested { location =>
      implicit val timeout = Akka.longTimeoutThatIsAProblem;
      // TODO factor out the configName / humanReadableName to share with
      // AppActor
      val connector = SbtConnector(configName = "activator",
        humanReadableName = "Activator", location)
      val nameFuture = {
        val namePromise = Promise[String]()
        val nameFuture = namePromise.future
        def onConnect(client: SbtClient): Unit = {
          val eventsSub = client.handleEvents({ event =>
            import sbt.protocol._
            // forward interesting sbt events to the UI as JSON
            val json = event match {
              case log: LogEvent => log match {
                case e: TaskLogEvent => SbtProtocol.wrapEvent(e)
                case e: DetachedLogEvent => SbtProtocol.wrapEvent(e)
                case e: BackgroundJobLogEvent => SbtProtocol.wrapEvent(e)
              }
              case e: BuildLoaded => SbtProtocol.wrapEvent(e)
              case e: BuildFailedToLoad => SbtProtocol.wrapEvent(e)
              case e: DetachedEvent => SbtProtocol.wrapEvent(e)
              case _ =>
                SbtProtocol.synthesizeLogEvent(LogMessage.DEBUG, event.toString)
            }
            eventHandler.foreach(_.apply(json))
            event match {
              // if we can load the build, get the name
              case _: BuildLoaded =>
                client.lookupScopedKey("name") map { keys =>
                  if (keys.isEmpty) {
                    namePromise.tryFailure(new RuntimeException("Project has no 'name' setting"))
                  } else {
                    val sub =
                      client.watch[String](SettingKey[String](keys.head)) { (key, result) =>
                        result match {
                          case Success(name) => namePromise.trySuccess(name)
                          case Failure(e) => namePromise.tryFailure(new RuntimeException(s"Failed to get name setting from project: ${e.toString}"))
                        }
                      }
                    // stop watching once the name is resolved either way
                    nameFuture.onComplete { _ => sub.cancel() }
                  }
                }
              // if we can't load the build, give up on name
              case _: BuildFailedToLoad =>
                namePromise.tryFailure(new RuntimeException("Failed to load the build"))
              case _ =>
            }
          })
          nameFuture.onComplete { _ => eventsSub.cancel() }
        }
        def onError(reconnecting: Boolean, message: String): Unit = {
          if (reconnecting) {
            // error for reason other than close
            Logger.debug(s"Error connecting to sbt: ${message}")
          } else {
            // this happens on our explicit close, but should be a no-op if we've already
            // gotten the project name
            Logger.debug(s"Error connecting to sbt (probably just closed): ${message}")
            namePromise.tryFailure(new RuntimeException("Connection to sbt closed without acquiring name of project"))
          }
        }
        connector.open(onConnect, onError)
        nameFuture.onComplete { _ =>
          Logger.debug("Closing sbt connector used to get project name")
          connector.close()
        }
        nameFuture
      }
      val resultFuture: Future[ProcessResult[AppConfig]] =
        nameFuture map { name =>
          Logger.debug("got project name from sbt: '" + name + "'")
          name
        } recover {
          case NonFatal(e) =>
            // here we need to just recover, because if you can't open the app
            // you can't work on it to fix it
            Logger.debug(s"error getting name from sbt: ${e.getClass.getName}: ${e.getMessage}")
            val name = location.getName
            Logger.debug("using file basename as app name: " + name)
            name
        } flatMap { name =>
          RootConfig.rewriteUser { root =>
            val oldConfig = root.applications.find(_.location == location)
            val now = System.currentTimeMillis
            val createdTime = oldConfig.flatMap(_.createdTime).getOrElse(now)
            val usedTime = now
            val config = AppConfig(id = newIdFromName(root, name), cachedName = Some(name),
              createdTime = Some(createdTime), usedTime = Some(usedTime), location = location)
            val newApps = root.applications.filterNot(_.location == config.location) :+ config
            root.copy(applications = newApps)
          } map { Unit => // NOTE(review): parameter named `Unit` shadows scala.Unit; `_` would be clearer
            import ProcessResult.opt2Process
            RootConfig.user.applications.find(_.location == location)
              .validated(s"Somehow failed to save new app at ${location.getPath} in config")
          }
        }
      // change a future-with-exception into a future-with-value
      // where the value is a ProcessFailure
      resultFuture recover {
        case NonFatal(e) =>
          ProcessFailure(e)
      }
    }
  }

  def onApplicationStop() = {
    Logger.debug("AppManager onApplicationStop is disabled pending some refactoring so it works with FakeApplication in tests")
    //Logger.debug("Killing app cache actor onApplicationStop")
    //appCache ! PoisonPill
  }
}
|
eed3si9n/activator
|
project/dispatch.sbt
|
libraryDependencies += "net.databinder" % "dispatch-http_2.10" % "0.8.10"
|
eed3si9n/activator
|
ui/test/console/handler/DeviationHandlerSpec.scala
|
<gh_stars>1-10
package console.handler
import com.typesafe.trace.{ TraceEvent, TraceEvents, Batch }
import akka.actor.{ ActorRef, Props, ActorSystem, Actor }
import com.typesafe.trace.uuid.UUID
import console.handler.rest.DeviationJsonBuilder.{ DeviationResult, ValidResult, InvalidResult }
import console.ClientController
import play.api.libs.json.Json
import console.handler.rest.DeviationJsonBuilder
import console.AnalyticsRepository
/** Fixtures for DeviationHandlerSpec: generated trace events preloaded into an in-memory repository. */
object DeviationHandlerSpec {
  import Generators._
  val traceId = new UUID()
  // 20 generated message trace annotations flattened into events under one trace id
  val traces = genTraceEvents(randomFlatten(genNMessageTraceAnnotations(20)), traceId)
  // in-memory analytics repository pre-loaded with the generated traces
  lazy val repository: AnalyticsRepository = {
    val r = AnalyticsRepository.freshMemoryObjects
    val tr = r.traceRepository
    tr.store(Batch(Seq(TraceEvents(traces))))
    r
  }
  // callback pair used by tests to observe handler output
  // NOTE(review): "validResonse" is a typo for "validResponse"; renaming requires
  // updating the overriding builder in DeviationHandlerSpec as well
  abstract class DeviationHandlerBuilder {
    def validResonse(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit
    def invalidResponse(sender: ActorRef, eventId: UUID): Unit
  }
  // adapt a builder into a DeviationHandlerBase wired to `repo`
  def deviationHandler(repo: AnalyticsRepository, builder: DeviationHandlerBuilder): DeviationHandlerBase = new DeviationHandlerBase {
    val repository: AnalyticsRepository = repo
    def useNoDeviation(sender: ActorRef, eventId: UUID): Unit = builder.invalidResponse(sender, eventId)
    def useDeviation(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit = builder.validResonse(sender, eventId, event, traces)
  }
}
// Exercises DeviationHandlerBase / DeviationHandler end to end: direct hook
// invocation, routing through the JSON-builder actor, and final JSON output.
class DeviationHandlerSpec extends ActorsSpec("DeviationHandlerSpec") {
  isolated
  import DeviationHandlerSpec._
  "Deviation Handler" should {
    "Find data" in {
      // Capture slots written by the builder callbacks; reset before each event.
      var resultSender: ActorRef = null
      var resultEvent: TraceEvent = null
      var resultTraces: Seq[TraceEvent] = null
      var resultEventID: UUID = null
      val builder = new DeviationHandlerBuilder {
        def validResonse(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit = {
          resultSender = sender
          resultTraces = traces
          resultEvent = event
          resultEventID = eventId
        }
        def invalidResponse(sender: ActorRef, eventId: UUID): Unit = {
          resultSender = sender
          resultEventID = eventId
        }
      }
      val h = deviationHandler(repository, builder)
      // Every stored event id should resolve to its event plus the full trace.
      forall(repository.traceRepository.allEventIds) { (eventID: UUID) =>
        resultSender = null
        resultTraces = null
        resultEvent = null
        resultEventID = null
        h.onModuleInformation(ActorRef.noSender, DeviationHandler.DeviationModuleInfo(eventID))
        val event = repository.traceRepository.event(eventID).get
        resultSender must equalTo(ActorRef.noSender)
        resultEventID must equalTo(eventID)
        resultEvent must equalTo(event)
        resultTraces must equalTo(traces)
      }
    }
    "Not find data for unknown event IDs" in {
      var resultSender: ActorRef = null
      var resultEvent: TraceEvent = null
      var resultTraces: Seq[TraceEvent] = null
      var resultEventID: UUID = null
      val builder = new DeviationHandlerBuilder {
        def validResonse(sender: ActorRef, eventId: UUID, event: TraceEvent, traces: Seq[TraceEvent]): Unit = {
          resultSender = sender
          resultTraces = traces
          resultEvent = event
          resultEventID = eventId
        }
        def invalidResponse(sender: ActorRef, eventId: UUID): Unit = {
          resultSender = sender
          resultEventID = eventId
        }
      }
      val h = deviationHandler(repository, builder)
      // An id guaranteed absent from the repository must hit the invalid path,
      // leaving the event/traces slots untouched (null).
      val uniqueUUID = Generators.uniqueUUID(repository.traceRepository.allEventIds.toSet)
      h.onModuleInformation(ActorRef.noSender, DeviationHandler.DeviationModuleInfo(uniqueUUID))
      resultEventID must equalTo(uniqueUUID)
      resultSender must equalTo(ActorRef.noSender)
      resultTraces must equalTo(null)
      resultEvent must equalTo(null)
    }
    "Send found data to the JSON builder" in {
      // Replace the real JSON builder with a probe so we can inspect the
      // DeviationResult the handler forwards.
      val deviationsHandler = system.actorOf(DeviationHandler.props(repository, FakeJsonBuilder.props(testActor)))
      val r = forall(repository.traceRepository.allEventIds.take(1)) { (eventID: UUID) =>
        deviationsHandler ! DeviationHandler.DeviationModuleInfo(eventID)
        val event = repository.traceRepository.event(eventID).get
        val traces = repository.traceRepository.trace(event.trace)
        val expected = ValidResult(testActor, eventID, event, traces)
        expectMsgPF() {
          case x: DeviationResult => x must equalTo(expected)
        }
      }
      system.stop(deviationsHandler)
      r
    }
    "Get JSON out" in {
      // Full pipeline: the handler's own JSON builder should emit an Update
      // whose payload matches DeviationJsonBuilder.createJson for the event.
      val deviationsHandler = system.actorOf(DeviationHandler.props(repository))
      val r = forall(repository.traceRepository.allEventIds.take(1)) { (eventID: UUID) =>
        deviationsHandler ! DeviationHandler.DeviationModuleInfo(eventID)
        val event = repository.traceRepository.event(eventID).get
        val traces = repository.traceRepository.trace(event.trace)
        val expected = ValidResult(testActor, eventID, event, traces)
        val tracesJson = DeviationJsonBuilder.createJson(expected)
        expectMsgPF() {
          case x: ClientController.Update => x.js must equalTo(tracesJson)
        }
      }
      system.stop(deviationsHandler)
      r
    }
  }
}
|
eed3si9n/activator
|
ui-common/src/main/scala/activator/TemplatePopularityContest.scala
|
<gh_stars>1-10
package activator
import scala.concurrent._
import java.net._
import scala.util.control.NonFatal
object TemplatePopularityContest {
  /**
   * POSTs to the typesafe.com "record-cloned" endpoint for the given template
   * name and returns the HTTP response code.
   *
   * The connection work is blocking and runs on the supplied ExecutionContext.
   */
  def recordCloned(name: String)(implicit ec: ExecutionContext): Future[Int] = {
    // we use this constructor because it would escape any
    // chars in "name" though in theory name is an url-friendly
    // name to begin with.
    val uri = new URI("https", null, "typesafe.com", -1,
      s"/activator/template/$name/record-cloned", null, null)
    Future {
      val url = uri.toURL()
      url.openConnection() match {
        case c: HttpURLConnection =>
          c.setRequestMethod("POST")
          c.connect()
          val code = c.getResponseCode()
          c.disconnect()
          code
      }
    }
  }

  /** Like recordCloned, but discards the result and swallows non-fatal errors. */
  def recordClonedIgnoringErrors(name: String)(implicit ec: ExecutionContext): Future[Unit] =
    recordCloned(name) map { _ => () } recover {
      // FIX: previously this returned Future.successful(()), which only
      // type-checked via value discarding and suggested a nested Future.
      // recover must yield the recovered value itself, i.e. plain ().
      case NonFatal(_) => ()
    }
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/DeviationDetailJsonBuilder.scala
|
<reponame>eed3si9n/activator<filename>ui/app/console/handler/rest/DeviationDetailJsonBuilder.scala
package console.handler.rest
import play.api.libs.json.{ Json, JsObject, JsValue, JsArray, Writes, JsString }
import activator.analytics.data.DeviationDetail
// NOTE(review): the object name is missing an 'i' ("Devation"); kept as-is
// because callers elsewhere reference it by this exact name.
object DevationDetailJsonBuilder {
  /** Renders a sequence of deviation details as a JSON array. */
  def createDeviationDetailJsonSeq(deviationDetails: Seq[DeviationDetail]): JsArray =
    new JsArray(deviationDetails.map(createDeviationDetailJson))

  /** Renders one deviation detail as a flat JSON object. */
  def createDeviationDetailJson(deviationDetail: DeviationDetail): JsObject = {
    val detail = deviationDetail
    Json.obj(
      "event" -> detail.eventId.toString,
      "trace" -> detail.traceId.toString,
      "message" -> detail.message,
      "timestamp" -> detail.timestamp)
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/DeviationsHandlerSpec.scala
|
package console.handler
import activator.analytics.data._
import activator.analytics.data.TimeRangeType.TimeRangeType
import akka.actor.{ ActorRef, Props, ActorSystem, Actor, ActorPath }
import com.typesafe.trace.uuid.UUID
import com.typesafe.trace.{ TraceEvent, TraceEvents, Batch }
import console.ClientController
import console.handler.rest.DeviationsJsonBuilder
import console.handler.rest.DeviationsJsonBuilder.DeviationsResult
import org.specs2.mutable._
import play.api.libs.json.Json
import scala.concurrent.duration._
import console.AnalyticsRepository
object DeviationsHandlerSpec {
  import Generators._

  // Time-range fixtures: 30 buckets each at minute, hour and day granularity.
  val minuteTimeRanges = genTimeRanges(0, 30.minutes.toMillis.toInt, 1.minute.toMillis.toInt, TimeRangeType.Minutes)
  val hourTimeRanges = genTimeRanges(0, 30.hours.toMillis.toInt, 1.hour.toMillis.toInt, TimeRangeType.Hours)
  val dayTimeRanges = genTimeRanges(0, 30.days.toMillis.toInt, 1.day.toMillis.toInt, TimeRangeType.Days)

  // Scopes spanning three actor paths, hosts, dispatchers and systems.
  val scopes = genActorScopes(
    Set(ActorPath.fromString("akka://user/a"), ActorPath.fromString("akka://user/b"), ActorPath.fromString("akka://user/c")),
    Set(),
    Set("host1", "host2", "host3"),
    Set("dispatcher1", "dispatcher2", "dispatcher3"),
    Set("system1", "system2", "system3"))

  val timeRanges = minuteTimeRanges ++ hourTimeRanges ++ dayTimeRanges

  // One ActorStats per (scope, time range), carrying generated deviation details.
  val stats = {
    val deviationsGen = genDeviationDetails()
    genActorStats(scopes, timeRanges) { (idx, scope, range) =>
      ActorStats(range, scope, ActorStatsMetrics(bytesRead = idx, bytesWritten = idx, deviationDetails = deviationsGen()))
    }
  }

  // Per time range: one guaranteed ErrorStats plus up to four more.
  val errorStats = timeRanges.flatMap { range =>
    val errorStatsGen = genErrorStats(range)
    genMultiple(1, errorStatsGen) ++ genMaxMultiple(4, errorStatsGen)
  }

  // Repository pre-populated with both fixtures; lazy so it is built on first use.
  lazy val repository: AnalyticsRepository = {
    val repo = AnalyticsRepository.freshMemoryObjects
    repo.actorStatsRepository.save(stats)
    repo.errorStatsRepository.save(errorStats)
    repo
  }

  // Builds a handler whose result hook delegates to the supplied callback.
  def deviationsHandler(repo: AnalyticsRepository)(body: (ActorRef, Either[Seq[ErrorStats], Seq[ActorStats]]) => Unit): DeviationsHandlerBase =
    new DeviationsHandlerBase {
      val repository: AnalyticsRepository = repo
      def useDeviationsResult(sender: ActorRef, result: Either[Seq[ErrorStats], Seq[ActorStats]]): Unit =
        body(sender, result)
    }
}
// Exercises DeviationsHandlerBase / DeviationsHandler for both actor-level
// stats (Right) and error-level stats (Left), plus the JSON pipeline.
class DeviationsHandlerSpec extends ActorsSpec("DeviationsHandlerSpec") with ActorHandlerSpecification {
  isolated
  import DeviationsHandlerSpec._
  import DeviationsJsonBuilder._
  "Deviations Handler" should {
    "Find data for actors" in {
      // Capture slots written by the handler callback; reset before each case.
      var resultSender: ActorRef = null
      var resultResult: Either[Seq[ErrorStats], Seq[ActorStats]] = null
      val h = deviationsHandler(repository) { (ar, result) =>
        resultSender = ar
        resultResult = result
      }
      // Querying with each stat's own scope+range should return exactly that stat.
      forall(stats) { (as: ActorStats) =>
        resultSender = null
        resultResult = null
        h.onModuleInformation(ActorRef.noSender, DeviationsHandler.DeviationsModuleInfo(as.scope,
          time = as.timeRange,
          dataFrom = None,
          chunkRange = None))
        resultSender must equalTo(ActorRef.noSender)
        resultResult must (beRight { (r: Seq[ActorStats]) =>
          r must have length (1)
          r.head must beEqualActorStats(as)
        })
      }
    }
    "Find data for errors" in {
      var resultSender: ActorRef = null
      var resultResult: Either[Seq[ErrorStats], Seq[ActorStats]] = null
      val h = deviationsHandler(repository) { (ar, result) =>
        resultSender = ar
        resultResult = result
      }
      // An empty Scope() routes the query to the error-stats (Left) branch.
      forall(errorStats) { (es: ErrorStats) =>
        resultSender = null
        resultResult = null
        h.onModuleInformation(ActorRef.noSender, DeviationsHandler.DeviationsModuleInfo(Scope(),
          time = es.timeRange,
          dataFrom = None,
          chunkRange = None))
        resultSender must equalTo(ActorRef.noSender)
        resultResult must (beLeft { (r: Seq[ErrorStats]) =>
          r must have length (1)
          // r.head must equalTo(es) // At the moment cannot predict what value should be produced
          // Because counts for error stats are recomputed after filter
          // operation on time. Need a better way to test this.
        })
      }
    }
    "Send found actor data to the JSON builder" in {
      // Probe replaces the real JSON builder so the DeviationsResult can be inspected.
      val deviationsHandler = system.actorOf(DeviationsHandler.props(repository, 10, FakeJsonBuilder.props(testActor)))
      val r = forall(stats) { (as: ActorStats) =>
        deviationsHandler ! DeviationsHandler.DeviationsModuleInfo(as.scope,
          time = as.timeRange,
          dataFrom = None,
          chunkRange = None)
        expectMsgPF() {
          case x: DeviationsResult =>
            x.receiver must equalTo(testActor)
            x.result must (beRight { (r: Seq[ActorStats]) =>
              r must have length (1)
              r.head must beEqualActorStats(as)
            })
        }
      }
      system.stop(deviationsHandler)
      r
    }
    "Send found error data to the JSON builder" in {
      val deviationsHandler = system.actorOf(DeviationsHandler.props(repository, 10, FakeJsonBuilder.props(testActor)))
      val r = forall(errorStats) { (es: ErrorStats) =>
        deviationsHandler ! DeviationsHandler.DeviationsModuleInfo(Scope(),
          time = es.timeRange,
          dataFrom = None,
          chunkRange = None)
        expectMsgPF() {
          case x: DeviationsResult =>
            x.receiver must equalTo(testActor)
            x.result must (beLeft { (r: Seq[ErrorStats]) =>
              r must have length (1)
              // r.head must equalTo(es) // At the moment cannot predict what value should be produced
              // Because counts for error stats are recomputed after filter
              // operation on time. Need a better way to test this.
            })
        }
      }
      system.stop(deviationsHandler)
      r
    }
    "Get JSON out for actor data" in {
      // This is a bare minimum, mostly bogus test.
      // Need meaningful way to ensure actual JSON conforms to expected value
      val deviationsHandler = system.actorOf(DeviationsHandler.props(repository, 10))
      val r = forall(stats) { (as: ActorStats) =>
        deviationsHandler ! DeviationsHandler.DeviationsModuleInfo(as.scope,
          time = as.timeRange,
          dataFrom = None,
          chunkRange = None)
        expectMsgPF() {
          case x: ClientController.Update => x.js must not equalTo (Json.obj())
        }
      }
      system.stop(deviationsHandler)
      r
    }
    "Get JSON out for error data" in {
      // This is a bare minimum, mostly bogus test.
      // Need meaningful way to ensure actual JSON conforms to expected value
      val deviationsHandler = system.actorOf(DeviationsHandler.props(repository, 10))
      val r = forall(errorStats) { (es: ErrorStats) =>
        deviationsHandler ! DeviationsHandler.DeviationsModuleInfo(Scope(),
          time = es.timeRange,
          dataFrom = None,
          chunkRange = None)
        expectMsgPF() {
          case x: ClientController.Update => x.js must not equalTo (Json.obj())
        }
      }
      system.stop(deviationsHandler)
      r
    }
  }
}
|
eed3si9n/activator
|
launcher/src/main/scala/activator/ActivatorCliHelper.scala
|
<reponame>eed3si9n/activator
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import sbt.complete.Parser
import akka.actor.ActorSystem
import scala.concurrent.duration._
import java.util.concurrent.TimeUnit
trait ActivatorCliHelper {
  import ActivatorCliHelper._

  // Explicit type annotation: this is a publicly inherited implicit, so its
  // signature should be stated rather than inferred.
  implicit val timeout: akka.util.Timeout = akka.util.Timeout(defaultDuration)

  /** Uses SBT complete library to read user input with a given auto-completing parser. */
  def readLine[U](parser: Parser[U], prompt: String = "> ", mask: Option[Char] = None): Option[U] = {
    val reader = new sbt.FullReader(None, parser)
    reader.readLine(prompt, mask) flatMap { line =>
      // Re-parse the completed line; unparseable input yields None.
      Parser.parse(line, parser) match {
        case Right(value) => Some(value)
        case Left(_) => None // unused binding removed; the error is deliberately discarded
      }
    }
  }
}
object ActivatorCliHelper {
  // One shared actor system for all CLI helpers; its config supplies the timeout.
  val system = ActorSystem("default")
  // "activator.timeout" is read from the system's config in milliseconds.
  val defaultDuration = {
    val millis = system.settings.config.getDuration("activator.timeout", MILLISECONDS)
    Duration(millis, MILLISECONDS)
  }
}
|
eed3si9n/activator
|
ui/test/console/handler/rest/DeviationJsonBuilderSpec.scala
|
<filename>ui/test/console/handler/rest/DeviationJsonBuilderSpec.scala
package console.handler.rest
import com.typesafe.trace.uuid.UUID
import play.api.libs.json.Json
import org.specs2.mutable.Specification
import com.typesafe.trace._
import play.api.libs.json._
import DeviationJsonBuilder.{ InvalidResult, ValidResult, DeviationResult }
import akka.actor.ActorRef
import scala.util.Random
import org.specs2.matcher.MatchResult
object DeviationJsonSpec {
  import console.handler.Generators._
  // Fixture: 200 generated message-trace events sharing a single trace id.
  val numberOfTraces = 200
  val traceId = new UUID()
  val traces = {
    val annotations = randomFlatten(genNMessageTraceAnnotations(numberOfTraces))
    genTraceEvents(annotations, traceId)
  }
}
// Validates the JSON shape emitted by DeviationJsonBuilder for valid and
// invalid deviation results.
class DeviationJsonSpec extends Specification {
  import DeviationJsonSpec._

  /** Checks that a serialized trace event carries all expected fields. */
  def validEventStructure(in: JsValue): MatchResult[Any] = {
    // In a mutable Specification each failing `must` throws, so every check
    // below runs even though only the last MatchResult is returned.
    (in \ "id").asOpt[JsValue] must beSome
    (in \ "trace").asOpt[JsValue] must beSome
    (in \ "sampled").asOpt[JsValue] must beSome
    (in \ "node").asOpt[JsValue] must beSome
    (in \ "actorSystem").asOpt[JsValue] must beSome
    (in \ "host").asOpt[JsValue] must beSome
    (in \ "timestamp").asOpt[JsValue] must beSome
    (in \ "nanoTime").asOpt[JsValue] must beSome
    (in \ "annotation").asOpt[JsValue] must beSome
  }

  /** Checks one tree node: its event and, recursively, its children. */
  def validateChild(in: JsValue): MatchResult[Any] = {
    (in \ "event").asOpt[JsValue] must beSome
    validEventStructure(in \ "event")
    (in \ "children").asOpt[JsValue] must beSome
    // FIX: previously recursed into (in \ "traceTree" \ "children"), but the
    // "traceTree" wrapper exists only at the payload root — child nodes keep
    // their children directly under "children", so the recursion never
    // validated any real child nodes.
    validateChildren((in \ "children").as[JsArray].value)
  }

  def validateChildren(in: Seq[JsValue]): MatchResult[Any] =
    forall(in)(validateChild)

  "DeviationJson" should {
    "generate JSON for a valid result" in {
      val trace = traces(Random.nextInt(numberOfTraces))
      val event = trace.id
      val result = ValidResult(ActorRef.noSender, event, trace, traces)
      val r = DeviationJsonBuilder.createJson(result)
      (r \ "type").asOpt[String] must beSome("deviation")
      (r \ "data").asOpt[JsValue] must beSome
      val root = (r \ "data").as[JsValue]
      (root \ "traceEvent").asOpt[JsValue] must beSome
      validEventStructure(root \ "traceEvent")
      (root \ "traceTree").asOpt[JsValue] must beSome
      (root \ "traceTree" \ "event").asOpt[JsValue] must beSome
      validEventStructure(root \ "traceTree" \ "event")
      (root \ "traceTree" \ "children").asOpt[JsValue] must beSome
      validateChildren((root \ "traceTree" \ "children").as[JsArray].value)
    }
    "generate JSON for an invalid result" in {
      val trace = traces(Random.nextInt(numberOfTraces))
      val event = trace.id
      val result = InvalidResult(ActorRef.noSender, event)
      val r = DeviationJsonBuilder.createJson(result)
      // Invalid results keep the envelope but carry a null payload.
      (r \ "type").asOpt[String] must beSome("deviation")
      (r \ "data").asOpt[JsValue] must beSome(JsNull)
    }
  }
}
|
eed3si9n/activator
|
project/JavaVersionCheck.scala
|
import sbt._
// Build helper that refuses to publish unless the javac in use matches a
// configured version prefix. NOTE: sbt task macros (`.value`) are position
// sensitive, so the statement layout here must not be rearranged.
object JavaVersionCheck {
  val javacVersionPrefix = taskKey[Option[String]]("java version prefix required by javacVersionCheck")
  val javacVersionCheck = taskKey[String]("checks the Java version vs. javacVersionPrefix, returns actual version")
  def javacVersionCheckSettings: Seq[Setting[_]] = Seq(
    javacVersionPrefix := Some("1.8"),
    javacVersionCheck := {
      val realLog = Keys.streams.value.log
      val javac = (Keys.compileInputs in Keys.compile in Compile).value.compilers.javac
      // `javac -version` reports its version as a Warn-level "javac <ver>"
      // line; this logger intercepts that line and forwards everything else
      // to the real log.
      val captureVersionLog = new sbt.Logger() {
        var captured: Option[String] = None
        def log(level: sbt.Level.Value, message: => String): Unit = {
          val m = message
          if (level == Level.Warn && m.startsWith("javac ")) {
            captured = Some(m.substring("javac ".length).trim)
          } else {
            realLog.log(level, m)
          }
        }
        def success(message: => String): Unit = realLog.success(message)
        def trace(t: => Throwable): Unit = realLog.trace(t)
      }
      // Run javac with no sources purely to make it print its version.
      javac(sources = Nil, classpath = Nil, outputDirectory = file("."), options = Seq("-version"))(captureVersionLog)
      val version = captureVersionLog.captured match {
        case Some(v) => v
        case None =>
          throw new Exception("Failed to get or parse the output of javac -version")
      }
      // Fail loudly if a prefix is configured and the detected version differs.
      javacVersionPrefix.value match {
        case Some(prefix) =>
          if (!version.startsWith(prefix)) {
            throw new Exception(s"javac version ${version} may not be used to publish, it has to start with ${prefix} due to javacVersionPrefix setting")
          }
        case None =>
      }
      version
    },
    // we hook onto deliverConfiguration to run the version check as early as possible,
    // before we actually do anything. But we don't want to require the version check
    // just for compile.
    Keys.deliverConfiguration := {
      val log = Keys.streams.value.log
      val javacVersion = javacVersionCheck.value
      log.info("Will publish with javac version " + javacVersion)
      Keys.deliverConfiguration.value
    },
    Keys.deliverLocalConfiguration := {
      val log = Keys.streams.value.log
      val javacVersion = javacVersionCheck.value
      log.info("Will publish locally with javac version " + javacVersion)
      Keys.deliverLocalConfiguration.value
    }
  )
}
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/SubscriptionDataActor.scala
|
package activator.typesafeproxy
import java.util.concurrent.TimeoutException
import akka.actor._
import play.api.Play.current
import play.api.libs.json._
import play.api.libs.ws._
import activator.HttpHelper
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.{ Failure, Success, Try }
object SubscriptionDataActor {
  // Fetch callback: performs the subscription-data request and reports the
  // result to the given actor.
  type DoGetSubscriptionData = (AuthenticationStates.AuthenticationData, ActorRef) => Unit

  sealed trait Notification

  /**
   * Fetches subscriber data over HTTP and sends the Try-wrapped result to
   * sendTo. HTTP 200 parses the JSON body; 401 maps to
   * ProxyInvalidCredentials; any other status to ProxyFailure. Transport
   * failures map timeouts to ProxyTimeout and everything else to ProxyFailure.
   */
  def httpGetSubscriptionData(subscriptionDataUrl: String, timeout: FiniteDuration, executionContext: ExecutionContext)(authentication: AuthenticationStates.AuthenticationData, sendTo: ActorRef): Unit = {
    implicit val ec = executionContext
    import SubscriberData._

    def respondWith(result: Try[SubscriberData]): Unit = sendTo ! result

    val request = HttpHelper.proxyHolder(WS.url(subscriptionDataUrl)
      .withHeaders("Accept" -> "application/json",
        "Cookie" -> authentication.toString)
      .withRequestTimeout(timeout.toMillis.intValue))

    request.get() onComplete {
      case Success(response) =>
        val outcome: Try[SubscriberData] = response.status match {
          case 200 =>
            Try(Json.fromJson[SubscriberData](Json.parse(response.body)).get)
          case 401 => Failure(new ProxyInvalidCredentials("Invalid login credentials"))
          case status => Failure(new ProxyFailure(s"Unknown response code: $status"))
        }
        respondWith(outcome)
      case Failure(exception) =>
        val mapped: Try[SubscriberData] = exception match {
          case x: TimeoutException => Failure(new ProxyTimeout(s"Fetching subscriber data exceeded timeout ${timeout}", x))
          case e => Failure(new ProxyFailure(s"Failed to fetch subscriber data: ${e.getMessage}", e))
        }
        respondWith(mapped)
    }
  }

  /** Props factory for SubscriptionDataActor. */
  def props(doGetSubscriptionData: SubscriptionDataActor.DoGetSubscriptionData,
    uiActorProps: ActorRef => Props,
    version: Long,
    replyTo: ActorRef,
    websocketActor: ActorRef): Props =
    Props(new SubscriptionDataActor(doGetSubscriptionData, uiActorProps, version, replyTo, websocketActor))
}
// State machine (via context.become): getAuthentication -> awaitAuthentication
// -> runRequest -> (success: doStop | failure: onFailure with a retry
// continuation). Results are pushed to `replyTo` (the proxy cache) and
// progress/failure notices to the UI actor.
class SubscriptionDataActor(doGetSubscriptionData: SubscriptionDataActor.DoGetSubscriptionData,
  uiActorProps: ActorRef => Props,
  version: Long,
  replyTo: ActorRef,
  websocketActor: ActorRef) extends Actor with ActorLogging {
  import TypesafeComProxy._
  // UI-facing child actor used to report actions and failures over the websocket.
  private final val uiActor: ActorRef = context.actorOf(uiActorProps(websocketActor))
  // Record a user-canceled fetch and move to the terminal state.
  def cancel(message: String = "Fetching subscription data canceled by user"): Unit = {
    replyTo ! SubscriberDetail.Put(Failure(new ProxyCanceled(message)), version, self)
    context.become(doStop())
  }
  // After a reported failure: Cancel aborts, Retry runs the supplied continuation.
  def onFailure(onRetry: () => Unit): Receive = {
    case UIActor.Cancel => cancel()
    case UIActor.Retry => onRetry()
  }
  // Terminal state. NOTE: the watch + PoisonPill side effects run when this
  // Receive is *constructed*, i.e. at the context.become(doStop()) call site;
  // the returned partial function then waits for the UI actor's Terminated.
  def doStop(): Receive = {
    context.watch(uiActor)
    uiActor ! PoisonPill
    {
      case Terminated(`uiActor`) =>
        context stop self
    }
  }
  // Route a completed fetch: publish the detail, reset auth on credential
  // problems, offer retry on timeouts, and report unknown failures as final.
  // NOTE(review): the endReport parameter is currently unused in this body.
  def handleResult(auth: AuthenticationStates.Authenticated, result: Try[SubscriberData], endReport: UIActor.ReportEndAction, authVersion: Long): Unit = result match {
    case x @ Success(_: SubscriberData.Detail) =>
      replyTo ! SubscriberDetail.Put(x, version, self)
      context.become(doStop())
    case x @ Success(_) =>
      // Non-Detail payload: treat the session as stale — reset authentication
      // and publish the result without caching credentials.
      replyTo ! Authentication.Put(Failure(new ProxyInvalidCredentials("Resetting authentication")), authVersion, self)
      replyTo ! SubscriberDetail.Put(x, version, self, false)
      context.become(doStop())
    case Failure(e: ProxyInvalidCredentials) =>
      // Bad credentials: invalidate the cached auth and let the user retry
      // (which restarts from re-authentication).
      replyTo ! Authentication.Put(Failure(e), authVersion, self)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      context.become(onFailure(() => context.become(getAuthentication())))
    case Failure(e: ProxyTimeout) =>
      // Timeout: keep the existing auth and let the user retry just the request.
      log.error("Unable to fetch subscriber data", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      context.become(onFailure(() => context.become(runRequest(auth, authVersion))))
    case x @ Failure(e) =>
      // Anything else is non-retryable: publish the failure and stop.
      log.error("Unknown exception during fetching subscriber data: ", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = false)
      replyTo ! SubscriberDetail.Put(x, version, self)
      context.become(doStop())
  }
  // Kick off the fetch (side effects run at Receive construction) and wait
  // for either a cancel or the Try[SubscriberData] sent back by the fetcher.
  def runRequest(auth: AuthenticationStates.Authenticated, authVersion: Long): Receive = {
    val actionId: String = UIActor.genActionId()
    doGetSubscriptionData(auth.authenticationData, self)
    val startReport = UIActor.CancelableRequests.ReportStartAction("Fetching subscription data", actionId, self)
    uiActor ! startReport
    val endReport = startReport.endReport()
    {
      case UIActor.Cancel =>
        uiActor ! endReport
        cancel()
      case x @ Success(_: SubscriberData) =>
        uiActor ! endReport
        handleResult(auth, x.asInstanceOf[Success[SubscriberData]], endReport, authVersion)
      case x: Failure[_] =>
        uiActor ! endReport
        handleResult(auth, x.asInstanceOf[Failure[SubscriberData]], endReport, authVersion)
    }
  }
  // Wait for the proxy cache to answer the Authentication.Get issued below.
  def awaitAuthentication(): Receive = {
    case Authentication.Value(Success(x: AuthenticationStates.Authenticated), authVersion) =>
      context.become(runRequest(x, authVersion))
    case Authentication.Value(Failure(e), authVersion) =>
      replyTo ! SubscriberDetail.Put(Failure(e), version, self)
      context.become(doStop())
  }
  // Request authentication (side effect at construction) then await it.
  def getAuthentication(): Receive = {
    replyTo ! Authentication.Get(self, websocketActor)
    awaitAuthentication()
  }
  def receive: Receive = getAuthentication()
}
|
eed3si9n/activator
|
integration-tests/src/main/scala/activator/tests/IntegrationTest.scala
|
<filename>integration-tests/src/main/scala/activator/tests/IntegrationTest.scala
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
package tests
/** Base class for integration tests. */
// Relies on DelayedInit: the subclass's constructor body (the test itself) is
// handed to delayedInit as a thunk and only executed from run(), so the sbt
// launcher controls when the test actually runs. Initialization order here is
// deliberate — do not restructure.
abstract class IntegrationTest extends DelayedInit with xsbti.AppMain {
  // Junk to make delayed init work.
  private var _config: xsbti.AppConfiguration = null
  private var _test: () => Unit = null
  // Stash the subclass body; it runs later inside run().
  final def delayedInit(x: => Unit): Unit = _test = () => x
  /** Returns the current sbt launcher configuration for the test. */
  final def configuration: xsbti.AppConfiguration = _config
  // Runs our test, we hardcode this to return success in the absence of failure, so we can use
  // classic exceptions to fail an integration test.
  final def run(configuration: xsbti.AppConfiguration): xsbti.MainResult =
    try withContextClassloader {
      _config = configuration
      _test()
      // IF we don't throw an exception, we've succeeded
      Success
    } catch {
      case t: Exception =>
        t.printStackTrace()
        Failure
    }
  // Minimal URI escaping: only spaces are expected in these file paths.
  private def cleanUriFileString(file: String): String =
    file.replaceAll(" ", "%20")
  /** Return a process builder that will run SNAP in a directory with the given args. */
  final def run_activator(args: Seq[String], cwd: java.io.File): sys.process.ProcessBuilder = {
    // TODO - pass on all props...
    val fullArgs = Seq(
      "java",
      "-Dsbt.boot.directory=" + sys.props("sbt.boot.directory"),
      "-Dactivator.home=" + cleanUriFileString(sys.props("activator.home")),
      "-jar",
      activator.properties.ActivatorProperties.ACTIVATOR_LAUNCHER_JAR(null)) ++ args
    sys.process.Process(fullArgs, cwd)
  }
}
|
eed3si9n/activator
|
project/LocalTemplateRepo.scala
|
<reponame>eed3si9n/activator
import sbt._
import ActivatorBuild._
import Keys._
// Build helper that seeds a local template cache from the remote Typesafe
// template repository. NOTE: uses legacy sbt `<<=`/`map` setting macros,
// which are position sensitive — keep statement layout as-is.
object LocalTemplateRepo {
  // TODO - We can probably move this to its own project, to more clearly delineate that the UI uses these
  // for local testing....
  val localTemplateCache = settingKey[File]("target directory for local template cache")
  val localTemplateCacheCreated = taskKey[File]("task which creates local template cache")
  val remoteTemplateCacheUri = settingKey[String]("base URI to get template cache from")
  val localTemplateCacheHash = settingKey[String]("which index from the remote URI to seed the cache from")
  val latestTemplateCacheHash = taskKey[String]("get the latest template cache hash from the remote URI")
  val checkTemplateCacheHash = taskKey[String]("throw if our configured template cache hash is not the latest, otherwise return the local (and latest) hash")
  val enableCheckTemplateCacheHash = settingKey[Boolean]("true to enable checking we have latest cache before we publish")
  val overrideWithTemplates = settingKey[String]("templates names to use separated by ,")
  def settings: Seq[Setting[_]] = Seq(
    localTemplateCache <<= target(_ / "template-cache"),
    // Comma-separated subset of templates to seed locally.
    overrideWithTemplates := "minimal-akka-java-seed, " +
      "minimal-akka-scala-seed, " +
      "minimal-java, " +
      "minimal-scala, " +
      "play-java, " +
      "play-scala, " +
      "hello-akka, " +
      "hello-scala, " +
      "hello-slick-3.0, " +
      "reactive-stocks",
    localTemplateCacheCreated <<= (localTemplateCache, localTemplateCacheHash, Keys.fullClasspath in Runtime, remoteTemplateCacheUri, streams, overrideWithTemplates) map makeTemplateCache,
    scalaVersion := Dependencies.scalaVersion,
    libraryDependencies += Dependencies.templateCache,
    // TODO - Allow debug version for testing?
    remoteTemplateCacheUri := "http://downloads.typesafe.com/typesafe-activator",
    // Pinned index hash; checkTemplateCacheHash verifies it is still current.
    localTemplateCacheHash := "a55974273d2c517fbcaacd4a9f9b7da29c218d4c",
    latestTemplateCacheHash := downloadLatestTemplateCacheHash(remoteTemplateCacheUri.value, streams.value),
    checkTemplateCacheHash := {
      if (enableCheckTemplateCacheHash.value)
        checkLatestTemplateCacheHash(localTemplateCacheHash.value,
          latestTemplateCacheHash.value)
      else
        localTemplateCacheHash.value
    },
    enableCheckTemplateCacheHash := true
  )
  // Runs the cache-seed generator's main() reflectively inside an isolated
  // classloader built from the runtime classpath.
  def invokeTemplateCacheRepoMakerMain(cl: ClassLoader, dir: File, uri: String, templates: String): Unit =
    invokeMainFor(
      cl,
      "activator.templates.TemplateCacheSeedGenerator",
      Array(
        "-remote", uri,
        "-file", dir.getAbsolutePath,
        "-templates", templates))
  // Parent classloader is null so only the given classpath is visible.
  private def makeClassLoaderFor(classpath: Keys.Classpath): java.net.URLClassLoader = {
    val jars = classpath map (_.data.toURL)
    new java.net.URLClassLoader(jars.toArray, null)
  }
  private def invokeMainFor(cl: ClassLoader, mainClass: String, args: Array[String]): Unit = {
    println("Loading " + mainClass + " from: " + cl)
    val maker = cl.loadClass(mainClass)
    println("Invoking object: " + maker)
    val mainMethod = maker.getMethod("main", classOf[Array[String]])
    println("Invoking maker: " + maker)
    mainMethod.invoke(null, args)
  }
  // Creates (or reuses) the local cache directory for `hash`; deletes a stale
  // cache first, and deletes the partial directory on any failure.
  def makeTemplateCache(targetDir: File, hash: String, classpath: Keys.Classpath, uri: String, streams: TaskStreams, templates: String): File = {
    val cachePropsFile = targetDir / "cache.properties"
    // Delete stale cache.
    if (cachePropsFile.exists) {
      val oldHash = readHashFromProps(cachePropsFile)
      if (oldHash != hash) {
        streams.log.info(s"Deleting old template cache $oldHash to create new one $hash")
        IO.delete(targetDir)
      }
    }
    if (targetDir.exists) {
      streams.log.info(s"Template cache $hash appears to exist already")
    } else try {
      streams.log.info(s"Downloading template cache $hash")
      IO createDirectory targetDir
      // Important: Never _overwrite_ this file without
      // also deleting the index, because
      // activator-template-cache assumes the hash goes with
      // the index we have.
      IO.write(targetDir / "cache.properties", "cache.hash=" + hash + "\n")
      val cl = makeClassLoaderFor(classpath)
      // Akka requires this crazy
      val old = Thread.currentThread.getContextClassLoader
      Thread.currentThread.setContextClassLoader(cl)
      try invokeTemplateCacheRepoMakerMain(cl, targetDir, uri, templates)
      finally Thread.currentThread.setContextClassLoader(old)
    } catch {
      case ex: Exception =>
        IO delete targetDir
        throw ex
    }
    targetDir
  }
  // Reads cache.hash from a java.util.Properties file; fails if absent.
  def readHashFromProps(propsFile: File): String = {
    val fis = new java.io.FileInputStream(propsFile)
    try {
      val props = new java.util.Properties
      props.load(fis)
      Option(props.getProperty("cache.hash")).getOrElse(sys.error(s"No cache.hash in ${propsFile}"))
    } finally {
      fis.close()
    }
  }
  // IO.download appears to use caching and we need the latest here
  def downloadWithoutCaching(url: URL, toFile: File): Unit = {
    import java.net.HttpURLConnection
    val connection = url.openConnection() match {
      case http: HttpURLConnection =>
        http.setUseCaches(false)
        http
      case whatever =>
        throw new Exception("Got weird non-http connection " + whatever)
    }
    if (connection.getResponseCode() != 200)
      sys.error(s"Response code ${connection.getResponseCode()} from ${url}")
    Using.bufferedInputStream(connection.getInputStream()) { in =>
      IO.transfer(in, toFile)
    }
  }
  // Fetches current.properties from the remote index and returns its hash.
  def downloadLatestTemplateCacheHash(uriString: String, streams: TaskStreams): String = {
    IO.withTemporaryDirectory { tmpDir =>
      // this is cut-and-pastey/hardcoded vs. activator-template-cache,
      // the main problem with that is that it uses http instead of the
      // S3 API and therefore gets stale cached content.
      val propsFile = tmpDir / "current.properties"
      val url = new URL(uriString + "/index/v2/current.properties")
      streams.log.info(s"Downloading ${url} to ${propsFile}")
      downloadWithoutCaching(url, propsFile)
      val hash = readHashFromProps(propsFile)
      streams.log.info(s"Got latest template cache hash $hash")
      hash
    }
  }
  // Guard used before publishing: fail if our pinned hash is out of date.
  def checkLatestTemplateCacheHash(ourHash: String, latestHash: String): String = {
    if (ourHash != latestHash)
      sys.error(s"The latest template index is ${latestHash} but our configured index is ${ourHash} (if you want to override this, `set LocalTemplateRepo.enableCheckTemplateCacheHash := false` perhaps)")
    else
      ourHash
  }
}
|
eed3si9n/activator
|
ui/test/activator/typesafeproxy/ActivatorLatestActorTest.scala
|
<gh_stars>0
package activator.typesafeproxy
import akka.actor._
import org.junit.Assert._
import org.junit._
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.{ Failure, Success, Try }
object ActivatorLatestActorTest {
  // Canned fetch outcomes used to drive the actor under test.
  val info = Success(ActivatorLatestInfo.exampleDetail())
  def failure(error: Throwable): Try[ActivatorLatestInfo] = Failure(error)
  val canceled = failure(new ProxyCanceled("canceled"))
  val failed = failure(new ProxyFailure("fail"))
  val invalid = failure(new ProxyInvalidCredentials("invalid"))
  val timeout = failure(new ProxyTimeout("timeout"))

  // Asserts that x is an instance of T, with a readable failure message.
  def assertTypeOf[T](x: Any)(implicit ct: ClassTag[T]): Unit =
    if (!ct.runtimeClass.isInstance(x))
      throw new AssertionError(s"Value '$x' of type ${x.getClass.getName} is not of type ${ct.runtimeClass.getName}", null)
}
class ActivatorLatestActorTest extends DefaultSpecification {
  import ActivatorLatestActorTest._
  import TypesafeComProxy._

  @Test
  def testShouldSucceed(): Unit = withHelper { helper =>
    import helper._
    withActivatorLatestActor(activatorLatestResult(info)) { ia =>
      // The actor emits a Put alongside its start/end action reports; pull
      // the Put out of the batch and check it succeeded.
      val messages = expectMsgAllClassOf(classOf[ActivatorInfo.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction])
      val put = messages.collectFirst { case p: ActivatorInfo.Put => p }.get
      assertTrue(put.value.isSuccess)
    }
  }

  @Test
  def testHandleTimeoutRetry(): Unit = withHelper { helper =>
    import helper._
    val (resultRef, fetch) = mutableActivatorLatestResult(timeout)
    withActivatorLatestActor(fetch) { ia =>
      // First round times out and is reported as a retryable failure...
      expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
      // ...then flip the canned result to success and retry.
      resultRef.set(info)
      ia ! UIActor.Retry
      val messages = expectMsgAllClassOf(classOf[ActivatorInfo.Put], classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction])
      val put = messages.collectFirst { case p: ActivatorInfo.Put => p }.get
      assertTrue(put.value.isSuccess)
    }
  }

  @Test
  def testHandleTimeoutCancel(): Unit = withHelper { helper =>
    import helper._
    val (resultRef, fetch) = mutableActivatorLatestResult(timeout)
    withActivatorLatestActor(fetch) { ia =>
      // Timeout is reported, then the user cancels: the published Put must
      // carry a ProxyCanceled failure.
      expectMsgAllClassOf(classOf[UIActor.WebSocket.ReportStartAction], classOf[UIActor.WebSocket.ReportEndAction], classOf[UIActor.WebSocket.Failure])
      ia ! UIActor.Cancel
      val failure = expectMsgType[ActivatorInfo.Put]
      assertTypeOf[ProxyCanceled](failure.value.failed.get)
    }
  }
}
|
eed3si9n/activator
|
ui/app/console/handler/LifecycleHandler.scala
|
<filename>ui/app/console/handler/LifecycleHandler.scala
/**
* Copyright (C) 2016 Lightbend, Inc <http://www.lightbend.com>
*/
package console
package handler
import akka.actor.{ ActorRef, Props }
/** Companion for [[LifecycleHandler]]: Props factory and command ADT. */
object LifecycleHandler {
  def props(repository: AnalyticsRepository): Props =
    Props(classOf[LifecycleHandler], repository)

  // Module-information payload carrying the parsed lifecycle command.
  case class LifecycleModuleInfo(command: Command) extends ModuleInformationBase

  // Commands the lifecycle handler understands.
  sealed trait Command
  case object ResetCommand extends Command

  // NOTE(review): every input string currently maps to ResetCommand — the
  // match is degenerate; presumably more commands are expected later.
  def extractCommand(command: String): Command = command match {
    case _ => ResetCommand
  }
}
/** Shared handling of lifecycle commands; mixed into the concrete handler. */
trait LifecycleHandlerBase extends RequestHandler[LifecycleHandler.LifecycleModuleInfo] {
  import LifecycleHandler._

  // Applies the parsed command; ResetCommand wipes the lifecycle repository.
  // The catch-all branch is currently unreachable (Command is sealed with a
  // single case) but guards against future additions.
  def onModuleInformation(sender: ActorRef, mi: LifecycleModuleInfo): Unit = {
    mi.command match {
      case ResetCommand =>
        repository.lifecycleRepository.clear()
      case _ => log.error(s"Unknown lifecycle command: ${mi.command}")
    }
  }
}

class LifecycleHandler(val repository: AnalyticsRepository) extends LifecycleHandlerBase
|
eed3si9n/activator
|
project/plugins.sbt
|
// Source formatting via Scalariform.
addSbtPlugin("com.typesafe.sbt" % "sbt-scalariform" % "1.2.0")
// License report generation for dependencies.
addSbtPlugin("com.typesafe.sbt" % "sbt-license-report" % "0.1")
// Asset pipeline: LESS compilation and JSHint linting.
addSbtPlugin("com.typesafe.sbt" % "sbt-less" % "1.0.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-jshint" % "1.0.2")
// Required by Echo
addSbtPlugin("com.typesafe.sbt" % "sbt-cotest" % "0.1.0")
|
eed3si9n/activator
|
ui/app/activator/typesafeproxy/ActivatorLatestActor.scala
|
package activator.typesafeproxy
import java.util.concurrent.TimeoutException
import akka.actor._
import play.api.Play.current
import play.api.libs.json._
import play.api.libs.ws._
import activator.HttpHelper
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
import scala.util.{ Try, Failure, Success }
/** Companion: HTTP fetch of "latest Activator version" info plus Props factory. */
object ActivatorLatestActor {
  /** Callback that starts a fetch and eventually sends a
    * Try[ActivatorLatestInfo] message to the supplied actor.
    */
  type DoGetActivatorLatest = ActorRef => Unit

  sealed trait Notification

  /** Fetches `activatorLatestUrl` over HTTP (through the configured proxy) and
    * messages `sendTo` with the outcome. A 200 body is parsed as JSON; any
    * other status or transport error becomes a Failure (ProxyTimeout for
    * request timeouts, ProxyFailure otherwise) — nothing is thrown to callers.
    */
  def httpGetActivatorLatest(activatorLatestUrl: String, timeout: FiniteDuration, executionContext: ExecutionContext)(sendTo: ActorRef): Unit = {
    implicit val ec = executionContext
    def respondWith(result: Try[ActivatorLatestInfo]): Unit = sendTo ! result
    val req = HttpHelper.proxyHolder(WS.url(activatorLatestUrl)
      .withRequestTimeout(timeout.toMillis.intValue))
    req.get() onComplete {
      case Success(response) => respondWith {
        response.status match {
          case 200 =>
            // .get throws inside the Try when the payload doesn't deserialize
            // to ActivatorLatestInfo — intentionally converted to a Failure.
            Try(Json.fromJson[ActivatorLatestInfo](Json.parse(response.body)).get)
          case status =>
            Failure(new ProxyFailure(s"Unexpected response code: $status"))
        }
      }
      case Failure(exception) => respondWith {
        exception match {
          case x: TimeoutException => Failure(new ProxyTimeout(s"Fetching Activator info exceeded timeout ${timeout}", x))
          case e => Failure(new ProxyFailure(s"Failed to fetch Activator info: ${e.getMessage}", e))
        }
      }
    }
  }

  def props(doGetActivatorLatest: ActivatorLatestActor.DoGetActivatorLatest,
    uiActorProps: ActorRef => Props,
    version: Long,
    replyTo: ActorRef,
    websocketActor: ActorRef): Props =
    Props(new ActivatorLatestActor(doGetActivatorLatest, uiActorProps, version, replyTo, websocketActor))
}
/** One-shot actor: fetches the "latest Activator" info, reports progress to a
  * UI child actor, replies to `replyTo` with an ActivatorInfo.Put, then stops.
  *
  * State machine: runRequest -> awaitResults -> (done | onFailure -> retry/cancel) -> doStop.
  */
class ActivatorLatestActor(doGetActivatorLatest: ActivatorLatestActor.DoGetActivatorLatest,
  uiActorProps: ActorRef => Props,
  version: Long,
  replyTo: ActorRef,
  websocketActor: ActorRef) extends Actor with ActorLogging {
  import TypesafeComProxy._

  // UI-facing child used for progress reporting and retry/cancel prompts.
  private final val uiActor: ActorRef = context.actorOf(uiActorProps(websocketActor))

  // Reply with a canceled failure and begin shutting down.
  def cancel(message: String = "Fetching Activator info canceled by user"): Unit = {
    replyTo ! ActivatorInfo.Put(Failure(new ProxyCanceled(message)), version, self)
    context.become(doStop())
  }

  // After a retryable failure: wait for the user to retry or cancel.
  def onFailure(onRetry: () => Unit): Receive = {
    case UIActor.Cancel => cancel()
    case UIActor.Retry => onRetry()
  }

  // Poison the UI child and stop ourselves once its death is confirmed.
  def doStop(): Receive = {
    context.watch(uiActor)
    uiActor ! PoisonPill
    {
      case Terminated(`uiActor`) =>
        context stop self
    }
  }

  // Routes a completed fetch. Success and non-timeout failures are final (a
  // Put is sent and we stop); timeouts are retryable and do NOT reply yet.
  def handleResult(result: Try[ActivatorLatestInfo]): Unit = result match {
    case x @ Success(_) =>
      replyTo ! ActivatorInfo.Put(x, version, self)
      context.become(doStop())
    case Failure(e: ProxyTimeout) =>
      log.error("Unable to fetch latest Activator version information", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = true)
      context.become(onFailure(() => context.become(runRequest())))
    case x @ Failure(e) =>
      log.error("Unknown exception fetching Activator info", e)
      uiActor ! UIActor.RetryableRequests.Failure(e.getMessage, self, retryable = false)
      replyTo ! ActivatorInfo.Put(x, version, self)
      context.become(doStop())
  }

  // Waiting for the fetch result (or a user cancel); always closes the
  // progress report before acting on the outcome.
  def awaitResults(endReport: UIActor.ReportEndAction): Receive = {
    case UIActor.Cancel =>
      uiActor ! endReport
      cancel()
    case x @ Success(_: ActivatorLatestInfo) =>
      uiActor ! endReport
      handleResult(x.asInstanceOf[Success[ActivatorLatestInfo]])
    case x: Failure[_] =>
      uiActor ! endReport
      handleResult(x.asInstanceOf[Failure[ActivatorLatestInfo]])
  }

  // Kicks off a fetch and reports the start of a cancelable action to the UI.
  def runRequest(): Receive = {
    val actionId: String = UIActor.genActionId()
    doGetActivatorLatest(self)
    val startReport = UIActor.CancelableRequests.ReportStartAction("Fetching Activator Latest Info", actionId, self)
    uiActor ! startReport
    awaitResults(startReport.endReport())
  }

  def receive: Receive = {
    runRequest()
  }
}
|
eed3si9n/activator
|
project/news.scala
|
import sbt._
import Keys._
import com.typesafe.sbt.S3Plugin._
/** sbt helpers for publishing the Activator "news" HTML as a JSONP file to S3.
  *
  * `publish-news <version>` switches the session to the `news` project, forces
  * the news version, and uploads the generated `news.jsonp` to
  * downloads.typesafe.com.
  */
object NewsHelper {
  val newsVersion = SettingKey[String]("news-version")
  val newsFile = SettingKey[File]("activator-news-file")
  val jsonpNewsFile = SettingKey[File]("activator-jsonp-news-file")
  val jsonpNewsFileCreated = TaskKey[File]("activator-jsonp-news-file-created")
  val publishNews = InputKey[Unit]("publish-news")

  /** Wraps the news HTML in a `setNewsJson(...)` JSONP callback and writes it
    * to `jsonpFile`, creating parent directories as needed.
    */
  def generateJsonp(htmlFile: File, jsonpFile: File): File = {
    val html = IO read htmlFile
    import scala.util.parsing.json._
    val json = JSONObject(Map(
      "html" -> html
    ))
    val jsonp = "setNewsJson(" + json + ");"
    if (!jsonpFile.getParentFile.exists) jsonpFile.getParentFile.mkdirs()
    IO.write(jsonpFile, jsonp)
    jsonpFile
  }

  import sbt.complete.DefaultParsers._
  import sbt.complete.Parser

  // Accepts any run of non-space characters as the version argument.
  def versionParser: Parser[String] = NotSpace.* map (_ mkString "")
  def publishParser = Space ~> token(versionParser, "<version>")

  /** Runs the S3 upload with the given news version injected. */
  def publishNewsVersion(v: String, state: State): Unit = {
    val state2 = injectVersion(switchToNewsProject(state), v)
    val extracted = Project.extract(state2)
    // We only care about the upload side effect; the resulting state is
    // discarded (the original kept an unused `val result` here).
    extracted.runTask(S3.upload in extracted.currentRef, state2)
    ()
  }

  /** Makes the `news` subproject the current project in the session. */
  def switchToNewsProject(state: State): State = {
    val session = Project.session(state)
    val newProj = session.setCurrent(session.currentBuild, "news", session.currentEval)
    Project.setProject(newProj, Project.structure(state), state)
  }

  /** Re-applies the build settings with `newsVersion in Global` forced to `v`. */
  def injectVersion(state: State, v: String): State = {
    val session = Project.session(state)
    //TODO - hack for news project!
    val structure = Project.structure(state)
    val newSettings: Seq[Setting[_]] =
      (newsVersion in Global := v)
    implicit val display = Project.showContextKey(state)
    val newStructure = Load.reapply(
      session.original ++ newSettings,
      structure)
    Project.setProject(session, newStructure, state)
  }

  def settings: Seq[Setting[_]] =
    s3Settings ++
      Seq[Setting[_]](
        newsFile <<= baseDirectory apply (_ / "news.html"),
        jsonpNewsFile <<= target apply (_ / "news.jsonp"),
        jsonpNewsFileCreated <<= (newsFile, jsonpNewsFile) map generateJsonp,
        newsVersion in Global <<= version,
        mappings in S3.upload <<= (jsonpNewsFileCreated, newsVersion) map { (news, v) =>
          // The template has a single %s; the original passed `format (v, v)`
          // and String.format silently ignored the extra argument.
          Seq(news -> ("typesafe-activator/%s/news.js" format v))
        },
        S3.host in S3.upload := "downloads.typesafe.com.s3.amazonaws.com",
        S3.progress in S3.upload := true,
        publishNews <<= InputTask(_ => publishParser) { v =>
          (v, state) map publishNewsVersion
        }
      )
}
|
eed3si9n/activator
|
ui/app/activator/AppActor.scala
|
/**
* Copyright (C) 2016 Lightbend <http://www.lightbend.com/>
*/
package activator
import akka.actor._
import java.io.File
import akka.util.Timeout
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scala.concurrent.duration._
import play.api.libs.json._
import sbt.client._
import sbt.protocol._
// Protocol: messages an AppActor accepts.
sealed trait AppRequest

case object GetWebSocketCreated extends AppRequest
case object CreateWebSocket extends AppRequest
// Push a JSON event to the app's websocket (must carry a "type" field).
case class NotifyWebSocket(json: JsObject) extends AppRequest
// Fired once after startup; kills apps nobody ever connected to.
case object InitialTimeoutExpired extends AppRequest
case class UpdateSourceFiles(files: Set[File]) extends AppRequest
case object ReloadSbtBuild extends AppRequest
case class OpenClient(client: SbtClient) extends AppRequest
case object CloseClient extends AppRequest
case object ProjectFilesChanged extends AppRequest

// requests that need an sbt client
sealed trait ClientAppRequest extends AppRequest {
  def serialId: Long
  def command: Option[String] = None
}
case class RequestExecution(serialId: Long, override val command: Option[String]) extends ClientAppRequest
case class CancelExecution(serialId: Long, executionId: Long) extends ClientAppRequest
case class PossibleAutoCompletions(serialId: Long, override val command: Option[String], detailLevel: Option[Int] = None) extends ClientAppRequest
case class RequestSelfDestruct(serialId: Long) extends ClientAppRequest

// Protocol: replies an AppActor sends back.
sealed trait AppReply

case class SbtClientResponse(serialId: Long, result: Any, command: Option[String] = None) extends AppReply
case object WebSocketAlreadyUsed extends AppReply
case class WebSocketCreatedReply(created: Boolean) extends AppReply

class InstrumentationRequestException(message: String) extends Exception(message)
/** Supervises everything attached to one open application: the sbt connection,
  * the websocket to the browser, and the project file watcher.
  *
  * Lifecycle: starts in `preprocess` (stashing requests) until the project
  * preprocessor finishes, then switches to `running`. Dies when the websocket
  * or project watcher dies, when nobody connects within the initial timeout,
  * or when the sbt connector gives up reconnecting.
  */
class AppActor(val config: AppConfig,
  val typesafeComActor: ActorRef,
  val lookupTimeout: Timeout,
  val projectPreprocessor: (ActorRef, ActorRef, AppConfig) => Unit) extends Actor with Stash with ActorLogging {

  AppManager.registerKeepAlive(self)

  def location = config.location

  log.debug(s"Creating AppActor for $location")

  // Requests that arrived before an sbt client was available, with their senders.
  var pending = Vector.empty[(ActorRef, ClientAppRequest)]

  // TODO configName/humanReadableName are cut-and-pasted into AppManager, fix
  val connector = SbtConnector(configName = "activator", humanReadableName = "Activator", location)
  val socket = context.actorOf(Props(new AppWebSocketActor(config, typesafeComActor, lookupTimeout)), name = "socket")
  val projectWatcher = context.actorOf(Props(new ProjectWatcher(location, newSourcesSocket = socket, appActor = self)),
    name = "projectWatcher")

  var sbtClientActor: Option[ActorRef] = None
  var clientCount = 0
  var webSocketCreated = false

  context.watch(socket)
  context.watch(projectWatcher)

  // we can stay alive due to socket connection (and then die with the socket)
  // or else we just die after being around a short time
  context.system.scheduler.scheduleOnce(2.minutes, self, InitialTimeoutExpired)

  override val supervisorStrategy = SupervisorStrategy.stoppingStrategy

  @volatile
  var startedConnecting = System.currentTimeMillis()

  log.debug("Opening SbtConnector")
  connector.open({ client =>
    val now = System.currentTimeMillis()
    val delta = now - startedConnecting
    log.debug(s"Opened connection to sbt for ${location} AppActor=${self.path.name} after ${delta}ms (${delta.toDouble / 1000.0}s)")
    produceLog(LogMessage.DEBUG, s"Opened sbt at '${location}'")
    self ! OpenClient(client)
  }, { (reconnecting, message) =>
    startedConnecting = System.currentTimeMillis()
    log.debug(s"Connection to sbt closed (reconnecting=${reconnecting}: ${message})")
    produceLog(LogMessage.INFO, s"Lost or failed sbt connection: ${message}")
    self ! CloseClient
    if (!reconnecting) {
      log.debug(s"SbtConnector gave up and isn't reconnecting; killing AppActor ${self.path.name}")
      self ! PoisonPill
    }
  })

  def produceLog(level: String, message: String): Unit = {
    // self can be null after we are destroyed
    val selfCopy = self
    if (selfCopy != null)
      selfCopy ! NotifyWebSocket(SbtProtocol.synthesizeLogEvent(level, message))
  }

  // Forwards validated NotifyWebSocket events to the socket, in front of `body`.
  private final def handleNotify(body: Receive): Receive =
    ({
      case notify: NotifyWebSocket =>
        if (validateEvent(notify.json)) {
          socket.forward(notify)
        } else {
          log.error("Attempt to send invalid event {}", notify.json)
        }
    }: Receive) orElse body

  // Handles child death: socket or projectWatcher death kills us; client
  // death is just dropped (the connector will reconnect).
  private final def handleTerminate(body: Receive): Receive =
    ({
      case Terminated(ref) =>
        if (ref == socket) {
          log.debug(s"socket terminated, killing AppActor ${self.path.name}")
          // Fail anything still waiting on an sbt client; the app is going
          // away. (This notification previously lived in a duplicate
          // `ref == socket` branch below and was unreachable.)
          for (p <- pending) p._1 ! Status.Failure(new RuntimeException("app shut down"))
          pending = Vector.empty
          self ! PoisonPill
        } else if (ref == projectWatcher) {
          log.debug(s"projectWatcher terminated, killing AppActor ${self.path.name}")
          self ! PoisonPill
        } else if (Some(ref) == sbtClientActor) {
          log.debug(s"clientActor terminated, dropping it")
          sbtClientActor = None
        }
    }: Receive) orElse body

  private final def handleCommon(body: Receive): Receive = handleNotify(handleTerminate(body))

  // Normal operation after preprocessing completes.
  private final def running: Receive = handleCommon {
    case req: AppRequest => req match {
      case GetWebSocketCreated =>
        sender ! WebSocketCreatedReply(webSocketCreated)
      case CreateWebSocket =>
        log.debug("got CreateWebSocket")
        if (webSocketCreated) {
          log.debug("Attempt to create websocket for app a second time {}", config.id)
          sender ! WebSocketAlreadyUsed
        } else {
          webSocketCreated = true
          socket.tell(GetWebSocket, sender)
        }
      case InitialTimeoutExpired =>
        if (!webSocketCreated) {
          log.debug("Nobody ever connected to {}, killing it", config.id)
          self ! PoisonPill
        }
      case UpdateSourceFiles(files) =>
        projectWatcher ! SetSourceFilesRequest(files)
      case ReloadSbtBuild =>
        sbtClientActor.foreach(_ ! RequestSelfDestruct(AppActor.playInternalSerialId))
        context.become(preprocess)
      case ProjectFilesChanged =>
        self ! NotifyWebSocket(AppActor.projectFilesChanged)
      case OpenClient(client) =>
        log.debug(s"Old client actor was ${sbtClientActor}")
        sbtClientActor.foreach(_ ! PoisonPill) // shouldn't happen - paranoia
        log.debug(s"Opening new client actor for sbt client ${client}")
        clientCount += 1
        self ! NotifyWebSocket(AppActor.clientOpenedJsonEvent)
        sbtClientActor = Some(context.actorOf(SbtClientActor.props(client), name = s"client-$clientCount"))
        sbtClientActor.foreach(context.watch(_))
        flushPending()
      case CloseClient =>
        log.debug(s"Closing client actor ${sbtClientActor}")
        sbtClientActor.foreach(_ ! PoisonPill) // shouldn't be needed - paranoia
        sbtClientActor = None
        self ! NotifyWebSocket(AppActor.clientClosedJsonEvent)
      case r: ClientAppRequest =>
        pending = pending :+ (sender -> r)
        flushPending()
        if (pending.nonEmpty) {
          produceLog(LogMessage.DEBUG, s"request pending until connection to sbt opens: ${r}")
        }
    }
  }

  // Initial state: run the preprocessor, stash app requests until it finishes.
  private final def preprocess: Receive = handleCommon {
    projectPreprocessor(self, socket, config)
    {
      case ProjectPreprocessor.Finished =>
        context.become(running)
        unstashAll()
      case _: AppRequest => stash()
    }
  }

  override def receive = preprocess

  // Drains `pending` into the sbt client actor, if one is connected.
  private def flushPending(): Unit = {
    while (sbtClientActor.isDefined && pending.nonEmpty) {
      val req = pending.head
      pending = pending.tail
      sbtClientActor.foreach { actor =>
        produceLog(LogMessage.DEBUG, s"sending request to sbt ${req._2}")
        actor.tell(req._2, req._1)
      }
    }
    if (pending.nonEmpty)
      log.debug(s"Requests waiting for sbt client to be connected: ${pending}")
  }

  private def validateEvent(json: JsObject): Boolean = {
    // be sure all events have "type" so on the client
    // side we don't check for that.
    val hasType = json \ "type" match {
      case JsString(t) => true
      case _ => false
    }
    hasType
  }

  override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
    super.preRestart(reason, message)
    log.debug(s"preRestart, ${reason.getClass.getName}: ${reason.getMessage}, on $message")
  }

  override def postStop(): Unit = {
    log.debug("postStop, closing sbt connector")
    connector.close()
  }
}
/** Companion: synthetic websocket events and the Props factory. */
object AppActor {
  // Events announcing sbt client lifecycle changes to the browser.
  val clientOpenedJsonEvent = SbtProtocol.wrapEvent(JsObject(Nil), "ClientOpened")
  val clientClosedJsonEvent = SbtProtocol.wrapEvent(JsObject(Nil), "ClientClosed")
  // Serial id used for requests generated internally rather than by a client.
  val playInternalSerialId = -1L
  val projectFilesChanged = SbtProtocol.wrapEvent(JsObject(Nil), "ProjectFilesChanged")

  def props(config: AppConfig,
    typesafeComActor: ActorRef,
    lookupTimeout: Timeout,
    projectPreprocessor: (ActorRef, ActorRef, AppConfig) => Unit): Props =
    Props(new AppActor(config,
      typesafeComActor,
      lookupTimeout,
      projectPreprocessor))
}
|
eed3si9n/activator
|
ui/app/console/handler/rest/OverviewJsonBuilder.scala
|
<reponame>eed3si9n/activator
package console.handler.rest
import akka.actor.{ ActorRef, Props }
import activator.analytics.data._
import play.api.libs.json._
import console.ClientController.Update
import play.api.libs.json.JsObject
import activator.analytics.data.MetadataStats
/** Renders overview results as JSON and pushes them to the requesting client. */
class OverviewJsonBuilder extends JsonBuilderActor {
  import OverviewJsonBuilder._

  def receive = {
    case r: OverviewResult => r.receiver ! Update(createJson(r.metadata, r.deviations, r.currentStorageTime))
  }
}
object OverviewJsonBuilder {
  def props(): Props =
    Props(classOf[OverviewJsonBuilder])

  // Everything needed to render one overview update for `receiver`.
  case class OverviewResult(receiver: ActorRef, metadata: MetadataStats, deviations: ErrorStats, currentStorageTime: Long)

  /** Builds the "overview" message: play-pattern/actor-path counts, total
    * deviation count, and the current storage timestamp.
    */
  def createJson(metadata: MetadataStats, deviations: ErrorStats, currentStorageTime: Long): JsObject = {
    Json.obj(
      "type" -> "overview",
      "data" ->
        Json.obj(
          "metadata" ->
            Json.obj(
              "playPatternCount" -> metadata.metrics.playPatterns.size,
              "actorPathCount" -> metadata.metrics.paths.size),
          "deviations" -> Json.obj(
            "deviationCount" -> deviations.metrics.counts.total),
          "currentStorageTime" -> currentStorageTime))
  }
}
|
vaquarkhan/rdds-dataframes-datasets-presentation-2016
|
demo/RDDs-DataFrames-and-Datasets.scala
|
// Databricks notebook source exported at Wed, 31 Aug 2016 00:07:54 UTC
// MAGIC %md
// MAGIC
// MAGIC #
// MAGIC
// MAGIC # RDDs, DataFrames and Datasets
// MAGIC
// MAGIC ## Using Wikipedia data
// MAGIC
// MAGIC We're going to use some Wikipedia data, representing changes to various Wikimedia project pages in an hour's time. These edits are from March 3rd (yesterday), covering the hour starting at 22:00 UTC.
// MAGIC
// MAGIC Dataset: https://dumps.wikimedia.org/other/pagecounts-raw/
// COMMAND ----------
// MAGIC %fs ls dbfs:/tmp/pagecounts
// COMMAND ----------
// MAGIC %md How big is the file, in megabytes?
// COMMAND ----------
// For some reason, if this is outside a local block, some of the RDD transformation lambdas
// pick up the global dirContents variable, even though they don't reference it. Since the
// type of dirContents isn't serializable, a lambda that picks it up fails. Putting the code
// inside a local block solves the problem.
{
val dirContents = dbutils.fs.ls("dbfs:/tmp/pagecounts")
val size = dirContents.map(_.size).head / (1024 * 1024)
println(s"$size megabytes\n")
}
// COMMAND ----------
val path = dbutils.fs.ls("dbfs:/tmp/pagecounts").sortWith { (a, b) => a.name > b.name }.head.path
// COMMAND ----------
// MAGIC %md
// MAGIC ### RDDs
// MAGIC
// MAGIC RDDs can be created by using the Spark Context object's `textFile()` method.
// COMMAND ----------
// MAGIC %md Create an RDD from the recent pagecounts file:
// COMMAND ----------
// Notice that this returns a RDD of Strings
val pagecountsRDD = sc.textFile(path)
// COMMAND ----------
// MAGIC %md There's one partition, because a gzipped file cannot be uncompressed in parallel.
// COMMAND ----------
pagecountsRDD.partitions.length
// COMMAND ----------
// MAGIC %md Let's increase the parallelism by repartitioning. I'm using 6 partitions, which is twice the number of available threads on Databricks Community Edition.
// COMMAND ----------
val pagecountsRDD2 = pagecountsRDD.repartition(6)
pagecountsRDD2.partitions.length
// COMMAND ----------
// MAGIC %md The `count` action counts how many items (lines) total are in the RDD (this requires a full scan of the file):
// COMMAND ----------
val fmt = new java.text.DecimalFormat
println(fmt.format(pagecountsRDD2.count()))
// COMMAND ----------
// MAGIC %md So the count shows that there are several million lines in the file. Notice that the `count()` action took some time to run because it had to read the entire file remotely from S3.
// COMMAND ----------
// MAGIC %md Let's take a look at some of the data. This will return faster, because `take()` doesn't have to process the whole file.
// COMMAND ----------
pagecountsRDD2.take(10).foreach(println)
// COMMAND ----------
// MAGIC %md Notice that each line in the file actually contains 2 strings and 2 numbers, but our RDD is treating each line as a long string. We'll fix this typing issue shortly by using a custom parsing function.
// COMMAND ----------
// MAGIC %md In the output above, the first column (like `aa`) is the Wikimedia project name. The following abbreviations are used for the first column:
// MAGIC
// MAGIC * wikibooks: `".b"`
// MAGIC * wiktionary: `".d"`
// MAGIC * wikimedia: `".m"`
// MAGIC * wikipedia mobile: `".mw"`
// MAGIC * wikinews: `".n"`
// MAGIC * wikiquote: `".q"`
// MAGIC * wikisource: `".s"`
// MAGIC * wikiversity: `".v"`
// MAGIC * mediawiki: `".w"`
// MAGIC
// MAGIC Projects without a period and a following character are Wikipedia projects. So, any line starting with the column `aa` refers to the Aragones language Wikipedia. Similarly, any line starting with the column `en` refers to the English language Wikipedia. `en.b` refers to English Language Wikibooks.
// MAGIC
// MAGIC The second column is the title of the page retrieved, the third column is the number of requests, and the fourth column is the size of the content returned.
// COMMAND ----------
// MAGIC %md Let's sum up the request counts per page in the English Wikipedia, then pull back the top 100. This is a variation of the code on Slide 4.
// MAGIC
// COMMAND ----------
pagecountsRDD2.flatMap { line =>
line.split("""\s+""") match {
case Array(project, page, numRequests, contentSize) => Some((project, page, numRequests.toLong))
case _ => None
}
}.
filter { case (project, page, numRequests) => project == "en" }.
map { case (project, page, numRequests) => (page, numRequests) }.
reduceByKey(_ + _).
sortBy({ case (page, numRequests) => numRequests }, ascending = false).
take(100).
foreach { case (page, totalRequests) => println(s"$page: $totalRequests") }
// COMMAND ----------
// MAGIC %md Let's remove the special pages. Pages like "Talk:Topic", "User:username", "Special:Something", and anything starting with a "." are just cluttering things up. Here's a modification of the above, with a slightly different `filter()` call.
// COMMAND ----------
// Title prefixes (anchored regexes) of non-article "special" pages that
// should be excluded from page-count analysis. Patterns are kept verbatim —
// note the URL entries deliberately contain regex metacharacters (`.`, `/`)
// exactly as in the originals.
val SkipPages = Seq(
  "Special:", "File:", "Category:", "User:", "Talk:", "Template:",
  "Help:", "Wikipedia:", "MediaWiki:", "Portal:", "Book:", "Draft:",
  "Education_Program:", "TimedText:", "Module:", "Topic:", "Images/",
  "%22//upload.wikimedia.org", "%22//en.wikipedia.org"
).map(prefix => ("^" + prefix).r).toArray

/** True when the title starts with any of the SkipPages patterns. */
def isSpecialPage(pageTitle: String): Boolean =
  SkipPages.exists(_.findFirstIn(pageTitle).isDefined)
// COMMAND ----------
// A page is kept when it is neither dot-prefixed nor in a special namespace.
def keepPage(page: String) = (! page.startsWith(".")) && (! isSpecialPage(page))

// English-Wikipedia totals per page, special pages removed, sorted by request
// count descending. Note reduceByKey is used (not groupByKey + reduce) to
// combine on the map side.
val pagecountsRDD3 =
  pagecountsRDD2.flatMap { line =>
    line.split("""\s+""") match {
      case Array(project, page, numRequests, contentSize) if (project == "en") && keepPage(page) => Some((page, numRequests.toLong))
      case _ => None
    }
  }.
    reduceByKey(_ + _).
    sortBy({ case (page, numRequests) => numRequests }, ascending = false)
pagecountsRDD3.take(100).foreach { case (page, totalRequests) => println(s"$page: $totalRequests") }
// COMMAND ----------
// MAGIC %md That's completely type-safe, but it's up to us to choose the right implementation. For instance, the code, above, _could_ have been done like this:
// MAGIC
// MAGIC ```
// MAGIC pagecountsParsedRDD2.flatMap { ... }.
// MAGIC filter { ... }.
// MAGIC map { case (project, page, numRequests) => (page, numRequests) }.
// MAGIC groupByKey(_._1).
// MAGIC reduce(_ + _).
// MAGIC ...
// MAGIC ```
// MAGIC
// MAGIC However, `groupByKey() + reduce()` is _far_ more inefficient than `reduceByKey()`. Yet, Spark cannot protect us from choosing the wrong transformations.
// COMMAND ----------
// MAGIC %md Before we move on to DataFrames, let's cache this last RDD and see how much memory it uses.
// COMMAND ----------
pagecountsRDD3.setName("pagecountsRDD3").cache()
val totalPagesRDD = pagecountsRDD3.count() // we need to run an action to fill the cache
println(fmt.format(totalPagesRDD))
// COMMAND ----------
// MAGIC %md
// MAGIC ## DataFrames
// MAGIC
// MAGIC Let's try the same thing with DataFrames.
// MAGIC
// MAGIC To make a DataFrame, we can simply convert our RDD into another RDD of a different type, something Spark can use to infer a _schema_. Since we have fewer than 23 columns, we'll use a case class.
// COMMAND ----------
// Serializable so closures referencing it can be shipped to executors; the
// object also works around scoping issues in the notebook.
object Parser extends Serializable {
  /** One pagecounts record: project code, page title, and request count. */
  case class EditEntry(project: String, pageTitle: String, numberOfRequests: Long)

  /** Parses a whitespace-separated pagecounts line into an EditEntry;
    * returns None when the line does not have exactly four fields.
    */
  def parseLine(line: String): Option[EditEntry] =
    line.split("""\s+""") match {
      case Array(proj, title, requests, _) => Some(EditEntry(proj, title, requests.toLong))
      case _ => None
    }
}
val pagecountsDF = pagecountsRDD2.flatMap(Parser.parseLine).toDF
// COMMAND ----------
pagecountsDF.printSchema()
// COMMAND ----------
pagecountsDF.rdd.partitions.length
// COMMAND ----------
// MAGIC %md Of course, it would be more efficient to read the DataFrame from something other than a gzipped text file. Let's try that, as well. We'll save it to a Parquet file and read it back.
// COMMAND ----------
import org.apache.spark.sql.SaveMode
pagecountsDF.write.mode(SaveMode.Overwrite).parquet("dbfs:/tmp/pagecounts.parquet")
// COMMAND ----------
// MAGIC %fs ls /tmp/pagecounts.parquet
// COMMAND ----------
// Note the use of "spark", not "sqlContext". "spark" is a preinstantiated
// SparkSession, introduced in 2.0.
val pagecountsDFParquet = spark.read.parquet("dbfs:/tmp/pagecounts.parquet")
// COMMAND ----------
pagecountsDFParquet.rdd.partitions.length
// COMMAND ----------
// MAGIC %md Let's get rid of the special pages, as we did with the RDD version.
// COMMAND ----------
import org.apache.spark.sql.functions._
val uKeepPage = sqlContext.udf.register("keepPage", keepPage _)
val pagecountsDF2 = pagecountsDF.filter($"project" === "en").
filter(uKeepPage($"pageTitle")).
filter(substring($"pageTitle", 0, 1) !== ".").
groupBy($"pageTitle").
agg(sum($"numberOfRequests").as("count")).
orderBy($"count".desc)
pagecountsDF2.take(100).foreach { row =>
println(s"${row(0)}: ${row(1)}")
}
// COMMAND ----------
// MAGIC %md Easier to read, but... not type-safe.
// COMMAND ----------
// MAGIC %md
// MAGIC ### A very brief aside, about partitions
// MAGIC
// MAGIC According to Spark documentation, when a DataFrame shuffle occurs, the number of post-shuffle partitions is defined by Spark configuration parameter
// MAGIC `spark.sql.shuffle.partitions`, which defaults to 200. So, we should have that many partitions in `pagecountsDF2`, because the `groupBy` and `agg` calls, above, are likely to produce shuffles.
// MAGIC
// MAGIC But...
// COMMAND ----------
println(s"spark.sql.shuffle.partitions = ${sqlContext.getConf("spark.sql.shuffle.partitions")}")
println(s"pagecountsDF2 partitions = ${pagecountsDF2.rdd.partitions.length}")
// COMMAND ----------
// MAGIC %md Okay, that's not 200.
// MAGIC
// MAGIC If we reran the creation of `pagecountsDF2`, above, we might see 18, 19, 20, 21, 22, or some number in that area for the number of post-shuffle partitions. Why?
// MAGIC
// MAGIC It turns out that the `orderBy`, above, uses _range partitioning_. To determine reasonable upper and lower bounds for the range, Spark randomly samples the data. In this case, we end up with something around 22 partitions; the partition count might differ with different data. It's not always the same from run to run because the random sampling doesn't use the same seed every time.
// COMMAND ----------
// MAGIC %md
// MAGIC ### Back to our DataFrame
// MAGIC
// MAGIC The `pagecountsDF2` DataFrame consists of `Row` objects, which can contain heterogeneous types. If we pull some `Rows` back to the driver, we can extract the columns, but only as type `Any`:
// COMMAND ----------
val first10Rows = pagecountsDF2.take(10)
// COMMAND ----------
val row = first10Rows.head
row(0)
// COMMAND ----------
// MAGIC %md Note that `Row` isn't typed. It can't be, since each row consists of columns of potentially different types. (I suppose Spark could've used Shapeless, but it didn't...) If we want to get back to actual, useful Scala types, we have to do something ugly, like this:
// COMMAND ----------
first10Rows.map { row =>
(row(0).asInstanceOf[String], row(1).asInstanceOf[Long])
}
// COMMAND ----------
// MAGIC %md Before we move on to Datasets, let's:
// MAGIC * verify that the number of items in the DataFrame match the RDD with the special pages filtered out
// MAGIC * cache the DataFrame
// MAGIC * compare the cached size to the cached size of the RDD.
// COMMAND ----------
val totalPagesDF = pagecountsDF2.cache().count()
println(s"RDD total: ${fmt.format(totalPagesRDD)}")
println(s"DF total: ${fmt.format(totalPagesDF)}")
// COMMAND ----------
// MAGIC %md
// MAGIC ## Datasets
// MAGIC The easiest way to create a Dataset from scratch is from a DataFrame. Prior to Spark 2.0, we'd use the `SQLContext` for that:
// MAGIC
// MAGIC ```
// MAGIC val ds = sqlContext.read.text("dbfs:/tmp/pagecounts").as[String]
// MAGIC |------------- makes a DataFrame ----------|
// MAGIC |--------|
// MAGIC |
// MAGIC +- converts to a Dataset
// MAGIC ```
// MAGIC
// MAGIC However, in 2.0, while that approach still works, we should prefer to use the `SparkSession`, available as `spark` in these notebooks and in the `spark-shell` Scala REPL.
// COMMAND ----------
// Notice that this returns a Dataset of Strings
val pagecountsDS = spark.read.text("dbfs:/tmp/pagecounts/").as[String]
pagecountsDS.take(3).foreach(println)
// COMMAND ----------
// MAGIC %md Of course, we could also just convert the existing `pagecountsDF` to a Dataset:
// COMMAND ----------
val pagecountsDS2 = pagecountsDF.as[(String, String, Long)]
pagecountsDS2.take(3).foreach(println)
// COMMAND ----------
// MAGIC %md Even better, though, let's make a Dataset that uses something more convenient than a tuple:
// COMMAND ----------
// Matching is done DataFrame column name -> case class field name
case class Edit(project: String, pageTitle: String, numberOfRequests: Long)
val pagecountsDS3 = pagecountsDF.as[Edit]
// COMMAND ----------
pagecountsDS3.take(4).foreach(println)
// COMMAND ----------
println(fmt.format(pagecountsDS3.count()))
// COMMAND ----------
// MAGIC %md ### I lied (a little)
// MAGIC
// MAGIC Prior to 2.0, `DataFrame` and `Dataset` were two different types. In 2.0, though, a `DataFrame` is just a type alias for `Dataset[Row]`. Thus, in 2.0,
// MAGIC when you start with a `DataFrame` and "convert" it to a `Dataset`, you're actually converting a `Dataset` of one type to a `Dataset` of another type.
// COMMAND ----------
pagecountsDF.getClass
// COMMAND ----------
// MAGIC %md You still have to get the types right on the conversions to Datasets, but once you have the Dataset, you have something that's type safe again.
// MAGIC
// MAGIC Once again, let's filter out the special pages, group by page title, and show the top 100 hits.
// COMMAND ----------
// Keep English-project pages only, dropping dot-prefixed titles and "special" pages
// (isSpecialPage is a helper defined earlier in the notebook), then sum requests per
// title and sort by popularity.
val pagecountsDS4 = pagecountsDS3.filter { e => (e.project == "en") && (! e.pageTitle.startsWith(".")) && (! isSpecialPage(e.pageTitle)) }.
  groupByKey { _.pageTitle }. // GroupedDataset[String, DSEntry]
  reduceGroups { (e1, e2) => e1.copy(e1.project, e1.pageTitle, e1.numberOfRequests + e2.numberOfRequests) }.
  map(_._2). // skip the key; extract the value
  orderBy($"numberOfRequests".desc)
pagecountsDS4.take(100).foreach { e => println(s"${e.pageTitle}: ${e.numberOfRequests}") }
// COMMAND ----------
// MAGIC %md Let's cache this Dataset and, for good measure, compare the number of items with our RDD and DataFrame.
// COMMAND ----------
// Cache the Dataset and count it, then print all three totals (RDD, DataFrame,
// Dataset) side by side for comparison.
val totalPagesDS = pagecountsDS4.cache().count()
println(s"DF total: ${fmt.format(totalPagesDF)}")
println(s"RDD total: ${fmt.format(totalPagesRDD)}")
println(s"DS total: ${fmt.format(totalPagesDS)}")
// COMMAND ----------
// How many partitions back the Dataset?
pagecountsDS4.rdd.partitions.length
// COMMAND ----------
// MAGIC %md
// MAGIC ### The last little bit of caching
// MAGIC
// MAGIC So far, we have cached:
// MAGIC
// MAGIC * A filtered RDD of `(pageTitle, totalRequests)` tuples
// MAGIC * A DataFrame of `Row` objects
// MAGIC * A Dataset of `Edit` objects
// COMMAND ----------
// MAGIC %md Just for completeness, let's cache an RDD with `Edit` objects. Given the small amount of memory available to Community Edition clusters, we'll allow this cache to spill to disk if it has to.
// COMMAND ----------
import org.apache.spark.storage.StorageLevel
// Parse each raw line into an Edit, silently dropping lines that don't split into
// exactly four whitespace-separated fields (the fourth field is unused here), and
// apply the same filters used for the Dataset version.
val pagecountsEditRDD = pagecountsRDD2.flatMap { line =>
  line.split("""\s+""") match {
    case Array(project, pageTitle, requests, _) => Some(Edit(project, pageTitle, requests.toLong))
    case _ => None
  }
}.
  filter { e => (e.project == "en") && (! e.pageTitle.startsWith(".")) && (! isSpecialPage(e.pageTitle)) }
// MEMORY_AND_DISK lets the cache spill to disk on memory-constrained clusters;
// count() forces materialization.
pagecountsEditRDD.setName("pagecountsEditRDD").persist(StorageLevel.MEMORY_AND_DISK).count()
// COMMAND ----------
// MAGIC %md The Spark UI's Storage tab now shows all of these in memory. The Dataset is compressed in memory by default, so it takes up much less space.
// COMMAND ----------
// MAGIC %md ## END OF DEMO
// COMMAND ----------
|
vaquarkhan/rdds-dataframes-datasets-presentation-2016
|
demo/Page-Counts-Downloader.scala
|
<filename>demo/Page-Counts-Downloader.scala
// Databricks notebook source exported at Thu, 16 Jun 2016 20:18:02 UTC
// MAGIC %md
// MAGIC # Download the Wikipedia Pagecounts Data File
// MAGIC
// MAGIC Use this notebook to download the latest Wikipedia pagecounts data file.
// COMMAND ----------
import java.text.SimpleDateFormat
import java.util.Calendar
import java.net.URL
import java.io.File
import sys.process._
import scala.language.postfixOps
import scala.util.matching.Regex
import scala.collection.mutable
import scala.io.Source
// COMMAND ----------
// MAGIC %md #### Decide what the latest hourly pagecounts file is:
// COMMAND ----------
// MAGIC %md The function in the next cell will:
// MAGIC * Check the current year and month locally
// MAGIC * Go to wikimedia.org to download the webpage (html file) for the current month's file dumps to the local Driver container
// MAGIC * Parse the downloaded webpage and find the latest file to download
// MAGIC * Return the URL for the latest file to download
// COMMAND ----------
// URL of the wikimedia.org dump-listing page for the current year and month.
val TodaysDumpsURL = {
  val now = new java.util.Date
  val year = new SimpleDateFormat("y").format(now)
  val month = new SimpleDateFormat("MM").format(now)
  s"https://dumps.wikimedia.org/other/pagecounts-raw/$year/$year-$month"
}
// COMMAND ----------
// Define a function that figures out what the latest file is
def decideLatestFile():String = {
  // Read the local file into String currentPagecountsWebpage
  // (fetches this month's dump-listing page over HTTP; the source is always closed).
  val source = Source.fromURL(TodaysDumpsURL)
  val currentPagecountsWebpage = try source.mkString finally source.close()
  // Define a regex pattern and apply it to currentPagecountsWebpage.
  // Each link looks like pagecounts-<date>-<hour>.gz; capture the two numeric parts.
  val pattern = """<a href="[^"]+">pagecounts-(\d+)-(\d+)\.gz</a>""".r
  val pagecountNames = (pattern findAllMatchIn currentPagecountsWebpage).map { m => (m.group(1), m.group(2)) }
  // The newest file is the one with the largest concatenated date+hour number.
  // NOTE(review): maxBy throws if the page contains no matching links — confirm
  // that crashing the notebook is acceptable in that case.
  val newest = pagecountNames.maxBy { case (date, hour) =>
    s"$date$hour".toLong
  }
  // Construct a URL for the latest file to download and return it
  TodaysDumpsURL + "/" + "pagecounts-" + newest._1 + "-" + newest._2 + ".gz" //newestFile.toString.drop(7)
}
// COMMAND ----------
// MAGIC %md Call the decideLatestFile() function and store the returned URL string in value 'url':
// COMMAND ----------
val url = decideLatestFile()
// COMMAND ----------
// Sanity check: show the first lines of the dump-listing page
// (note: this fetches the page a second time).
Source.fromURL(TodaysDumpsURL).getLines.take(20).foreach(println)
// COMMAND ----------
// MAGIC %md #### Download the latest pagecounts file to a shared S3 staging folder:
// COMMAND ----------
// MAGIC %md First, check which hour's pagecount file is currently in the staging folder:
// COMMAND ----------
// DBFS staging directory; the analysis notebook reads from this same path.
val StagingDir = "dbfs:/tmp/pagecounts"
// COMMAND ----------
// MAGIC %fs rm --recurse=true "/tmp/pagecounts"
// COMMAND ----------
// MAGIC %fs mkdirs "/tmp/pagecounts"
// COMMAND ----------
// Define a function that downloads the latest file to DBFS
/**
 * Downloads the pagecounts file at `url` to the Driver's local disk, then copies it
 * into the shared DBFS staging directory, clearing any previously staged files first.
 *
 * Side effects only: network download, DBFS deletes/copies, and a status println.
 */
def downloadLatestFile(url: String) = {
  val baseFile = url.split("/").last
  val temp = s"/tmp/$baseFile"
  // Clear target directory/bucket so only the newest file remains staged.
  try {
    dbutils.fs.ls(StagingDir).foreach(f => dbutils.fs.rm(f.path, recurse = false))
  }
  catch {
    case _: java.io.FileNotFoundException => // staging dir doesn't exist yet — nothing to clear
  }
  // Download the file to the Driver's local file system (sys.process: URL #> File).
  new URL(url) #> new File(temp) !!
  // Copy the file from the Driver's file system to the DBFS staging area.
  dbutils.fs.cp(s"file://$temp", s"$StagingDir/$baseFile")
  // Remove the local temporary file.
  //s"rm $temp" !!
  // Fixed typo in the status message ("Sucessfully" -> "Successfully").
  println(s"Successfully downloaded: $baseFile")
}
// COMMAND ----------
// MAGIC %md This download should take about 1-2 minutes to complete:
// COMMAND ----------
downloadLatestFile(url)
// COMMAND ----------
display(dbutils.fs.ls(StagingDir))
// COMMAND ----------
|
akahncmu/battleship-scala
|
app/controllers/Board.scala
|
<reponame>akahncmu/battleship-scala
package controllers
// Outcome of firing at a board position, parameterized by the ship type so the
// Hit/Sunk/Win cases can carry the ship that was struck. Sealed so matches are
// checked for exhaustiveness.
sealed trait StrikeResult[Ship]
object StrikeResult {
  // The shot struck `ship`, which still has unhit positions.
  case class Hit[Ship](ship:Ship) extends StrikeResult[Ship]
  // The position was already fired at earlier; the board is unchanged.
  case class Already_Taken[Ship]() extends StrikeResult[Ship]
  // The shot sank the last remaining ship — game over.
  case class Win[Ship](ship:Ship) extends StrikeResult[Ship]
  // The shot sank `ship`, but other ships remain afloat.
  case class Sunk[Ship](ship:Ship) extends StrikeResult[Ship]
  // The shot hit open water.
  case class Miss[Ship]() extends StrikeResult[Ship]
}
object Board {
  /**
   * Attempts to place `ship` on the given positions. Returns the updated
   * position->ship map, or None when the same ship is already on the board or any
   * requested position is occupied.
   */
  def addShip[Position, Ship](ship: Ship, positions: Set[Position], shipLocations: Map[Position, Ship]): Option[Map[Position, Ship]] = {
    val shipAlreadyPlaced = shipLocations.valuesIterator.contains(ship)
    val overlapsExisting = positions.exists(shipLocations.contains)
    if (shipAlreadyPlaced || overlapsExisting) None
    else Some(positions.foldLeft(shipLocations) { (acc, p) => acc + (p -> ship) })
  }
}
/**
 * Immutable battleship board: the set of positions already fired at plus a map from
 * occupied position to the ship occupying it.
 */
case class Board[Position, Ship](shotsTaken: Set[Position], shipLocations: Map[Position, Ship]) {

  /** True when every ship-occupied position has been fired at. */
  def hasWon(): Boolean = {
    shipLocations.keySet.forall(shotsTaken.contains)
  }

  /** True when every position occupied by ship `s` has been fired at. */
  def isSunk(s: Ship): Boolean = {
    shipLocations.filter { case (_, ship) => ship == s }.keySet.forall(shotsTaken.contains)
  }

  /**
   * Fires at position `p`, returning the resulting board and the outcome.
   *
   * Bug fix: Win/Sunk/Hit are now evaluated on the board AFTER the shot is recorded.
   * Previously they were evaluated on the pre-shot board (whose shotsTaken does not
   * include `p`), so the winning/sinking shot was reported as a plain Hit and Win
   * was unreachable (once all positions were hit, re-firing returned Already_Taken).
   */
  def attack(p: Position): (Board[Position, Ship], StrikeResult[Ship]) = {
    if (shotsTaken contains p) {
      (this, StrikeResult.Already_Taken())
    } else {
      val next = Board(shotsTaken + p, shipLocations)
      val result = shipLocations.get(p) match {
        case Some(ship) =>
          if (next.hasWon()) StrikeResult.Win(ship)
          else if (next.isSunk(ship)) StrikeResult.Sunk(ship)
          else StrikeResult.Hit(ship)
        case None => StrikeResult.Miss()
      }
      (next, result)
    }
  }
}
|
akahncmu/battleship-scala
|
app/controllers/BoardController.scala
|
package controllers
import javax.inject._
import play.api.libs.json.Json
import play.api.mvc._
@Singleton
class BoardController @Inject()(cc: ControllerComponents) extends AbstractController(cc) {

  /** Strike outcomes reported to clients as JSON strings. */
  object StrikeResult extends Enumeration {
    val Hit, Miss, Already_Taken, Sunk, Win = Value
  }

  // Each ship needs only to keep track of how big it is and how many times it has
  // been hit, since shotsTaken already guarantees that no coordinate can be hit twice.
  class Ship(size: Int) {
    var health = size
    /** Records one hit; returns true when this hit sank the ship. */
    def strike(): Boolean = {
      health -= 1
      health == 0
    }
  }

  // Currently only used by the pretty printer.
  final val DIMENSION: Int = 10
  // Number of ships still afloat (decremented when a ship is sunk).
  var numShips: Int = 0
  // Set of coordinates that have already been shot at.
  var shotsTaken: Set[(Int, Int)] = Set[(Int, Int)]()
  // Map of coordinates to ships.
  var shipLocations: Map[(Int, Int), Ship] = Map()

  // TODO: currently no input validation — overlapping or out-of-bounds ships are
  // accepted, and numShips is incremented regardless.
  def addShip(size: Int, startX: Int, startY: Int, vertical: Boolean) = Action {
    numShips = numShips + 1
    val ship = new Ship(size)
    for (i <- 0 until size)
      if (vertical)
        shipLocations += ((startX, startY + i) -> ship)
      else
        shipLocations += ((startX + i, startY) -> ship)
    Ok(s"There are now $numShips ships")
  }

  /** Resets the game to an empty board. */
  def clearBoard() = Action {
    numShips = 0
    shotsTaken = Set[(Int, Int)]()
    shipLocations = Map[(Int, Int), Ship]()
    Ok(Json.toJson("Board cleared"))
  }

  // O is empty, X means a shot has already been taken there, S means there is a ship
  // there that has not yet been shot.
  override def toString(): String = {
    var result = ""
    for (i <- 0 until this.DIMENSION) {
      for (j <- 0 until this.DIMENSION) {
        if (shotsTaken contains ((j, i)))
          result += "X"
        else if (shipLocations contains ((j, i)))
          result += "S"
        else
          result += "O"
        if (j < (DIMENSION - 1)) result += " "
      }
      if (i < (DIMENSION - 1)) result += "\n"
    }
    result
  }

  def printBoard() = Action { Ok(this.toString()) }

  /** Fires at (x, y) and reports the outcome as JSON. */
  def attack(x: Int, y: Int) = Action {
    if (shotsTaken contains ((x, y))) {
      Ok(Json.toJson(StrikeResult.Already_Taken))
    } else {
      shotsTaken += ((x, y))
      // Explicit tuple argument: `get(x, y)` relied on deprecated auto-tupling.
      shipLocations.get((x, y)) match {
        case Some(ship) =>
          if (ship.strike()) { // sunk that ship
            numShips -= 1
            if (numShips == 0)
              Ok(Json.toJson(StrikeResult.Win))
            else
              Ok(Json.toJson(StrikeResult.Sunk))
          }
          else
            Ok(Json.toJson(StrikeResult.Hit))
        case None => Ok(Json.toJson(StrikeResult.Miss))
      }
    }
  }
}
|
akahncmu/battleship-scala
|
test/FunctionalSpec.scala
|
import org.scalatestplus.play.PlaySpec
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.http.Status
import play.api.test.FakeRequest
import play.api.test.Helpers._
/**
 * Functional tests start a Play application internally, available as `app`.
 *
 * The two tests share controller state (same app instance), so they must run in
 * declaration order: ships placed in the second test assume a fresh board.
 */
class FunctionalSpec extends PlaySpec with GuiceOneAppPerSuite {

  /** Issues a GET against the running application and returns the result. */
  private def get(path: String) = route(app, FakeRequest(GET, path)).get

  /** Asserts that GET `path` answers 200 OK with the given content type and exact body. */
  private def expectContent(path: String, expectedType: String, expectedBody: String): Unit = {
    val result = get(path)
    status(result) mustBe Status.OK
    contentType(result) mustBe Some(expectedType)
    contentAsString(result) mustBe expectedBody
  }

  "BoardController" should {
    "start with empty board" in {
      val emptyBoard: String =
        """O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O""".stripMargin
      expectContent("/printBoard", "text/plain", emptyBoard)
    }

    "place two ships, and attack" in {
      // JSON-encoded StrikeResult values as the controller returns them.
      val hit = "\"Hit\""
      val miss = "\"Miss\""
      val sunk = "\"Sunk\""
      val already_taken = "\"Already_Taken\""
      val win = "\"Win\""

      // A vertical 2-ship at (0,0) and a horizontal 3-ship at (2,2).
      expectContent("/addShip/2/0/0/true", "text/plain", "There are now 1 ships")
      expectContent("/addShip/3/2/2/false", "text/plain", "There are now 2 ships")

      val boardWithShips: String =
        """S O O O O O O O O O
          |S O O O O O O O O O
          |O O S S S O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O""".stripMargin
      expectContent("/printBoard", "text/plain", boardWithShips)

      expectContent("/attack/0/0", "application/json", hit)
      expectContent("/attack/0/0", "application/json", already_taken) // repeat shot
      expectContent("/attack/9/9", "application/json", miss)
      expectContent("/attack/9/9", "application/json", already_taken) // repeat miss
      expectContent("/attack/0/1", "application/json", sunk)          // sinks the 2-ship
      expectContent("/attack/2/2", "application/json", hit)
      expectContent("/attack/4/2", "application/json", hit)
      expectContent("/attack/3/2", "application/json", win)           // sinks the last ship

      val boardAfterAttacks: String =
        """X O O O O O O O O O
          |X O O O O O O O O O
          |O O X X X O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O O
          |O O O O O O O O O X""".stripMargin
      expectContent("/printBoard", "text/plain", boardAfterAttacks)
    }
  }
}
|
foursquare/swagger-scala-module
|
build.sbt
|
<reponame>foursquare/swagger-scala-module
// Settings file for all the modules.
import xml.Group
import sbt._
import Keys._
import Defaults._
organization := "io.swagger"

version := "1.0.7-SNAPSHOT"

scalaVersion := "2.11.12"

crossScalaVersions := Seq("2.10.6", scalaVersion.value, "2.12.6", "2.13.1")

organizationHomepage in ThisBuild := Some(url("http://swagger.io"))

scalacOptions in ThisBuild ++= Seq("-encoding", "UTF-8", "-deprecation", "-unchecked")

// Publishing configuration. Each of the next three settings (and `credentials`
// below) previously appeared twice in this file with identical effect; the
// redundant second occurrences have been removed.
publishMavenStyle in ThisBuild := true

publishArtifact in Test := false

pomIncludeRepository := { x => false }

libraryDependencies ++= Seq(
  "org.scala-lang" % "scala-reflect" % scalaVersion.value,
  "io.swagger" % "swagger-core" % "1.5.24",
  "org.scalatest" %% "scalatest" % "3.0.8" % "test",
  "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.10",
  "junit" % "junit" % "4.12" % "test"
)

// SNAPSHOT versions go to the Sonatype snapshots repository, releases to staging.
publishTo := {
  if (version.value.trim.endsWith("SNAPSHOT"))
    Some("Sonatype Nexus Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots")
  else
    Some("Sonatype Nexus Releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2")
}

credentials in ThisBuild += Credentials(Path.userHome / ".ivy2" / ".credentials")

resolvers in ThisBuild ++= Seq(
  Resolver.mavenLocal,
  Resolver.typesafeRepo("releases"),
  Resolver.typesafeRepo("snapshots"),
  Resolver.sonatypeRepo("releases"),
  Resolver.sonatypeRepo("snapshots")
)

homepage := Some(new URL("https://github.com/swagger-api/swagger-scala-module"))

parallelExecution in Test := false

startYear := Some(2014)

licenses := Seq(("Apache License 2.0", new URL("http://www.apache.org/licenses/LICENSE-2.0.html")))

pomExtra := {
  pomExtra.value ++ Group(
    <scm>
      <connection>scm:git:git@github.com:swagger-api/swagger-scala-module.git</connection>
      <developerConnection>scm:git:git@github.com:swagger-api/swagger-scala-module.git</developerConnection>
      <url>https://github.com/swagger-api/swagger-scala-module</url>
    </scm>
    <issueManagement>
      <system>github</system>
      <url>https://github.com/swagger-api/swagger-scala-module/issues</url>
    </issueManagement>
    <developers>
      <developer>
        <id>fehguy</id>
        <name><NAME></name>
        <email><EMAIL></email>
      </developer>
    </developers>
  )
}
|
ekasitk/spark
|
core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
|
<filename>core/src/main/scala/org/apache/spark/executor/TaskMetrics.scala
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.spark._
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.scheduler.AccumulableInfo
import org.apache.spark.storage.{BlockId, BlockStatus}
/**
* :: DeveloperApi ::
* Metrics tracked during the execution of a task.
*
* This class is wrapper around a collection of internal accumulators that represent metrics
* associated with a task. The local values of these accumulators are sent from the executor
* to the driver when the task completes. These values are then merged into the corresponding
* accumulator previously registered on the driver.
*
* The accumulator updates are also sent to the driver periodically (on executor heartbeat)
* and when the task failed with an exception. The [[TaskMetrics]] object itself should never
* be sent to the driver.
*
* @param initialAccums the initial set of accumulators that this [[TaskMetrics]] depends on.
* Each accumulator in this initial set must be uniquely named and marked
* as internal. Additional accumulators registered later need not satisfy
* these requirements.
*/
@DeveloperApi
class TaskMetrics(initialAccums: Seq[Accumulator[_]]) extends Serializable {
  import InternalAccumulator._

  // Needed for Java tests
  def this() {
    this(InternalAccumulator.create())
  }

  /**
   * All accumulators registered with this task.
   */
  private val accums = new ArrayBuffer[Accumulable[_, _]]
  accums ++= initialAccums

  /**
   * A map for quickly accessing the initial set of accumulators by name.
   */
  private val initialAccumsMap: Map[String, Accumulator[_]] = {
    val map = new mutable.HashMap[String, Accumulator[_]]
    initialAccums.foreach { a =>
      // Every initial accumulator must be named, marked internal, and unique by name.
      val name = a.name.getOrElse {
        throw new IllegalArgumentException(
          "initial accumulators passed to TaskMetrics must be named")
      }
      require(a.isInternal,
        s"initial accumulator '$name' passed to TaskMetrics must be marked as internal")
      require(!map.contains(name),
        s"detected duplicate accumulator name '$name' when constructing TaskMetrics")
      map(name) = a
    }
    map.toMap
  }

  // Each metric is internally represented as an accumulator
  private val _executorDeserializeTime = getAccum(EXECUTOR_DESERIALIZE_TIME)
  private val _executorRunTime = getAccum(EXECUTOR_RUN_TIME)
  private val _resultSize = getAccum(RESULT_SIZE)
  private val _jvmGCTime = getAccum(JVM_GC_TIME)
  private val _resultSerializationTime = getAccum(RESULT_SERIALIZATION_TIME)
  private val _memoryBytesSpilled = getAccum(MEMORY_BYTES_SPILLED)
  private val _diskBytesSpilled = getAccum(DISK_BYTES_SPILLED)
  private val _peakExecutionMemory = getAccum(PEAK_EXECUTION_MEMORY)
  private val _updatedBlockStatuses =
    TaskMetrics.getAccum[Seq[(BlockId, BlockStatus)]](initialAccumsMap, UPDATED_BLOCK_STATUSES)

  /**
   * Time taken on the executor to deserialize this task.
   */
  def executorDeserializeTime: Long = _executorDeserializeTime.localValue

  /**
   * Time the executor spends actually running the task (including fetching shuffle data).
   */
  def executorRunTime: Long = _executorRunTime.localValue

  /**
   * The number of bytes this task transmitted back to the driver as the TaskResult.
   */
  def resultSize: Long = _resultSize.localValue

  /**
   * Amount of time the JVM spent in garbage collection while executing this task.
   */
  def jvmGCTime: Long = _jvmGCTime.localValue

  /**
   * Amount of time spent serializing the task result.
   */
  def resultSerializationTime: Long = _resultSerializationTime.localValue

  /**
   * The number of in-memory bytes spilled by this task.
   */
  def memoryBytesSpilled: Long = _memoryBytesSpilled.localValue

  /**
   * The number of on-disk bytes spilled by this task.
   */
  def diskBytesSpilled: Long = _diskBytesSpilled.localValue

  /**
   * Peak memory used by internal data structures created during shuffles, aggregations and
   * joins. The value of this accumulator should be approximately the sum of the peak sizes
   * across all such data structures created in this task. For SQL jobs, this only tracks all
   * unsafe operators and ExternalSort.
   */
  def peakExecutionMemory: Long = _peakExecutionMemory.localValue

  /**
   * Storage statuses of any blocks that have been updated as a result of this task.
   */
  def updatedBlockStatuses: Seq[(BlockId, BlockStatus)] = _updatedBlockStatuses.localValue

  @deprecated("use updatedBlockStatuses instead", "2.0.0")
  def updatedBlocks: Option[Seq[(BlockId, BlockStatus)]] = {
    if (updatedBlockStatuses.nonEmpty) Some(updatedBlockStatuses) else None
  }

  // Setters and increment-ers
  private[spark] def setExecutorDeserializeTime(v: Long): Unit =
    _executorDeserializeTime.setValue(v)
  private[spark] def setExecutorRunTime(v: Long): Unit = _executorRunTime.setValue(v)
  private[spark] def setResultSize(v: Long): Unit = _resultSize.setValue(v)
  private[spark] def setJvmGCTime(v: Long): Unit = _jvmGCTime.setValue(v)
  private[spark] def setResultSerializationTime(v: Long): Unit =
    _resultSerializationTime.setValue(v)
  private[spark] def incMemoryBytesSpilled(v: Long): Unit = _memoryBytesSpilled.add(v)
  private[spark] def incDiskBytesSpilled(v: Long): Unit = _diskBytesSpilled.add(v)
  private[spark] def incPeakExecutionMemory(v: Long): Unit = _peakExecutionMemory.add(v)
  private[spark] def incUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit =
    _updatedBlockStatuses.add(v)
  private[spark] def setUpdatedBlockStatuses(v: Seq[(BlockId, BlockStatus)]): Unit =
    _updatedBlockStatuses.setValue(v)

  /**
   * Get a Long accumulator from the given map by name, assuming it exists.
   * Note: this only searches the initial set of accumulators passed into the constructor.
   */
  private[spark] def getAccum(name: String): Accumulator[Long] = {
    TaskMetrics.getAccum[Long](initialAccumsMap, name)
  }

  /* ========================== *
   |       INPUT METRICS        |
   * ========================== */

  private var _inputMetrics: Option[InputMetrics] = None

  /**
   * Metrics related to reading data from a [[org.apache.spark.rdd.HadoopRDD]] or from persisted
   * data, defined only in tasks with input.
   */
  def inputMetrics: Option[InputMetrics] = _inputMetrics

  /**
   * Get or create a new [[InputMetrics]] associated with this task.
   */
  private[spark] def registerInputMetrics(readMethod: DataReadMethod.Value): InputMetrics = {
    synchronized {
      val metrics = _inputMetrics.getOrElse {
        val metrics = new InputMetrics(initialAccumsMap)
        metrics.setReadMethod(readMethod)
        _inputMetrics = Some(metrics)
        metrics
      }
      // If there already exists an InputMetric with the same read method, we can just return
      // that one. Otherwise, if the read method is different from the one previously seen by
      // this task, we return a new dummy one to avoid clobbering the values of the old metrics.
      // In the future we should try to store input metrics from all different read methods at
      // the same time (SPARK-5225).
      if (metrics.readMethod == readMethod) {
        metrics
      } else {
        // NOTE(review): this dummy is constructed without initialAccumsMap — presumably
        // its values are never reported back to the driver; confirm before relying on it.
        val m = new InputMetrics
        m.setReadMethod(readMethod)
        m
      }
    }
  }

  /* ============================ *
   |       OUTPUT METRICS         |
   * ============================ */

  private var _outputMetrics: Option[OutputMetrics] = None

  /**
   * Metrics related to writing data externally (e.g. to a distributed filesystem),
   * defined only in tasks with output.
   */
  def outputMetrics: Option[OutputMetrics] = _outputMetrics

  /**
   * Get or create a new [[OutputMetrics]] associated with this task.
   */
  private[spark] def registerOutputMetrics(
      writeMethod: DataWriteMethod.Value): OutputMetrics = synchronized {
    _outputMetrics.getOrElse {
      val metrics = new OutputMetrics(initialAccumsMap)
      metrics.setWriteMethod(writeMethod)
      _outputMetrics = Some(metrics)
      metrics
    }
  }

  /* ================================== *
   |       SHUFFLE READ METRICS         |
   * ================================== */

  private var _shuffleReadMetrics: Option[ShuffleReadMetrics] = None

  /**
   * Metrics related to shuffle read aggregated across all shuffle dependencies.
   * This is defined only if there are shuffle dependencies in this task.
   */
  def shuffleReadMetrics: Option[ShuffleReadMetrics] = _shuffleReadMetrics

  /**
   * Temporary list of [[ShuffleReadMetrics]], one per shuffle dependency.
   *
   * A task may have multiple shuffle readers for multiple dependencies. To avoid synchronization
   * issues from readers in different threads, in-progress tasks use a [[ShuffleReadMetrics]] for
   * each dependency and merge these metrics before reporting them to the driver.
   */
  @transient private lazy val tempShuffleReadMetrics = new ArrayBuffer[ShuffleReadMetrics]

  /**
   * Create a temporary [[ShuffleReadMetrics]] for a particular shuffle dependency.
   *
   * All usages are expected to be followed by a call to [[mergeShuffleReadMetrics]], which
   * merges the temporary values synchronously. Otherwise, all temporary data collected will
   * be lost.
   */
  private[spark] def registerTempShuffleReadMetrics(): ShuffleReadMetrics = synchronized {
    val readMetrics = new ShuffleReadMetrics
    tempShuffleReadMetrics += readMetrics
    readMetrics
  }

  /**
   * Merge values across all temporary [[ShuffleReadMetrics]] into `_shuffleReadMetrics`.
   * This is expected to be called on executor heartbeat and at the end of a task.
   */
  private[spark] def mergeShuffleReadMetrics(): Unit = synchronized {
    if (tempShuffleReadMetrics.nonEmpty) {
      // Each field is the sum of that field across all per-dependency readers.
      val metrics = new ShuffleReadMetrics(initialAccumsMap)
      metrics.setRemoteBlocksFetched(tempShuffleReadMetrics.map(_.remoteBlocksFetched).sum)
      metrics.setLocalBlocksFetched(tempShuffleReadMetrics.map(_.localBlocksFetched).sum)
      metrics.setFetchWaitTime(tempShuffleReadMetrics.map(_.fetchWaitTime).sum)
      metrics.setRemoteBytesRead(tempShuffleReadMetrics.map(_.remoteBytesRead).sum)
      metrics.setLocalBytesRead(tempShuffleReadMetrics.map(_.localBytesRead).sum)
      metrics.setRecordsRead(tempShuffleReadMetrics.map(_.recordsRead).sum)
      _shuffleReadMetrics = Some(metrics)
    }
  }

  /* =================================== *
   |       SHUFFLE WRITE METRICS         |
   * =================================== */

  private var _shuffleWriteMetrics: Option[ShuffleWriteMetrics] = None

  /**
   * Metrics related to shuffle write, defined only in shuffle map stages.
   */
  def shuffleWriteMetrics: Option[ShuffleWriteMetrics] = _shuffleWriteMetrics

  /**
   * Get or create a new [[ShuffleWriteMetrics]] associated with this task.
   */
  private[spark] def registerShuffleWriteMetrics(): ShuffleWriteMetrics = synchronized {
    _shuffleWriteMetrics.getOrElse {
      val metrics = new ShuffleWriteMetrics(initialAccumsMap)
      _shuffleWriteMetrics = Some(metrics)
      metrics
    }
  }

  /* ========================== *
   |       OTHER THINGS         |
   * ========================== */

  // Registers an accumulator created after construction so its updates are included
  // in accumulatorUpdates() along with the initial set.
  private[spark] def registerAccumulator(a: Accumulable[_, _]): Unit = {
    accums += a
  }

  /**
   * Return the latest updates of accumulators in this task.
   *
   * The [[AccumulableInfo.update]] field is always defined and the [[AccumulableInfo.value]]
   * field is always empty, since this represents the partial updates recorded in this task,
   * not the aggregated value across multiple tasks.
   */
  def accumulatorUpdates(): Seq[AccumulableInfo] = {
    accums.map { a => a.toInfo(Some(a.localValue), None) }
  }

  // If we are reconstructing this TaskMetrics on the driver, some metrics may already be set.
  // If so, initialize all relevant metrics classes so listeners can access them downstream.
  {
    var (hasShuffleRead, hasShuffleWrite, hasInput, hasOutput) = (false, false, false, false)
    initialAccums
      .filter { a => a.localValue != a.zero }
      .foreach { a =>
        // Accumulator names encode the metric family via a well-known prefix.
        a.name.get match {
          case sr if sr.startsWith(SHUFFLE_READ_METRICS_PREFIX) => hasShuffleRead = true
          case sw if sw.startsWith(SHUFFLE_WRITE_METRICS_PREFIX) => hasShuffleWrite = true
          case in if in.startsWith(INPUT_METRICS_PREFIX) => hasInput = true
          case out if out.startsWith(OUTPUT_METRICS_PREFIX) => hasOutput = true
          case _ =>
        }
      }
    if (hasShuffleRead) { _shuffleReadMetrics = Some(new ShuffleReadMetrics(initialAccumsMap)) }
    if (hasShuffleWrite) { _shuffleWriteMetrics = Some(new ShuffleWriteMetrics(initialAccumsMap)) }
    if (hasInput) { _inputMetrics = Some(new InputMetrics(initialAccumsMap)) }
    if (hasOutput) { _outputMetrics = Some(new OutputMetrics(initialAccumsMap)) }
  }
}
private[spark] object TaskMetrics extends Logging {

  // A fresh TaskMetrics backed by the default set of internal accumulators.
  def empty: TaskMetrics = new TaskMetrics

  /**
   * Get an accumulator from the given map by name, assuming it exists.
   */
  def getAccum[T](accumMap: Map[String, Accumulator[_]], name: String): Accumulator[T] = {
    require(accumMap.contains(name), s"metric '$name' is missing")
    val accum = accumMap(name)
    try {
      // Note: we can't do pattern matching here because types are erased by compile time
      accum.asInstanceOf[Accumulator[T]]
    } catch {
      case e: ClassCastException =>
        throw new SparkException(s"accumulator $name was of unexpected type", e)
    }
  }

  /**
   * Construct a [[TaskMetrics]] object from a list of accumulator updates, called on driver only.
   *
   * Executors only send accumulator updates back to the driver, not [[TaskMetrics]]. However, we
   * need the latter to post task end events to listeners, so we need to reconstruct the metrics
   * on the driver.
   *
   * This assumes the provided updates contain the initial set of accumulators representing
   * internal task level metrics.
   */
  def fromAccumulatorUpdates(accumUpdates: Seq[AccumulableInfo]): TaskMetrics = {
    // Initial accumulators are passed into the TaskMetrics constructor first because these
    // are required to be uniquely named. The rest of the accumulators from this task are
    // registered later because they need not satisfy this requirement.
    val (initialAccumInfos, otherAccumInfos) = accumUpdates
      .filter { info => info.update.isDefined }
      .partition { info => info.name.exists(_.startsWith(InternalAccumulator.METRICS_PREFIX)) }
    val initialAccums = initialAccumInfos.map { info =>
      val accum = InternalAccumulator.create(info.name.get)
      accum.setValueAny(info.update.get)
      accum
    }
    // We don't know the types of the rest of the accumulators, so we try to find the same ones
    // that were previously registered here on the driver and make copies of them. It is important
    // that we copy the accumulators here since they are used across many tasks and we want to
    // maintain a snapshot of their local task values when we post them to listeners downstream.
    val otherAccums = otherAccumInfos.flatMap { info =>
      val id = info.id
      val acc = Accumulators.get(id).map { a =>
        val newAcc = a.copy()
        newAcc.setValueAny(info.update.get)
        newAcc
      }
      // Unregistered accumulators are logged and dropped rather than failing the event.
      if (acc.isEmpty) {
        logWarning(s"encountered unregistered accumulator $id when reconstructing task metrics.")
      }
      acc
    }
    val metrics = new TaskMetrics(initialAccums)
    otherAccums.foreach(metrics.registerAccumulator)
    metrics
  }
}
|
ekasitk/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
|
<filename>sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types._
/**
 * A placeholder expression for cube/rollup, which will be replaced by the analyzer.
 */
trait GroupingSet extends Expression with CodegenFallback {

  // The grouping expressions this CUBE/ROLLUP applies to; they are also the children.
  def groupByExprs: Seq[Expression]
  override def children: Seq[Expression] = groupByExprs

  // this should be replaced first: marking the expression unresolved forces the
  // analyzer to rewrite it before it can appear in an executed plan.
  override lazy val resolved: Boolean = false

  // Never evaluated or typed directly — both fail if the analyzer did not replace
  // this placeholder.
  override def dataType: DataType = throw new UnsupportedOperationException
  override def foldable: Boolean = false
  override def nullable: Boolean = true
  override def eval(input: InternalRow): Any = throw new UnsupportedOperationException
}
// Concrete GROUP BY ... WITH CUBE / WITH ROLLUP placeholders. Both are rewritten by
// the analyzer and never evaluated (see GroupingSet); the redundant empty bodies
// have been dropped.
case class Cube(groupByExprs: Seq[Expression]) extends GroupingSet

case class Rollup(groupByExprs: Seq[Expression]) extends GroupingSet
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.