Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Add more helpful error message about versioning information not being present in old-school workflows | package ducttape.versioner
import ducttape.util.Files
import java.io.File
class WorkflowVersionHistory(val history: Seq[WorkflowVersionInfo]) {
lazy val prevVersion: Option[Int] = history.size match {
case 0 => None
case _ => Some(history.map(_.version).max)
}
lazy val nextVersion: Int = prevVersion.getOrElse(0) + 1
def prevVersionInfo: Option[WorkflowVersionInfo] = prevVersion match {
case None => None
case Some(i) => Some(history(i))
}
def union(): WorkflowVersionInfo = {
// TODO: Produce something info-like that returns a version from any previous version
throw new Error("Unimplemented")
}
}
object WorkflowVersionHistory {
def load(versionHistoryDir: File) = new WorkflowVersionHistory(
Files.ls(versionHistoryDir).filter {
_.isDirectory
}.map { dir =>
try {
Some(WorkflowVersionStore.load(dir))
} catch {
case ex => {
System.err.println("Version is corrupt or incomplete, DELETING: %s: %s".format(dir, ex.getMessage))
val DELAY_SECS = 3
Thread.sleep(DELAY_SECS)
Files.deleteDir(dir)
None
}
}
}.collect {
// only keep versions that are non-broken
case Some(info) => info
}
)
}
| package ducttape.versioner
import ducttape.util.Files
import java.io.File
class WorkflowVersionHistory(val history: Seq[WorkflowVersionInfo]) {
lazy val prevVersion: Option[Int] = history.size match {
case 0 => None
case _ => Some(history.map(_.version).max)
}
lazy val nextVersion: Int = prevVersion.getOrElse(0) + 1
def prevVersionInfo: Option[WorkflowVersionInfo] = prevVersion match {
case None => None
case Some(i) => Some(history(i))
}
def union(): WorkflowVersionInfo = {
// TODO: Produce something info-like that returns a version from any previous version
throw new Error("Unimplemented")
}
}
object WorkflowVersionHistory {
def load(versionHistoryDir: File) = new WorkflowVersionHistory(
Files.ls(versionHistoryDir).filter {
_.isDirectory
}.map { dir =>
try {
Some(WorkflowVersionStore.load(dir))
} catch {
case ex => {
val DELAY_SECS = 3
System.err.println("WARNING: Version is corrupt or incomplete, DELETING in %d sec: %s: %s".format(DELAY_SECS, dir, ex.getMessage))
System.err.println("NOTE: This warning could be due to upgrading from an older version of ducttape that doesn't support versioning")
Thread.sleep(DELAY_SECS)
Files.deleteDir(dir)
None
}
}
}.collect {
// only keep versions that are non-broken
case Some(info) => info
}
)
}
|
Use DevHttpErrorHandler to check if play.editor link gets rendered | /*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.it.views
import play.api.Configuration
import play.api.Environment
import play.api.Mode
import play.api.http.DefaultHttpErrorHandler
import play.api.test._
class DevErrorPageSpec extends PlaySpecification {
"devError.scala.html" should {
val testExceptionSource = new play.api.PlayException.ExceptionSource("test", "making sure the link shows up") {
def line = 100.asInstanceOf[Integer]
def position = 20.asInstanceOf[Integer]
def input = "test"
def sourceName = "someSourceFile"
}
"link the error line if play.editor is configured" in {
DefaultHttpErrorHandler.setPlayEditor("someEditorLinkWith %s:%s")
val result = DefaultHttpErrorHandler.onServerError(FakeRequest(), testExceptionSource)
contentAsString(result) must contain("""href="someEditorLinkWith someSourceFile:100" """)
}
"show prod error page in prod mode" in {
val errorHandler = new DefaultHttpErrorHandler()
val result = errorHandler.onServerError(FakeRequest(), testExceptionSource)
Helpers.contentAsString(result) must contain("Oops, an error occurred")
}
}
}
| /*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.it.views
import play.api.http.DefaultHttpErrorHandler
import play.api.http.DevHttpErrorHandler
import play.api.test._
class DevErrorPageSpec extends PlaySpecification {
"devError.scala.html" should {
val testExceptionSource = new play.api.PlayException.ExceptionSource("test", "making sure the link shows up") {
def line = 100.asInstanceOf[Integer]
def position = 20.asInstanceOf[Integer]
def input = "test"
def sourceName = "someSourceFile"
}
"link the error line if play.editor is configured" in {
DevHttpErrorHandler.setPlayEditor("someEditorLinkWith %s:%s")
val result = DevHttpErrorHandler.onServerError(FakeRequest(), testExceptionSource)
contentAsString(result) must contain("""href="someEditorLinkWith someSourceFile:100" """)
}
"show prod error page in prod mode" in {
val errorHandler = new DefaultHttpErrorHandler()
val result = errorHandler.onServerError(FakeRequest(), testExceptionSource)
Helpers.contentAsString(result) must contain("Oops, an error occurred")
}
}
}
|
Change Item to items in url | package notification.services
import play.api.Logger
import play.api.libs.ws.WSClient
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
trait FastlyPurge {
def softPurge(contentApiId: String): Future[Boolean]
}
class FastlyPurgeImpl(wsClient: WSClient, configuration: Configuration)(implicit ec: ExecutionContext) extends FastlyPurge {
private val logger: Logger = Logger(this.getClass)
def softPurge(contentApiId: String): Future[Boolean] = {
val url = s"${configuration.fastlyApiEndpoint}/service/${configuration.fastlyKey}/purge/Item/$contentApiId"
wsClient.url(url)
.addHttpHeaders("Fastly-Soft-Purge" -> "1")
.withRequestTimeout(durationToPair(2.seconds))
.execute("PURGE")
.map { resp =>
logger.info(s"Soft purged $url got HTTP ${resp.status} back")
if (resp.status == 200) {
true
} else {
throw new Exception(s"Unable to soft purge url, got HTTP ${resp.status} for $url")
}
}
}
}
| package notification.services
import play.api.Logger
import play.api.libs.ws.WSClient
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
trait FastlyPurge {
def softPurge(contentApiId: String): Future[Boolean]
}
class FastlyPurgeImpl(wsClient: WSClient, configuration: Configuration)(implicit ec: ExecutionContext) extends FastlyPurge {
private val logger: Logger = Logger(this.getClass)
def softPurge(contentApiId: String): Future[Boolean] = {
val url = s"${configuration.fastlyApiEndpoint}/service/${configuration.fastlyKey}/purge/items/$contentApiId"
wsClient.url(url)
.addHttpHeaders("Fastly-Soft-Purge" -> "1")
.withRequestTimeout(durationToPair(2.seconds))
.execute("PURGE")
.map { resp =>
logger.info(s"Soft purged $url got HTTP ${resp.status} back")
if (resp.status == 200) {
true
} else {
throw new Exception(s"Unable to soft purge url, got HTTP ${resp.status} for $url")
}
}
}
}
|
Use own error types in ZIO | package zio
import scalaz.zio.{App, IO, Void}
import scalaz.zio.console._
import java.io.IOException
object Fibonacci extends App {
sealed trait AppError
case object NoneException extends AppError
case class AppException(exception: Exception) extends AppError
def run(args: List[String]): IO[Void, ExitStatus] =
myAppLogic.attempt.map(_.fold(_ => 1, _ => 0)).map(ExitStatus.ExitNow(_))
def myAppLogic: IO[IOException, Unit] =
for {
_ <- putStrLn("Hello! Which fibonacci value should calculate?")
n <- getStrLn
index <- IO.syncException(n.toInt).leftMap(_ => new IOException())
value <- fib(index).leftMap(_ => new IOException())
_ <- putStrLn(s"$index. fibonacci value is $value")
} yield ()
def fib(n: Int): IO[Void, Int] =
if (n <= 1) IO.point(1)
else for {
fiber1 <- fib(n - 2).fork
fiber2 <- fib(n - 1).fork
v2 <- fiber2.join
v1 <- fiber1.join
} yield v1 + v2
}
| package zio
import scalaz.zio.{App, IO, Void}
import scalaz.zio.console._
import java.io.IOException
object Fibonacci extends App {
sealed trait AppError
case object NoneException extends AppError
case class AppException(exception: Exception) extends AppError
def run(args: List[String]): IO[Void, ExitStatus] =
myAppLogic.attempt.map(_.fold(_ => 1, _ => 0)).map(ExitStatus.ExitNow(_))
def myAppLogic: IO[AppError, Unit] =
for {
_ <- putStrLn("Hello! Which fibonacci value should calculate?")
.leftMap[AppError](AppException(_))
n <- getStrLn
.leftMap[AppError](AppException(_))
index <- IO.syncException(n.toInt)
.leftMap[AppError](AppException(_))
value <- fib(index)
.leftMap[AppError](_ => NoneException)
_ <- putStrLn(s"$index. fibonacci value is $value")
.leftMap[AppError](AppException(_))
} yield ()
def fib(n: Int): IO[Void, Int] =
if (n <= 1) IO.point(1)
else for {
fiber1 <- fib(n - 2).fork
fiber2 <- fib(n - 1).fork
v2 <- fiber2.join
v1 <- fiber1.join
} yield v1 + v2
}
|
Use java.nio.file to work with files and paths | package ml.combust.mleap.springboot
import TypeConverters._
import javax.annotation.PostConstruct
import org.slf4j.LoggerFactory
import ml.combust.mleap.pb
import org.springframework.beans.factory.annotation.Value
import org.springframework.stereotype.Component
import scala.io.Source
import scalapb.json4s.Parser
@Component
class ModelLoader {
@Value("${mleap.model.config:#{null}}")
private val modelConfigPath: String = null
private val logger = LoggerFactory.getLogger(classOf[ModelLoader])
private val jsonParser = new Parser()
private val timeout = 60000
@PostConstruct
def loadModel(): Unit = {
if (modelConfigPath == null) {
logger.info("Skipping loading model on startup")
} else {
logger.info(s"Loading model from $modelConfigPath")
val fileSource = Source.fromFile(modelConfigPath)
val request = try {
fileSource.getLines.mkString
} finally {
fileSource.close()
}
StarterConfiguration.getMleapExecutor
.loadModel(jsonParser.fromJsonString[pb.LoadModelRequest](request))(timeout)
}
}
}
| package ml.combust.mleap.springboot
import TypeConverters._
import javax.annotation.PostConstruct
import org.slf4j.LoggerFactory
import ml.combust.mleap.pb
import org.springframework.beans.factory.annotation.Value
import org.springframework.stereotype.Component
import java.nio.file.{Paths, Files}
import scalapb.json4s.Parser
@Component
class ModelLoader {
@Value("${mleap.model.config:#{null}}")
private val modelConfigPath: String = null
private val logger = LoggerFactory.getLogger(classOf[ModelLoader])
private val jsonParser = new Parser()
private val timeout = 60000
@PostConstruct
def loadModel(): Unit = {
if (modelConfigPath == null) {
logger.info("Skipping loading model on startup")
return
}
val configPath = Paths.get(modelConfigPath)
if (!Files.exists(configPath)) {
logger.warn(s"Model path does not exist: $modelConfigPath")
return
}
logger.info(s"Loading model from $modelConfigPath")
val request = new String(Files.readAllBytes(configPath))
StarterConfiguration.getMleapExecutor
.loadModel(jsonParser.fromJsonString[pb.LoadModelRequest](request))(timeout)
}
}
|
Add scalastyle plugin to sbt | // These are needed even before build.scala
resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
resolvers += "Spray Repository" at "http://repo.spray.cc/"
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.9.2")
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")
addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.5.1")
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
//addSbtPlugin("io.spray" %% "sbt-twirl" % "0.6.1")
// For Sonatype publishing
//resolvers += Resolver.url("sbt-plugin-releases", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
//addSbtPlugin("com.jsuereth" % "xsbt-gpg-plugin" % "0.6")
| // These are needed even before build.scala
resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/"
resolvers += "Spray Repository" at "http://repo.spray.cc/"
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.9.2")
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")
addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.5.1")
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.7.0")
resolvers += "sonatype-releases" at "https://oss.sonatype.org/content/repositories/releases/"
//addSbtPlugin("io.spray" %% "sbt-twirl" % "0.6.1")
// For Sonatype publishing
//resolvers += Resolver.url("sbt-plugin-releases", new URL("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
//addSbtPlugin("com.jsuereth" % "xsbt-gpg-plugin" % "0.6")
|
Improve the quality of these tests | package uk.ac.wellcome.platform.api.works
import com.twitter.finagle.http.Status
import com.twitter.finatra.http.EmbeddedHttpServer
import uk.ac.wellcome.display.models.ApiVersions
import uk.ac.wellcome.test.fixtures.TestWith
class ApiErrorsTest extends ApiWorksTestBase {
it("returns a Not Found error if you try to get an API version") {
withServer(indexNameV1 = "not-important", indexNameV2 = "not-important") {
server =>
server.httpGet(
path = "/catalogue/v567/works",
andExpect = Status.NotFound,
withJsonBody = badRequest(
s"catalogue/${ApiVersions.default.toString}",
"v567 is not a valid API version")
)
}
}
private def withServer[R](testWith: TestWith[EmbeddedHttpServer, R]): R =
withServer(indexNameV1 = "index-v1", indexNameV2 = "index-v2") { server =>
testWith(server)
}
}
| package uk.ac.wellcome.platform.api.works
import com.twitter.finagle.http.Status
import com.twitter.finatra.http.EmbeddedHttpServer
import uk.ac.wellcome.display.models.ApiVersions
import uk.ac.wellcome.test.fixtures.TestWith
class ApiErrorsTest extends ApiWorksTestBase {
it("returns a Not Found error if you try to get an API version") {
withServer { server =>
server.httpGet(
path = "/catalogue/v567/works",
andExpect = Status.NotFound,
withJsonBody = badRequest(
s"catalogue/${ApiVersions.default.toString}",
"v567 is not a valid API version")
)
}
}
it("returns a Not Found error if you try to get an unrecognised path") {
withServer { server =>
server.httpGet(
path = "/foo/bar",
andExpect = Status.NotFound,
withJsonBody = badRequest(
s"catalogue/${ApiVersions.default.toString}",
"v567 is not a valid API version")
)
}
}
private def withServer[R](testWith: TestWith[EmbeddedHttpServer, R]): R =
withServer(indexNameV1 = "index-v1", indexNameV2 = "index-v2") { server =>
testWith(server)
}
}
|
Fix compatible problem with other NameMapper | package org.jetbrains.plugins.scala
package debugger
import com.intellij.debugger.NameMapper
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.util.Computable
import com.intellij.psi.PsiClass
import org.jetbrains.annotations.NotNull
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTemplateDefinition, ScTrait}
/**
*@author ilyas
*/
class ScalaJVMNameMapper extends NameMapper {
def getQualifiedName(@NotNull clazz: PsiClass): String = {
ApplicationManager.getApplication.runReadAction(new Computable[String] {
def compute: String = {
clazz match {
case obj: ScObject => obj.qualifiedName + "$"
case tr: ScTrait => tr.qualifiedName
case templDef: ScTemplateDefinition => templDef.qualifiedName
case psiClass => psiClass.getQualifiedName
}
}
})
}
}
| package org.jetbrains.plugins.scala
package debugger
import com.intellij.debugger.NameMapper
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.util.Computable
import com.intellij.psi.PsiClass
import org.jetbrains.annotations.NotNull
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTemplateDefinition, ScTrait}
/**
*@author ilyas
*/
class ScalaJVMNameMapper extends NameMapper {
def getQualifiedName(@NotNull clazz: PsiClass): String = {
ApplicationManager.getApplication.runReadAction(new Computable[String] {
def compute: String = {
clazz match {
case obj: ScObject => obj.qualifiedName + "$"
case tr: ScTrait => tr.qualifiedName
case templDef: ScTemplateDefinition => templDef.qualifiedName
case psiClass => null
}
}
})
}
}
|
Remove undefined scenario from defined simulation | package org.onehippo.undefinedvariablesdemo
import io.gatling.core.Predef.{Simulation, _}
import io.gatling.http.Predef._
import scala.concurrent.duration._
class DefinedVariablesSimulation extends Simulation {
val httpProtocol = http
.baseURL("http://localhost:8080")
val headers_0 = Map("Pragma" -> "no-cache")
val headers_1 = Map("Upgrade-Insecure-Requests" -> "1")
val defined = scenario("DefinedVariablesSimulation").exec(http("Defined")
.get("/site/defined"))
.pause(7)
val undefined = scenario("UndefinedVariablesSimulation").exec(http("Undefined")
.get("/site/undefined"))
.pause(7)
def steps(users:Int, s:Int) = (1 to users).toList.flatMap(i =>
List(
constantUsersPerSec(i) during(s seconds)
)
)
val users=10
val stepTime = 10
setUp(
defined.inject(steps(users,stepTime))
).protocols(httpProtocol)
}
| package org.onehippo.undefinedvariablesdemo
import io.gatling.core.Predef.{Simulation, _}
import io.gatling.http.Predef._
import scala.concurrent.duration._
class DefinedVariablesSimulation extends Simulation {
val httpProtocol = http
.baseURL("http://localhost:8080")
val headers_0 = Map("Pragma" -> "no-cache")
val headers_1 = Map("Upgrade-Insecure-Requests" -> "1")
val defined = scenario("DefinedVariablesSimulation").exec(http("Defined")
.get("/site/defined"))
.pause(7)
def steps(users:Int, s:Int) = (1 to users).toList.flatMap(i =>
List(
constantUsersPerSec(i) during(s seconds)
)
)
val users=10
val stepTime = 10
setUp(
defined.inject(steps(users,stepTime))
).protocols(httpProtocol)
}
|
Integrate Cassandra config manager changes with facebook comments stream factory. | package com.microsoft.partnercatalyst.fortis.spark.sources.streamfactories
import com.github.catalystcode.fortis.spark.streaming.facebook.dto.FacebookComment
import com.github.catalystcode.fortis.spark.streaming.facebook.{FacebookAuth, FacebookUtils}
import com.microsoft.partnercatalyst.fortis.spark.sources.streamprovider.{ConnectorConfig, StreamFactory}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
class FacebookCommentStreamFactory extends StreamFactory[FacebookComment] {
private val DELIMITER: String = "|"
/**
* Creates a DStream for a given connector config iff the connector config is supported by the stream factory.
* The param set allows the streaming context to be curried into the partial function that creates the stream.
*
* @param streamingContext The Spark Streaming Context
* @return A partial function for transforming a connector config
*/
override def createStream(streamingContext: StreamingContext): PartialFunction[ConnectorConfig, DStream[FacebookComment]] = {
case ConnectorConfig("FacebookComment", params) =>
val facebookAuth = FacebookAuth(params("appId"), params("appSecret"), params("accessToken"))
val pageIds = Option(params("pageIds")) match {
case None => Set()
case Some(pageIds) => pageIds.split(DELIMITER).toSet
}
FacebookUtils.createCommentsStreams(streamingContext, facebookAuth, pageIds.toSet)
}
}
| package com.microsoft.partnercatalyst.fortis.spark.sources.streamfactories
import com.github.catalystcode.fortis.spark.streaming.facebook.dto.FacebookComment
import com.github.catalystcode.fortis.spark.streaming.facebook.{FacebookAuth, FacebookUtils}
import com.microsoft.partnercatalyst.fortis.spark.sources.streamprovider.{ConnectorConfig, StreamFactory}
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.dstream.DStream
class FacebookCommentStreamFactory extends StreamFactory[FacebookComment] {
private val DELIMITER: String = "|"
/**
* Creates a DStream for a given connector config iff the connector config is supported by the stream factory.
* The param set allows the streaming context to be curried into the partial function that creates the stream.
*
* @param streamingContext The Spark Streaming Context
* @return A partial function for transforming a connector config
*/
override def createStream(streamingContext: StreamingContext): PartialFunction[ConnectorConfig, DStream[FacebookComment]] = {
case ConnectorConfig("FacebookComment", params) =>
import ParameterExtensions._
val facebookAuth = FacebookAuth(
params.getAs[String]("appId"),
params.getAs[String]("appSecret"),
params.getAs[String]("accessToken")
)
FacebookUtils.createCommentsStreams(streamingContext, facebookAuth, params.getTrustedSources.toSet)
}
}
|
Add process logger to the usage of ADB | package com.karumi.shot.android
import com.karumi.shot.domain.model.{AppId, Folder}
import scala.sys.process._
object Adb {
var adbBinaryPath: String = ""
}
class Adb {
private final val CR_ASCII_DECIMAL = 13
def devices: List[String] = {
executeAdbCommandWithResult("devices")
.split('\n')
.toList
.drop(1)
.map { line =>
line.split('\t').toList.head
}
.filter(device => !isCarriageReturnASCII(device))
}
def pullScreenshots(device: String,
screenshotsFolder: Folder,
appId: AppId): Unit =
executeAdbCommandWithResult(
s"-s $device pull /sdcard/screenshots/$appId/screenshots-default/ $screenshotsFolder")
def clearScreenshots(device: String, appId: AppId): Unit =
executeAdbCommand(
s"-s $device shell rm -r /sdcard/screenshots/$appId/screenshots-default/")
private def executeAdbCommand(command: String): Int =
s"${Adb.adbBinaryPath} $command".!
private def executeAdbCommandWithResult(command: String): String =
s"${Adb.adbBinaryPath} $command".!!
private def isCarriageReturnASCII(device: String): Boolean =
device.charAt(0) == CR_ASCII_DECIMAL
}
| package com.karumi.shot.android
import com.karumi.shot.domain.model.{AppId, Folder}
import scala.sys.process._
object Adb {
var adbBinaryPath: String = ""
}
class Adb {
private final val CR_ASCII_DECIMAL = 13
private val logger = ProcessLogger(
o => println("Shot ADB output: " + o),
e => println(Console.RED + "Shot ADB error: " + e + Console.RESET)
)
def devices: List[String] = {
executeAdbCommandWithResult("devices")
.split('\n')
.toList
.drop(1)
.map { line =>
line.split('\t').toList.head
}
.filter(device => !isCarriageReturnASCII(device))
}
def pullScreenshots(device: String,
screenshotsFolder: Folder,
appId: AppId): Unit =
executeAdbCommandWithResult(
s"-s $device pull /sdcard/screenshots/$appId/screenshots-default/ $screenshotsFolder")
def clearScreenshots(device: String, appId: AppId): Unit =
executeAdbCommand(
s"-s $device shell rm -r /sdcard/screenshots/$appId/screenshots-default/")
private def executeAdbCommand(command: String): Int =
s"${Adb.adbBinaryPath} $command" ! logger
private def executeAdbCommandWithResult(command: String): String =
s"${Adb.adbBinaryPath} $command" !! logger
private def isCarriageReturnASCII(device: String): Boolean =
device.charAt(0) == CR_ASCII_DECIMAL
}
|
Add type annotations to PulseInitalizationActor | package io.vamp.lifter.pulse
import io.vamp.lifter.elasticsearch.ElasticsearchInitializationActor
import io.vamp.lifter.elasticsearch.ElasticsearchInitializationActor.TemplateDefinition
import io.vamp.lifter.notification.LifterNotificationProvider
import io.vamp.model.resolver.NamespaceValueResolver
import io.vamp.pulse.{ ElasticsearchPulseActor, ElasticsearchPulseEvent }
import scala.io.Source
class ElasticsearchPulseInitializationActor extends ElasticsearchPulseEvent with NamespaceValueResolver with ElasticsearchInitializationActor with LifterNotificationProvider {
lazy val indexName = resolveWithNamespace(ElasticsearchPulseActor.indexName(), lookup = true)
lazy val indexTimeFormat = ElasticsearchPulseActor.indexTimeFormat()
lazy val elasticsearchUrl = ElasticsearchPulseActor.elasticsearchUrl()
override lazy val templates = {
def load(name: String) = Source.fromInputStream(getClass.getResourceAsStream(s"$name.json")).mkString.replace("$NAME", indexName)
List("template", "template-event").map(template ⇒ TemplateDefinition(s"$indexName-$template", load(template)))
}
override protected def initializeCustom(): Unit = {
initializeIndex(indexTypeName()._1)
super.initializeCustom()
}
}
| package io.vamp.lifter.pulse
import io.vamp.lifter.elasticsearch.ElasticsearchInitializationActor
import io.vamp.lifter.elasticsearch.ElasticsearchInitializationActor.TemplateDefinition
import io.vamp.lifter.notification.LifterNotificationProvider
import io.vamp.model.resolver.NamespaceValueResolver
import io.vamp.pulse.{ ElasticsearchPulseActor, ElasticsearchPulseEvent }
import scala.io.Source
class ElasticsearchPulseInitializationActor extends ElasticsearchPulseEvent with NamespaceValueResolver with ElasticsearchInitializationActor with LifterNotificationProvider {
lazy val indexName: String = resolveWithNamespace(ElasticsearchPulseActor.indexName(), lookup = true)
lazy val indexTimeFormat: Map[String, String] = ElasticsearchPulseActor.indexTimeFormat()
lazy val elasticsearchUrl: String = ElasticsearchPulseActor.elasticsearchUrl()
override lazy val templates: List[TemplateDefinition] = {
def load(name: String) = Source.fromInputStream(getClass.getResourceAsStream(s"$name.json")).mkString.replace("$NAME", indexName)
List("template", "template-event").map(template ⇒ TemplateDefinition(s"$indexName-$template", load(template)))
}
override protected def initializeCustom(): Unit = {
initializeIndex(indexTypeName()._1)
super.initializeCustom()
}
}
|
Fix leading zero reverse string. The code runs slower than the previous code. | object Solution {
import scala.collection.mutable
def genConvergeNums(n: Int): Array[BigInt] = {
val cache = mutable.Map.empty[BigInt, BigInt]
def recursiveCalculateConverge(x: BigInt, c: Int): BigInt = {
if (cache.contains(x)) cache(x)
else {
if (c >= 60) {
cache(x) = BigInt(-1)
cache(x)
}
else {
val xs = x.toString
val rxs = xs.reverse
if (xs == rxs) {
cache(x) = x
x
}
else {
val rx = BigInt(rxs)
val convergeNum = recursiveCalculateConverge(x + rx, c + 1)
cache(x) = convergeNum
if (xs.size == rxs.size) cache(rx) = convergeNum
convergeNum
}
}
}
}
(0 to n).toArray.map(recursiveCalculateConverge(_, 0))
}
def main(args: Array[String]) {
val n = readLine.toInt
val converges = genConvergeNums(n)
val occ = new mutable.HashMap[BigInt, Int] { override def default(k: BigInt) = 0 }
for (x <- converges if x != -1) occ(x) += 1
val (k, c)= occ.maxBy(_._2)
println(k + " " + c)
}
}
| object Solution {
import scala.collection.mutable
def genConvergeNums(n: Int): Array[BigInt] = {
val cache = mutable.Map.empty[BigInt, BigInt]
def recursiveCalculateConverge(x: BigInt, c: Int): BigInt = {
require(x >= 0)
if (x < 10) return x
if (cache.contains(x)) cache(x)
else {
if (c >= 60) {
cache(x) = BigInt(-1)
cache(x)
}
else {
val xs = x.toString
val rxs = xs.reverse.dropWhile(_ == '0')
if (xs == rxs) {
cache(x) = x
x
}
else {
val rx = BigInt(rxs)
val convergeNum = recursiveCalculateConverge(x + rx, c + 1)
cache(x) = convergeNum
if (xs.size == rxs.size) cache(rx) = convergeNum
convergeNum
}
}
}
}
(0 to n).toArray.map(recursiveCalculateConverge(_, 0))
}
def main(args: Array[String]) {
val n = readLine.toInt
val converges = genConvergeNums(n)
val occ = new mutable.HashMap[BigInt, Int] { override def default(k: BigInt) = 0 }
for (x <- converges if x != -1) occ(x) += 1
val (k, c)= occ.maxBy(_._2)
println(k + " " + c)
}
}
|
Update sbt-scalajs, scalajs-compiler, ... to 1.5.1 | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.5.0")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.7")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("io.youi" % "youi-plugin" % "1.2.0")
libraryDependencies += "org.scala-js" %% "scalajs-env-jsdom-nodejs" % "1.1.0" | addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.5.1")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.7")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "2.1.1")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("io.youi" % "youi-plugin" % "1.2.0")
libraryDependencies += "org.scala-js" %% "scalajs-env-jsdom-nodejs" % "1.1.0" |
Update play, play-ahc-ws, play-joda-forms, ... to 2.8.1 | addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.0")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1")
| addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.1")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.3.1")
|
Include sbt-pgp plugin to ensure build passes | resolvers += "simplytyped" at "http://simplytyped.github.io/repo/releases"
addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.7.4")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.1")
| resolvers += "simplytyped" at "http://simplytyped.github.io/repo/releases"
addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.7.4")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.1")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
|
Upgrade build-plugin 2.0.3 that upgrades scala 2.12.3 | resolvers += Resolver.url("gatling", url("http://dl.bintray.com/content/gatling/sbt-plugins/"))(Resolver.ivyStylePatterns)
addSbtPlugin("io.gatling" % "gatling-build-plugin" % "2.0.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.6.4")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.2")
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.3")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.25")
addMavenResolverPlugin
| resolvers += Resolver.url("gatling", url("http://dl.bintray.com/content/gatling/sbt-plugins/"))(Resolver.ivyStylePatterns)
addSbtPlugin("io.gatling" % "gatling-build-plugin" % "2.0.3")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "0.6.4")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.2")
addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.3")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.25")
addMavenResolverPlugin
|
Update mdoc, sbt-mdoc to 2.3.1 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.9.0")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.3")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.9.0")
val sbtSoftwareMillVersion = "2.0.9"
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-common" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-publish" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-browser-test-js" % sbtSoftwareMillVersion)
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.0")
addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1")
| addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.9.0")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.3")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.9.0")
val sbtSoftwareMillVersion = "2.0.9"
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-common" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-publish" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-browser-test-js" % sbtSoftwareMillVersion)
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.1")
addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1")
|
Update sbt-scalajs, scalajs-compiler, ... to 1.7.1 | addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.0")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.0")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0")
val sbtSoftwareMillVersion = "2.0.8"
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-common" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-publish" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-browser-test-js" % sbtSoftwareMillVersion)
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.23")
addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1")
| addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.1")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.0")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.8.0")
val sbtSoftwareMillVersion = "2.0.8"
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-common" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-publish" % sbtSoftwareMillVersion)
addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-browser-test-js" % sbtSoftwareMillVersion)
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.23")
addSbtPlugin("org.jetbrains.scala" % "sbt-ide-settings" % "1.1.1")
addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "1.0.1")
|
Allow a tracer to be set on the ServerBuilder | // ----- ostrich service
import com.twitter.finagle.builder.{Server, ServerBuilder}
import com.twitter.finagle.stats.OstrichStatsReceiver
import com.twitter.finagle.thrift.ThriftServerFramedCodec
import com.twitter.logging.Logger
import com.twitter.ostrich.admin.Service
trait ThriftServer extends Service with FutureIface {
val log = Logger.get(getClass)
def thriftCodec = ThriftServerFramedCodec()
val thriftProtocolFactory = new TBinaryProtocol.Factory()
val thriftPort: Int
val serverName: String
var server: Server = null
def start() {
val thriftImpl = new FinagledService(this, thriftProtocolFactory)
val serverAddr = new InetSocketAddress(thriftPort)
server = ServerBuilder().codec(thriftCodec).name(serverName).reportTo(new OstrichStatsReceiver).bindTo(serverAddr).build(thriftImpl)
}
def shutdown() {
synchronized {
if (server != null) {
server.close(0.seconds)
}
}
}
} | // ----- ostrich service
import com.twitter.finagle.builder.{Server, ServerBuilder}
import com.twitter.finagle.stats.{StatsReceiver, OstrichStatsReceiver}
import com.twitter.finagle.thrift.ThriftServerFramedCodec
import com.twitter.finagle.tracing.{NullTracer, Tracer}
import com.twitter.logging.Logger
import com.twitter.ostrich.admin.Service
trait ThriftServer extends Service with FutureIface {
val log = Logger.get(getClass)
def thriftCodec = ThriftServerFramedCodec()
def statsReceiver: StatsReceiver = new OstrichStatsReceiver
def tracer: Tracer = NullTracer
val thriftProtocolFactory = new TBinaryProtocol.Factory()
val thriftPort: Int
val serverName: String
var server: Server = null
def start() {
val thriftImpl = new FinagledService(this, thriftProtocolFactory)
val serverAddr = new InetSocketAddress(thriftPort)
server = ServerBuilder()
.codec(thriftCodec)
.name(serverName)
.reportTo(statsReceiver)
.bindTo(serverAddr)
.tracer(tracer)
.build(thriftImpl)
}
def shutdown() {
synchronized {
if (server != null) {
server.close(0.seconds)
}
}
}
} |
Correct like if not tags for story | package models
import play.api.libs.json._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
case class ViewerProfile(id: String, likeStoryIds: List[String] = List(), nopeStoryIds: List[String] = List(), tagsWeights: Map[String,Int] = Map())
{
def tagsFilterBy(filter: ((String,Int)) => (Boolean)) = this.tagsWeights.filter(filter).map(_._1).toList
}
object ViewerProfile extends MongoModel("viewerprofiles")
{
def findById(id: String): Future[ViewerProfile] = collection.find(Json.obj("_id" -> Json.obj("$oid" -> id))).cursor[ViewerProfile].collect[List]().map{
case vp :: Nil =>
vp
case _ =>
ViewerProfile(id)
}
def processEvent(event: Event) =
collection.update(
Json.obj("_id" -> Json.obj("$oid" -> event.viewerProfileId)),
Json.obj(
"$addToSet" -> Json.obj(event._type+"StoryIds" -> event.storyId),
"$inc" -> JsObject(event.tags.map(tag => ("tagsWeights."+tag, JsNumber(event.tagsWeight))))
),
multi = false,
upsert = true
)
} | package models
import play.api.libs.json._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
case class ViewerProfile(id: String, likeStoryIds: List[String] = List(), nopeStoryIds: List[String] = List(), tagsWeights: Map[String,Int] = Map())
{
def tagsFilterBy(filter: ((String,Int)) => (Boolean)) = this.tagsWeights.filter(filter).map(_._1).toList
}
object ViewerProfile extends MongoModel("viewerprofiles")
{
def findById(id: String): Future[ViewerProfile] = collection.find(Json.obj("_id" -> Json.obj("$oid" -> id))).cursor[ViewerProfile].collect[List]().map{
case vp :: Nil =>
vp
case _ =>
ViewerProfile(id)
}
def processEvent(event: Event) = {
val update = if(event.tags.nonEmpty) {
Json.obj(
"$addToSet" -> Json.obj(event._type + "StoryIds" -> event.storyId)
)
} else {
Json.obj(
"$addToSet" -> Json.obj(event._type + "StoryIds" -> event.storyId),
"$inc" -> JsObject(event.tags.map(tag => ("tagsWeights." + tag, JsNumber(event.tagsWeight))))
)
}
collection.update(
Json.obj("_id" -> Json.obj("$oid" -> event.viewerProfileId)),
update,
multi = false,
upsert = true
)
}
} |
Fix unit test for api change | package models
import com.gilt.apidocgenerator.models.ServiceDescription
import core.generator.ScalaServiceDescription
import core.{ServiceDescriptionBuilder}
import org.scalatest.{ ShouldMatchers, FunSpec }
class Play2BindablesSpec extends FunSpec with ShouldMatchers {
lazy val service = TestHelper.parseFile(s"../reference-api/api.json").serviceDescription.get
lazy val ssd = new ScalaServiceDescription(service)
lazy val ageGroup = ssd.enums.find(_.name == "AgeGroup").getOrElse {
sys.error("No age group enum found")
}
it("generates bindable for a single enum") {
TestHelper.assertEqualsFile(
"test/resources/generators/play-2-bindable-age-group.txt",
Play2Bindables.buildImplicit(ageGroup)
)
}
it("generates bindable object") {
TestHelper.assertEqualsFile(
"core/src/test/resources/generators/play-2-bindable-reference-api-object.txt",
Play2Bindables.build(ssd).getOrElse("")
)
}
}
| package models
import com.gilt.apidocgenerator.models.ServiceDescription
import core.generator.ScalaServiceDescription
import core.{ServiceDescriptionBuilder}
import org.scalatest.{ ShouldMatchers, FunSpec }
class Play2BindablesSpec extends FunSpec with ShouldMatchers {
lazy val service = TestHelper.parseFile(s"../reference-api/api.json").serviceDescription.get
lazy val ssd = new ScalaServiceDescription(service)
lazy val ageGroup = ssd.enums.find(_.name == "AgeGroup").getOrElse {
sys.error("No age group enum found")
}
it("generates bindable for a single enum") {
TestHelper.assertEqualsFile(
"test/resources/generators/play-2-bindable-age-group.txt",
Play2Bindables.buildImplicit(ageGroup)
)
}
it("generates bindable object") {
TestHelper.assertEqualsFile(
"core/src/test/resources/generators/play-2-bindable-reference-api-object.txt",
Play2Bindables.build(ssd)
)
}
}
|
Upgrade to Android support library 24.2.0 | androidBuildAar
autoScalaLibrary := false
githubProject := "floating-label-layout"
javacOptions ++=
"-source" :: "1.7" ::
"-target" :: "1.7" ::
Nil
libraryDependencies ++=
"com.android.support" % "design" % "24.1.1" ::
Nil
minSdkVersion := "7"
normalizedName := "floating-label-layout"
organization := "io.taig.android"
platformTarget := "android-24"
publishArtifact in ( Compile, packageDoc ) := false
scalacOptions ++=
"-deprecation" ::
"-feature" ::
Nil
targetSdkVersion := "24"
typedResources := false
version := "1.1.9" | androidBuildAar
autoScalaLibrary := false
githubProject := "floating-label-layout"
javacOptions ++=
"-source" :: "1.7" ::
"-target" :: "1.7" ::
Nil
libraryDependencies ++=
"com.android.support" % "design" % "24.2.0" ::
Nil
minSdkVersion := "7"
normalizedName := "floating-label-layout"
organization := "io.taig.android"
platformTarget := "android-24"
publishArtifact in ( Compile, packageDoc ) := false
scalacOptions ++=
"-deprecation" ::
"-feature" ::
Nil
typedResources := false
version := "1.1.9" |
Update sbt-scalajs, scalajs-compiler, ... to 1.7.1 | logLevel := Level.Warn
libraryDependencies += "org.scala-js" %% "scalajs-env-selenium" % "1.1.1"
libraryDependencies += "org.scala-js" %% "scalajs-env-jsdom-nodejs" % "1.1.0"
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.0")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.0")
addSbtPlugin("org.jetbrains" % "sbt-ide-settings" % "1.1.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-less" % "1.1.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.8.1")
// Deployment configuration
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10")
| logLevel := Level.Warn
libraryDependencies += "org.scala-js" %% "scalajs-env-selenium" % "1.1.1"
libraryDependencies += "org.scala-js" %% "scalajs-env-jsdom-nodejs" % "1.1.0"
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.7.1")
addSbtPlugin("org.scala-js" % "sbt-jsdependencies" % "1.0.2")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.0")
addSbtPlugin("org.jetbrains" % "sbt-ide-settings" % "1.1.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-less" % "1.1.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.8.1")
// Deployment configuration
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.10")
|
Support scss compilation from scss-hidden | name := "scyig-judicial"
organization := "me.frmr.scyig"
version := "0.0.1-SNAPSHOT"
scalaVersion := "2.12.2"
scalacOptions in Compile ++= Seq("-feature", "-deprecation")
libraryDependencies ++= {
val liftVersion = "3.2.0-M1"
Seq(
"net.liftweb" %% "lift-webkit" % liftVersion,
"com.typesafe.slick" %% "slick" % "3.2.0",
"mysql" % "mysql-connector-java" % "8.0.7-dmr",
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.scalatest" %% "scalatest" % "3.0.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.13.4" % "test"
)
}
enablePlugins(JettyPlugin)
| name := "scyig-judicial"
organization := "me.frmr.scyig"
version := "0.0.1-SNAPSHOT"
scalaVersion := "2.12.2"
scalacOptions in Compile ++= Seq("-feature", "-deprecation")
libraryDependencies ++= {
val liftVersion = "3.2.0-M1"
Seq(
"net.liftweb" %% "lift-webkit" % liftVersion,
"com.typesafe.slick" %% "slick" % "3.2.0",
"mysql" % "mysql-connector-java" % "8.0.7-dmr",
"ch.qos.logback" % "logback-classic" % "1.2.3",
"org.scalatest" %% "scalatest" % "3.0.1" % "test",
"org.scalacheck" %% "scalacheck" % "1.13.4" % "test"
)
}
enablePlugins(JettyPlugin)
webappPostProcess := { webappDir: File =>
def recurseFiles(rootDir: File, targetDir: File, extension: String, handler: (String, String, String)=>Unit): Unit = {
if (! rootDir.isDirectory || ! targetDir.isDirectory) {
streams.value.log.error(s"$rootDir and $targetDir must both be directories")
} else {
for {
file <- rootDir.listFiles if file.getName.endsWith(extension)
} {
streams.value.log.info(s"Processing ${file.getPath}...")
handler(
rootDir.toString,
file.getName,
targetDir.toString
)
}
}
}
def compileScss(inputDir: String, inputFile: String, outputDir: String): Unit = {
val outputFilename = inputFile.replace(".scss", ".css")
s"scss $inputDir/$inputFile $outputDir/$outputFilename" ! streams.value.log
}
recurseFiles(
webappDir / "scss-hidden",
webappDir / "css",
".scss",
compileScss _
)
}
|
Upgrade to Couchbase client 2.2.6 | organization := "com.productfoundry"
name := "akka-persistence-couchbase"
version := "0.3"
scalaVersion := "2.11.7"
fork in Test := true
javaOptions in Test += "-Xmx512M"
scalacOptions ++= Seq(
"-encoding", "UTF-8",
"-feature",
"-unchecked",
"-deprecation",
"-Xlint",
"-Yno-adapted-args",
"-Ywarn-dead-code",
"-Ywarn-numeric-widen",
"-Xfuture"
)
parallelExecution in Test := false
resolvers ++= Seq(
"Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/"
)
licenses += ("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0.html"))
bintrayOrganization := Some("productfoundry")
val akkaVer = "2.4.2"
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-persistence" % akkaVer,
"com.couchbase.client" % "java-client" % "2.2.4",
"commons-codec" % "commons-codec" % "1.10",
"com.typesafe.akka" %% "akka-persistence-tck" % akkaVer % "test",
"org.scalatest" %% "scalatest" % "2.2.4" % "test"
)
| organization := "com.productfoundry"
name := "akka-persistence-couchbase"
version := "0.3"
scalaVersion := "2.11.7"
fork in Test := true
javaOptions in Test += "-Xmx512M"
scalacOptions ++= Seq(
"-encoding", "UTF-8",
"-feature",
"-unchecked",
"-deprecation",
"-Xlint",
"-Yno-adapted-args",
"-Ywarn-dead-code",
"-Ywarn-numeric-widen",
"-Xfuture"
)
parallelExecution in Test := false
resolvers ++= Seq(
"Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/"
)
licenses += ("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0.html"))
bintrayOrganization := Some("productfoundry")
val akkaVer = "2.4.2"
libraryDependencies ++= Seq(
"com.typesafe.akka" %% "akka-persistence" % akkaVer,
"com.couchbase.client" % "java-client" % "2.2.6",
"commons-codec" % "commons-codec" % "1.10",
"com.typesafe.akka" %% "akka-persistence-tck" % akkaVer % "test",
"org.scalatest" %% "scalatest" % "2.2.4" % "test"
)
|
Update to 0.4 SNAPSHOT and use couchbase.akka | name := "couchbase-crud-scala-starter"
version := "1.0-SNAPSHOT"
lazy val root = (project in file(".")).enablePlugins(PlayScala)
scalaVersion := "2.11.1"
libraryDependencies ++= Seq(
cache,
"org.reactivecouchbase" %% "reactivecouchbase-play" % "0.3"
)
resolvers += "ReactiveCouchbase" at "https://raw.github.com/ReactiveCouchbase/repository/master/snapshots" | name := "couchbase-crud-scala-starter"
version := "1.0-SNAPSHOT"
lazy val root = (project in file(".")).enablePlugins(PlayScala)
scalaVersion := "2.11.1"
libraryDependencies ++= Seq(
cache,
"org.reactivecouchbase" %% "reactivecouchbase-play" % "0.4-SNAPSHOT"
)
resolvers += "ReactiveCouchbase" at "https://raw.github.com/ReactiveCouchbase/repository/master/snapshots" |
Update configuration to record version 0.0.82 | import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.0.81"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.5",
"com.ning" % "async-http-client" % "1.9.39",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.0.82"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.5",
"com.ning" % "async-http-client" % "1.9.39",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Update configuration to record version 0.1.75 | name := "lib-reference-scala"
organization := "io.flow"
scalaVersion in ThisBuild := "2.12.6"
crossScalaVersions := Seq("2.12.6", "2.11.12", "2.10.6")
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.0.5" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
version := "0.1.74"
| name := "lib-reference-scala"
organization := "io.flow"
scalaVersion in ThisBuild := "2.12.6"
crossScalaVersions := Seq("2.12.6", "2.11.12", "2.10.6")
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.0.5" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
version := "0.1.75"
|
Update configuration to record version 0.3.1 | import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
crossScalaVersions := Seq("2.11.8")
version := "0.3.0"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
crossScalaVersions := Seq("2.11.8")
version := "0.3.1"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Update example project plugin version to 0.10.3-SNAPSHOT | // The Play plugin
addSbtPlugin("com.typesafe.play" %% "sbt-plugin" % "2.8.0")
// play swagger plugin
addSbtPlugin("com.iheart" % "sbt-play-swagger" % "0.10.1-SNAPSHOT")
| // The Play plugin
addSbtPlugin("com.typesafe.play" %% "sbt-plugin" % "2.8.0")
// play swagger plugin
addSbtPlugin("com.iheart" % "sbt-play-swagger" % "0.10.3-SNAPSHOT")
|
Reduce log level to debug, as this is expected behaviour | package hyperion.ws
import akka.actor.{Actor, ActorLogging, ActorRef}
import hyperion.MessageDistributor.RegisterReceiver
import hyperion.p1.TelegramReceived
/**
* Actor that forwards telegrams to a WebSocket connection
*
* @param source Ref to an Actor that publishes the telegrams to a [[Stream]].
* @param messageDistributor Ref to the Actor that distributes messages.
*/
class ActualValuesHandlerActor(val source: ActorRef, val messageDistributor: ActorRef) extends Actor with ActorLogging {
override def preStart(): Unit = {
log.info("Registering for live updates")
messageDistributor ! RegisterReceiver
}
override def receive: Receive = {
case tr: TelegramReceived =>
source ! tr
case a: Any =>
log.debug(s"Ignoring $a")
}
}
| package hyperion.ws
import akka.actor.{Actor, ActorLogging, ActorRef}
import hyperion.MessageDistributor.RegisterReceiver
import hyperion.p1.TelegramReceived
/**
* Actor that forwards telegrams to a WebSocket connection
*
* @param source Ref to an Actor that publishes the telegrams to a [[Stream]].
* @param messageDistributor Ref to the Actor that distributes messages.
*/
class ActualValuesHandlerActor(val source: ActorRef, val messageDistributor: ActorRef) extends Actor with ActorLogging {
override def preStart(): Unit = {
log.debug("Registering for live updates")
messageDistributor ! RegisterReceiver
}
override def receive: Receive = {
case tr: TelegramReceived =>
source ! tr
case a: Any =>
log.debug(s"Ignoring $a")
}
}
|
Use Long instead of Int as the index type. | import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import scala.io.Source
import java.io.File
object SMTI {
def main(args: Array[String]) {
if (args.length != 1) {
println("Usage: /path/to/spark/bin/spark-submit " + "--class SMTI "
+ "target/scala-*/smti-assembly-*.jar " + "prefListDir")
sys.exit(1)
}
// Set up Spark environment
val conf = new SparkConf().setAppName("SMTI")
val sc = new SparkContext(conf)
// Load data
val prefListDir = args(0)
val config = loadConfig(prefListDir)
println(config(0))
// Clean up
sc.stop()
}
def loadConfig(dir: String): Array[Int] = {
val lines = Source.fromFile(new File(dir, "config.txt")).getLines().toArray
assert(lines.length == 1)
lines(0).split(" ").map( x => x.toInt )
}
/* Raw preference list for one person.
* From the most preferred to the least.
* Ties are represented using negative indices, i.e., negative index has the
* same preference as the one before it.
*/
type RawPrefList = List[Int]
}
| import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import scala.io.Source
import java.io.File
object SMTI {
def main(args: Array[String]) {
if (args.length != 1) {
println("Usage: /path/to/spark/bin/spark-submit " + "--class SMTI "
+ "target/scala-*/smti-assembly-*.jar " + "prefListDir")
sys.exit(1)
}
// Set up Spark environment
val conf = new SparkConf().setAppName("SMTI")
val sc = new SparkContext(conf)
// Load data
val prefListDir = args(0)
val config = loadConfig(prefListDir)
println(config(0))
// Clean up
sc.stop()
}
def loadConfig(dir: String): Array[Long] = {
val lines = Source.fromFile(new File(dir, "config.txt")).getLines().toArray
assert(lines.length == 1)
lines(0).split(" ").map( x => x.toLong )
}
/* Raw preference list for one person.
* From the most preferred to the least.
* Ties are represented using negative indices, i.e., negative index has the
* same preference as the one before it.
*/
type RawPrefList = Array[Long]
}
|
Add test for different arities | object Example extends App {
final case class Foo[A](run: A ?=> Int) {
// def copy[A]: this.A ?=> Int = (using a) => run
}
}
| object Example extends App {
final case class Foo[A](run: A ?=> Int)
}
object Example2 extends App {
final case class Foo[A, B](run: (A, B) ?=> Int)
}
object Example3 extends App {
final case class Foo[A, B](run: () ?=> Int)
}
|
Add hint to ignore common_field_types | package io.flow.lint.linters
import io.flow.lint.Linter
import com.bryzek.apidoc.spec.v0.models.{Field, Model, Service}
/**
* For well known field names, enforce specific types to ensure
* consistency. For example, all fields named 'id' must be
* strings.
*/
case object CommonFieldTypes extends Linter with Helpers {
val Expected = Map(
"id" -> "string", // we use string identifiers for all of our resources
"number" -> "string", // 'number' is the external unique identifier
"guid" -> "uuid",
"email" -> "string"
)
override def validate(service: Service): Seq[String] = {
service.models.flatMap(validateModel(service, _))
}
def validateModel(service: Service, model: Model): Seq[String] = {
model.fields.flatMap(validateFieldType(service, model, _))
}
def validateFieldType(service: Service, model: Model, field: Field): Seq[String] = {
Expected.get(field.name) match {
case None => {
Nil
}
case Some(exp) => {
exp == field.`type` match {
case true => {
Nil
}
case false => {
Seq(error(model, field, s"Type must be '$exp' and not ${field.`type`}"))
}
}
}
}
}
}
| package io.flow.lint.linters
import io.flow.lint.Linter
import com.bryzek.apidoc.spec.v0.models.{Field, Model, Service}
/**
* For well known field names, enforce specific types to ensure
* consistency. For example, all fields named 'id' must be
* strings.
*/
case object CommonFieldTypes extends Linter with Helpers {
val Expected = Map(
"id" -> "string", // we use string identifiers for all of our resources
"number" -> "string", // 'number' is the external unique identifier
"guid" -> "uuid",
"email" -> "string"
)
override def validate(service: Service): Seq[String] = {
service.models.
filter(m => !ignored(m.attributes, "common_field_types")).
flatMap(validateModel(service, _))
}
def validateModel(service: Service, model: Model): Seq[String] = {
model.fields.flatMap(validateFieldType(service, model, _))
}
def validateFieldType(service: Service, model: Model, field: Field): Seq[String] = {
Expected.get(field.name) match {
case None => {
Nil
}
case Some(exp) => {
exp == field.`type` match {
case true => {
Nil
}
case false => {
Seq(error(model, field, s"Type must be '$exp' and not ${field.`type`}"))
}
}
}
}
}
}
|
Update configuration to record version 0.0.70 | import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.0.69"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.4",
"com.ning" % "async-http-client" % "1.9.39",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.0.70"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.4",
"com.ning" % "async-http-client" % "1.9.39",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Change controller to return the right format | package io.aigar.controller
import io.aigar.controller.response._
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json._
class GameController extends AigarStack with JacksonJsonSupport {
protected implicit val jsonFormats: Formats = DefaultFormats
before() {
contentType = formats("json")
}
object GameData {
var all = List(
GameState(1, 13),
GameState(1, 50)
)
}
case class Failure(data: String)
case class Success(data: String)
get("/:id") {
GameData.all find (_.id.toString() == params("id")) match {
case Some(b) => b
case None => halt(404)
}
}
post("/") {
Success("Well done, the game was created")
}
post("/:id/action") {
Success("Are you sure you want to do that?")
}
}
| package io.aigar.controller
import io.aigar.controller.response._
import org.json4s.{DefaultFormats, Formats}
import org.scalatra.json._
class GameController extends AigarStack with JacksonJsonSupport {
protected implicit val jsonFormats: Formats = DefaultFormats
before() {
contentType = formats("json")
}
object GameStates {
var all = List(
GameState(1, 13),
GameState(1, 50)
)
}
case class Failure(data: String)
case class Success(data: String)
get("/:id") {
GameStateResponse(
GameStates.all find (_.id.toString() == params("id")) match {
case Some(b) => b
case None => halt(404)
}
)
}
post("/") {
GameCreationResponse(GameCreation(42, "http://somewherekindasafe.xyz"))
}
post("/:id/action") {
SuccessResponse("ok")
}
}
|
Upgrade Mysql JDBC driver to be compatible with latest Mysql version | name := "Degage - db layer"
normalizedName := "db"
version := "1.2-SNAPSHOT"
organization := "be.ugent.degage"
crossPaths := false
libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.34"
libraryDependencies += "com.google.guava" % "guava" % "17.0"
libraryDependencies += "com.novocode" % "junit-interface" % "0.10" % "test"
// TODO: use java.security instead ?
libraryDependencies += "org.mindrot" % "jbcrypt" % "0.3m"
autoScalaLibrary := false
parallelExecution in Test := false
| name := "Degage - db layer"
normalizedName := "db"
version := "1.2-SNAPSHOT"
organization := "be.ugent.degage"
crossPaths := false
libraryDependencies += "mysql" % "mysql-connector-java" % "5.1.36"
libraryDependencies += "com.google.guava" % "guava" % "17.0"
libraryDependencies += "com.novocode" % "junit-interface" % "0.10" % "test"
// TODO: use java.security instead ?
libraryDependencies += "org.mindrot" % "jbcrypt" % "0.3m"
autoScalaLibrary := false
parallelExecution in Test := false
|
Make SplitBranch's expr a Predicate | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.pig.op
import dbis.pig.plan._
import dbis.pig.schema._
import scala.collection.mutable.ArrayBuffer
case class SplitBranch(val outPipeName: String, val expr: Expr)
/**
* SplitInto represents the SPLIT INTO operator of Pig.
*
* @param initialInPipeName the names of the input pipe.
* @param splits a list of split branches (output pipe + condition)
*/
case class SplitInto(val initialInPipeName: String, val splits: List[SplitBranch])
extends PigOperator("", initialInPipeName) {
override def initialOutPipeNames: List[String] = splits.map{ case branch => branch.outPipeName }
override def lineageString: String = {
s"""SPLIT%""" + super.lineageString
}
}
| /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dbis.pig.op
case class SplitBranch(val outPipeName: String, val expr: Predicate)
/**
* SplitInto represents the SPLIT INTO operator of Pig.
*
* @param initialInPipeName the names of the input pipe.
* @param splits a list of split branches (output pipe + condition)
*/
case class SplitInto(val initialInPipeName: String, val splits: List[SplitBranch])
extends PigOperator("", initialInPipeName) {
override def initialOutPipeNames: List[String] = splits.map{ case branch => branch.outPipeName }
override def lineageString: String = {
s"""SPLIT%""" + super.lineageString
}
}
|
Update configuration to record version 0.2.90 | name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.9" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.2.89"
| name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.9" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.2.90"
|
Fix the simple example: optional multiplier | import io.hydrosphere.mist.lib.MistJob
object SimpleContext extends MistJob {
/** Contains implementation of spark job with ordinary [[org.apache.spark.SparkContext]]
* Abstract method must be overridden
*
* @param numbers list of int to process
* @return result of the job
*/
def doStuff(numbers: List[Int], multiplier: Int): Map[String, Any] = {
val rdd = context.parallelize(numbers)
Map("result" -> rdd.map(x => x * multiplier).collect())
}
}
| import io.hydrosphere.mist.lib.MistJob
object SimpleContext extends MistJob {
/** Contains implementation of spark job with ordinary [[org.apache.spark.SparkContext]]
* Abstract method must be overridden
*
* @param numbers list of int to process
* @return result of the job
*/
def doStuff(numbers: List[Int], multiplier: Option[Int]): Map[String, Any] = {
val rdd = context.parallelize(numbers)
Map("result" -> rdd.map(x => x * multiplier.getOrElse(2)).collect())
}
}
|
Change the supervision strategy of UserActionActor router. | package beyond
import akka.actor.Actor
import akka.actor.OneForOneStrategy
import akka.actor.Props
import akka.actor.SupervisorStrategy._
import akka.routing.ConsistentHashingRouter
class BeyondSupervisor extends Actor {
override def preStart() {
val numProcessors = Runtime.getRuntime().availableProcessors()
val router = ConsistentHashingRouter(nrOfInstances = numProcessors)
context.actorOf(Props[UserActionActor].withRouter(router), name = "userActionActor")
context.actorOf(Props[LauncherSupervisor], name = "launcherSupervisor")
}
override val supervisorStrategy =
OneForOneStrategy() {
// FIXME: Need policy for all exceptions escalated by Beyond actors.
case t =>
super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate)
}
override def receive: Receive = {
case _ =>
}
}
| package beyond
import akka.actor.Actor
import akka.actor.OneForOneStrategy
import akka.actor.Props
import akka.actor.SupervisorStrategy
import akka.actor.SupervisorStrategy._
import akka.routing.ConsistentHashingRouter
class BeyondSupervisor extends Actor {
override def preStart() {
val numProcessors = Runtime.getRuntime().availableProcessors()
// Routers default to a strategy of "always escalate". This is problematic because
// a failure in a routee is escalated up to the router's supervisor for handling.
// If the router's supervisor decides to restart the child, (which is the default,
// unfortunately), the router and all of its routees are restarted.
//
// So override the router's strategy with SupervisorStrategy.defaultStrategy which
// restarts only the failing child actor upon Exception.
//
// See Routers and Supervision section of
// http://doc.akka.io/docs/akka/2.2.1/scala/routing.html for further discussions.
val router = ConsistentHashingRouter(nrOfInstances = numProcessors,
supervisorStrategy = SupervisorStrategy.defaultStrategy)
context.actorOf(Props[UserActionActor].withRouter(router), name = "userActionActor")
context.actorOf(Props[LauncherSupervisor], name = "launcherSupervisor")
}
override val supervisorStrategy =
OneForOneStrategy() {
// FIXME: Need policy for all exceptions escalated by Beyond actors.
case t =>
super.supervisorStrategy.decider.applyOrElse(t, (_: Any) => Escalate)
}
override def receive: Receive = {
case _ =>
}
}
|
Improve handling of missing user on initialization of commit importer - should now allow starting the application | package com.softwaremill.codebrag.service.github.jgit
import com.softwaremill.codebrag.service.github.{GitHubCommitImportService, GitHubCommitImportServiceFactory}
import com.softwaremill.codebrag.dao.{UserDAO, CommitInfoDAO}
import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider
import com.softwaremill.codebrag.common.EventBus
import com.softwaremill.codebrag.service.config.CodebragConfig
import com.typesafe.scalalogging.slf4j.Logging
class JgitGitHubCommitImportServiceFactory(commitInfoDao: CommitInfoDAO,
userDao: UserDAO,
eventBus: EventBus,
codebragConfiguration: CodebragConfig) extends GitHubCommitImportServiceFactory with Logging {
def createInstance(login: String): GitHubCommitImportService = {
val importingUserOpt = userDao.findByLoginOrEmail(login)
val token = importingUserOpt match {
case Some(user) => user.authentication.token
case None => {
logger.warn("User $login not found in DB. Cannot properly initialize commit importer")
s"user-$login-not-found"
}
}
val credentials = new UsernamePasswordCredentialsProvider(token, "")
val uriBuilder = new GitHubRemoteUriBuilder
new GitHubCommitImportService(
new JgitGitHubCommitsLoader(
new JgitFacade(credentials),
new InternalGitDirTree(codebragConfiguration),
new JgitLogConverter,
uriBuilder,
commitInfoDao),
commitInfoDao,
eventBus)
}
}
| package com.softwaremill.codebrag.service.github.jgit
import com.softwaremill.codebrag.service.github.{GitHubCommitImportService, GitHubCommitImportServiceFactory}
import com.softwaremill.codebrag.dao.{UserDAO, CommitInfoDAO}
import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider
import com.softwaremill.codebrag.common.EventBus
import com.softwaremill.codebrag.service.config.CodebragConfig
import com.typesafe.scalalogging.slf4j.Logging
class JgitGitHubCommitImportServiceFactory(commitInfoDao: CommitInfoDAO,
userDao: UserDAO,
eventBus: EventBus,
codebragConfiguration: CodebragConfig) extends GitHubCommitImportServiceFactory with Logging {
def createInstance(login: String): GitHubCommitImportService = {
val importingUserOpt = userDao.findByLoginOrEmail(login)
val token = importingUserOpt match {
case Some(user) => user.authentication.token
case None => {
logger.warn(s"User $login not found in DB. Cannot properly initialize commit importer")
s"user-$login-not-found"
}
}
val credentials = new UsernamePasswordCredentialsProvider(token, "")
val uriBuilder = new GitHubRemoteUriBuilder
new GitHubCommitImportService(
new JgitGitHubCommitsLoader(
new JgitFacade(credentials),
new InternalGitDirTree(codebragConfiguration),
new JgitLogConverter,
uriBuilder,
commitInfoDao),
commitInfoDao,
eventBus)
}
}
|
Update configuration to record version 0.3.7 | name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.13" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.3.6"
| name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.13" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.3.7"
|
Disable parallel execution for unit tests | organization := "io.reactivex"
name := "rxscala"
lazy val root = project in file(".")
lazy val examples = project in file("examples") dependsOn (root % "test->test;compile->compile") settings(
libraryDependencies ++= Seq(
"org.apache.bcel" % "bcel" % "5.2" % "test"
)
)
scalacOptions in ThisBuild := Seq("-feature", "-unchecked", "-deprecation", "-encoding", "utf8")
scalaVersion in ThisBuild := "2.11.6"
crossScalaVersions in ThisBuild := Seq("2.10.5", "2.11.6")
libraryDependencies ++= Seq(
"io.reactivex" % "rxjava" % "1.0.12",
"org.mockito" % "mockito-core" % "1.9.5" % "test",
"junit" % "junit" % "4.11" % "test",
"org.scalatest" %% "scalatest" % "2.2.2" % "test")
| organization := "io.reactivex"
name := "rxscala"
lazy val root = project in file(".")
lazy val examples = project in file("examples") dependsOn (root % "test->test;compile->compile") settings(
libraryDependencies ++= Seq(
"org.apache.bcel" % "bcel" % "5.2" % "test"
)
)
scalacOptions in ThisBuild := Seq("-feature", "-unchecked", "-deprecation", "-encoding", "utf8")
scalaVersion in ThisBuild := "2.11.6"
crossScalaVersions in ThisBuild := Seq("2.10.5", "2.11.6")
parallelExecution in Test := false
libraryDependencies ++= Seq(
"io.reactivex" % "rxjava" % "1.0.12",
"org.mockito" % "mockito-core" % "1.9.5" % "test",
"junit" % "junit" % "4.11" % "test",
"org.scalatest" %% "scalatest" % "2.2.2" % "test")
|
Tidy up probability option shorthand code | package hmrc.smartstub
import org.scalacheck._
import Gen._
case class RichGen[A](g: Gen[A]) extends AnyVal {
def seeded[IN](in: IN)(implicit tl: ToLong[IN]) =
g(Parameters.default, rng.Seed(tl.asLong(in)))
def asMutable[K](implicit en: Enumerable[K]): PersistentGen[K,A] =
new PersistentGen(
g, scala.collection.concurrent.TrieMap.empty[K,Option[A]]
)
def iterator[K](implicit en: Enumerable[K]): Iterator[A] =
en.iterator.flatMap(seeded(_))
def optFrequency(i: Int): Gen[Option[A]] =
Gen.frequency(
i -> g.map(x => Some(x)),
{100 - i} → const[Option[A]](None)
)
def hardlyEver: Gen[Option[A]] = optFrequency(1)
def rarely: Gen[Option[A]] = optFrequency(25)
def sometimes: Gen[Option[A]] = optFrequency(50)
def usually: Gen[Option[A]] = optFrequency(75)
def almostAlways: Gen[Option[A]] = optFrequency(99)
}
| package hmrc.smartstub
import org.scalacheck._
import Gen._
case class RichGen[A](g: Gen[A]) extends AnyVal {
def seeded[IN](in: IN)(implicit tl: ToLong[IN]) =
g(Parameters.default, rng.Seed(tl.asLong(in)))
def asMutable[K](implicit en: Enumerable[K]): PersistentGen[K,A] =
new PersistentGen(
g, scala.collection.concurrent.TrieMap.empty[K,Option[A]]
)
def iterator[K](implicit en: Enumerable[K]): Iterator[A] =
en.iterator.flatMap(seeded(_))
def optFrequency(i: Int): Gen[Option[A]] =
Gen.frequency(
i -> g.map(Some(_)),
{100 - i} -> const[Option[A]](None)
)
def hardlyEver: Gen[Option[A]] = optFrequency(1)
def rarely: Gen[Option[A]] = optFrequency(25)
def sometimes: Gen[Option[A]] = optFrequency(50)
def usually: Gen[Option[A]] = optFrequency(75)
def almostAlways: Gen[Option[A]] = optFrequency(99)
}
|
Remove square brackets in comments | package uk.ac.wellcome.platform.archive.archivist.flow
import java.io.File
import akka.stream.{FlowShape, IOResult}
import akka.stream.scaladsl.{FileIO, Flow, GraphDSL, Source}
import akka.util.ByteString
import scala.concurrent.Future
/** Takes a [[File]] and a source of bytes, and writes the bytes to the file.
*
* It emits the [[IOResult]] from the file write.
*
*/
object FileStoreFlow {
def apply(tmpFile: File,
parallelism: Int): Flow[ByteString, IOResult, Future[IOResult]] = {
val fileSink = FileIO.toPath(tmpFile.toPath)
Flow
.fromGraph(GraphDSL.create(fileSink) { implicit builder => sink =>
FlowShape(sink.in, builder.materializedValue)
})
.flatMapMerge(parallelism, Source.fromFuture)
}
}
| package uk.ac.wellcome.platform.archive.archivist.flow
import java.io.File
import akka.stream.{FlowShape, IOResult}
import akka.stream.scaladsl.{FileIO, Flow, GraphDSL, Source}
import akka.util.ByteString
import scala.concurrent.Future
/** Takes a File and a source of bytes, and writes the bytes to the file.
*
* It emits the IOResult from the file write.
*
*/
object FileStoreFlow {
def apply(tmpFile: File,
parallelism: Int): Flow[ByteString, IOResult, Future[IOResult]] = {
val fileSink = FileIO.toPath(tmpFile.toPath)
Flow
.fromGraph(GraphDSL.create(fileSink) { implicit builder => sink =>
FlowShape(sink.in, builder.materializedValue)
})
.flatMapMerge(parallelism, Source.fromFuture)
}
}
|
Update configuration to record version 0.1.34 | name := "lib-reference-scala"
organization := "io.flow"
scalaVersion in ThisBuild := "2.12.3"
crossScalaVersions := Seq("2.12.3", "2.11.11", "2.10.6")
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.0.4" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
version := "0.1.33"
| name := "lib-reference-scala"
organization := "io.flow"
scalaVersion in ThisBuild := "2.12.3"
crossScalaVersions := Seq("2.12.3", "2.11.11", "2.10.6")
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.0.4" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
version := "0.1.34"
|
Revert "Update Selenium to the latest version" | name := "arroch"
version := "1.0"
lazy val `arroch` = (project in file(".")).enablePlugins(PlayJava)
scalaVersion := "2.11.7"
libraryDependencies ++= Seq(javaJdbc, cache, javaWs)
libraryDependencies ++= Seq(
javaJpa,
"org.postgresql" % "postgresql" % "42.0.0",
"dom4j" % "dom4j" % "1.6.1",
"org.hibernate" % "hibernate-core" % "5.2.10.Final"
) //Hibernate ORM
libraryDependencies += "org.fluentlenium" % "fluentlenium-core" % "0.10.9"
libraryDependencies += "org.assertj" % "assertj-core" % "3.6.2"
libraryDependencies += "org.seleniumhq.selenium" % "selenium-server" % "3.4.0"
libraryDependencies += "org.seleniumhq.selenium" % "selenium-java" % "3.4.0"
libraryDependencies += "org.seleniumhq.selenium" % "selenium-htmlunit-driver" % "2.52.0"
PlayKeys.externalizeResources := false
unmanagedResourceDirectories in Test += baseDirectory ( _ /"target/web/public/test" ).value
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases"
| name := "arroch"
version := "1.0"
lazy val `arroch` = (project in file(".")).enablePlugins(PlayJava)
scalaVersion := "2.11.7"
libraryDependencies ++= Seq(javaJdbc, cache, javaWs)
libraryDependencies ++= Seq(
javaJpa,
"org.postgresql" % "postgresql" % "42.0.0",
"dom4j" % "dom4j" % "1.6.1",
"org.hibernate" % "hibernate-core" % "5.2.10.Final"
) //Hibernate ORM
libraryDependencies += "org.fluentlenium" % "fluentlenium-core" % "0.10.9"
libraryDependencies += "org.assertj" % "assertj-core" % "3.6.2"
libraryDependencies += "org.seleniumhq.selenium" % "selenium-java" % "2.48.2"
libraryDependencies += "org.seleniumhq.selenium" % "selenium-htmlunit-driver" % "2.48.2"
PlayKeys.externalizeResources := false
unmanagedResourceDirectories in Test += baseDirectory ( _ /"target/web/public/test" ).value
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases"
|
Add the sbt-idea plugin to work with IntelliJ | //webplugin
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "0.5.0")
//eclipse plugin
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.1")
//sbt-dependency-graph
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
// Resolver for the sbt-assembly plugin
resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.8.5") | //webplugin
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "0.5.0")
//eclipse plugin
addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.1.1")
//IntelliJ IDEA plugin
addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0")
//sbt-dependency-graph
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.7.4")
// Resolver for the sbt-assembly plugin
resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.8.5") |
Update sbt-scalajs, scalajs-compiler to 0.6.29 | libraryDependencies += "org.scala-js" %% "scalajs-env-selenium" % "0.3.0"
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.28")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.11")
// addSbtPlugin("ch.epfl.scala" % "sbt-scalajs-bundler" % "0.10.0") | libraryDependencies += "org.scala-js" %% "scalajs-env-selenium" % "0.3.0"
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.29")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.2")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.11")
// addSbtPlugin("ch.epfl.scala" % "sbt-scalajs-bundler" % "0.10.0") |
Remove deprecated kryo serializer buffer size. | /**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fnothaft.ananas
import org.bdgenomics.utils.misc.SparkFunSuite
trait AnanasFunSuite extends SparkFunSuite {
override val appName: String = "ananas"
override val properties: Map[String, String] = Map(("spark.serializer", "org.apache.spark.serializer.KryoSerializer"),
("spark.kryo.registrator", "org.bdgenomics.adam.serialization.ADAMKryoRegistrator"),
("spark.kryoserializer.buffer.mb", "4"),
("spark.kryo.referenceTracking", "true"))
override val master: String = "local[1]"
}
| /**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fnothaft.ananas
import org.bdgenomics.utils.misc.SparkFunSuite
trait AnanasFunSuite extends SparkFunSuite {
override val appName: String = "ananas"
override val properties: Map[String, String] = Map(("spark.serializer", "org.apache.spark.serializer.KryoSerializer"),
("spark.kryo.registrator", "org.bdgenomics.adam.serialization.ADAMKryoRegistrator"))
override val master: String = "local[1]"
}
|
Use ScalaCheck in FicusConfig spec | package net.ceedubs.ficus
import com.typesafe.config.ConfigFactory
import FicusConfig.{ booleanValueReader, optionValueReader, toFicusConfig }
class FicusConfigSpec extends Spec { def is =
"A Ficus config should" ^
"be implicitly converted from a Typesafe config" ! implicitlyConverted ^
"read a value with a value reader" ! readAValue ^
"get a value as an option"
"accept a CongigKey and return the appropriate type" ! acceptAConfigKey
def implicitlyConverted = {
val cfg = ConfigFactory.parseString("myValue = true")
cfg.as[Boolean]("myValue") must beTrue
}
def readAValue = {
val cfg = ConfigFactory.parseString("myValue = true")
cfg.as[Boolean]("myValue") must beTrue
}
def getAsOption = {
val cfg = ConfigFactory.parseString("myValue = true")
(cfg.getAs[Boolean]("myValue") must beSome(true)) and (cfg.getAs[Boolean]("nonValue") must beNone)
}
def acceptAConfigKey = {
val cfg = ConfigFactory.parseString("myValue = true")
val key: ConfigKey[Boolean] = SimpleConfigKey("myValue")
cfg(key) must beTrue
}
}
| package net.ceedubs.ficus
import com.typesafe.config.ConfigFactory
import FicusConfig.{ booleanValueReader, optionValueReader, toFicusConfig }
class FicusConfigSpec extends Spec { def is =
"A Ficus config should" ^
"be implicitly converted from a Typesafe config" ! implicitlyConverted ^
"read a value with a value reader" ! readAValue ^
"get an existing value as a Some" + getAsSome ^
"get a missing value as a None" + getAsNone ^
"accept a CongigKey and return the appropriate type" ! acceptAConfigKey
def implicitlyConverted = {
val cfg = ConfigFactory.parseString("myValue = true")
cfg.as[Boolean]("myValue") must beTrue
}
def readAValue = prop { b: Boolean =>
val cfg = ConfigFactory.parseString(s"myValue = $b")
cfg.as[Boolean]("myValue") must beEqualTo(b)
}
def getAsSome = prop { b: Boolean =>
val cfg = ConfigFactory.parseString(s"myValue = $b")
cfg.getAs[Boolean]("myValue") must beSome(b)
}
def getAsNone = {
val cfg = ConfigFactory.parseString("myValue = true")
cfg.getAs[Boolean]("nonValue") must beNone
}
def acceptAConfigKey = prop { b: Boolean =>
val cfg = ConfigFactory.parseString(s"myValue = $b")
val key: ConfigKey[Boolean] = SimpleConfigKey("myValue")
cfg(key) must beEqualTo(b)
}
}
|
Add some error handling to the API | package net.matthaynes.ner.controller
import net.matthaynes.ner.service._
import org.scalatra._
import net.liftweb.json._
import net.liftweb.json.Serialization.{write}
class ApiServlet extends ScalatraServlet {
implicit val formats = new Formats {
val dateFormat = DefaultFormats.lossless.dateFormat
override val typeHints = ShortTypeHints(List(classOf[NamedEntity]))
override val typeHintFieldName = "type"
}
def respond(responseBody : Map[String, Any], responseStatus : Int = 200) : String = {
status(responseStatus)
contentType = "application/json"
write(responseBody)
}
def respondWithError(message : String, responseStatus : Int = 500) = {
val responseBody = Map[String, Map[String, Any]]("error" -> Map[String, Any]("status" -> responseStatus, "message" -> message))
respond(responseBody, responseStatus)
}
notFound {
respondWithError("Not Found", 404)
}
methodNotAllowed { allow =>
response.setHeader("Allow", allow.mkString(", "))
respondWithError("Method Not Allowed", 405)
}
error {
case e : java.util.NoSuchElementException => respondWithError("Bad Request " + e.getMessage, 400)
}
}
| package net.matthaynes.ner.controller
import net.matthaynes.ner.service._
import org.scalatra._
import net.liftweb.json._
import net.liftweb.json.Serialization.{write}
class ApiServlet extends ScalatraServlet {
implicit val formats = new Formats {
val dateFormat = DefaultFormats.lossless.dateFormat
override val typeHints = ShortTypeHints(List(classOf[NamedEntity]))
override val typeHintFieldName = "type"
}
def respond(responseBody : Map[String, Any], responseStatus : Int = 200) : String = {
status(responseStatus)
contentType = "application/json"
write(responseBody)
}
def respondWithError(message : String, responseStatus : Int = 500) = {
val responseBody = Map[String, Map[String, Any]]("error" -> Map[String, Any]("status" -> responseStatus, "message" -> message))
respond(responseBody, responseStatus)
}
notFound {
respondWithError("Not Found", 404)
}
methodNotAllowed { allow =>
response.setHeader("Allow", allow.mkString(", "))
respondWithError("Method Not Allowed", 405)
}
error {
case e : java.util.NoSuchElementException => respondWithError("Bad Request " + e.getMessage, 400)
case e : _ => respondWithError("Internal Server Error " + e.getMessage, 500)
}
}
|
Update sbt-http4s-org to 0.12.1 in series/0.22 | libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.10"
// https://github.com/coursier/coursier/issues/450
classpathTypes += "maven-plugin"
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.34")
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "4.2.4")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0")
addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")
addSbtPlugin("com.github.tkawachi" % "sbt-doctest" % "0.9.9")
addSbtPlugin("org.http4s" % "sbt-http4s-org" % "0.12.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.5.1")
addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.8")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
| libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.10"
// https://github.com/coursier/coursier/issues/450
classpathTypes += "maven-plugin"
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.9.34")
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "4.2.4")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0")
addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")
addSbtPlugin("com.github.tkawachi" % "sbt-doctest" % "0.9.9")
addSbtPlugin("org.http4s" % "sbt-http4s-org" % "0.12.1")
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
addSbtPlugin("com.typesafe.sbt" % "sbt-twirl" % "1.5.1")
addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.8")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
|
Allow passing a Source to XSLT.transform | package com.github.eerohele.expek
import java.io.{File, StringReader}
import javax.xml.transform.Source
import javax.xml.transform.stream.StreamSource
import net.sf.saxon.s9api._
import scala.xml.Elem
/** A trait that lets you run XSLT transformations. */
trait XsltSupport {
import utils.NodeConversions._
val xsltCompiler: XsltCompiler
/** Functions for converting an XSLT stylesheet into a [[Source]]. */
object XSLT {
/** Read a stylesheet from a file. */
def file(xslt: String): Source = file(new File(xslt))
/** Read a stylesheet from a file. */
def file(xslt: File): Source = new StreamSource(xslt)
/** Read a stylesheet from an [[Elem]]. */
def elem(elem: Elem): Source = new StreamSource(new StringReader(elem))
/** Transform an [[Elem]] with the given stylesheet. */
def transform[T <: XdmValue](stylesheet: Source, elem: Elem): T = {
val t: Xslt30Transformer = xsltCompiler.compile(stylesheet).load30
t.applyTemplates(elem).asInstanceOf[T]
}
}
}
| package com.github.eerohele.expek
import java.io.{File, StringReader}
import javax.xml.transform.Source
import javax.xml.transform.stream.StreamSource
import net.sf.saxon.s9api._
import scala.xml.Elem
/** A trait that lets you run XSLT transformations. */
trait XsltSupport {
import utils.NodeConversions._
val xsltCompiler: XsltCompiler
/** Functions for converting an XSLT stylesheet into a [[Source]]. */
object XSLT {
/** Read a stylesheet from a file. */
def file(xslt: String): Source = file(new File(xslt))
/** Read a stylesheet from a file. */
def file(xslt: File): Source = new StreamSource(xslt)
/** Read a stylesheet from an [[Elem]]. */
def elem(elem: Elem): Source = new StreamSource(new StringReader(elem))
/** Transform a [[Source]] with the given stylesheet. */
def transform[T <: XdmValue](stylesheet: Source, input: Source): T = {
val t: Xslt30Transformer = xsltCompiler.compile(stylesheet).load30
t.applyTemplates(input).asInstanceOf[T]
}
/** Transform an [[Elem]] with the given stylesheet. */
def transform[T <: XdmValue](stylesheet: Source, elem: Elem): T = transform(stylesheet, elem)
}
}
|
Print app list on stdout rather than stderr in list command | package coursier.cli.install
import caseapp.core.app.CaseApp
import caseapp.core.RemainingArgs
import coursier.install.InstallDir
object List extends CaseApp[ListOptions] {
def run(options: ListOptions, args: RemainingArgs): Unit = {
val params = ListParams(options)
val installDir = InstallDir(params.installPath, new NoopCache)
val names = installDir.list()
System.err.print(names.map(_ + System.lineSeparator).mkString)
}
}
| package coursier.cli.install
import caseapp.core.app.CaseApp
import caseapp.core.RemainingArgs
import coursier.install.InstallDir
object List extends CaseApp[ListOptions] {
def run(options: ListOptions, args: RemainingArgs): Unit = {
val params = ListParams(options)
val installDir = InstallDir(params.installPath, new NoopCache)
val names = installDir.list()
print(names.map(_ + System.lineSeparator).mkString)
}
}
|
Set default max connections to 512 | /*
* Copyright 2014-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s.blaze
import java.nio.ByteBuffer
import org.http4s.blaze.pipeline.LeafBuilder
import scala.concurrent.Future
package object channel {
type SocketPipelineBuilder = SocketConnection => Future[LeafBuilder[ByteBuffer]]
/** Default number of threads used to make a new
* [[org.http4s.blaze.channel.nio1.SelectorLoopPool]] if not specified
*/
val DefaultPoolSize: Int =
math.max(4, Runtime.getRuntime.availableProcessors() + 1)
/** Default max number of connections that can be active at any time.
* A negative number means that there is no max.
*/
val DefaultMaxConnections: Int = -1
}
| /*
* Copyright 2014-2020 http4s.org
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.http4s.blaze
import java.nio.ByteBuffer
import org.http4s.blaze.pipeline.LeafBuilder
import scala.concurrent.Future
package object channel {
type SocketPipelineBuilder = SocketConnection => Future[LeafBuilder[ByteBuffer]]
/** Default number of threads used to make a new
* [[org.http4s.blaze.channel.nio1.SelectorLoopPool]] if not specified
*/
val DefaultPoolSize: Int =
math.max(4, Runtime.getRuntime.availableProcessors() + 1)
/** Default max number of connections that can be active at any time.
* A negative number means that there is no max.
*/
val DefaultMaxConnections: Int = 512
}
|
Add SKINNY_PORT as a supported env variable | package skinny.standalone
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.servlet.{ DefaultServlet, ServletContextHandler }
import org.eclipse.jetty.webapp.WebAppContext
import org.scalatra.servlet.ScalatraListener
/**
* Jetty server launcher for standalone apps.
*
* see: http://scalatra.org/2.2/guides/deployment/standalone.html
*/
object JettyLauncher {
def main(args: Array[String]) {
val port = getEnvVarOrSysProp("skinny.port").map(_.toInt) getOrElse 8080
val server = new Server(port)
val context = new WebAppContext()
context setContextPath "/"
context.setResourceBase("src/main/webapp")
context.addEventListener(new ScalatraListener)
context.addServlet(classOf[DefaultServlet], "/")
server.setHandler(context)
server.start
server.join
}
def getEnvVarOrSysProp(key: String): Option[String] = {
sys.env.get(key) orElse sys.props.get(key)
}
}
| package skinny.standalone
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.servlet.{ DefaultServlet, ServletContextHandler }
import org.eclipse.jetty.webapp.WebAppContext
import org.scalatra.servlet.ScalatraListener
/**
* Jetty server launcher for standalone apps.
*
* see: http://scalatra.org/2.2/guides/deployment/standalone.html
*/
object JettyLauncher {
def main(args: Array[String]) {
val port = sys.env.get("SKINNY_PORT")
.orElse(getEnvVarOrSysProp("skinny.port"))
.map(_.toInt)
.getOrElse(8080)
val server = new Server(port)
val context = new WebAppContext()
context setContextPath "/"
context.setResourceBase("src/main/webapp")
context.addEventListener(new ScalatraListener)
context.addServlet(classOf[DefaultServlet], "/")
server.setHandler(context)
server.start
server.join
}
def getEnvVarOrSysProp(key: String): Option[String] = {
sys.env.get(key) orElse sys.props.get(key)
}
}
|
Set project version to 0.1.5-BETA | import sbt._
import sbt.Keys._
import android.Keys._
import android.Plugin._
object Build extends android.AutoBuild
{
lazy val main = Project( "toolbelt", file( "." ) )
.settings( buildAar: _* )
.settings(
libraryDependencies ++= Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value,
"com.android.support" % "support-v4" % "20.0.0",
"com.github.japgolly.android" % "svg-android" % "2.0.6"
),
name := "Toolbelt",
organization := "com.taig.android",
scalaVersion := "2.11.2",
scalacOptions ++= Seq(
"-deprecation",
"-feature",
"-language:dynamics",
"-language:implicitConversions",
"-language:reflectiveCalls"
),
version := "0.1.4-BETA",
libraryProject in Android := true,
minSdkVersion in Android := "10",
targetSdkVersion in Android := "21"
)
} | import sbt._
import sbt.Keys._
import android.Keys._
import android.Plugin._
object Build extends android.AutoBuild
{
lazy val main = Project( "toolbelt", file( "." ) )
.settings( buildAar: _* )
.settings(
libraryDependencies ++= Seq(
"org.scala-lang" % "scala-reflect" % scalaVersion.value,
"com.android.support" % "support-v4" % "20.0.0",
"com.github.japgolly.android" % "svg-android" % "2.0.6"
),
name := "Toolbelt",
organization := "com.taig.android",
scalaVersion := "2.11.2",
scalacOptions ++= Seq(
"-deprecation",
"-feature",
"-language:dynamics",
"-language:implicitConversions",
"-language:reflectiveCalls"
),
version := "0.1.5-BETA",
libraryProject in Android := true,
minSdkVersion in Android := "10",
targetSdkVersion in Android := "21"
)
} |
Format date string in Scala.js | package wvlet.log
/**
*
*/
object LogTimestampFormatter {
//val systemZone = ZoneId.systemDefault().normalized()
// val noSpaceTimestampFormat = new DateTimeFormatterBuilder()
// .parseCaseInsensitive()
// .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
// .appendLiteral('-')
// .appendValue(MONTH_OF_YEAR, 2)
// .appendLiteral('-')
// .appendValue(DAY_OF_MONTH, 2)
// .appendLiteral('T')
// .appendValue(HOUR_OF_DAY, 2)
// .appendLiteral(':')
// .appendValue(MINUTE_OF_HOUR, 2)
// .appendLiteral(':')
// .appendValue(SECOND_OF_MINUTE, 2)
// .appendLiteral('.')
// .appendValue(MILLI_OF_SECOND, 3)
// .appendOffset("+HHMM", "Z")
// .toFormatter(Locale.US)
//
// val humanReadableTimestampFormatter = new DateTimeFormatterBuilder()
// .parseCaseInsensitive()
// .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
// .appendLiteral('-')
// .appendValue(MONTH_OF_YEAR, 2)
// .appendLiteral('-')
// .appendValue(DAY_OF_MONTH, 2)
// .appendLiteral(' ')
// .appendValue(HOUR_OF_DAY, 2)
// .appendLiteral(':')
// .appendValue(MINUTE_OF_HOUR, 2)
// .appendLiteral(':')
// .appendValue(SECOND_OF_MINUTE, 2)
// .appendOffset("+HHMM", "Z")
// .toFormatter(Locale.US)
def formatTimestamp(timeMillis: Long): String = {
//val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(timeMillis), systemZone)
timeMillis.toString
//humanReadableTimestampFormatter.format(timestamp)
}
def formatTimestampWithNoSpaace(timeMillis: Long): String = {
//val timestamp = ZonedDateTime.ofInstant(Instant.ofEpochMilli(timeMillis), systemZone)
timeMillis.toString
//noSpaceTimestampFormat.format(timestamp)
}
}
| package wvlet.log
import scala.scalajs.js
/**
* Use scalajs.js.Date to foramte timestamp
*/
object LogTimestampFormatter {
def formatTimestamp(timeMillis: Long): String = {
new js.Date(timeMillis).toISOString()
}
def formatTimestampWithNoSpaace(timeMillis: Long): String = {
new js.Date(timeMillis).toISOString()
}
}
|
Update configuration to record version 0.3.0 | name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.11" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.2.99"
| name := "lib-reference-scala"
organization := "io.flow"
scalaVersion := "2.13.6"
lazy val allScalacOptions = Seq(
"-feature",
"-Xfatal-warnings",
"-unchecked",
"-Xcheckinit",
"-Xlint:adapted-args",
"-Ypatmat-exhaust-depth", "100", // Fixes: Exhaustivity analysis reached max recursion depth, not all missing cases are reported.
"-Wconf:src=generated/.*:silent",
"-Wconf:src=target/.*:silent", // silence the unused imports errors generated by the Play Routes
)
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "3.2.11" % Test,
),
credentials += Credentials(
"Artifactory Realm",
"flow.jfrog.io",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.jfrog.io/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
scalacOptions ++= allScalacOptions
version := "0.3.0"
|
Implement transitive lib dependencies filter | package org.jetbrains.sbt.execution
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.OrderEnumerationHandler
/**
* @author Nikolay.Tropin
*/
class SbtOrderEnumeratorHandler extends OrderEnumerationHandler {
override def shouldProcessDependenciesRecursively(): Boolean = false
}
class SbtOrderEnumeratorHandlerFactory extends OrderEnumerationHandler.Factory {
override def createHandler(module: Module): OrderEnumerationHandler = new SbtOrderEnumeratorHandler
override def isApplicable(project: Project): Boolean = {
// ModuleManager.getInstance(project).getModules.exists(isApplicable)
false
}
override def isApplicable(module: Module): Boolean = {
// SbtSystemSettings.getInstance(module.getProject).getLinkedProjectSettings(module).isDefined
false
}
} | package org.jetbrains.sbt.execution
import java.util
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil
import com.intellij.openapi.module.{ModuleManager, Module}
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.OrderEnumerationHandler.AddDependencyType
import com.intellij.openapi.roots.impl.ModuleOrderEnumerator
import com.intellij.openapi.roots._
import com.intellij.util.CommonProcessors
import org.jetbrains.sbt.project.SbtProjectSystem
/**
* @author Nikolay.Tropin
*/
class SbtOrderEnumeratorHandler extends OrderEnumerationHandler {
override def shouldAddDependency(orderEntry: OrderEntry, settings: OrderEnumeratorSettings): AddDependencyType = {
(orderEntry, settings) match {
case (library: LibraryOrderEntry, enumerator: ModuleOrderEnumerator) =>
val isTransitive = getModuleFromEnumerator(enumerator).fold(false)(_ != library.getOwnerModule)
if (isTransitive) AddDependencyType.DO_NOT_ADD else AddDependencyType.DEFAULT
case _ =>
AddDependencyType.DEFAULT
}
}
private def getModuleFromEnumerator(enumerator: ModuleOrderEnumerator): Option[Module] = {
// This method assumes that `processRootModules` in `ModuleOrderEnumerator` calls
// given processor only on module extracted from its underlying `ModuleRootModel`.
// If this behaviour is subject to change, it's better to roll back to reflection calls to inner fields.
import scala.collection.JavaConverters._
val modules = new util.ArrayList[Module]()
enumerator.processRootModules(new CommonProcessors.CollectProcessor[Module](modules))
modules.asScala.headOption
}
}
class SbtOrderEnumeratorHandlerFactory extends OrderEnumerationHandler.Factory {
override def createHandler(module: Module): OrderEnumerationHandler = new SbtOrderEnumeratorHandler
override def isApplicable(project: Project): Boolean = {
ModuleManager.getInstance(project).getModules.exists(isApplicable)
}
override def isApplicable(module: Module): Boolean = {
ExternalSystemApiUtil.isExternalSystemAwareModule(SbtProjectSystem.Id, module)
}
} |
Add a type annotation for "log" method. | package util
import org.slf4j.LoggerFactory
trait Logger {
val log = LoggerFactory.getLogger(this.getClass.getName)
} | package util
import org.slf4j
import org.slf4j.LoggerFactory
trait Logger {
val log: slf4j.Logger = LoggerFactory.getLogger(this.getClass.getName)
} |
Call only an external script from the debug servlet | package com.softwaremill.codebrag.rest.debug
import com.softwaremill.codebrag.rest.JsonServlet
import com.softwaremill.codebrag.service.commits.{RepoDataProducer, CommitImportService}
import net.liftweb.mongodb.record.MongoMetaRecord
import com.softwaremill.codebrag.dao._
import com.softwaremill.codebrag.service.config.CodebragConfig
import com.foursquare.rogue.LiftRogue._
class DebugServlet(repoDataProducer: RepoDataProducer,
commitImportService: CommitImportService,
configuration: CodebragConfig)
extends JsonServlet with DebugBasicAuthSupport {
override def login = configuration.debugServicesLogin
override def password = configuration.debugServicesPassword
get("/resetAll") {
basicAuth()
dropAllDataExceptInitialUsers()
triggerRepositoryUpdate()
"Reset successfull."
}
def triggerRepositoryUpdate() {
repoDataProducer.createFromConfiguration().foreach(commitImportService.importRepoCommits(_))
}
def dropAllDataExceptInitialUsers() {
val list: List[MongoMetaRecord[_]] = List(
CommitInfoRecord,
CommitReviewTaskRecord,
FollowupRecord,
CommentRecord,
LikeRecord
)
list.foreach(_.drop)
deleteUsersExcludingInitial()
}
def deleteUsersExcludingInitial() {
UserRecord.where(_.authentication.subfield(_.provider) eqs "GitHub").
and(_.authentication.subfield(_.usernameLowerCase) neqs "codebrag").
bulkDelete_!!!
}
}
object DebugServlet {
val MappingPath = "debug"
}
| package com.softwaremill.codebrag.rest.debug
import com.softwaremill.codebrag.rest.JsonServlet
import com.softwaremill.codebrag.service.commits.{RepoDataProducer, CommitImportService}
import com.softwaremill.codebrag.service.config.CodebragConfig
import scala.sys.process.Process
import org.apache.commons.lang3.exception.ExceptionUtils
class DebugServlet(repoDataProducer: RepoDataProducer,
commitImportService: CommitImportService,
configuration: CodebragConfig)
extends JsonServlet with DebugBasicAuthSupport {
override def login = configuration.debugServicesLogin
override def password = configuration.debugServicesPassword
get("/resetAll") {
basicAuth()
val homeDir = System.getProperty("user.home")
try
{
Process("./resetAll.sh", new java.io.File(homeDir)).!
"Reset successfull."
}
catch {
case exception: Throwable => ExceptionUtils.getStackTrace(exception)
}
}
}
object DebugServlet {
val MappingPath = "debug"
}
|
Use the actual parameter name | package notification.services.guardian
import models.{PlatformCount, Topic}
import play.api.libs.ws.WSClient
import utils.LruCache
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.DurationLong
class ReportTopicRegistrationCounter(
ws: WSClient,
registrationCounterUrl: String
)(implicit ec: ExecutionContext) extends TopicRegistrationCounter {
val lruCache: LruCache[PlatformCount] = new LruCache[PlatformCount](200, 1000, 3.days)
override def count(topics: List[Topic]): Future[PlatformCount] = {
lruCache(topics.toSet) {
val topicParameters = topics.map(t => "t" -> t.toString)
ws.url(registrationCounterUrl)
.withQueryStringParameters(topicParameters: _*)
.get
.map(response => response.json.as[PlatformCount])
}
}
}
| package notification.services.guardian
import models.{PlatformCount, Topic}
import play.api.libs.ws.WSClient
import utils.LruCache
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration.DurationLong
class ReportTopicRegistrationCounter(
ws: WSClient,
registrationCounterUrl: String
)(implicit ec: ExecutionContext) extends TopicRegistrationCounter {
val lruCache: LruCache[PlatformCount] = new LruCache[PlatformCount](200, 1000, 3.days)
override def count(topics: List[Topic]): Future[PlatformCount] = {
lruCache(topics.toSet) {
val topicParameters = topics.map(topic => "topics" -> topic.toString)
ws.url(registrationCounterUrl)
.withQueryStringParameters(topicParameters: _*)
.get
.map(response => response.json.as[PlatformCount])
}
}
}
|
Fix compilation error for scala 2.11 | package org.scalafmt.config
import metaconfig._
@DeriveConfDecoder
case class SortSettings(
order: Vector[SortSettings.ModKey]
)
object SortSettings {
implicit val SortSettingsModKeyReader: ConfDecoder[ModKey] =
ReaderUtil.oneOfIgnoreBackticks[ModKey](
`implicit`,
`final`,
`sealed`,
`abstract`,
`override`,
`private`,
`protected`,
`lazy`,
)
val defaultOrder: Vector[ModKey] =
Vector(
`implicit`,
//
`final`,
`sealed`,
`abstract`,
//
`override`,
//
`private`,
`protected`,
//
`lazy`,
)
def default: SortSettings =
SortSettings(defaultOrder)
sealed trait ModKey
case object `private` extends ModKey
case object `protected` extends ModKey
case object `final` extends ModKey
case object `sealed` extends ModKey
case object `abstract` extends ModKey
case object `implicit` extends ModKey
case object `override` extends ModKey
case object `lazy` extends ModKey
}
| package org.scalafmt.config
import metaconfig._
@DeriveConfDecoder
case class SortSettings(
order: Vector[SortSettings.ModKey]
)
object SortSettings {
implicit val SortSettingsModKeyReader: ConfDecoder[ModKey] =
ReaderUtil.oneOfIgnoreBackticks[ModKey](
`implicit`,
`final`,
`sealed`,
`abstract`,
`override`,
`private`,
`protected`,
`lazy`
)
val defaultOrder: Vector[ModKey] =
Vector(
`implicit`,
//
`final`,
`sealed`,
`abstract`,
//
`override`,
//
`private`,
`protected`,
//
`lazy`
)
def default: SortSettings =
SortSettings(defaultOrder)
sealed trait ModKey
case object `private` extends ModKey
case object `protected` extends ModKey
case object `final` extends ModKey
case object `sealed` extends ModKey
case object `abstract` extends ModKey
case object `implicit` extends ModKey
case object `override` extends ModKey
case object `lazy` extends ModKey
}
|
Revert "Bump ScalaJS from 0.6.5 to 0.6.6" | addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.6")
| addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.6")
addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.5")
|
Enable make file upload for source java/scala models | package com.ligadata.metadataapiservice
import akka.actor.{Actor, ActorRef}
import akka.event.Logging
import akka.io.IO
import com.ligadata.fatafat.metadata._
import spray.routing.RequestContext
import spray.httpx.SprayJsonSupport
import spray.client.pipelining._
import org.json4s.jackson.JsonMethods._
import scala.util.{ Success, Failure }
import com.ligadata.MetadataAPI._
object UploadModelConfigService {
case class Process(cfgJson:String)
}
class UploadModelConfigService(requestContext: RequestContext, userid:Option[String], password:Option[String], cert:Option[String]) extends Actor {
import UploadModelConfigService._
implicit val system = context.system
import system.dispatcher
val log = Logging(system, getClass)
val APIName = "UploadModelConfigService"
def receive = {
case Process(cfgJson) =>
log.debug("Received a upload config request by the actor")
process(cfgJson)
context.stop(self)
}
def process(cfgJson:String) = {
log.debug("Requesting UploadModelConfig {}", cfgJson)
val apiResult = MetadataAPIImpl.UploadModelsConfig(cfgJson, null, null)
requestContext.complete(apiResult)
}
}
| package com.ligadata.metadataapiservice
import akka.actor.{Actor, ActorRef}
import akka.event.Logging
import akka.io.IO
import com.ligadata.fatafat.metadata._
import spray.routing.RequestContext
import spray.httpx.SprayJsonSupport
import spray.client.pipelining._
import org.json4s.jackson.JsonMethods._
import scala.util.{ Success, Failure }
import com.ligadata.MetadataAPI._
object UploadModelConfigService {
case class Process(cfgJson:String)
}
class UploadModelConfigService(requestContext: RequestContext, userid:Option[String], password:Option[String], cert:Option[String]) extends Actor {
import UploadModelConfigService._
implicit val system = context.system
import system.dispatcher
val log = Logging(system, getClass)
val APIName = "UploadModelConfigService"
def receive = {
case Process(cfgJson) =>
log.debug("Received a upload config request by the actor")
process(cfgJson)
context.stop(self)
}
def process(cfgJson:String) = {
log.debug("Requesting UploadModelConfig {}", cfgJson)
val apiResult = MetadataAPIImpl.UploadModelsConfig(cfgJson, userid, null)
requestContext.complete(apiResult)
}
}
|
Use non SNAPSHOT version of ascii-graphs | sbtPlugin := true
name := "sbt-dependency-graph"
organization := "net.virtual-void"
version := "0.6.1-SNAPSHOT"
homepage := Some(url("http://github.com/jrudolph/sbt-dependency-graph"))
licenses in GlobalScope += "Apache License 2.0" -> url("https://github.com/jrudolph/sbt-dependency-graph/raw/master/LICENSE")
(LsKeys.tags in LsKeys.lsync) := Seq("dependency", "graph", "sbt-plugin", "sbt")
(LsKeys.docsUrl in LsKeys.lsync) <<= homepage
(description in LsKeys.lsync) :=
"An sbt plugin to visualize dependencies of your build."
libraryDependencies += "com.github.mdr" %% "ascii-graphs" % "0.0.1-SNAPSHOT"
resolvers += "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots"
| sbtPlugin := true
name := "sbt-dependency-graph"
organization := "net.virtual-void"
version := "0.6.1-SNAPSHOT"
homepage := Some(url("http://github.com/jrudolph/sbt-dependency-graph"))
licenses in GlobalScope += "Apache License 2.0" -> url("https://github.com/jrudolph/sbt-dependency-graph/raw/master/LICENSE")
(LsKeys.tags in LsKeys.lsync) := Seq("dependency", "graph", "sbt-plugin", "sbt")
(LsKeys.docsUrl in LsKeys.lsync) <<= homepage
(description in LsKeys.lsync) :=
"An sbt plugin to visualize dependencies of your build."
libraryDependencies += "com.github.mdr" %% "ascii-graphs" % "0.0.1"
|
Update to prepare for release | releasePublishArtifactsAction := PgpKeys.publishSigned.value
publishMavenStyle := true
pomIncludeRepository := { _ => false }
publishTo := {
val nexus = "https://oss.sonatype.org/"
if (isSnapshot.value)
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
pomExtra := (
<url>https://github.com/waylayio/influxdb-scala</url>
<licenses>
<license>
<name>MIT License</name>
<url>http://www.opensource.org/licenses/mit-license.php</url>
<distribution>repo</distribution>
</license>
</licenses>
<developers>
<developer>
<id>francisdb</id>
<name>Francis De Brabandere</name>
<url>https://github.com/francisdb</url>
</developer>
<developer>
<id>thomastoye</id>
<name>Thomas Toye</name>
<url>https://github.com/thomastoye</url>
</developer>
</developers>) | releasePublishArtifactsAction := PgpKeys.publishSigned.value
publishMavenStyle := true
pomIncludeRepository := { _ => false }
publishTo := {
val nexus = "https://oss.sonatype.org/"
if (isSnapshot.value)
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
updateOptions := updateOptions.value.withGigahorse(false)
pomExtra := (
<url>https://github.com/waylayio/influxdb-scala</url>
<licenses>
<license>
<name>MIT License</name>
<url>http://www.opensource.org/licenses/mit-license.php</url>
<distribution>repo</distribution>
</license>
</licenses>
<developers>
<developer>
<id>francisdb</id>
<name>Francis De Brabandere</name>
<url>https://github.com/francisdb</url>
</developer>
<developer>
<id>thomastoye</id>
<name>Thomas Toye</name>
<url>https://github.com/thomastoye</url>
</developer>
<developer>
<id>brunoballekens</id>
<name>Bruno Ballekens</name>
<url>https://github.com/brunoballekens</url>
</developer>
<developer>
<id>gabrielreid</id>
<name>Gabriel Reid</name>
<url>https://github.com/gabrielreid</url>
</developer>
</developers>) |
Modify a unit test code to make it consistent with a code example in docs | abstract class AbstractFile {
def name: String
val extension: String = name.substring(4)
}
class RemoteFile(url: String) extends AbstractFile {
val localFile: String = url.hashCode + ".tmp" // error
def name: String = localFile
}
| abstract class AbstractFile {
def name: String
val extension: String = name.substring(4)
}
class RemoteFile(url: String) extends AbstractFile {
val localFile: String = s"${url.##}.tmp" // error: usage of `localFile` before it's initialized
def name: String = localFile
}
|
Add documentation comments for addModifier() and addModifiers() | package k2b6s9j.boatcraft.api.registry
import java.util.{HashMap, List, Map}
import scala.collection.JavaConversions.asScalaBuffer
import k2b6s9j.boatcraft.api.traits.Modifier
import net.minecraft.item.ItemStack
/** Contains the methods needed to register Materials with BoatCraft:Core. */
object ModifierRegistry
{
var modifiers: Map[String, Modifier] = new HashMap[String, Modifier]
def addModifier(newMaterial: Modifier)
{
modifiers put(newMaterial toString, newMaterial)
}
def addModifiers(newMaterials: List[Modifier])
{
for (modifier <- newMaterials)
modifiers put(modifier toString, modifier)
}
def getModifier(name: String) =
modifiers get name
def getModifier(stack: ItemStack) =
modifiers get (stack.stackTagCompound getString "modifier")
} | package k2b6s9j.boatcraft.api.registry
import java.util.{HashMap, List, Map}
import scala.collection.JavaConversions.asScalaBuffer
import k2b6s9j.boatcraft.api.traits.Modifier
import net.minecraft.item.ItemStack
/** Contains the methods needed to register Materials with BoatCraft:Core. */
object ModifierRegistry
{
var modifiers: Map[String, Modifier] = new HashMap[String, Modifier]
/** Adds a single Modifier to the Map used by BoatCraft:Core for boat creation.
*
* @param newModifier the Modifier being registered
*/
def addModifier(newModifier: Modifier)
{
modifiers put(newModifier toString, newModifier)
}
/** Adds a List of Modifiers to the Map used by BoatCraft:Core for boat creation.
*
* @param newModifiers list of Modifiers being registered
*/
def addModifiers(newModifiers: List[Modifier])
{
for (modifier <- newModifiers)
modifiers put(modifier toString, modifier)
}
def getModifier(name: String) =
modifiers get name
def getModifier(stack: ItemStack) =
modifiers get (stack.stackTagCompound getString "modifier")
} |
Make Router.route return an Option[SandboxedLanguage], and None if we hit an unimplemented language. | package gd.eval
import gd.eval.languages._
object Router {
def route(language: String, code: String) = language match {
case "ruby" | "mri" => Ruby(code)
case "scala" => Scala(code)
case "c" => C(code)
}
}
| package gd.eval
import gd.eval.languages._
object Router {
def route(language: String, code: String) = language match {
case "ruby" | "mri" => Some(Ruby(code))
case "scala" => Some(Scala(code))
case "c" => Some(C(code))
case _ => None
}
}
|
Update circe-core, circe-generic, ... to 0.14.3 in series/0.23 | scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.14.2",
"org.http4s" %% "http4s-ember-client" % "0.23.15",
"org.http4s" %% "http4s-circe" % "0.23.15",
)
// native packager and scoverage are conflicting...
libraryDependencySchemes ++= Seq(
"org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always
)
| scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.14.3",
"org.http4s" %% "http4s-ember-client" % "0.23.15",
"org.http4s" %% "http4s-circe" % "0.23.15",
)
// native packager and scoverage are conflicting...
libraryDependencySchemes ++= Seq(
"org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always
)
|
Add SBT plugin for TeamCity test logging | addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.12.0")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "0.8.5")
| resolvers += Resolver.url("sbt-plugin-snapshots", new URL("http://repo.scala-sbt.org/scalasbt/sbt-plugin-snapshots/"))(Resolver.ivyStylePatterns)
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.12.0")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "0.8.5")
addSbtPlugin("org.jetbrains" % "sbt-teamcity-logger" % "0.1.0-SNAPSHOT") |
Update configuration to record version 0.2.10 | import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
crossScalaVersions := Seq("2.11.8")
version := "0.2.9"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
crossScalaVersions := Seq("2.11.8")
version := "0.2.10"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Add a test for array load | object Test {
def len(x: Array[String]): Unit = x.length
def check(x: => Any) = try { x; sys.error("failed to throw NPE!") } catch { case _: NullPointerException => }
def main(args: Array[String]) {
check(len(null))
}
}
| object Test {
def len(x: Array[String]): Unit = x.length
def load(x: Array[String]): Unit = x(0)
def check(x: => Any) = try { x; sys.error("failed to throw NPE!") } catch { case _: NullPointerException => }
def main(args: Array[String]) {
check(len(null)) // bug: did not NPE
check(load(null))
}
}
|
Increment version from 0.0.22 => 0.0.23 | import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.7"
crossScalaVersions := Seq("2.11.7")
version := "0.0.22"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
"org.scalatest" %% "scalatest" % "2.2.6" % "test"
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.7"
crossScalaVersions := Seq("2.11.7")
version := "0.0.23"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
"org.scalatest" %% "scalatest" % "2.2.6" % "test"
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Update sbt-scalajs, scalajs-compiler to 1.1.1 | addSbtPlugin("com.dwijnand" % "sbt-travisci" % "1.2.0")
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3")
addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.1.0")
| addSbtPlugin("com.dwijnand" % "sbt-travisci" % "1.2.0")
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.3")
addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.1.1")
|
Update sbt-native-packager to 1.9.10 in series/0.23 | libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.11"
// https://github.com/coursier/coursier/issues/450
classpathTypes += "maven-plugin"
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "4.2.4")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
addSbtPlugin("com.github.tkawachi" % "sbt-doctest" % "0.10.0")
addSbtPlugin("org.http4s" % "sbt-http4s-org" % "0.14.4")
addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.9")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.10.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.1")
| libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.2.11"
// https://github.com/coursier/coursier/issues/450
classpathTypes += "maven-plugin"
addSbtPlugin("com.earldouglas" % "xsbt-web-plugin" % "4.2.4")
addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.11.0")
addSbtPlugin("com.github.tkawachi" % "sbt-doctest" % "0.10.0")
addSbtPlugin("org.http4s" % "sbt-http4s-org" % "0.14.4")
addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.9.10")
addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.10.1")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.1")
|
Update tut plugin to 0.4.1 | libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.10"
resolvers += Resolver.url(
"tut-plugin",
url("http://dl.bintray.com/content/tpolecat/sbt-plugin-releases"))(
Resolver.ivyStylePatterns)
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.4")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.8.5")
addSbtPlugin("de.knutwalker" % "sbt-knutwalker" % "0.2.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.1")
addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.4.0")
| libraryDependencies += "org.slf4j" % "slf4j-nop" % "1.7.10"
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.5.4")
addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "0.8.5")
addSbtPlugin("de.knutwalker" % "sbt-knutwalker" % "0.2.0")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "0.8.1")
addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.4.1")
|
Update sbt-wartremover, wartremover to 3.0.5 | resolvers ++= Seq(
Classpaths.typesafeReleases,
Classpaths.sbtPluginReleases,
"jgit-repo" at "https://download.eclipse.org/jgit/maven",
Resolver.sonatypeRepo("snapshots")
)
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2")
addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.13")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "3.0.4")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1")
| resolvers ++= Seq(
Classpaths.typesafeReleases,
Classpaths.sbtPluginReleases,
"jgit-repo" at "https://download.eclipse.org/jgit/maven",
Resolver.sonatypeRepo("snapshots")
)
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
addSbtPlugin("com.github.sbt" % "sbt-unidoc" % "0.5.0")
addSbtPlugin("com.github.sbt" % "sbt-pgp" % "2.1.2")
addSbtPlugin("com.github.sbt" % "sbt-release" % "1.1.0")
addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.13")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("com.47deg" % "sbt-microsites" % "1.3.4")
addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6")
addSbtPlugin("org.wartremover" % "sbt-wartremover" % "3.0.5")
addSbtPlugin("ch.epfl.scala" % "sbt-scalafix" % "0.10.1")
|
Increment version from 0.0.16 => 0.0.17 | import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-lint"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.7"
version := "0.0.16"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.4.6",
"com.ning" % "async-http-client" % "1.9.32",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-lint"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.7"
version := "0.0.17"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.4.6",
"com.ning" % "async-http-client" % "1.9.32",
"org.scalatest" %% "scalatest" % "2.2.6" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Put back the XXX line in partest. | package scala.tools
package partest
import nsc.io.{ File, Path, Directory }
import util.{ PathResolver }
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
object PartestDefaults {
import nsc.Properties._
private def wrapAccessControl[T](body: => Option[T]): Option[T] =
try body catch { case _: java.security.AccessControlException => None }
def testRootName = propOrNone("partest.root")
def srcDirName = propOrElse("partest.srcdir", "files")
def testRootDir = testRootName map (x => Directory(x))
def classPath = propOrElse("partest.classpath", "")
def javaCmd = propOrElse("partest.javacmd", "java")
def javacCmd = propOrElse("partest.javac_cmd", "javac")
def javaOpts = propOrElse("partest.java_opts", "")
def scalacOpts = propOrElse("partest.scalac_opts", "-deprecation")
def testBuild = propOrNone("partest.build")
def errorCount = propOrElse("partest.errors", "0").toInt
def numActors = propOrElse("partest.actors", "6").toInt
def poolSize = wrapAccessControl(propOrNone("actors.corePoolSize"))
def timeout = "1200000"
}
| package scala.tools
package partest
import nsc.io.{ File, Path, Directory }
import util.{ PathResolver }
import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
object PartestDefaults {
import nsc.Properties._
private def wrapAccessControl[T](body: => Option[T]): Option[T] =
try body catch { case _: java.security.AccessControlException => None }
def testRootName = propOrNone("partest.root")
def srcDirName = propOrElse("partest.srcdir", "files")
def testRootDir = testRootName map (x => Directory(x))
// def classPath = propOrElse("partest.classpath", "")
def classpath = PathResolver.Environment.javaUserClassPath // XXX
def javaCmd = propOrElse("partest.javacmd", "java")
def javacCmd = propOrElse("partest.javac_cmd", "javac")
def javaOpts = propOrElse("partest.java_opts", "")
def scalacOpts = propOrElse("partest.scalac_opts", "-deprecation")
def testBuild = propOrNone("partest.build")
def errorCount = propOrElse("partest.errors", "0").toInt
def numActors = propOrElse("partest.actors", "6").toInt
def poolSize = wrapAccessControl(propOrNone("actors.corePoolSize"))
def timeout = "1200000"
}
|
Update mdoc, sbt-mdoc to 2.3.0 | addSbtPlugin("com.lightbend.paradox" % "sbt-paradox" % "0.9.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.1")
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
addSbtPlugin("org.typelevel" % "sbt-typelevel-ci-release" % "0.4.5")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.2")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.21")
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.2.24")
| addSbtPlugin("com.lightbend.paradox" % "sbt-paradox" % "0.9.2")
addSbtPlugin("com.typesafe.sbt" % "sbt-site" % "1.4.1")
addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3")
addSbtPlugin("org.typelevel" % "sbt-typelevel-ci-release" % "0.4.5")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.9.3")
addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.2")
addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.3")
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.5")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.21")
addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.3.0")
|
Update sbt-crossproject, ... to 1.0.0 | addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.1")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.32")
addSbtPlugin("org.portable-scala" % "sbt-crossproject" % "0.6.1")
addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "0.6.1")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.16")
addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.6.13")
addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.10")
addSbtPlugin("ch.epfl.scala" % "sbt-scalajs-bundler" % "0.12.0")
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.2")
| addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.6.1")
addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.1")
addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.13")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.32")
addSbtPlugin("org.portable-scala" % "sbt-crossproject" % "1.0.0")
addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0")
addSbtPlugin("com.lucidchart" % "sbt-scalafmt" % "1.16")
addSbtPlugin("org.tpolecat" % "tut-plugin" % "0.6.13")
addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.3")
addSbtPlugin("io.github.davidgregory084" % "sbt-tpolecat" % "0.1.10")
addSbtPlugin("ch.epfl.scala" % "sbt-scalajs-bundler" % "0.12.0")
addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.2")
|
Switch signup form to not be an AJAX form. | package code
package snippet
import net.liftweb.http._
import net.liftweb.util.Helpers._
import com.hacklanta.formality.Formality
import Formality._
import model.User
object Signup {
import net.liftweb.sitemap._
import Loc._
val menu =
Menu.i("signup") / "signup" >>
If(
LoginHelpers.notLoggedIn_? _,
() => RedirectResponse("/")
)
val loc = menu.loc
}
class Signup {
def form = {
val registrationForm =
Formality.form withField
field[String]("#email") withField
field[String]("#password") ajaxFormalize() onSuccess {
case email :+: password :+: HNil =>
LoginHelpers.logUserIn(User.create(User(email, password)))
S.redirectTo("/")
}
"form" #> registrationForm.binder()
}
}
| package code
package snippet
import net.liftweb.http._
import net.liftweb.util.Helpers._
import com.hacklanta.formality.Formality
import Formality._
import model.User
object Signup {
import net.liftweb.sitemap._
import Loc._
val menu =
Menu.i("signup") / "signup" >>
If(
LoginHelpers.notLoggedIn_? _,
() => RedirectResponse("/")
)
val loc = menu.loc
}
class Signup {
def form = {
val registrationForm =
Formality.form withField
field[String]("#email") withField
field[String]("#password") formalize() onSuccess {
case email :+: password :+: HNil =>
LoginHelpers.logUserIn(User.create(User(email, password)))
S.redirectTo("/")
}
"form" #> registrationForm.binder()
}
}
|
Add "Write Whole Class" submission: improving the report output by removing color characters which can't be treated correctly in html | package service
import java.io.ByteArrayOutputStream
import org.scalatest.Suite
import scala.util.{Failure, Success, Try}
/**
* Runs scalatest library test suite using the 'execute' method
*/
object ScalaTestRunner {
val failedMarker = "FAILED"
val failedInRuntimeMarker = "failed in runtime"
val userClass = "UserSolution"
def execSuite(solution: String, suiteClass: Class[Suite], solutionTrait: Class[AnyRef]): String = {
Try {
val solutionInstance = createSolutionInstance(solution, solutionTrait)
execSuite(suiteClass.getConstructor(solutionTrait).newInstance(solutionInstance))
} match {
case Success(s) => s
case Failure(e) => s"Test failed in runtime with error:\n${e.getMessage}'"
}
}
def execSuite(suiteInstance: Suite): String = {
val stream = new ByteArrayOutputStream
Console.withOut(stream) {
suiteInstance.execute(stats = true, shortstacks = true, durations = true)
}
stream.toString
}
private def createSolutionInstance(solution: String, solutionTrait: Class[AnyRef]): AnyRef = {
import scala.reflect.runtime._
val cm = universe.runtimeMirror(getClass.getClassLoader)
import scala.tools.reflect.ToolBox
val tb = cm.mkToolBox()
val patchedSolution = solution.replaceFirst("(class [A-Za-z0-9]* )", s"class $userClass extends ${solutionTrait.getSimpleName} ")
val dynamicCode = s"import ${solutionTrait.getName}; $patchedSolution; new $userClass"
tb.eval(tb.parse(dynamicCode)).asInstanceOf[AnyRef]
}
} | package service
import java.io.ByteArrayOutputStream
import org.scalatest.{run, Suite}
import scala.util.{Failure, Success, Try}
/**
* Runs scalatest library test suite using the 'execute' method
*/
object ScalaTestRunner {
val failedMarker = "FAILED"
val failedInRuntimeMarker = "failed in runtime"
val userClass = "UserSolution"
def execSuite(solution: String, suiteClass: Class[Suite], solutionTrait: Class[AnyRef]): String = {
Try {
val solutionInstance = createSolutionInstance(solution, solutionTrait)
execSuite(suiteClass.getConstructor(solutionTrait).newInstance(solutionInstance))
} match {
case Success(s) => s
case Failure(e) => s"Test failed in runtime with error:\n${e.getMessage}'"
}
}
def execSuite(suiteInstance: Suite): String = {
val stream = new ByteArrayOutputStream
Console.withOut(stream) {
suiteInstance.execute(color = false)
}
stream.toString
}
private def createSolutionInstance(solution: String, solutionTrait: Class[AnyRef]): AnyRef = {
import scala.reflect.runtime._
val cm = universe.runtimeMirror(getClass.getClassLoader)
import scala.tools.reflect.ToolBox
val tb = cm.mkToolBox()
val patchedSolution = solution.replaceFirst("(class [A-Za-z0-9]* )", s"class $userClass extends ${solutionTrait.getSimpleName} ")
val dynamicCode = s"import ${solutionTrait.getName}; $patchedSolution; new $userClass"
tb.eval(tb.parse(dynamicCode)).asInstanceOf[AnyRef]
}
} |
Update http4s-circe, http4s-ember-client to 0.23.11 in series/0.22 | scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.14.1",
"org.http4s" %% "http4s-ember-client" % "0.23.10",
"org.http4s" %% "http4s-circe" % "0.23.10",
)
| scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.14.1",
"org.http4s" %% "http4s-ember-client" % "0.23.11",
"org.http4s" %% "http4s-circe" % "0.23.11",
)
|
Allow metadata retrieval without unzipping pages. | package edu.umd.mith.hathi
import java.io.File
import scalaz._, Scalaz._
/** Represents a set of metadata files and a data set.
*/
class Collection(val metadataBase: File, datasetBase: File) extends Dataset(datasetBase)
with MetadataJson {
def volume(htid: Htid): Throwable \/ Volume = for {
metadataFile <- isFile(new File(metadataBase, s"${ htid.toFileName }.json"))
metadataJson <- contents(metadataFile)
metadata <- parseVolumeMetadata(htid)(metadataJson)
pages <- pages(htid)
} yield Volume(metadata, pages)
}
| package edu.umd.mith.hathi
import java.io.File
import scalaz._, Scalaz._
/** Represents a set of metadata files and a data set.
*/
class Collection(val metadataBase: File, datasetBase: File) extends Dataset(datasetBase)
with MetadataJson {
def volumeMetadata(htid: Htid): Throwable \/ VolumeMetadata = for {
metadataFile <- isFile(new File(metadataBase, s"${ htid.toFileName }.json"))
metadataJson <- contents(metadataFile)
metadata <- parseVolumeMetadata(htid)(metadataJson)
} yield metadata
def volume(htid: Htid): Throwable \/ Volume = for {
metadata <- volumeMetadata(htid)
pages <- pages(htid)
} yield Volume(metadata, pages)
}
|
Update http4s-blaze-client, ... to 0.21.11 | scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.13.0",
"org.http4s" %% "http4s-blaze-client" % "0.21.9",
"org.http4s" %% "http4s-circe" % "0.21.9",
)
| scalacOptions := Seq(
"-deprecation"
)
// For MimeLoader plugin. Dogfooding and hoping it doesn't clash with
// our other sbt plugins.
libraryDependencies ++= List(
"com.eed3si9n" %% "treehugger" % "0.4.4",
"io.circe" %% "circe-generic" % "0.13.0",
"org.http4s" %% "http4s-blaze-client" % "0.21.11",
"org.http4s" %% "http4s-circe" % "0.21.11",
)
|
Update dependencies to latest stable releases | name := "plotly"
version := "0.2.0"
organization := "co.theasi"
scalaVersion := "2.11.8"
crossScalaVersions := Seq("2.11.8", "2.10.6")
libraryDependencies ++= Seq(
"org.scalaj" %% "scalaj-http" % "2.2.1",
"org.json4s" %% "json4s-native" % "3.3.0",
"org.scalatest" %% "scalatest" % "2.2.4" % "test"
)
initialCommands := """
|import co.theasi.plotly._
""".stripMargin
publishMavenStyle := true
// Publishing
publishTo <<= version { (v: String) =>
val nexus = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT"))
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
// Testing
parallelExecution in Test := false
logBuffered in Test := false
// Documentation
enablePlugins(SiteScaladocPlugin)
ghpages.settings
git.remoteRepo := "git@github.com:ASIDataScience/scala-plotly-client.git"
| name := "plotly"
version := "0.2.0"
organization := "co.theasi"
scalaVersion := "2.11.8"
crossScalaVersions := Seq("2.11.8", "2.10.6")
libraryDependencies ++= Seq(
"org.scalaj" %% "scalaj-http" % "2.2.1",
"org.json4s" %% "json4s-native" % "3.4.1",
"org.scalatest" %% "scalatest" % "3.0.0" % "test"
)
initialCommands := """
|import co.theasi.plotly._
""".stripMargin
publishMavenStyle := true
// Publishing
publishTo <<= version { (v: String) =>
val nexus = "https://oss.sonatype.org/"
if (v.trim.endsWith("SNAPSHOT"))
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
}
// Testing
parallelExecution in Test := false
logBuffered in Test := false
// Documentation
enablePlugins(SiteScaladocPlugin)
ghpages.settings
git.remoteRepo := "git@github.com:ASIDataScience/scala-plotly-client.git"
|
Create a Runnable from a { block } implicitly | package com.michalrus.helper
import android.view.View
import android.widget.Button
import android.view.View.OnClickListener
import android.util.Log
trait ViewHelper {
def log(s: String) = Log.d("com.michalrus.helper", s)
implicit def scalaizeView(v: View) = new ScalaView(v)
class ScalaView(val v: View) {
def find[T](id: Int) = v.findViewById(id).asInstanceOf[T]
}
implicit def scalaizeButton(b: Button) = new ScalaButton(b)
class ScalaButton(val b: Button) {
def onClick[A](f: => A) {
b.setOnClickListener(new OnClickListener {
def onClick(v: View) = f
})
}
}
}
| package com.michalrus.helper
import android.view.View
import android.widget.Button
import android.view.View.OnClickListener
import android.util.Log
trait ViewHelper {
def log(s: String) = Log.d("com.michalrus.helper", s)
implicit def scalaizeView(v: View) = new ScalaView(v)
class ScalaView(val v: View) {
def find[T](id: Int) = v.findViewById(id).asInstanceOf[T]
}
implicit def scalaizeButton(b: Button) = new ScalaButton(b)
class ScalaButton(val b: Button) {
def onClick[A](f: => A) {
b.setOnClickListener(new OnClickListener {
def onClick(v: View) = f
})
}
}
implicit def blockToRunnable(f: => Unit) = new Runnable {
def run() = f
}
}
|
Add correct licence for bintray | name := "hadoop-aws"
organization := "io.grhodes"
version := "git describe --tags --dirty --always".!!.stripPrefix("v").trim
scalaVersion := "2.11.8"
libraryDependencies ++= Seq(
"com.amazonaws" % "aws-java-sdk-s3" % "1.10.68",
"com.fasterxml.jackson.core" % "jackson-databind" % "2.2.3",
"com.fasterxml.jackson.core" % "jackson-annotations" % "2.2.3",
"org.apache.hadoop" % "hadoop-common" % "2.7.2" % Configurations.Provided
)
| name := "hadoop-aws"
organization := "io.grhodes"
version := "git describe --tags --dirty --always".!!.stripPrefix("v").trim
scalaVersion := "2.11.8"
licenses += ("Apache-2.0", url("http://opensource.org/licenses/apache-2.0"))
libraryDependencies ++= Seq(
"com.amazonaws" % "aws-java-sdk-s3" % "1.10.68",
"com.fasterxml.jackson.core" % "jackson-databind" % "2.2.3",
"com.fasterxml.jackson.core" % "jackson-annotations" % "2.2.3",
"org.apache.hadoop" % "hadoop-common" % "2.7.2" % Configurations.Provided
)
|
Update configuration to record version 0.1.10 | import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.1.9"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.9",
"com.ning" % "async-http-client" % "1.9.40",
"org.scalatest" %% "scalatest" % "3.0.1" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import com.github.retronym.SbtOneJar._
oneJarSettings
name := "api-build"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.8"
version := "0.1.10"
exportJars := true
lazy val root = project
.in(file("."))
.settings(
libraryDependencies ++= Seq(
"com.typesafe.play" %% "play-json" % "2.5.9",
"com.ning" % "async-http-client" % "1.9.40",
"org.scalatest" %% "scalatest" % "3.0.1" % Test
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Add test case for reservoir sampling. | package io.reactors
package protocol.algo
import io.reactors.test._
import org.scalatest._
import org.scalatest.concurrent.AsyncTimeLimitedTests
import scala.collection._
import scala.concurrent._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
class AlgoSpec extends FunSuite {
test("reservoir sampling, less than k") {
val e = new Events.Emitter[Int]
val sample = e.reservoirSample(5)
e.react(7)
e.react(17)
e.unreact()
assert(sample().toSeq == Seq(7, 17))
}
test("reservoir sampling, more than k") {
val e = new Events.Emitter[Int]
val sample = e.reservoirSample(5)
val elems = (0 until 16)
for (i <- elems) e.react(i)
e.unreact()
assert(sample().toSeq.length == 5)
assert(sample().toSeq.forall(x => elems.toSet.contains(x)))
assert(sample().distinct.length == 5)
}
}
| package io.reactors
package protocol.algo
import io.reactors.test._
import org.scalatest._
import org.scalatest.concurrent.AsyncTimeLimitedTests
import scala.collection._
import scala.concurrent._
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
class AlgoSpec extends FunSuite {
test("reservoir sampling, no events") {
val e = new Events.Emitter[Int]
val sample = e.reservoirSample(5)
e.unreact()
assert(sample().length == 0)
}
test("reservoir sampling, less than k") {
val e = new Events.Emitter[Int]
val sample = e.reservoirSample(5)
e.react(7)
e.react(17)
e.unreact()
assert(sample().toSeq == Seq(7, 17))
}
test("reservoir sampling, more than k") {
val e = new Events.Emitter[Int]
val sample = e.reservoirSample(5)
val elems = (0 until 16)
for (i <- elems) e.react(i)
e.unreact()
assert(sample().toSeq.length == 5)
assert(sample().toSeq.forall(x => elems.toSet.contains(x)))
assert(sample().distinct.length == 5)
}
}
|
Extend default behaviour of ZK mock | package com.ataraxer.zooowner
import org.apache.zookeeper.ZooKeeper
import org.apache.zookeeper.ZooKeeper.States
import org.apache.zookeeper.data.Stat
import org.scalatest.Suite
import org.mockito.Mockito._
import scala.concurrent.duration.FiniteDuration
trait ZKMock {
object zkMock {
val ephemeralStat = {
val stat = mock(classOf[Stat])
when(stat.getEphemeralOwner).thenReturn(1)
stat
}
val persistentStat = {
val stat = mock(classOf[Stat])
when(stat.getEphemeralOwner).thenReturn(0)
stat
}
val client = {
val zk = mock(classOf[ZooKeeper])
when(zk.getState).thenReturn(States.CONNECTED)
zk
}
}
}
// vim: set ts=2 sw=2 et:
| package com.ataraxer.zooowner
import org.apache.zookeeper.{ZooKeeper, Watcher => ZKWatcher}
import org.apache.zookeeper.ZooKeeper.States
import org.apache.zookeeper.KeeperException._
import org.apache.zookeeper.data.Stat
import org.scalatest.Suite
import org.mockito.Mockito._
import org.mockito.Matchers._
import org.mockito.Matchers.{eq => matchString}
import scala.concurrent.duration.FiniteDuration
trait ZKMock {
object zkMock {
val ephemeralStat = {
val stat = mock(classOf[Stat])
when(stat.getEphemeralOwner).thenReturn(1)
stat
}
val persistentStat = {
val stat = mock(classOf[Stat])
when(stat.getEphemeralOwner).thenReturn(0)
stat
}
def anyWatcher = any(classOf[ZKWatcher])
def anyStat = any(classOf[Stat])
val client = {
val zk = mock(classOf[ZooKeeper])
when(zk.getState).thenReturn(States.CONNECTED)
when(zk.getData(anyString, anyWatcher, anyStat))
.thenThrow(new NoNodeException)
when(zk.exists(anyString, anyWatcher))
.thenReturn(null)
zk
}
}
}
// vim: set ts=2 sw=2 et:
|
Update configuration to record version 0.4.12 | import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.11"
crossScalaVersions := Seq("2.11.11")
version := "0.4.11"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
| import play.PlayImport.PlayKeys._
name := "lib-play"
organization := "io.flow"
scalaVersion in ThisBuild := "2.11.11"
crossScalaVersions := Seq("2.11.11")
version := "0.4.12"
lazy val root = project
.in(file("."))
.enablePlugins(PlayScala)
.settings(
libraryDependencies ++= Seq(
ws,
filters,
"com.jason-goodwin" %% "authentikat-jwt" % "0.4.3",
"org.scalatestplus" %% "play" % "1.4.0" % "test"
),
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/",
resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases",
resolvers += "Artifactory" at "https://flow.artifactoryonline.com/flow/libs-release/",
credentials += Credentials(
"Artifactory Realm",
"flow.artifactoryonline.com",
System.getenv("ARTIFACTORY_USERNAME"),
System.getenv("ARTIFACTORY_PASSWORD")
)
)
publishTo := {
val host = "https://flow.artifactoryonline.com/flow"
if (isSnapshot.value) {
Some("Artifactory Realm" at s"$host/libs-snapshot-local;build.timestamp=" + new java.util.Date().getTime)
} else {
Some("Artifactory Realm" at s"$host/libs-release-local")
}
}
|
Optimize sbt lib dependecies such that they do not conflict | name := """eocene"""
version := "1.0-SNAPSHOT"
lazy val root = (project in file(".")).enablePlugins(PlayScala)
scalaVersion := "2.11.1"
resolvers += Resolver.sonatypeRepo("snapshots")
libraryDependencies ++= Seq(
jdbc,
anorm,
cache,
ws,
"mysql" % "mysql-connector-java" % "5.1.27",
"ws.securesocial" % "securesocial_2.11" % "3.0-M3",
"org.scalatest" %% "scalatest" % "2.2.1" % "test",
"org.scalatestplus" %% "play" % "1.4.0-M3" % "test",
"org.specs2" %% "specs2-core" % "3.7" % "test",
"org.mockito" % "mockito-all" % "1.9.5"
)
| name := """eocene"""
version := "1.0-SNAPSHOT"
lazy val root = (project in file(".")).enablePlugins(PlayScala)
scalaVersion := "2.11.1"
resolvers += Resolver.sonatypeRepo("snapshots")
libraryDependencies ++= Seq(
jdbc,
anorm,
cache,
ws,
"mysql" % "mysql-connector-java" % "5.1.27",
"ws.securesocial" % "securesocial_2.11" % "3.0-M3",
"org.scalatestplus" % "play_2.11" % "1.2.0"% "test",
"org.scalatest" % "scalatest_2.11" % "3.0.0-M1"% "test"
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.