code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1
value | license stringclasses 15
values | size int64 5 1M |
|---|---|---|---|---|---|
type RingF2[A] = RingF[RingF[RingF[A]]] | hmemcpy/milewski-ctfp-pdf | src/content/3.8/code/scala/snippet08.scala | Scala | gpl-3.0 | 39 |
package aafa.model
import io.realm.RealmObject
import io.realm.annotations.PrimaryKey
// Realm persistence entity. Realm requires a non-final class extending
// RealmObject with var fields and default values (no constructor args),
// since it generates a proxy subclass at runtime.
class User extends RealmObject{
  // Primary key column; default is 1 — assumes callers assign unique ids. TODO confirm
  @PrimaryKey var id: Long = 1
  var name: String = "test"
} | aafa/realm-sbt-plugin | src/sbt-test/sbt-realm-test/realm/src/main/scala/aafa/model/model.scala | Scala | mit | 181 |
package com.twitter.diffy.lifter
import com.google.common.net.{HttpHeaders, MediaType}
import com.twitter.io.Charsets
import com.twitter.logging.Logger
import com.twitter.util.{Try, Future}
import org.jboss.netty.handler.codec.http.{HttpResponse, HttpRequest}
import scala.collection.JavaConversions._
object HttpLifter {
  // Response header (supplied by macaw) naming the controller action that served the request.
  val ControllerEndpointHeaderName = "X-Action-Name"
  // Failure used when a response declares a Content-Type this lifter cannot decode.
  def contentTypeNotSupportedException(contentType: String) = new Exception(s"Content type: $contentType is not supported")
  // Same failure pre-wrapped as a failed Future, for use inside flatMap chains.
  def contentTypeNotSupportedExceptionFuture(contentType: String) = Future.exception(contentTypeNotSupportedException(contentType))
  // Wraps a JSON parse failure while preserving the original cause for diagnostics.
  case class MalformedJsonContentException(cause: Throwable)
    extends Exception("Malformed Json content")
  {
    initCause(cause)
  }
}
/** Lifts netty HTTP requests/responses into diffy [[Message]]s for comparison.
  *
  * @param excludeHttpHeadersComparison when true, response headers are left
  *        out of the lifted map entirely.
  */
class HttpLifter(excludeHttpHeadersComparison: Boolean) {
  import HttpLifter._
  private[this] val log = Logger(classOf[HttpLifter])
  /** Lifts the response headers into a comparable map, unless header
    * comparison is disabled. Values of a repeated header name are grouped
    * under a single key and sorted so ordering differences don't diff. */
  private[this] def headersMap(response: HttpResponse): Map[String, Any] = {
    if(!excludeHttpHeadersComparison) {
      val rawHeaders = response.headers.entries().map { header =>
        (header.getKey, header.getValue)
      }.toSeq
      // Group duplicate header names; sort their values for a stable ordering.
      val headers = rawHeaders groupBy { case (name, _) => name } map { case (name, values) =>
        name -> (values map { case (_, value) => value } sorted)
      }
      Map( "headers" -> FieldMap(headers))
    } else Map.empty
  }
  /** Lifts a request into a Message keyed by its "Canonical-Resource" header
    * (if present); the request is compared via its string representation. */
  def liftRequest(req: HttpRequest): Future[Message] = {
    val canonicalResource = Option(req.headers.get("Canonical-Resource"))
    Future.value(Message(canonicalResource, FieldMap(Map("request"-> req.toString))))
  }
  /** Lifts a (possibly failed) response into a Message, dispatching on
    * Content-Type: JSON and HTML bodies are decoded and lifted; an empty body
    * or missing content type compares headers only; any other content type
    * yields a failed Future. */
  def liftResponse(resp: Try[HttpResponse]): Future[Message] = {
    Future.const(resp) flatMap { r: HttpResponse =>
      val mediaTypeOpt: Option[MediaType] =
        Option(r.headers.get(HttpHeaders.CONTENT_TYPE)) map { MediaType.parse }
      // NOTE(review): length.toInt in the first case below throws
      // NumberFormatException on a malformed Content-Length header, which
      // surfaces as a failed Future — confirm this is acceptable.
      val contentLengthOpt = Option(r.headers.get(HttpHeaders.CONTENT_LENGTH))
      /** header supplied by macaw, indicating the controller reached **/
      val controllerEndpoint = Option(r.headers.get(ControllerEndpointHeaderName))
      (mediaTypeOpt, contentLengthOpt) match {
        /** When Content-Length is 0, only compare headers **/
        case (_, Some(length)) if length.toInt == 0 =>
          Future.const(
            Try(Message(controllerEndpoint, FieldMap(headersMap(r))))
          )
        /** When Content-Type is set as application/json, lift as Json **/
        case (Some(mediaType), _) if mediaType.is(MediaType.JSON_UTF_8) || mediaType.toString == "application/json" => {
          val jsonContentTry = Try {
            JsonLifter.decode(r.getContent.toString(Charsets.Utf8))
          }
          // Response map is keyed by status code so status differences diff too.
          Future.const(jsonContentTry map { jsonContent =>
            val responseMap = Map(
              r.getStatus.getCode.toString -> (Map(
                "content" -> jsonContent,
                "chunked" -> r.isChunked
              ) ++ headersMap(r))
            )
            Message(controllerEndpoint, FieldMap(responseMap))
          }).rescue { case t: Throwable =>
            Future.exception(new MalformedJsonContentException(t))
          }
        }
        /** When Content-Type is set as text/html, lift as Html **/
        case (Some(mediaType), _)
          if mediaType.is(MediaType.HTML_UTF_8) || mediaType.toString == "text/html" => {
          val htmlContentTry = Try {
            HtmlLifter.lift(HtmlLifter.decode(r.getContent.toString(Charsets.Utf8)))
          }
          // Note: unlike the JSON branch, an HTML parse failure is not wrapped
          // in a dedicated exception — the raw failure propagates.
          Future.const(htmlContentTry map { htmlContent =>
            val responseMap = Map(
              r.getStatus.getCode.toString -> (Map(
                "content" -> htmlContent,
                "chunked" -> r.isChunked
              ) ++ headersMap(r))
            )
            Message(controllerEndpoint, FieldMap(responseMap))
          })
        }
        /** When content type is not set, only compare headers **/
        case (None, _) => {
          Future.const(Try(
            Message(controllerEndpoint, FieldMap(headersMap(r)))))
        }
        // Any other declared content type is unsupported: log and fail.
        case (Some(mediaType), _) => {
          log.debug(s"Content type: $mediaType is not supported")
          contentTypeNotSupportedExceptionFuture(mediaType.toString)
        }
      }
    }
  }
}
| camiloribeiro/diffy | src/main/scala/com/twitter/diffy/lifter/HttpLifter.scala | Scala | apache-2.0 | 4,281 |
package com.sksamuel.elastic4s.requests.nodes
/** DSL entry points for building node-info and node-stats requests. */
trait NodesApi {
  // Info for an explicit collection of node names.
  def nodeInfo(names: Iterable[String]) = NodeInfoRequest(names.toSeq)
  // Varargs convenience overload of the above.
  def nodeInfo(names: String*) = NodeInfoRequest(names)
  // Stats for every node in the cluster (no node filter).
  def nodeStats(): NodeStatsRequest = NodeStatsRequest(Seq.empty)
  // Stats for one or more explicitly named nodes.
  def nodeStats(first: String, rest: String*): NodeStatsRequest = NodeStatsRequest(first +: rest)
  // Stats for an explicit collection of node names.
  def nodeStats(nodes: Iterable[String]): NodeStatsRequest = NodeStatsRequest(nodes.toSeq)
}
/** Node statistics request; an empty `nodes` sequence targets all nodes. */
case class NodeStatsRequest(nodes: Seq[String], stats: Seq[String] = Seq.empty) {
  // Returns a copy restricted to the given stats sections.
  def stats(stats: Seq[String]): NodeStatsRequest = copy(stats = stats)
}
/** Node info request; an empty `nodes` sequence targets all nodes. */
case class NodeInfoRequest(nodes: Seq[String])
| stringbean/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/nodes/NodesApi.scala | Scala | apache-2.0 | 691 |
/*
* Copyright 2012 Jahziah Wagner <jahziah[dot]wagner[at]gmail[dot]com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.openlobby.login
import com.openlobby.communication.ListenerObserver
import com.openlobby.communication.ListenerService
import com.openlobby.communication.MessengerService
import com.springrts.unitsync.Unitsync
import org.apache.felix.dm.DependencyActivatorBase
import org.apache.felix.dm.DependencyManager
import org.osgi.framework.BundleContext
import org.osgi.service.log.LogService
/** OSGi bundle activator wiring LoginServiceImpl into the Felix Dependency
  * Manager. The implementation is exposed under both the LoginService and
  * ListenerObserver interfaces. */
class Activator extends DependencyActivatorBase {
  def init(ctx : BundleContext, manager : DependencyManager) {
    manager.add(createComponent
                .setInterface(Array(classOf[LoginService].getName, classOf[ListenerObserver].getName), null)
                .setImplementation(classOf[LoginServiceImpl])
                // Required: component stays unregistered until a ListenerService appears.
                .add(createServiceDependency.setService(classOf[ListenerService])
                     .setRequired(true)
                )
                // Optional: logging may be absent.
                .add(createServiceDependency
                     .setService(classOf[LogService])
                     .setRequired(false)
                )
                // Optional: unitsync native bridge may be absent.
                .add(createServiceDependency
                     .setService(classOf[Unitsync])
                     .setRequired(false)
                )
                // Required: outgoing messages depend on the MessengerService.
                .add(createServiceDependency
                     .setService(classOf[MessengerService])
                     .setRequired(true)
                )
    )
  }
  // Nothing to tear down: Dependency Manager unregisters the component itself.
  def destroy(ctx : BundleContext, manager : DependencyManager) {
  }
}
| jahwag/OpenLobby | modules/Login/src/main/scala/com/openlobby/login/Activator.scala | Scala | apache-2.0 | 2,063 |
package pt.cnbc.wikimodels.client.snippet
import _root_.net.liftweb.common._
import _root_.net.liftweb.http._
import S._
import _root_.net.liftweb.util._
import Helpers._
import _root_.scala.xml._
import _root_.pt.cnbc.wikimodels.rest.client.RestfulAccess
/**
* Created by IntelliJ IDEA.
* User: alex
* Date: 26-04-2011
* Time: 16:33
* To change this template use File | Settings | File Templates.
*/
class ExportSBMLModelSnip {
  /** Fetches the SBML model named by the `modelMetaId` request parameter and
    * wraps it in an {@code <sbml>} level 2 / version 4 root element.
    *
    * Fix: previously the wrapped document was assigned to an unused local
    * (`val completeModel = ...`) as the method's last statement, so `doIt`
    * returned Unit and the result was silently discarded. The wrapped
    * element is now the return value so the snippet can render it.
    */
  def doIt = {
    // NOTE(review): openOr substitutes a placeholder string into the URL when
    // the parameter is missing — confirm this error path is intended.
    val model = User.restfulConnection.getRequest("/model/" + S.param("modelMetaId").openOr("TODO: Error export model"))
    <sbml xmlns="http://www.sbml.org/sbml/level2/version4" level="2" version="4">{
      model
    }</sbml>
  }
}
package models.conf
import com.mysql.jdbc.exceptions.jdbc4.MySQLIntegrityConstraintViolationException
import exceptions.UniqueNameException
import play.api.Play.current
import models.PlayCache
import scala.slick.driver.MySQLDriver.simple._
import scala.slick.jdbc.JdbcBackend
import scala.slick.lifted.ProvenShape
/**
 * Project template.
 *
 * @author of546
 */
case class Template(id: Option[Int], name: String, remark: Option[String], dependentProjectIds: Seq[Int])
// Form-side variant carrying the template's item list; `toTemplate` drops the
// items and leaves dependent project ids empty (they are not form fields).
case class TemplateFrom(id: Option[Int], name: String, remark: Option[String], items: List[TemplateItem]) {
  def toTemplate = Template(id, name, remark, Seq.empty)
}
/** Slick mapping for the `template` table. */
class TemplateTable(tag: Tag) extends Table[Template](tag, "template") {
  def id = column[Int]("id", O.PrimaryKey, O.AutoInc)
  def name = column[String]("name")
  def remark = column[String]("remark", O.Nullable)
  // dependent_project is persisted as a comma-separated VARCHAR(254);
  // an empty string round-trips to Seq.empty. Non-numeric content would
  // throw on read — assumed never written. TODO confirm
  def dependentProjectIds = column[Seq[Int]]("dependent_project", O.DBType("VARCHAR(254)"))(MappedColumnType.base[Seq[Int], String](
    _.mkString(","), _ match {
      case e if e.isEmpty => Seq.empty
      case x => x.split(",").map(_.toInt).toSeq
    }))
  override def * = (id.?, name, remark.?, dependentProjectIds) <> (Template.tupled, Template.unapply _)
  // Template names must be unique (see UniqueNameException in TemplateHelper).
  def idx = index("idx_name", name, unique = true)
}
/** CRUD helpers for templates. Public methods manage their own session or
  * transaction; underscore-prefixed variants run inside a caller-supplied
  * session so they can participate in larger transactions. */
object TemplateHelper extends PlayCache {
  import models.AppDB._
  val qTemplate = TableQuery[TemplateTable]
  def findById(id: Int) = db withSession { implicit session =>
    qTemplate.filter(_.id === id).firstOption
  }
  def findByName(name: String) = db withSession { implicit session =>
    qTemplate.filter(_.name === name).firstOption
  }
  def all = db withSession { implicit session =>
    qTemplate.list
  }
  def create(template: Template) = db withSession { implicit session =>
    _create(template)
  }
  // Creates the template and its items in one transaction; returns the item count.
  def create(template: Template, items: Seq[TemplateItem]) = db withTransaction { implicit session =>
    val templateId = _create(template)
    items.map(item => TemplateItemHelper._create(item.copy(None, Some(templateId)))).size
  }
  // Inserts and returns the generated id; duplicate names surface as UniqueNameException.
  @throws[UniqueNameException]
  def _create(template: Template)(implicit session: JdbcBackend#Session) = {
    try {
      qTemplate.returning(qTemplate.map(_.id)).insert(template)(session)
    } catch {
      case x: MySQLIntegrityConstraintViolationException => throw new UniqueNameException
    }
  }
  // Deletes the template and, in the same transaction, all of its items.
  def delete(id: Int) = db withTransaction { implicit session =>
    _delete(id)(session)
    TemplateItemHelper._deleteByTemplateId(id)
  }
  def _delete(id: Int)(implicit session: JdbcBackend#Session) = {
    qTemplate.filter(_.id === id).delete
  }
  def update(id: Int, template: Template) = db withSession { implicit session =>
    _update(id, template)
  }
  // Replaces the template's items wholesale: update row, purge items, re-insert.
  def update(id: Int, template: Template, items: Seq[TemplateItem]) = db withTransaction { implicit session =>
    _update(id, template) // update the template row
    TemplateItemHelper._deleteByTemplateId(id) // delete all existing items of this template
    items.map(item => TemplateItemHelper._create(item.copy(None, Some(id)))).size
  }
  @throws[UniqueNameException]
  def _update(id: Int, template: Template)(implicit session: JdbcBackend#Session) = {
    // Force the path id onto the row so the caller cannot re-key it.
    val template2update = template.copy(Some(id))
    try {
      qTemplate.filter(_.id === id).update(template2update)(session)
    } catch {
      case x: MySQLIntegrityConstraintViolationException => throw new UniqueNameException
    }
  }
}
| sdgdsffdsfff/bugatti | app/models/conf/Template.scala | Scala | bsd-2-clause | 3,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.integration.spark.testsuite.timeseries
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
import org.apache.carbondata.common.exceptions.sql.{MalformedCarbonCommandException, MalformedDataMapCommandException}
import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.TIMESERIES
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class TestTimeSeriesCreateTable extends QueryTest with BeforeAndAfterAll with BeforeAndAfterEach{
val timeSeries = TIMESERIES.toString
var timestampFormat: String = _
override def beforeAll: Unit = {
timestampFormat = CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
sql("DROP TABLE IF EXISTS mainTable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
sql("CREATE TABLE mainTable(dataTime timestamp, name string, city string, age int) STORED BY 'org.apache.carbondata.format'")
sql(
s"""
| CREATE DATAMAP agg0_second ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'SECOND_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
sql(
s"""
| CREATE DATAMAP agg0_day ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'day_granularity'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
sql(
s"""
| CREATE DATAMAP agg0_month ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'month_granularity'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
sql(
s"""
| CREATE DATAMAP agg0_year ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'year_granularity'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
  // Drop any datamaps individual tests may create so test cases stay independent.
  override def afterEach(): Unit = {
    dropDataMaps("mainTable", "agg1_second", "agg1_minute",
      "agg1_hour", "agg1_day", "agg1_month", "agg1_year")
  }
test("test timeseries create table 1") {
checkExistence(sql("DESCRIBE FORMATTED mainTable_agg0_second"), true, "maintable_agg0_second")
sql("DROP DATAMAP agg0_second ON TABLE mainTable")
}
test("test timeseries create table 2") {
checkExistence(sql("DESCRIBE FORMATTED mainTable_agg0_hour"), true, "maintable_agg0_hour")
sql("DROP DATAMAP agg0_hour ON TABLE mainTable")
}
test("test timeseries create table 3") {
checkExistence(sql("DESCRIBE FORMATTED maintable_agg0_day"), true, "maintable_agg0_day")
sql("DROP DATAMAP agg0_day ON TABLE mainTable")
}
test("test timeseries create table 4") {
checkExistence(sql("DESCRIBE FORMATTED mainTable_agg0_month"), true, "maintable_agg0_month")
sql("DROP DATAMAP agg0_month ON TABLE mainTable")
}
test("test timeseries create table 5") {
checkExistence(sql("DESCRIBE FORMATTED mainTable_agg0_year"), true, "maintable_agg0_year")
sql("DROP DATAMAP agg0_year ON TABLE mainTable")
}
test("test timeseries create table 6: TIMESERIES should define time granularity") {
sql("DROP DATAMAP IF EXISTS agg0_second ON TABLE mainTable")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""CREATE DATAMAP agg0_second ON TABLE mainTable USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'SEC_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains("TIMESERIES should define time granularity"))
}
test("test timeseries create table 7: Granularity only support 1") {
sql("DROP DATAMAP IF EXISTS agg0_second ON TABLE mainTable")
val e = intercept[MalformedDataMapCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_second ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'DAY_GRANULARITY'='1.5')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("Granularity only support 1"))
}
test("test timeseries create table 8: Granularity only support 1") {
dropDataMaps("mainTable", "agg1_hour")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""CREATE DATAMAP agg1_hour ON TABLE mainTable USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='hour=-2')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains("Granularity only support "))
checkExistence(sql("SHOW DATAMAP ON TABLE mainTable"), false, "maintable_agg1_hour")
}
test("test timeseries create table 9: SECOND_GRANULARITY is null") {
sql("DROP DATAMAP IF EXISTS agg1 ON TABLE mainTable")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""CREATE DATAMAP agg0_hour ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains("Granularity only support 1"))
}
test("test timeseries create table 10: Table already exists in database") {
val e = intercept[MalformedDataMapCommandException] {
sql(
s"""CREATE DATAMAP agg1_hour ON TABLE mainTable USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
sql(
s"""CREATE DATAMAP agg1_hour ON TABLE mainTable USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains(
"DataMap name 'agg1_hour' already exist"))
}
test("test timeseries create table 11: don't support create timeseries table on non timestamp") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='name',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("Timeseries event time is only supported on Timestamp column"))
}
test("test timeseries create table 12: Time series column dataTime does not exists in select") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'HOUR_GRANULARITY'='1')
| AS SELECT name, SUM(age) FROM mainTable
| GROUP BY name
""".stripMargin)
}
assert(e.getMessage.equals("Time series column dataTime does not exists in select"))
}
test("test timeseries create table 13: don't support create timeseries table on non timestamp") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e = intercept[MalformedCarbonCommandException] {
sql(
s"""CREATE DATAMAP agg0_hour ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='name',
| 'HOUR_GRANULARITY'='1')
|AS SELECT name, SUM(age) FROM mainTable
|GROUP BY name
""".stripMargin)
}
assert(e.getMessage.contains("Timeseries event time is only supported on Timestamp column"))
}
test("test timeseries create table 14: USING") {
val e: Exception = intercept[MalformedDataMapCommandException] {
sql(
"""CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING 'abc'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("DataMap 'abc' not found"))
}
test("test timeseries create table 15: USING and catch MalformedCarbonCommandException") {
val e: Exception = intercept[MalformedCarbonCommandException] {
sql(
"""CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING 'abc'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("DataMap 'abc' not found"))
}
test("test timeseries create table 16: Only one granularity level can be defined 1") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e: Exception = intercept[MalformedCarbonCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'SECOND_GRANULARITY'='1',
| 'HOUR_GRANULARITY'='1',
| 'DAY_GRANULARITY'='1',
| 'MONTH_GRANULARITY'='1',
| 'YEAR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
e.printStackTrace()
assert(e.getMessage.equals("Only one granularity level can be defined"))
}
test("test timeseries create table 17: Only one granularity level can be defined 2") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e: Exception = intercept[MalformedDataMapCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'SECOND_GRANULARITY'='1',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("Only one granularity level can be defined"))
}
test("test timeseries create table 18: Only one granularity level can be defined 3") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e: Exception = intercept[MalformedDataMapCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'DAY_GRANULARITY'='1',
| 'HOUR_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals("Only one granularity level can be defined"))
}
test("test timeseries create table 19: timeSeries should define time granularity") {
sql("DROP DATAMAP IF EXISTS agg0_hour ON TABLE mainTable")
val e = intercept[MalformedDataMapCommandException] {
sql(
s"""
| CREATE DATAMAP agg0_hour ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.equals(s"$timeSeries should define time granularity"))
}
test("test timeseries create table 20: should support if not exists, create when same table exists") {
sql("DROP DATAMAP IF EXISTS agg1 ON TABLE mainTable")
sql(
s"""
| CREATE DATAMAP agg1 ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'MONTH_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
sql(
s"""
| CREATE DATAMAP IF NOT EXISTS agg1 ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'MONTH_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE mainTable"), true, "agg1")
checkExistence(sql("DESC FORMATTED mainTable_agg1"), true, "maintable_age_sum")
}
test("test timeseries create table 32: should support if not exists, create when same table not exists") {
sql("DROP DATAMAP IF EXISTS agg1_year ON TABLE mainTable")
sql(
s"""
|CREATE DATAMAP if not exists agg1_year ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'YEAR_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE mainTable"), true, "agg1_year")
checkExistence(sql("DESC FORMATTED mainTable_agg1_year"), true, "maintable_age_sum")
}
test("test timeseries create table 20: don't support 'create datamap if exists'") {
val e: Exception = intercept[AnalysisException] {
sql(
s"""CREATE DATAMAP IF EXISTS agg2 ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime',
| 'MONTH_GRANULARITY'='1')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains("identifier matching regex"))
}
test("test timeseries create table 26: test different data type") {
sql("drop table if exists dataTable")
sql(
s"""
| CREATE TABLE dataTable(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| decimalField DECIMAL(18,2),
| charField CHAR(5),
| floatField FLOAT,
| dataTime timestamp
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""CREATE DATAMAP agg0_hour ON TABLE dataTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='1')
| AS SELECT
| dataTime,
| SUM(intField),
| shortField,
| booleanField,
| intField,
| bigintField,
| doubleField,
| stringField,
| decimalField,
| charField,
| floatField
| FROM dataTable
| GROUP BY
| dataTime,
| shortField,
| booleanField,
| intField,
| bigintField,
| doubleField,
| stringField,
| decimalField,
| charField,
| floatField
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE dataTable"), true, "datatable_agg0_hour")
sql("DROP TABLE IF EXISTS dataTable")
}
test("test timeseries create table 27: test data map name") {
sql(
s"""CREATE DATAMAP agg1_hour ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE mainTable"), true, "agg1_hour")
checkExistence(sql("DESC FORMATTED mainTable_agg1_hour"), true, "maintable_age_sum")
}
test("test timeseries create table 28: event_time is null") {
sql("DROP DATAMAP IF EXISTS agg1 ON TABLE mainTable")
intercept[NullPointerException] {
sql(
s"""CREATE DATAMAP agg1 ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='',
| 'HOUR_GRANULARITY'='1')
|AS SELECT name, SUM(age) FROM mainTable
|GROUP BY name
""".stripMargin)
}
}
test("test timeseries create table 29: table not exists") {
sql("DROP DATAMAP IF EXISTS agg1 ON TABLE mainTable")
val e = intercept[AnalysisException] {
sql(
s"""CREATE DATAMAP agg1 ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='dataTime',
| 'HOUR_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTableNo
|GROUP BY dataTime
""".stripMargin)
}
assert(e.getMessage.contains("Table or view not found: maintableno"))
}
test("test timeseries create table 33: support event_time and granularity key with space") {
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
sql(
s"""CREATE DATAMAP agg1_month ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| ' event_time '='dataTime',
| ' MONTH_GRANULARITY '='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE maintable"), true, "maintable_agg1_month")
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
}
test("test timeseries create table 34: support event_time value with space") {
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
sql(
s"""CREATE DATAMAP agg1_month ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time '=' dataTime',
| 'MONTH_GRANULARITY '='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE maintable"), true, "maintable_agg1_month")
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
}
test("test timeseries create table 35: support granularity value with space") {
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
sql(
s"""CREATE DATAMAP agg1_month ON TABLE mainTable
|USING '$timeSeries'
|DMPROPERTIES (
| 'event_time '='dataTime',
| 'MONTH_GRANULARITY '=' 1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE maintable"), true, "maintable_agg1_month")
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
}
test("test timeseries create table 36: support event_time and granularity value with space") {
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
sql(
s"""
| CREATE DATAMAP agg1_month ON TABLE mainTable
| USING '$timeSeries'
| DMPROPERTIES (
| 'EVENT_TIME'='dataTime ',
| 'MONTH_GRANULARITY'=' 1 ')
| AS SELECT dataTime, SUM(age) FROM mainTable
| GROUP BY dataTime
""".stripMargin)
checkExistence(sql("SHOW DATAMAP ON TABLE maintable"), true, "maintable_agg1_month")
}
test("test timeseries create table 37: unsupport event_time error value") {
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
intercept[NullPointerException] {
sql(
s"""CREATE DATAMAP agg1_month ON TABLE mainTable USING '$timeSeries'
|DMPROPERTIES (
| 'event_time'='data Time',
| 'MONTH_GRANULARITY'='1')
|AS SELECT dataTime, SUM(age) FROM mainTable
|GROUP BY dataTime
""".stripMargin)
}
sql("DROP DATAMAP IF EXISTS agg1_month ON TABLE maintable")
}
  override def afterAll: Unit = {
    dropTable("mainTable")
    // Restore the timestamp format captured in beforeAll so later suites see
    // the original global configuration.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, timestampFormat)
  }
}
| sgururajshetty/carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/timeseries/TestTimeSeriesCreateTable.scala | Scala | apache-2.0 | 22,157 |
package wvlet.log
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import wvlet.log.io.IOUtil
object SerializationTest {
  // Fixture: logs at construction time so (de)serialization side effects are
  // visible in the test output.
  trait A extends LogSupport {
    info("new A")
    def hello = info("hello")
  }
}
/**
 * Verifies that classes mixing in LogSupport survive Java serialization:
 * the instance is round-tripped through Object(Output|Input)Stream and the
 * deserialized copy must still be able to log.
 */
class SerializationTest extends Spec {
  import SerializationTest._
  "Logger" should {
    "serializable" in {
      val a = new A {}
      // Serialize the instance to an in-memory buffer.
      val b = new ByteArrayOutputStream()
      IOUtil.withResource(new ObjectOutputStream(b)) {out =>
        out.writeObject(a)
      }
      val ser = b.toByteArray
      // Deserialize and exercise the logger on the restored instance.
      IOUtil.withResource(new ObjectInputStream(new ByteArrayInputStream(ser))) { in =>
        info("deserialization")
        val a = in.readObject().asInstanceOf[A]
        a.hello
      }
    }
  }
}
| wvlet/log | wvlet-log/jvm/src/test/scala/wvlet/log/SerializationTest.scala | Scala | apache-2.0 | 784 |
package stretchypants
import org.json4s.{ JArray, JString, JValue, JNothing, JInt, JObject }
import org.json4s.JsonDSL._
import scala.concurrent.duration.FiniteDuration
sealed trait Facet {
  // JSON fragment for this facet, to be embedded in a search request body.
  def asJson: JValue
}
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets.html */
object Facet {
  /** Sort order for terms-facet buckets; `value` is the wire name used in the
    * request's "order" field. */
  sealed trait Ordering {
    def value: String
  }
  object Ordering {
    abstract class Value(val value: String) extends Ordering
    case object Count extends Value("count")
    case object Term extends Value("term")
    case object ReverseCount extends Value("reverse_count")
    case object ReverseTerm extends Value("reverse_term")
  }
  /** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-terms-facet.html
    *
    * Immutable builder for a terms facet; each setter returns a copy. */
  case class Terms(
    _field: Option[List[String]] = None, // one = field, many = fields
    _scriptField: Option[String] = None,
    _size: Option[Int] = None,
    _shardSize: Option[Int] = None,
    _ordering: Option[Ordering] = None,
    _allTerms: Option[Boolean] = None,
    _exclude: List[String] = Nil,
    _regex: Option[String] = None,
    _regexFlags: Option[String] = None,
    _script: Option[String] = None) extends Facet {
    def field(f: String*) = copy(_field = Some(f.toList))
    def scriptField(sf: String) = copy(_scriptField = Some(sf))
    def size(s: Int) = copy(_size = Some(s))
    def shardSize(s: Int) = copy(_shardSize = Some(s))
    def ordering(o: Ordering) = copy(_ordering = Some(o))
    def allTerms(at: Boolean) = copy(_allTerms = Some(at))
    def exclude(excl: String*) = copy(_exclude = excl.toList)
    def regex(re: String) = copy(_regex = Some(re))
    def regexFlags(rf: String) = copy(_regexFlags = Some(rf))
    def script(s: String) = copy(_script = Some(s))
    // Chooses the facet's primary selector: a single "field", multiple
    // "fields", or a "script_field"; falls back to an absent "field" key.
    private[this] def primary =
      _field.map {
        case one :: Nil =>
          ("field" -> JString(one))
        case many =>
          ("fields" -> JArray(many.map(JString(_))))
      }.orElse(_scriptField.map { sf =>
        ("script_field" -> JString(sf))
      }).getOrElse(
        ("field" -> JNothing)
      )
    // Options left unset render as JNothing and are omitted by json4s.
    def asJson: JValue =
      ("terms" ->
       primary ~
       ("size" -> _size) ~
       ("shard_size" -> _shardSize) ~
       ("all_terms" -> _allTerms) ~
       ("exclude" -> Some(_exclude).filter(_.nonEmpty)) ~
       ("regex" -> _regex) ~
       ("regex_flags" -> _regexFlags) ~
       ("script" -> _script) ~
       ("order" -> _ordering.map(_.value)))
  }
  // Entry point: an empty terms-facet builder.
  def terms = Terms()
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-range-facet.html */
case class Range(
_field: Option[String] = None,
_keyValueFields: Option[(String, String)] = None,
_keyValueScripts: Option[(String, String)] = None,
bounds: List[(Option[Int], Option[Int])] = Nil) extends Facet {
def field(f: String) = copy(_field = Some(f))
def keyValueFields(key: String, value: String) =
copy(_keyValueFields = Some(key, value))
def keyValueScripts(key: String, value: String) =
copy(_keyValueScripts = Some(key, value))
def from(f: Int) = copy(bounds = (Some(f), None) :: bounds)
def to(t: Int) = copy(bounds = (None, Some(t)) :: bounds)
def range(f: Int, t: Int) = copy(bounds = (Some(f), Some(t)) :: bounds)
def asJson = {
val ranges = bounds.map { case (to, from) => ("to" -> to) ~ ("from" -> from) }
("range" ->
(_field.map { fld => (fld -> ranges): JObject }.orElse {
_keyValueFields.map {
case (k,v) =>
(("ranges" -> ranges) ~
("key_field" -> k) ~
("value_field" -> v): JObject)
}
}.orElse {
_keyValueScripts.map {
case (k,v) =>
(("ranges" -> ranges) ~
("key_script" -> k) ~
("value_script" -> k): JObject)
}
}))
}
}
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-histogram-facet.html */
case class Histogram(
_field: Option[String] = None,
_keyValueFields: Option[(String, String)] = None,
_keyValueScripts: Option[(String, String)] = None,
_scriptParams: Option[Map[String, String]] = None,
_interval: Option[Int] = None,
_timeInterval: Option[FiniteDuration] = None) extends Facet {
def field(f: String) = copy(_field = Some(f))
def keyValueFields(key: String, value: String) =
copy(_keyValueFields = Some(key, value))
def keyValueScripts(key: String, value: String) =
copy(_keyValueScripts = Some(key, value))
def scriptParams(kv: (String, String)*) = copy(_scriptParams = Some(kv.toMap))
def interval(i: Int) = copy(_interval = Some(i))
def interval(d: FiniteDuration) = copy(_timeInterval = Some(d))
def asJson = {
("histogram" ->
(_field.map { fld =>
(("field" -> fld) ~
("interval" -> _interval)): JObject
}.orElse {
_keyValueFields.map {
case (k,v) =>
(("interval" -> _interval) ~
("key_field" -> k) ~
("value_field" -> v): JObject)
}
}.orElse {
_keyValueScripts.map {
case (k,v) =>
(("interval" -> _interval) ~
("key_script" -> k) ~
("value_script" -> k): JObject)
}
}))
}
}
def histogram = Histogram()
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-date-histogram-facet.html#search-facets-date-histogram-facet */
case class DateHistogram(
_field: Option[String] = None,
_keyField: Option[String] = None,
_valueField: Option[String] = None,
_valueScript: Option[String] = None,
_interval: Option[String] = None) extends Facet {
def field(f: String) = copy(_field = Some(f))
def keyField(f: String) = copy(_keyField = Some(f))
def valueField(f: String) = copy(_valueField = Some(f))
def valueScript(s: String) = copy(_valueScript = Some(s))
def year = copy(_interval = Some("year"))
def quarter = copy(_interval = Some("quarter"))
def month = copy(_interval = Some("month"))
def week = copy(_interval = Some("week"))
def day = copy(_interval = Some("day"))
def hour = copy(_interval = Some("hour"))
def minute = copy(_interval = Some("minute"))
def second = copy(_interval = Some("second"))
def asJson =
("date_histogram" ->
("interval" -> _interval) ~
("field" -> _field) ~
("key_field" -> _keyField) ~
("value_field" -> _valueField) ~
("value_script" -> _valueScript))
}
def dateHistogram = DateHistogram()
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-filter-facet.html */
def filter(f: Filter) = new Facet {
def asJson = ("filter" -> f.asJson)
}
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-query-facet.html */
def query(q: Query) = new Facet {
def asJson = ("query" -> q.asJson)
}
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-statistical-facet.html */
/** http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-facets-terms-stats-facet.html */
}
| softprops/stretchy-pants | src/main/scala/Facet.scala | Scala | mit | 7,428 |
package debox
import scala.annotation.{switch, tailrec}
import scala.reflect.ClassTag
import scala.{specialized => sp}
import spire.algebra._
import spire.syntax.all._
/**
* Set is a mutable hash set, with open addressing and double hashing.
*
* Set provides constant-time membership tests, and amortized
* constant-time addition and removal. One underlying array stores
* items, and another tracks which buckets are used and defined.
*
* When the type A is known (or the caller is specialized on A),
* Set[A] will store the values in an unboxed array.
*/
final class Set[@sp (Short, Char, Int, Float, Long, Double, AnyRef) A] protected[debox](as: Array[A], bs: Array[Byte], n: Int, u: Int)(implicit val ct: ClassTag[A]) extends Serializable { lhs =>
  // set machinery
  // bucket status bytes: 0 = slot never used, 2 = tombstone (was used, now deleted), 3 = occupied
  var items: Array[A] = as // slots for items
  var buckets: Array[Byte] = bs // buckets track defined/used slots
  var len: Int = n // number of defined slots
  var used: Int = u // number of used slots (used >= len)
  // hashing internals
  var mask: Int = buckets.length - 1 // size-1, used for hashing
  var limit: Int = (buckets.length * 0.65).toInt // point at which we should grow
  /**
   * Check if two Sets are equal.
   *
   * Equal means the sets have the same type (which is checked
   * using the ClassTag instances) and the same contents.
   *
   * Comparing Sets with any of Scala's collection types will
   * return false.
   *
   * On average this is an O(n) operation. In some cases a false
   * result can be returned more quickly.
   */
  override def equals(that: Any): Boolean = that match {
    case that: Set[_] =>
      if (size != that.size || ct != that.ct) return false
      val s = that.asInstanceOf[Set[A]]
      forall(s.apply)
    case _ =>
      false
  }
  /**
   * Hash the contents of the set to an Int value.
   *
   * By xor'ing all the set's values together, we can be sure that
   * sets with the same contents will have the same hashCode
   * regardless of the order those elements appear.
   *
   * This is an O(n) operation.
   */
  override def hashCode: Int = fold(0xdeadd065)(_ ^ _.##)
  /**
   * Return a string representation of the contents of the set.
   *
   * This is an O(n) operation.
   */
  override def toString: String = {
    val sb = new StringBuilder
    sb.append("Set(")
    // skip to the first occupied slot so the comma logic stays simple
    var i = 0
    while (i < buckets.length && buckets(i) != 3) i += 1
    if (i < buckets.length) {
      sb.append(items(i).toString)
      i += 1
    }
    while (i < buckets.length) {
      if (buckets(i) == 3) {
        sb.append(", ")
        sb.append(items(i).toString)
      }
      i += 1
    }
    sb.append(")")
    sb.toString
  }
  /**
   * Return the size of this Set as an Int.
   *
   * Since Sets use arrays, their size is limited to what a 32-bit
   * signed integer can represent.
   *
   * This is an O(1) operation.
   */
  final def size: Int = len
  /**
   * Return true if the Set is empty, false otherwise.
   *
   * This is an O(1) operation.
   */
  final def isEmpty: Boolean = len == 0
  /**
   * Return true if the Set is non-empty, false otherwise.
   *
   * This is an O(1) operation.
   */
  final def nonEmpty: Boolean = len > 0
  /**
   * Return whether the item is found in the Set or not.
   *
   * On average, this is an O(1) operation; the (unlikely) worst-case
   * is O(n).
   */
  final def apply(item: A): Boolean = {
    // double-hashing probe: a never-used slot (0) ends the chain;
    // an occupied slot (3) holding the item is a hit.
    @inline @tailrec def loop(i: Int, perturbation: Int): Boolean = {
      val j = i & mask
      val status = buckets(j)
      if (status == 0) {
        false
      } else if (status == 3 && items(j) == item) {
        true
      } else {
        loop((i << 2) + i + perturbation + 1, perturbation >> 5)
      }
    }
    val i = item.## & 0x7fffffff
    loop(i, i)
  }
  /**
   * Make a (shallow) copy of this set.
   *
   * This method creates a copy of the set with the same
   * structure. However, the actual elements will not be copied.
   *
   * This is an O(n) operation.
   */
  final def copy: Set[A] = new Set(items.clone, buckets.clone, len, used)
  /**
   * Clears the set's internal state.
   *
   * After calling this method, the set's state is identical to that
   * obtained by calling Set.empty[A].
   *
   * The previous arrays are not retained, and will become available
   * for garbage collection. This method returns a null of type
   * Unit1[A] to trigger specialization without allocating an actual
   * instance.
   *
   * This is an O(1) operation, but may generate a lot of garbage if
   * the set was previously large.
   */
  final def clear: Unit1[A] = { absorb(Set.empty[A]); null }
  /**
   * Absorb the given set's contents into this set.
   *
   * This method does not copy the other set's contents. Thus, this
   * should only be used when there are no saved references to the
   * other set. It is private, and exists primarily to simplify the
   * implementation of certain methods.
   *
   * This is an O(1) operation, although it can potentially generate a
   * lot of garbage (if the set was previously large).
   */
  private[this] def absorb(that: Set[A]): Unit = {
    items = that.items
    buckets = that.buckets
    len = that.len
    used = that.used
    mask = that.mask
    limit = that.limit
  }
  /**
   * Synonym for +=.
   */
  final def add(item: A): Boolean = this += item
  /**
   * Add item to the set.
   *
   * Returns whether or not the item was added. If item was already in
   * the set, this method will do nothing and return false.
   *
   * On average, this is an amortized O(1) operation; the worst-case
   * is O(n), which will occur when the set must be resized.
   */
  def +=(item: A): Boolean = {
    @inline @tailrec def loop(i: Int, perturbation: Int): Boolean = {
      val j = i & mask
      val status = buckets(j)
      if (status == 3) {
        if (items(j) == item)
          false
        else
          loop((i << 2) + i + perturbation + 1, perturbation >> 5)
      } else if (status == 2 && apply(item)) {
        // hit a tombstone: the item may still live further along the
        // probe chain, so a full membership check is needed first
        false
      } else {
        // empty slot (0) or reusable tombstone (2): claim it
        items(j) = item
        buckets(j) = 3
        len += 1
        if (status == 0) {
          used += 1
          if (used > limit) grow()
        }
        true
      }
    }
    val i = item.## & 0x7fffffff
    loop(i, i)
  }
  /**
   * Synonym for ++=.
   */
  def addAll(items: Iterable[A]): Unit = this ++= items
  /**
   * Synonym for ++=.
   */
  def addAll(items: Buffer[A]): Unit = this ++= items
  /**
   * Synonym for ++=.
   */
  def addAll(items: Array[A]): Unit = this ++= items
  /**
   * Add every item in items to the set.
   *
   * This is an O(n) operation, where n is the size of items.
   */
  def ++=(items: Iterable[A]): Unit =
    items.foreach(this += _)
  /**
   * Add every item in items to the set.
   *
   * This is an O(n) operation, where n is the size of items.
   */
  def ++=(buf: Buffer[A]): Unit =
    cfor(0)(_ < buf.length, _ + 1) { this += buf(_) }
  /**
   * Add every item in items to the set.
   *
   * This is an O(n) operation, where n is the size of items.
   */
  def ++=(arr: Array[A]): Unit =
    cfor(0)(_ < arr.length, _ + 1) { this += arr(_) }
  /**
   * Synonym for -=.
   */
  def remove(item: A): Boolean = this -= item
  /**
   * Remove an item from the set.
   *
   * Returns whether the item was originally in the set or not.
   *
   * This is an amortized O(1) operation.
   */
  final def -=(item: A): Boolean = {
    @inline @tailrec def loop(i: Int, perturbation: Int): Boolean = {
      val j = i & mask
      val status = buckets(j)
      if (status == 3 && items(j) == item) {
        // leave a tombstone (2) so later probe chains remain intact
        buckets(j) = 2
        len -= 1
        true
      } else if (status == 0) {
        false
      } else {
        loop((i << 2) + i + perturbation + 1, perturbation >> 5)
      }
    }
    val i = item.## & 0x7fffffff
    loop(i, i)
  }
  /**
   * Set/unset the value of item in the set.
   *
   * Like += and -= this is an amortized O(1) operation.
   */
  final def update(item: A, b: Boolean) =
    if (b) this += item else this -= item
  /**
   * Loop over the set's contents, applying f to each element.
   *
   * There is no guaranteed order that the set's elements will be
   * traversed in, so use of foreach should not rely on a particular
   * order.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def foreach(f: A => Unit): Unit =
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) f(items(i))
    }
  /**
   * Translate this Set into another Set using the given function f.
   *
   * Note that the resulting set may be smaller than this set, if f is
   * not a one-to-one function (an injection).
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def map[@sp(Short, Char, Int, Float, Long, Double, AnyRef) B: ClassTag](f: A => B): Set[B] = {
    val out = Set.ofSize[B](len)
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) out.add(f(items(i)))
    }
    // if f collapsed many elements, reclaim the over-allocated storage
    if (out.size < len / 3) out.compact
    out
  }
  /**
   * Fold the set's values into a single value, using the provided
   * initial state and combining function f.
   *
   * Like foreach, fold makes no guarantees about the order that
   * elements will be reached.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def fold[@sp(Int, Long, Double, AnyRef) B](init: B)(f: (B, A) => B): B = {
    var result = init
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) result = f(result, items(i))
    }
    result
  }
  /**
   * Grow the underlying array to best accommodate the set's size.
   *
   * To preserve hashing access speed, the set's size should never be
   * more than 66% of the underlying array's size. When this size is
   * reached, the set needs to be updated (using this method) to have a
   * larger array.
   *
   * The underlying array's size must always be a multiple of 2, which
   * means this method grows the array's size by 2x (or 4x if the set
   * is very small). This doubling helps amortize the cost of
   * resizing, since as the set gets larger growth will happen less
   * frequently. This method returns a null of type Unit1[A] to
   * trigger specialization without allocating an actual instance.
   *
   * Growing is an O(n) operation, where n is the set's size.
   */
  final def grow(): Unit1[A] = {
    val next = buckets.length * (if (buckets.length < 10000) 4 else 2)
    val set = Set.ofAllocatedSize[A](next)
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) set += items(i)
    }
    absorb(set)
    null
  }
  /**
   * Compacts the set's internal arrays to reduce memory usage.
   *
   * This operation should be used if a set has been shrunk
   * (e.g. through --=) and is not likely to grow again.
   *
   * This method will shrink the set to the smallest possible size
   * that allows it to be <66% full. It returns a null of type
   * Unit1[A] to trigger specialization without allocating an actual
   * instance.
   *
   * This is an O(n) operation, where n is the set's size.
   */
  final def compact(): Unit1[A] = {
    val set = Set.ofSize[A](len)
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) set += items(i)
    }
    absorb(set)
    null
  }
  // For a lot of the following methods, there are size tests to try
  // to make sure we're looping over the smaller of the two
  // sets. Some things to keep in mind:
  //
  // 1. System.arraycopy is faster than a loop.
  // 2. We want to avoid copying a large object and then shrinking it.
  // 3. & and | are symmetric (but -- is not). They all require a copy.
  // 4. &=, |=, and --= are not symmetric (they modify the lhs).
  //
  // So where possible we'd like to be looping over a smaller set,
  // doing membership tests against a larger set.
  /**
   * Union this set with the rhs set.
   *
   * This has the effect of adding all members of rhs to lhs.
   *
   * This is an O(n) operation, where n is rhs.size.
   */
  def |=(rhs: Set[A]): Unit =
    if (lhs.size >= rhs.size) {
      cfor(0)(_ < rhs.buckets.length, _ + 1) { i =>
        if (rhs.buckets(i) == 3) lhs += rhs.items(i)
      }
    } else {
      // rhs is bigger: copy it, merge the smaller lhs in, then take over its storage
      val out = rhs.copy
      out |= lhs
      lhs.absorb(out)
    }
  /**
   * Synonym for |.
   */
  def union(rhs: Set[A]): Set[A] = lhs | rhs
  /**
   * Return new set which is the union of lhs and rhs.
   *
   * The new set will contain all members of lhs and rhs.
   *
   * This is an O(m max n) operation, where m and n are the sizes of
   * the sets.
   */
  def |(rhs: Set[A]): Set[A] =
    if (lhs.size >= rhs.size) {
      val out = lhs.copy
      out |= rhs
      out
    } else {
      val out = rhs.copy
      out |= lhs
      out
    }
  /**
   * Remove any member of this which is not in rhs.
   *
   * This is an O(m min n) operation, where m and n are the sizes of
   * the lhs and rhs sets.
   */
  def &=(rhs: Set[A]): Unit =
    if (lhs.size <= rhs.size) {
      cfor(0)(_ < buckets.length, _ + 1) { i =>
        if (buckets(i) == 3 && !rhs(items(i))) {
          buckets(i) = 2
          len -= 1
        }
      }
    } else {
      val out = rhs.copy
      out &= lhs
      lhs.absorb(out)
    }
  /**
   * Synonym for &.
   */
  def intersection(rhs: Set[A]): Set[A] = this & rhs
  /**
   * Intersect this set with the rhs set.
   *
   * This has the effect of removing any item not in rhs.
   *
   * This is an O(m min n) operation, where m and n are the sizes of
   * the lhs and rhs sets.
   */
  def &(rhs: Set[A]): Set[A] =
    if (lhs.size <= rhs.size) {
      val out = lhs.copy
      out &= rhs
      out
    } else {
      val out = rhs.copy
      out &= lhs
      out
    }
  /**
   * Remove members of rhs from the set.
   *
   * This operation is an O(m min n) operation, where m and n are the
   * sizes of the lhs and rhs sets.
   */
  def --=(rhs: Set[A]): Unit =
    if (lhs.size >= rhs.size) {
      cfor(0)(_ < rhs.buckets.length, _ + 1) { i =>
        if (rhs.buckets(i) == 3) lhs -= rhs.items(i)
      }
    } else {
      cfor(0)(_ < buckets.length, _ + 1) { i =>
        if (buckets(i) == 3 && rhs(items(i))) {
          buckets(i) = 2
          len -= 1
        }
      }
    }
  /**
   * Remove the members of items from the set.
   *
   * This is an O(n) operation, where n is the length of items.
   */
  def --=(items: Iterable[A]): Unit =
    items.foreach(a => this -= a)
  /**
   * Remove the members of arr from the set.
   *
   * This is an O(n) operation, where n is the length of arr.
   */
  def --=(arr: Array[A]): Unit =
    cfor(0)(_ < arr.length, _ + 1) { i => this -= arr(i) }
  /**
   * Remove the members of buf from the set.
   *
   * This is an O(n) operation, where n is the length of buf.
   */
  def --=(buf: Buffer[A]): Unit =
    cfor(0)(_ < buf.length, _ + 1) { i => this -= buf(i) }
  /**
   * This is a synonym for --.
   */
  def difference(rhs: Set[A]): Set[A] = lhs -- rhs
  /**
   * Create a new set with the elements of lhs that are not in rhs.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def --(rhs: Set[A]): Set[A] = {
    val out = lhs.copy
    out --= rhs
    out
  }
  /**
   * Count how many elements of the set satisfy the predicate p.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def count(p: A => Boolean): Int =
    fold(0)((n, a) => if (p(a)) n + 1 else n)
  /**
   * Determine if every member of the set satisfies the predicate p.
   *
   * This is an O(n) operation, where n is the size of the
   * set. However, it will return as soon as a false result is
   * obtained.
   */
  def forall(p: A => Boolean): Boolean = {
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3 && !p(items(i))) return false
    }
    true
  }
  /**
   * Determine if any member of the set satisfies the predicate p.
   *
   * This is an O(n) operation, where n is the size of the
   * set. However, it will return as soon as a true result is
   * obtained.
   */
  def exists(p: A => Boolean): Boolean = {
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3 && p(items(i))) return true
    }
    false
  }
  /**
   * Find a member of the set that satisfies the predicate p.
   *
   * The method returns Some(item) if item satisfies p, and None if
   * none of set's elements satisfy p. Since Set is not ordered, if
   * multiple elements satisfy the predicate there is no guarantee
   * which one will be found.
   *
   * This is an O(n) operation, where n is the size of the
   * set. However, it will return as soon as a member satisfying the
   * predicate is found.
   */
  def find(p: A => Boolean): Option[A] = {
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) {
        val a = items(i)
        if (p(a)) return Some(a)
      }
    }
    None
  }
  /**
   * Create a new set containing all the members of this set that
   * satisfy p.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def findAll(p: A => Boolean): Set[A] = {
    val out = Set.empty[A]
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3 && p(items(i))) out += items(i)
    }
    out
  }
  /**
   * Remove any member of the set that does not satisfy p.
   *
   * After this method, all members of the set will satisfy p.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def filterSelf(p: A => Boolean): Unit =
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3 && !p(items(i))) {
        buckets(i) = 2
        len -= 1
      }
    }
  /**
   * Partition this set into two new sets, the first consisting of all
   * members that fail to satisfy the predicate p, and the second for
   * all those that do satisfy the predicate.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def partition(p: A => Boolean): (Set[A], Set[A]) = {
    val no = Set.ofSize[A](len / 2)
    val yes = Set.ofSize[A](len / 2)
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) {
        val a = items(i)
        if (p(a)) yes += a else no += a
      }
    }
    // reclaim storage when a side received far fewer elements than reserved
    if (no.size < len / 6) no.compact
    if (yes.size < len / 6) yes.compact
    (no, yes)
  }
  /**
   * Return an iterator over this set's contents.
   *
   * This method does not do any copying or locking. Thus, if the set
   * is modified while the iterator is "live" the results will be
   * undefined and probably bad. Also, since sets are not ordered,
   * there is no guarantee elements will be returned in a particular
   * order.
   *
   * Use this.copy.iterator to get a "clean" iterator if needed.
   *
   * Creating the iterator is an O(1) operation.
   */
  def iterator: Iterator[A] = {
    // position i at the first occupied slot (or past the end when empty)
    var i = 0
    while (i < buckets.length && buckets(i) != 3) i += 1
    new Iterator[A] {
      var index = i
      def hasNext: Boolean = index < buckets.length
      def next: A = {
        val item = items(index)
        index += 1
        while (index < buckets.length && buckets(index) != 3) index += 1
        item
      }
    }
  }
  /**
   * Copy the set's elements into an array.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def toArray: Array[A] = {
    val arr = new Array[A](size)
    var j = 0
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) {
        arr(j) = items(i)
        j += 1
      }
    }
    arr
  }
  /**
   * Copy the set's elements into a buffer.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def toBuffer: Buffer[A] = Buffer.fromArray(toArray)
  /**
   * Copy the set's elements into a sorted buffer.
   *
   * Elements will be arranged from lowest-to-highest.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def toSortedBuffer(implicit o: Order[A]): Buffer[A] = {
    val buf = Buffer.fromArray(toArray)
    buf.sort
    buf
  }
  /**
   * Copy the sets contents into a Map. The elements of the set will
   * be keys, and each keys' value will be determined with the
   * provided function.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def toMap[@sp(Boolean, Int, Long, Double) B: ClassTag](f: A => B): Map[A, B] = {
    val out = Map.ofSize[A, B](len)
    cfor(0)(_ < buckets.length, _ + 1) { i =>
      if (buckets(i) == 3) {
        val a = items(i)
        out(a) = f(a)
      }
    }
    out
  }
  /**
   * Wrap this set in an Iterable[A] instance.
   *
   * This method exists as a cheap way to get compatibility with Scala
   * collections without copying/conversion. Note that since Scala
   * collections are not specialized, using this iterable will box
   * values as they are accessed (although the underlying set will
   * still be unboxed).
   *
   * Like iterator, this method directly wraps the set. Thus, you
   * should not mutate the set while using the resulting iterable, or
   * risk corruption and undefined behavior.
   *
   * To get a "safe" value that is compatible with Scala collections,
   * consider using toScalaSet.
   *
   * Creating the Iterable[A] instance is an O(1) operation.
   */
  def toIterable: Iterable[A] =
    new Iterable[A] {
      override def size: Int = lhs.size
      def iterator: Iterator[A] = lhs.iterator
      override def foreach[U](f: A => U): Unit = lhs.foreach(a => f(a))
    }
  /**
   * Create an immutable instance of scala's Set[A].
   *
   * This method copies the elements into a new instance which is
   * compatible with Scala's collections and Set[A] type.
   *
   * This is an O(n) operation, where n is the size of the set.
   */
  def toScalaSet: scala.collection.immutable.Set[A] =
    iterator.toSet
}
object Set {
  /**
   * Allocate an empty Set.
   *
   * The initial capacity (8 slots) matches the smallest size
   * ofAllocatedSize will produce.
   */
  def empty[@sp A: ClassTag]: Set[A] = new Set(new Array[A](8), new Array[Byte](8), 0, 0)
  /**
   * Allocate an empty Set, capable of holding n items without
   * resizing itself.
   *
   * This method is useful if you know you'll be adding a large number
   * of elements in advance and you want to save a few resizes.
   */
  def ofSize[@sp A: ClassTag](n: Int): Set[A] =
    // allocate ~1.5x the requested capacity so the set stays below its
    // 65% load factor without growing
    ofAllocatedSize(n / 2 * 3)
  /**
   * Allocate an empty Set, with underlying storage of size n.
   *
   * This method is useful if you know exactly how big you want the
   * underlying array to be. In most cases ofSize() is probably what
   * you want instead.
   */
  private[debox] def ofAllocatedSize[@sp A: ClassTag](n: Int): Set[A] = {
    // round the requested size up to a power of two (required by the
    // mask-based probing); guard against Int overflow
    val sz = Util.nextPowerOfTwo(n) match {
      case n if n < 0 => throw DeboxOverflowError(n)
      case 0 => 8
      case n => n
    }
    new Set(new Array[A](sz), new Array[Byte](sz), 0, 0)
  }
  /**
   * Build a Set instance from the provided values.
   */
  def apply[@sp A: ClassTag](as: A*): Set[A] = fromIterable(as)
  /**
   * Build a Set from the provided array.
   *
   * The advantage of using this method is that, unlike apply() or
   * fromIterable(), the values will not be boxed prior to the set
   * being built.
   */
  def fromArray[@sp A: ClassTag](as: Array[A]): Set[A] = {
    // NOTE(review): ofSize already adds ~1.5x headroom, so sizing n to
    // 1.5x the array length over-allocates; kept for compatibility.
    val n = spire.math.max(8, as.length + as.length / 2)
    val set = ofSize[A](n)
    cfor(0)(_ < as.length, _ + 1)(i => set.add(as(i)))
    set
  }
  /**
   * Build a Set from the provided iterable object.
   */
  def fromIterable[@sp A: ClassTag](as: Iterable[A]): Set[A] = {
    val set = empty[A]
    set ++= as
    set
  }
  /**
   * Provide a Eq[Set[A]] instance.
   *
   * Since Sets are so reliant on equality, and use hash codes
   * internally, the default equality is used to compare elements.
   */
  implicit def eqv[A]: Eq[Set[A]] =
    new Eq[Set[A]] {
      def eqv(lhs: Set[A], rhs: Set[A]): Boolean = lhs == rhs
    }
  /**
   * Provide a CMonoid[Set[A]] instance.
   *
   * Since element order is irrelevant, union is a commutative
   * operation. The empty set is the identity element.
   */
  implicit def cmonoid[@sp A: ClassTag]: CMonoid[Set[A]] =
    new CMonoid[Set[A]] {
      def id = Set.empty[A]
      def op(lhs: Set[A], rhs: Set[A]): Set[A] = lhs | rhs
    }
}
| beni55/debox | src/main/scala/debox/Set.scala | Scala | mit | 24,163 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
import uk.gov.hmrc.ct.computations.calculations.LossesCarriedForwardsCalculator
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// Computations box CP288: optional integer holding losses carried forward.
case class CP288(value: Option[Int]) extends CtBoxIdentifier(name = "Losses Carried forward") with CtOptionalInteger
object CP288 extends Calculated[CP288, ComputationsBoxRetriever] with LossesCarriedForwardsCalculator {
  /**
   * Derives the losses-carried-forward figure (CP288) by feeding the
   * relevant component boxes from the retriever into the shared
   * LossesCarriedForwardsCalculator.
   */
  override def calculate(fieldValueRetriever: ComputationsBoxRetriever): CP288 = {
    val boxes = fieldValueRetriever
    lossesCarriedForwardsCalculation(
      cp281 = boxes.cp281(),
      cp118 = boxes.cp118(),
      cp283 = boxes.cp283(),
      cp998 = boxes.cp998(),
      cp287 = boxes.cp287(),
      cp997 = boxes.cp997(),
      cp997d = boxes.cp997d(),
      cp997c = boxes.cp997c()
    )
  }
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP288.scala | Scala | apache-2.0 | 1,572 |
package mesosphere.marathon
import java.util.UUID
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
import javax.inject.Named
import akka.actor.SupervisorStrategy.Restart
import akka.actor._
import akka.event.EventStream
import akka.routing.RoundRobinPool
import com.codahale.metrics.Gauge
import com.google.inject._
import com.google.inject.name.Names
import com.twitter.common.base.Supplier
import com.twitter.common.zookeeper.{ Candidate, CandidateImpl, Group => ZGroup, ZooKeeperClient }
import com.twitter.zk.{ NativeConnector, ZkClient }
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.api.LeaderInfo
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.event.http.{
HttpEventStreamActor,
HttpEventStreamActorMetrics,
HttpEventStreamHandle,
HttpEventStreamHandleActor
}
import mesosphere.marathon.event.{ EventModule, HistoryActor }
import mesosphere.marathon.health.{ HealthCheckManager, MarathonHealthCheckManager }
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state._
import mesosphere.marathon.tasks.{ TaskIdUtil, TaskTracker, _ }
import mesosphere.marathon.upgrade.{ DeploymentManager, DeploymentPlan }
import mesosphere.util.SerializeExecution
import mesosphere.util.state._
import mesosphere.util.state.memory.InMemoryStore
import mesosphere.util.state.mesos.MesosStateStore
import mesosphere.util.state.zk.ZKStore
import org.apache.mesos.state.ZooKeeperState
import org.apache.zookeeper.ZooDefs
import org.apache.zookeeper.ZooDefs.Ids
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
import scala.concurrent.Await
import scala.util.control.NonFatal
// String keys used with Guice's @Named to disambiguate bindings of
// common types (String, ActorRef, AtomicBoolean, ...).
object ModuleNames {
  final val NAMED_CANDIDATE = "CANDIDATE"
  final val NAMED_HOST_PORT = "HOST_PORT"
  final val NAMED_LEADER_ATOMIC_BOOLEAN = "LEADER_ATOMIC_BOOLEAN"
  final val NAMED_SERVER_SET_PATH = "SERVER_SET_PATH"
  final val NAMED_SERIALIZE_GROUP_UPDATES = "SERIALIZE_GROUP_UPDATES"
  final val NAMED_HTTP_EVENT_STREAM = "HTTP_EVENT_STREAM"
}
class MarathonModule(conf: MarathonConf, http: HttpConf, zk: ZooKeeperClient)
extends AbstractModule {
//scalastyle:off magic.number
  // Module-wide SLF4J logger, named after the runtime class.
  val log = LoggerFactory.getLogger(getClass.getName)
  /**
   * Registers the module's Guice bindings: configuration instances,
   * scheduler collaborators, and a few @Named primitives.
   *
   * NOTE: binding order matters for the eager singletons below, which
   * are instantiated at injector-creation time.
   */
  def configure() {
    bind(classOf[MarathonConf]).toInstance(conf)
    bind(classOf[HttpConf]).toInstance(http)
    bind(classOf[ZooKeeperClient]).toInstance(zk)
    bind(classOf[LeaderProxyConf]).toInstance(conf)
    // needs to be eager to break circular dependencies
    bind(classOf[SchedulerCallbacks]).to(classOf[SchedulerCallbacksServiceAdapter]).asEagerSingleton()
    bind(classOf[MarathonSchedulerDriverHolder]).in(Scopes.SINGLETON)
    bind(classOf[SchedulerDriverFactory]).to(classOf[MesosSchedulerDriverFactory]).in(Scopes.SINGLETON)
    bind(classOf[MarathonLeaderInfoMetrics]).in(Scopes.SINGLETON)
    bind(classOf[MarathonScheduler]).in(Scopes.SINGLETON)
    bind(classOf[MarathonSchedulerService]).in(Scopes.SINGLETON)
    bind(classOf[LeaderInfo]).to(classOf[MarathonLeaderInfo]).in(Scopes.SINGLETON)
    bind(classOf[TaskTracker]).in(Scopes.SINGLETON)
    bind(classOf[TaskFactory]).to(classOf[DefaultTaskFactory]).in(Scopes.SINGLETON)
    bind(classOf[HealthCheckManager]).to(classOf[MarathonHealthCheckManager]).asEagerSingleton()
    bind(classOf[String])
      .annotatedWith(Names.named(ModuleNames.NAMED_SERVER_SET_PATH))
      .toInstance(conf.zooKeeperServerSetPath)
    bind(classOf[Metrics]).in(Scopes.SINGLETON)
    bind(classOf[HttpEventStreamActorMetrics]).in(Scopes.SINGLETON)
    // If running in single scheduler mode, this node is the leader.
    val leader = new AtomicBoolean(!conf.highlyAvailable())
    bind(classOf[AtomicBoolean])
      .annotatedWith(Names.named(ModuleNames.NAMED_LEADER_ATOMIC_BOOLEAN))
      .toInstance(leader)
  }
@Provides
@Singleton
def provideMesosLeaderInfo(): MesosLeaderInfo = {
conf.mesosLeaderUiUrl.get match {
case someUrl @ Some(_) => ConstMesosLeaderInfo(someUrl)
case None => new MutableMesosLeaderInfo
}
}
@Named(ModuleNames.NAMED_HTTP_EVENT_STREAM)
@Provides
@Singleton
def provideHttpEventStreamActor(system: ActorSystem,
leaderInfo: LeaderInfo,
@Named(EventModule.busName) eventBus: EventStream,
metrics: HttpEventStreamActorMetrics): ActorRef = {
val outstanding = conf.eventStreamMaxOutstandingMessages.get.getOrElse(50)
def handleStreamProps(handle: HttpEventStreamHandle): Props =
Props(new HttpEventStreamHandleActor(handle, eventBus, outstanding))
system.actorOf(Props(new HttpEventStreamActor(leaderInfo, metrics, handleStreamProps)), "HttpEventStream")
}
  @Provides
  @Singleton
  // Selects the persistent-store backend from configuration:
  // "zk" (Twitter ZK client), "mesos_zk" (Mesos ZooKeeperState), or "mem".
  def provideStore(): PersistentStore = {
    // Backend backed directly by Twitter's ZkClient.
    def directZK(): PersistentStore = {
      // implicit Timer required by NativeConnector; Nil = no-op timer
      implicit val timer = com.twitter.util.Timer.Nil
      import com.twitter.util.TimeConversions._
      val sessionTimeout = conf.zooKeeperSessionTimeout.get.map(_.millis).getOrElse(30.minutes)
      val connector = NativeConnector(conf.zkHosts, None, sessionTimeout, timer)
      val client = ZkClient(connector)
        .withAcl(Ids.OPEN_ACL_UNSAFE.asScala)
        .withRetries(3)
      new ZKStore(client, client(conf.zooKeeperStatePath))
    }
    // Backend using Mesos' ZooKeeperState implementation.
    def mesosZK(): PersistentStore = {
      val state = new ZooKeeperState(
        conf.zkHosts,
        conf.zkTimeoutDuration.toMillis,
        TimeUnit.MILLISECONDS,
        conf.zooKeeperStatePath
      )
      new MesosStateStore(state, conf.zkTimeoutDuration)
    }
    conf.internalStoreBackend.get match {
      case Some("zk") => directZK()
      case Some("mesos_zk") => mesosZK()
      case Some("mem") => new InMemoryStore()
      // catch-all covers None and unrecognized backend names
      case backend: Option[String] => throw new IllegalArgumentException(s"Storage backend $backend not known!")
    }
  }
//scalastyle:off parameter.number method.length
/**
 * Provides the central MarathonSchedulerActor wired with all its collaborators.
 * The actor is wrapped in a single-instance round-robin router purely to attach
 * a restart-on-failure supervision strategy.
 */
@Named("schedulerActor")
@Provides
@Singleton
@Inject
def provideSchedulerActor(
system: ActorSystem,
appRepository: AppRepository,
groupRepository: GroupRepository,
deploymentRepository: DeploymentRepository,
healthCheckManager: HealthCheckManager,
taskTracker: TaskTracker,
taskQueue: LaunchQueue,
frameworkIdUtil: FrameworkIdUtil,
driverHolder: MarathonSchedulerDriverHolder,
taskIdUtil: TaskIdUtil,
leaderInfo: LeaderInfo,
storage: StorageProvider,
@Named(EventModule.busName) eventBus: EventStream,
taskFailureRepository: TaskFailureRepository,
config: MarathonConf): ActorRef = {
// Restart the scheduler actor on any non-fatal failure instead of stopping it.
val supervision = OneForOneStrategy() {
case NonFatal(_) => Restart
}
import system.dispatcher
// SchedulerActions needs a back-reference to the scheduler actor, hence the factory.
def createSchedulerActions(schedulerActor: ActorRef): SchedulerActions = {
new SchedulerActions(
appRepository,
groupRepository,
healthCheckManager,
taskTracker,
taskQueue,
eventBus,
schedulerActor,
config)
}
def deploymentManagerProps(schedulerActions: SchedulerActions): Props = {
Props(
new DeploymentManager(
appRepository,
taskTracker,
taskQueue,
schedulerActions,
storage,
healthCheckManager,
eventBus
)
)
}
val historyActorProps = Props(new HistoryActor(eventBus, taskFailureRepository))
system.actorOf(
MarathonSchedulerActor.props(
createSchedulerActions,
deploymentManagerProps,
historyActorProps,
appRepository,
deploymentRepository,
healthCheckManager,
taskTracker,
taskQueue,
driverHolder,
leaderInfo,
eventBus
// nrOfInstances = 1: the router exists only to apply the supervision strategy.
).withRouter(RoundRobinPool(nrOfInstances = 1, supervisorStrategy = supervision)),
"MarathonScheduler")
}
/** The host:port this Marathon instance advertises; uses the HTTPS port when HTTP is disabled. */
@Named(ModuleNames.NAMED_HOST_PORT)
@Provides
@Singleton
def provideHostPort: String = {
  val advertisedPort = if (http.disableHttp()) http.httpsPort() else http.httpPort()
  s"${conf.hostname()}:$advertisedPort"
}
/**
 * Registers this instance as a ZooKeeper leadership candidate when running in
 * highly-available mode; returns None in single-scheduler mode (where this node
 * is always the leader).
 *
 * @param zk       ZooKeeper client hosting the candidate group
 * @param hostPort advertised host:port, stored as the candidate payload so
 *                 other nodes can discover the current leader's address
 */
@Named(ModuleNames.NAMED_CANDIDATE)
@Provides
@Singleton
def provideCandidate(zk: ZooKeeperClient, @Named(ModuleNames.NAMED_HOST_PORT) hostPort: String): Option[Candidate] = {
  // Rewritten as a single if/else expression: the previous version used an
  // explicit `return` (with a scalastyle suppression), which is non-idiomatic
  // and fragile inside closures.
  if (conf.highlyAvailable()) {
    log.info("Registering in Zookeeper with hostPort:" + hostPort)
    val candidate = new CandidateImpl(new ZGroup(zk, ZooDefs.Ids.OPEN_ACL_UNSAFE, conf.zooKeeperLeaderPath),
      new Supplier[Array[Byte]] {
        def get(): Array[Byte] = hostPort.getBytes("UTF-8")
      })
    Some(candidate)
  } else {
    None
  }
}
/** Repository of recent task failures, persisted under the "taskFailure:" prefix. */
@Provides
@Singleton
def provideTaskFailureRepository(
  store: PersistentStore,
  conf: MarathonConf,
  metrics: Metrics): TaskFailureRepository = {
  val entityStore =
    new MarathonStore[TaskFailure](store, metrics, () => TaskFailure.empty, prefix = "taskFailure:")
  new TaskFailureRepository(entityStore, conf.zooKeeperMaxVersions.get, metrics)
}
/** Repository of app definitions, capped at the configured number of stored ZK versions. */
@Provides
@Singleton
def provideAppRepository(
  store: PersistentStore,
  conf: MarathonConf,
  metrics: Metrics): AppRepository = {
  val entityStore =
    new MarathonStore[AppDefinition](store, metrics, () => AppDefinition.apply(), prefix = "app:")
  new AppRepository(entityStore, maxVersions = conf.zooKeeperMaxVersions.get, metrics)
}
/** Repository of groups, persisted under the "group:" prefix. */
@Provides
@Singleton
def provideGroupRepository(
  store: PersistentStore,
  conf: MarathonConf,
  metrics: Metrics): GroupRepository = {
  val entityStore = new MarathonStore[Group](store, metrics, () => Group.empty, "group:")
  new GroupRepository(entityStore, conf.zooKeeperMaxVersions.get, metrics)
}
/** Repository of deployment plans, persisted under the "deployment:" prefix. */
@Provides
@Singleton
def provideDeploymentRepository(
  store: PersistentStore,
  conf: MarathonConf,
  metrics: Metrics): DeploymentRepository = {
  val entityStore = new MarathonStore[DeploymentPlan](store, metrics, () => DeploymentPlan.empty, "deployment:")
  new DeploymentRepository(entityStore, conf.zooKeeperMaxVersions.get, metrics)
}
// Single shared actor system for the whole Marathon process.
@Provides
@Singleton
def provideActorSystem(): ActorSystem = ActorSystem("marathon")
/* Reexports the `akka.actor.ActorSystem` as `akka.actor.ActorRefFactory`. It doesn't work automatically. */
@Provides
@Singleton
def provideActorRefFactory(system: ActorSystem): ActorRefFactory = system
/**
 * Utility for reading/writing the Mesos framework ID. Uses an empty key prefix,
 * and generates a fresh random ID as the default when none is stored yet.
 */
@Provides
@Singleton
def provideFrameworkIdUtil(store: PersistentStore, metrics: Metrics, conf: MarathonConf): FrameworkIdUtil = {
new FrameworkIdUtil(
new MarathonStore[FrameworkId](store, metrics, () => new FrameworkId(UUID.randomUUID().toString), ""),
conf.zkTimeoutDuration)
}
/** Provides the storage-format migration runner executed on startup. */
@Provides
@Singleton
def provideMigration(
store: PersistentStore,
appRepo: AppRepository,
groupRepo: GroupRepository,
metrics: Metrics,
config: MarathonConf): Migration = {
new Migration(store, appRepo, groupRepo, config, metrics)
}
// Stateless helper for encoding/decoding Marathon task IDs.
@Provides
@Singleton
def provideTaskIdUtil(): TaskIdUtil = new TaskIdUtil
// Artifact storage (e.g. for app resources), backend chosen from config.
@Provides
@Singleton
def provideStorageProvider(config: MarathonConf, http: HttpConf): StorageProvider =
StorageProvider.provider(config, http)
// Serializes group update operations so they never run concurrently.
@Named(ModuleNames.NAMED_SERIALIZE_GROUP_UPDATES)
@Provides
@Singleton
def provideSerializeGroupUpdates(actorRefFactory: ActorRefFactory): SerializeExecution = {
SerializeExecution(actorRefFactory, "serializeGroupUpdates")
}
/**
 * Provides the singleton GroupManager and registers two gauges reporting the
 * number of apps and groups reachable from the root group.
 */
@Provides
@Singleton
def provideGroupManager(
  @Named(ModuleNames.NAMED_SERIALIZE_GROUP_UPDATES) serializeUpdates: SerializeExecution,
  scheduler: MarathonSchedulerService,
  taskTracker: TaskTracker,
  groupRepo: GroupRepository,
  appRepo: AppRepository,
  storage: StorageProvider,
  config: MarathonConf,
  @Named(EventModule.busName) eventBus: EventStream,
  metrics: Metrics): GroupManager = {
  val groupManager: GroupManager = new GroupManager(
    serializeUpdates,
    scheduler,
    taskTracker,
    groupRepo,
    appRepo,
    storage,
    config,
    eventBus
  )
  // Consistency fix: the gauges previously read the enclosing module's `conf`
  // field while everything else here uses the injected `config` parameter.
  // Both resolve to the same MarathonConf binding; using the parameter keeps
  // this provider free of hidden module state.
  // NOTE(review): each gauge read blocks via Await.result until the root group
  // is loaded — confirm this is acceptable for the metrics reporter thread.
  metrics.gauge("service.mesosphere.marathon.app.count", new Gauge[Int] {
    override def getValue: Int = {
      Await.result(groupManager.rootGroup(), config.zkTimeoutDuration).transitiveApps.size
    }
  })
  metrics.gauge("service.mesosphere.marathon.group.count", new Gauge[Int] {
    override def getValue: Int = {
      Await.result(groupManager.rootGroup(), config.zkTimeoutDuration).transitiveGroups.size
    }
  })
  groupManager
}
}
| sledigabel/marathon | src/main/scala/mesosphere/marathon/MarathonModule.scala | Scala | apache-2.0 | 12,662 |
package cn.changhong.finagle.http
/**
* Created by yangguo on 14-10-24.
*/
/** Immutable username/password pair used by the HTTP layer. */
case class User(username: String, password: String)
| guoyang2011/myfinagle | Server/src/main/scala/cn/changhong/finagle/http/EntityFactory.scala | Scala | apache-2.0 | 127 |
package amora.backend.indexer
import org.junit.Test
import amora.backend.schema.Schema
import amora.api._
/**
 * REST API tests for the commit history: each turtle update creates a commit,
 * and the tests pin the (deterministic) commit hash prefixes produced by the
 * fixed Person data below. Changing the test data changes the expected hashes.
 */
class CommitTest extends RestApiTest {
import amora.TestUtils._
private case class Person(name: String, age: Int)
// Renders the given persons as a Turtle update using the Person schema prefixes.
private def buildTurtleUpdate(ps: Seq[Person]) = Schema.turtleBuilder {
(addPrefix, addData) ⇒
addPrefix("Person", "http://amora.center/kb/amora/Schema/Person/")
addPrefix("PersonData", "http://amora.center/kb/amora/Person/")
for (p ← ps) {
val id = s"PersonData:${p.name}"
addData(id, "a", "Person:")
addData(id, "Person:name", s""""${p.name}"""")
addData(id, "Person:age", p.age.toString)
}
}
// Three independent updates; each one becomes its own commit.
def update1() = turtleRequest(buildTurtleUpdate(Seq(Person("franz", 49))))
def update2() = turtleRequest(buildTurtleUpdate(Seq(Person("hugo", 25))))
def update3() = turtleRequest(buildTurtleUpdate(Seq(Person("sarah", 27))))
@Test
def return_empty_string_when_there_is_no_commit_yet() = {
headCommit() === ""
}
@Test
def head_commit_exists_for_single_update() = {
headCommit().take(8) === "e002e422"
}
@Test
def list_no_commits_when_there_are_no_commits_yet() = {
listCommits() === ""
}
@Test
def list_single_commit() = {
update1()
listCommits().take(8) === "e002e422"
}
@Test
def head_commit_exists_after_multiple_updates() = {
update1()
update2()
update3()
headCommit().take(8) === "946c1b6a"
}
@Test
def list_commits_after_multiple_updates() = {
update1()
update2()
update3()
// Commits are listed newest first.
listCommits().split(",").map(_.take(8)).toSeq === Seq(
"946c1b6a",
"265fb068",
"e002e422")
}
@Test
def get_data_of_commit() = {
update1()
update2()
update3()
val Array(hash1, hash2, hash3) = listCommits().split(",")
val q = sparqlQuery"""
prefix Person:<http://amora.center/kb/amora/Schema/Person/>
select * where {
[Person:name ?name; Person:age ?age] .
}
order by ?name
"""
// Each commit's model contains only the data added by that update...
modelAsData(showCommit(hash1), q) === Seq(
Seq(Data("name", "sarah"), Data("age", "27")))
modelAsData(showCommit(hash2), q) === Seq(
Seq(Data("name", "hugo"), Data("age", "25")))
modelAsData(showCommit(hash3), q) === Seq(
Seq(Data("name", "franz"), Data("age", "49")))
// ...while a plain query sees the union of all commits.
sparqlRequest(q) === Seq(
Seq(Data("name", "franz"), Data("age", "49")),
Seq(Data("name", "hugo"), Data("age", "25")),
Seq(Data("name", "sarah"), Data("age", "27")))
}
}
| sschaef/tooling-research | backend/src/test/scala/amora/backend/indexer/CommitTest.scala | Scala | mit | 2,540 |
package scalapb.json4s
import org.scalatest.{FlatSpec, MustMatchers}
/** Unit tests for NameUtils.snakeCaseToCamelCase covering snake_case, digit and leading-underscore inputs. */
class NameUtilsSpec extends FlatSpec with MustMatchers {
"snakeCaseToCamelCase" should "work for normal names" in {
NameUtils.snakeCaseToCamelCase("scala_pb") must be("scalaPb")
NameUtils.snakeCaseToCamelCase("foo_bar") must be("fooBar")
NameUtils.snakeCaseToCamelCase("foo_bar_123baz") must be("fooBar123Baz")
NameUtils.snakeCaseToCamelCase("foo_bar_123_baz") must be("fooBar123Baz")
// Leading underscores are dropped and the first letter is capitalized.
NameUtils.snakeCaseToCamelCase("__foo_bar") must be("FooBar")
NameUtils.snakeCaseToCamelCase("_foo_bar") must be("FooBar")
NameUtils.snakeCaseToCamelCase("_scala_pb") must be("ScalaPb")
NameUtils.snakeCaseToCamelCase("foo__bar") must be("fooBar")
NameUtils.snakeCaseToCamelCase("123bar") must be("123Bar")
NameUtils.snakeCaseToCamelCase("123_bar") must be("123Bar")
}
"snakeCaseToCamelCase" should "work when already in camel case" in {
NameUtils.snakeCaseToCamelCase("fooBar") must be("fooBar")
NameUtils.snakeCaseToCamelCase("fooBar_baz") must be("fooBarBaz")
NameUtils.snakeCaseToCamelCase("FooBar") must be("fooBar")
}
}
| trueaccord/scalapb-json4s | src/test/scala/scalapb/json4s/NameUtilsSpec.scala | Scala | apache-2.0 | 1,142 |
object Main extends App {
  // Print the odd numbers from 1 to 99 (inclusive range, step 2).
  (1 to 100 by 2).foreach(println)
}
| nikai3d/ce-challenges | easy/oddnums.scala | Scala | bsd-3-clause | 65 |
package scala.slick.ast
/**
* The standard library for query operators.
*/
object Library {
// Marker for functions that aggregate over a group (min, max, count, ...).
trait AggregateFunctionSymbol extends Symbol
// JDBC escape-syntax function ({fn name(...)}).
class JdbcFunction(name: String) extends FunctionSymbol(name)
// Plain SQL function rendered as name(args).
class SqlFunction(name: String) extends FunctionSymbol(name)
// Infix SQL operator rendered as (lhs name rhs).
class SqlOperator(name: String) extends FunctionSymbol(name)
class AggregateFunction(name: String) extends FunctionSymbol(name) with AggregateFunctionSymbol
class SqlAggregateFunction(name: String) extends SqlFunction(name) with AggregateFunctionSymbol
// Boolean operators
val And = new SqlOperator("and")
val Or = new SqlOperator("or")
val Not = new SqlOperator("not")
// Numeric operators and functions
val + = new SqlOperator("+")
val - = new SqlOperator("-")
val * = new SqlOperator("*")
val / = new SqlOperator("/")
val % = new JdbcFunction("mod")
val Between = new FunctionSymbol("between")
val Abs = new JdbcFunction("abs")
val Ceiling = new JdbcFunction("ceiling")
val Floor = new JdbcFunction("floor")
val Sign = new JdbcFunction("sign")
val Degrees = new JdbcFunction("degrees")
val Radians = new JdbcFunction("radians")
// Comparison
val < = new SqlOperator("<")
val <= = new SqlOperator("<=")
val > = new SqlOperator(">")
val >= = new SqlOperator(">=")
val == = new SqlOperator("=")
// Set membership
val In = new SqlOperator("in")
// String functions
val Length = new JdbcFunction("length")
val Concat = new JdbcFunction("concat")
val UCase = new JdbcFunction("ucase")
val LCase = new JdbcFunction("lcase")
val LTrim = new JdbcFunction("ltrim")
val RTrim = new JdbcFunction("rtrim")
val Trim = new FunctionSymbol("Trim")
val Like = new FunctionSymbol("Like")
val StartsWith = new FunctionSymbol("StartsWith")
val EndsWith = new FunctionSymbol("EndsWith")
// Aggregate functions
val Min = new SqlAggregateFunction("min")
val Max = new SqlAggregateFunction("max")
val Avg = new SqlAggregateFunction("avg")
val Sum = new SqlAggregateFunction("sum")
val Count = new SqlAggregateFunction("count")
val CountAll = new AggregateFunction("count(*)")
val CountDistinct = new AggregateFunction("count distinct")
val Exists = new SqlFunction("exists")
val Cast = new FunctionSymbol("Cast")
val IfNull = new JdbcFunction("ifnull")
// Values
val User = new JdbcFunction("user")
val Database = new JdbcFunction("database")
val CurrentDate = new JdbcFunction("curdate")
val CurrentTime = new JdbcFunction("curtime")
val Pi = new JdbcFunction("pi")
// Sequence operations
val NextValue = new FunctionSymbol("NextValue")
val CurrentValue = new FunctionSymbol("CurrentValue")
}
/** A Symbol that represents a library function or operator */
class FunctionSymbol(val name: String) extends Symbol {
/** Create an untyped Apply of this Symbol */
//def apply(ch: Node*): Apply = Apply(this, ch)
/** Match an Apply of this Symbol; matches by reference identity (`eq`), so
 * each operator is identified by its singleton instance, not its name. */
def unapplySeq(n: Node) = n match {
case Apply(sym, ch) if sym eq this => Some(ch)
case _ => None
}
/** Create a typed Apply of this Symbol */
def typed(tpe: Type, ch: Node*): Apply with TypedNode = Apply(this, ch)(tpe)
/** Create a typed Apply of this Symbol */
def typed[T : StaticType](ch: Node*): Apply with TypedNode = Apply(this, ch)(implicitly[StaticType[T]])
override def toString = "Function "+name
}
| boldradius/slick | src/main/scala/scala/slick/ast/Library.scala | Scala | bsd-2-clause | 3,364 |
/**
* Copyright (C) 2012-2013 Vadim Bartko (vadim.bartko@nevilon.com).
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* See file LICENSE.txt for License information.
*/
package com.nevilon.nomad.crawler
import org.apache.commons.lang.builder.{EqualsBuilder, HashCodeBuilder}
/**
 * A crawl domain together with its processing status. Equality and hash code
 * are based on `name` only; `status` is deliberately excluded so the same
 * domain compares equal across status transitions.
 */
class Domain(val name: String, val status: DomainStatus.Value) {

  /** Returns a copy of this domain carrying the new status. */
  def updateStatus(newStatus: DomainStatus.Value): Domain = new Domain(name, newStatus)

  // Idiom fix: pattern match instead of isInstanceOf/asInstanceOf; semantics unchanged.
  override def equals(obj: Any): Boolean = obj match {
    case other: Domain =>
      new EqualsBuilder()
        .append(name, other.name)
        .isEquals
    case _ => false
  }

  override def hashCode(): Int = {
    new HashCodeBuilder()
      .append(name)
      .toHashCode
  }
}
// NOTE(review): this empty class looks unused — the enumeration below carries
// all the behavior; confirm before removing.
class DomainStatus
// Lifecycle states of a crawled domain. Kept as scala.Enumeration because
// `DomainStatus.Value` is referenced by Domain above; migrating to a sealed
// trait would change that type.
object DomainStatus extends Enumeration {
val IN_PROGRESS = Value("IN_PROGRESS")
val COMPLETE = Value("COMPLETE")
val NEW = Value("NEW")
val SKIP = Value("SKIP")
}
| hudvin/nomad | src/main/scala/com/nevilon/nomad/crawler/Domain.scala | Scala | gpl-2.0 | 1,172 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail
import cats.laws._
import cats.laws.discipline._
import monix.eval.Coeval
import monix.execution.internal.Platform
import org.scalacheck.Test
import org.scalacheck.Test.Parameters
import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer
/**
 * Property tests for Iterant.interleave, checked against a reference
 * implementation on plain Lists (`interleaveLists`).
 */
object IterantInterleaveSuite extends BaseTestSuite {
// Fewer generated cases on JS to keep test runtime reasonable.
override lazy val checkConfig: Parameters = {
if (Platform.isJVM)
Test.Parameters.default.withMaxSize(256)
else
Test.Parameters.default.withMaxSize(32)
}
// Reference model: alternate elements starting with `lh`, stopping as soon
// as the side whose turn it is runs out.
def interleaveLists[A](lh: List[A], rh: List[A]): List[A] = {
@tailrec
def loop(lh: List[A], rh: List[A], acc: ListBuffer[A]): List[A] =
lh match {
case x :: xs =>
acc += x
// Swap the argument order to alternate sides on each step.
loop(rh, xs, acc)
case Nil =>
acc.toList
}
loop(lh, rh, ListBuffer.empty)
}
test("interleaveLists #1") { _ =>
val list1 = List(1, 2, 3, 4)
val list2 = List(1, 2)
assertEquals(interleaveLists(list1, list2), List(1, 1, 2, 2, 3))
}
test("interleaveLists #2") { _ =>
val list1 = List(1, 2)
val list2 = List(1, 2, 3)
assertEquals(interleaveLists(list1, list2), List(1, 1, 2, 2))
}
test("Iterant.interleave equivalence with interleaveLists") { implicit s =>
check4 { (list1: List[Int], idx1: Int, list2: List[Int], idx2: Int) =>
val stream1 = arbitraryListToIterant[Coeval, Int](list1, math.abs(idx1) + 1, allowErrors = false)
val stream2 = arbitraryListToIterant[Coeval, Int](list2, math.abs(idx2) + 1, allowErrors = false)
stream1.interleave(stream2).toListL.value() <-> interleaveLists(list1, list2)
}
}
}
| alexandru/monifu | monix-tail/shared/src/test/scala/monix/tail/IterantInterleaveSuite.scala | Scala | apache-2.0 | 2,303 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.beans
/** Provides some simple runtime processing necessary to create
* JavaBean descriptors for Scala entities. The compiler creates
* subclasses of this class automatically when the BeanInfo annotation is
* attached to a class.
*
* @author Ross Judson (rjudson@managedobjects.com)
*/
abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
                             props: Array[String],
                             methods: Array[String]) extends java.beans.SimpleBeanInfo {
  import java.beans._

  // `props` holds flat (propertyName, getterName, setterName) triples,
  // hence one descriptor per three entries.
  private val pd = new Array[PropertyDescriptor](props.length / 3)
  private val md =
    clazz.getMethods.filter(m => methods.contains(m.getName)).map(new MethodDescriptor(_))

  init()

  override def getPropertyDescriptors() = pd
  override def getMethodDescriptors() = md

  // override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)

  // Fill `pd` from consecutive (name, getter, setter) triples in `props`.
  private def init(): Unit =
    for (i <- 0 until props.length by 3)
      pd(i / 3) = new PropertyDescriptor(props(i), clazz, props(i + 1), props(i + 2))
}
| felixmulder/scala | src/library/scala/beans/ScalaBeanInfo.scala | Scala | bsd-3-clause | 1,621 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager.actor.cluster
import java.io.Closeable
import java.net.InetAddress
import java.nio.ByteBuffer
import java.time.Duration
import java.util
import java.util.Properties
import java.util.concurrent.{ConcurrentLinkedDeque, TimeUnit}
import akka.actor.{ActorContext, ActorPath, ActorRef, Props}
import akka.pattern._
import com.github.benmanes.caffeine.cache.{Cache, Caffeine, RemovalCause, RemovalListener}
import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}
import grizzled.slf4j.Logging
import kafka.common.OffsetAndMetadata
import kafka.manager._
import kafka.manager.base.cluster.{BaseClusterQueryActor, BaseClusterQueryCommandActor}
import kafka.manager.base.{LongRunningPoolActor, LongRunningPoolConfig}
import kafka.manager.features.{ClusterFeatures, KMDeleteTopicFeature, KMPollConsumersFeature}
import kafka.manager.model.ActorModel._
import kafka.manager.model._
import kafka.manager.utils.ZkUtils
import kafka.manager.utils.zero81.{PreferredReplicaLeaderElectionCommand, ReassignPartitionCommand}
import kafka.manager.utils.two40.{GroupMetadata, GroupMetadataKey, MemberMetadata, OffsetKey}
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.recipes.cache.PathChildrenCache.StartMode
import org.apache.curator.framework.recipes.cache._
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.consumer.{Consumer, ConsumerRecords, KafkaConsumer}
import org.apache.kafka.common.{ConsumerGroupState, TopicPartition}
import org.apache.kafka.common.requests.DescribeGroupsResponse
import org.joda.time.{DateTime, DateTimeZone}
import scala.collection.concurrent.TrieMap
import scala.collection.immutable.Map
import scala.collection.mutable
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success, Try}
import org.apache.kafka.clients.consumer.ConsumerConfig._
import org.apache.kafka.clients.consumer.internals.ConsumerProtocol
import org.apache.kafka.common.config.SaslConfigs
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.clients.CommonClientConfigs.SECURITY_PROTOCOL_CONFIG
import org.apache.kafka.clients.admin.{AdminClient, ConsumerGroupDescription, DescribeConsumerGroupsOptions}
import org.apache.kafka.common.KafkaFuture.BiConsumer
import org.apache.kafka.common.metrics.{KafkaMetric, MetricsReporter}
import org.apache.kafka.common.utils.Time
/**
* @author hiral
*/
import kafka.manager.utils._
import scala.collection.JavaConverters._
/** MetricsReporter that discards everything; used to silence the Kafka client's JMX reporting. */
class NoopJMXReporter extends MetricsReporter {
override def init(metrics: util.List[KafkaMetric]): Unit = {}
override def metricChange(metric: KafkaMetric): Unit = {}
override def metricRemoval(metric: KafkaMetric): Unit = {}
override def close(): Unit = {}
override def configure(configs: util.Map[String, _]): Unit = {}
}
// Parameters of a partition offset request (timestamp and max offsets to return).
case class PartitionOffsetRequestInfo(time: Long, maxNumOffsets: Int)
// Configuration for KafkaAdminClientActor: cluster, thread-pool settings, the
// state actor to ask for brokers, and optional extra client properties.
case class KafkaAdminClientActorConfig(clusterContext: ClusterContext, longRunningPoolConfig: LongRunningPoolConfig, kafkaStateActorPath: ActorPath, consumerProperties: Option[Properties])
/**
 * Actor wrapping a Kafka AdminClient. The client is created lazily: the first
 * query arriving before initialization triggers a broker-list request to the
 * state actor, and the BrokerList response creates the client. On failures the
 * client is dropped and re-created on the next broker-list response.
 */
case class KafkaAdminClientActor(config: KafkaAdminClientActorConfig) extends BaseClusterQueryActor with LongRunningPoolActor {
// None until a BrokerList response arrives; reset to None on client failure.
private[this] var adminClientOption : Option[AdminClient] = None
protected implicit val clusterContext: ClusterContext = config.clusterContext
override protected def longRunningPoolConfig: LongRunningPoolConfig = config.longRunningPoolConfig
override protected def longRunningQueueFull(): Unit = {
log.error("Long running pool queue full, skipping!")
}
@scala.throws[Exception](classOf[Exception])
override def preStart() = {
super.preStart()
log.info(config.toString)
}
@scala.throws[Exception](classOf[Exception])
override def preRestart(reason: Throwable, message: Option[Any]): Unit = {
log.error(reason, "Restarting due to [{}] when processing [{}]",
reason.getMessage, message.getOrElse(""))
super.preRestart(reason, message)
}
@scala.throws[Exception](classOf[Exception])
override def postStop(): Unit = {
log.info("Closing admin client...")
// Best effort close; failures are ignored on shutdown.
Try(adminClientOption.foreach(_.close()))
log.info("Stopped actor %s".format(self.path))
}
// Builds an AdminClient from the broker list, applying any user-supplied
// properties first so the security/bootstrap settings below always win.
private def createAdminClient(bl: BrokerList): AdminClient = {
val targetBrokers : IndexedSeq[BrokerIdentity] = bl.list
val brokerListStr: String = targetBrokers.map {
b =>
val port = b.endpoints(config.clusterContext.config.securityProtocol)
s"${b.host}:$port"
}.mkString(",")
val props = new Properties()
config.consumerProperties.foreach {
cp => props.putAll(cp.asMap)
}
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.clusterContext.config.securityProtocol.stringId)
props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, brokerListStr)
if(config.clusterContext.config.saslMechanism.nonEmpty){
props.put(SaslConfigs.SASL_MECHANISM, config.clusterContext.config.saslMechanism.get.stringId)
log.info(s"SASL Mechanism =${config.clusterContext.config.saslMechanism.get}")
}
if(config.clusterContext.config.jaasConfig.nonEmpty){
props.put(SaslConfigs.SASL_JAAS_CONFIG, config.clusterContext.config.jaasConfig.get)
log.info(s"SASL JAAS config=${config.clusterContext.config.jaasConfig.get}")
}
log.info(s"Creating admin client with security protocol=${config.clusterContext.config.securityProtocol.stringId} , broker list : $brokerListStr")
AdminClient.create(props)
}
override def processQueryRequest(request: QueryRequest): Unit = {
if(adminClientOption.isEmpty) {
// Not initialized yet: ask the state actor for brokers so the client can
// be built; the current request is dropped (caller retries later).
context.actorSelection(config.kafkaStateActorPath).tell(KSGetBrokers, self)
log.error(s"AdminClient not initialized yet, cannot process request : $request")
} else {
implicit val ec = longRunningExecutionContext
request match {
case KAGetGroupSummary(groupList: Seq[String], enqueue: java.util.Queue[(String, List[MemberMetadata])]) =>
Future {
try {
adminClientOption.foreach {
client =>
val options = new DescribeConsumerGroupsOptions
options.timeoutMs(1000)
// Results are delivered asynchronously into the caller-provided queue.
client.describeConsumerGroups(groupList.asJava, options).all().whenComplete {
(mapGroupDescription, error) => mapGroupDescription.asScala.foreach {
case (group, desc) =>
enqueue.offer(group -> desc.members().asScala.map(m => MemberMetadata.from(group, desc, m)).toList)
}
}
}
} catch {
case e: Exception =>
log.error(e, s"Failed to get group summary with admin client : $groupList")
log.error(e, s"Forcing new admin client initialization...")
// Drop the client so the next request triggers re-initialization.
Try { adminClientOption.foreach(_.close()) }
adminClientOption = None
}
}
case any: Any => log.warning("kac : processQueryRequest : Received unknown message: {}", any.toString)
}
}
}
override def processActorResponse(response: ActorResponse): Unit = {
response match {
case bl: BrokerList =>
if(bl.list.nonEmpty) {
Try {
adminClientOption = Option(createAdminClient(bl))
}.logError(s"Failed to create admin client with brokerlist : $bl")
}
case any: Any => log.warning("kac : processActorResponse : Received unknown message: {}", any.toString)
}
}
}
/** Thin facade that forwards group-metadata requests to the admin client actor. */
class KafkaAdminClient(context: => ActorContext, adminClientActorPath: ActorPath) {
// Fire-and-forget: results arrive asynchronously in `queue`. The Try result is
// deliberately discarded — a failure to resolve/tell is treated as best effort.
def enqueueGroupMetadata(groupList: Seq[String], queue: java.util.Queue[(String, List[MemberMetadata])]) : Unit = {
Try {
context.actorSelection(adminClientActorPath).tell(KAGetGroupSummary(groupList, queue), ActorRef.noSender)
}
}
}
object KafkaManagedOffsetCache {
  // Kafka versions whose __consumer_offsets message format this cache understands.
  // (Kafka_2_2_1 appears twice in the original list; harmless in a Set.)
  val supportedVersions: Set[KafkaVersion] = Set(Kafka_0_8_2_0, Kafka_0_8_2_1, Kafka_0_8_2_2, Kafka_0_9_0_0, Kafka_0_9_0_1, Kafka_0_10_0_0, Kafka_0_10_0_1, Kafka_0_10_1_0, Kafka_0_10_1_1, Kafka_0_10_2_0, Kafka_0_10_2_1, Kafka_0_11_0_0, Kafka_0_11_0_2, Kafka_1_0_0, Kafka_1_0_1, Kafka_1_1_0, Kafka_1_1_1, Kafka_2_0_0, Kafka_2_1_0, Kafka_2_1_1, Kafka_2_2_0, Kafka_2_2_1, Kafka_2_2_2, Kafka_2_3_0, Kafka_2_2_1, Kafka_2_4_0, Kafka_2_4_1, Kafka_2_5_0, Kafka_2_5_1, Kafka_2_6_0)

  // Name of Kafka's internal offsets topic.
  val ConsumerOffsetTopic = "__consumer_offsets"

  /** True when offset-topic consumption is supported for the given broker version. */
  def isSupported(version: KafkaVersion): Boolean = supportedVersions.contains(version)

  /** Creates a thread-safe mutable set backed by a ConcurrentHashMap. */
  def createSet[T](): mutable.Set[T] = {
    import scala.collection.JavaConverters._
    java.util.Collections
      .newSetFromMap(new java.util.concurrent.ConcurrentHashMap[T, java.lang.Boolean])
      .asScala
  }
}
// Default tuning values for the managed offset cache.
object KafkaManagedOffsetCacheConfig {
// How often (ms) to backfill missing group member metadata.
val defaultGroupMemberMetadataCheckMillis: Int = 30000
// Max number of (group, topic, partition) offset entries to cache.
val defaultGroupTopicPartitionOffsetMaxSize: Int = 1000000
// Evict offset entries not accessed for this many days.
val defaultGroupTopicPartitionOffsetExpireDays: Int = 7
}
// Tuning knobs for KafkaManagedOffsetCache; all fields default to the values above.
case class KafkaManagedOffsetCacheConfig(groupMemberMetadataCheckMillis: Int = KafkaManagedOffsetCacheConfig.defaultGroupMemberMetadataCheckMillis
, groupTopicPartitionOffsetMaxSize: Int = KafkaManagedOffsetCacheConfig.defaultGroupTopicPartitionOffsetMaxSize
, groupTopicPartitionOffsetExpireDays: Int = KafkaManagedOffsetCacheConfig.defaultGroupTopicPartitionOffsetExpireDays)
case class KafkaManagedOffsetCache(clusterContext: ClusterContext
, adminClient: KafkaAdminClient
, consumerProperties: Option[Properties]
, bootstrapBrokerList: BrokerList
, config: KafkaManagedOffsetCacheConfig
) extends Runnable with Closeable with Logging {
val groupTopicPartitionOffsetSet: mutable.Set[(String, String, Int)] = KafkaManagedOffsetCache.createSet()
val groupTopicPartitionOffsetMap:Cache[(String, String, Int), OffsetAndMetadata] = Caffeine
.newBuilder()
.maximumSize(config.groupTopicPartitionOffsetMaxSize)
.expireAfterAccess(config.groupTopicPartitionOffsetExpireDays, TimeUnit.DAYS)
.removalListener(new RemovalListener[(String, String, Int), OffsetAndMetadata] {
override def onRemoval(key: (String, String, Int), value: OffsetAndMetadata, cause: RemovalCause): Unit = {
groupTopicPartitionOffsetSet.remove(key)
}
})
.build[(String, String, Int), OffsetAndMetadata]()
val topicConsumerSetMap = new TrieMap[String, mutable.Set[String]]()
val consumerTopicSetMap = new TrieMap[String, mutable.Set[String]]()
val groupTopicPartitionMemberSet: mutable.Set[(String, String, Int)] = KafkaManagedOffsetCache.createSet()
val groupTopicPartitionMemberMap: Cache[(String, String, Int), MemberMetadata] = Caffeine
.newBuilder()
.maximumSize(config.groupTopicPartitionOffsetMaxSize)
.expireAfterAccess(config.groupTopicPartitionOffsetExpireDays, TimeUnit.DAYS)
.removalListener(new RemovalListener[(String, String, Int), MemberMetadata] {
override def onRemoval(key: (String, String, Int), value: MemberMetadata, cause: RemovalCause): Unit = {
groupTopicPartitionMemberSet.remove(key)
}
})
.build[(String, String, Int), MemberMetadata]()
private[this] val queue = new ConcurrentLinkedDeque[(String, List[MemberMetadata])]()
@volatile
private[this] var lastUpdateTimeMillis : Long = 0
private[this] var lastGroupMemberMetadataCheckMillis : Long = System.currentTimeMillis()
import KafkaManagedOffsetCache._
import kafka.manager.utils.two40.GroupMetadataManager._
require(isSupported(clusterContext.config.version), s"Kafka version not support : ${clusterContext.config}")
@volatile
private[this] var shutdown: Boolean = false
private[this] def createKafkaConsumer(): Consumer[Array[Byte], Array[Byte]] = {
val hostname = InetAddress.getLocalHost.getHostName
val brokerListStr: String = bootstrapBrokerList.list.map {
b =>
val port = b.endpoints(clusterContext.config.securityProtocol)
s"${b.host}:$port"
}.mkString(",")
val props: Properties = new Properties()
props.put(GROUP_ID_CONFIG, s"KMOffsetCache-$hostname")
props.put(BOOTSTRAP_SERVERS_CONFIG, brokerListStr)
props.put(EXCLUDE_INTERNAL_TOPICS_CONFIG, "false")
props.put(ENABLE_AUTO_COMMIT_CONFIG, "false")
props.put(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
props.put(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
props.put(AUTO_OFFSET_RESET_CONFIG, "latest")
props.put(METRIC_REPORTER_CLASSES_CONFIG, classOf[NoopJMXReporter].getCanonicalName)
consumerProperties.foreach {
cp => props.putAll(cp.asMap)
}
props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, clusterContext.config.securityProtocol.stringId)
if(clusterContext.config.saslMechanism.nonEmpty){
props.put(SaslConfigs.SASL_MECHANISM, clusterContext.config.saslMechanism.get.stringId)
info(s"SASL Mechanism =${clusterContext.config.saslMechanism.get}")
if(clusterContext.config.jaasConfig.nonEmpty){
props.put(SaslConfigs.SASL_JAAS_CONFIG, clusterContext.config.jaasConfig.get)
info(s"SASL JAAS config=${clusterContext.config.jaasConfig.get}")
}
}
Try {
info("Constructing new kafka consumer client using these properties: ")
props.asScala.foreach {
case (k, v) => info(s"$k=$v")
}
}
new KafkaConsumer[Array[Byte], Array[Byte]](props)
}
private[this] def performGroupMetadataCheck() : Unit = {
val currentMillis = System.currentTimeMillis()
if((lastGroupMemberMetadataCheckMillis + config.groupMemberMetadataCheckMillis) < currentMillis) {
val diff = groupTopicPartitionOffsetSet.diff(groupTopicPartitionMemberSet)
if(diff.nonEmpty) {
val groupsToBackfill = diff.map(_._1).toSeq
info(s"Backfilling group metadata for $groupsToBackfill")
adminClient.enqueueGroupMetadata(groupsToBackfill, queue)
}
lastGroupMemberMetadataCheckMillis = System.currentTimeMillis()
lastUpdateTimeMillis = System.currentTimeMillis()
}
}
  /**
   * Drains the backfill queue populated by the admin client (see performGroupMetadataCheck)
   * and records member metadata for each (group, topic, partition) assignment.
   * Entries already present — i.e. learned from a newer offset-topic update — are not overwritten.
   */
  private[this] def dequeueAndProcessBackFill(): Unit = {
    while(!queue.isEmpty) {
      val (groupId, members) = queue.pop()
      members.foreach {
        member =>
          try {
            member.assignment.foreach {
              case (topic, part) =>
                val k = (groupId, topic, part)
                //only add it if it hasn't already been added through a new update via the offset topic
                if(groupTopicPartitionMemberMap.getIfPresent(k) == null) {
                  groupTopicPartitionMemberMap.put(k, member)
                  groupTopicPartitionMemberSet.add(k)
                }
            }
          } catch {
            case e: Exception =>
              // Best-effort: a malformed member summary must not abort the rest of the backfill.
              error(s"Failed to get member metadata from group summary and member summary : $groupId : $member", e)
          }
      }
    }
  }
  /**
   * Main loop: consumes the internal __consumer_offsets topic and keeps the in-memory
   * offset/member caches up to date until close() flips `shutdown`.
   *
   * Per iteration: drain backfill queue, run the periodic metadata check, then poll the
   * offset topic and fold each record into the caches. All caches are cleared on exit.
   */
  override def run(): Unit = {
    if(!shutdown) {
      // `for` over Try: the body only runs when consumer creation succeeded.
      for {
        consumer <- Try {
          val consumer = createKafkaConsumer()
          consumer.subscribe(java.util.Arrays.asList(KafkaManagedOffsetCache.ConsumerOffsetTopic))
          consumer
        }.logError(s"Failed to create consumer for offset topic for cluster ${clusterContext.config.name}")
      } {
        try {
          info(s"Consumer created for kafka offset topic consumption for cluster ${clusterContext.config.name}")
          while (!shutdown) {
            try {
              // Backfill/metadata work is isolated so its failure never kills the poll loop.
              try {
                dequeueAndProcessBackFill()
                performGroupMetadataCheck()
              } catch {
                case e: Exception =>
                  error("Failed to backfill group metadata", e)
              }
              val records: ConsumerRecords[Array[Byte], Array[Byte]] = consumer.poll(Duration.ofMillis(100))
              val iterator = records.iterator()
              while (iterator.hasNext) {
                val record = iterator.next()
                val key = record.key()
                val value = record.value()
                //only process records with data
                if (key != null && value != null) {
                  // NOTE: the case patterns below intentionally shadow the outer `key`/`value`
                  // with the decoded forms.
                  readMessageKey(ByteBuffer.wrap(record.key())) match {
                    case OffsetKey(version, key) =>
                      val value: OffsetAndMetadata = readOffsetMessageValue(ByteBuffer.wrap(record.value()))
                      val newKey = (key.group, key.topicPartition.topic, key.topicPartition.partition)
                      groupTopicPartitionOffsetMap.put(newKey, value)
                      groupTopicPartitionOffsetSet.add(newKey)
                      val topic = key.topicPartition.topic
                      val group = key.group
                      // Maintain the topic -> consumers index, creating the set on first sight.
                      val consumerSet = {
                        if (topicConsumerSetMap.contains(topic)) {
                          topicConsumerSetMap(topic)
                        } else {
                          val s = new mutable.TreeSet[String]()
                          topicConsumerSetMap += topic -> s
                          s
                        }
                      }
                      consumerSet += group
                      // Maintain the consumer -> topics index, creating the set on first sight.
                      val topicSet = {
                        if (consumerTopicSetMap.contains(group)) {
                          consumerTopicSetMap(group)
                        } else {
                          val s = new mutable.TreeSet[String]()
                          consumerTopicSetMap += group -> s
                          s
                        }
                      }
                      topicSet += topic
                    case GroupMetadataKey(version, key) =>
                      // Group metadata records carry member assignments; index them per partition.
                      val value: GroupMetadata = readGroupMessageValue(key, ByteBuffer.wrap(record.value()), Time.SYSTEM)
                      value.allMemberMetadata.foreach {
                        mm =>
                          mm.assignment.foreach {
                            case (topic, part) =>
                              val newKey = (key, topic, part)
                              groupTopicPartitionMemberMap.put(newKey, mm)
                              groupTopicPartitionMemberSet.add(newKey)
                          }
                      }
                    case other: Any =>
                      error(s"Unhandled key type : ${other.getClass.getCanonicalName}")
                  }
                }
                lastUpdateTimeMillis = System.currentTimeMillis()
              }
            } catch {
              case e: Exception =>
                // Keep looping on transient poll/decoding failures.
                warn(s"Failed to process a message from offset topic on cluster ${clusterContext.config.name}!", e)
            }
          }
        } finally {
          info(s"Shutting down consumer for $ConsumerOffsetTopic on cluster ${clusterContext.config.name}")
          Try(consumer.close())
        }
      }
    }
    // Loop exited (shutdown or consumer creation failure): release all cached state.
    groupTopicPartitionMemberSet.clear()
    groupTopicPartitionMemberMap.invalidateAll()
    groupTopicPartitionMemberMap.cleanUp()
    groupTopicPartitionOffsetSet.clear()
    groupTopicPartitionOffsetMap.invalidateAll()
    groupTopicPartitionOffsetMap.cleanUp()
    info(s"KafkaManagedOffsetCache shut down for cluster ${clusterContext.config.name}")
  }
def close(): Unit = {
this.shutdown = true
}
def getOffset(group: String, topic: String, part:Int) : Option[Long] = {
Option(groupTopicPartitionOffsetMap.getIfPresent((group, topic, part))).map(_.offset)
}
def getOwner(group: String, topic: String, part:Int) : Option[String] = {
Option(groupTopicPartitionMemberMap.getIfPresent((group, topic, part))).map(mm => s"${mm.memberId}:${mm.clientHost}")
}
def getConsumerTopics(group: String) : Set[String] = consumerTopicSetMap.get(group).map(_.toSet).getOrElse(Set.empty)
def getTopicConsumers(topic: String) : Set[String] = topicConsumerSetMap.get(topic).map(_.toSet).getOrElse(Set.empty)
  // All consumer group names currently known to this cache (snapshot at call time).
  def getConsumers : IndexedSeq[String] = consumerTopicSetMap.keys.toIndexedSeq
  // Millis timestamp of the most recent offset-topic record processed (or metadata check).
  def getLastUpdateTimeMillis: Long = lastUpdateTimeMillis
}
// One ZK consumer instance's subscriptions: instance id plus topic -> stream count.
case class ConsumerInstanceSubscriptions private(id: String, subs: Map[String, Int])
object ConsumerInstanceSubscriptions extends Logging {
  //{"version":1,"subscription":{"DXSPreAgg":1},"pattern":"static","timestamp":"1443578242654"}
  /**
   * Parses the JSON payload stored under /consumers/&lt;consumer&gt;/ids/&lt;id&gt; (format above).
   * On parse failure the error is logged and an empty subscription map is used.
   */
  def apply(consumer: String, id: String, jsonString: String) : ConsumerInstanceSubscriptions = {
    import org.json4s.jackson.JsonMethods.parse
    import org.json4s.scalaz.JsonScalaz.field
    val json = parse(jsonString)
    // fold: left = parse error (log and fall back to empty), right = parsed map unchanged.
    val subs: Map[String, Int] = field[Map[String,Int]]("subscription")(json).fold({ e =>
      error(s"[consumer=$consumer] Failed to parse consumer instance subscriptions : $id : $jsonString"); Map.empty}, identity)
    new ConsumerInstanceSubscriptions(id, subs)
  }
}
/**
 * Common behaviour for consumer-offset caching. Concrete subclasses supply the
 * ZooKeeper-backed reads (offsets/owners/topics for ZK-managed consumers); this trait adds:
 *  - a Guava LoadingCache of latest partition offsets per topic (loaded via KafkaConsumer.endOffsets),
 *  - an optional KafkaManagedOffsetCache consuming __consumer_offsets for Kafka-managed consumers,
 *  - merged consumer/topic description queries across both consumer types.
 */
trait OffsetCache extends Logging {

  def consumerProperties: Option[Properties]

  def kafkaAdminClient: KafkaAdminClient

  def clusterContext: ClusterContext

  def getKafkaVersion: KafkaVersion

  def getCacheTimeoutSecs: Int

  def getSimpleConsumerSocketTimeoutMillis: Int

  def kafkaManagedOffsetCacheConfig: KafkaManagedOffsetCacheConfig

  protected[this] implicit def ec: ExecutionContext

  protected[this] implicit def cf: ClusterFeatures

  // When false, offsets are only fetched for interactive (user-triggered) requests.
  protected[this] val loadOffsets: Boolean

  // Caches a map of partitions to offsets at a key that is the topic's name.
  private[this] lazy val partitionOffsetsCache: LoadingCache[String, Future[PartitionOffsetsCapture]] = CacheBuilder.newBuilder()
    .expireAfterWrite(getCacheTimeoutSecs,TimeUnit.SECONDS) // TODO - update more or less often maybe, or make it configurable
    .build(
      new CacheLoader[String,Future[PartitionOffsetsCapture]] {
        def load(topic: String): Future[PartitionOffsetsCapture] = {
          loadPartitionOffsets(topic)
        }
      }
    )

  // Get the latest offsets for the partitions of the topic,
  // Code based off of the GetOffsetShell tool in kafka.tools, kafka 0.8.2.1
  private[this] def loadPartitionOffsets(topic: String): Future[PartitionOffsetsCapture] = {
    // Get partition leader broker information
    val optPartitionsWithLeaders : Option[List[(Int, Option[BrokerIdentity])]] = getTopicPartitionLeaders(topic)

    // NOTE(review): clientId and simpleConsumerBufferSize appear unused below (leftovers from the
    // SimpleConsumer-based implementation); time/nOffsets only feed PartitionOffsetRequestInfo,
    // which is itself discarded when building the endOffsets request.
    val clientId = "partitionOffsetGetter"
    val time = -1
    val nOffsets = 1
    val simpleConsumerBufferSize = 256 * 1024
    val currentActiveBrokerSet:Set[String] = getBrokerList().list.map(_.host).toSet

    // Keep only partitions whose leader is a currently-known broker, grouped by leader.
    val partitionsByBroker = optPartitionsWithLeaders.map {
      listOfPartAndBroker => listOfPartAndBroker.collect {
        case (part, broker) if broker.isDefined && currentActiveBrokerSet(broker.get.host) => (broker.get, part)
      }.groupBy(_._1)
    }

    // NOTE(review): consumerProperties.get throws if None — presumably guaranteed non-empty
    // by configuration when offsets are loaded; confirm at call sites.
    def getKafkaConsumer() = {
      new KafkaConsumer(consumerProperties.get)
    }

    // Get the latest offset for each partition
    val futureMap: Future[PartitionOffsetsCapture] = {
      partitionsByBroker.fold[Future[PartitionOffsetsCapture]]{
        Future.failed(new IllegalArgumentException(s"Do not have partitions and their leaders for topic $topic"))
      } { partitionsWithLeaders =>
        try {
          // One consumer + future per leader group; each closes its consumer when done.
          val listOfFutures = partitionsWithLeaders.toList.map(tpl => (tpl._2)).map {
            case (parts) =>
              val kafkaConsumer = getKafkaConsumer()
              val f: Future[Map[TopicPartition, java.lang.Long]] = Future {
                try {
                  val topicAndPartitions = parts.map(tpl => (new TopicPartition(topic, tpl._2), PartitionOffsetRequestInfo(time, nOffsets)))
                  val request: List[TopicPartition] = topicAndPartitions.map(f => new TopicPartition(f._1.topic(), f._1.partition()))
                  kafkaConsumer.endOffsets(request.asJava).asScala.toMap
                } finally {
                  kafkaConsumer.close()
                }
              }
              // Per-broker failures degrade to an empty map rather than failing the whole capture.
              f.recover { case t =>
                error(s"[topic=$topic] An error has occurred while getting topic offsets from broker $parts", t)
                Map.empty[TopicPartition, java.lang.Long]
              }
          }
          val result: Future[Map[TopicPartition, java.lang.Long]] = Future.sequence(listOfFutures).map(_.foldRight(Map.empty[TopicPartition, java.lang.Long])((b, a) => b ++ a))
          result.map(m => PartitionOffsetsCapture(System.currentTimeMillis(), m.map(f => (f._1.partition(), f._2.toLong))))
        }
        catch {
          case e: Exception =>
            error(s"Failed to get offsets for topic $topic", e)
            Future.failed(e)
        }
      }
    }

    futureMap.failed.foreach {
      t => error(s"[topic=$topic] An error has occurred while getting topic offsets", t)
    }
    futureMap
  }

  private[this] def emptyPartitionOffsetsCapture: Future[PartitionOffsetsCapture] = Future.successful(PartitionOffsetsCapture(System.currentTimeMillis(), Map()))

  // --- Abstract hooks implemented by OffsetCacheActive / OffsetCachePassive -----------------

  protected def getTopicPartitionLeaders(topic: String) : Option[List[(Int, Option[BrokerIdentity])]]

  protected def getTopicDescription(topic: String, interactive: Boolean) : Option[TopicDescription]

  protected def getBrokerList : () => BrokerList

  protected def readConsumerOffsetByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, Long]

  protected def readConsumerOwnerByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, String]

  protected def getConsumerTopicsFromIds(consumer: String) : Set[String]

  protected def getConsumerTopicsFromOffsets(consumer: String) : Set[String]

  protected def getConsumerTopicsFromOwners(consumer: String) : Set[String]

  protected def getZKManagedConsumerList: IndexedSeq[ConsumerNameAndType]

  protected def lastUpdateMillisZK : Long

  // Union of topics discovered via offsets, owners, and instance-id subscription nodes.
  protected def getConsumerTopics(consumer: String) : Set[String] = {
    getConsumerTopicsFromOffsets(consumer) ++ getConsumerTopicsFromOwners(consumer) ++ getConsumerTopicsFromIds(consumer)
  }

  private[this] var kafkaManagedOffsetCache : Option[KafkaManagedOffsetCache] = None

  private[this] lazy val hasNonSecureEndpoint = getBrokerList().list.exists(_.nonSecure)

  /** Starts the __consumer_offsets consumer thread; requires a supported Kafka version and brokers. */
  def start() : Unit = {
    if(KafkaManagedOffsetCache.isSupported(clusterContext.config.version)) {
      if(kafkaManagedOffsetCache.isEmpty) {
        info("Starting kafka managed offset cache ...")
        Try {
          val bl = getBrokerList()
          require(bl.list.nonEmpty, "Cannot consume from offset topic when there are no brokers!")
          val of = new KafkaManagedOffsetCache(clusterContext, kafkaAdminClient, consumerProperties, bl, kafkaManagedOffsetCacheConfig)
          kafkaManagedOffsetCache = Option(of)
          val t = new Thread(of, "KafkaManagedOffsetCache")
          t.start()
        }
      }
    } else {
      throw new IllegalArgumentException(s"Unsupported Kafka Version: ${clusterContext.config.version}")
    }
  }

  /** Signals the offset-cache thread to stop (does not join it). */
  def stop() : Unit = {
    kafkaManagedOffsetCache.foreach { of =>
      info("Stopping kafka managed offset cache ...")
      Try {
        of.close()
      }
    }
  }

  // Cached (or empty, when gated off / no non-secure endpoint) per-topic offsets future.
  def getTopicPartitionOffsets(topic: String, interactive: Boolean) : Future[PartitionOffsetsCapture] = {
    if((interactive || loadOffsets) && hasNonSecureEndpoint) {
      partitionOffsetsCache.get(topic)
    } else {
      emptyPartitionOffsetsCapture
    }
  }

  // Missing offsets default to -1 so every requested partition has an entry.
  protected def readKafkaManagedConsumerOffsetByTopicPartition(consumer: String
                                                              , topic: String
                                                              , tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, Long] = {
    kafkaManagedOffsetCache.fold(Map.empty[Int,Long]) {
      oc =>
        tpi.map {
          case (part, _) =>
            part -> oc.getOffset(consumer, topic, part).getOrElse(-1L)
        }
    }
  }

  // Missing owners default to the empty string.
  protected def readKafkaManagedConsumerOwnerByTopicPartition(consumer: String
                                                              , topic: String
                                                              , tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, String] = {
    kafkaManagedOffsetCache.fold(Map.empty[Int,String]) {
      oc =>
        tpi.map {
          case (part, _) =>
            part -> oc.getOwner(consumer, topic, part).getOrElse("")
        }
    }
  }

  protected def getKafkaManagedConsumerTopics(consumer: String) : Set[String] = {
    kafkaManagedOffsetCache.fold(Set.empty[String]) {
      oc => oc.getConsumerTopics(consumer)
    }
  }

  protected def getKafkaManagedConsumerList : IndexedSeq[ConsumerNameAndType] = {
    kafkaManagedOffsetCache.fold(IndexedSeq.empty[ConsumerNameAndType]) {
      oc => oc.getConsumers.map(name => ConsumerNameAndType(name, KafkaManagedConsumer))
    }
  }

  // Freshest of the ZK-side and Kafka-side update timestamps.
  final def lastUpdateMillis : Long = {
    Math.max(lastUpdateMillisZK, kafkaManagedOffsetCache.map(_.getLastUpdateTimeMillis).getOrElse(Long.MinValue))
  }

  /** Full description of a consumer group: all consumed topics and their per-topic details. */
  final def getConsumerDescription(consumer: String, consumerType: ConsumerType) : ConsumerDescription = {
    val consumerTopics: Set[String] = getKafkaVersion match {
      case Kafka_0_8_1_1 => getConsumerTopicsFromOffsets(consumer)
      case _ =>
        consumerType match {
          case ZKManagedConsumer =>
            getConsumerTopicsFromOffsets(consumer) ++ getConsumerTopicsFromOwners(consumer)
          case KafkaManagedConsumer =>
            getKafkaManagedConsumerTopics(consumer)
        }
    }

    val topicDescriptions: Map[String, ConsumedTopicDescription] = consumerTopics.map { topic =>
      val topicDesc = getConsumedTopicDescription(consumer, topic, false, consumerType)
      (topic, topicDesc)
    }.toMap

    ConsumerDescription(consumer, topicDescriptions, consumerType)
  }

  /** Per-topic view for a consumer: partition offsets and owners from the matching backend. */
  final def getConsumedTopicDescription(consumer:String
                                        , topic:String
                                        , interactive: Boolean
                                        , consumerType: ConsumerType) : ConsumedTopicDescription = {
    val optTopic = getTopicDescription(topic, interactive)
    val optTpi = optTopic.map(TopicIdentity.getTopicPartitionIdentity(_, None))
    val (partitionOffsets, partitionOwners) = consumerType match {
      case ZKManagedConsumer =>
        val partitionOffsets = for {
          td <- optTopic
          tpi <- optTpi
        } yield {
          readConsumerOffsetByTopicPartition(consumer, topic, tpi)
        }
        val partitionOwners = for {
          td <- optTopic
          tpi <- optTpi
        } yield {
          readConsumerOwnerByTopicPartition(consumer, topic, tpi)
        }
        (partitionOffsets, partitionOwners)
      case KafkaManagedConsumer =>
        val partitionOffsets = for {
          td <- optTopic
          tpi <- optTpi
        } yield {
          readKafkaManagedConsumerOffsetByTopicPartition(consumer, topic, tpi)
        }
        val partitionOwners = for {
          td <- optTopic
          tpi <- optTpi
        } yield {
          readKafkaManagedConsumerOwnerByTopicPartition(consumer, topic, tpi)
        }
        (partitionOffsets, partitionOwners)
    }

    // Partition count: prefer topic state, fall back to the number of offsets read.
    val numPartitions: Int = math.max(optTopic.flatMap(_.partitionState.map(_.size)).getOrElse(0),
      partitionOffsets.map(_.size).getOrElse(0))
    ConsumedTopicDescription(consumer, topic, numPartitions, optTopic, partitionOwners, partitionOffsets)
  }

  // Kafka-managed consumers first, then ZK-managed ones.
  final def getConsumerList: ConsumerList = {
    ConsumerList(getKafkaManagedConsumerList ++ getZKManagedConsumerList, clusterContext)
  }
}
/**
 * OffsetCache implementation backed by a Curator TreeCache over the ZK /consumers subtree.
 * "Active" variant: offsets/owners/topics are served from the in-memory tree cache instead
 * of direct ZK reads (compare OffsetCachePassive).
 */
case class OffsetCacheActive(curator: CuratorFramework
                             , kafkaAdminClient: KafkaAdminClient
                             , clusterContext: ClusterContext
                             , partitionLeaders: String => Option[List[(Int, Option[BrokerIdentity])]]
                             , topicDescriptions: (String, Boolean) => Option[TopicDescription]
                             , cacheTimeoutSecs: Int
                             , socketTimeoutMillis: Int
                             , kafkaVersion: KafkaVersion
                             , consumerProperties: Option[Properties]
                             , kafkaManagedOffsetCacheConfig: KafkaManagedOffsetCacheConfig
                             , getBrokerList : () => BrokerList
                            )
                            (implicit protected[this] val ec: ExecutionContext, val cf: ClusterFeatures) extends OffsetCache {

  def getKafkaVersion: KafkaVersion = kafkaVersion

  def getCacheTimeoutSecs: Int = cacheTimeoutSecs

  def getSimpleConsumerSocketTimeoutMillis: Int = socketTimeoutMillis

  // Offsets only polled eagerly when the KMPollConsumersFeature is enabled for this cluster.
  val loadOffsets = featureGateFold(KMPollConsumersFeature)(false, true)

  // Tracks freshness: bumps the last-update timestamp on any consumers-subtree change.
  private[this] val consumersTreeCacheListener = new TreeCacheListener {
    override def childEvent(client: CuratorFramework, event: TreeCacheEvent): Unit = {
      event.getType match {
        case TreeCacheEvent.Type.INITIALIZED | TreeCacheEvent.Type.NODE_ADDED |
             TreeCacheEvent.Type.NODE_REMOVED | TreeCacheEvent.Type.NODE_UPDATED =>
          consumersTreeCacheLastUpdateMillis = System.currentTimeMillis()
        case _ =>
        //do nothing
      }
    }
  }

  private[this] val consumersTreeCache = new TreeCache(curator, ZkUtils.ConsumersPath)

  @volatile
  private[this] var consumersTreeCacheLastUpdateMillis : Long = System.currentTimeMillis()

  private[this] def withConsumersTreeCache[T](fn: TreeCache => T) : Option[T] = {
    Option(fn(consumersTreeCache))
  }

  protected def getTopicPartitionLeaders(topic: String) : Option[List[(Int, Option[BrokerIdentity])]] = partitionLeaders(topic)

  protected def getTopicDescription(topic: String, interactive: Boolean) : Option[TopicDescription] = topicDescriptions(topic, interactive)

  override def start(): Unit = {
    super.start()
    info("Starting consumers tree cache...")
    consumersTreeCache.start()
    info("Adding consumers tree cache listener...")
    consumersTreeCache.getListenable.addListener(consumersTreeCacheListener)
  }

  override def stop(): Unit = {
    super.stop()
    info("Removing consumers tree cache listener...")
    Try(consumersTreeCache.getListenable.removeListener(consumersTreeCacheListener))
    info("Shutting down consumers tree cache...")
    Try(consumersTreeCache.close())
  }

  protected def lastUpdateMillisZK : Long = consumersTreeCacheLastUpdateMillis

  // Missing/empty offset nodes default to -1.
  protected def readConsumerOffsetByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, Long] = {
    tpi.map {
      case (p, _) =>
        val offsetPath = "%s/%s/%s/%s/%s".format(ZkUtils.ConsumersPath, consumer, "offsets", topic, p)
        (p, Option(consumersTreeCache.getCurrentData(offsetPath)).flatMap(cd => Option(cd.getData)).map(asString).getOrElse("-1").toLong)
    }
  }

  // Missing owner nodes default to the empty string.
  protected def readConsumerOwnerByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, String] = {
    tpi.map {
      case (p, _) =>
        val offsetPath = "%s/%s/%s/%s/%s".format(ZkUtils.ConsumersPath, consumer, "owners", topic, p)
        (p, Option(consumersTreeCache.getCurrentData(offsetPath)).flatMap(cd => Option(cd.getData)).map(asString).getOrElse(""))
    }
  }

  // Topics from /consumers/<consumer>/ids/* subscription payloads (see ConsumerInstanceSubscriptions).
  protected def getConsumerTopicsFromIds(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"ids")
    Option(consumersTreeCache.getCurrentChildren(zkPath)).map(_.asScala.toMap.map {
      case (id, cd) => ConsumerInstanceSubscriptions.apply(consumer, id, Option(cd).map(_.getData).map(asString).getOrElse("{}"))
    }.map(_.subs.keys).flatten.toSet).getOrElse(Set.empty)
  }

  protected def getConsumerTopicsFromOffsets(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"offsets")
    Option(consumersTreeCache.getCurrentChildren(zkPath)).map(_.asScala.toMap.keySet).getOrElse(Set.empty)
  }

  protected def getConsumerTopicsFromOwners(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"owners")
    Option(consumersTreeCache.getCurrentChildren(zkPath)).map(_.asScala.toMap.keySet).getOrElse(Set.empty)
  }

  protected def getZKManagedConsumerList: IndexedSeq[ConsumerNameAndType] = {
    withConsumersTreeCache { cache =>
      cache.getCurrentChildren(ZkUtils.ConsumersPath)
    }.fold {
      IndexedSeq.empty[ConsumerNameAndType]
    } { data: java.util.Map[String, ChildData] =>
      data.asScala.filter{
        case (consumer, childData) =>
          if (clusterContext.config.filterConsumers)
          // Defining "inactive consumer" as a consumer that is missing one of three children ids/ offsets/ or owners/
            childData.getStat.getNumChildren > 2
          else true
      }.keySet.toIndexedSeq.map(name => ConsumerNameAndType(name, ZKManagedConsumer))
    }
  }
}
/**
 * OffsetCache implementation that reads offsets/owners/topics directly from ZooKeeper on demand.
 * Only the top-level /consumers children are cached (PathChildrenCache) — hence "passive";
 * lower per-read memory than OffsetCacheActive's full TreeCache at the cost of ZK round-trips.
 */
case class OffsetCachePassive(curator: CuratorFramework
                              , kafkaAdminClient: KafkaAdminClient
                              , clusterContext: ClusterContext
                              , partitionLeaders: String => Option[List[(Int, Option[BrokerIdentity])]]
                              , topicDescriptions: (String, Boolean) => Option[TopicDescription]
                              , cacheTimeoutSecs: Int
                              , socketTimeoutMillis: Int
                              , kafkaVersion: KafkaVersion
                              , consumerProperties: Option[Properties]
                              , kafkaManagedOffsetCacheConfig: KafkaManagedOffsetCacheConfig
                              , getBrokerList : () => BrokerList
                             )
                             (implicit protected[this] val ec: ExecutionContext, val cf: ClusterFeatures) extends OffsetCache {

  def getKafkaVersion: KafkaVersion = kafkaVersion

  def getCacheTimeoutSecs: Int = cacheTimeoutSecs

  def getSimpleConsumerSocketTimeoutMillis: Int = socketTimeoutMillis

  // Offsets only polled eagerly when the KMPollConsumersFeature is enabled for this cluster.
  val loadOffsets = featureGateFold(KMPollConsumersFeature)(false, true)

  // Tracks freshness: bumps the last-update timestamp on any direct-child change of /consumers.
  private[this] val consumersPathChildrenCacheListener = new PathChildrenCacheListener {
    override def childEvent(client: CuratorFramework, event: PathChildrenCacheEvent): Unit = {
      event.getType match {
        case PathChildrenCacheEvent.Type.INITIALIZED | PathChildrenCacheEvent.Type.CHILD_ADDED |
             PathChildrenCacheEvent.Type.CHILD_REMOVED | PathChildrenCacheEvent.Type.CHILD_UPDATED =>
          consumersTreeCacheLastUpdateMillis = System.currentTimeMillis()
        case _ =>
        //do nothing
      }
    }
  }

  private[this] val consumersPathChildrenCache = new PathChildrenCache(curator, ZkUtils.ConsumersPath, true)

  @volatile
  private[this] var consumersTreeCacheLastUpdateMillis : Long = System.currentTimeMillis()

  private[this] def withConsumersPathChildrenCache[T](fn: PathChildrenCache => T) : Option[T] = {
    Option(fn(consumersPathChildrenCache))
  }

  protected def getTopicPartitionLeaders(topic: String) : Option[List[(Int, Option[BrokerIdentity])]] = partitionLeaders(topic)

  protected def getTopicDescription(topic: String, interactive: Boolean) : Option[TopicDescription] = topicDescriptions(topic, interactive)

  override def start(): Unit = {
    super.start()
    info("Starting consumers path children cache...")
    consumersPathChildrenCache.start(StartMode.BUILD_INITIAL_CACHE)
    info("Adding consumers path children cache listener...")
    consumersPathChildrenCache.getListenable.addListener(consumersPathChildrenCacheListener)
  }

  override def stop(): Unit = {
    super.stop()
    info("Removing consumers path children cache listener...")
    Try(consumersPathChildrenCache.getListenable.removeListener(consumersPathChildrenCacheListener))
    info("Shutting down consumers path children cache...")
    Try(consumersPathChildrenCache.close())
  }

  protected def lastUpdateMillisZK : Long = consumersTreeCacheLastUpdateMillis

  // Direct ZK read per partition; missing offset nodes default to -1.
  protected def readConsumerOffsetByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, Long] = {
    tpi.map {
      case (p, _) =>
        val offsetPath = "%s/%s/%s/%s/%s".format(ZkUtils.ConsumersPath, consumer, "offsets", topic, p)
        (p, ZkUtils.readDataMaybeNull(curator, offsetPath)._1.map(_.toLong).getOrElse(-1L))
    }
  }

  // Direct ZK read per partition; partitions with no owner node are dropped (nulls filtered out).
  protected def readConsumerOwnerByTopicPartition(consumer: String, topic: String, tpi: Map[Int, TopicPartitionIdentity]) : Map[Int, String] = {
    tpi.map {
      case (p, _) =>
        val ownerPath = "%s/%s/%s/%s/%s".format(ZkUtils.ConsumersPath, consumer, "owners", topic, p)
        (p, ZkUtils.readDataMaybeNull(curator, ownerPath)._1.orNull)
    }.filter(_._2 != null)
  }

  // Topics from /consumers/<consumer>/ids/* subscription payloads (see ConsumerInstanceSubscriptions).
  protected def getConsumerTopicsFromIds(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"ids")
    val ids = Try(Option(curator.getChildren.forPath(zkPath)).map(_.asScala.toIterable)).toOption.flatten.getOrElse(Iterable.empty)
    val topicList : Iterable[Iterable[String]] = for {
      id <- ids
      idPath = "%s/%s".format(zkPath, id)
    } yield {
      ZkUtils.readDataMaybeNull(
        curator, idPath)._1.map(ConsumerInstanceSubscriptions.apply(consumer, id, _)).map(_.subs.keys).getOrElse(Iterable.empty)
    }
    topicList.flatten.toSet
  }

  protected def getConsumerTopicsFromOffsets(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"offsets")
    Try(Option(curator.getChildren.forPath(zkPath)).map(_.asScala.toSet)).toOption.flatten.getOrElse(Set.empty)
  }

  protected def getConsumerTopicsFromOwners(consumer: String) : Set[String] = {
    val zkPath = "%s/%s/%s".format(ZkUtils.ConsumersPath,consumer,"owners")
    Try(Option(curator.getChildren.forPath(zkPath)).map(_.asScala.toSet)).toOption.flatten.getOrElse(Set.empty)
  }

  // Consumer group name is the last path segment of each cached child node.
  protected def getZKManagedConsumerList: IndexedSeq[ConsumerNameAndType] = {
    withConsumersPathChildrenCache { cache =>
      val currentData = cache.getCurrentData
      currentData
    }.fold {
      IndexedSeq.empty[ConsumerNameAndType]
    } { data: java.util.List[ChildData] =>
      data.asScala.map(cd => ConsumerNameAndType(cd.getPath.split("/").last, ZKManagedConsumer)).toIndexedSeq
    }
  }
}
/**
 * Configuration bundle for KafkaStateActor: curator connection, dispatcher for the
 * pinned admin-client actor, thread-pool configs, cache timeouts, and optional extra
 * consumer properties forwarded to the offset-topic consumer.
 */
case class KafkaStateActorConfig(curator: CuratorFramework
                                 , pinnedDispatcherName: String
                                 , clusterContext: ClusterContext
                                 , offsetCachePoolConfig: LongRunningPoolConfig
                                 , kafkaAdminClientPoolConfig: LongRunningPoolConfig
                                 , partitionOffsetCacheTimeoutSecs: Int
                                 , simpleConsumerSocketTimeoutMillis: Int
                                 , consumerProperties: Option[Properties]
                                 , kafkaManagedOffsetCacheConfig: KafkaManagedOffsetCacheConfig
                                )
class KafkaStateActor(config: KafkaStateActorConfig) extends BaseClusterQueryCommandActor with LongRunningPoolActor {
  // Cluster context/features made implicitly available to the offset caches below.
  protected implicit val clusterContext: ClusterContext = config.clusterContext
  protected implicit val cf: ClusterFeatures = clusterContext.clusterFeatures

  // Long-running pool used for offset-cache work.
  override protected def longRunningPoolConfig: LongRunningPoolConfig = config.offsetCachePoolConfig

  // Backpressure policy: drop work (with an error log) when the pool queue is saturated.
  override protected def longRunningQueueFull(): Unit = {
    log.error("Long running pool queue full, skipping!")
  }
  // Child KafkaAdminClientActor on a pinned dispatcher, wrapped by a KafkaAdminClient facade.
  private[this] val kaConfig = KafkaAdminClientActorConfig(
    clusterContext,
    config.kafkaAdminClientPoolConfig,
    self.path,
    config.consumerProperties
  )
  private[this] val kaProps = Props(classOf[KafkaAdminClientActor],kaConfig)
  private[this] val kafkaAdminClientActor : ActorPath = context.actorOf(kaProps.withDispatcher(config.pinnedDispatcherName),"kafka-admin-client").path
  private[this] val kafkaAdminClient = new KafkaAdminClient(context, kafkaAdminClientActor)
  // e.g. /brokers/topics/analytics_content/partitions/0/state
  // Curator caches over the broker/topic/admin ZK subtrees; started in preStart, closed in postStop.
  private[this] val topicsTreeCache = new TreeCache(config.curator,ZkUtils.BrokerTopicsPath)

  private[this] val topicsConfigPathCache = new PathChildrenCache(config.curator,ZkUtils.TopicConfigPath,true)

  private[this] val brokersPathCache = new PathChildrenCache(config.curator,ZkUtils.BrokerIdsPath,true)

  private[this] val adminPathCache = new PathChildrenCache(config.curator,ZkUtils.AdminPath,true)

  private[this] val deleteTopicsPathCache = new PathChildrenCache(config.curator, ZkUtils.DeleteTopicsPath,true)

  // Millis timestamp of the last observed change in the topics tree (read by freshness checks).
  @volatile
  private[this] var topicsTreeCacheLastUpdateMillis : Long = System.currentTimeMillis()
  // Bumps topicsTreeCacheLastUpdateMillis on any structural change in the topics subtree.
  private[this] val topicsTreeCacheListener = new TreeCacheListener {
    override def childEvent(client: CuratorFramework, event: TreeCacheEvent): Unit = {
      event.getType match {
        case TreeCacheEvent.Type.INITIALIZED | TreeCacheEvent.Type.NODE_ADDED |
             TreeCacheEvent.Type.NODE_REMOVED | TreeCacheEvent.Type.NODE_UPDATED =>
          topicsTreeCacheLastUpdateMillis = System.currentTimeMillis()
        case _ =>
        //do nothing
      }
    }
  }
  // In-flight preferred-leader-election state, maintained by adminPathCacheListener via self-messages.
  @volatile
  private[this] var preferredLeaderElection : Option[PreferredReplicaElection] = None

  // In-flight partition-reassignment state, maintained by adminPathCacheListener via self-messages.
  @volatile
  private[this] var reassignPartitions : Option[ReassignPartitions] = None
  /**
   * Watches the ZK admin path and translates node lifecycle events into self-messages
   * (KSUpdate*/KSEnd*) so PLE/reassignment state is mutated only on the actor thread.
   */
  private[this] val adminPathCacheListener = new PathChildrenCacheListener {
    override def childEvent(client: CuratorFramework, event: PathChildrenCacheEvent): Unit = {
      log.info(s"Got event : ${event.getType} path=${Option(event.getData).map(_.getPath)}")
      event.getType match {
        case PathChildrenCacheEvent.Type.INITIALIZED =>
          event.getInitialData.asScala.foreach { cd: ChildData =>
            updatePreferredLeaderElection(cd)
            updateReassignPartition(cd)
          }
        case PathChildrenCacheEvent.Type.CHILD_ADDED | PathChildrenCacheEvent.Type.CHILD_UPDATED =>
          updatePreferredLeaderElection(event.getData)
          updateReassignPartition(event.getData)
        case PathChildrenCacheEvent.Type.CHILD_REMOVED =>
          // Node removal marks the end of the corresponding operation.
          endPreferredLeaderElection(event.getData)
          endReassignPartition(event.getData)
        case _ =>
        //do nothing
      }
    }

    // Each helper filters by the znode path suffix, so unrelated admin children are ignored;
    // Try(...) guards against sends after actor shutdown.
    private[this] def updatePreferredLeaderElection(cd: ChildData): Unit = {
      if(cd != null && cd.getPath.endsWith(ZkUtils.PreferredReplicaLeaderElectionPath)) {
        Try {
          self ! KSUpdatePreferredLeaderElection(cd.getStat.getMtime, cd.getData)
        }
      }
    }

    private[this] def updateReassignPartition(cd: ChildData): Unit = {
      if(cd != null && cd.getPath.endsWith(ZkUtils.ReassignPartitionsPath)) {
        Try {
          self ! KSUpdateReassignPartition(cd.getStat.getMtime, cd.getData)
        }
      }
    }

    private[this] def endPreferredLeaderElection(cd: ChildData): Unit = {
      if(cd != null && cd.getPath.endsWith(ZkUtils.PreferredReplicaLeaderElectionPath)) {
        Try {
          self ! KSEndPreferredLeaderElection(cd.getStat.getMtime)
        }
      }
    }

    private[this] def endReassignPartition(cd: ChildData): Unit = {
      if(cd != null && cd.getPath.endsWith(ZkUtils.ReassignPartitionsPath)) {
        Try {
          self ! KSEndReassignPartition(cd.getStat.getMtime)
        }
      }
    }
  }
  // Selects the consumer-offset cache flavor per cluster config: Active (full ZK TreeCache)
  // vs Passive (on-demand ZK reads). Lazy so curator caches exist before first use.
  private[this] lazy val offsetCache: OffsetCache = {
    if(config.clusterContext.config.activeOffsetCacheEnabled)
      new OffsetCacheActive(config.curator
        , kafkaAdminClient
        , config.clusterContext
        , getPartitionLeaders
        , getTopicDescription
        , config.partitionOffsetCacheTimeoutSecs
        , config.simpleConsumerSocketTimeoutMillis
        , config.clusterContext.config.version
        , config.consumerProperties
        , config.kafkaManagedOffsetCacheConfig
        , () => getBrokerList
      )(longRunningExecutionContext, cf)
    else
      new OffsetCachePassive( config.curator
        , kafkaAdminClient
        , config.clusterContext
        , getPartitionLeaders
        , getTopicDescription
        , config.partitionOffsetCacheTimeoutSecs
        , config .simpleConsumerSocketTimeoutMillis
        , config.clusterContext.config.version
        , config.consumerProperties
        , config.kafkaManagedOffsetCacheConfig
        , () => getBrokerList
      )(longRunningExecutionContext, cf)
  }
  @scala.throws[Exception](classOf[Exception])
  override def preStart() = {
    // Start caches first, then attach listeners, so no events are missed after listeners attach.
    log.info(config.toString)
    log.info("Started actor %s".format(self.path))
    log.info("Starting topics tree cache...")
    topicsTreeCache.start()
    log.info("Starting topics config path cache...")
    topicsConfigPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Starting brokers path cache...")
    brokersPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Starting admin path cache...")
    adminPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Starting delete topics path cache...")
    deleteTopicsPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Adding topics tree cache listener...")
    topicsTreeCache.getListenable.addListener(topicsTreeCacheListener)
    log.info("Adding admin path cache listener...")
    adminPathCache.getListenable.addListener(adminPathCacheListener)
    //the offset cache does not poll on its own so it can be started safely
    log.info("Starting offset cache...")
    offsetCache.start()
    startTopicOffsetGetter()
  }
  @scala.throws[Exception](classOf[Exception])
  override def preRestart(reason: Throwable, message: Option[Any]) {
    // Log the failing message, then defer to the default restart behaviour (stops children, calls postStop).
    log.error(reason, "Restarting due to [{}] when processing [{}]",
      reason.getMessage, message.getOrElse(""))
    super.preRestart(reason, message)
  }
  @scala.throws[Exception](classOf[Exception])
  override def postStop(): Unit = {
    // Tear down in reverse order of preStart; each step wrapped in Try so one failure
    // cannot prevent the remaining resources from being released.
    log.info("Stopped actor %s".format(self.path))

    Try(stopTopicOffsetGetter())

    log.info("Stopping offset cache...")
    Try(offsetCache.stop())

    log.info("Removing admin path cache listener...")
    Try(adminPathCache.getListenable.removeListener(adminPathCacheListener))
    log.info("Removing topics tree cache listener...")
    Try(topicsTreeCache.getListenable.removeListener(topicsTreeCacheListener))
    log.info("Shutting down delete topics path cache...")
    Try(deleteTopicsPathCache.close())
    log.info("Shutting down admin path cache...")
    Try(adminPathCache.close())
    log.info("Shutting down brokers path cache...")
    Try(brokersPathCache.close())
    log.info("Shutting down topics config path cache...")
    Try(topicsConfigPathCache.close())
    log.info("Shutting down topics tree cache...")
    Try(topicsTreeCache.close())

    super.postStop()
  }
def getTopicZookeeperData(topic: String): Option[(Int,String)] = {
val topicPath = "%s/%s".format(ZkUtils.BrokerTopicsPath,topic)
Option(topicsTreeCache.getCurrentData(topicPath)).map( childData => (childData.getStat.getVersion,asString(childData.getData)))
}
def getTopicPartitionOffsetsNotFuture(topic: String, interactive: Boolean): PartitionOffsetsCapture = {
var partitionOffsets = PartitionOffsetsCapture(System.currentTimeMillis(), Map())
val loadOffsets = featureGateFold(KMPollConsumersFeature)(false, true)
if ((interactive || loadOffsets) &&
kafkaTopicOffsetCaptureMap.contains(topic)) {
partitionOffsets = kafkaTopicOffsetCaptureMap(topic)
}
partitionOffsets
}
  /**
   * Assembles a TopicDescription from cached ZK data: the topic znode, per-partition state
   * nodes, cached partition offsets, and the topic config string.
   * Returns None when the topic znode or its partitions subtree is absent from the cache.
   */
  def getTopicDescription(topic: String, interactive: Boolean) : Option[TopicDescription] = {
    for {
      description <- getTopicZookeeperData(topic)
      partitionsPath = "%s/%s/partitions".format(ZkUtils.BrokerTopicsPath, topic)
      partitions: Map[String, ChildData] <- Option(topicsTreeCache.getCurrentChildren(partitionsPath)).map(_.asScala.toMap)
      // Partitions whose state node is missing are silently dropped (flatMap).
      states : Map[String, String] = partitions flatMap { case (part, _) =>
        val statePath = s"$partitionsPath/$part/state"
        Option(topicsTreeCache.getCurrentData(statePath)).map(cd => (part, asString(cd.getData)))
      }
      partitionOffsets = getTopicPartitionOffsetsNotFuture(topic, interactive)
      topicConfig = getTopicConfigString(topic)
    } yield TopicDescription(topic, description, Option(states), partitionOffsets, topicConfig)
  }
  /** Resolves, for each partition of `topic`, the broker currently leading it.
    *
    * The leader id is parsed from the partition's `state` node json; when the
    * json cannot be parsed the id defaults to 0 (after logging the error), and
    * a leader id not present in the current broker list yields None for that
    * partition.
    */
  def getPartitionLeaders(topic: String) : Option[List[(Int, Option[BrokerIdentity])]] = {
    val partitionsPath = "%s/%s/partitions".format(ZkUtils.BrokerTopicsPath, topic)
    val partitions: Option[Map[String, ChildData]] = Option(topicsTreeCache.getCurrentChildren(partitionsPath)).map(_.asScala.toMap)
    // (partition id -> raw state json) for every partition that has a state node
    val states : Option[Iterable[(String, String)]] =
      partitions.map[Iterable[(String,String)]]{ partMap: Map[String, ChildData] =>
        partMap.flatMap { case (part, _) =>
          val statePath = s"$partitionsPath/$part/state"
          Option(topicsTreeCache.getCurrentData(statePath)).map(cd => (part, asString(cd.getData)))
        }
      }
    val targetBrokers : IndexedSeq[BrokerIdentity] = getBrokers
    import org.json4s.jackson.JsonMethods.parse
    import org.json4s.scalaz.JsonScalaz.field
    states.map(_.map{case (part, state) =>
      val partition = part.toInt
      val descJson = parse(state)
      // Fall back to leader id 0 when the state json is malformed (logged below).
      val leaderID = field[Int]("leader")(descJson).fold({ e =>
        log.error(s"[topic=$topic] Failed to get partitions from topic json $state"); 0}, identity)
      val leader = targetBrokers.find(_.id == leaderID)
      (partition, leader)
    }.toList)
  }
private[this] def getTopicConfigString(topic: String) : Option[(Int,String)] = {
val data: mutable.Buffer[ChildData] = topicsConfigPathCache.getCurrentData.asScala
val result: Option[ChildData] = data.find(p => p.getPath.endsWith("/" + topic))
result.map(cd => (cd.getStat.getVersion,asString(cd.getData)))
}
  /** This actor expects no actor responses; anything received here is logged
    * as an unknown message and otherwise ignored.
    */
  override def processActorResponse(response: ActorResponse): Unit = {
    response match {
      case any: Any => log.warning("ksa : processActorResponse : Received unknown message: {}", any.toString)
    }
  }
private[this] def getBrokers : IndexedSeq[BrokerIdentity] = {
val data: mutable.Buffer[ChildData] = brokersPathCache.getCurrentData.asScala
data.map { cd =>
BrokerIdentity.from(nodeFromPath(cd.getPath).toInt, asString(cd.getData))
}.filter { v =>
v match {
case scalaz.Failure(nel) =>
log.error(s"Failed to parse broker config $nel")
false
case _ => true
}
}.collect {
case scalaz.Success(bi) => bi
}.toIndexedSeq.sortBy(_.id)
}
private[this] def asyncPipeToSender[T](fn: => T): Unit = {
implicit val ec = longRunningExecutionContext
val result: Future[T] = Future {
fn
}
result pipeTo sender
}
  /** Answers read-only queries about topics, consumers, brokers and the
    * currently tracked admin operations.
    *
    * Cheap cache lookups are answered directly on the actor thread; consumer
    * related queries are computed on the long-running execution context and
    * piped back to the sender (see asyncPipeToSender).
    */
  override def processQueryRequest(request: QueryRequest): Unit = {
    request match {
      case KSGetTopics =>
        // Topics pending deletion are only tracked when the delete-topic feature is enabled.
        val deleteSet: Set[String] =
          featureGateFold(KMDeleteTopicFeature)(
            Set.empty,
            {
              val deleteTopicsData: mutable.Buffer[ChildData] = deleteTopicsPathCache.getCurrentData.asScala
              deleteTopicsData.map { cd =>
                nodeFromPath(cd.getPath)
              }.toSet
            })
        withTopicsTreeCache { cache =>
          cache.getCurrentChildren(ZkUtils.BrokerTopicsPath)
        }.fold {
          // No topics node at all: reply with an empty topic list.
          sender ! TopicList(IndexedSeq.empty, deleteSet, config.clusterContext)
        } { data: java.util.Map[String, ChildData] =>
          sender ! TopicList(data.asScala.keySet.toIndexedSeq, deleteSet, config.clusterContext)
        }
      case KSGetConsumers =>
        asyncPipeToSender {
          offsetCache.getConsumerList
        }
      case KSGetTopicConfig(topic) =>
        sender ! TopicConfig(topic, getTopicConfigString(topic))
      case KSGetTopicDescription(topic) =>
        sender ! getTopicDescription(topic, false)
      case KSGetTopicDescriptions(topics) =>
        sender ! TopicDescriptions(topics.toIndexedSeq.flatMap(getTopicDescription(_, false)), topicsTreeCacheLastUpdateMillis)
      case KSGetConsumerDescription(consumer, consumerType) =>
        asyncPipeToSender {
          offsetCache.getConsumerDescription(consumer, consumerType)
        }
      case KSGetConsumedTopicDescription(consumer, topic, consumerType) =>
        asyncPipeToSender {
          offsetCache.getConsumedTopicDescription(consumer, topic, true, consumerType)
        }
      case KSGetAllTopicDescriptions(lastUpdateMillisOption) =>
        val lastUpdateMillis = lastUpdateMillisOption.getOrElse(0L)
        //since we want to update offsets, let's do so if last update plus offset cache timeout is before current time
        if (topicsTreeCacheLastUpdateMillis > lastUpdateMillis || ((topicsTreeCacheLastUpdateMillis + (config.partitionOffsetCacheTimeoutSecs * 1000)) < System.currentTimeMillis())) {
          //we have option here since there may be no topics at all!
          withTopicsTreeCache { cache: TreeCache =>
            cache.getCurrentChildren(ZkUtils.BrokerTopicsPath)
          }.fold {
            sender ! TopicDescriptions(IndexedSeq.empty, topicsTreeCacheLastUpdateMillis)
          } { data: java.util.Map[String, ChildData] =>
            sender ! TopicDescriptions(data.asScala.keys.toIndexedSeq.flatMap(getTopicDescription(_, false)), topicsTreeCacheLastUpdateMillis)
          }
        } // else no updates to send
      case KSGetAllConsumerDescriptions(lastUpdateMillisOption) =>
        val lastUpdateMillis = lastUpdateMillisOption.getOrElse(0L)
        // Only recompute when the offset cache has newer data than the caller has seen.
        if (offsetCache.lastUpdateMillis > lastUpdateMillis) {
          asyncPipeToSender {
            ConsumerDescriptions(offsetCache
              .getConsumerList
              .list
              .map(c => offsetCache.getConsumerDescription(c.name, c.consumerType)), offsetCache.lastUpdateMillis)
          }
        }
      case KSGetTopicsLastUpdateMillis =>
        sender ! topicsTreeCacheLastUpdateMillis
      case KSGetBrokers =>
        sender ! getBrokerList
      case KSGetPreferredLeaderElection =>
        sender ! preferredLeaderElection
      case KSGetReassignPartition =>
        sender ! reassignPartitions
      case any: Any => log.warning("ksa : processQueryRequest : Received unknown message: {}", any.toString)
    }
  }
private def getBrokerList : BrokerList = {
BrokerList(getBrokers, config.clusterContext)
}
  /** Applies zookeeper-driven updates for the two long-running admin
    * operations this actor tracks: preferred replica leader election and
    * partition reassignment. Each update either starts a fresh operation,
    * merges into a still-running one, or records an end time.
    */
  override def processCommandRequest(request: CommandRequest): Unit = {
    request match {
      case KSUpdatePreferredLeaderElection(millis,json) =>
        safeExecute {
          val s: Set[TopicPartition] = PreferredReplicaLeaderElectionCommand.parsePreferredReplicaElectionData(json)
          preferredLeaderElection.fold {
            //nothing there, add as new
            preferredLeaderElection = Some(PreferredReplicaElection(getDateTime(millis), s, None, config.clusterContext))
          } {
            existing =>
              existing.endTime.fold {
                //update without end? Odd, copy existing
                preferredLeaderElection = Some(existing.copy(topicAndPartition = existing.topicAndPartition ++ s))
              } { _ =>
                //new op started
                preferredLeaderElection = Some(PreferredReplicaElection(getDateTime(millis), s, None, config.clusterContext))
              }
          }
        }
      case KSUpdateReassignPartition(millis,json) =>
        safeExecute {
          val m : Map[TopicPartition, Seq[Int]] = ReassignPartitionCommand.parsePartitionReassignmentZkData(json)
          reassignPartitions.fold {
            //nothing there, add as new
            reassignPartitions = Some(ReassignPartitions(getDateTime(millis),m, None, config.clusterContext))
          } {
            existing =>
              existing.endTime.fold {
                //update without end? Odd, copy existing
                reassignPartitions = Some(existing.copy(partitionsToBeReassigned = existing.partitionsToBeReassigned ++ m))
              } { _ =>
                //new op started
                reassignPartitions = Some(ReassignPartitions(getDateTime(millis),m, None, config.clusterContext))
              }
          }
        }
      case KSEndPreferredLeaderElection(millis) =>
        safeExecute {
          // Stamp the running election (if any) with its end time.
          preferredLeaderElection.foreach { existing =>
            preferredLeaderElection = Some(existing.copy(endTime = Some(getDateTime(millis))))
          }
        }
      case KSEndReassignPartition(millis) =>
        safeExecute {
          // Stamp the running reassignment (if any) with its end time.
          reassignPartitions.foreach { existing =>
            reassignPartitions = Some(existing.copy(endTime = Some(getDateTime(millis))))
          }
        }
      case any: Any => log.warning("ksa : processCommandRequest : Received unknown message: {}", any.toString)
    }
  }
  /** Converts epoch milliseconds to a UTC [[DateTime]]. */
  private[this] def getDateTime(millis: Long) : DateTime = new DateTime(millis,DateTimeZone.UTC)
private[this] def safeExecute(fn: => Any) : Unit = {
Try(fn) match {
case Failure(t) =>
log.error("Failed!",t)
case Success(_) =>
//do nothing
}
}
  /** Applies `fn` to the topics tree cache, wrapping a possibly-null result in
    * an Option (Curator cache reads return null on a miss).
    */
  private[this] def withTopicsTreeCache[T](fn: TreeCache => T) : Option[T] = {
    Option(fn(topicsTreeCache))
  }
  //---------------------------------------------------
  // State for the background topic-offset poller.
  // Handle to the running poller, set by startTopicOffsetGetter.
  private[this] var kafkaTopicOffsetGetter : Option[KafkaTopicOffsetGetter] = None
  // Latest end offsets per topic -> (partition -> offset); populated in place by the poller thread.
  private[this] var kafkaTopicOffsetMap = new TrieMap[String, Map[Int, Long]]
  // Timestamped snapshot of the map above; replaced wholesale after each poll cycle.
  private[this] var kafkaTopicOffsetCaptureMap = new TrieMap[String, PartitionOffsetsCapture]
def startTopicOffsetGetter() : Unit = {
log.info("Starting kafka managed Topic Offset Getter ...")
kafkaTopicOffsetGetter = Option(new KafkaTopicOffsetGetter())
val topicOffsetGetterThread = new Thread(kafkaTopicOffsetGetter.get, "KafkaTopicOffsetGetter")
topicOffsetGetterThread.start()
}
def stopTopicOffsetGetter() : Unit = {
kafkaTopicOffsetGetter.foreach {
kto =>
Try {
log.info("Stopping kafka managed Topic Offset Getter ...")
kto.close()
}
}
}
  /** Background task that periodically asks the Kafka brokers for the latest
    * end-of-log offset of every topic partition and publishes the results into
    * `kafkaTopicOffsetMap` / `kafkaTopicOffsetCaptureMap`.
    *
    * One instance runs on its own thread (see startTopicOffsetGetter);
    * close() requests a cooperative shutdown which is honored between topics,
    * between brokers, and between poll cycles.
    */
  class KafkaTopicOffsetGetter() extends Runnable {
    // Cooperative shutdown flag, checked at several points in the loop.
    @volatile
    private[this] var shutdown: Boolean = false

    override def run(): Unit = {
      import scala.util.control.Breaks._
      while (!shutdown) {
        try {
          withTopicsTreeCache { cache: TreeCache =>
            cache.getCurrentChildren(ZkUtils.BrokerTopicsPath)
          }.fold {
          } { data: java.util.Map[String, ChildData] =>
            // Phase 1: group every (topic, partition) by the broker that currently leads it.
            var broker2TopicPartitionMap: Map[BrokerIdentity, List[(TopicPartition, PartitionOffsetRequestInfo)]] = Map()
            breakable {
              data.asScala.keys.toIndexedSeq.foreach(topic => {
                if (shutdown) {
                  return // non-local return: exits run() entirely on shutdown
                }
                var optPartitionsWithLeaders : Option[List[(Int, Option[BrokerIdentity])]] = getPartitionLeaders(topic)
                optPartitionsWithLeaders match {
                  case Some(leaders) =>
                    leaders.foreach(leader => {
                      leader._2 match {
                        case Some(brokerIden) =>
                          // Prepend this partition to the broker's request list.
                          var tlList : List[(TopicPartition, PartitionOffsetRequestInfo)] = null
                          if (broker2TopicPartitionMap.contains(brokerIden)) {
                            tlList = broker2TopicPartitionMap(brokerIden)
                          } else {
                            tlList = List()
                          }
                          tlList = (new TopicPartition(topic, leader._1), PartitionOffsetRequestInfo(-1, 1)) +: tlList
                          broker2TopicPartitionMap += (brokerIden -> tlList)
                        case None => // partition currently has no resolvable leader; skip it
                      }
                    })
                  case None => // no partition state available for this topic; skip it
                }
              }
              )
            }
            // Phase 2: query each leader broker for the end offsets of its partitions.
            breakable {
              broker2TopicPartitionMap.keys.foreach(broker => {
                if (shutdown) {
                  return // non-local return: exits run() entirely on shutdown
                }
                val tpList = broker2TopicPartitionMap(broker)
                val consumerProperties = kaConfig.consumerProperties.getOrElse(getDefaultConsumerProperties())
                val securityProtocol = Option(kaConfig.clusterContext.config.securityProtocol).getOrElse(PLAINTEXT)
                val port: Int = broker.endpoints(securityProtocol)
                consumerProperties.put(BOOTSTRAP_SERVERS_CONFIG, s"${broker.host}:$port")
                consumerProperties.put(SECURITY_PROTOCOL_CONFIG, securityProtocol.stringId)
                consumerProperties.put(METRIC_REPORTER_CLASSES_CONFIG, classOf[NoopJMXReporter].getCanonicalName)
                // Use secure endpoint if available
                if(kaConfig.clusterContext.config.saslMechanism.nonEmpty){
                  consumerProperties.put(SaslConfigs.SASL_MECHANISM, kaConfig.clusterContext.config.saslMechanism.get.stringId)
                  log.info(s"SASL Mechanism =${kaConfig.clusterContext.config.saslMechanism.get}")
                }
                if(kaConfig.clusterContext.config.jaasConfig.nonEmpty){
                  consumerProperties.put(SaslConfigs.SASL_JAAS_CONFIG, kaConfig.clusterContext.config.jaasConfig.get)
                  log.info(s"SASL JAAS config=${kaConfig.clusterContext.config.jaasConfig.get}")
                }
                // A short-lived consumer per broker per cycle; always closed in the finally block.
                var kafkaConsumer: Option[KafkaConsumer[Any, Any]] = None
                try {
                  kafkaConsumer = Option(new KafkaConsumer(consumerProperties))
                  val request = tpList.map(f => new TopicPartition(f._1.topic(), f._1.partition()))
                  var tpOffsetMapOption = kafkaConsumer.map(_.endOffsets(request.asJavaCollection).asScala)
                  // Merge the fetched offsets into the per-topic map.
                  var topicOffsetMap: Map[Int, Long] = null
                  tpOffsetMapOption.foreach(tpOffsetMap => tpOffsetMap.keys.foreach(tp => {
                    if (kafkaTopicOffsetMap.contains(tp.topic)) {
                      topicOffsetMap = kafkaTopicOffsetMap(tp.topic)
                    } else {
                      topicOffsetMap = Map()
                    }
                    topicOffsetMap += (tp.partition -> tpOffsetMap(tp))
                    kafkaTopicOffsetMap += (tp.topic -> topicOffsetMap)
                  }))
                } catch {
                  case e: Exception =>
                    log.error(e, s"consumerProperties:$consumerProperties")
                    throw e
                } finally {
                  kafkaConsumer.foreach(_.close())
                }
              })
            }
            // Publish a timestamped snapshot for readers (see getTopicPartitionOffsetsNotFuture).
            kafkaTopicOffsetCaptureMap = kafkaTopicOffsetMap.map(kv =>
              (kv._1, PartitionOffsetsCapture(System.currentTimeMillis(), kv._2)))
          }
        } catch {
          // Never let a single failed cycle kill the poller thread.
          case e: Exception =>
            log.error(e, s"KafkaTopicOffsetGetter exception ")
        }
        if (!shutdown) {
          Thread.sleep(config.partitionOffsetCacheTimeoutSecs * 1000)
        }
      }
      log.info(s"KafkaTopicOffsetGetter exit")
    }

    /** Requests shutdown; the running loop observes the flag cooperatively. */
    def close(): Unit = {
      this.shutdown = true
    }

    /** Minimal consumer config used when the cluster config supplies none. */
    def getDefaultConsumerProperties(): Properties = {
      val properties = new Properties()
      properties.put(GROUP_ID_CONFIG, getClass.getCanonicalName)
      properties.put(KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
      properties.put(VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
      properties
    }
  }
}
| yahoo/kafka-manager | app/kafka/manager/actor/cluster/KafkaStateActor.scala | Scala | apache-2.0 | 67,830 |
package proximaltest.controllers;
import org.scalatestplus.play._
import play.api.mvc._
import play.api.test.FakeRequest
import play.api.test.Helpers._
import proximaltest.helpers._
/** Verifies that the question-creation endpoint rejects non-admin users. */
class QuestionControllerSpec extends PlaySpec with Results {
  "Question Controller" should {
    "not allow a non admin to create a new question" in {
      running(SecureSocialHelper.app) {
        // Authenticate as a plain (non-admin) user and capture the session cookies.
        val creds1 = cookies(route(FakeRequest(POST, "/authenticate/naive").withTextBody("user")).get)
        val qu = QuestionGenerator.question
        // Posting a new question with those credentials must be rejected.
        val Some(resp) = route(FakeRequest(POST, "/api/v1/questions").withCookies(creds1.get("id").get).withJsonBody(QuestionGenerator.question))
        status(resp) mustEqual UNAUTHORIZED
      }
    }
  }
}
| silbermm/proximal | test/proximaltest/controllers/QuestionControllerSpec.scala | Scala | apache-2.0 | 735 |
package com.webtrends.harness.component.zookeeper.discoverable
import akka.actor.Actor
import akka.pattern.ask
import akka.util.Timeout
import com.webtrends.harness.command.{BaseCommandResponse, CommandException, _}
import scala.concurrent.{Future, Promise}
import scala.util.{Failure, Success}
/**
* @author Michael Cuthbert, Spencer Wood
*/
/** Executes commands whose host is looked up via service discovery.
  * Mixed into an Actor; each call resolves one or more registered instances
  * under `basePath` and forwards the command to them through the
  * CommandManager.
  */
trait DiscoverableCommandExecution extends CommandHelper with Discoverable {
  this: Actor =>
  import context.dispatcher

  /**
   * Executes a discoverable command where ever it may be located
   *
   * @param basePath discovery path under which the command's instances register
   * @param name     name of the command to execute
   * @param bean     optional payload for the command
   * @return the remote command's response; fails with CommandException when the
   *         CommandManager, the instance lookup, or the remote execution fails
   */
  def executeDiscoverableCommand[T:Manifest](basePath:String, name:String, bean:Option[CommandBean]=None)
                               (implicit timeout:Timeout) : Future[BaseCommandResponse[T]]= {
    val p = Promise[BaseCommandResponse[T]]
    initCommandManager onComplete {
      case Success(_) =>
        commandManager match {
          case Some(cm) =>
            // Pick a single registered instance and route the command to it.
            getInstance(basePath, name) onComplete {
              case Success(in) =>
                (cm ? ExecuteRemoteCommand[T](name, in.getAddress, in.getPort, bean, timeout))(timeout).mapTo[BaseCommandResponse[T]] onComplete {
                  case Success(s) => p success s
                  case Failure(f) => p failure CommandException("CommandManager", f)
                }
              case Failure(f) => p failure CommandException("CommandManager", f)
            }
          case None => p failure CommandException("CommandManager", "CommandManager not found!")
        }
      case Failure(f) => p failure f
    }
    p.future
  }

  /**
   * Executes a discoverable command on every server that is hosting it
   *
   * The per-instance responses are flattened into a single CommandResponse.
   */
  def broadcastDiscoverableCommand[T:Manifest](basePath:String, name:String, bean:Option[CommandBean]=None)
                               (implicit timeout:Timeout) : Future[BaseCommandResponse[T]]= {
    val p = Promise[BaseCommandResponse[T]]
    initCommandManager onComplete {
      case Success(_) =>
        commandManager match {
          case Some(cm) =>
            // Fan the command out to every registered instance and gather the results.
            getInstances(basePath, name) onComplete {
              case Success(in) if in.nonEmpty =>
                val futures = in.map(i => (cm ? ExecuteRemoteCommand[T](name,
                  i.getAddress, i.getPort, bean, timeout))(timeout).mapTo[BaseCommandResponse[T]])
                Future.sequence(futures) onComplete {
                  case Success(s) => p success CommandResponse[T](Some(s.flatMap(_.data).asInstanceOf[T])) // TODO - Don't lose response type
                  case Failure(f) => p failure CommandException("CommandManager", f)
                }
              case Failure(f) => p failure CommandException("CommandManager", f)
              case _ => p failure CommandException("CommandManager", new IllegalStateException(s"No instances found for $basePath"))
            }
          case None => p failure CommandException("CommandManager", "CommandManager not found!")
        }
      case Failure(f) => p failure f
    }
    p.future
  }
}
| Webtrends/wookiee-zookeeper | src/main/scala/com/webtrends/harness/component/zookeeper/discoverable/DiscoverableCommandExecution.scala | Scala | apache-2.0 | 3,008 |
package spire.example
import spire.implicits._
import spire.math._
import scala.annotation.tailrec
import scala.collection.IterableLike
import scala.collection.mutable.{Builder, GrowingBuilder, MapBuilder}
import scala.collection.generic.CanBuildFrom
/**
* Some tools for simplifying decimal expressions, and playing around
* with numbers.
*
* There are three modes:
*
* nth: print the nth rational, according to diagonalization
* all: print the first n rationals, according to diagonalization
* snap: given y, look for solutions to y = nroot(x, k) / d
*/
object Simplification {

  /** Command-line entry point; dispatches on the mode given in args(0).
    * NOTE(review): the usage line below contains a literal "%s" placeholder
    * that is never formatted — confirm whether a program name was intended.
    */
  def main(args: Array[String]): Unit = {
    if (args.isEmpty) {
      println("usage: %s [nrat | rats | nprime | primes | snap] [number]")
    } else {
      args(0) match {
        case "nrat" =>
          val n = if (args.length == 1) 10 else args(1).toInt
          val r: Rational = rationals.drop(n - 1).head
          println("rational %d is %s" format (n, r.toString))
        case "rats" =>
          val n = if (args.length == 1) 10 else args(1).toInt
          rationals.take(n).foreach(r => print(r.toString + ", "))
          println("...")
        case "nprime" =>
          val n = if (args.length == 1) 10 else args(1).toInt
          val p: Int = primes.drop(n - 1).head
          // NOTE(review): message says "rational" but prints a prime — likely a copy/paste slip.
          println("rational %d is %s" format (n, p.toString))
        case "primes" =>
          val n = if (args.length == 1) 10 else args(1).toInt
          primes.take(n).foreach(p => print(p.toString + ", "))
          println("...")
        case "snap" =>
          val n = if (args.length == 1) 1.4142135623730951 else args(1).toDouble
          val (base, k, div) = snap(n)
          println("%s =~ nroot(%s, %s) / %s" format (n, base, k, div))
      }
    }
  }

  /**
   * Using Cantor's diagonalization method, create an infinite stream
   * of all rational numbers.
   *
   * This stream will only be able to generate the first
   * 42,535,295,865,117,307,928,310,139,910,543,638,528 values, so it
   * is not really infinite. Even so, it's unlikely that a user will
   * be able to generate this many values.
   */
  val rationals: BigStream[Rational] = {
    // Walk anti-diagonal i emitting each fully-reduced n/d together with its
    // negation; non-reduced fractions (detected via numeratorAsLong) are skipped
    // so every rational appears exactly once.
    @tailrec
    def next(i: Long, n: Long, d: Long): BigStream[Rational] = {
      if (n == 0L) {
        next(i + 1L, i, 1L)
      } else {
        val r = Rational(n, d)
        if (n == r.numeratorAsLong) {
          new BigCons(r, new BigCons(-r, loop(i, n - 1L, d + 1L)))
        } else {
          next(i, n - 1L, d + 1L)
        }
      }
    }
    // Non-tailrec trampoline so `next` can be referenced lazily from the stream tail.
    def loop(i: Long, n: Long, d: Long): BigStream[Rational] = next(i, n, d)
    Rational.zero #:: loop(2L, 1L, 1L)
  }

  /**
   * Naive prime stream. For each odd number, this method tries
   * dividing by all previous primes <= sqrt(n).
   *
   * There are a lot of ways to improve this. For now it's a toy.
   * It can generate the millionth prime in ~9s on my computer.
   */
  val primes: Stream[Int] = {
    @tailrec
    def next(n: Int, stream: Stream[Int]): Stream[Int] =
      if (stream.isEmpty || (stream.head ** 2) > n)
        n #:: loop(n + 2, primes)
      else if (n % stream.head == 0)
        next(n + 2, primes)
      else
        next(n, stream.tail)
    def loop(n: Int, stream: Stream[Int]): Stream[Int] = next(n, stream)
    2 #:: loop(3, primes)
  }

  /**
   * Given a Double y, look for whole numbers x, k, and d such that:
   *
   *   y = nroot(x, k) / d
   *
   * The limit (default: 10) describes the largest root (and divisor)
   * that will be checked. The epsilon (default: 0.00000000001)
   * describes the maximum distance we can shift the value to find an
   * "exact" match.
   */
  def snap(n: Double, limit: Int = 10, epsilon: Double = 0.00000000001): (Double, Int, Int) = {
    // For each divisor/exponent pair, test whether (n*div)^ex lands within
    // epsilon of a whole number; fall back to (n, 1, 1) if nothing matches.
    @tailrec
    def loop(i: Int, ex: Int, div: Int): (Double, Int, Int) = {
      if (i >= limit) {
        (n, 1, 1)
      } else if (div < 1) {
        loop(i + 1, 1, i + 1)
      } else {
        val x = math.pow(n * div, ex)
        val m = x % 1.0
        val d = if (m < 0.5) m else m - 1.0
        if (math.abs(d) < epsilon) {
          (x - m, ex, div)
        } else {
          loop(i, ex + 1, div - 1)
        }
      }
    }
    // Negative inputs: snap the absolute value and negate the divisor.
    if (n < 0.0) {
      val (x, k, div) = snap(-n, limit, epsilon)
      (x, k, -div)
    } else {
      loop(1, 1, 1)
    }
  }
}
/**
 * BigStream is a non-memoizing stream.
 *
 * It's similar to Scala's Stream[A] except that it won't exhaust your
 * memory for very large streams. This makes it useful for situations
 * where re-computing the stream is preferrable to trying to store
 * all the results in memory for next time.
 */
object BigStream {
  /** The canonical empty stream. */
  def empty[A]: BigStream[A] = BigNil[A]()

  /** Enables `a #:: tail` construction; the tail stays by-name (unevaluated). */
  implicit class Wrapper[A](t: => BigStream[A]) {
    def #::(a: A): BigStream[A] = new BigCons(a, t)
  }

  /** Builder used by inherited collection operations to rebuild a BigStream. */
  def newBuilder[A]: Builder[A, BigStream[A]] =
    new Builder[A, BigStream[A]] {
      // Elements accumulate in reverse; result folds them back into insertion order.
      private var elems: List[A] = Nil
      def +=(a: A): this.type = {
        elems = a :: elems
        this
      }
      def clear(): Unit = elems = Nil
      def result: BigStream[A] =
        elems.foldLeft(BigStream.empty[A])((t, a) => new BigCons(a, t))
    }

  /** Lets transformation methods (map, filter, ...) return BigStream again. */
  implicit def canBuildFrom[A]: CanBuildFrom[Iterable[A], A, BigStream[A]] =
    new CanBuildFrom[Iterable[A], A, BigStream[A]] {
      def apply(from: Iterable[A]) = newBuilder[A]
      def apply() = newBuilder[A]
    }
}
/** A non-memoizing stream: elements are recomputed on every traversal rather
  * than cached, trading CPU for bounded memory on very long streams.
  */
trait BigStream[A] extends Iterable[A] with IterableLike[A, BigStream[A]] { self =>

  /** Returns a stream of at most the first `n` elements (all of them when `n`
    * exceeds the length; empty when `n < 1`).
    */
  override def take(n: Int): BigStream[A] =
    if (isEmpty || n < 1) BigNil() else new BigCons(head, tail.take(n - 1))

  /** Discards the first `n` elements and returns the rest (empty when `n`
    * exceeds the length).
    *
    * Fixed: the termination test previously read bare `isEmpty` — i.e.
    * `this.isEmpty`, the stream `drop` was called on — instead of
    * `stream.isEmpty`, so dropping more elements than a non-empty stream
    * contained called `tail` on the empty stream and aborted via sys.error.
    */
  override def drop(n: Int): BigStream[A] = {
    @tailrec
    def loop(stream: BigStream[A], i: Int): BigStream[A] =
      if (stream.isEmpty || i < 1) stream else loop(stream.tail, i - 1)
    loop(this, n)
  }

  /** Single-pass iterator over this stream. */
  def iterator: Iterator[A] = new Iterator[A] {
    var stream = self
    def hasNext: Boolean = !stream.isEmpty
    def next: A = if (stream.isEmpty) {
      throw new NoSuchElementException
    } else {
      val a = stream.head
      stream = stream.tail
      a
    }
  }

  /** Applies `f` to every element; tail-recursive, so safe on long streams. */
  override def foreach[U](f: A => U): Unit = {
    @tailrec
    def loop(stream: BigStream[A]): Unit = if (!stream.isEmpty) {
      f(stream.head)
      loop(stream.tail)
    }
    loop(this)
  }

  override def newBuilder: Builder[A, BigStream[A]] =
    BigStream.newBuilder[A]
}
/** A cons cell: a strict head plus a by-name (re-evaluated, non-memoized) tail. */
class BigCons[A](override val head: A, t: => BigStream[A]) extends BigStream[A] {
  override def tail: BigStream[A] = t
  override def isEmpty = false
  override def toString: String = "BigStream(%s, ...)" format head.toString

  /** Structural equality: same elements, in the same order.
    *
    * Fixed: the previous implementation compared only the tails and never the
    * heads, so any two streams of equal length compared equal. Note that
    * comparing two infinite streams will not terminate.
    */
  override def equals(rhs: Any): Boolean = rhs match {
    case s: BigStream[_] => !s.isEmpty && head == s.head && tail == s.tail
    case _ => false
  }

  /** Cheap, head-only hash; consistent with equals (equal streams share equal heads). */
  override def hashCode: Int = head.##
}
/** The empty stream; head and tail are undefined and abort via sys.error. */
case class BigNil[A]() extends BigStream[A] {
  override def head: A = sys.error("head on nil")
  override def tail: BigStream[A] = sys.error("tail on nil")
  override def isEmpty = true
  override def toString: String = "BigStream()"
}
| woparry/spire | examples/src/main/scala/spire/example/simplification.scala | Scala | mit | 6,919 |
package at.fabricate.openthings
package lib
import model.User
import net.liftweb.sitemap.Loc._
import net.liftweb.http.RedirectResponse
object AccessControl {
  /** Access condition: the user must be logged in; otherwise redirect to /login. */
  val loggedIn = If(() => User.loggedIn_?,
    () => RedirectResponse("/login"))
}
| Fabricate/OpenthingsImplementation | src/main/scala/at/fabricate/openthings/lib/AccessControl.scala | Scala | lgpl-3.0 | 312 |
//package teleporter.stream.integration.transaction
import akka.Done
import akka.stream.scaladsl.{Keep, Sink}
import akka.stream.{KillSwitch, KillSwitches}
import org.elasticsearch.action.index.IndexRequest
import org.elasticsearch.index.VersionType
import teleporter.integration.component.elasticsearch.ElasticSearch2
import teleporter.integration.component.hdfs.Hdfs
import teleporter.integration.core.Streams._
import teleporter.integration.core.{SourceAck, StreamContext, TeleporterCenter}
import teleporter.integration.utils.Converters._
import scala.concurrent.Future
/**
* Created by huanwuji on 2016/10/20.
* arguments: fields, index, type
*/
/** Stream that copies CSV lines from HDFS into an Elasticsearch index.
  * Each line is split on commas, zipped with the configured `fields` names,
  * and indexed under the configured `index`/`type` with the row's "id" field
  * as document id; external versioning uses the current wall-clock time.
  */
object Hadoop2Elasticsearch extends StreamLogic {
  override def apply(key: String, center: TeleporterCenter): (KillSwitch, Future[Done]) = {
    import center.{materializer, self}
    val context = center.context.getContext[StreamContext](key)
    val arguments = context.config.arguments
    // Stream arguments: comma-separated field names plus target index and type.
    val fields = arguments[String]("fields").split(",")
    val index = arguments[String]("index")
    val indexType = arguments[String]("type")
    Hdfs.sourceAck("/source/test/hadoop_es_test/hadoop2es/hdfs_source")
      .map { m ⇒
        m.map { bs ⇒
          // Pair each configured field name with the corresponding CSV column.
          val data = fields.zip(bs.utf8String.split(",")).toMap
          new IndexRequest()
            .index(index)
            .`type`(indexType)
            .id(data("id"))
            .source(data)
            .versionType(VersionType.EXTERNAL)
            .version(System.currentTimeMillis())
        }
      }
      .via(ElasticSearch2.flow("/sink/test/hadoop_es_test/hadoop2es/item_es"))
      .via(SourceAck.confirmFlow())
      .viaMat(KillSwitches.single)(Keep.right).watchTermination()(Keep.both)
      .to(Sink.ignore).run()
  }
}
package dotty.tools.backend.jvm
import org.junit.Assert._
import org.junit.Test
/** Checks that the optimizer rewrites string interpolators into plain string
  * concatenation: in each compiled source, `Foo.meth1` (interpolated) must
  * produce bytecode identical to `Foo.meth2` (explicit concatenation).
  */
class StringInterpolatorOptTest extends DottyBytecodeTest {
  import ASMConverters._

  /** Compiles `source` and asserts that Foo.meth1 and Foo.meth2 yield the same
    * instruction sequence; `failureHint` prefixes the diff on mismatch.
    */
  private def assertSameBytecode(source: String, failureHint: String): Unit =
    checkBCode(source) { dir =>
      val clsIn = dir.lookupName("Foo.class", directory = false).input
      val clsNode = loadClassNode(clsIn)
      val meth1 = getMethod(clsNode, "meth1")
      val meth2 = getMethod(clsNode, "meth2")
      val instructions1 = instructionsFromMethod(meth1)
      val instructions2 = instructionsFromMethod(meth2)
      assert(instructions1 == instructions2,
        failureHint + diffInstructions(instructions1, instructions2))
    }

  @Test def testRawInterpolator = {
    val source =
      """
        |class Foo {
        |  val one = 1
        |  val two = "two"
        |  val three = 3.0
        |
        |  def meth1: String = raw"$one plus $two$three\\n"
        |  def meth2: String = "" + one + " plus " + two + three + "\\\\n"
        |}
      """.stripMargin
    assertSameBytecode(source,
      "the `` string interpolator incorrectly converts to string concatenation\\n")
  }

  @Test def testSInterpolator = {
    val source =
      """
        |class Foo {
        |  val one = 1
        |  val two = "two"
        |  val three = 3.0
        |
        |  def meth1: String = s"$one plus $two$three\\n"
        |  def meth2: String = "" + one + " plus " + two + three + "\\n"
        |}
      """.stripMargin
    assertSameBytecode(source,
      "the `s` string interpolator incorrectly converts to string concatenation\\n")
  }
}
| som-snytt/dotty | compiler/test/dotty/tools/backend/jvm/StringInterpolatorOptTest.scala | Scala | apache-2.0 | 1,959 |
// A small currency ADT with a pattern-matching formatter.
trait Currency
case class Yen(amount: Int) extends Currency
case class Dollar(amount: Int) extends Currency

// Render a currency value as text, dispatching on the concrete currency type.
def toS(currency: Currency): String = currency match {
  case Yen(amount)    => "%s yen".format(amount)    // amount holds the yen value
  case Dollar(amount) => "%s dollar".format(amount) // amount holds the dollar value
}

val yen = Yen(1000)
toS(yen) should equal("1000 yen")
| akimichi/functionaljs | test/chap05.spec.scala | Scala | mit | 463 |
package org.scalacvx.constraints
import org.scalacvx.atoms.Expression
import org.scalacvx.dcp.{ConcaveVexity, ConvexVexity, AffineVexity}
/**
* Created by lorenzo on 9/10/15.
*/
/** A constraint relating two expressions; both sides must have the same size
  * (enforced at construction time).
  */
trait ComparisonConstraint extends Constraint {
  require(lhs.size == rhs.size,
    s"Cannot create equality constraint between expressions of size ${lhs.size} and ${rhs.size}")
  // Left- and right-hand side expressions of the comparison.
  val lhs: Expression
  val rhs: Expression
}
/** An equality constraint; DCP rules require both sides to be affine. */
case class EqualityConstraint(lhs:Expression, rhs:Expression) extends ComparisonConstraint {
  require(List(lhs,rhs).forall(e => e.vexity.isInstanceOf[AffineVexity]),
    s"Cannot create equality constraint: ${lhs.vexity} == ${rhs.vexity} (must be AffineVexity == AffineVexity")
  /*
  override val expression = lhs - rhs
  override val vexity = lhs.vexity - rhs.vexity match {
    case ConvexVexity | ConcaveVexity => NotDcp
    case _ => lhs.vexity - rhs.vexity
  }
  */
  //override lazy val conicForm = ConicForm()
}
/** A "lhs < rhs" constraint; DCP rules require convex < concave.
  * NOTE(review): assumes AffineVexity subtypes both ConvexVexity and
  * ConcaveVexity so affine operands are accepted — confirm the hierarchy.
  */
case class LtConstraint(lhs:Expression, rhs:Expression) extends ComparisonConstraint {
  require(lhs.vexity.isInstanceOf[ConvexVexity] && rhs.vexity.isInstanceOf[ConcaveVexity],
    s"Cannot create inequality constraint: ${lhs.vexity} < ${rhs.vexity} (must be ConvexVexity < ConcaveVexity")
}
/** A "lhs > rhs" constraint; DCP rules require concave > convex (the mirror
  * image of LtConstraint).
  */
case class GtConstraint(lhs:Expression, rhs:Expression) extends ComparisonConstraint {
  require(lhs.vexity.isInstanceOf[ConcaveVexity] && rhs.vexity.isInstanceOf[ConvexVexity],
    s"Cannot create inequality constraint: ${lhs.vexity} > ${rhs.vexity} (must be ConcaveVexity > ConvexVexity")
}
| lorenzolucido/ScalaCVX | src/main/scala/org/scalacvx/constraints/ComparisonConstraint.scala | Scala | mit | 1,525 |
package com.rasterfoundry.notification.email
import io.estatico.newtype.macros.newtype
@SuppressWarnings(Array("AsInstanceOf"))
object Model {

  /** Supported SMTP transport encryption schemes; `repr` is the wire name. */
  sealed abstract class EncryptionScheme(val repr: String) {
    override def toString: String = repr
  }

  object EncryptionScheme {
    /** Parses a scheme name, case-insensitively.
      *
      * Explicit return type added (previously inferred); the catch-all binder
      * is renamed so it no longer shadows the parameter `s`. The reported
      * value is still the lower-cased input, as before.
      *
      * @return Right(scheme) for "ssl"/"tls"/"starttls", Left(error) otherwise
      */
    def fromStringE(s: String): Either[String, EncryptionScheme] = s.toLowerCase match {
      case "ssl"      => Right(SSL)
      case "tls"      => Right(TLS)
      case "starttls" => Right(StartTLS)
      case other =>
        Left(
          s"$other is not a valid encryption scheme. Must be in ssl, tls, or starttls"
        )
    }
  }

  case object SSL extends EncryptionScheme("ssl")
  case object TLS extends EncryptionScheme("tls")
  case object StartTLS extends EncryptionScheme("starttls")

  // Zero-cost wrappers distinguishing the various string/int config values.
  @newtype case class EmailHost(underlying: String)
  @newtype case class EmailPort(underlying: Int)
  @newtype case class EmailUserName(underlying: String)
  @newtype case class EmailPassword(underlying: String)
  @newtype case class FromEmailAddress(underlying: String)
  @newtype case class ToEmailAddress(underlying: String)
  @newtype case class FromEmailDisplayName(underlying: String)
  @newtype case class Subject(underlying: String)
  @newtype case class HtmlBody(underlying: String)
  @newtype case class PlainBody(underlying: String)

  /** SMTP connection settings. */
  final case class EmailConfig(
      host: EmailHost,
      port: EmailPort,
      encryption: EncryptionScheme,
      username: EmailUserName,
      password: EmailPassword
  )

  /** Connection settings plus the sender and recipient for one email. */
  final case class EmailSettings(
      config: EmailConfig,
      fromUserEmail: FromEmailAddress,
      toUserEmail: ToEmailAddress
  )
}
| raster-foundry/raster-foundry | app-backend/notification/src/main/scala/com/rasterfoundry/notification/email/Model.scala | Scala | apache-2.0 | 1,610 |
package security
import play.api.libs.json.Json
import play.api.mvc.BodyParsers.parse
import play.api.mvc.BodyParser
import play.api.mvc.Request
import play.api.mvc.Action
import play.api.mvc.AnyContent
import play.api.mvc.Result
import play.api.mvc.Results
import play.api.mvc.WrappedRequest
import models.Client
/** A request that passed authentication, carrying the resolved API [[Client]]. */
case class AuthenticatedRequest[A](client: Client, request: Request[A]) extends WrappedRequest(request)
object Security {
  // Header carrying the client's identifier (looked up as an email address).
  val GeoLayersClientId = "X-GEO-LAYERS-CLIENT-ID"
  // Header carrying the client's secret token.
  val GeoLayersClientToken = "X-GEO-LAYERS-CLIENT-TOKEN"
}
/** Mixin providing header/token based authentication for controller actions. */
trait Security {

  /** Wraps an action so it only runs for a valid, non-blocked client.
    *
    * Both id and token headers must be present and match a stored client,
    * otherwise 401 is returned; a client whose blocked flag is set gets 403;
    * otherwise `f` runs with an [[AuthenticatedRequest]].
    */
  def Authenticated[A](p: BodyParser[A])(f: AuthenticatedRequest[A] => Result) = {
    Action(p) { request =>
      // All three lookups must succeed for the client to resolve.
      val client = (for (
        id <- request.headers.get(Security.GeoLayersClientId);
        token <- request.headers.get(Security.GeoLayersClientToken);
        client <- Client.findOneByEmailAndToken(id, token)
      ) yield client)
      client match {
        case None => Results.Unauthorized(Json.toJson("The client id and/or client token not found in request"))
        // Fifth field true = blocked client.
        case Some(Client(_, _, _, _, true)) => Results.Forbidden(Json.toJson("The client is blocked"))
        case Some(client) => f(AuthenticatedRequest(client, request))
      }
    }
  }

  /** Convenience overload using the default (AnyContent) body parser. */
  def Authenticated(f: AuthenticatedRequest[AnyContent] => Result): Action[AnyContent] = {
    Authenticated(parse.anyContent)(f)
  }
}
| lukaszbudnik/geo-layers | app/security/Security.scala | Scala | apache-2.0 | 1,388 |
/** sbt-test fixture whose entry point always fails, used to verify that `run`
  * reports errors. Uses `sys.error` (throws RuntimeException with the given
  * message) instead of the long-deprecated `Predef.error`, which behaved
  * identically.
  */
object Spawn
{
  def main(args: Array[String]): Unit =
  {
    sys.error("Test error main")
  }
}
package com.sksamuel.elastic4s.searches.aggs
import com.sksamuel.elastic4s.searches.aggs.pipeline.PipelineAggregationDefinition
import org.elasticsearch.search.aggregations._
/** Base trait for aggregation definitions that wrap a mutable Elasticsearch
  * [[AggregationBuilder]]. All mutators register children on `builder` and
  * return `this` for fluent chaining.
  */
trait AggregationDefinition {
  type B <: AggregationBuilder
  val builder: B

  /** Registers a single pipeline aggregation under this aggregation. */
  def pipeline(pipeline: PipelineAggregationDefinition): this.type = {
    builder.subAggregation(pipeline.builder)
    this
  }

  /** Registers one or more pipeline aggregations. */
  def pipelines(first: PipelineAggregationDefinition,
                rest: PipelineAggregationDefinition*): this.type = pipelines(first +: rest)

  /** Registers every pipeline aggregation in the collection. */
  def pipelines(pipelines: Iterable[PipelineAggregationDefinition]): this.type = {
    for (p <- pipelines) pipeline(p)
    this
  }

  /** Registers a single sub-aggregation under this aggregation. */
  def subAggregation(agg: AggregationDefinition): this.type = {
    builder.subAggregation(agg.builder)
    this
  }

  /** Registers one or more sub-aggregations. */
  def subAggregations(first: AggregationDefinition, rest: AggregationDefinition*): this.type =
    subAggregations(first +: rest)

  /** Registers every sub-aggregation in the collection. */
  def subAggregations(aggs: Iterable[AggregationDefinition]): this.type = {
    for (a <- aggs) subAggregation(a)
    this
  }

  @deprecated("use subAggregations", "5.0.0")
  def aggs(first: AggregationDefinition, rest: AggregationDefinition*): this.type =
    subAggregations(first +: rest)

  @deprecated("use subAggregations", "5.0.0")
  def aggs(aggs: Iterable[AggregationDefinition]): this.type = {
    for (a <- aggs) subAggregation(a)
    this
  }
}
package de.tu_berlin.formic.datastructure.tree.client
import akka.actor.ActorSystem
import akka.testkit.TestKit
import de.tu_berlin.formic.common.json.FormicJsonProtocol
import de.tu_berlin.formic.datastructure.tree.TreeFormicJsonDataStructureProtocol
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
/**
* @author Ronny Bräunlich
*/
/**
 * Verifies that [[TreeClientDataStructureProvider]] wires up one factory actor and
 * one JSON protocol per supported tree element type (Boolean, Int, Double, String).
 */
class TreeClientDataStructureProviderSpec extends TestKit(ActorSystem("TreeClientDataStructureProviderSpec"))
  with WordSpecLike
  with Matchers
  with BeforeAndAfterAll {

  // Shut down the test ActorSystem once all tests in this spec have run.
  override def afterAll(): Unit = {
    system.terminate()
  }

  "The TreeClientDataStructureProvider" must {

    "create a factory actor for every list type" in {
      val provider = TreeClientDataStructureProvider()
      val factoryMap = provider.initFactories(system)

      // The map must be keyed by every supported tree factory name...
      factoryMap.keySet should contain allOf(
        FormicBooleanTreeFactory.name,
        FormicIntegerTreeFactory.name,
        FormicDoubleTreeFactory.name,
        FormicStringTreeFactory.name)

      // ...and each factory actor must be named after its factory.
      val actorPaths = factoryMap.values.map(ref => ref.path.name.toString)
      actorPaths should contain allOf(
        FormicBooleanTreeFactory.name.name,
        FormicIntegerTreeFactory.name.name,
        FormicDoubleTreeFactory.name.name,
        FormicStringTreeFactory.name.name
      )
    }

    "register a FormicJsonDataTypeProtocols for each list type" in {
      val protocol = new FormicJsonProtocol
      val provider = TreeClientDataStructureProvider()
      provider.registerFormicJsonDataStructureProtocols(protocol)

      // One JSON protocol per element type, keyed by the factory name.
      val registered = protocol.dataStructureOperationJsonProtocols
      registered should contain allOf(
        FormicBooleanTreeFactory.name -> new TreeFormicJsonDataStructureProtocol[Boolean](FormicBooleanTreeFactory.name),
        FormicIntegerTreeFactory.name -> new TreeFormicJsonDataStructureProtocol[Int](FormicIntegerTreeFactory.name),
        FormicDoubleTreeFactory.name -> new TreeFormicJsonDataStructureProtocol[Double](FormicDoubleTreeFactory.name),
        FormicStringTreeFactory.name -> new TreeFormicJsonDataStructureProtocol[Char](FormicStringTreeFactory.name)
      )
    }
  }
}
| rbraeunlich/formic | tree/shared/src/test/scala/de/tu_berlin/formic/datastructure/tree/client/TreeClientDataStructureProviderSpec.scala | Scala | apache-2.0 | 2,154 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization.macros.impl.ordered_serialization.providers
import scala.language.experimental.macros
import scala.reflect.macros.Context
import com.twitter.scalding._
import com.twitter.scalding.serialization.macros.impl.ordered_serialization.{ CompileTimeLengthTypes, ProductLike, TreeOrderedBuf }
import CompileTimeLengthTypes._
import com.twitter.scalding.serialization.OrderedSerialization
/**
 * Macro support for deriving an [[OrderedSerialization]]-style buffer for case classes.
 * Each case accessor is dispatched to its own [[TreeOrderedBuf]], and the per-field
 * buffers are combined via [[ProductLike]] for compare/hash/put/get/length.
 */
object CaseClassOrderedBuf {
  /**
   * Partial function that matches proper case classes (excluding case objects /
   * module classes) and builds their ordered buffer.
   */
  def dispatch(c: Context)(buildDispatcher: => PartialFunction[c.Type, TreeOrderedBuf[c.type]]): PartialFunction[c.Type, TreeOrderedBuf[c.type]] = {
    case tpe if tpe.typeSymbol.isClass && tpe.typeSymbol.asClass.isCaseClass && !tpe.typeSymbol.asClass.isModuleClass =>
      CaseClassOrderedBuf(c)(buildDispatcher, tpe)
  }
  /** Builds the [[TreeOrderedBuf]] for case class type `outerType`. */
  def apply(c: Context)(buildDispatcher: => PartialFunction[c.Type, TreeOrderedBuf[c.type]], outerType: c.Type): TreeOrderedBuf[c.type] = {
    import c.universe._
    // Fresh term names avoid hygiene collisions in the generated trees.
    def freshT(id: String) = newTermName(c.fresh(id))
    val dispatcher = buildDispatcher
    // One (field type, accessor name, field buffer) triple per case accessor,
    // with the field type rebased onto outerType (handles inherited type params).
    val elementData: List[(c.universe.Type, TermName, TreeOrderedBuf[c.type])] =
      outerType
        .declarations
        .collect { case m: MethodSymbol if m.isCaseAccessor => m }
        .map { accessorMethod =>
          val fieldType = accessorMethod.returnType.asSeenFrom(outerType, outerType.typeSymbol.asClass)
          val b: TreeOrderedBuf[c.type] = dispatcher(fieldType)
          (fieldType, accessorMethod.name.toTermName, b)
        }.toList
    new TreeOrderedBuf[c.type] {
      override val ctx: c.type = c
      override val tpe = outerType
      // compare/hash/put/length all delegate field-by-field to ProductLike.
      override def compareBinary(inputStreamA: ctx.TermName, inputStreamB: ctx.TermName) =
        ProductLike.compareBinary(c)(inputStreamA, inputStreamB)(elementData)
      override def hash(element: ctx.TermName): ctx.Tree = ProductLike.hash(c)(element)(elementData)
      override def put(inputStream: ctx.TermName, element: ctx.TermName) =
        ProductLike.put(c)(inputStream, element)(elementData)
      // get: read each field into a fresh local, then call the companion apply.
      override def get(inputStream: ctx.TermName): ctx.Tree = {
        val getValProcessor = elementData.map {
          case (tpe, accessorSymbol, tBuf) =>
            val curR = freshT("curR")
            val builderTree = q"""
        val $curR = {
          ${tBuf.get(inputStream)}
        }
        """
            (builderTree, curR)
        }
        q"""
        ..${getValProcessor.map(_._1)}
        ${outerType.typeSymbol.companionSymbol}(..${getValProcessor.map(_._2)})
        """
      }
      override def compare(elementA: ctx.TermName, elementB: ctx.TermName): ctx.Tree =
        ProductLike.compare(c)(elementA, elementB)(elementData)
      // Merge the outer lazy vals required by each field's buffer.
      // NOTE(review): `reduce` would throw on a zero-field case class — presumably
      // such types never reach this dispatcher; confirm before relying on it.
      override val lazyOuterVariables: Map[String, ctx.Tree] =
        elementData.map(_._3.lazyOuterVariables).reduce(_ ++ _)
      override def length(element: Tree) =
        ProductLike.length(c)(element)(elementData)
    }
  }
}
| cchepelov/scalding | scalding-serialization/src/main/scala/com/twitter/scalding/serialization/macros/impl/ordered_serialization/providers/CaseClassOrderedBuf.scala | Scala | apache-2.0 | 3,475 |
package lila.forum
import actorApi._
import akka.actor.ActorSelection
import org.joda.time.DateTime
import play.api.libs.json._
import lila.common.paginator._
import lila.db.api._
import lila.db.Implicits._
import lila.db.paginator._
import lila.hub.actorApi.timeline.{ Propagate, ForumPost }
import lila.mod.ModlogApi
import lila.security.{ Granter => MasterGranter }
import lila.user.{ User, UserContext }
import tube._
/**
 * Forum post API: creation, lookup, view assembly, pagination and moderation
 * deletion. Effects are composed with the project's future DSL
 * (`>>` sequence, `>>-` side effect, `??` option-guard, `optionT` option transformer).
 */
final class PostApi(
    env: Env,
    indexer: ActorSelection,
    maxPerPage: Int,
    modLog: ModlogApi,
    shutup: ActorSelection,
    timeline: ActorSelection,
    detectLanguage: lila.common.DetectLanguage) {

  /**
   * Creates a post in `topic` (deduplicated via PostRepo.findDuplicate), updates the
   * topic/categ denormalizations, indexes it, and notifies the shutup and timeline
   * actors. Returns the stored (or duplicate) post.
   */
  def makePost(
    categ: Categ,
    topic: Topic,
    data: DataForm.PostData)(implicit ctx: UserContext): Fu[Post] =
    lastNumberOf(topic) zip detectLanguage(data.text) zip userIds(topic) flatMap {
      case ((number, lang), topicUserIds) =>
        val post = Post.make(
          topicId = topic.id,
          author = data.author,
          userId = ctx.me map (_.id),
          ip = ctx.req.remoteAddress.some,
          text = lila.security.Spam.replace(data.text),
          number = number + 1,
          lang = lang map (_.language),
          troll = ctx.troll,
          hidden = topic.hidden,
          categId = categ.id)
        PostRepo findDuplicate post flatMap {
          case Some(dup) => fuccess(dup)
          case _ =>
            $insert(post) >>
              $update(topic withPost post) >> {
                shouldHideOnPost(topic) ?? TopicRepo.hide(topic.id, true)
              } >>
              $update(categ withTopic post) >>-
              (indexer ! InsertPost(post)) >>
              (env.recent.invalidate inject post) >>-
              ctx.userId.?? { userId =>
                shutup ! post.isTeam.fold(
                  lila.hub.actorApi.shutup.RecordTeamForumMessage(userId, post.text),
                  lila.hub.actorApi.shutup.RecordPublicForumMessage(userId, post.text))
              } >>-
              // Timeline propagation is skipped for troll posts.
              ((ctx.userId ifFalse post.troll) ?? { userId =>
                timeline ! Propagate(ForumPost(userId, topic.id.some, topic.name, post.id)).|>(prop =>
                  post.isStaff.fold(
                    prop toStaffFriendsOf userId,
                    prop toFollowersOf userId toUsers topicUserIds exceptUser userId
                  )
                )
              }) inject post
        }
    }

  // Categories whose topics are hidden as soon as they get their first post.
  private val quickHideCategs = Set("lichess-feedback", "off-topic-discussion")

  // A topic is hidden from the home feed when it is brand new in a quick-hide
  // category, has grown to a full page of posts, or is older than 5 days.
  private def shouldHideOnPost(topic: Topic) =
    topic.visibleOnHome && {
      (quickHideCategs(topic.categId) && topic.nbPosts == 1) || {
        topic.nbPosts == maxPerPage ||
          topic.createdAt.isBefore(DateTime.now minusDays 5)
      }
    }

  /** Resolves the categ/slug/page/number needed to link to a post; troll posts are hidden from non-trolls. */
  def urlData(postId: String, troll: Boolean): Fu[Option[PostUrlData]] = get(postId) flatMap {
    case Some((topic, post)) if (!troll && post.troll) => fuccess(none[PostUrlData])
    case Some((topic, post)) => PostRepo(troll).countBeforeNumber(topic.id, post.number) map { nb =>
      val page = nb / maxPerPage + 1
      PostUrlData(topic.categId, topic.slug, page, post.number).some
    }
    case _ => fuccess(none)
  }

  /** Fetches a post together with its topic, if both exist. */
  def get(postId: String): Fu[Option[(Topic, Post)]] = for {
    post ← optionT($find.byId[Post](postId))
    topic ← optionT($find.byId[Topic](post.topicId))
  } yield topic -> post

  /** Joins posts with their topics and categs into full views (posts without both are dropped). */
  def views(posts: List[Post]): Fu[List[PostView]] = for {
    topics ← $find.byIds[Topic](posts.map(_.topicId).distinct)
    categs ← $find.byIds[Categ](topics.map(_.categId).distinct)
  } yield posts map { post =>
    for {
      topic ← topics find (_.id == post.topicId)
      categ ← categs find (_.slug == topic.categId)
    } yield PostView(post, topic, categ, lastPageOf(topic))
  } flatten

  /** Like [[views]] but resolving ids first, preserving the given order. */
  def viewsFromIds(postIds: Seq[String]): Fu[List[PostView]] =
    $find.byOrderedIds[Post](postIds) flatMap views

  def view(post: Post): Fu[Option[PostView]] =
    views(List(post)) map (_.headOption)

  /** Lightweight views: post + topic only, no categ lookup. */
  def liteViews(posts: List[Post]): Fu[List[PostLiteView]] = for {
    topics ← $find.byIds[Topic](posts.map(_.topicId).distinct)
  } yield posts flatMap { post =>
    topics find (_.id == post.topicId) map { topic =>
      PostLiteView(post, topic)
    }
  }

  def liteView(post: Post): Fu[Option[PostLiteView]] =
    liteViews(List(post)) map (_.headOption)

  /** Compact post summaries (text truncated to 200 chars) for embedding elsewhere. */
  def miniPosts(posts: List[Post]): Fu[List[MiniForumPost]] = for {
    topics ← $find.byIds[Topic](posts.map(_.topicId).distinct)
  } yield posts flatMap { post =>
    topics find (_.id == post.topicId) map { topic =>
      MiniForumPost(
        isTeam = post.isTeam,
        postId = post.id,
        topicName = topic.name,
        userId = post.userId,
        text = post.text take 200,
        createdAt = post.createdAt)
    }
  }

  /** Number of the last post in the topic, or 0 when the topic has no posts. */
  def lastNumberOf(topic: Topic): Fu[Int] =
    PostRepo lastByTopics List(topic) map { _ ?? (_.number) }

  /** Index of the last pagination page for a topic. */
  def lastPageOf(topic: Topic) =
    math.ceil(topic.nbPosts / maxPerPage.toFloat).toInt

  /** Pages through a topic's posts, respecting troll visibility. */
  def paginator(topic: Topic, page: Int, troll: Boolean): Fu[Paginator[Post]] = Paginator(
    new Adapter(
      selector = PostRepo(troll) selectTopic topic,
      sort = PostRepo.sortQuery :: Nil),
    currentPage = page,
    maxPerPage = maxPerPage)

  /**
   * Deletes a post; deleting the first post removes the whole topic. Forum moderators
   * additionally get a modlog entry recording the deletion.
   */
  def delete(categSlug: String, postId: String, mod: User): Funit = (for {
    post ← optionT(PostRepo(true).byCategAndId(categSlug, postId))
    view ← optionT(view(post))
    _ ← optionT(for {
      first ← PostRepo.isFirstPost(view.topic.id, view.post.id)
      _ ← first.fold(
        env.topicApi.delete(view.categ, view.topic),
        $remove[Post](view.post) >>
          (env.topicApi denormalize view.topic) >>
          (env.categApi denormalize view.categ) >>
          env.recent.invalidate >>-
          (indexer ! RemovePost(post)))
      _ ← MasterGranter(_.ModerateForum)(mod) ?? modLog.deletePost(mod, post.userId, post.author, post.ip,
        text = "%s / %s / %s".format(view.categ.name, view.topic.name, post.text))
    } yield true.some)
  } yield ()).run.void

  def nbByUser(userId: String) = $count[Post](Json.obj("userId" -> userId))

  def userIds(topic: Topic) = PostRepo userIdsByTopicId topic.id
}
| JimmyMow/lila | modules/forum/src/main/PostApi.scala | Scala | mit | 6,169 |
/**
* This file is part of mycollab-esb.
*
* mycollab-esb is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-esb is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-esb. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.ecm.esb.impl
import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.Lock
import com.esofthead.mycollab.core.utils.{BeanUtility, StringUtils}
import com.esofthead.mycollab.lock.DistributionLockUtil
import com.esofthead.mycollab.module.GenericCommand
import com.esofthead.mycollab.module.ecm.domain.DriveInfo
import com.esofthead.mycollab.module.ecm.esb.SaveContentEvent
import com.esofthead.mycollab.module.ecm.service.DriveInfoService
import com.esofthead.mycollab.module.file.service.RawContentService
import com.google.common.eventbus.{AllowConcurrentEvents, Subscribe}
import org.slf4j.{Logger, LoggerFactory}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
/**
*
* @author MyCollab Ltd.
* @since 1.0
*
*/
/** Companion holding the shared logger for [[SaveContentCommandImpl]]. */
object SaveContentCommandImpl {
  private val LOG: Logger = LoggerFactory.getLogger(classOf[SaveContentCommandImpl])
}
@Component("saveContentCommand") class SaveContentCommandImpl extends GenericCommand {
@Autowired private val driveInfoService: DriveInfoService = null
@Autowired private val rawContentService: RawContentService = null
@AllowConcurrentEvents
@Subscribe
def saveContent(event: SaveContentEvent): Unit = {
SaveContentCommandImpl.LOG.debug("Save content {} by {}", Array(BeanUtility.printBeanObj(event.content),
event.createdUser))
if (event.sAccountId == null) {
return
}
val lock: Lock = DistributionLockUtil.getLock("ecm-" + event.sAccountId)
var totalSize: Long = event.content.getSize
if (StringUtils.isNotBlank(event.content.getThumbnail)) {
totalSize += rawContentService.getSize(event.content.getThumbnail)
}
try {
if (lock.tryLock(1, TimeUnit.HOURS)) {
val driveInfo: DriveInfo = driveInfoService.getDriveInfo(event.sAccountId)
if (driveInfo.getUsedvolume == null) {
driveInfo.setUsedvolume(totalSize)
}
else {
driveInfo.setUsedvolume(totalSize + driveInfo.getUsedvolume)
}
driveInfoService.saveOrUpdateDriveInfo(driveInfo)
}
}
catch {
case e: Exception => SaveContentCommandImpl.LOG.error(String.format("Error while save content %s",
BeanUtility.printBeanObj(event.content)), e)
} finally {
DistributionLockUtil.removeLock("ecm-" + event.sAccountId)
lock.unlock
}
}
} | maduhu/mycollab | mycollab-esb/src/main/scala/com/esofthead/mycollab/module/ecm/esb/impl/SaveContentCommandImpl.scala | Scala | agpl-3.0 | 3,293 |
package model.pokedex
/**
* Created by salim on 19/09/2016.
*/
/** One row of the pokedex pokemon_moves table; field names mirror the CSV columns. */
case class PokemonMove(pokemon_id: Int, version_group_id: Int, move_id: Int, pokemon_move_method_id: Int, level: Int, order: Int) extends DexClass {
  // DexClass key: the owning pokemon's id.
  def uid = pokemon_id
}
object PokemonMove extends DexObject[PokemonMove] {
  /** Builds a [[PokemonMove]] from a raw pokedex row keyed by column name. */
  def fromMap(row: Map[String, ConvertibleThing]): PokemonMove = {
    // Every column of this table is an integer; read via a small local helper.
    def int(column: String): Int = row(column).i
    PokemonMove(
      pokemon_id = int("pokemon_id"),
      version_group_id = int("version_group_id"),
      move_id = int("move_id"),
      pokemon_move_method_id = int("pokemon_move_method_id"),
      level = int("level"),
      order = int("order")
    )
  }
}
| salimfadhley/scalamoo | src/main/scala/model/pokedex/PokemonMove.scala | Scala | mit | 557 |
package com.blinkboxbooks.resourceserver
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import MatrixParameters._
@RunWith(classOf[JUnitRunner])
/** Unit tests for [[MatrixParameters.getMatrixParams]] (URI matrix-parameter parsing). */
class MatrixParametersTest extends FunSuite {

  // Well-formed inputs: `key=value` pairs separated by ';', optional trailing ';',
  // ':'-qualified keys, and whitespace around keys/values (which is trimmed).
  test("Matrix params, valid cases") {
    assert(getMatrixParams("").get === Map())
    assert(getMatrixParams("a=1").get === Map("a" -> "1"))
    assert(getMatrixParams("a=1;").get === Map("a" -> "1"))
    assert(getMatrixParams("x:a=1").get === Map("x:a" -> "1"))
    assert(getMatrixParams("x:abc=1").get === Map("x:abc" -> "1"))
    assert(getMatrixParams("xyz:abc=1").get === Map("xyz:abc" -> "1"))
    assert(getMatrixParams("a=1;b=2").get === Map("a" -> "1", "b" -> "2"))
    assert(getMatrixParams("a=1;b=2;").get === Map("a" -> "1", "b" -> "2"))
    assert(getMatrixParams("a=1;b=2;c=foo").get === Map("a" -> "1", "b" -> "2", "c" -> "foo"))
    assert(getMatrixParams(" a = 1 ; b = 2 ").get === Map("a" -> "1", "b" -> "2"))
  }

  // Malformed inputs (missing key or value) must yield a failed Try, so .get throws.
  test("Matrix params, invalid cases") {
    intercept[Exception](getMatrixParams("a").get)
    intercept[Exception](getMatrixParams("abc").get)
    intercept[Exception](getMatrixParams("a=").get)
    intercept[Exception](getMatrixParams("a=;").get)
    intercept[Exception](getMatrixParams("=x").get)
    intercept[Exception](getMatrixParams("=x;").get)
    intercept[Exception](getMatrixParams("a=xyz;b").get)
    intercept[Exception](getMatrixParams("a=xyz;b=").get)
    intercept[Exception](getMatrixParams("a=xyz;b=;").get)
  }
} | blinkboxbooks/resource-server.scala | src/test/scala/com/blinkboxbooks/resourceserver/MatrixParametersTest.scala | Scala | mit | 1,521 |
package tests
trait Baz {
val bar/*caret*/ = 0
def foo() = {
bar/*caret*/ match {
case `bar`/*caret*/ =>
case _ =>
}
}
}
class BazClass extends Baz {
override var bar = 1
}
object BazInst extends Baz {
override def bar = 1
}
object Test {
def foo(i: Int = BazInst.bar) = {
BazInst.bar.toString
(new BazClass)./*caret*/bar_=(3)
}
}
| whorbowicz/intellij-scala | testdata/rename3/valInTrait/before/tests/Baz.scala | Scala | apache-2.0 | 378 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io._
import org.junit.Assert._
import java.util.{Collections, Arrays}
import org.junit._
import org.scalatest.junit.JUnitSuite
import scala.collection._
import scala.util.Random
import kafka.utils.TestUtils
import kafka.common.InvalidOffsetException
class OffsetIndexTest extends JUnitSuite {
var idx: OffsetIndex = null
val maxEntries = 30
  // Fresh index per test: 30 entries max (8 bytes each), base offset 45.
  @Before
  def setup() {
    this.idx = new OffsetIndex(nonExistantTempFile(), baseOffset = 45L, maxIndexSize = 30 * 8)
  }
  // Remove the backing index file after each test, if one was created.
  @After
  def teardown() {
    if(this.idx != null)
      this.idx.file.delete()
  }
@Test
def randomLookupTest() {
assertEquals("Not present value should return physical offset 0.", OffsetPosition(idx.baseOffset, 0), idx.lookup(92L))
// append some random values
val base = idx.baseOffset.toInt + 1
val size = idx.maxEntries
val vals: Seq[(Long, Int)] = monotonicSeq(base, size).map(_.toLong).zip(monotonicSeq(0, size))
vals.foreach{x => idx.append(x._1, x._2)}
// should be able to find all those values
for((logical, physical) <- vals)
assertEquals("Should be able to find values that are present.", OffsetPosition(logical, physical), idx.lookup(logical))
// for non-present values we should find the offset of the largest value less than or equal to this
val valMap = new immutable.TreeMap[Long, (Long, Int)]() ++ vals.map(p => (p._1, p))
val offsets = (idx.baseOffset until vals.last._1.toInt).toArray
Collections.shuffle(Arrays.asList(offsets))
for(offset <- offsets.take(30)) {
val rightAnswer =
if(offset < valMap.firstKey)
OffsetPosition(idx.baseOffset, 0)
else
OffsetPosition(valMap.to(offset).last._1, valMap.to(offset).last._2._2)
assertEquals("The index should give the same answer as the sorted map", rightAnswer, idx.lookup(offset))
}
}
  // Boundary lookups: empty index, exact first entry, and exact last entry.
  @Test
  def lookupExtremeCases() {
    assertEquals("Lookup on empty file", OffsetPosition(idx.baseOffset, 0), idx.lookup(idx.baseOffset))
    for(i <- 0 until idx.maxEntries)
      idx.append(idx.baseOffset + i + 1, i)
    // check first and last entry
    assertEquals(OffsetPosition(idx.baseOffset, 0), idx.lookup(idx.baseOffset))
    assertEquals(OffsetPosition(idx.baseOffset + idx.maxEntries, idx.maxEntries - 1), idx.lookup(idx.baseOffset + idx.maxEntries))
  }
  // Filling the index to capacity makes the next append fail.
  @Test
  def appendTooMany() {
    for(i <- 0 until idx.maxEntries) {
      val offset = idx.baseOffset + i + 1
      idx.append(offset, i)
    }
    assertWriteFails("Append should fail on a full index", idx, idx.maxEntries + 1, classOf[IllegalArgumentException])
  }
  // Appending a smaller offset after a larger one must raise InvalidOffsetException.
  @Test(expected = classOf[InvalidOffsetException])
  def appendOutOfOrder() {
    idx.append(51, 0)
    idx.append(50, 1)
  }
  // Entries written before close must be readable after reopening the same file,
  // and the reopened index rejects further appends past the existing last offset.
  @Test
  def testReopen() {
    val first = OffsetPosition(51, 0)
    val sec = OffsetPosition(52, 1)
    idx.append(first.offset, first.position)
    idx.append(sec.offset, sec.position)
    idx.close()
    val idxRo = new OffsetIndex(idx.file, baseOffset = idx.baseOffset)
    assertEquals(first, idxRo.lookup(first.offset))
    assertEquals(sec, idxRo.lookup(sec.offset))
    assertEquals(sec.offset, idxRo.lastOffset)
    assertEquals(2, idxRo.entries)
    assertWriteFails("Append should fail on read-only index", idxRo, 53, classOf[IllegalArgumentException])
  }
  // Truncation semantics: truncating past the end is a no-op, truncating at/below an
  // entry drops entries >= that offset, and the index stays appendable afterwards.
  @Test
  def truncate() {
    val idx = new OffsetIndex(nonExistantTempFile(), baseOffset = 0L, maxIndexSize = 10 * 8)
    idx.truncate()
    for(i <- 1 until 10)
      idx.append(i, i)
    // now check the last offset after various truncate points and validate that we can still append to the index.
    idx.truncateTo(12)
    assertEquals("Index should be unchanged by truncate past the end", OffsetPosition(9, 9), idx.lookup(10))
    assertEquals("9 should be the last entry in the index", 9, idx.lastOffset)
    idx.append(10, 10)
    idx.truncateTo(10)
    assertEquals("Index should be unchanged by truncate at the end", OffsetPosition(9, 9), idx.lookup(10))
    assertEquals("9 should be the last entry in the index", 9, idx.lastOffset)
    idx.append(10, 10)
    idx.truncateTo(9)
    assertEquals("Index should truncate off last entry", OffsetPosition(8, 8), idx.lookup(10))
    assertEquals("8 should be the last entry in the index", 8, idx.lastOffset)
    idx.append(9, 9)
    idx.truncateTo(5)
    assertEquals("4 should be the last entry in the index", OffsetPosition(4, 4), idx.lookup(10))
    assertEquals("4 should be the last entry in the index", 4, idx.lastOffset)
    idx.append(5, 5)
    idx.truncate()
    assertEquals("Full truncation should leave no entries", 0, idx.entries)
    idx.append(0, 0)
  }
  // Helper: asserts that appending `offset` throws exactly `klass`; fails the test otherwise.
  def assertWriteFails[T](message: String, idx: OffsetIndex, offset: Int, klass: Class[T]) {
    try {
      idx.append(offset, 1)
      fail(message)
    } catch {
      case e: Exception => assertEquals("Got an unexpected exception.", klass, e.getClass)
    }
  }
def monotonicSeq(base: Int, len: Int): Seq[Int] = {
val rand = new Random(1L)
val vals = new mutable.ArrayBuffer[Int](len)
var last = base
for (_ <- 0 until len) {
last += rand.nextInt(15) + 1
vals += last
}
vals
}
  // Reserves a unique temp-file path that does not exist yet, so OffsetIndex creates it.
  def nonExistantTempFile(): File = {
    val file = TestUtils.tempFile()
    file.delete()
    file
  }
}
| rhauch/kafka | core/src/test/scala/unit/kafka/log/OffsetIndexTest.scala | Scala | apache-2.0 | 6,145 |
package org.github.aalbul.jenkins.domain
/**
* Created by nuru on 3/30/14.
*/
case class JobList(jobs: List[JobOverview]) | aalbul/reactive-jenkins | src/main/scala/org/github/aalbul/jenkins/domain/JobList.scala | Scala | gpl-2.0 | 124 |
/*
* Copyright (c) 2015 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.spark.streaming
// Spark
import com.amazonaws.services.dynamodbv2.document.DynamoDB
import org.apache.spark.SparkConf
import org.apache.spark.streaming._
import org.apache.spark.streaming.kinesis.KinesisUtils
// This project
import storage.DynamoUtils
import kinesis.{KinesisUtils => KU}
/**
* Core of the Spark Streaming Application
* 1. Configuration information is brought in from StreamingCountsApp.scala
* 2. Object sets up Kinesis, DynamoDB, CloudTrail connections
* 3. Once connections are up, Spark StreamingCounts stream processing starts
* AWS Kinesis -> Apache Spark Streaming -> AWS DynamoDB
* Raw Data -> Stream Processing Data -> Stored in Database
*
* (More on Spark Streaming: https://spark.apache.org/docs/1.3.0/streaming-kinesis-integration.html)
*/
object StreamingCounts {
/**
* Private function to set up Spark Streaming
*
* @param config The configuration for our job using StreamingCountsConfig.scala
*/
private def setupSparkContext(config: StreamingCountsConfig): StreamingContext = {
val streamingSparkContext = {
val sparkConf = new SparkConf().setAppName(config.appName).setMaster(config.master)
new StreamingContext(sparkConf, config.batchInterval)
}
streamingSparkContext
}
/**
* Starts our processing of a single Kinesis stream.
* Never ends.
*
* @param config The configuration for our job using StreamingCountsConfig.scala
*/
  def execute(config: StreamingCountsConfig) {
    // setting up Spark Streaming connection to Kinesis
    val kinesisClient = KU.setupKinesisClientConnection(config.endpointUrl, config.awsProfile)
    require(kinesisClient != null,
      "No AWS credentials found. Please specify credentials using one of the methods specified " +
      "in http://docs.aws.amazon.com/AWSSdkDocsJava/latest/DeveloperGuide/credentials.html")
    // setting up Spark Streaming connection to DynamoDB
    // (lazy: the client is only created when the first batch is written)
    lazy val dynamoConnection = DynamoUtils.setupDynamoClientConnection(config.awsProfile)
    val streamingSparkContext = setupSparkContext(config)
    // One receiver DStream per Kinesis shard, so all shards are consumed in parallel.
    val numShards = KU.getShardCount(kinesisClient, config.streamName)
    val sparkDStreams = (0 until numShards).map { i =>
      KinesisUtils.createStream(
        ssc = streamingSparkContext,
        streamName = config.streamName,
        endpointUrl = config.endpointUrl,
        initialPositionInStream = config.initialPosition,
        checkpointInterval = config.batchInterval,
        storageLevel = config.storageLevel
      )
    }
    // Map phase: union DStreams, derive events, determine bucket
    val bucketedEvents = streamingSparkContext
      .union(sparkDStreams)
      .map { bytes =>
        val e = SimpleEvent.fromJson(bytes)
        (e.bucket, e.`type`)
      }
    // Reduce phase: group by key then by count
    val bucketedEventCounts = bucketedEvents
      .groupByKey
      .map { case (eventType, events) =>
        val count = events.groupBy(identity).mapValues(_.size)
        (eventType, count)
      }
    // Iterate over each aggregate record and save the record into DynamoDB
    bucketedEventCounts.foreachRDD { rdd =>
      rdd.foreach { case (bucket, aggregates) =>
        aggregates.foreach { case (eventType, count) =>
          DynamoUtils.setOrUpdateCount(
            dynamoConnection,
            config.tableName,
            bucket.toString,
            eventType,
            DynamoUtils.timeNow(),
            DynamoUtils.timeNow(),
            count.toInt
          )
        }
      }
    }
    // Start Spark Streaming process
    streamingSparkContext.start()
    streamingSparkContext.awaitTermination()
  }
} | MiguelPeralvo/spark-streaming-example-project | src/main/scala/com.snowplowanalytics.spark/streaming/StreamingCounts.scala | Scala | apache-2.0 | 4,374 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class ListShouldContainAllElementsOfLogicalOrSpec extends FunSpec {
private val prettifier = Prettifier.default
  // Equality that deems values equal iff they differ — used to verify that
  // matchers honor a custom implicit Equality rather than plain ==.
  val invertedStringEquality =
    new Equality[String] {
      def areEqual(a: String, b: Any): Boolean = a != b
    }

  // Same inversion, for whole lists of strings.
  val invertedListOfStringEquality =
    new Equality[List[String]] {
      def areEqual(a: List[String], b: Any): Boolean = a != b
    }
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
  // Case-insensitive string equality: both sides are upper-cased before comparing.
  val upperCaseStringEquality =
    new Equality[String] {
      def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
    }
//ADDITIONAL//
  // File name expected in the failure's stack trace by checkMessageStackDepth.
  val fileName: String = "ListShouldContainAllElementsOfLogicalOrSpec.scala"
  describe("a List") {
    // NOTE(review): every checkMessageStackDepth call below uses
    // `thisLineNumber - 2` to point back at the statement inside the preceding
    // intercept block. Do not insert or remove lines between an intercept and
    // its checkMessageStackDepth call, or the stack-depth assertion will break.
    // Fixture lists: fumList is the subject; toList is a deliberately different
    // list used for (not) equal / (not) be comparisons.
    val fumList: List[String] = List("fex", "fum", "foe", "fie", "fee")
    val toList: List[String] = List("too", "you", "to", "birthday", "happy")

    // or-combination of two positive allElementsOf clauses: passes if either side holds.
    describe("when used with (contain allElementsOf Seq(..) or contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fum") or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fam") or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fum") or contain allElementsOf Seq("fie", "fee", "fam", "foe"))
        val e1 = intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fam") or contain allElementsOf Seq("happy", "birthday", "to", "you"))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("fee", "fie", "foe", "fam")) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM") or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM") or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))
        val e1 = intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or (contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE")))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FAM")) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM") or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FUM") or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FAM")) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
        (fumList should (contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") or contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum") or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (contain allElementsOf Seq("fie", "fee", "fum", "foe") or contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
      }
    }

    // equal-clause first, allElementsOf second (note: describe title says "and" but the syntax tested is "or").
    describe("when used with (equal (..) and contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (equal (fumList) or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (equal (toList) or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (equal (fumList) or contain allElementsOf Seq("fie", "fee", "fam", "foe"))
        val e1 = intercept[TestFailedException] {
          fumList should (equal (toList) or contain allElementsOf Seq("happy", "birthday", "to", "you"))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotEqual(prettifier, fumList, toList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("happy", "birthday", "to", "you")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (equal (fumList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (equal (toList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (equal (fumList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))
        val e1 = intercept[TestFailedException] {
          fumList should (equal (toList) or (contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE")))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotEqual(prettifier, fumList, toList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (equal (toList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        (fumList should (equal (fumList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        (fumList should (equal (toList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (equal (fumList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.didNotEqual(prettifier, fumList, fumList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
        (fumList should (equal (toList) or contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (decided by invertedListOfStringEquality, after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (equal (fumList) or contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
      }
    }

    // be-clause first, allElementsOf second.
    describe("when used with (be (..) and contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (be (fumList) or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (be (toList) or contain allElementsOf Seq("fie", "fee", "fum", "foe"))
        fumList should (be (fumList) or contain allElementsOf Seq("fie", "fee", "fam", "foe"))
        val e1 = intercept[TestFailedException] {
          fumList should (be (toList) or contain allElementsOf Seq("fie", "fee", "fam", "foe"))
        }
        checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(prettifier, fumList, toList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("fie", "fee", "fam", "foe")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (be (fumList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (be (toList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))
        fumList should (be (fumList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))
        val e1 = intercept[TestFailedException] {
          fumList should (be (toList) or (contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE")))
        }
        checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(prettifier, fumList, toList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (be (fumList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
        (fumList should (be (toList) or contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
        (fumList should (be (fumList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (be (toList) or contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.wasNotEqualTo(prettifier, fumList, toList) + ", and " + FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FAM", "FOE")), fileName, thisLineNumber - 2)
        (fumList should (be (fumList) or contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (be (fumList) or contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum"))
      }
    }

    // allElementsOf first, be-clause second (order swapped relative to the previous group).
    describe("when used with (contain allElementsOf Seq(..) and be (..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (contain allElementsOf Seq("fie", "fee", "fum", "foe") or be (fumList))
        fumList should (contain allElementsOf Seq("fie", "fee", "fam", "foe") or be (fumList))
        fumList should (contain allElementsOf Seq("fie", "fee", "fum", "foe") or be (toList))
        val e1 = intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fam") or be (toList))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("fee", "fie", "foe", "fam")) + ", and " + FailureMessages.wasNotEqualTo(prettifier, fumList, toList), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE") or be (fumList))
        fumList should (contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE") or be (fumList))
        fumList should (contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE") or be (toList))
        val e1 = intercept[TestFailedException] {
          fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or be (toList))
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FAM")) + ", and " + FailureMessages.wasNotEqualTo(prettifier, fumList, toList), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
        (fumList should (contain allElementsOf Seq("FIE", "FEE", "FAM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
        (fumList should (contain allElementsOf Seq("FIE", "FEE", "FUM", "FOE") or be (toList))) (decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (contain allElementsOf Seq("FEE", "FIE", "FOE", "FAM") or be (toList))) (decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.didNotContainAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FAM")) + ", and " + FailureMessages.wasNotEqualTo(prettifier, fumList, toList), fileName, thisLineNumber - 2)
        (fumList should (contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") or be (fumList))) (after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (contain allElementsOf Seq("fee", "fie", "foe", "fie", "fum") or be (fumList))
      }
    }

    // Negated form on both sides: passes unless BOTH negations fail.
    describe("when used with (not contain allElementsOf Seq(..) and not contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fuu")) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fum")) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fuu")) or not contain allElementsOf (Seq("fie", "fee", "fum", "foe")))
        val e1 = intercept[TestFailedException] {
          fumList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fum")) or not contain allElementsOf (Seq("fie", "fee", "fum", "foe")))
        }
        checkMessageStackDepth(e1, FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("fee", "fie", "foe", "fum")) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("fie", "fee", "fum", "foe")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUU")) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUU")) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))
        val e1 = intercept[TestFailedException] {
          fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))
        }
        checkMessageStackDepth(e1, FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FUM", "FOE")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUU")) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUU")) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FUM")) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FUM", "FOE")), fileName, thisLineNumber - 2)
        (fumList should (contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM ") or contain allElementsOf Seq(" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")) or not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
      }
    }

    // not-equal clause combined with a negated allElementsOf clause.
    describe("when used with (not equal (..) and not contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (not equal (toList) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not equal (fumList) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not equal (toList) or not contain allElementsOf (Seq("fie", "fee", "fum", "foe")))
        val e1 = intercept[TestFailedException] {
          fumList should (not equal (fumList) or not contain allElementsOf (Seq("fee", "fie", "foe", "fum")))
        }
        checkMessageStackDepth(e1, FailureMessages.equaled(prettifier, fumList, fumList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (not equal (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not equal (fumList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not equal (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))
        val e2 = intercept[TestFailedException] {
          fumList should (not equal (fumList) or (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM"))))
        }
        checkMessageStackDepth(e2, FailureMessages.equaled(prettifier, fumList, fumList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (not equal (fumList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        (fumList should (not equal (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        (fumList should (not equal (fumList) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (not equal (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUM", "FOE")))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.equaled(prettifier, fumList, toList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FIE", "FEE", "FUM", "FOE")), fileName, thisLineNumber - 2)
        (fumList should (not contain allElementsOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")) or not contain allElementsOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (not equal (toList) or not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
      }
    }

    // not-be clause combined with a negated allElementsOf clause.
    describe("when used with (not be (..) and not contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        fumList should (not be (toList) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not be (fumList) or not contain allElementsOf (Seq("fie", "fee", "fuu", "foe")))
        fumList should (not be (toList) or not contain allElementsOf (Seq("fee", "fie", "foe", "fum")))
        val e1 = intercept[TestFailedException] {
          fumList should (not be (fumList) or not contain allElementsOf (Seq("fee", "fie", "foe", "fum")))
        }
        checkMessageStackDepth(e1, FailureMessages.wasEqualTo(prettifier, fumList, fumList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("fee", "fie", "foe", "fum")), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        fumList should (not be (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not be (fumList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))
        fumList should (not be (toList) or not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")))
        val e1 = intercept[TestFailedException] {
          fumList should (not be (fumList) or (not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM"))))
        }
        checkMessageStackDepth(e1, FailureMessages.wasEqualTo(prettifier, fumList, fumList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (fumList should (not be (toList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by upperCaseStringEquality)
        (fumList should (not be (fumList) or not contain allElementsOf (Seq("FIE", "FEE", "FUU", "FOE")))) (decided by upperCaseStringEquality)
        (fumList should (not be (toList) or not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")))) (decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (fumList should (not be (fumList) or not contain allElementsOf (Seq("FEE", "FIE", "FOE", "FUM")))) (decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, FailureMessages.wasEqualTo(prettifier, fumList, fumList) + ", and " + FailureMessages.containedAllElementsOf(prettifier, fumList, Seq("FEE", "FIE", "FOE", "FUM")), fileName, thisLineNumber - 2)
        (fumList should (not contain allElementsOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")) or not contain allElementsOf (Seq(" FEE ", " FIE ", " FOE ", " FUU ")))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
      }

      it("should allow RHS to contain duplicated value") {
        fumList should (not be (toList) or not contain allElementsOf (Seq("fee", "fie", "foe", "fie", "fum")))
      }
    }
  }
describe("collection of Lists") {
    // Fixtures for the `all (...)` / `atLeast (...)` inspector syntax:
    // list1s holds three identical lists; in `lists` the last element differs;
    // nils/listsNil mix in empty lists; hiLists/toLists are string variants.
    val list1s: Vector[List[Int]] = Vector(List(3, 2, 1, 0), List(3, 2, 1, 0), List(3, 2, 1, 0))
    val lists: Vector[List[Int]] = Vector(List(3, 2, 1, 0), List(3, 2, 1, 0), List(8, 4, 3, 2))
    val nils: Vector[List[Int]] = Vector(Nil, Nil, Nil)
    val listsNil: Vector[List[Int]] = Vector(List(3, 2, 1, 0), List(3, 2, 1, 0), Nil)
    val hiLists: Vector[List[String]] = Vector(List("howdy", "hi", "hello"), List("howdy", "hi", "hello"), List("howdy", "hi", "hello"))
    val toLists: Vector[List[String]] = Vector(List("nice", "you", "to"), List("nice", "you", "to"), List("nice", "you", "to"))
    // Builds the expected failure message for an `all (...)` inspection:
    // reports the index and per-element message of the first failing element,
    // the file/line where it failed, and the decorated collection inspected.
    def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String =
      "'all' inspection failed, because: \\n" +
        " at index " + index + ", " + message + " (" + fileName + ":" + (lineNumber) + ") \\n" +
        "in " + decorateToStringValue(prettifier, left)
    // NOTE(review): checkMessageStackDepth relies on `thisLineNumber - 2`
    // pointing at the statement inside the preceding intercept block; do not
    // insert lines between an intercept and its checkMessageStackDepth call.
    describe("when used with (contain allElementsOf Seq(..) and contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        all (list1s) should (contain allElementsOf Seq(3, 2, 1) or contain allElementsOf Seq(1, 3, 2))
        all (list1s) should (contain allElementsOf Seq(3, 2, 5) or contain allElementsOf Seq(1, 3, 2))
        all (list1s) should (contain allElementsOf Seq(3, 2, 1) or contain allElementsOf Seq(2, 3, 4))

        atLeast (2, lists) should (contain allElementsOf Seq(3, 1, 2) or contain allElementsOf Seq(1, 2, 3))
        atLeast (2, lists) should (contain allElementsOf Seq(3, 6, 5) or contain allElementsOf Seq(1, 3, 2))
        atLeast (2, lists) should (contain allElementsOf Seq(3, 1, 2) or contain allElementsOf Seq(8, 3, 4))

        val e1 = intercept[TestFailedException] {
          all (lists) should (contain allElementsOf Seq(3, 1, 2) or contain allElementsOf Seq(1, 3, 2))
        }
        checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(3, 1, 2)) + ", and " + decorateToStringValue(prettifier, lists(2)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(1, 3, 2)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        all (hiLists) should (contain allElementsOf Seq("HELLO", "HI") or contain allElementsOf Seq("hello", "hi"))
        all (hiLists) should (contain allElementsOf Seq("HELLO", "HO") or contain allElementsOf Seq("hello", "hi"))
        all (hiLists) should (contain allElementsOf Seq("HELLO", "HI") or contain allElementsOf Seq("hello", "ho"))

        val e1 = intercept[TestFailedException] {
          all (hiLists) should (contain allElementsOf Seq("HELLO", "HO") or contain allElementsOf Seq("hello", "ho"))
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("hello", "ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (all (hiLists) should (contain allElementsOf Seq("HELLO", "HI") or contain allElementsOf Seq("hello", "hi"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (all (hiLists) should (contain allElementsOf Seq("HELLO", "HO") or contain allElementsOf Seq("hello", "hi"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (all (hiLists) should (contain allElementsOf Seq("HELLO", "HI") or contain allElementsOf Seq("hello", "ho"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)

        val e1 = intercept[TestFailedException] {
          (all (hiLists) should (contain allElementsOf Seq("HELLO", "HO") or contain allElementsOf Seq("hello", "ho"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("hello", "ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }

      it("should allow RHS to contain duplicated value") {
        all (list1s) should (contain allElementsOf Seq(3, 2, 2, 1) or contain allElementsOf Seq(1, 3, 2))
        all (list1s) should (contain allElementsOf Seq(1, 3, 2) or contain allElementsOf Seq(3, 2, 2, 1))
      }
    }
    // be-clause combined with allElementsOf under the `all` inspector.
    // NOTE(review): do not insert lines between an intercept block and its
    // checkMessageStackDepth call (`thisLineNumber - 2` offset).
    describe("when used with (be (..) and contain allElementsOf Seq(..))") {

      it("should do nothing if valid, else throw a TFE with an appropriate error message") {

        all (list1s) should (be (List(3, 2, 1, 0)) or contain allElementsOf Seq(1, 2, 3))
        all (list1s) should (be (List(2, 3, 4)) or contain allElementsOf Seq(1, 2, 3))
        all (list1s) should (be (List(3, 2, 1, 0)) or contain allElementsOf Seq(2, 3, 4))

        val e1 = intercept[TestFailedException] {
          all (list1s) should (be (List(2, 3, 4)) or contain allElementsOf Seq(2, 3, 4))
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was not equal to " + decorateToStringValue(prettifier, List(2, 3, 4)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq(2, 3, 4)), thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
      }

      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality

        all (hiLists) should (be (List("howdy", "hi", "hello")) or contain allElementsOf Seq("HELLO", "HI"))
        all (hiLists) should (be (List("ho", "hello")) or contain allElementsOf Seq("HELLO", "HI"))
        all (hiLists) should (be (List("howdy", "hi", "hello")) or contain allElementsOf Seq("HELLO", "HO"))

        val e1 = intercept[TestFailedException] {
          all (hiLists) should (be (List("ho", "hello")) or contain allElementsOf Seq("HELLO", "HO"))
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("ho", "hello")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HO")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }

      it("should use an explicitly provided Equality") {
        (all (hiLists) should (be (List("howdy", "hi", "hello")) or contain allElementsOf Seq("HELLO", "HI"))) (decided by upperCaseStringEquality)
        (all (hiLists) should (be (List("ho", "hello")) or contain allElementsOf Seq("HELLO", "HI"))) (decided by upperCaseStringEquality)
        (all (hiLists) should (be (List("howdy", "hi", "hello")) or contain allElementsOf Seq("HELLO", "HO"))) (decided by upperCaseStringEquality)

        val e1 = intercept[TestFailedException] {
          (all (hiLists) should (be (List("ho", "hello")) or contain allElementsOf Seq("HELLO", "HO"))) (decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("ho", "hello")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HO")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }

      it("should allow RHS to contain duplicated value") {
        all (list1s) should (be (List(3, 2, 1, 0)) or contain allElementsOf Seq(1, 2, 2, 3))
      }
    }
describe("when used with (not contain allElementsOf xx and not contain allElementsOf xx)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (not contain allElementsOf (Seq(3, 2, 8)) or not contain allElementsOf (Seq(8, 3, 4)))
all (list1s) should (not contain allElementsOf (Seq(1, 2, 3)) or not contain allElementsOf (Seq(8, 3, 4)))
all (list1s) should (not contain allElementsOf (Seq(3, 2, 8)) or not contain allElementsOf (Seq(2, 3, 1)))
val e1 = intercept[TestFailedException] {
all (lists) should (not contain allElementsOf (Seq(4, 2, 3)) or not contain allElementsOf (Seq(2, 3, 4)))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " contained all elements of " + decorateToStringValue(prettifier, Seq(4, 2, 3)) + ", and " + decorateToStringValue(prettifier, lists(2)) + " contained all elements of " + decorateToStringValue(prettifier, Seq(2, 3, 4)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HO")) or not contain allElementsOf (Seq("hello", "ho")))
all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HI")) or not contain allElementsOf (Seq("hello", "ho")))
all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HO")) or not contain allElementsOf (Seq("hello", "hi")))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HI")) or not contain allElementsOf (Seq("hello", "hi")))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HI")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("hello", "hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
      it("should use an explicitly provided Equality") {
        // Same scenarios as the implicit-Equality test above, but the Equality
        // is supplied explicitly via `(decided by ...)` — once per `contain`.
        (all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HO")) or not contain allElementsOf (Seq("hello", "ho")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HI")) or not contain allElementsOf (Seq("hello", "ho")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        (all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HO")) or not contain allElementsOf (Seq("hello", "hi")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        // Both alternatives fail, so the `or` as a whole must fail.
        val e1 = intercept[TestFailedException] {
          (all (hiLists) should (not contain allElementsOf (Seq("HELLO", "HI")) or not contain allElementsOf (Seq("hello", "hi")))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HI")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("hello", "hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }
      it("should allow RHS to contain duplicated value") {
        // Duplicated values in the right-hand sequence must not change the
        // outcome of `allElementsOf`, regardless of which side of `or` they are on.
        all (list1s) should (not contain allElementsOf (Seq(1, 2, 2, 3)) or not contain allElementsOf (Seq(8, 3, 4)))
        all (list1s) should (not contain allElementsOf (Seq(8, 3, 4)) or not contain allElementsOf (Seq(1, 2, 2, 3)))
      }
}
    describe("when used with (not be (...) and not contain allElementsOf Seq(...))") {
      // NOTE(review): the describe title says "and", but every assertion below
      // combines the matchers with `or` — the title looks stale; confirm intent.
      it("should do nothing if valid, else throw a TFE with an appropriate error message") {
        all (list1s) should (not be (List(2)) or not contain allElementsOf (Seq(8, 3, 4)))
        all (list1s) should (not be (List(3, 2, 1, 0)) or not contain allElementsOf (Seq(8, 3, 4)))
        all (list1s) should (not be (List(2)) or not contain allElementsOf (Seq(1, 2, 3)))
        // Fails only when both sides fail for the same element.
        val e1 = intercept[TestFailedException] {
          all (list1s) should (not be (List(3, 2, 1, 0)) or not contain allElementsOf (Seq(2, 3, 1)))
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was equal to " + decorateToStringValue(prettifier, List(3, 2, 1, 0)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq(2, 3, 1)), thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
      }
      it("should use the implicit Equality in scope") {
        implicit val ise = upperCaseStringEquality
        all (hiLists) should (not be (List("hello", "ho")) or not contain allElementsOf (Seq("HELLO", "HO")))
        all (hiLists) should (not be (List("howdy", "hello", "hi")) or not contain allElementsOf (Seq("HELLO", "HO")))
        all (hiLists) should (not be (List("hello", "ho")) or not contain allElementsOf (Seq("HELLO", "HI")))
        // Only the `contain` side is affected by the implicit Equality; the
        // `be` side uses plain equality on the whole list.
        val e1 = intercept[TestFailedException] {
          all (hiLists) should (not be (List("howdy", "hi", "hello")) or not contain allElementsOf (Seq("HELLO", "HI")))
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("howdy", "hi", "hello")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HI")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }
      it("should use an explicitly provided Equality") {
        // One `(decided by ...)` here: only the single `contain` needs an Equality.
        (all (hiLists) should (not be (List("hello", "ho")) or not contain allElementsOf (Seq("HELLO", "HO")))) (decided by upperCaseStringEquality)
        (all (hiLists) should (not be (List("howdy", "hello", "hi")) or not contain allElementsOf (Seq("HELLO", "HO")))) (decided by upperCaseStringEquality)
        (all (hiLists) should (not be (List("hello", "ho")) or not contain allElementsOf (Seq("HELLO", "HI")))) (decided by upperCaseStringEquality)
        val e1 = intercept[TestFailedException] {
          (all (hiLists) should (not be (List("howdy", "hi", "hello")) or not contain allElementsOf (Seq("HELLO", "HI")))) (decided by upperCaseStringEquality)
        }
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("howdy", "hi", "hello")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained all elements of " + decorateToStringValue(prettifier, Seq("HELLO", "HI")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
      }
      it("should allow RHS to contain duplicated value") {
        // Duplicates on the right-hand side must not affect the outcome.
        all (list1s) should (not be (List(2)) or not contain allElementsOf (Seq(1, 2, 2, 3)))
      }
    }
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ListShouldContainAllElementsOfLogicalOrSpec.scala | Scala | apache-2.0 | 40,636 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package runtime
import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator }
import scala.collection.mutable.WrappedArray
import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: }
import scala.collection.generic.{ Sorted }
import scala.reflect.{ ClassTag, classTag }
import scala.util.control.ControlThrowable
import scala.xml.{ Node, MetaData }
import java.lang.{ Class => jClass }
import java.lang.Double.doubleToLongBits
import java.lang.reflect.{ Modifier, Method => JMethod }
/** The object ScalaRunTime provides support methods required by
* the scala runtime. All these methods should be considered
* outside the API and subject to change or removal without notice.
*/
object ScalaRunTime {
  // Convenience overload: is `x` a (one-dimensional or deeper) array reference?
  def isArray(x: AnyRef): Boolean = isArray(x, 1)
  // True if `x` is non-null and its class is an array type nested at least
  // `atLevel` dimensions deep (atLevel = 2 matches Array[Array[_]], etc.).
  def isArray(x: Any, atLevel: Int): Boolean =
    x != null && isArrayClass(x.getClass, atLevel)
  private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
    clazz != null && clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1))
  // "Value class" here means a JVM primitive class (int, double, ...),
  // not a user-defined Scala AnyVal subclass.
  def isValueClass(clazz: jClass[_]) = clazz.isPrimitive()
  // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22)
  def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple")
  // True for values of the standard primitive AnyVal types (and Unit).
  def isAnyVal(x: Any) = x match {
    case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true
    case _ => false
  }
  /** Return the class object representing an array with element class `clazz`.
   */
  def arrayClass(clazz: jClass[_]): jClass[_] = {
    // newInstance throws an exception if the erasure is Void.TYPE. see SI-5680
    if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]]
    else java.lang.reflect.Array.newInstance(clazz, 0).getClass
  }
  /** Return the class object representing elements in arrays described by a given schematic.
   */
  def arrayElementClass(schematic: Any): jClass[_] = schematic match {
    case cls: jClass[_] => cls.getComponentType
    case tag: ClassTag[_] => tag.runtimeClass
    case _ =>
      throw new UnsupportedOperationException(s"unsupported schematic $schematic (${schematic.getClass})")
  }
  /** Return the class object representing an unboxed value type,
   * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler
   * rewrites expressions like 5.getClass to come here.
   */
  def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] =
    classTag[T].runtimeClass.asInstanceOf[jClass[T]]
  /** Retrieve generic array element */
  def array_apply(xs: AnyRef, idx: Int): Any = {
    xs match {
      case x: Array[AnyRef] => x(idx).asInstanceOf[Any]
      case x: Array[Int] => x(idx).asInstanceOf[Any]
      case x: Array[Double] => x(idx).asInstanceOf[Any]
      case x: Array[Long] => x(idx).asInstanceOf[Any]
      case x: Array[Float] => x(idx).asInstanceOf[Any]
      case x: Array[Char] => x(idx).asInstanceOf[Any]
      case x: Array[Byte] => x(idx).asInstanceOf[Any]
      case x: Array[Short] => x(idx).asInstanceOf[Any]
      case x: Array[Boolean] => x(idx).asInstanceOf[Any]
      case x: Array[Unit] => x(idx).asInstanceOf[Any]
      case null => throw new NullPointerException
    }
  }
  /** update generic array element */
  def array_update(xs: AnyRef, idx: Int, value: Any): Unit = {
    xs match {
      case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef]
      case x: Array[Int] => x(idx) = value.asInstanceOf[Int]
      case x: Array[Double] => x(idx) = value.asInstanceOf[Double]
      case x: Array[Long] => x(idx) = value.asInstanceOf[Long]
      case x: Array[Float] => x(idx) = value.asInstanceOf[Float]
      case x: Array[Char] => x(idx) = value.asInstanceOf[Char]
      case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte]
      case x: Array[Short] => x(idx) = value.asInstanceOf[Short]
      case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean]
      case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit]
      case null => throw new NullPointerException
    }
  }
  /** Get generic array length */
  def array_length(xs: AnyRef): Int = xs match {
    case x: Array[AnyRef] => x.length
    case x: Array[Int] => x.length
    case x: Array[Double] => x.length
    case x: Array[Long] => x.length
    case x: Array[Float] => x.length
    case x: Array[Char] => x.length
    case x: Array[Byte] => x.length
    case x: Array[Short] => x.length
    case x: Array[Boolean] => x.length
    case x: Array[Unit] => x.length
    case null => throw new NullPointerException
  }
  // Clone a generic array. Unit arrays are returned as-is (every slot holds
  // the same boxed unit, so sharing the instance is safe here).
  def array_clone(xs: AnyRef): AnyRef = xs match {
    case x: Array[AnyRef] => ArrayRuntime.cloneArray(x)
    case x: Array[Int] => ArrayRuntime.cloneArray(x)
    case x: Array[Double] => ArrayRuntime.cloneArray(x)
    case x: Array[Long] => ArrayRuntime.cloneArray(x)
    case x: Array[Float] => ArrayRuntime.cloneArray(x)
    case x: Array[Char] => ArrayRuntime.cloneArray(x)
    case x: Array[Byte] => ArrayRuntime.cloneArray(x)
    case x: Array[Short] => ArrayRuntime.cloneArray(x)
    case x: Array[Boolean] => ArrayRuntime.cloneArray(x)
    case x: Array[Unit] => x
    case null => throw new NullPointerException
  }
  /** Convert an array to an object array.
   * Needed to deal with vararg arguments of primitive types that are passed
   * to a generic Java vararg parameter T ...
   */
  def toObjectArray(src: AnyRef): Array[Object] = src match {
    case x: Array[AnyRef] => x
    case _ =>
      // Primitive array: box each element into a fresh Object array.
      val length = array_length(src)
      val dest = new Array[Object](length)
      for (i <- 0 until length)
        array_update(dest, i, array_apply(src, i))
      dest
  }
  // Copy a Seq's elements into a fresh boxed Object array.
  def toArray[T](xs: scala.collection.Seq[T]) = {
    val arr = new Array[AnyRef](xs.length)
    var i = 0
    for (x <- xs) {
      arr(i) = x.asInstanceOf[AnyRef]
      i += 1
    }
    arr
  }
  // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
  // More background at ticket #2318.
  def ensureAccessible(m: JMethod): JMethod = {
    if (!m.isAccessible) {
      // A SecurityManager may veto this; the original (possibly still
      // inaccessible) method is returned in that case.
      try m setAccessible true
      catch { case _: SecurityException => () }
    }
    m
  }
  // Throw UninitializedError for null, otherwise return the value unchanged.
  def checkInitialized[T <: AnyRef](x: T): T =
    if (x == null) throw new UninitializedError else x
  // Default case-class toString: Prefix(elem1,elem2,...).
  def _toString(x: Product): String =
    x.productIterator.mkString(x.productPrefix + "(", ",", ")")
  // Default case-class hashCode, delegating to MurmurHash3's product hash.
  def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x)
  /** A helper for case classes. */
  def typedProductIterator[T](x: Product): Iterator[T] = {
    new AbstractIterator[T] {
      private var c: Int = 0
      private val cmax = x.productArity
      def hasNext = c < cmax
      def next() = {
        val result = x.productElement(c)
        c += 1
        result.asInstanceOf[T]
      }
    }
  }
  /** Fast path equality method for inlining; used when -optimise is set.
   */
  @inline def inlinedEquals(x: Object, y: Object): Boolean =
    if (x eq y) true
    else if (x eq null) false
    else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y)
    else if (x.isInstanceOf[java.lang.Character]) BoxesRunTime.equalsCharObject(x.asInstanceOf[java.lang.Character], y)
    else x.equals(y)
  // Default case-class equals: same arity and pairwise-equal elements.
  // Note: runtime classes are not compared here.
  def _equals(x: Product, y: Any): Boolean = y match {
    case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator
    case _ => false
  }
  // hashcode -----------------------------------------------------------
  //
  // Note that these are the implementations called by ##, so they
  // must not call ## themselves.
  def hash(x: Any): Int =
    if (x == null) 0
    else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number])
    else x.hashCode
  // Hash doubles so that a Double numerically equal to an Int/Long/Float
  // value produces the same hash as that value.
  def hash(dv: Double): Int = {
    val iv = dv.toInt
    if (iv == dv) return iv
    val lv = dv.toLong
    if (lv == dv) return lv.hashCode
    val fv = dv.toFloat
    if (fv == dv) fv.hashCode else dv.hashCode
  }
  // Same idea for floats: agree with Int/Long hashes when the value fits.
  def hash(fv: Float): Int = {
    val iv = fv.toInt
    if (iv == fv) return iv
    val lv = fv.toLong
    if (lv == fv) return hash(lv)
    else fv.hashCode
  }
  // Mix the two halves so that a Long equal to an Int value hashes like that
  // Int (for such values high is all sign bits and the xor cancels out).
  def hash(lv: Long): Int = {
    val low = lv.toInt
    val lowSign = low >>> 31
    val high = (lv >>> 32).toInt
    low ^ (high + lowSign)
  }
  def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x)
  // The remaining overloads are here for completeness, but the compiler
  // inlines these definitions directly so they're not generally used.
  def hash(x: Int): Int = x
  def hash(x: Short): Int = x.toInt
  def hash(x: Byte): Int = x.toInt
  def hash(x: Char): Int = x.toInt
  def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode
  def hash(x: Unit): Int = 0
  /** A helper method for constructing case class equality methods,
   * because existential types get in the way of a clean outcome and
   * it's performing a series of Any/Any equals comparisons anyway.
   * See ticket #2867 for specifics.
   */
  def sameElements(xs1: scala.collection.Seq[Any], xs2: scala.collection.Seq[Any]) = xs1 sameElements xs2
  /** Given any Scala value, convert it to a String.
   *
   * The primary motivation for this method is to provide a means for
   * correctly obtaining a String representation of a value, while
   * avoiding the pitfalls of naïvely calling toString on said value.
   * In particular, it addresses the fact that (a) toString cannot be
   * called on null and (b) depending on the apparent type of an
   * array, toString may or may not print it in a human-readable form.
   *
   * @param arg the value to stringify
   * @return a string representation of arg.
   */
  def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue)
  def stringOf(arg: Any, maxElements: Int): String = {
    def packageOf(x: AnyRef) = x.getClass.getPackage match {
      case null => ""
      case p => p.getName
    }
    def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala."
    def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc."
    // When doing our own iteration is dangerous
    def useOwnToString(x: Any) = x match {
      // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData]
      case _: Node | _: MetaData => true
      // Range/NumericRange have a custom toString to avoid walking a gazillion elements
      case _: Range | _: NumericRange[_] => true
      // Sorted collections to the wrong thing (for us) on iteration - ticket #3493
      case _: Sorted[_, _] => true
      // StringBuilder(a, b, c) and similar not so attractive
      case _: StringLike[_] => true
      // Don't want to evaluate any elements in a view
      case _: TraversableView[_, _] => true
      // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom
      // collections which may have useful toString methods - ticket #3710
      // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s.
      case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x)
      // Otherwise, nothing could possibly go wrong
      case _ => false
    }
    // A variation on inner for maps so they print -> instead of bare tuples
    def mapInner(arg: Any): String = arg match {
      case (k, v) => inner(k) + " -> " + inner(v)
      case _ => inner(arg)
    }
    // Special casing Unit arrays, the value class which uses a reference array type.
    def arrayToString(x: AnyRef) = {
      if (x.getClass.getComponentType == classOf[BoxedUnit])
        0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")")
      else
        WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")")
    }
    // The recursively applied attempt to prettify Array printing.
    // Note that iterator is used if possible and foreach is used as a
    // last resort, because the parallel collections "foreach" in a
    // random order even on sequences.
    def inner(arg: Any): String = arg match {
      case null => "null"
      case "" => "\\"\\""
      case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\\"" + x + "\\"" else x
      case x if useOwnToString(x) => x.toString
      case x: AnyRef if isArray(x) => arrayToString(x)
      case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")")
      case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
      case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")")
      case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma
      case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")")
      case x => x.toString
    }
    // The try/catch is defense against iterables which aren't actually designed
    // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes.
    try inner(arg)
    catch {
      case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg
    }
  }
  /** stringOf formatted for use in a repl result. */
  def replStringOf(arg: Any, maxElements: Int): String = {
    val s = stringOf(arg, maxElements)
    val nl = if (s contains "\\n") "\\n" else ""
    nl + s + "\\n"
  }
  // Debug aid: when -Dscala.debug.zip is set, report zips over collections of
  // differing lengths (with a short stack trace) to stderr.
  private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) {
    if (sys.props contains "scala.debug.zip") {
      val xs = coll1.toIndexedSeq
      val ys = coll2.toIndexedSeq
      if (xs.length != ys.length) {
        Console.err.println(
          "Mismatched zip in " + what + ":\\n" +
          "  this: " + xs.mkString(", ") + "\\n" +
          "  that: " + ys.mkString(", ")
        )
        (new Exception).getStackTrace.drop(2).take(10).foreach(println)
      }
    }
  }
}
| xuwei-k/scala-js | scalalib/overrides-2.10/scala/runtime/ScalaRunTime.scala | Scala | bsd-3-clause | 14,890 |
package pl.newicom.dddd.process
import pl.newicom.dddd.aggregate.{DomainEvent, EntityId}
import pl.newicom.dddd.office.LocalOfficeId
import pl.newicom.dddd.process.ProcessConfig.CorrelationIdResolver
import pl.newicom.dddd.saga.BusinessProcessId
import scala.reflect.ClassTag
object ProcessConfig {
  // Partial function resolving the correlating EntityId from a domain event;
  // partial: it is only defined for events a given process cares about.
  type CorrelationIdResolver = PartialFunction[DomainEvent, EntityId]
}
// Configuration of a business process: ties the office identity (process id plus
// department) to the event-correlation logic. The office department falls back
// to the process domain when `process.department` is null (Option(...) guard).
abstract class ProcessConfig[E : ClassTag](val process: BusinessProcessId)
  extends LocalOfficeId[E](process.processId, Option(process.department).getOrElse(process.processDomain)) {
  /**
   * Correlation ID identifies process instance.
   */
  def correlationIdResolver: CorrelationIdResolver
}
| pawelkaczor/akka-ddd | akka-ddd-core/src/main/scala/pl/newicom/dddd/process/ProcessConfig.scala | Scala | mit | 677 |
//: ----------------------------------------------------------------------------
//: Copyright (C) 2016 Verizon. All Rights Reserved.
//:
//: Licensed under the Apache License, Version 2.0 (the "License");
//: you may not use this file except in compliance with the License.
//: You may obtain a copy of the License at
//:
//: http://www.apache.org/licenses/LICENSE-2.0
//:
//: Unless required by applicable law or agreed to in writing, software
//: distributed under the License is distributed on an "AS IS" BASIS,
//: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//: See the License for the specific language governing permissions and
//: limitations under the License.
//:
//: ----------------------------------------------------------------------------
package ark
package example
import org.apache.mesos.MesosSchedulerDriver
import org.http4s.EntityEncoder
import org.http4s.server.{HttpService, Router}
import org.http4s.argonaut._
import org.http4s.dsl._
import argonaut._
import Argonaut._
import scala.concurrent.ExecutionContext
import scala.language.postfixOps
import scalaz.concurrent.Task
import scalaz.stream.async
import scalaz.stream.async.mutable.Queue
object Service {
  // JSON codec + http4s entity encoder so SchedulerInfo can be returned as a body.
  implicit val infoEncoder: EntityEncoder[SchedulerInfo] = jsonEncoderOf[SchedulerInfo]
  implicit def infoJson: CodecJson[SchedulerInfo] = casecodec5(SchedulerInfo.apply, SchedulerInfo.unapply)(
    "mesosMaster", "frameworkId", "frameworkName", "reqcpu", "reqmem")
  // Wires up the HTTP service: returns the service plus the stream of
  // CustomMessages that the state manager must consume from the shared queue.
  def setup(driver: MesosSchedulerDriver) = {
    // Bounded to 100 in-flight messages; enqueueOne blocks callers beyond that.
    val inbound = async.boundedQueue[CustomMessage](100)(Scheduler.defaultExecutor)
    val stream = inbound.dequeue
    (service(inbound, driver), stream)
  }
  def service(inbound: Queue[CustomMessage], driver: MesosSchedulerDriver)(
    implicit executionContext: ExecutionContext = ExecutionContext.global): HttpService =
    Router("" -> rootService(inbound, driver))
  def rootService(inbound: Queue[CustomMessage], driver: MesosSchedulerDriver)(
    implicit executionContext: ExecutionContext) = HttpService {
    // Any method on the bare root path is rejected.
    case _ -> Root => MethodNotAllowed()
    case GET -> Root / "info" => {
      // When request comes we only block until message makes it to the queue
      // After that is just waiting for state manager to call callback function
      val res: Task[SchedulerInfo] = Task.async[SchedulerInfo](cb => inbound.enqueueOne(GetInfo(driver, cb)).run)
      Ok(res)
    }
    case POST -> Root / "blacklist" / slaveId => {
      // `.run` executes the enqueue eagerly; the 200 only acknowledges receipt,
      // not that the blacklist has been applied.
      inbound.enqueueOne(Blacklist(driver, slaveId)).run
      Ok(s"Requested Blacklist $slaveId")
    }
    case DELETE -> Root / "blacklist" / slaveId => {
      inbound.enqueueOne(Unblacklist(driver, slaveId)).run
      Ok(s"Requested Unblacklist $slaveId")
    }
  }
}
| oncue/mesos-scheduler | example/src/main/scala/Service.scala | Scala | apache-2.0 | 2,772 |
package vexriscv.demo
import spinal.core._
import spinal.lib._
import spinal.lib.bus.amba3.apb._
import spinal.lib.bus.misc.SizeMapping
import spinal.lib.com.jtag.Jtag
import spinal.lib.com.uart._
import spinal.lib.io.{InOutWrapper, TriStateArray}
import spinal.lib.misc.{InterruptCtrl, Prescaler, Timer}
import spinal.lib.soc.pinsec.{PinsecTimerCtrl, PinsecTimerCtrlExternal}
import vexriscv.plugin._
import vexriscv.{VexRiscv, VexRiscvConfig, plugin}
import scala.collection.mutable.ArrayBuffer
/**
* Created by PIC32F_USER on 28/07/2017.
*
* Murax is a very light SoC which could work without any external component.
* - ICE40-hx8k + icestorm => 53 Mhz, 2142 LC
* - 0.37 DMIPS/Mhz
* - 8 kB of on-chip ram
* - JTAG debugger (eclipse/GDB/openocd ready)
* - Interrupt support
* - APB bus for peripherals
* - 32 GPIO pin
* - one 16 bits prescaler, two 16 bits timers
* - one UART with tx/rx fifo
*/
/** Top-level Murax SoC parameters.
 *
 * @param coreFrequency     nominal clock frequency reported to the clock domain
 * @param onChipRamSize     size of the on-chip RAM
 * @param onChipRamHexFile  optional hex image used to initialise the RAM (null = none)
 * @param pipelineDBus      insert a half-pipe stage on the CPU data bus
 * @param pipelineMainBus   pipeline the main-bus decoder
 * @param pipelineApbBridge pipeline the main-bus-to-APB bridge
 * @param gpioWidth         number of GPIO pins
 * @param uartCtrlConfig    UART controller configuration
 * @param cpuPlugins        VexRiscv plugin list (DebugPlugin is appended by Murax itself)
 */
case class MuraxConfig(coreFrequency : HertzNumber,
                       onChipRamSize      : BigInt,
                       onChipRamHexFile   : String,
                       pipelineDBus       : Boolean,
                       pipelineMainBus    : Boolean,
                       pipelineApbBridge  : Boolean,
                       gpioWidth          : Int,
                       uartCtrlConfig     : UartCtrlMemoryMappedConfig,
                       cpuPlugins         : ArrayBuffer[Plugin[VexRiscv]]){
  // At least one pipelining stage is required between CPU and APB to avoid
  // combinational loops / wiped transactions.
  require(pipelineApbBridge || pipelineMainBus, "At least pipelineMainBus or pipelineApbBridge should be enable to avoid wipe transactions")
}
object MuraxConfig{
  // Baseline configuration: small area, no datapath bypass, NONE branch prediction.
  def default = MuraxConfig(
    coreFrequency = 50 MHz,
    onChipRamSize = 256 kB,
    onChipRamHexFile = null,
    pipelineDBus = true,
    pipelineMainBus = false,
    pipelineApbBridge = true,
    gpioWidth = 32,
    cpuPlugins = ArrayBuffer( //DebugPlugin added by the toplevel
      new IBusSimplePlugin(
        resetVector = 0x80000000l,
        relaxedPcCalculation = true,
        prediction = NONE,
        catchAccessFault = false,
        compressedGen = true
      ),
      new DBusSimplePlugin(
        catchAddressMisaligned = false,
        catchAccessFault = false,
        earlyInjection = false
      ),
      new CsrPlugin(CsrPluginConfig.all(mtvecInit = 0x80000020l)),
      new DecoderSimplePlugin(
        catchIllegalInstruction = false
      ),
      new RegFilePlugin(
        regFileReadyKind = plugin.SYNC,
        zeroBoot = false
      ),
      new IntAluPlugin,
      new SrcPlugin(
        separatedAddSub = false,
        executeInsertion = false
      ),
      new LightShifterPlugin,
      new HazardSimplePlugin(
        bypassExecute = false,
        bypassMemory = false,
        bypassWriteBack = false,
        bypassWriteBackBuffer = false,
        pessimisticUseSrc = false,
        pessimisticWriteRegFile = false,
        pessimisticAddressMatch = false
      ),
      new MulPlugin,
      new DivPlugin,
      new BranchPlugin(
        earlyBranch = false,
        catchAddressMisaligned = false
      ),
      new YamlPlugin("cpu0.yaml")
    ),
    uartCtrlConfig = UartCtrlMemoryMappedConfig(
      uartCtrlConfig = UartCtrlGenerics(
        dataWidthMax = 8,
        clockDividerWidth = 20,
        preSamplingSize = 1,
        samplingSize = 3,
        postSamplingSize = 1
      ),
      initConfig = UartCtrlInitConfig(
        baudrate = 115200,
        dataLength = 7, //7 => 8 bits
        parity = UartParityType.NONE,
        stop = UartStopType.ONE
      ),
      busCanWriteClockDividerConfig = false,
      busCanWriteFrameConfig = false,
      txFifoDepth = 16,
      rxFifoDepth = 16
    )
  )
  // Like `default` but with full hazard bypassing for higher IPC.
  // NOTE: mutates the plugin list of the freshly built config in place.
  def fast = {
    val config = default
    //Replace HazardSimplePlugin to get datapath bypass
    config.cpuPlugins(config.cpuPlugins.indexWhere(_.isInstanceOf[HazardSimplePlugin])) = new HazardSimplePlugin(
      bypassExecute = true,
      bypassMemory = true,
      bypassWriteBack = true,
      bypassWriteBackBuffer = true
    )
    //    config.cpuPlugins(config.cpuPlugins.indexWhere(_.isInstanceOf[LightShifterPlugin])) = new FullBarrelShifterPlugin()
    config
  }
}
case class Murax(config : MuraxConfig) extends Component{
  import config._
  val io = new Bundle {
    //Clocks / reset
    val asyncReset = in Bool
    val mainClk = in Bool
    //Main components IO
    val jtag = slave(Jtag())
    //Peripherals IO
    val gpioA = master(TriStateArray(gpioWidth bits))
    val uart = master(Uart())
  }
  // The reset controller runs with a BOOT-kind reset so it can generate the
  // system/debug resets itself from the raw clock.
  val resetCtrlClockDomain = ClockDomain(
    clock = io.mainClk,
    config = ClockDomainConfig(
      resetKind = BOOT
    )
  )
  val resetCtrl = new ClockingArea(resetCtrlClockDomain) {
    val mainClkResetUnbuffered = False
    //Implement an counter to keep the reset high 64 cycles
    // Also this counter will automatically do a reset when the system boot.
    val systemClkResetCounter = Reg(UInt(6 bits)) init(0)
    // Count up until all bits are set; reset stays asserted meanwhile.
    when(systemClkResetCounter =/= U(systemClkResetCounter.range -> true)){
      systemClkResetCounter := systemClkResetCounter + 1
      mainClkResetUnbuffered := True
    }
    // Asynchronous external reset, synchronised, restarts the counter.
    when(BufferCC(io.asyncReset)){
      systemClkResetCounter := 0
    }
    //Create all reset used later in the design
    val mainClkReset = RegNext(mainClkResetUnbuffered)
    val systemReset = RegNext(mainClkResetUnbuffered)
  }
  val systemClockDomain = ClockDomain(
    clock = io.mainClk,
    reset = resetCtrl.systemReset,
    frequency = FixedFrequency(coreFrequency)
  )
  // Debug logic uses mainClkReset (not systemReset), so a debugger-requested
  // system reset does not reset the debug plugin itself.
  val debugClockDomain = ClockDomain(
    clock = io.mainClk,
    reset = resetCtrl.mainClkReset,
    frequency = FixedFrequency(coreFrequency)
  )
  val system = new ClockingArea(systemClockDomain) {
    val simpleBusConfig = SimpleBusConfig(
      addressWidth = 32,
      dataWidth = 32
    )
    //Arbiter of the cpu dBus/iBus to drive the mainBus
    //Priority to dBus, !! cmd transactions can change on the fly !!
    val mainBusArbiter = new MuraxMasterArbiter(simpleBusConfig)
    //Instanciate the CPU
    val cpu = new VexRiscv(
      config = VexRiscvConfig(
        plugins = cpuPlugins += new DebugPlugin(debugClockDomain)
      )
    )
    //Checkout plugins used to instanciate the CPU to connect them to the SoC
    val timerInterrupt = False
    val externalInterrupt = False
    for(plugin <- cpu.plugins) plugin match{
      case plugin : IBusSimplePlugin => mainBusArbiter.io.iBus <> plugin.iBus
      case plugin : DBusSimplePlugin => {
        if(!pipelineDBus)
          mainBusArbiter.io.dBus <> plugin.dBus
        else {
          // Half-pipe stage decouples the dBus command path.
          mainBusArbiter.io.dBus.cmd << plugin.dBus.cmd.halfPipe()
          mainBusArbiter.io.dBus.rsp <> plugin.dBus.rsp
        }
      }
      case plugin : CsrPlugin => {
        plugin.externalInterrupt := externalInterrupt
        plugin.timerInterrupt := timerInterrupt
      }
      // The debugger can also drive the system reset via resetOut.
      case plugin : DebugPlugin => plugin.debugClockDomain{
        resetCtrl.systemReset setWhen(RegNext(plugin.io.resetOut))
        io.jtag <> plugin.io.bus.fromJtag()
      }
      case _ =>
    }
    //****** MainBus slaves ********
    val ram = new MuraxSimpleBusRam(
      onChipRamSize = onChipRamSize,
      onChipRamHexFile = onChipRamHexFile,
      simpleBusConfig = simpleBusConfig
    )
    val apbBridge = new MuraxSimpleBusToApbBridge(
      apb3Config = Apb3Config(
        addressWidth = 20,
        dataWidth = 32
      ),
      pipelineBridge = pipelineApbBridge,
      simpleBusConfig = simpleBusConfig
    )
    //******** APB peripherals *********
    val gpioACtrl = Apb3Gpio(gpioWidth = gpioWidth)
    io.gpioA <> gpioACtrl.io.gpio
    val uartCtrl = Apb3UartCtrl(uartCtrlConfig)
    uartCtrl.io.uart <> io.uart
    externalInterrupt setWhen(uartCtrl.io.interrupt)
    val timer = new MuraxApb3Timer()
    timerInterrupt setWhen(timer.io.interrupt)
    //******** Memory mappings *********
    val apbDecoder = Apb3Decoder(
      master = apbBridge.io.apb,
      slaves = List[(Apb3, SizeMapping)](
        gpioACtrl.io.apb -> (0x00000, 4 kB),
        uartCtrl.io.apb -> (0x10000, 4 kB),
        timer.io.apb -> (0x20000, 4 kB)
      )
    )
    val mainBusDecoder = new Area {
      val logic = new MuraxSimpleBusDecoder(
        master = mainBusArbiter.io.masterBus,
        specification = List[(SimpleBus,SizeMapping)](
          ram.io.bus -> (0x80000000l, onChipRamSize),
          apbBridge.io.simpleBus -> (0xF0000000l, 1 MB)
        ),
        pipelineMaster = pipelineMainBus
      )
    }
  }
}
object Murax{
  /** Command-line entry point: emits Verilog for a default-configured Murax SoC. */
  def main(args: Array[String]): Unit = {
    val config = MuraxConfig.default
    // The component must be constructed inside the elaboration call.
    SpinalVerilog(Murax(config))
  }
}
object MuraxDhrystoneReady{
  /** Emits Verilog for a Murax tuned for Dhrystone: fast config, 256 kB RAM. */
  def main(args: Array[String]): Unit = {
    val config = MuraxConfig.fast.copy(onChipRamSize = 256 kB)
    SpinalVerilog(Murax(config))
  }
}
object MuraxDhrystoneReadyMulDivStatic{
  /** Emits Verilog for a Murax with hardware mul/div, STATIC branch prediction
   *  and a full barrel shifter.
   *
   *  MuraxConfig.default (and therefore MuraxConfig.fast) already registers
   *  MulPlugin, DivPlugin and an IBusSimplePlugin, so each plugin is removed
   *  before its replacement is appended — registering two instances of the
   *  same plugin on one VexRiscv would clash at elaboration. The previous
   *  version appended the Mul/Div/IBus plugins without removing the existing
   *  ones (it only did so for BranchPlugin and LightShifterPlugin).
   */
  def main(args: Array[String]) {
    SpinalVerilog({
      val config = MuraxConfig.fast.copy(onChipRamSize = 256 kB)
      //val config = MuraxConfig.fast.copy(onChipRamSize = 192 kB, onChipRamHexFile = "src/main/ressource/hex/muraxDemo.hex")
      // Remove the first plugin matching `matches` (if any), then append `replacement`.
      def replacePlugin(matches: Plugin[VexRiscv] => Boolean, replacement: Plugin[VexRiscv]): Unit = {
        val existing = config.cpuPlugins.indexWhere(matches)
        if (existing >= 0) config.cpuPlugins.remove(existing)
        config.cpuPlugins += replacement
      }
      replacePlugin(_.isInstanceOf[MulPlugin], new MulPlugin)
      replacePlugin(_.isInstanceOf[DivPlugin], new DivPlugin)
      replacePlugin(_.isInstanceOf[BranchPlugin], new BranchPlugin(
        earlyBranch = false,
        catchAddressMisaligned = false
      ))
      // STATIC branch prediction instead of the default NONE; no compressed ISA.
      replacePlugin(_.isInstanceOf[IBusSimplePlugin], new IBusSimplePlugin(
        resetVector = 0x80000000l,
        relaxedPcCalculation = true,
        prediction = STATIC,
        catchAccessFault = false,
        compressedGen = false
      ))
      // Single-cycle barrel shifter in place of the iterative light shifter.
      replacePlugin(_.isInstanceOf[LightShifterPlugin], new FullBarrelShifterPlugin)
      Murax(config)
    })
  }
}
//Will blink led and echo UART RX to UART TX (in the verilator sim, type some text and press enter to send UART frame to the Murax RX pin)
object MuraxWithRamInit{
  /** Emits Verilog for a Murax whose on-chip RAM is preloaded with the demo image. */
  def main(args: Array[String]): Unit = {
    val config = MuraxConfig.default.copy(
      onChipRamSize = 192 kB,
      onChipRamHexFile = "src/main/ressource/hex/muraxDemo.hex"
    )
    SpinalVerilog(Murax(config))
  }
}
| windelbouwman/ppci-mirror | examples/riscvmurax/Murax.scala | Scala | bsd-2-clause | 10,120 |
package cromwell.services.keyvalue.impl
import akka.actor.ActorSystem
import cromwell.core.ExecutionIndex._
import cromwell.core.WorkflowId
import cromwell.database.sql.tables.JobKeyValueEntry
import cromwell.services.EngineServicesStore
import cromwell.services.keyvalue.KeyValueServiceActor.KvJobKey
import cromwell.util.DatabaseUtil._
import scala.concurrent.{ExecutionContext, Future}
trait BackendKeyValueDatabaseAccess {
  // Look up the value stored for `key`, scoped to a specific workflow, call
  // FQN, shard index and attempt. None if no entry exists.
  def getBackendValueByKey(workflowId: WorkflowId, jobKey: KvJobKey, key: String)
                          (implicit ec: ExecutionContext): Future[Option[String]] = {
    EngineServicesStore.engineDatabaseInterface.queryStoreValue(
      workflowId.toString, jobKey.callFqn, jobKey.callIndex.fromIndex, jobKey.callAttempt, key)
  }
  // Persist a key/value pair for the given workflow/job scope. The write is
  // wrapped in withRetry, hence the ActorSystem requirement (retry scheduling).
  def updateBackendKeyValuePair(workflowId: WorkflowId,
                                jobKey: KvJobKey,
                                backendStoreKey: String,
                                backendStoreValue: String)(implicit ec: ExecutionContext, actorSystem: ActorSystem): Future[Unit] = {
    val jobKeyValueEntry = JobKeyValueEntry(workflowId.toString, jobKey.callFqn, jobKey.callIndex.fromIndex,
      jobKey.callAttempt, backendStoreKey, backendStoreValue)
    withRetry(() => EngineServicesStore.engineDatabaseInterface.addJobKeyValueEntry(jobKeyValueEntry))
  }
}
| ohsu-comp-bio/cromwell | services/src/main/scala/cromwell/services/keyvalue/impl/BackendKeyValueDatabaseAccess.scala | Scala | bsd-3-clause | 1,343 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.kryo
import com.esotericsoftware.kryo.io.Input
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.features.SerializationOption.SerializationOption
import org.locationtech.geomesa.features.kryo.impl.{KryoFeatureDeserialization, KryoFeatureSerialization}
import org.locationtech.geomesa.features.kryo.serialization.KryoUserDataSerialization
import org.locationtech.geomesa.utils.cache.CacheKeyGenerator
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
* @param original the simple feature type that was encoded
* @param projected the simple feature type to project to when decoding
* @param options the options what were applied when encoding
*/
class ProjectingKryoFeatureDeserializer(original: SimpleFeatureType,
                                        projected: SimpleFeatureType,
                                        val options: Set[SerializationOption] = Set.empty)
    extends KryoFeatureSerialization {

  override private [kryo] def serializeSft = original

  // Number of attributes in the projected (target) type.
  private val numProjectedAttributes = projected.getAttributeCount
  // Per-projected-attribute byte offset into the current buffer; -1 = absent.
  // NOTE(review): this is shared, per-call mutable instance state — the
  // deserializer does not look thread-safe; confirm single-threaded use.
  private val offsets = Array.fill[Int](numProjectedAttributes)(-1)
  // Attribute readers, re-ordered to projected attribute positions.
  private val readersInOrder = Array.ofDim[(Input) => AnyRef](numProjectedAttributes)
  // Maps original attribute index -> projected index, or -1 if dropped.
  private val indices = Array.ofDim[Int](original.getAttributeCount)

  setup()

  // Builds the index mapping and reader table from the two feature types.
  private def setup(): Unit = {
    val originalReaders = KryoFeatureDeserialization.getReaders(CacheKeyGenerator.cacheKey(original), original)
    var i = 0
    while (i < indices.length) {
      val index = projected.indexOf(original.getDescriptor(i).getLocalName)
      indices(i) = index
      if (index != -1) {
        readersInOrder(index) = originalReaders(i)
      }
      i += 1
    }
  }

  override def deserialize(bytes: Array[Byte]): SimpleFeature = deserialize(bytes, 0, bytes.length)

  // Decodes only the projected attributes: reads the offset table, then jumps
  // straight to each kept attribute instead of reading the whole feature.
  override def deserialize(bytes: Array[Byte], offset: Int, length: Int): SimpleFeature = {
    val input = KryoFeatureDeserialization.getInput(bytes, offset, length)
    if (input.readInt(true) != KryoFeatureSerializer.VERSION) {
      throw new IllegalArgumentException("Can't process features serialized with an older version")
    }
    val attributes = Array.ofDim[AnyRef](numProjectedAttributes)
    // read in the offsets
    val offsetStart = offset + input.readInt()
    val id = input.readString()
    input.setPosition(offsetStart)
    var i = 0
    while (i < indices.length) {
      // Offsets are varints relative to `offset`; missing entries become -1.
      val iOffset = if (input.position < input.limit) { offset + input.readInt(true) } else { -1 }
      val index = indices(i)
      if (index != -1) {
        offsets(index) = iOffset
      }
      i += 1
    }
    // read in the values
    i = 0
    while (i < numProjectedAttributes) {
      val offset = offsets(i)
      if (offset != -1) {
        input.setPosition(offset)
        attributes(i) = readersInOrder(i)(input)
      }
      i += 1
    }
    val sf = new ScalaSimpleFeature(projected, id, attributes)
    if (options.withUserData) {
      // skip offset data
      input.setPosition(offsetStart)
      var i = 0
      while (i < original.getAttributeCount) {
        input.readInt(true)
        i += 1
      }
      // User data is serialized after the offset table.
      val ud = KryoUserDataSerialization.deserialize(input)
      sf.getUserData.putAll(ud)
      sf
    }
    sf
  }
}
| ronq/geomesa | geomesa-features/geomesa-feature-kryo/src/main/scala/org/locationtech/geomesa/features/kryo/ProjectingKryoFeatureDeserializer.scala | Scala | apache-2.0 | 3,792 |
package io.udash.web.guide.demos.rpc
import com.avsystem.commons.serialization.{GenCodec, HasGenCodec, Input, Output}
import io.udash.rpc._
import scala.concurrent.Future
/**
 * Companion object declaring the payload types (and their `GenCodec` instances)
 * used by the demo RPC interface below.
 */
object GenCodecServerRPC extends DefaultServerRpcCompanion[GenCodecServerRPC] {

  /** Simple immutable payload; its codec is derived automatically via `HasGenCodec`. */
  case class DemoCaseClass(i: Int, s: String, intAsDouble: Double)
  object DemoCaseClass extends HasGenCodec[DemoCaseClass]

  /** Sealed ADT with a materialized codec, so the compiler checks case coverage. */
  sealed trait Fruit
  object Fruit {
    case object Apple extends Fruit
    case object Orange extends Fruit
    case object Banana extends Fruit

    implicit val genCodec: GenCodec[Fruit] = GenCodec.materialize
  }

  /** Non-case class with mutable internal state, requiring a hand-written codec. */
  class DemoClass(val i: Int, val s: String) {
    var _v: Int = 5
  }

  object DemoClass {
    // Explicit type annotation added: implicit definitions should always declare
    // their type, both for readability and for stable implicit resolution.
    implicit val DemoClassCodec: GenCodec[DemoClass] = new GenCodec[DemoClass] {
      // Reads the three fields in the same order `write` emits them: i, s, _v.
      override def read(input: Input): DemoClass = {
        val list = input.readList()
        val i = list.nextElement().readSimple().readInt()
        val s = list.nextElement().readSimple().readString()
        val _v = list.nextElement().readSimple().readInt()
        val demo = new DemoClass(i, s)
        demo._v = _v
        demo
      }

      override def write(output: Output, value: DemoClass): Unit = {
        val values = output.writeList()
        values.writeElement().writeSimple().writeInt(value.i)
        values.writeElement().writeSimple().writeString(value.s)
        values.writeElement().writeSimple().writeInt(value._v)
        values.finish()
      }
    }
  }
}
/**
 * Demo server-side RPC interface exercising `GenCodec` serialization of
 * primitives, collections, case classes, plain classes and sealed traits.
 * Each method accepts a value of the given type and returns a value of the
 * same type asynchronously (presumably echoing it back - the implementation
 * is not visible here).
 */
trait GenCodecServerRPC {
  import GenCodecServerRPC._

  def sendInt(el: Int): Future[Int]
  def sendDouble(el: Double): Future[Double]
  def sendString(el: String): Future[String]
  def sendSeq(el: Seq[String]): Future[Seq[String]]
  def sendMap(el: Map[String, Int]): Future[Map[String, Int]]
  def sendCaseClass(el: DemoCaseClass): Future[DemoCaseClass]
  def sendClass(el: DemoClass): Future[DemoClass]
  def sendSealedTrait(el: Fruit): Future[Fruit]
}
| UdashFramework/udash-guide | shared/src/main/scala/io/udash/web/guide/demos/rpc/GenCodecServerRPC.scala | Scala | gpl-3.0 | 1,904 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Tue Sep 13 17:37:39 EDT 2016
* @see LICENSE (MIT style license file).
*/
package scalation.analytics.classifier
import scala.math.log
import scalation.linalgebra.{MatrixD, VectoD, VectorD, VectoI, VectorI}
import scalation.random.ProbabilityVec
// U N D E R D E V E L O P M E N T
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `DynBayesNetwork` class provides Dynamic Bayesian Network (DBN) models.
*/
class DynBayesNetwork
      extends Classifier
{
    // All members below are unimplemented stubs (???): calling any of them
    // throws scala.NotImplementedError.  See the "UNDER DEVELOPMENT" note above.
    def train (testStart: Int, testEnd: Int): Unit = ???
    def classify (z: VectoD): (Int, String, Double) = ???
    def classify (z: VectoI): (Int, String, Double) = ???
    def reset () {}
    def size: Int = ???
    def test (testStart: Int, testEnd: Int): Double = ???
} // DynBayesNetwork class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `DynBayesNetworkTest` object is used to test the `DynBayesNetwork` class.
* > run-main scalation.analytics.classifier.DynBayesNetworkTest
*/
object DynBayesNetworkTest extends App
{
    // Placeholder only: the classifier itself is not yet implemented.
    println ("run DynBayesNetworkTest")
} // DynBayesNetworkTest object
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/analytics/classifier/DynBayesNetwork.scala | Scala | mit | 1,292 |
package com.bstek.designer.core.palette
import java.awt.event.{ActionEvent, ActionListener, KeyEvent}
import java.awt._
import javax.swing._
import com.intellij.designer.palette.PaletteItemsComponent
import com.intellij.icons.AllIcons
import com.intellij.ui.Gray
import com.intellij.util.ui.UIUtil
/**
* Created by robin on 14-7-22.
*/
/**
 * Header checkbox for a palette group. Toggling the checkbox shows or hides the
 * associated [[DoradoPaletteItemsComponent]]; arrow keys move focus between
 * groups/lists and expand/collapse the group.
 * Created by robin on 14-7-22.
 */
class DoradoPaletteGroupComponent(group: DoradoPaletteGroup) extends JCheckBox {
  // The items list this header controls; wired up after construction via setItemsComponent.
  private var myItemsComponent: DoradoPaletteItemsComponent = null

  setText(group.getName)
  setSelected(true)
  setIcon(AllIcons.Nodes.TreeClosed)
  setSelectedIcon(AllIcons.Modules.SourceRoot)
  setFont(getFont.deriveFont(Font.BOLD))
  setFocusPainted(false)
  setMargin(new Insets(0, 3, 0, 3))
  setOpaque(true)

  addActionListener(new ActionListener {
    def actionPerformed(e: ActionEvent): Unit = {
      // show/hide the group's item list when the checkbox is toggled
      myItemsComponent.setVisible(isSelected)
    }
  })

  initActions()

  /** Selection background while focused; darker gray under the Darcula theme. */
  override def getBackground: Color = {
    if (isFocusOwner) UIUtil.getListSelectionBackground
    else if (UIUtil.isUnderDarcula) Gray._100
    else super.getBackground
  }

  /** Selection foreground while focused. */
  override def getForeground: Color = {
    if (isFocusOwner) UIUtil.getListSelectionForeground
    else super.getForeground
  }

  def getItemsComponent: DoradoPaletteItemsComponent = myItemsComponent

  def setItemsComponent(itemsComponent: DoradoPaletteItemsComponent): Unit = {
    myItemsComponent = itemsComponent
  }

  /** Binds arrow keys: up/down move focus between components, left/right collapse/expand. */
  private def initActions(): Unit = {
    val inputMap: InputMap = getInputMap(JComponent.WHEN_FOCUSED)
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_DOWN, 0, false), "moveFocusDown")
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_UP, 0, false), "moveFocusUp")
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0, false), "collapse")
    inputMap.put(KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0, false), "expand")
    val actionMap: ActionMap = getActionMap
    actionMap.put("moveFocusDown", new MoveFocusAction(true))
    actionMap.put("moveFocusUp", new MoveFocusAction(false))
    actionMap.put("collapse", new ExpandAction(false))
    actionMap.put("expand", new ExpandAction(true))
  }

  /**
   * Moves keyboard focus to the previous/next component in the focus cycle,
   * preferring a non-empty items list, otherwise the next group header.
   */
  private class MoveFocusAction(moveDown: Boolean) extends AbstractAction {
    def actionPerformed(e: ActionEvent): Unit = {
      val kfm: KeyboardFocusManager = KeyboardFocusManager.getCurrentKeyboardFocusManager
      val container: Container = kfm.getCurrentFocusCycleRoot
      var policy: FocusTraversalPolicy = container.getFocusTraversalPolicy
      if (policy == null) {
        policy = kfm.getDefaultFocusTraversalPolicy
      }
      var next: Component = if (moveDown) policy.getComponentAfter(container, DoradoPaletteGroupComponent.this) else policy.getComponentBefore(container, DoradoPaletteGroupComponent.this)
      if (next.isInstanceOf[DoradoPaletteItemsComponent]) {
        val list: DoradoPaletteItemsComponent = next.asInstanceOf[DoradoPaletteItemsComponent]
        if (list.getModel.getSize != 0) {
          // hand focus to the list: index 0 when entering our own list, last item otherwise
          list.takeFocusFrom(if (list eq myItemsComponent) 0 else -1)
          return
        }
        else {
          // empty list: skip over it to the following component
          next = if (moveDown) policy.getComponentAfter(container, next) else policy.getComponentBefore(container, next)
        }
      }
      if (next.isInstanceOf[DoradoPaletteGroupComponent]) {
        next.requestFocus
      }
    }
    // removed: dead `private final val myMoveDown: Boolean = false`, which was never read
  }

  /** Expands or collapses the group, keeping checkbox state and list visibility in sync. */
  private class ExpandAction(expand: Boolean) extends AbstractAction {
    def actionPerformed(e: ActionEvent): Unit = {
      if (expand != isSelected) {
        setSelected(expand)
        if (myItemsComponent != null) {
          myItemsComponent.setVisible(isSelected)
        }
      }
    }
  }
}
| OuYuBin/IDEADorado | dorado-core/src/com/bstek/designer/core/palette/DoradoPaletteGroupComponent.scala | Scala | apache-2.0 | 3,691 |
package play.api.test
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
/** Unit tests for the `inMemoryDatabase` test helper. */
class HelpersSpec extends Specification {

  "inMemoryDatabase" should {

    "change database with a name argument" in {
      // the given name must be reflected in the generated db.* configuration keys
      val config = inMemoryDatabase("test")
      config.get("db.test.driver") must beSome("org.h2.Driver")
      config.get("db.test.url") must beSome.which { url =>
        url.startsWith("jdbc:h2:mem:play-test-")
      }
    }

    "add options" in {
      // extra options must be appended to the JDBC url as ;KEY=VALUE pairs
      val config = inMemoryDatabase("test", Map("MODE" -> "PostgreSQL", "DB_CLOSE_DELAY" -> "-1"))
      config.get("db.test.driver") must beSome("org.h2.Driver")
      config.get("db.test.url") must beSome.which { url =>
        """^jdbc:h2:mem:play-test([0-9-]+);MODE=PostgreSQL;DB_CLOSE_DELAY=-1$""".r.findFirstIn(url).isDefined
      }
    }
  }
}
| noel-yap/setter-for-catan | play-2.1.1/framework/src/play-test/src/test/scala/play/api/test/HelpersSpec.scala | Scala | apache-2.0 | 973 |
package jsky.app.ot.gemini.editor
import java.io.File
import jsky.util.Preferences
/**
*
*/
/**
 * Helpers for the program aux-file editor: the upload size limit and the
 * persisted "last used directory" preference.
 */
package object auxfile {
  val DirectoryPref = "EdProgramAuxFile.Directory"
  val MaxFileSizeMb = 250
  val MaxFileSize = MaxFileSizeMb * 1024 * 1024

  /** Last directory used for aux-file selection, if one has been recorded. */
  def dirPreference: Option[File] =
    Option(Preferences.getPreferences.getPreference(DirectoryPref)).map(new File(_))

  /**
   * Records the last directory used for aux-file selection; `None` clears the
   * stored preference (the underlying API treats a null value as removal).
   * (Deprecated procedure syntax replaced with an explicit `: Unit =`.)
   */
  def dirPreference_=(dir: Option[File]): Unit =
    Preferences.getPreferences.setPreference(DirectoryPref, dir.map(_.getAbsolutePath).orNull)
}
| spakzad/ocs | bundle/jsky.app.ot/src/main/scala/jsky/app/ot/gemini/editor/auxfile/package.scala | Scala | bsd-3-clause | 515 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.File
import org.apache.spark.sql.catalyst.util._
/**
* A framework for running the query tests that are listed as a set of text files.
*
* TestSuites that derive from this class must provide a map of testCaseName -> testCaseFiles
* that should be included. Additionally, there is support for whitelisting and blacklisting
* tests as development progresses.
*/
/**
 * Registers one ScalaTest case per Hive query file, honouring the black/white
 * lists and the `spark.hive.*` system-property overrides.
 */
abstract class HiveQueryFileTest extends HiveComparisonTest {
  /** A list of tests deemed out of scope and thus completely disregarded */
  def blackList: Seq[String] = Nil

  /**
   * The set of tests that are believed to be working in catalyst. Tests not in whiteList
   * blacklist are implicitly marked as ignored.
   */
  def whiteList: Seq[String] = ".*" :: Nil

  def testCases: Seq[(String, File)]

  val runAll =
    !(System.getProperty("spark.hive.alltests") == null) ||
    runOnlyDirectories.nonEmpty ||
    skipDirectories.nonEmpty

  val whiteListProperty = "spark.hive.whitelist"
  // Allow the whiteList to be overridden by a system property
  val realWhiteList =
    Option(System.getProperty(whiteListProperty)).map(_.split(",").toSeq).getOrElse(whiteList)

  /** True when any regex in `patterns` matches the whole test-case name. */
  private def matchesAny(patterns: Seq[String], testCaseName: String): Boolean =
    patterns.exists(_.r.pattern.matcher(testCaseName).matches())

  // Go through all the test cases and add them to scala test.
  // Bug fix: the original used `map(...).reduceLeft(_ || _)`, which throws
  // UnsupportedOperationException on an empty pattern list (e.g. the default
  // empty blackList); `exists` returns false for an empty sequence instead.
  testCases.sorted.foreach {
    case (testCaseName, testCaseFile) =>
      if (matchesAny(blackList, testCaseName)) {
        logDebug(s"Blacklisted test skipped $testCaseName")
      } else if (matchesAny(realWhiteList, testCaseName) || runAll) {
        // Build a test case and submit it to scala test framework...
        val queriesString = fileToString(testCaseFile)
        createQueryTest(testCaseName, queriesString, reset = true, tryWithoutResettingFirst = true)
      } else {
        // Only output warnings for the built in whitelist as this clutters the output when the user
        // trying to execute a single test from the commandline.
        if (System.getProperty(whiteListProperty) == null && !runAll) {
          ignore(testCaseName) {}
        }
      }
  }
}
| chenc10/Spark-PAF | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala | Scala | apache-2.0 | 2,944 |
package gapt.proofs
import gapt.expr.VarOrConst
import gapt.expr._
import gapt.expr.formula.hol.HOLAtomConst
import gapt.expr.subst.Substitution
import gapt.expr.util.freeVariables
import gapt.expr.util.rename
import gapt.proofs.context.update.Definition
import scala.collection.mutable
package object resolution {
  /** Makes avatar (splitting) definitions closed under term replacement. */
  implicit object avatarComponentsAreReplaceable extends ClosedUnderReplacement[AvatarDefinition] {
    // Applies `repl` to every expression inside the component, preserving its shape.
    def replace( component: AvatarDefinition, repl: PartialFunction[Expr, Expr] ): AvatarDefinition = component match {
      case AvatarGroundComp( atom, pol ) => AvatarGroundComp( TermReplacement( atom, repl ), pol )
      case AvatarNonGroundComp( atom, defn, vars ) => AvatarNonGroundComp( TermReplacement( atom, repl ), TermReplacement( defn, repl ), vars )
      case AvatarNegNonGroundComp( atom, defn, vars, idx ) =>
        AvatarNegNonGroundComp( TermReplacement( atom, repl ), TermReplacement( defn, repl ), vars, idx )
    }

    // Collects all constant/variable names occurring anywhere in the component.
    def names( component: AvatarDefinition ) = component match {
      case AvatarGroundComp( atom, _ ) => containedNames( atom )
      case AvatarNonGroundComp( atom, defn, vars ) =>
        containedNames( atom ) ++ containedNames( defn ) ++ containedNames( vars )
      case AvatarNegNonGroundComp( atom, defn, vars, _ ) =>
        containedNames( atom ) ++ containedNames( defn ) ++ containedNames( vars )
    }
  }
  /**
   * Makes resolution proofs closed under term replacement: `replace` rebuilds
   * the proof bottom-up with `repl` applied to every contained expression,
   * and `names` collects every constant/variable name occurring in a proof.
   */
  implicit object resolutionProofsAreReplaceable extends ClosedUnderReplacement[ResolutionProof] {
    def replace( proof: ResolutionProof, repl: PartialFunction[Expr, Expr] ): ResolutionProof = {
      // memoize on sub-proof identity so shared sub-proofs are rewritten only once
      val memo = mutable.Map[ResolutionProof, ResolutionProof]()

      def f( p: ResolutionProof ): ResolutionProof = memo.getOrElseUpdate( p, p match {
        case Input( sequent ) => Input( TermReplacement( sequent, repl ) map BetaReduction.betaNormalize )
        case Refl( term ) => Refl( BetaReduction betaNormalize TermReplacement( term, repl ) )
        case Taut( formula ) => Taut( BetaReduction betaNormalize TermReplacement( formula, repl ) )
        case Defn( defConst, definition ) => Defn( TermReplacement( defConst, repl ).asInstanceOf[HOLAtomConst], TermReplacement( definition, repl ) )
        case Factor( q, i1, i2 ) => Factor( f( q ), i1, i2 )
        case Subst( q, subst ) => Subst( f( q ), TermReplacement( subst, repl ) )
        case Resolution( q1, l1, q2, l2 ) => Resolution( f( q1 ), l1, f( q2 ), l2 )
        case Paramod( q1, l1, dir, q2, l2, con ) =>
          val q1New = f( q1 )
          val q2New = f( q2 )
          val ( equation, auxFormula ) = ( q1New.conclusion( l1 ), q2New.conclusion( l2 ) )
          // rename the context's bound variable away from the free variables of
          // the rewritten premises before applying the replacement
          val Abs( v, subContext ) = con
          val v_ = rename( v, freeVariables( equation ) ++ freeVariables( auxFormula ) )
          val contextNew = BetaReduction betaNormalize TermReplacement( Abs( v_, Substitution( v, v_ )( subContext ) ), repl )
          Paramod( q1New, l1, dir, q2New, l2, contextNew )
        case AvatarSplit( q, indices, component ) =>
          AvatarSplit( f( q ), indices, TermReplacement( component, repl ) )
        case AvatarContradiction( q ) => AvatarContradiction( f( q ) )
        case AvatarComponent( component ) => AvatarComponent( TermReplacement( component, repl ) )
        case p @ DefIntro( q, i, definition, args ) =>
          val Definition( what, by ) = definition
          val definitionNew = Definition( TermReplacement( what, repl ).asInstanceOf[Const], TermReplacement( by, repl ) )
          val argsNew = TermReplacement( args, repl )
          DefIntro( f( q ), i, definitionNew, argsNew )
        // structural rules: rewrite the sub-proof, keep the indices unchanged
        case Flip( q, i ) => Flip( f( q ), i )
        case TopL( q, i ) => TopL( f( q ), i )
        case BottomR( q, i ) => BottomR( f( q ), i )
        case NegL( q, i ) => NegL( f( q ), i )
        case NegR( q, i ) => NegR( f( q ), i )
        case AndL( q, i ) => AndL( f( q ), i )
        case OrR( q, i ) => OrR( f( q ), i )
        case ImpR( q, i ) => ImpR( f( q ), i )
        case AndR1( q, i ) => AndR1( f( q ), i )
        case OrL1( q, i ) => OrL1( f( q ), i )
        case ImpL1( q, i ) => ImpL1( f( q ), i )
        case AndR2( q, i ) => AndR2( f( q ), i )
        case OrL2( q, i ) => OrL2( f( q ), i )
        case ImpL2( q, i ) => ImpL2( f( q ), i )
        case AllL( q, i, skTerm ) => AllL( f( q ), i, TermReplacement( skTerm, repl ) )
        case ExR( q, i, skTerm ) => ExR( f( q ), i, TermReplacement( skTerm, repl ) )
        case AllR( q, i, v ) => AllR( f( q ), i, TermReplacement( v, repl ).asInstanceOf[Var] )
        case ExL( q, i, v ) => ExL( f( q ), i, TermReplacement( v, repl ).asInstanceOf[Var] )
      } )

      f( proof )
    }

    // Gathers names from every sub-proof's conclusion/assertions plus any
    // rule-specific payload (substitutions, definitions, Skolem terms, ...).
    def names( proof: ResolutionProof ) = {
      val ns = Set.newBuilder[VarOrConst]
      for ( p <- proof.subProofs ) {
        ns ++= containedNames( p.conclusion )
        ns ++= containedNames( p.assertions )
        p match {
          case AvatarComponent( comp ) =>
            ns ++= containedNames( comp )
          case AvatarSplit( _, _, comp ) =>
            ns ++= containedNames( comp )
          case Subst( _, subst ) =>
            ns ++= containedNames( subst )
          case DefIntro( _, _, definition, repContext ) =>
            val Definition( what, by ) = definition
            ns ++= containedNames( what )
            ns ++= containedNames( by )
          case Defn( defConst, definition ) =>
            ns += defConst
            ns ++= containedNames( definition )
          case p: SkolemQuantResolutionRule =>
            ns ++= containedNames( p.skolemTerm )
          case p: WeakQuantResolutionRule =>
            ns ++= containedNames( p.variable )
          case _ =>
        }
      }
      ns.result()
    }
  }
}
| gapt/gapt | core/src/main/scala/gapt/proofs/resolution/package.scala | Scala | gpl-3.0 | 5,883 |
package com.iz2use.express.transform.toscala
/** Something that can render itself as Scala source code. */
trait ScalaCode {
  /** Returns the Scala source representation, using the given transformation context. */
  def scalaCode(implicit context: Context): String
} | math85360/ifc-scala | shared/src/main/scala/com/iz2use/express/transform/toscala/ScalaCode.scala | Scala | apache-2.0 | 116 |
package org.apache.spark.sql
import org.apache.spark.sql.execution.strategy.DDLStrategy
import org.apache.spark.sql.parser.CarbonExtensionSqlParser
import org.apache.spark.sql.test.util.PlanTest
import org.scalatest.BeforeAndAfterAll
/**
 * Verifies that `CarbonExtensions` injects the Carbon SQL parser and the DDL
 * planner strategy into a plain SparkSession via `spark.sql.extensions`.
 */
class CarbonExtensionSuite extends PlanTest with BeforeAndAfterAll {

  var session: SparkSession = _

  // plain Spark SQL that must still work with the extensions installed
  val sparkCommands = Array("select 2 > 1")

  // Carbon-specific SQL that only parses with the Carbon parser
  val carbonCommands = Array("show STREAMS")

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    session = SparkSession
      .builder()
      .appName("parserApp")
      .master("local")
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
      .getOrCreate()
  }

  // Fix: stop the session so the local SparkContext does not leak across suites.
  override protected def afterAll(): Unit = {
    try {
      if (session != null) {
        session.stop()
      }
    } finally {
      super.afterAll()
    }
  }

  test("test parser injection") {
    assert(session.sessionState.sqlParser.isInstanceOf[CarbonExtensionSqlParser])
    (carbonCommands ++ sparkCommands) foreach (command =>
      session.sql(command).show)
  }

  test("test strategy injection") {
    // count(...) instead of filter(...).length: no intermediate collection
    assert(session.sessionState.planner.strategies.count(_.isInstanceOf[DDLStrategy]) == 1)
    session.sql("create table if not exists table1 (column1 String) using carbondata ").show
  }
}
| jackylk/incubator-carbondata | integration/spark/src/test/scala/org/apache/spark/sql/CarbonExtensionSuite.scala | Scala | apache-2.0 | 1,149 |
package functional
import play.api.{Application => PlayApp}
import play.api.test._
import play.api.test.Helpers._
import play.api.i18n.MessagesApi
import play.api.i18n.{Lang, Messages, MessagesImpl, MessagesProvider}
import java.time.Instant
import play.api.inject.guice.GuiceApplicationBuilder
import helpers.InjectorSupport
import play.api.db.Database
import views.Titles
import helpers.Formatter
import helpers.UrlHelper
import helpers.UrlHelper._
import helpers.PasswordHash
import constraints.FormConstraints
import play.api.test._
import play.api.test.Helpers._
import java.sql.Connection
import java.util.concurrent.TimeUnit
import helpers.Helper._
import org.specs2.mutable.Specification
import play.api.test.{Helpers, TestServer}
import play.api.i18n.{Lang, Messages}
import play.api.test.TestServer
import org.openqa.selenium.By
import models._
import com.ruimo.scoins.Scoping._
import SeleniumHelpers.htmlUnit
import SeleniumHelpers.FirefoxJa
class EmployeeUserMaintenanceSpec extends Specification with InjectorSupport {
  // Extra app configuration toggling whether a site owner may maintain employee users.
  val disableEmployeeMaintenance = Map("siteOwnerCanEditEmployee" -> false)
  val enableEmployeeMaintenance = Map("siteOwnerCanEditEmployee" -> true)
  // Creates a NORMAL-role store user for tests.  The two long constants are
  // presumably the precomputed password hash/salt matching the "password"
  // credential used by `login` (stretchCount = 1 keeps hashing cheap) -
  // TODO confirm against StoreUserRepo.create's parameter list.
  def createNormalUser(userName: String = "administrator")(implicit conn: Connection, app: PlayApp): StoreUser =
    inject[StoreUserRepo].create(
      userName, "Admin", None, "Manager", "admin@abc.com",
      4151208325021896473L, -1106301469931443100L, UserRole.NORMAL, Some("Company1"), stretchCount = 1
    )
def login(browser: TestBrowser, userName: String = "administrator") {
browser.goTo(controllers.routes.Admin.index.url)
browser.find("#userName").fill().`with`(userName)
browser.find("#password").fill().`with`("password")
browser.find("#doLoginButton").click()
}
"Employee user" should {
"Employee editing is disabled." in new WithBrowser(
WebDriverFactory(CHROME),
appl(inMemoryDatabase() ++ disableEmployeeMaintenance)
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = createNormalUser()
val site = inject[SiteRepo].createNew(Ja, "店舗1")
val siteUser = inject[SiteUserRepo].createNew(user.id.get, site.id.get)
login(browser)
browser.goTo(
controllers.routes.UserMaintenance.startCreateNewEmployeeUser().url +
"?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
// Since employee maintenance is disabled, redirected to top.
browser.webDriver.getTitle() === Messages("commonTitle", Titles.top).trim
}
}
"Employee editing is enabled." in new WithBrowser(
WebDriverFactory(CHROME), appl(inMemoryDatabase() ++ enableEmployeeMaintenance)
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = createNormalUser()
val site = inject[SiteRepo].createNew(Ja, "店舗1")
val siteUser = inject[SiteUserRepo].createNew(user.id.get, site.id.get)
login(browser)
browser.goTo(
controllers.routes.UserMaintenance.startCreateNewEmployeeUser().url +
"?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
// Check validation error.
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#userName_field .error").text === Formatter.validationErrorString(
inject[FormConstraints].normalUserNameConstraint(), ""
)
browser.find("#password_main_field .error").text ===
Messages("error.minLength", inject[FormConstraints].passwordMinLength())
// Confirm password does not match.
browser.find("#userName").fill().`with`("12345678")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh1")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.find("#password_confirm_field .error").text === Messages("confirmPasswordDoesNotMatch")
browser.find("#userName").fill().`with`("12345678")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find(".message").text === Messages("userIsCreated")
// store_user table should be updated.
doWith(inject[StoreUserRepo].findByUserName(site.id.get + "-12345678").get) { user =>
user.firstName === ""
user.passwordHash === PasswordHash.generate("abcdefgh", user.salt)
user.companyName === Some(site.name)
// employee table should be updated.
doWith(inject[EmployeeRepo].getBelonging(user.id.get).get) { emp =>
emp.userId === user.id.get
emp.siteId === site.id.get
}
}
}
}
"User name pattern error." in new WithBrowser(
WebDriverFactory(CHROME),
appl(inMemoryDatabase() ++ enableEmployeeMaintenance + ("normalUserNamePattern" -> "[0-9]{6}"))
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = createNormalUser()
val site = inject[SiteRepo].createNew(Ja, "店舗1")
val siteUser = inject[SiteUserRepo].createNew(user.id.get, site.id.get)
login(browser)
browser.goTo(
controllers.routes.UserMaintenance.startCreateNewEmployeeUser().url +
"?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find("#userName").fill().`with`("abcdef")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find("#userName_field dd.error").text === Messages("normalUserNamePatternError")
browser.find("#userName").fill().`with`("12345")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find("#userName_field dd.error").text === Messages("normalUserNamePatternError")
browser.find("#userName").fill().`with`("1234567")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find("#userName_field dd.error").text === Messages("normalUserNamePatternError")
browser.find("#userName").fill().`with`("123456")
browser.find("#password_main").fill().`with`("abcdefgh")
browser.find("#password_confirm").fill().`with`("abcdefgh")
browser.find("#registerEmployee").click()
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Messages("createEmployeeTitle"))
browser.find(".message").text === Messages("userIsCreated")
// store_user table should be updated.
doWith(inject[StoreUserRepo].findByUserName(site.id.get + "-123456").get) { user =>
user.firstName === ""
user.passwordHash === PasswordHash.generate("abcdefgh", user.salt)
user.companyName === Some(site.name)
// employee table should be updated.
doWith(inject[EmployeeRepo].getBelonging(user.id.get).get) { emp =>
emp.userId === user.id.get
emp.siteId === site.id.get
}
}
}
}
// Since employee maintenance is disabled, redirected to top
"Login with super user. Since super user cannot edit employee, page is redirected to top." in new WithBrowser(
WebDriverFactory(CHROME),
appl(inMemoryDatabase() ++ disableEmployeeMaintenance)
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
browser.goTo(
controllers.routes.UserMaintenance.startCreateNewEmployeeUser().url +
"?lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Titles.top).trim
}
}
// Since employee maintenance is disabled, redirected to top
"Login with super user. Since super user cannot edit employee, page is redirected to top." in new WithBrowser(
WebDriverFactory(CHROME),
appl(inMemoryDatabase() ++ disableEmployeeMaintenance)
) {
inject[Database].withConnection { implicit conn =>
val currencyInfo = inject[CurrencyRegistry]
val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val site01 = inject[SiteRepo].createNew(Ja, "店舗1")
val superUser = loginWithTestUser(browser)
val user01 = createNormalUser("user01")
val employee01 = createNormalUser(site01.id.get + "-employee")
val employee02 = createNormalUser((site01.id.get + 1) + "-employee")
val siteOwner = inject[SiteUserRepo].createNew(user01.id.get, site01.id.get)
logoff(browser)
login(browser, "user01")
browser.goTo(
controllers.routes.UserMaintenance.editUser().url.addParm("lang", lang.code).toString
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Titles.top).trim
browser.goTo(
controllers.routes.UserMaintenance.modifyUserStart(employee01.id.get).url.addParm("lang", lang.code).toString
)
browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
browser.webDriver.getTitle() === Messages("commonTitle", Titles.top).trim
}
}
    // End-to-end scenario: a store owner sees only employees of his own site,
    // can edit one (with full validation coverage), and can delete one.
    "Edit employee will show only employees of the site of currently logined store owner." in new WithBrowser(
      WebDriverFactory(CHROME),
      appl(inMemoryDatabase() ++ enableEmployeeMaintenance)
    ) {
      inject[Database].withConnection { implicit conn =>
        val currencyInfo = inject[CurrencyRegistry]
        val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
        // Fixture: site01 owned by user01; employee01 belongs to site01 (id prefix),
        // employee02 belongs to a different site and must not be listed.
        val site01 = inject[SiteRepo].createNew(Ja, "店舗1")
        val superUser = loginWithTestUser(browser)
        val user01 = createNormalUser("user01")
        val employee01 = createNormalUser(site01.id.get + "-employee")
        val employee02 = createNormalUser((site01.id.get + 1) + "-employee")
        val siteOwner = inject[SiteUserRepo].createNew(user01.id.get, site01.id.get)
        logoff(browser)
        login(browser, "user01")
        browser.goTo(
          controllers.routes.UserMaintenance.editUser().url.addParm("lang", lang.code).toString
        )
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        // Only employee01 (same site as the logged-in store owner) is listed.
        browser.find(".userTable .userTableBody").size === 1
        browser.find(".userTable .userTableBody .id a").text === employee01.id.get.toString
        browser.find(".userTable .userTableBody .name").text === employee01.userName
        // Open the modify form and verify it is pre-populated with employee01's data.
        browser.find(".userTable .userTableBody .id a").click()
        browser.webDriver.getTitle() === Messages("commonTitle", Messages("modifyUserTitle"))
        browser.find("#userId").attribute("value") === employee01.id.get.toString
        browser.find("#userName").attribute("value") === employee01.userName
        browser.find("#firstName").attribute("value") === employee01.firstName
        browser.find("#lastName").attribute("value") === employee01.lastName
        browser.find("#companyName").attribute("value") === employee01.companyName.get
        browser.find("#email").attribute("value") === employee01.email
        browser.find("#sendNoticeMail_field input[type='checkbox']").size === 0
        // Submit an empty form and verify every required-field / format error appears.
        browser.find("#userName").fill().`with`("")
        browser.find("#firstName").fill().`with`("")
        browser.find("#lastName").fill().`with`("")
        browser.find("#companyName").fill().`with`("")
        browser.find("#email").fill().`with`("")
        browser.find("#modifyUser").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.find(".globalErrorMessage").text === Messages("inputError")
        browser.find("#userName_field .error").text ===
          Messages("error.minLength", inject[FormConstraints].userNameMinLength)
        browser.find("#firstName_field .error").text === Messages("error.required")
        browser.find("#lastName_field .error").text === Messages("error.required")
        browser.find("#companyName_field .error").text === Messages("error.required")
        browser.find("#email_field .error").index(0).text === Messages("error.email")
        browser.find("#email_field .error").index(1).text === Messages("error.required")
        browser.find("#password_main_field .error").text ===
          Messages("error.minLength", inject[FormConstraints].passwordMinLength())
        // Fill valid values but leave the password confirmation empty -> mismatch error.
        browser.find("#userName").fill().`with`(employee01.userName + "new")
        browser.find("#firstName").fill().`with`("firstName2")
        browser.find("#lastName").fill().`with`("lastName2")
        browser.find("#companyName").fill().`with`("companyName2")
        browser.find("#email").fill().`with`("xxx@xxx.xxx")
        browser.find("#password_main").fill().`with`("password2")
        browser.find("#modifyUser").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.find("#password_confirm_field .error").text === Messages("confirmPasswordDoesNotMatch")
        // Matching passwords -> submit succeeds and the record is updated in the DB.
        browser.find("#password_main").fill().`with`("password2")
        browser.find("#password_confirm").fill().`with`("password2")
        browser.find("#modifyUser").click()
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.webDriver.getTitle() === Messages("commonTitle", Messages("editUserTitle"))
        browser.find(".message").text === Messages("userIsUpdated")
        doWith(inject[StoreUserRepo].apply(employee01.id.get)) { newUser =>
          newUser.userName === employee01.userName + "new"
          newUser.firstName === "firstName2"
          newUser.lastName === "lastName2"
          newUser.companyName === Some("companyName2")
          newUser.email === "xxx@xxx.xxx"
          newUser.passwordHash === PasswordHash.generate("password2", newUser.salt, storeUserRepo.PasswordHashStretchCount())
        }
        // Delete flow: answering 'No' in the confirmation dialog keeps the user.
        browser.find("button[data-user-id='" + employee01.id.get + "']").click()
        browser.waitUntil(
          failFalse(browser.find(".ui-dialog-buttonset").first().displayed())
        )
        browser.find(".ui-dialog-buttonset .ui-button").index(1).click() // click No
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.webDriver.getTitle() === Messages("commonTitle", Messages("editUserTitle"))
        browser.goTo(
          controllers.routes.UserMaintenance.editUser().url.addParm("lang", lang.code).toString
        )
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        // Delete flow: answering 'Yes' removes the employee from the listing.
        browser.find("button[data-user-id='" + employee01.id.get + "']").click()
        browser.waitUntil(
          failFalse(browser.find(".ui-dialog-buttonset").first().displayed())
        )
        browser.find(".ui-dialog-buttonset .ui-button").index(0).click() // click Yes
        browser.await().atMost(5, TimeUnit.SECONDS).untilPage().isLoaded()
        browser.webDriver.getTitle() === Messages("commonTitle", Messages("editUserTitle"))
        browser.find(".userTable .userTableBody").size === 0
      }
    }
}
}
| ruimo/store2 | test/functional/EmployeeUserMaintenanceSpec.scala | Scala | apache-2.0 | 18,584 |
package parsec.optimised
import scala.reflect.macros.blackbox.Context
import util.{Zeroval, TreeTools}
/**
* This trait rewrites grammars according to the Parsequery
* transformation rules.
*/
trait ParsequeryTransform
extends GrammarTrees
with Zeroval
with TreeTools {
val c: Context
import c.universe._
/**
* The global, outermost transform function
* It forwards implementation to `transformMap` if the Grammar we have
* at hand corresponds to such a grammar.
*/
def transform(g: Grammar): Grammar = g match {
case Concat(l, r, t) => Concat(transform(l), transform(r), t)
case ConcatLeft(l, r, t) => ConcatLeft(transform(l), transform(r), t)
case ConcatRight(l, r, t) => ConcatRight(transform(l), transform(r), t)
case Or(l, r, t) => Or(transform(l), transform(r), t)
case Mapped(g, f, t) => transformMap(g, f, t)
/* TODO: add rep and repsep */
case _ => g
}
/**
* The following rules apply
*
* - Concat_1: Pushing of maps closer to their creation
* T[[ (a ~ b) map { case (a, b) => f3(f1(a), f2(b)) } ]] =
* (T[[ a ]] map f1 ~ T[[ b ]] map f2) map { case (a, b) => f3(a, b) }
*
* For now, we expect f3 to be either a case class constructor (this case
* also covers tuples) or method (which can also be an anonymous functions).
*
* - Concat_2: Turning parsers into recognizers (make them id recognizers for now)
* T[[ (a ~ b) map { case (a, b) => f(a) } ]] = T[[ a ]] map f <~ recognize(T[[ b ]])
* T[[ (a ~ b) map { case (a, b) => f(b) } ]] = recognize(T[[ a ]]) ~> T[[ b ]] map f
*
* - Map_Map over Parser
* T[[ T[[ p map f map g ]] = T[[ p ]] map (f andThen g) ]]
*/
  // Pushes a `map` (function `f`, result type `u`) down into grammar `g`,
  // applying the Or-distribution, map-fusion and Concat rules documented above.
  def transformMap(g: Grammar, f: Tree, u: Type): Grammar = g match {
    // Or: distribute the map over both alternatives.
    case Or(l, r, t) =>
      val q"${lMapped: Grammar}" = q"$l.map[$t]($f)"
      val q"${rMapped: Grammar}" = q"$r.map[$t]($f)"
      Or(lMapped, rMapped, t)

    /**
     * map of a map, go under
     * Note: it is not sufficient to just propagate the
     * transformation under, i.e. this is imperfect:
     * case Mapped(_, _, _) => Mapped(transform(g), f, u)
     *
     * If the result of `transform(g)` is a Concat, we still want
     * to perform `transformMap` on it.
     *
     * If on the other hand `transform(g)` is a Mapped, we will run
     * into an infinite loop by recursively calling ourselves.
     * Option 1: compose functions `f` and `f2` from the inner map
     * Option 2: perform one level of pattern matching deeper
     * Option 2 seems better since it flattens all maps into a single one
     * While this can be done through staging later, we might as well have
     * it done now.
     */
    case Mapped(g1, f2, u2) =>
      // NOTE(review): these printlns fire at macro-expansion (compile) time and
      // pollute compiler output; consider removing or routing through c.info.
      println()
      println("Mapped(Mapped(....))")
      println("before")
      println(showRaw(f))
      println(showRaw(f2))
      println()
      // Fuse the two mapping functions into a single lambda: arg => f(f2(arg)).
      val argTerm = TermName(c.freshName("arg"))
      val arg = q"$argTerm"
      val inlinedf2 = inline(f2, List(arg))
      val inlined = inline(f, List(inlinedf2))
      val composed = q"($argTerm: ${g1.tpe}) => $inlined"
      println("after")
      println(showRaw(composed))
      transformMap(g1, composed, u)

    /**
     * The map function will only ever be involved on one side
     * TODO: The right/left hand sides must be turned into recognisers
     */
    // NOTE(review): ConcatLeft keeps the pre-map type `t` while ConcatRight uses
    // the mapped result type `u` as the node type — verify which is intended.
    case ConcatLeft(l, r, t) => ConcatLeft(transformMap(l, f, u), r, t)
    case ConcatRight(l, r, t) => ConcatRight(l, transformMap(r, f, u), u)

    /**
     * Step 1: find all pattern bindings in the pattern match syntax in `f`
     * Step 2: find all independent applications of each binding, or dead codes thereof
     * Step 3: transfer independent applications to their respective parsers
     * Step 4: rewrite `f` and remove dead code
     * Recursively propagate transform as appropriate
     */
    case Concat(l, r, t) => f match {
      case q"($param) => $body" => body match {
        /** we are assuming only one case in the body */
        case q"$_ match { case $c1 }" => c1 match {
          /**
           * The pattern itself can have two forms:
           * - just a simple binding, in which case we can't modify anything
           * - a pair binding, in which case we might salvage from inspecting
           *   the body
           */
          case cq"$pat => $bdy" =>
            /** Get all mappings of a grammar to a binding */
            val mappings: Map[Grammar, Bind] = grammarBindingMappings(g, pat)
            /** Get mappings from each Grammar to a possible Apply tree */
            val indepApplications: Map[Grammar, SymbolUsageInfo] = {
              val bindingSyms = mappings.values.map(_.symbol).toList
              for((grammar, binding) <- mappings) yield {
                val app = largestIndepApplication(
                  binding.symbol,
                  bindingSyms diff List(binding.symbol)
                )(bdy)
                (grammar -> app)
              }
            }
            /**
             * helper functions to identify applications worth manipulating
             */
            def isApp(s: SymbolUsageInfo) = s match {
              case IndepComponent(Apply(_, _)) => true
              case _ => false
            }
            def isAppOrNotUsed(s: SymbolUsageInfo) = s match {
              case IndepComponent(Apply(_, _)) => true
              case NotUsed => true
              case _ => false
            }
            def isNotUsed(s: SymbolUsageInfo) = s match {
              case NotUsed => true
              case _ => false
            }
            /** create, for each Apply, a separate function */
            val newFunctions: Map[Grammar, (Tree, Type)] = for {
              (grammar, indepApp) <- indepApplications if isApp(indepApp)
              binding <- mappings.get(grammar)
            } yield {
              val IndepComponent(app @ Apply(_, _)) = indepApp
              val funArg = freshAnonVal("arg", binding.tpe)
              /** Does the appTransformed *need* to be a `typingTransform`?*/
              // Rewrites references to the pattern binding into references to the
              // freshly introduced function argument.
              val appTransformed = c.internal.typingTransform(app)(
                (tree, api) => tree match {
                  case Ident(_) if tree.symbol == binding.symbol =>
                    api.typecheck(q"${funArg.symbol}")
                  case _ =>
                    api.default(tree)
                }
              )
              val fun = (q"($funArg) => $appTransformed", app.tpe)
              (grammar -> fun)
            }
            /**
             * for each relevant grammar create a Mapped version of it
             * We use Option[Mapped] to denote if a grammar is not used
             * anymore
             */
            val oldAndNew: Map[Grammar, Option[Mapped]] = for {
              (grammar, indepApp) <- indepApplications if isAppOrNotUsed(indepApp)
            } yield {
              val maybeMapped = indepApp match {
                case IndepComponent(Apply(_, _)) =>
                  for ((fun, tpe) <- newFunctions.get(grammar))
                    yield Mapped(grammar, fun, tpe)
                case NotUsed => None
              }
              (grammar -> maybeMapped)
            }
            /**
             * in `bdy`, replace all occurrences of the applications with relevant symbol
             * Since Map uses hashCode and co, and we need ref equality, we'll use
             * a List and find with a predicate
             */
            val reverseMap: List[(Apply, Bind)] = (for {
              (grammar, indepApp) <- indepApplications if isApp(indepApp)
              binding <- mappings.get(grammar)
            } yield {
              val IndepComponent(app @ Apply(_, _)) = indepApp
              (app, binding)
            }).toList
            val newBody = c.internal.typingTransform(bdy)((tree, api) => tree match {
              case app @ Apply(_, _) =>
                reverseMap.find { case (app2, _) => app == app2 } match {
                  case None => api.default(tree)
                  case Some((_, b @ Bind(_, _))) => api.typecheck(q"${b.symbol}")
                }
              case _ => api.default(tree)
            })
            val grammarReworked = swapOldForNew(g, oldAndNew)
            /**
             * rewrite the pattern match in order to remove dead patterns
             */
            val deadBindings: List[Symbol] = (for {
              (grammar, binding) <- mappings
              indepApp <- indepApplications.get(grammar) if isNotUsed(indepApp)
            } yield binding.symbol).toList
            val dcedPattern = eliminateDeadPatterns(pat, deadBindings)
            /**
             * If the new body is just an Ident we don't need it
             */
            val brandNewBody = newBody match {
              case Ident(_) => transform(grammarReworked)
              case _ =>
                // Rebuild the lambda: splice in the rewritten body, retype the
                // parameter with the reworked grammar's type, and substitute
                // the dead-code-eliminated pattern.
                val funTransformed = c.internal.typingTransform(f)(
                  (tree, api) =>
                    if (tree == bdy) api.typecheck(newBody)
                    else if (tree == param) {
                      val ValDef(mods, name, tpt, rhs) = param
                      /**
                       * using the internal reflection library to create
                       * a new valDef, with the same symbol as `param`
                       */
                      import c.universe.definitions._
                      import c.internal._, decorators._
                      val sym = param.symbol
                      sym.setInfo(grammarReworked.tpe)
                      val newVal = ValDef(mods, name, q"${grammarReworked.tpe}", rhs)
                      newVal.setSymbol(sym)
                      api.typecheck(newVal)
                    } else if (tree == pat) dcedPattern match {
                      /* nothing in the pattern is used in the body */
                      case EmptyTree => api.typecheck(pq"_")
                      case _ => api.typecheck(dcedPattern)
                    } else api.default(tree)
                )
                Mapped(transform(grammarReworked), funTransformed, u)
            }
            brandNewBody
          // NOTE(review): compile-time debug println left in; consider removing.
          case _ => println("matched no pattern"); Mapped(transform(g), f, u)
        }
        case _ =>
          c.warning(f.pos,
            """The body of your function (applying to a concat) could possibly
            benefit from optimisations if you use a pattern match syntax, with
            exactly one case.
            Do consider changing it!
            """
          )
          println(showCode(f))
          println(showRaw(f))
          Mapped(transform(g), f, u)
      }
      case _ =>
        c.warning(f.pos,
          """You are using a function syntax that is not anonymous function
          syntax. As a result you might be forgoing some optimisations we
          could perform. Do consider changing it!
          """
        )
        println(showCode(f))
        println(showRaw(f))
        Mapped(transform(g), f, u)
    }
    case _ => Mapped(transform(g), f, u)
  }
/**
* given a grammar and a tree that is a pattern match, returns
* a mapping of tree to binding. This is used for identifying
* which parsers are mapped on from a concat:
*
* Example: (a ~ b ~ c ~ d) map { case (((p1, p2), p3), p4) => ... }
* returns (a, p1), (b, p2), (c, p3), (d, p4)
*
* Example: (a ~ b ~ c ~ d) map { case ((p1, p3), p4) => ... }
* returns (a ~ b, p1), (c, p3), (d, p4)
*/
def grammarBindingMappings(g: Grammar, pat: Tree): Map[Grammar, Bind] = (g, pat) match {
case (Concat(l, r, _), q"($lpat, $rpat)") =>
grammarBindingMappings(l, lpat) ++ grammarBindingMappings(r, rpat)
case (_, b @ Bind(_, _)) => Map(g -> b)
case _ => Map.empty[Grammar, Bind]
}
/**
* given old to new grammar mappings, yield a new grammar where the old is
* replaced with the new
*/
  // Rebuilds grammar `g`, replacing each sub-grammar found in `oldAndNew` with
  // its Mapped replacement (Some(Some(_))) or marking it unused (Some(None)).
  def swapOldForNew(g: Grammar, oldAndNew: Map[Grammar, Option[Mapped]]): Grammar = g match {
    case Concat(l, r, tpe) => oldAndNew.get(g) match {
      // NOTE(review): this branch does not recurse into l/r before rebuilding —
      // confirm children never need swapping when the whole Concat is unused.
      case Some(None) => ConcatLeft(l, r, tpe)//TODO: convert into recogniser
      case Some(Some(g2)) => g2
      case _ =>
        // Neither side replaced as a whole: decide the Concat flavour from
        // which side (if any) became unused, then recurse on both sides.
        val (leftExists, rightExists) = (oldAndNew.get(l), oldAndNew.get(r))
        (leftExists, rightExists) match {
          case (Some(None), Some(Some(_))) =>
            ConcatLeft(swapOldForNew(l, oldAndNew), swapOldForNew(r, oldAndNew), tpe)
          case (Some(Some(_)), Some(None)) =>
            ConcatRight(swapOldForNew(l, oldAndNew), swapOldForNew(r, oldAndNew), tpe)
          case _ => Concat(swapOldForNew(l, oldAndNew), swapOldForNew(r, oldAndNew), tpe)
        }
    }
    case _ => oldAndNew.get(g) match {
      case Some(Some(g2)) => g2
      case Some(None) => g//TODO: convert into recogniser
      case _ => g
    }
  }
/**
* Given a pattern-match tree (i.e. `pat` in q"$pat => $body") and a
* list of dead bindings, returns a new tree devoid of those bindings
* We naturally
*/
def eliminateDeadPatterns(pat: c.Tree, deadBindings: List[Symbol]): Tree = {
if (deadBindings.contains(pat.symbol)) EmptyTree
else pat match {
case q"($lpat, $rpat)" =>
(eliminateDeadPatterns(lpat, deadBindings), eliminateDeadPatterns(rpat, deadBindings)) match {
case (EmptyTree, EmptyTree) => EmptyTree
case (EmptyTree, r) => r
case (l, EmptyTree) => l
case (l, r) => q"($l, $r)"
}
case _ => pat
}
}
}
| manojo/parsequery | macros/src/main/scala/parsec/optimised/ParsequeryTransform.scala | Scala | mit | 13,517 |
package com.webtrends.harness.component.spray
import akka.actor.{Actor, ActorRef}
import com.webtrends.harness.component.spray.websocket._
import spray.can.server.ServerSettings
/**
* Created by wallinm on 4/3/15.
*/
trait SprayWebSocketServer { this : Actor =>
  // Reference to the running websocket server actor, if one has been started.
  var webSocketServer: Option[ActorRef] = None

  /**
   * Creates the websocket server actor as a child of this actor, records it in
   * `webSocketServer`, and tells it to bind to the given port.
   *
   * @param port port the server should bind to
   * @param settings optional spray-can server settings
   * @return the newly created server actor
   */
  def startWebSocketServer(port: Int, settings: Option[ServerSettings] = None): ActorRef = {
    val server = context.actorOf(CoreWebSocketServer.props(port, settings), SprayWebSocketServer.Name)
    webSocketServer = Some(server)
    server ! WebSocketBindServer
    server
  }

  /** Tells the websocket server to shut down; a no-op when no server was started. */
  def stopWebSocketServer = webSocketServer.foreach(_ ! WebSocketShutdownServer)
}
/** Companion holding the actor name used when the websocket server is created. */
object SprayWebSocketServer {
  val Name = "websocket-server"
}
| mjwallin1/wookiee-spray | src/main/scala/com/webtrends/harness/component/spray/SprayWebSocketServer.scala | Scala | apache-2.0 | 806 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/**
* Test for testing aggregate plans.
*/
class AggregateTest extends TableTestBase {

  // Grouped aggregation followed by a filter on the grouping key;
  // the resulting plan is checked against the recorded expected plan.
  @Test
  def testGroupAggregateWithFilter(): Unit = {
    val util = batchTestUtil()
    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
    val resultTable = sourceTable.groupBy('a)
      .select('a, 'a.avg, 'b.sum, 'c.count)
      .where('a === 1)
    util.verifyPlan(resultTable)
  }

  // Global (non-grouped) aggregation over all three columns.
  @Test
  def testAggregate(): Unit = {
    val util = batchTestUtil()
    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
    val resultTable = sourceTable.select('a.avg,'b.sum,'c.count)
    util.verifyPlan(resultTable)
  }

  // Filter applied before the global aggregation.
  @Test
  def testAggregateWithFilter(): Unit = {
    val util = batchTestUtil()
    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
    val resultTable = sourceTable.select('a,'b,'c).where('a === 1)
      .select('a.avg,'b.sum,'c.count)
    util.verifyPlan(resultTable)
  }

  // Same as above, but column 'c is a nested tuple and the aggregation also
  // sums its first nested field (accessed via get("_1")).
  @Test
  def testAggregateWithFilterOnNestedFields(): Unit = {
    val util = batchTestUtil()
    val sourceTable = util.addTableSource[(Int, Long, (Int, Long))]("MyTable", 'a, 'b, 'c)
    val resultTable = sourceTable.select('a,'b,'c).where('a === 1)
      .select('a.avg,'b.sum,'c.count, 'c.get("_1").sum)
    util.verifyPlan(resultTable)
  }
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/AggregateTest.scala | Scala | apache-2.0 | 2,356 |
package uk.gov.dvla.vehicles.presentation.common.views.helpers
import uk.gov.dvla.vehicles.presentation.common.UnitSpec
import uk.gov.dvla.vehicles.presentation.common.views.constraints.Required.RequiredField
import uk.gov.dvla.vehicles.presentation.common.views.helpers.HtmlArgsExtensions.RichHtmlArgs
/**
 * Unit tests for [[RichHtmlArgs]], covering each of its html-args decorators.
 *
 * Fix: four bare `result.contains(key)` expressions were discarded boolean
 * expressions that asserted nothing; they now assert presence explicitly,
 * consistent with the `result.contains(key) should equal(...)` checks used
 * elsewhere in this spec.
 */
final class HtmlArgsExtensionsSpec extends UnitSpec {
  "withMaxLength" should {
    "return the same args when key 'maxLength' is already present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithMaxLength)
      // Override validationOff to check the behaviour of the production code.
      val result = richHtmlArgs.withMaxLength
      result should equal(htmlArgsWithMaxLength)
    }
    "add key 'maxLength' with default value when not present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      // Override validationOff to check the behaviour of the production code.
      val result = richHtmlArgs.withMaxLength
      result should equal(htmlArgsWithMaxLength)
    }
  }
  "withoutAutoComplete" should {
    "add key-value 'autocomplete' 'off' attribute when key is not present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withoutAutoComplete
      result should equal(htmlArgsWithAutoCompleteOff)
    }
    "return the same args when key-value 'autocomplete' 'off' is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithAutoCompleteOff)
      val result = richHtmlArgs.withoutAutoComplete
      result should equal(htmlArgsWithAutoCompleteOff)
    }
    "replace key-value autocomplete 'on' with autocomplete 'off'" in {
      val htmlArgsWithAutoCompleteOn: Map[Symbol, Any] = Map('title -> "test", 'autocomplete -> "on")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithAutoCompleteOn)
      val result = richHtmlArgs.withoutAutoComplete
      result should equal(htmlArgsWithAutoCompleteOff)
    }
  }
  "withAriaDescribedby" should {
    "return the same when hint text is not present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val key = Symbol("aria-describedby")
      val result: Map[Symbol, Any] = richHtmlArgs.withAriaDescribedby(hintText = None, idOfRelatedField = "test-id")
      result.contains(key) should equal(false)
    }
    "add 'aria-describedby' attribute when hint text is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val key = Symbol("aria-describedby")
      val result: Map[Symbol, Any] = richHtmlArgs.withAriaDescribedby(hintText = Some("test-hint-text"), idOfRelatedField = "test-id")
      result.contains(key) should equal(true)
      result.get(key) should equal(Some("test-id-hint"))
    }
  }
  "withTypeAttribute" should {
    "add 'type=text' when key 'type' is not present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withTypeAttribute
      val htmlArgsWithTypeText = Map('title -> "test", 'type -> "text")
      result should equal(htmlArgsWithTypeText)
    }
    "add expected when key 'typeTel' is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeTel)
      val result = richHtmlArgs.withTypeAttribute
      val key = 'type
      val value = """tel"""
      // Was a discarded boolean expression; assert key presence explicitly.
      result.contains(key) should equal(true)
      result.get(key) should equal(Some(value))
    }
    "remove key 'typeTel' when present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeTel)
      val result = richHtmlArgs.withTypeAttribute
      result.contains('typeTel) should equal(false)
    }
    "add expected when key 'typeFleetNumber' is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeFleetNumber)
      val result = richHtmlArgs.withTypeAttribute
      val key = 'type
      val value = """tel"""
      // Was a discarded boolean expression; assert key presence explicitly.
      result.contains(key) should equal(true)
      result.get(key) should equal(Some(value))
    }
    "remove key 'typeFleetNumber' when present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeFleetNumber)
      val result = richHtmlArgs.withTypeAttribute
      result.contains('typeFleetNumber) should equal(false)
    }
    "add expected when key 'typeEmail' is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeEmail)
      val result = richHtmlArgs.withTypeAttribute
      val key = 'type
      val value = "email"
      // Was a discarded boolean expression; assert key presence explicitly.
      result.contains(key) should equal(true)
      result.get(key) should equal(Some(value))
    }
    "remove key 'typeEmail' when present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeEmail)
      val result = richHtmlArgs.withTypeAttribute
      result.contains('typeEmail) should equal(false)
    }
    "add expected when key 'alphabeticalOnly' is present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeAlphabeticalOnly)
      val result = richHtmlArgs.withTypeAttribute
      val key = 'type
      val value = """text"""
      // Was a discarded boolean expression; assert key presence explicitly.
      result.contains(key) should equal(true)
      result.get(key) should equal(Some(value))
    }
    "remove key 'alphabeticalOnly' when present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithTypeAlphabeticalOnly)
      val result = richHtmlArgs.withTypeAttribute
      result.contains('alphabeticalOnly) should equal(false)
    }
  }
  "withAriaInvalid" should {
    "return the same when hasErrors is false" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withAriaInvalid(hasErrors = false)
      result should equal(htmlArgsMinimal)
    }
    "add aria-invalid when hasErrors is true" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withAriaInvalid(hasErrors = true)
      val key = Symbol("aria-invalid")
      val htmlArgsWithAriaInvalid: Map[Symbol, Any] = Map('title -> "test", key -> true)
      result should equal(htmlArgsWithAriaInvalid)
    }
  }
  "withAriaRequired" should {
    "return the same when field does not have a required constraint" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val constraints: Seq[(String, Seq[Any])] = Seq.empty
      val result = richHtmlArgs.withAriaRequired(constraints)
      result should equal(htmlArgsMinimal)
    }
    "add aria-required when field has a required constraint" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val constraints: Seq[(String, Seq[Any])] = Seq((RequiredField, Seq()))
      val result = richHtmlArgs.withAriaRequired(constraints)
      val key = Symbol("aria-required")
      result should equal(Map('title -> "test", key -> true))
    }
  }
  "valueElseTrue" should {
    "add the key-value 'value' with default value 'true' when value is not present" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.valueElseTrue
      val htmlArgsWithValueDefault = Map('title -> "test", 'value -> true)
      result should equal(htmlArgsWithValueDefault)
    }
    "return the same when key 'value' is present" in {
      val htmlArgsWithValue = Map('title -> "test", 'value -> "test-value")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithValue)
      val result = richHtmlArgs.valueElseTrue
      result should equal(htmlArgsWithValue)
    }
  }
  "checkedWhenValueMatches" should {
    "return the same when value not present in htmlArgs" in {
      val fieldValue = Some("test-value")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      result should equal(htmlArgsMinimal)
    }
    "return the same when field has a value doesn't match htmlArgs value" in {
      val fieldValue = Some("test-value")
      val htmlArgsWithDifferentValue = Map('title -> "test", 'value -> "different-test-value")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithDifferentValue)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      result should equal(htmlArgsWithDifferentValue)
    }
    "return the same when field and htmlArgs have no 'checked' value" in {
      val fieldValue = None
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      result should equal(htmlArgsMinimal)
    }
    "return the same when field has no value" in {
      val fieldValue = None
      val htmlArgsWithValue = Map('title -> "test", 'value -> "test-value")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithValue)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      result should equal(htmlArgsWithValue)
    }
    "not add 'checked' when the field has no value (so defaults to 'true') and htmlArgs contains value 'true'" in {
      val fieldValue = None
      val htmlArgsWithValue = Map('title -> "test", 'value -> true)
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithValue)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      val htmlArgsWithChecked = Map('title -> "test", 'value -> true)
      result should equal(htmlArgsWithChecked)
    }
    "add 'checked' when the field has the same value as the htmlArgs value" in {
      val fieldValue = Some("test-value")
      val htmlArgsWithSameValue = Map('title -> "test", 'value -> "test-value")
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithSameValue)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      val htmlArgsWithChecked = Map('title -> "test", 'value -> "test-value", 'checked -> "")
      result should equal(htmlArgsWithChecked)
    }
    "add 'checked' when the field has the same value as the htmlArgs value (specified as a boolean)" in {
      val fieldValue = Some("true")
      val htmlArgsWithSameValue = Map('title -> "test", 'value -> true)
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithSameValue)
      val result = richHtmlArgs.checkedWhenValueMatches(fieldValue)
      val htmlArgsWithChecked = Map('title -> "test", 'value -> true, 'checked -> "")
      result should equal(htmlArgsWithChecked)
    }
  }
  "withCanTabTo" should {
    "return the same map when field has tabbing turned on" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withCanTabTo(canTabTo = true)
      result should equal(htmlArgsMinimal)
    }
    "add tabindex of -1 when tabbing is turned off" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withCanTabTo(canTabTo = false)
      result should equal(Map('title -> "test", Symbol("tabindex") -> -1))
    }
  }
  "withAutofocus" should {
    "return the same map when autofocus is turned off" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withAutofocus(autofocus = false)
      result should equal(htmlArgsMinimal)
    }
    "add autofocus to the map when autofocus is turned on" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withAutofocus(autofocus = true)
      result should equal(Map('title -> "test", Symbol("autofocus") -> true))
    }
  }
  "withoutNoOptionalLabel" should {
    "return the same map when NO_OPTIONAL_LABEL not present in htmlArgs" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsMinimal)
      val result = richHtmlArgs.withoutNoOptionalLabel
      result should equal(htmlArgsMinimal)
    }
    "remove NO_OPTIONAL_LABEL from the map when present in htmlArgs" in {
      val richHtmlArgs = new RichHtmlArgs(htmlArgsWithNoOptionalLabel)
      val result = richHtmlArgs.withoutNoOptionalLabel
      result should equal(htmlArgsMinimal)
    }
  }
  // Shared fixtures: minimal args plus one variant per decorator under test.
  private def htmlArgsMinimal: Map[Symbol, Any] = Map('title -> "test")
  private def htmlArgsWithMaxLength: Map[Symbol, Any] = Map('title -> "test", 'maxLength -> 60)
  private def htmlArgsWithAutoCompleteOff: Map[Symbol, Any] = Map('title -> "test", 'autocomplete -> "off")
  private def htmlArgsWithTypeTel: Map[Symbol, Any] = Map('title -> "test", 'typeTel -> true)
  private def htmlArgsWithTypeFleetNumber: Map[Symbol, Any] = Map('title -> "test", 'typeFleetNumber -> true)
  private def htmlArgsWithTypeEmail: Map[Symbol, Any] = Map('title -> "test", 'typeEmail -> true)
  private def htmlArgsWithTypeAlphabeticalOnly: Map[Symbol, Any] = Map('title -> "test", 'alphabeticalOnly -> true)
  private def htmlArgsWithNoOptionalLabel: Map[Symbol, Any] = Map('title -> "test", 'NO_OPTIONAL_LABEL -> true)
}
| dvla/vehicles-presentation-common | test/uk/gov/dvla/vehicles/presentation/common/views/helpers/HtmlArgsExtensionsSpec.scala | Scala | mit | 12,386 |
/*-------------------------------------------------------------------------*\\
** ScalaCheck **
** Copyright (c) 2007-2021 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\\*------------------------------------------------------------------------ */
package org.scalacheck
import Prop.{forAllNoShrink => forAll}
object StatsSpecification extends Properties("Stats") {

  /** Each property run draws 5k samples, so cap ScalaCheck at 10 runs. */
  override def overrideParameters(ps: Test.Parameters): Test.Parameters =
    ps.withMinSuccessfulTests(10)

  /** Draws per distribution; empirical mean and standard deviation must
   *  land within ±10% of the true values. */
  val Samples = 5000
  val ErrorRate = 0.1

  /** Relatively small, well-behaved mean values. */
  val genMean = Gen.choose(10.0, 20.0)

  /** Trial counts for the binomial property. */
  val genTrials = Gen.choose(10, 30)

  /** Probability values away from the 0/1 extremes. */
  val genP = Gen.choose(0.2, 0.8)

  property("prob") =
    forAll(genP) { p =>
      // Bernoulli(p) encoded as 0.0/1.0: mean p, variance p(1-p).
      val bernoulli = Gen.prob(p).map(hit => if (hit) 1.0 else 0.0)
      check(bernoulli, mean = p, stdDev = Math.sqrt(p * (1.0 - p)))
    }

  property("gaussian") =
    forAll(genMean, genMean) { (mean, stdDev) =>
      check(Gen.gaussian(mean, stdDev), mean, stdDev)
    }

  property("exponential") =
    forAll(genMean) { mean =>
      // Exponential with rate 1/mean has both mean and std-dev equal to mean.
      check(Gen.exponential(1.0 / mean), mean = mean, stdDev = mean)
    }

  property("geometric") =
    forAll(genMean) { mean =>
      val asDouble = Gen.geometric(mean).map(_.toDouble)
      // Success probability implied by the requested mean.
      val p = 1.0 / (mean + 1.0)
      val stdDev = Math.sqrt((1.0 - p) / (p * p))
      check(asDouble, mean, stdDev)
    }

  property("poisson") =
    forAll(genMean) { rate =>
      // Poisson: variance equals the rate.
      check(Gen.poisson(rate).map(_.toDouble), mean = rate, stdDev = Math.sqrt(rate))
    }

  property("binomial") =
    forAll(genTrials, genP) { (trials, p) =>
      val counts = Gen.binomial(Gen.prob(p), trials).map(_.toDouble)
      check(counts, mean = trials * p, stdDev = Math.sqrt(trials * p * (1.0 - p)))
    }

  /** Samples `gen` and asserts its empirical mean/std-dev fall within
   *  ErrorRate of the expected values. */
  def check(gen: Gen[Double], mean: Double, stdDev: Double): Prop = {
    val (μ, σ) = computeStats(gen, Samples)
    val meanOk = (mean ± (mean * ErrorRate)).contains(μ)
    val stdDevOk = (stdDev ± (stdDev * ErrorRate)).contains(σ)
    meanOk && stdDevOk
  }

  /** Draws `samples` values from `g`; returns (mean, population std-dev). */
  def computeStats(g: Gen[Double], samples: Int): (Double, Double) = {
    val xs = Gen.buildableOfN[Vector[Double], Double](samples, g).sample.get
    val mean = xs.sum / xs.size
    val variance = xs.iterator.map(x => Math.pow(x - mean, 2)).sum / xs.size
    (mean, Math.sqrt(variance))
  }

  /** Closed interval used to express "within ± error" as a labelled Prop. */
  case class Bounds(min: Double, max: Double) {
    def contains(x: Double): Prop =
      Prop(min <= x && x <= max) :| s"($min <= $x <= $max) was false"
  }

  /** Enables the `expected ± error` syntax on doubles. */
  implicit class MakeBounds(val n: Double) extends AnyVal {
    def ±(error: Double): Bounds = Bounds(n - error, n + error)
  }
}
| rickynils/scalacheck | src/test/scala/org/scalacheck/StatsSpecification.scala | Scala | bsd-3-clause | 3,327 |
package io.scalajs.nodejs
package url
import io.scalajs.util.JSONHelper._
import io.scalajs.util.JsUnderOrHelper._
import org.scalatest.FunSpec
/**
* URLObject Tests
* @author lawrence.daniels@gmail.com
*/
class URLObjectTest extends FunSpec {

  describe("URLObject") {
    // Parse one representative search URL and verify each of its components.
    val rawUrl = "https://www.google.com/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=node"
    val parsed = URL.parse(rawUrl)

    it("should break down URLs into components") {
      // Full JSON rendering of every parsed component.
      val expectedJson =
        """{"protocol":"https:","slashes":true,"auth":null,"host":"www.google.com","port":null,"hostname":"www.google.com","hash":"#q=node","search":"?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","query":"sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","pathname":"/webhp","path":"/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8","href":"https://www.google.com/webhp?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8#q=node"}"""
      assert(parsed.toJson == expectedJson)
    }

    it("should be properly extracted") {
      // Query string without the leading '?'.
      assert(parsed.query ?== "sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8")
    }

    it("should properly extract the search query") {
      // Search includes the leading '?'.
      assert(parsed.search ?== "?sourceid=chrome-instant&ion=1&espv=2&ie=UTF-8")
    }

    it("should reconstituted the URL to match the original") {
      // Round trip: parse then format should be lossless.
      assert(URL.format(parsed) == rawUrl)
    }
  }
}
| scalajs-io/nodejs | app/current/src/test/scala/io/scalajs/nodejs/url/URLObjectTest.scala | Scala | apache-2.0 | 1,356 |
package io.netflow.lib
import java.net.InetAddress
import org.joda.time.DateTime
/** Records a datagram together with its receive time and sender address —
  * presumably one that failed to parse (name suggests so; TODO confirm at call sites). */
case class BadDatagram(date: DateTime, sender: InetAddress)
| ayscb/netflow | netflow1/netflow-master/src/main/scala/io/netflow/lib/BadDatagram.scala | Scala | apache-2.0 | 144 |
package example
/**
 * One step in the fixed sequence of stimuli shown during a trial.
 *
 * @param duration how long this item is displayed (units not specified in
 *                 this file — presumably milliseconds; TODO confirm)
 */
abstract class WhatToShow2(duration: Int) {
  /** Display duration this item was constructed with. */
  def getDuration: Int = this.duration

  /** The item that follows this one in the trial sequence. */
  def moveToNext(): WhatToShow2
}

// todo add choosing times from list based on smth

/**
 * Fixation cross. The duration handed to the following [[ImageQ]] is kept in
 * a mutable field so it can be tuned at runtime (defaults to 33).
 */
case class Cross(duration: Int) extends WhatToShow2(duration) {
  // Duration passed on to the ImageQ produced by moveToNext().
  var currentDuration = 33

  def setDuration(duration: Int) = {
    currentDuration = duration
  }

  override def moveToNext(): WhatToShow2 = ImageQ(currentDuration)
}

/** Image question; always followed by a 120-duration [[Mask]]. */
case class ImageQ(duration: Int) extends WhatToShow2(duration) {
  // Case-class apply (was `new Mask(120)`) for consistency with the rest of
  // this file's constructors; behavior is identical.
  override def moveToNext(): WhatToShow2 = Mask(120)
}

/** Rest break; restarts the sequence with a 500-duration [[Cross]]. */
case class RestPeriod(duration: Int) extends WhatToShow2(duration) {
  override def moveToNext(): WhatToShow2 = Cross(500)
}

/** Choice question; follows itself with a sentinel duration of -1. */
case class ChoiceQuestion(duration: Int) extends WhatToShow2(duration) {
  override def moveToNext(): WhatToShow2 = ChoiceQuestion(-1)
}

/** Visual mask; followed by a [[ChoiceQuestion]] with sentinel duration -1. */
case class Mask(duration: Int) extends WhatToShow2(duration) {
  override def moveToNext(): WhatToShow2 = ChoiceQuestion(-1)
}
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.handler
import com.ibm.spark.kernel.protocol.v5._
import com.ibm.spark.kernel.protocol.v5.content.KernelInfoReply
import com.ibm.spark.utils.LogLike
import play.api.libs.json.Json
import scala.concurrent._
/**
* Receives a KernelInfoRequest KernelMessage and returns a KernelInfoReply
* KernelMessage.
*/
class KernelInfoRequestHandler(actorLoader: ActorLoader)
  extends BaseHandler(actorLoader) with LogLike
{
  /**
   * Handles a kernel_info_request: builds a [[KernelInfoReply]] describing
   * this kernel and forwards it to the kernel message relay actor.
   *
   * @param kernelMessage the incoming request message
   * @return a future that completes once the reply has been dispatched
   */
  def process(kernelMessage: KernelMessage): Future[_] = {
    import scala.concurrent.ExecutionContext.Implicits.global
    // `Future { ... }` replaces the lower-case `future { ... }` builder,
    // which is deprecated since Scala 2.11; behavior is unchanged.
    Future {
      logger.debug("Sending kernel info reply message")

      val kernelInfo = SparkKernelInfo
      val kernelInfoReply = KernelInfoReply(
        kernelInfo.protocolVersion,
        kernelInfo.implementation,
        kernelInfo.implementationVersion,
        kernelInfo.language,
        kernelInfo.languageVersion,
        kernelInfo.banner
      )

      // TODO could we use HeaderBuilder here?
      // Fresh message/session ids; type marks this as a kernel_info_reply.
      val replyHeader = Header(
        java.util.UUID.randomUUID.toString,
        "",
        java.util.UUID.randomUUID.toString,
        MessageType.KernelInfoReply.toString,
        kernelInfo.protocolVersion
      )

      // Echo the requester's ids and parent header so the reply is routed
      // back to the right client; signature is left blank here.
      val kernelResponseMessage = KMBuilder()
        .withIds(kernelMessage.ids)
        .withSignature("")
        .withHeader(replyHeader)
        .withParent(kernelMessage)
        .withContentString(kernelInfoReply).build

      actorLoader.load(SystemActorType.KernelMessageRelay) ! kernelResponseMessage
    }
  }
}
import blended.sbt.Dependencies
// sbt build definition for the `blended.util.logging` module.
object BlendedUtilLogging extends ProjectFactory {
  // Shared project settings: artifact name, description, and the
  // compile-scope dependency on slf4j.
  private[this] val helper = new ProjectSettings(
    projectName = "blended.util.logging",
    description = "Logging utility classes to use in other bundles",
    deps = Seq(
      Dependencies.slf4j
    )
  )

  override val project = helper.baseProject
}
| lefou/blended | project/BlendedUtilLogging.scala | Scala | apache-2.0 | 345 |
// src/main/scala/progscala2/typesystem/structuraltypes/Observer.scala
package progscala2.typesystem.structuraltypes
/**
 * Observer pattern built on a structural type: any object exposing a
 * `receiveUpdate(state: Any): Unit` method may register as an observer.
 * NOTE: structural-type member calls go through runtime reflection.
 */
trait Subject {

  import scala.language.reflectiveCalls

  /** The state type broadcast to observers; fixed by concrete subjects. */
  type State

  /** Anything with a matching `receiveUpdate` method qualifies. */
  type Observer = { def receiveUpdate(state: Any): Unit }

  // Most recently registered observer first.
  private var observers: List[Observer] = List.empty

  /** Registers `observer` to be notified on subsequent updates. */
  def addObserver(observer: Observer): Unit = {
    observers = observer :: observers
  }

  /** Pushes `state` to every registered observer. */
  def notifyObservers(state: State): Unit =
    for (o <- observers) o.receiveUpdate(state)
}
| sunilrebel/programming-scala | examples/src/main/scala/progscala2/typesystem/structuraltypes/observer.scala | Scala | mpl-2.0 | 669 |
package retronym.commons
import BooleanW._
object PartialFunctionW {
  // Implicitly wraps any PartialFunction so the enrichment methods on
  // PartialFunctionW (e.g. toFunction1) become available on it.
  implicit def PartialFunctionToPartialFunctionW[A, B](pf: PartialFunction[A, B]) : PartialFunctionW[A, B] = {
    new PartialFunctionW(pf)
  }
}
// Enrichment wrapper adding convenience methods to PartialFunction.
class PartialFunctionW[-A, +B](pf: PartialFunction[A, B]) {
  // Totalizes the partial function into A => Option[B]. Relies on
  // BooleanW.iff — presumably Some(pf(v1)) when the function is defined at
  // v1 and None otherwise; TODO confirm against BooleanW's definition.
  def toFunction1 : Function1[A, Option[B]] = {
    (v1: A) => pf.isDefinedAt(v1).iff(pf(v1))
  }
}
| retronym/scala-sandbox | commons/src/main/scala/retronym/commons/PartialFunctionW.scala | Scala | mit | 378 |
package temportalist.esotericraft.main.server
import temportalist.esotericraft.main.common.ProxyCommon
/**
 * Dedicated-server side proxy. Currently declares no members of its own and
 * inherits all behaviour from [[ProxyCommon]].
 *
 * Created by TheTemportalist on 12/31/2015.
 */
class ProxyServer extends ProxyCommon {
}
| TheTemportalist/EsoTeriCraft | src/main/scala/temportalist/esotericraft/main/server/ProxyServer.scala | Scala | apache-2.0 | 203 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.{DataInput, DataOutput, File, PrintWriter}
import java.util.{ArrayList, Arrays, Properties}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.udf.UDAFPercentile
import org.apache.hadoop.hive.ql.udf.generic._
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject
import org.apache.hadoop.hive.serde2.{AbstractSerDe, SerDeStats}
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory
import org.apache.hadoop.io.Writable
import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.util.Utils
// Five-field struct returned by the "getStruct" Spark SQL UDF registered in
// HiveUDFSuite below.
case class Fields(f1: Int, f2: Int, f3: Int, f4: Int, f5: Int)

// Case classes for the custom UDF's.
// Each wraps a single column used to build the temp views in HiveUDFSuite.
case class IntegerCaseClass(i: Int)
case class ListListIntCaseClass(lli: Seq[(Int, Int, Int)])
case class StringCaseClass(s: String)
case class ListStringCaseClass(l: Seq[String])
/**
* A test suite for Hive custom UDFs.
*/
class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {

  // SparkSession helpers: `udf` for registration, implicits for toDF()/Encoders.
  import spark.udf
  import spark.implicits._

  test("spark sql udf test that returns a struct") {
    udf.register("getStruct", (_: Int) => Fields(1, 2, 3, 4, 5))
    assert(sql(
      """
        |SELECT getStruct(1).f1,
        | getStruct(1).f2,
        | getStruct(1).f3,
        | getStruct(1).f4,
        | getStruct(1).f5 FROM src LIMIT 1
      """.stripMargin).head() === Row(1, 2, 3, 4, 5))
  }

  test("SPARK-4785 When called with arguments referring column fields, PMOD throws NPE") {
    checkAnswer(
      sql("SELECT PMOD(CAST(key as INT), 10) FROM src LIMIT 1"),
      Row(8)
    )
  }

  // Exercises the PairSerDe / PairUDF helper classes defined at the bottom
  // of this file.
  test("hive struct udf") {
    sql(
      """
      |CREATE TABLE hiveUDFTestTable (
      | pair STRUCT<id: INT, value: INT>
      |)
      |PARTITIONED BY (partition STRING)
      |ROW FORMAT SERDE '%s'
      |STORED AS SEQUENCEFILE
      """.
        stripMargin.format(classOf[PairSerDe].getName))

    val location = Utils.getSparkClassLoader.getResource("data/files/testUDF").getFile
    sql(s"""
      ALTER TABLE hiveUDFTestTable
      ADD IF NOT EXISTS PARTITION(partition='testUDF')
      LOCATION '$location'""")

    sql(s"CREATE TEMPORARY FUNCTION testUDF AS '${classOf[PairUDF].getName}'")
    sql("SELECT testUDF(pair) FROM hiveUDFTestTable")
    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDF")
  }

  test("Max/Min on named_struct") {
    checkAnswer(sql(
      """
        |SELECT max(named_struct(
        | "key", key,
        | "value", value)).value FROM src
      """.stripMargin), Seq(Row("val_498")))
    checkAnswer(sql(
      """
        |SELECT min(named_struct(
        | "key", key,
        | "value", value)).value FROM src
      """.stripMargin), Seq(Row("val_0")))

    // nested struct cases
    checkAnswer(sql(
      """
        |SELECT max(named_struct(
        | "key", named_struct(
        "key", key,
        "value", value),
        | "value", value)).value FROM src
      """.stripMargin), Seq(Row("val_498")))
    checkAnswer(sql(
      """
        |SELECT min(named_struct(
        | "key", named_struct(
        "key", key,
        "value", value),
        | "value", value)).value FROM src
      """.stripMargin), Seq(Row("val_0")))
  }

  test("SPARK-6409 UDAF Average test") {
    sql(s"CREATE TEMPORARY FUNCTION test_avg AS '${classOf[GenericUDAFAverage].getName}'")
    checkAnswer(
      sql("SELECT test_avg(1), test_avg(substr(value,5)) FROM src"),
      Seq(Row(1.0, 260.182)))
    sql("DROP TEMPORARY FUNCTION IF EXISTS test_avg")
    hiveContext.reset()
  }

  test("SPARK-2693 udaf aggregates test") {
    checkAnswer(sql("SELECT percentile(key, 1) FROM src LIMIT 1"),
      sql("SELECT max(key) FROM src").collect().toSeq)

    checkAnswer(sql("SELECT percentile(key, array(1, 1)) FROM src LIMIT 1"),
      sql("SELECT array(max(key), max(key)) FROM src").collect().toSeq)
  }

  test("SPARK-16228 Percentile needs explicit cast to double") {
    sql("select percentile(value, cast(0.5 as double)) from values 1,2,3 T(value)")
    sql("select percentile_approx(value, cast(0.5 as double)) from values 1.0,2.0,3.0 T(value)")
    sql("select percentile(value, 0.5) from values 1,2,3 T(value)")
    sql("select percentile_approx(value, 0.5) from values 1.0,2.0,3.0 T(value)")
  }

  test("Generic UDAF aggregates") {
    checkAnswer(sql("SELECT ceiling(percentile_approx(key, 0.99999D)) FROM src LIMIT 1"),
      sql("SELECT max(key) FROM src LIMIT 1").collect().toSeq)

    checkAnswer(sql("SELECT percentile_approx(100.0D, array(0.9D, 0.9D)) FROM src LIMIT 1"),
      sql("SELECT array(100, 100) FROM src LIMIT 1").collect().toSeq)
  }

  test("UDFIntegerToString") {
    val testData = spark.sparkContext.parallelize(
      IntegerCaseClass(1) :: IntegerCaseClass(2) :: Nil).toDF()
    testData.createOrReplaceTempView("integerTable")

    val udfName = classOf[UDFIntegerToString].getName
    sql(s"CREATE TEMPORARY FUNCTION testUDFIntegerToString AS '$udfName'")
    checkAnswer(
      sql("SELECT testUDFIntegerToString(i) FROM integerTable"),
      Seq(Row("1"), Row("2")))
    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFIntegerToString")

    hiveContext.reset()
  }

  test("UDFToListString") {
    val testData = spark.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.createOrReplaceTempView("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToListString AS '${classOf[UDFToListString].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToListString(s) FROM inputTable")
    }
    assert(errMsg.getMessage contains "List type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch a component type in List<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToListString")
    hiveContext.reset()
  }

  test("UDFToListInt") {
    val testData = spark.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.createOrReplaceTempView("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToListInt AS '${classOf[UDFToListInt].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToListInt(s) FROM inputTable")
    }
    assert(errMsg.getMessage contains "List type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch a component type in List<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToListInt")
    hiveContext.reset()
  }

  test("UDFToStringIntMap") {
    val testData = spark.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.createOrReplaceTempView("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToStringIntMap " +
      s"AS '${classOf[UDFToStringIntMap].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToStringIntMap(s) FROM inputTable")
    }
    assert(errMsg.getMessage contains "Map type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch key and value types in Map<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToStringIntMap")
    hiveContext.reset()
  }

  test("UDFToIntIntMap") {
    val testData = spark.sparkContext.parallelize(StringCaseClass("") :: Nil).toDF()
    testData.createOrReplaceTempView("inputTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFToIntIntMap " +
      s"AS '${classOf[UDFToIntIntMap].getName}'")
    val errMsg = intercept[AnalysisException] {
      sql("SELECT testUDFToIntIntMap(s) FROM inputTable")
    }
    assert(errMsg.getMessage contains "Map type in java is unsupported because " +
      "JVM type erasure makes spark fail to catch key and value types in Map<>;")

    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToIntIntMap")
    hiveContext.reset()
  }

  test("UDFListListInt") {
    val testData = spark.sparkContext.parallelize(
      ListListIntCaseClass(Nil) ::
        ListListIntCaseClass(Seq((1, 2, 3))) ::
        ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) :: Nil).toDF()
    testData.createOrReplaceTempView("listListIntTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFListListInt AS '${classOf[UDFListListInt].getName}'")
    checkAnswer(
      sql("SELECT testUDFListListInt(lli) FROM listListIntTable"),
      Seq(Row(0), Row(2), Row(13)))
    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListListInt")

    hiveContext.reset()
  }

  test("UDFListString") {
    val testData = spark.sparkContext.parallelize(
      ListStringCaseClass(Seq("a", "b", "c")) ::
        ListStringCaseClass(Seq("d", "e")) :: Nil).toDF()
    testData.createOrReplaceTempView("listStringTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFListString AS '${classOf[UDFListString].getName}'")
    checkAnswer(
      sql("SELECT testUDFListString(l) FROM listStringTable"),
      Seq(Row("a,b,c"), Row("d,e")))
    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFListString")

    hiveContext.reset()
  }

  test("UDFStringString") {
    val testData = spark.sparkContext.parallelize(
      StringCaseClass("world") :: StringCaseClass("goodbye") :: Nil).toDF()
    testData.createOrReplaceTempView("stringTable")

    sql(s"CREATE TEMPORARY FUNCTION testStringStringUDF AS '${classOf[UDFStringString].getName}'")
    checkAnswer(
      sql("SELECT testStringStringUDF(\\"hello\\", s) FROM stringTable"),
      Seq(Row("hello world"), Row("hello goodbye")))

    checkAnswer(
      sql("SELECT testStringStringUDF(\\"\\", testStringStringUDF(\\"hello\\", s)) FROM stringTable"),
      Seq(Row(" hello world"), Row(" hello goodbye")))

    sql("DROP TEMPORARY FUNCTION IF EXISTS testStringStringUDF")

    hiveContext.reset()
  }

  test("UDFTwoListList") {
    val testData = spark.sparkContext.parallelize(
      ListListIntCaseClass(Nil) ::
        ListListIntCaseClass(Seq((1, 2, 3))) ::
        ListListIntCaseClass(Seq((4, 5, 6), (7, 8, 9))) ::
        Nil).toDF()
    testData.createOrReplaceTempView("TwoListTable")

    sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
    checkAnswer(
      sql("SELECT testUDFTwoListList(lli, lli) FROM TwoListTable"),
      Seq(Row("0, 0"), Row("2, 2"), Row("13, 13")))
    sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFTwoListList")

    hiveContext.reset()
  }

  // Every Hive UDF flavor (simple UDF, generic UDF, UDAF, resolver, UDTF)
  // should fail analysis — not execution — when called with too few args.
  test("Hive UDFs with insufficient number of input arguments should trigger an analysis error") {
    Seq((1, 2)).toDF("a", "b").createOrReplaceTempView("testUDF")

    {
      // HiveSimpleUDF
      sql(s"CREATE TEMPORARY FUNCTION testUDFTwoListList AS '${classOf[UDFTwoListList].getName}'")
      val message = intercept[AnalysisException] {
        sql("SELECT testUDFTwoListList() FROM testUDF")
      }.getMessage
      assert(message.contains("No handler for Hive UDF"))
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFTwoListList")
    }

    {
      // HiveGenericUDF
      sql(s"CREATE TEMPORARY FUNCTION testUDFAnd AS '${classOf[GenericUDFOPAnd].getName}'")
      val message = intercept[AnalysisException] {
        sql("SELECT testUDFAnd() FROM testUDF")
      }.getMessage
      assert(message.contains("No handler for Hive UDF"))
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFAnd")
    }

    {
      // Hive UDAF
      sql(s"CREATE TEMPORARY FUNCTION testUDAFPercentile AS '${classOf[UDAFPercentile].getName}'")
      val message = intercept[AnalysisException] {
        sql("SELECT testUDAFPercentile(a) FROM testUDF GROUP BY b")
      }.getMessage
      assert(message.contains("No handler for Hive UDF"))
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDAFPercentile")
    }

    {
      // AbstractGenericUDAFResolver
      sql(s"CREATE TEMPORARY FUNCTION testUDAFAverage AS '${classOf[GenericUDAFAverage].getName}'")
      val message = intercept[AnalysisException] {
        sql("SELECT testUDAFAverage() FROM testUDF GROUP BY b")
      }.getMessage
      assert(message.contains("No handler for Hive UDF"))
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDAFAverage")
    }

    {
      // Hive UDTF
      sql(s"CREATE TEMPORARY FUNCTION testUDTFExplode AS '${classOf[GenericUDTFExplode].getName}'")
      val message = intercept[AnalysisException] {
        sql("SELECT testUDTFExplode() FROM testUDF")
      }.getMessage
      assert(message.contains("No handler for Hive UDF"))
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDTFExplode")
    }

    spark.catalog.dropTempView("testUDF")
  }

  test("Hive UDF in group by") {
    withTempView("tab1") {
      Seq(Tuple1(1451400761)).toDF("test_date").createOrReplaceTempView("tab1")
      sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS '${classOf[GenericUDFToDate].getName}'")
      val count = sql("select testUDFToDate(cast(test_date as timestamp))" +
        " from tab1 group by testUDFToDate(cast(test_date as timestamp))").count()
      sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToDate")
      assert(count == 1)
    }
  }

  // input_file_name() must report the underlying file path for CSV-SerDe,
  // delimited-text, external Parquet and managed Parquet tables alike.
  test("SPARK-11522 select input_file_name from non-parquet table") {

    withTempDir { tempDir =>

      // EXTERNAL OpenCSVSerde table pointing to LOCATION
      val file1 = new File(tempDir + "/data1")
      val writer1 = new PrintWriter(file1)
      writer1.write("1,2")
      writer1.close()

      val file2 = new File(tempDir + "/data2")
      val writer2 = new PrintWriter(file2)
      writer2.write("1,2")
      writer2.close()

      sql(
        s"""CREATE EXTERNAL TABLE csv_table(page_id INT, impressions INT)
         ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde'
         WITH SERDEPROPERTIES (
           \\"separatorChar\\" = \\",\\",
           \\"quoteChar\\" = \\"\\\\\\"\\",
           \\"escapeChar\\" = \\"\\\\\\\\\\")
         LOCATION '$tempDir'
      """)

      val answer1 =
        sql("SELECT input_file_name() FROM csv_table").head().getString(0)
      assert(answer1.contains("data1") || answer1.contains("data2"))

      val count1 = sql("SELECT input_file_name() FROM csv_table").distinct().count()
      assert(count1 == 2)
      sql("DROP TABLE csv_table")

      // EXTERNAL pointing to LOCATION
      sql(
        s"""CREATE EXTERNAL TABLE external_t5 (c1 int, c2 int)
         ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
         LOCATION '$tempDir'
      """)

      val answer2 =
        sql("SELECT input_file_name() as file FROM external_t5").head().getString(0)
      // Fixed: this previously re-asserted answer1, leaving answer2 unchecked.
      assert(answer2.contains("data1") || answer2.contains("data2"))

      val count2 = sql("SELECT input_file_name() as file FROM external_t5").distinct().count
      assert(count2 == 2)
      sql("DROP TABLE external_t5")
    }

    withTempDir { tempDir =>

      // External parquet pointing to LOCATION
      val parquetLocation = tempDir + "/external_parquet"
      sql("SELECT 1, 2").write.parquet(parquetLocation)
      sql(
        s"""CREATE EXTERNAL TABLE external_parquet(c1 int, c2 int)
         STORED AS PARQUET
         LOCATION '$parquetLocation'
      """)

      val answer3 =
        sql("SELECT input_file_name() as file FROM external_parquet").head().getString(0)
      assert(answer3.contains("external_parquet"))

      val count3 = sql("SELECT input_file_name() as file FROM external_parquet").distinct().count
      assert(count3 == 1)
      sql("DROP TABLE external_parquet")
    }

    // Non-External parquet pointing to /tmp/...
    sql("CREATE TABLE parquet_tmp STORED AS parquet AS SELECT 1, 2")
    val answer4 =
      sql("SELECT input_file_name() as file FROM parquet_tmp").head().getString(0)
    assert(answer4.contains("parquet_tmp"))

    val count4 = sql("SELECT input_file_name() as file FROM parquet_tmp").distinct().count
    assert(count4 == 1)
    sql("DROP TABLE parquet_tmp")
  }
}
// Hadoop Writable wrapping an (Int, Int) pair; used by PairSerDe/PairUDF in
// the "hive struct udf" test.
class TestPair(x: Int, y: Int) extends Writable with Serializable {
  // Hadoop instantiates Writables reflectively via the no-arg constructor.
  def this() = this(0, 0)

  var entry: (Int, Int) = (x, y)

  // Serializes both pair members with DataOutput.writeInt.
  override def write(output: DataOutput): Unit = {
    output.writeInt(entry._1)
    output.writeInt(entry._2)
  }

  // Restores the pair from the same two-int wire format written above.
  override def readFields(input: DataInput): Unit = {
    val x = input.readInt()
    val y = input.readInt()
    entry = (x, y)
  }
}
// Minimal Hive SerDe used by the "hive struct udf" test: it deserializes a
// TestPair Writable into a row with a single struct<id:int, value:int>
// column named "pair". Serialization and stats are intentionally unsupported.
class PairSerDe extends AbstractSerDe {
  // No configuration is needed for this test SerDe.
  override def initialize(p1: Configuration, p2: Properties): Unit = {}

  // Row schema: struct< pair: struct<id: int, value: int> >
  override def getObjectInspector: ObjectInspector = {
    ObjectInspectorFactory
      .getStandardStructObjectInspector(
        Arrays.asList("pair"),
        Arrays.asList(ObjectInspectorFactory.getStandardStructObjectInspector(
          Arrays.asList("id", "value"),
          Arrays.asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
            PrimitiveObjectInspectorFactory.javaIntObjectInspector))
        ))
  }

  override def getSerializedClass: Class[_ <: Writable] = classOf[TestPair]

  // Write path and stats are unused by the read-only test, hence null.
  override def getSerDeStats: SerDeStats = null

  override def serialize(p1: scala.Any, p2: ObjectInspector): Writable = null

  // Unpacks the TestPair into the nested-list row shape Hive expects:
  // a one-column row whose single cell is the [id, value] struct.
  override def deserialize(value: Writable): AnyRef = {
    val pair = value.asInstanceOf[TestPair]

    val row = new ArrayList[ArrayList[AnyRef]]
    row.add(new ArrayList[AnyRef](2))
    row.get(0).add(Integer.valueOf(pair.entry._1))
    row.get(0).add(Integer.valueOf(pair.entry._2))

    row
  }
}
// GenericUDF companion to PairSerDe: declares a struct<id:int, value:int>
// output inspector and evaluates to the second element of the TestPair.
class PairUDF extends GenericUDF {
  override def initialize(p1: Array[ObjectInspector]): ObjectInspector =
    ObjectInspectorFactory.getStandardStructObjectInspector(
      Arrays.asList("id", "value"),
      Arrays.asList(PrimitiveObjectInspectorFactory.javaIntObjectInspector,
        PrimitiveObjectInspectorFactory.javaIntObjectInspector)
    )

  // Returns the second member of the wrapped (Int, Int) tuple, boxed.
  override def evaluate(args: Array[DeferredObject]): AnyRef = {
    Integer.valueOf(args(0).get.asInstanceOf[TestPair].entry._2)
  }

  // No pretty-printed SQL representation needed for the test.
  override def getDisplayString(p1: Array[String]): String = ""
}
| MrCodeYu/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala | Scala | apache-2.0 | 18,809 |
package us.feliscat.text.normalizer.en
import us.feliscat.m17n.English
import us.feliscat.text.StringOption
import us.feliscat.text.normalizer.EscapeObject
/**
 * English escape-noun list, backed by the resource file `escape_noun.txt`.
 * (The exact semantics of an "escape noun" are defined by [[EscapeObject]],
 * which is not visible in this file.)
 *
 * @author K.Sakamoto
 *         Created on 2016/08/07
 */
object EnglishEscapeNoun extends EscapeObject(StringOption("escape_noun.txt")) with English
| ktr-skmt/FelisCatusZero-multilingual | libraries/src/main/scala/us/feliscat/text/normalizer/en/EnglishEscapeNoun.scala | Scala | apache-2.0 | 316 |
/*
* Copyright 2015 University of Basel, Graphics and Vision Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalismo.mesh.boundingSpheres
import breeze.linalg.{max, min}
import scalismo.ScalismoTestSuite
import scalismo.common.{PointId, UnstructuredPoints, UnstructuredPointsDomain}
import scalismo.geometry.{_3D, EuclideanVector, Point, Point3D}
import scalismo.mesh.{
BarycentricCoordinates4,
TetrahedralCell,
TetrahedralList,
TetrahedralMesh3D,
TriangleCell,
TriangleList,
TriangleMesh3D
}
import scalismo.utils.Random
import scala.collection.parallel.immutable.ParVector
class MeshSurfaceDistanceTests extends ScalismoTestSuite {
implicit val rnd: Random = Random(42)
def gen(offset: Double = 0.0, scale: Double = 1.0): Double = rnd.scalaRandom.nextDouble() * scale + offset
def randomPoint(offset: Double = 0.0, scale: Double = 1.0): Point[_3D] = {
Point(gen(offset, scale), gen(offset, scale), gen(offset, scale))
}
def randomVector(offset: Double = 0.0, scale: Double = 1.0): EuclideanVector[_3D] = {
EuclideanVector(gen(offset, scale), gen(offset, scale), gen(offset, scale))
}
private def randomTriangle(offset: Double, scale: Double): Triangle = {
val a = randomVector(offset, scale)
val b = randomVector(offset, scale)
val c = randomVector(offset, scale)
Triangle(a, b, c)
}
private def randomTetrahedron(offset: Double, scale: Double): Tetrahedron = {
val a = randomVector(offset, scale)
val b = randomVector(offset, scale)
val c = randomVector(offset, scale)
val d = randomVector(offset, scale)
if (BoundingSphereHelpers.calculateSignedVolume(a, b, c, d) > 0)
Tetrahedron(a, b, c, d)
else
Tetrahedron(a, b, d, c)
}
def createTetrahedronsInUnitCube(): TetrahedralMesh3D = {
// points around unit cube
val points = IndexedSeq(Point(0, 0, 0),
Point(1, 0, 0),
Point(1, 1, 0),
Point(0, 1, 0),
Point(0, 0, 1),
Point(1, 0, 1),
Point(1, 1, 1),
Point(0, 1, 1))
val domain = UnstructuredPoints(points)
// cells covering the complete cube
implicit def intToPointId(i: Int): PointId = PointId(i)
val cells = IndexedSeq(TetrahedralCell(0, 2, 7, 3),
TetrahedralCell(0, 2, 5, 1),
TetrahedralCell(2, 5, 7, 6),
TetrahedralCell(0, 5, 7, 4),
TetrahedralCell(0, 2, 5, 7))
val list = TetrahedralList(cells)
TetrahedralMesh3D(domain, list)
}
def uniform(min: Double = 50.0, max: Double = 50.0): Double = { rnd.scalaRandom.nextDouble() * (max - min) + min }
def createCoLinearTriangle(): Triangle = {
val b = EuclideanVector(uniform(), uniform(), uniform())
val m = EuclideanVector(uniform(), uniform(), uniform())
Triangle(b + m * uniform(), b + m * uniform(), b + m * uniform())
}
def createSinglePointTriangle(): Triangle = {
val b = EuclideanVector(uniform(), uniform(), uniform())
Triangle(b, b, b)
}
def aeqV[D](a: EuclideanVector[D], b: EuclideanVector[D], theta: Double = 1.0e-8): Boolean = {
a.toArray.zip(b.toArray).forall(p => aeq(p._1, p._2, theta))
}
def aeqP[D](a: Point[D], b: Point[D], theta: Double = 1.0e-8): Boolean = {
a.toArray.zip(b.toArray).forall(p => aeq(p._1, p._2, theta))
}
def aeq(a: Double, b: Double, theta: Double = 1.0e-8): Boolean = {
a - b < theta
}
describe("The SurfaceDistance") {
it("should use correct barycentric coordinate for points on a line") {
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0.0, 1.0), (10.0, 10.0), (100.0, 100.0), (-10.0, 10.0))
pairs.foreach { pair =>
val a = randomVector(pair._1, pair._2)
val b = randomVector(pair._1, pair._2)
(0 until 100) foreach { _ =>
val s = gen()
val p = a + (b - a) * s
val res = BSDistance.toLineSegment(p, a, b)
if ((b - a).norm2 < Double.MinPositiveValue) {
res.bc._1 shouldBe 0.5 +- 1.0e-8
res.pt shouldBe (a + b) * 0.5
} else {
res.bc._1 shouldBe s +- 1.0e-8
if (res.bc._1 > 0.0) {
if (res.bc._1 < 1.0) {
res.ptType shouldBe SurfaceClosestPointType.ON_LINE
aeqV(res.pt, p) shouldBe true
} else {
res.ptType shouldBe SurfaceClosestPointType.POINT
res.idx._1 shouldBe 1
aeqV(res.pt, b) shouldBe true
}
} else {
res.ptType shouldBe SurfaceClosestPointType.POINT
res.idx._1 shouldBe 0
aeqV(res.pt, a) shouldBe true
}
}
}
}
}
}
it("should use correct barycentric coordinate for points away from the line") {
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0.0, 1.0), (10.0, 10.0), (100.0, 100.0), (-10.0, 10.0))
pairs.foreach { pair =>
val a = randomVector(pair._1, pair._2)
val b = randomVector(pair._1, pair._2)
(0 until 100) foreach { _ =>
val s = gen()
val ab = b - a
val k = ab + randomVector()
val n = ab.crossproduct(k)
val p1 = a + ab * s
val p = p1 + n * gen(pair._1, pair._2)
val res = BSDistance.toLineSegment(p, a, b)
if ((b - a).norm2 < Double.MinPositiveValue) {
res.bc._1 shouldBe 0.5 +- 1.0e-8
res.pt shouldBe (a + b) * 0.5
} else {
res.bc._1 shouldBe s +- 1.0e-8
if (res.bc._1 > 0.0) {
if (res.bc._1 < 1.0) {
res.ptType shouldBe SurfaceClosestPointType.ON_LINE
aeqV(res.pt, p1) shouldBe true
} else {
res.ptType shouldBe SurfaceClosestPointType.POINT
res.idx._1 shouldBe 1
aeqV(res.pt, b) shouldBe true
}
} else {
res.ptType shouldBe SurfaceClosestPointType.POINT
res.idx._1 shouldBe 0
aeqV(res.pt, a) shouldBe true
}
}
}
}
}
}
it("should return the correct barycentric coordinates in a triangle") {
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0, 1), (10, 10), (100, 100), (-10, 10))
pairs.foreach { pair =>
val tri = randomTriangle(pair._1, pair._2)
(0 until 100) foreach { _ =>
val s = gen()
val t = (1.0 - s) * gen()
val pt = tri.a + tri.ab * s + tri.ac * t
val p = pt + tri.n * gen(pair._1, pair._2)
val ct = BSDistance.toTriangle(p, tri)
val resT = ct.bc
max(0.0, min(1.0, s)) shouldBe resT._1 +- 1.0e-8
max(0.0, min(1.0, t)) shouldBe resT._2 +- 1.0e-8
pt(0) shouldBe ct.pt(0) +- 1.0e-8
pt(1) shouldBe ct.pt(1) +- 1.0e-8
pt(2) shouldBe ct.pt(2) +- 1.0e-8
}
}
}
}
it("should return the correct barycentric coordinates in a tetrahedron") {
val _EPSILON = 1.0e-8
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0, 1), (10, 10), (100, 100), (-10, 10))
pairs.foreach { pair =>
val tet = randomTetrahedron(pair._1, pair._2)
(0 until 100) foreach { _ =>
val bc = BarycentricCoordinates4.randomUniform
val pt = tet.a * bc.a + tet.b * bc.b + tet.c * bc.c + tet.d * bc.d
val ct = BSDistance.toTetrahedron(pt, tet)
val resT = ct.bc
max(0.0, min(1.0, bc.b)) shouldBe resT._1 +- _EPSILON
max(0.0, min(1.0, bc.c)) shouldBe resT._2 +- _EPSILON
max(0.0, min(1.0, bc.d)) shouldBe resT._3 +- _EPSILON
max(0.0, min(1.0, bc.a)) shouldBe (1.0 - resT._1 - resT._2 - resT._3) +- _EPSILON
pt(0) shouldBe ct.pt(0) +- _EPSILON
pt(1) shouldBe ct.pt(1) +- _EPSILON
pt(2) shouldBe ct.pt(2) +- _EPSILON
}
}
}
}
it("should use correct barycentric coordinates for points in triangle plane ") {
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0.0, 1.0), (10.0, 10.0), (100.0, 100.0), (-10.0, 10.0))
pairs.foreach { pair =>
val tri = randomTriangle(pair._1, pair._2)
(0 until 100) foreach { _ =>
val s = gen()
val t = gen()
val p = tri.a + tri.ab * s + tri.ac * t
val res = BSDistance.calculateBarycentricCoordinates(tri, p)
s shouldBe res._1 +- 1.0e-8
t shouldBe res._2 +- 1.0e-8
}
}
}
}
it("should use correct barycentric coordinates for points outside the triangle plane") {
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0, 1), (10, 10), (100, 100), (-10, 10))
pairs.foreach { pair =>
val tri = randomTriangle(pair._1, pair._2)
(0 until 100) foreach { _ =>
val s = gen()
val t = gen()
val p = tri.a + tri.ab * s + tri.ac * t + tri.n * gen(pair._1, pair._2)
val res = BSDistance.calculateBarycentricCoordinates(tri, p)
s shouldBe res._1 +- 1.0e-8
t shouldBe res._2 +- 1.0e-8
}
}
}
}
it("should use correct barycentric coordinates for points randomly placed near or in the tetrahedron") {
val _EPSILON = 1.0e-8
(0 until 100) foreach { _ =>
val pairs = IndexedSeq((0, 1), (10, 10), (100, 100), (-10, 10))
pairs.foreach { pair =>
val tet = randomTetrahedron(pair._1, pair._2)
(0 until 100) foreach { _ =>
val pOff = randomVector(pair._1 * 2, pair._2 * 2)
val bc = // reference implementation tested against VTK
BarycentricCoordinates4.pointInTetrahedron(pOff.toPoint,
tet.a.toPoint,
tet.b.toPoint,
tet.c.toPoint,
tet.d.toPoint)
val res = BSDistance.calculateBarycentricCoordinates(tet, pOff)
bc.b shouldBe res._1 +- 1.0e-8
bc.c shouldBe res._2 +- 1.0e-8
bc.d shouldBe res._3 +- 1.0e-8
bc.a shouldBe (1.0 - res._1 - res._2 - res._3) +- 1.0e-8
}
}
}
}
    it("should use reasonable barycentric coordinates for triangles with three times the same point") {
      // Fully degenerate triangle (all vertices identical): the coordinates
      // must still be a valid partition of unity, with all weight on the
      // first vertex.
      def test(tri: Triangle): Unit = {
        // sweep query points along the x axis, in and beyond the "triangle"
        for (x <- BigDecimal(0) to BigDecimal(2) by BigDecimal(0.1)) {
          val p = EuclideanVector(x.toDouble, 0, 1)
          val bc = BSDistance.calculateBarycentricCoordinates(tri, p)
          (bc._1 + bc._2 + bc._3) shouldBe 1.0 +- 1.0e-8
          val epsilon = 1.0e-12
          bc._1 should be(1.0 +- epsilon)
          bc._2 should be(0.0 +- epsilon)
          bc._3 should be(0.0 +- epsilon)
        }
      }
      for (_ <- 0 until 20) { test(createSinglePointTriangle()) }
    }
it("should use reasonable barycentric coordinates for triangles with only co-linear points") {
def test(tri: Triangle, pt: EuclideanVector[_3D]) = {
val bc = BSDistance.calculateBarycentricCoordinates(tri, pt)
(bc._1 + bc._2 + bc._3) shouldBe 1.0 +- 1.0e-8
val epsilon = 1.0e-12
bc._1 should be >= 0.0 - epsilon
bc._1 should be <= 1.0 + epsilon
bc._2 should be >= 0.0 - epsilon
bc._2 should be <= 1.0 + epsilon
bc._3 should be >= 0.0 - epsilon
bc._3 should be <= 1.0 + epsilon
}
for (_ <- 0 until 40) {
val tri = createCoLinearTriangle()
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(0.0, 0.0, 1.0), EuclideanVector(1.0, 0.0, 1.0), EuclideanVector(2.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(0.0, 0.0, 1.0), EuclideanVector(2.0, 0.0, 1.0), EuclideanVector(1.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(1.0, 0.0, 1.0), EuclideanVector(0.0, 0.0, 1.0), EuclideanVector(2.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(1.0, 0.0, 1.0), EuclideanVector(2.0, 0.0, 1.0), EuclideanVector(0.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(2.0, 0.0, 1.0), EuclideanVector(0.0, 0.0, 1.0), EuclideanVector(1.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
{
val tri =
Triangle(EuclideanVector(2.0, 0.0, 1.0), EuclideanVector(1.0, 0.0, 1.0), EuclideanVector(0.0, 0.0, 1.0))
test(tri, tri.a)
test(tri, tri.b)
test(tri, tri.c)
}
}
    it("should return the same when used for points as the findClosestPoint from UnstructuredPoints") {
      // The spatial index over a bare point list must agree exactly (point and
      // squared distance) with the brute-force UnstructuredPoints search.
      val points = for (_ <- 0 until 10000) yield randomPoint()
      val pd = UnstructuredPoints(points)
      val md = DiscreteSpatialIndex.fromPointList(points)
      (0 until 100) foreach { _ =>
        val p = randomPoint()
        val vpt = pd.findClosestPoint(p)
        val vd = (vpt.point - p).norm2
        val cp = md.closestPoint(p)
        vd shouldBe cp.distanceSquared
        vpt.point shouldBe cp.point
      }
    }
    it(
      "should return an equal or smaller distance when used for points than the findClosestPoint from UnstructuredPoints for triangles"
    ) {
      // A surface query may hit the interior of a triangle, so its squared
      // distance can only be <= the distance to the nearest vertex.
      val triangles = (0 until 100) map { _ =>
        // test if two function lead to same cp
        val a = randomVector()
        val b = randomVector()
        val c = randomVector()
        Triangle(a, b, c)
      }
      val points = triangles.flatMap(t => Array(t.a.toPoint, t.b.toPoint, t.c.toPoint))
      val pd = UnstructuredPoints(points)
      // mesh built with one TriangleCell per triangle (vertices deliberately
      // duplicated, ids 3k, 3k+1, 3k+2)
      val sd = TriangleMesh3DSpatialIndex.fromTriangleMesh3D(
        TriangleMesh3D(
          triangles.flatMap(t => Seq(t.a.toPoint, t.b.toPoint, t.c.toPoint)),
          TriangleList(
            (0 until 3 * triangles.length)
              .grouped(3)
              .map(g => TriangleCell(PointId(g(0)), PointId(g(1)), PointId(g(2))))
              .toIndexedSeq
          )
        )
      )
      (0 until 1000) foreach { _ =>
        val p = randomVector()
        // findClosestPoint from UnstructuredPoints
        val vp = pd.findClosestPoint(p.toPoint)
        val vd = (vp.point - p.toPoint).norm2
        val ge = sd.getClosestPoint(p.toPoint)
        require(vd >= ge.distanceSquared)
      }
    }
it("should return the same closest point on surface result when processing points in parallel") {
val triangles = (0 until 100) map { _ =>
// test if two function lead to same cp
val a = randomVector()
val b = randomVector()
val c = randomVector()
Triangle(a, b, c)
}
val sd = TriangleMesh3DSpatialIndex.fromTriangleMesh3D(
TriangleMesh3D(
triangles.flatMap(t => Seq(t.a.toPoint, t.b.toPoint, t.c.toPoint)),
TriangleList(
(0 until 3 * triangles.length)
.grouped(3)
.map(g => TriangleCell(PointId(g(0)), PointId(g(1)), PointId(g(2))))
.toIndexedSeq
)
)
)
val queries = ParVector.range(0, 100000) map { _ =>
randomVector()
}
val cpsSeq = queries.map(q => sd.getClosestPoint(q.toPoint))
val cpsPar = queries.map(q => sd.getClosestPoint(q.toPoint))
cpsSeq.zip(cpsPar) foreach { pair =>
val seq = pair._1
val par = pair._2
require(seq.point == par.point)
require(seq.distanceSquared == par.distanceSquared)
}
}
    it("should return the same closest point result when processing points in parallel") {
      // Sequential vs. parallel traversal of the same index must produce
      // identical results (point, point id, squared distance) per query.
      val triangles = (0 until 100) map { _ =>
        // test if two function lead to same cp
        val a = randomVector()
        val b = randomVector()
        val c = randomVector()
        Triangle(a, b, c)
      }
      val sd = DiscreteSpatialIndex.fromMesh(
        TriangleMesh3D(
          triangles.flatMap(t => Seq(t.a.toPoint, t.b.toPoint, t.c.toPoint)),
          TriangleList(
            (0 until 3 * triangles.length)
              .grouped(3)
              .map(g => TriangleCell(PointId(g(0)), PointId(g(1)), PointId(g(2))))
              .toIndexedSeq
          )
        )
      )
      val queries = (0 until 100000) map { _ =>
        randomVector()
      }
      // sequential baseline vs. the same queries evaluated on a ParVector
      val cpsSeq = queries.map(q => sd.closestPoint(q.toPoint))
      val cpsPar = new ParVector(queries.toVector).map(q => sd.closestPoint(q.toPoint))
      cpsSeq.zip(cpsPar) foreach { pair =>
        val seq = pair._1
        val par = pair._2
        require(seq.point == par.point)
        require(seq.pid == par.pid)
        require(seq.distanceSquared == par.distanceSquared)
      }
    }
    it("should create correct bounding spheres with values for center and radius which do not contain NaN.") {
      // Degenerate triangles (co-linear or collapsed to a point) must still
      // produce finite bounding-sphere parameters.
      def test(tri: Triangle) = {
        val sphere = Sphere.fromTriangle(tri)
        sphere.r2.isNaN shouldBe false
        sphere.center.x.isNaN shouldBe false
        sphere.center.y.isNaN shouldBe false
        sphere.center.z.isNaN shouldBe false
      }
      for (_ <- 0 until 40) { test(createCoLinearTriangle()) }
      for (_ <- 0 until 40) { test(createSinglePointTriangle()) }
    }
}
  describe("The BoundingSphere") {
    it("should find the correct closest points pairs in a sorted list") {
      // O(n^2) brute-force reference: for every point, find the index of the
      // nearest *other* point and its squared distance.
      def bruteForcePairFinder(sortedPoints: IndexedSeq[(EuclideanVector[_3D], Int)]) = {
        sortedPoints.zipWithIndex.map { e =>
          val spIndex = e._2
          val basePoint = e._1._1
          // start with the next point (wrapping) as the initial candidate
          var bestIndex = (spIndex + 1) % sortedPoints.length
          var d = (basePoint - sortedPoints(bestIndex)._1).norm2
          sortedPoints.indices foreach { j =>
            val runningPoint = sortedPoints(j)._1
            val t = (basePoint - runningPoint).norm2
            if (t < d && j != spIndex) {
              d = t
              bestIndex = j
            }
          }
          (d, bestIndex, e)
        }
      }
      val centers = (0 until 10000) map { _ =>
        randomVector()
      }
      // findClosestPointPairs expects the list sorted by the y coordinate
      val list = centers.sortBy(a => a(1)).zipWithIndex
      val matches = BoundingSpheres.findClosestPointPairs(list)
      val testMatches = bruteForcePairFinder(list)
      matches.zip(testMatches).foreach { res =>
        val m = res._1
        val t = res._2
        m._1 shouldBe t._1
        m._2 shouldBe t._2
        m._3._2 shouldBe t._3._2
        m._3._1._2 shouldBe t._3._1._2
      }
    }
  }
describe("The VolumeDistance") {
    it("should return not a larger value than when querying all triangles or 0 if it is inside a tetrahedron") {
      // Fixed six-point tetrahedral mesh made of four cells; queries are
      // sampled uniformly from [-5, 5]^3 which covers inside and outside.
      val pts = IndexedSeq(
        Point3D(0, 0, 0),
        Point3D(5, 0, 0),
        Point3D(0, 0, 5),
        Point3D(0, 5, 0),
        Point3D(5, 5, 5),
        Point3D(5, 5, 0)
      )
      val tmesh = TetrahedralMesh3D(
        pts,
        TetrahedralList(
          IndexedSeq(
            TetrahedralCell(PointId(0), PointId(2), PointId(1), PointId(4)),
            TetrahedralCell(PointId(0), PointId(3), PointId(2), PointId(4)),
            TetrahedralCell(PointId(0), PointId(1), PointId(3), PointId(4)),
            TetrahedralCell(PointId(1), PointId(5), PointId(3), PointId(4))
          )
        )
      )
      val index = TetrahedralMesh3DSpatialIndex.fromTetrahedralMesh3D(tmesh)
      def genPoint() =
        Point3D(rnd.scalaRandom.nextDouble() * 10 - 5,
                rnd.scalaRandom.nextDouble() * 10 - 5,
                rnd.scalaRandom.nextDouble() * 10 - 5)
      // A point is inside iff some cell yields convex barycentric coordinates.
      def isInside(pt: Point[_3D]) = {
        tmesh.tetrahedrons.exists { cell =>
          val bc = tmesh.getBarycentricCoordinates(pt, cell)
          bc.forall(d => d >= 0.0) &&
          bc.forall(d => d <= 1.0) &&
          bc.sum <= 1.0 + 1.0e-8
        }
      }
      // Brute-force reference: closest point over every face of every cell.
      def closestPointToOneOfTheTriangles(pt: Point[_3D]) = {
        val p = pt.toVector
        tmesh.tetrahedrons
          .flatMap { tc =>
            tc.triangles.map { t =>
              val tr = Triangle(
                tmesh.pointSet.point(t.ptId1).toVector,
                tmesh.pointSet.point(t.ptId2).toVector,
                tmesh.pointSet.point(t.ptId3).toVector
              )
              BSDistance.toTriangle(p, tr)
            }
          }
          .minBy(_.distance2)
      }
      for (i <- 0 until 200) {
        val query = genPoint()
        val cp = index.getClosestPointToVolume(query)
        if (isInside(query)) { // inside of one tetrahedron → cp should be equal query
          assert(cp.isInstanceOf[ClosestPointInTetrahedron])
          (cp.asInstanceOf[ClosestPointInTetrahedron].point - query).norm should be < 1.0e-8
        } else { // outside of a tetrahedron → cp should be the same as the minimal distance given all triangles individually
          val posCP = cp.point.toVector
          val posCPT = closestPointToOneOfTheTriangles(query).pt
          (posCP - posCPT).norm should be < 1.0e-8
        }
      }
    }
it("should allow to reconstruct the closest point based on the meta information") {
val pts = IndexedSeq(
Point3D(0, 0, 0),
Point3D(5, 0, 0),
Point3D(0, 0, 5),
Point3D(0, 5, 0),
Point3D(5, 5, 5),
Point3D(5, 5, 0)
)
val tmesh = TetrahedralMesh3D(
pts,
TetrahedralList(
IndexedSeq(
TetrahedralCell(PointId(0), PointId(2), PointId(1), PointId(4)),
TetrahedralCell(PointId(0), PointId(3), PointId(2), PointId(4)),
TetrahedralCell(PointId(0), PointId(1), PointId(3), PointId(4)),
TetrahedralCell(PointId(1), PointId(5), PointId(3), PointId(4))
)
)
)
val index = TetrahedralMesh3DSpatialIndex.fromTetrahedralMesh3D(tmesh)
def genPoint() =
Point3D(rnd.scalaRandom.nextDouble() * 10 - 5,
rnd.scalaRandom.nextDouble() * 10 - 5,
rnd.scalaRandom.nextDouble() * 10 - 5)
for (i <- 0 until 200) {
val query = genPoint()
val cp = index.getClosestPointToVolume(query)
cp match {
case cpiv: ClosestPointIsVertex =>
val reconstructed = tmesh.pointSet.point(cpiv.pid)
(cp.point - reconstructed).norm < 1.0e-8
case cpol: ClosestPointOnLine =>
val reconstructed = (
tmesh.pointSet.point(cpol.pids._1).toVector * cpol.bc +
tmesh.pointSet.point(cpol.pids._2).toVector * (1.0 - cpol.bc)
).toPoint
(cp.point - reconstructed).norm < 1.0e-8
case cpitot: ClosestPointInTriangleOfTetrahedron =>
val tet = tmesh.tetrahedralization.tetrahedron(cpitot.tetId)
val tri = tet.triangles(cpitot.triId.id)
val reconstructed = (
tmesh.pointSet.point(tri.ptId1).toVector * cpitot.bc.a +
tmesh.pointSet.point(tri.ptId2).toVector * cpitot.bc.b +
tmesh.pointSet.point(tri.ptId3).toVector * cpitot.bc.c
).toPoint
(cp.point - reconstructed).norm < 1.0e-8
case cpit: ClosestPointInTetrahedron =>
val tet = tmesh.tetrahedralization.tetrahedron(cpit.tid)
val reconstructed = (
tmesh.pointSet.point(tet.ptId1).toVector * cpit.bc.a +
tmesh.pointSet.point(tet.ptId2).toVector * cpit.bc.b +
tmesh.pointSet.point(tet.ptId3).toVector * cpit.bc.c +
tmesh.pointSet.point(tet.ptId4).toVector * cpit.bc.d
).toPoint
(cp.point - reconstructed).norm < 1.0e-8
}
}
}
    it("should return the correct point when queried with points from the mesh") {
      // Querying with a mesh vertex must return that vertex itself.
      val mesh = createTetrahedronsInUnitCube()
      for (i <- 0 until mesh.pointSet.numberOfPoints) {
        val query = mesh.pointSet.point(PointId(i))
        val cp = mesh.operations.closestPointToVolume(query)
        (query - cp.point).norm should be < 1.0e-8
        require(cp.isInstanceOf[ClosestPointIsVertex])
      }
    }
    it("should return the correct pointId when queried with points from the mesh") {
      // The reported vertex id must match the id used for the query point.
      val mesh = createTetrahedronsInUnitCube()
      for (i <- 0 until mesh.pointSet.numberOfPoints) {
        val query = mesh.pointSet.point(PointId(i))
        val cp = mesh.operations.closestPointToVolume(query)
        cp.asInstanceOf[ClosestPointIsVertex].pid.id shouldBe i
      }
    }
}
}
| unibas-gravis/scalismo | src/test/scala/scalismo/mesh/boundingSpheres/MeshSurfaceDistanceTests.scala | Scala | apache-2.0 | 25,850 |
package muster
package codec
package argonaut
import org.scalacheck.{Gen, Prop}
import org.specs2.specification.Fragments
import org.specs2.{ScalaCheck, Specification}
import _root_.argonaut._, Argonaut._
/**
 * Property-based round-trip tests for muster's argonaut codec: for each
 * supported target type, a random argonaut `Json` value is generated and
 * muster's `read` must agree with argonaut's own `jdecode`.
 */
class JsonFormatterSpec extends Specification with ScalaCheck {

  def is: Fragments =
    s2"""
  A Json Formatter should
    read a string property $stringProp
    read a bool property $boolProp
    read a int property $intProp
    read a double property $doubleProp
    read a decimal property $bigdecimalProp
    read a list property $stringListProp
    read a mutable list property $mutableListProp
    read a string map property $stringMapProp
    read a map property $mapProp
    read a map list property $mapListProp
    read an int map $mapIntProp
    read an int list map $mapIntListProp
    read a long map $mapLongProp
    read a long list map $mapLongListProp
  """

  // Decode `value` to `T` through muster's argonaut-backed codec.
  def read[T:Consumer](value: Json) = ArgonautCodec.as[T](value)

  // Argonaut has no built-in DecodeJson for non-String map keys; these two
  // instances decode object fields by parsing the field name to Int/Long.
  implicit def MapIntDecodeJson[V](implicit e: DecodeJson[V]): DecodeJson[Map[Int, V]] =
    DecodeJson(a =>
      a.fields match {
        case None => DecodeResult.fail("[V]Map[Int, V]", a.history)
        case Some(s) => {
          // fold over the field names, accumulating decoded (key, value) pairs
          def spin(x: List[JsonField], m: DecodeResult[Map[Int, V]]): DecodeResult[Map[Int, V]] =
            x match {
              case Nil => m
              case h::t =>
                spin(t, for {
                  mm <- m
                  v <- a.get(h)(e)
                } yield mm + ((h.toInt, v)))
            }
          spin(s, DecodeResult.ok(Map.empty[Int, V]))
        }
      }
    )

  implicit def MapLongDecodeJson[V](implicit e: DecodeJson[V]): DecodeJson[Map[Long, V]] =
    DecodeJson(a =>
      a.fields match {
        case None => DecodeResult.fail("[V]Map[Long, V]", a.history)
        case Some(s) => {
          def spin(x: List[JsonField], m: DecodeResult[Map[Long, V]]): DecodeResult[Map[Long, V]] =
            x match {
              case Nil => m
              case h::t =>
                spin(t, for {
                  mm <- m
                  v <- a.get(h)(e)
                } yield mm + ((h.toLong, v)))
            }
          spin(s, DecodeResult.ok(Map.empty[Long, V]))
        }
      }
    )

  // --- generators for primitive Json values ---
  val jstringGen = for {
    s <- Gen.alphaStr
  } yield jString(s)

  val jboolGen = for {
    s <- Gen.oneOf(true, false)
  } yield jBool(s)

  val jintGen = for {
    s <- Gen.chooseNum(Int.MinValue, Int.MaxValue)
  } yield jNumber(s)

  val jdoubleGen = for {
    s <- Gen.chooseNum(Double.MinValue, Double.MaxValue)
  } yield jNumber(s)

  val jdecimalGen = for {
    s <- Gen.chooseNum(Double.MinValue, Double.MaxValue)
  } yield jNumber(s)

  val stringListGen = for {
    s <- Gen.listOf(Gen.alphaStr).map(_.map(jString))
  } yield Json.array(s:_*)

  val stringMapGen = {
    // keys must be non-empty, non-blank strings
    val k = Gen.alphaStr.suchThat(ss => ss != null && ss.trim.nonEmpty)
    val v = k.flatMap(kk => Gen.alphaStr.map(vv => (kk, jString(vv))))
    for {
      s <- Gen.listOf(v)
    } yield Json.obj(s:_*)
  }

  // --- properties ---
  val stringProp = Prop.forAll(jstringGen) { jst =>
    read[String](jst) must_== jst.stringOrEmpty
  }

  val boolProp = Prop.forAll(jboolGen) { jst =>
    read[Boolean](jst) must_== jst.bool.get
  }

  val intProp = Prop.forAll(jintGen) { jst =>
    read[Int](jst) must_== jst.numberOrZero
  }

  val doubleProp = Prop.forAll(jdoubleGen) { jst =>
    read[Double](jst) must_== jst.numberOrZero
  }

  val bigdecimalProp = Prop.forAll(jdecimalGen) { jst =>
    read[BigDecimal](jst) must_== BigDecimal(jst.numberOrZero)
  }

  val stringListProp = Prop.forAll(stringListGen) { jst =>
    read[List[String]](jst) must_== jst.jdecode[List[String]].toOption.get
  }

  val stringMapProp = Prop.forAll(stringMapGen) { jst =>
    read[Map[String, String]](jst) must_== jst.jdecode[Map[String, String]].toOption.get
  }

  import scala.collection.mutable
  val mutableListProp = prop {
    (i: mutable.ListBuffer[Int]) => read[mutable.ListBuffer[Int]](Json.array(i.map(v => jNumber(v)):_*)) must_== i
  }

  val mapGen = {
    for {
      n <- Gen.alphaStr.suchThat(s => s != null && s.trim.nonEmpty)
      m <- Gen.chooseNum(1, 999999999)
      t = (n, jNumber(m))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapProp = Prop.forAll(mapGen) { (json: Json) =>
    read[Map[String, Int]](json) must_== json.jdecode[Map[String, Int]].toOption.get
  }

  val mapListGen = {
    for {
      n <- Gen.alphaStr.suchThat(s => s != null && s.trim.nonEmpty)
      m <- Gen.listOf(Gen.chooseNum(1, 999999999).map(jNumber(_)))
      t = (n, Json.array(m:_*))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapListProp = Prop.forAll(mapListGen) { (json: Json) =>
    read[Map[String, List[Int]]](json) must_== json.jdecode[Map[String, List[Int]]].toOption.get
  }

  // int-keyed maps: field names are stringified positive ints
  val intMapGen = {
    for {
      n <- Gen.posNum[Int]
      m <- Gen.chooseNum(1, 999999999)
      t = (n.toString, jNumber(m))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapIntProp = Prop.forAll(intMapGen) { (json: Json) =>
    read[Map[Int, Int]](json) must_== json.jdecode[Map[Int, Int]].toOption.get
  }

  val intMapListGen = {
    for {
      n <- Gen.posNum[Int]
      m <- Gen.listOf(Gen.chooseNum(1, 999999999).map(jNumber(_)))
      t = (n.toString, Json.array(m:_*))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapIntListProp = Prop.forAll(intMapListGen) { (json: Json) =>
    read[Map[Int, List[Int]]](json) must_== json.jdecode[Map[Int, List[Int]]].toOption.get
  }

  // long-keyed maps: same scheme with Long keys
  val longMapGen = {
    for {
      n <- Gen.posNum[Long]
      m <- Gen.chooseNum(1, 999999999)
      t = (n.toString, jNumber(m))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapLongProp = Prop.forAll(longMapGen) { (json: Json) =>
    read[Map[Long, Int]](json) must_== json.jdecode[Map[Long, Int]].toOption.get
  }

  val longMapListGen = {
    for {
      n <- Gen.posNum[Long]
      m <- Gen.listOf(Gen.chooseNum(1, 999999999).map(jNumber(_)))
      t = (n.toString, Json.array(m:_*))
      r <- Gen.listOf(t)
    } yield Json.obj(r:_*)
  }

  val mapLongListProp = Prop.forAll(longMapListGen) { (json: Json) =>
    read[Map[Long, List[Int]]](json) must_== json.jdecode[Map[Long, List[Int]]].toOption.get
  }
}
} | json4s/muster | codecs/argonaut/src/test/scala/muster/codec/argonaut/JsonFormatterSpec.scala | Scala | mit | 6,338 |
/*
Copyright 2011 the original author or authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package net.gumbix.bioinf.string.dotplot
import net.gumbix.dynpro.Idx
import net.gumbix.ui.{MatrixPanel, Data}
import swing.{MainFrame}
/**
* A tool for dot plots.
* @author Markus Gumbel (m.gumbel@hs-mannheim.de)
*/
object DotPlot {
  /**
   * Entry point: args are the two strings to compare and the window title.
   * No argument validation: fewer than three args raises
   * ArrayIndexOutOfBoundsException.
   */
  def main(args: Array[String]) {
    new DotPlot(args(0), args(1), args(2))
  }
}
/**
* @param s1 First string.
* @param s2 Second string.
* @param windowTitle The title of the frame.
*/
class DotPlot(val s1: String, val s2: String, windowTitle: String) {

  /**
   * Collects one Data entry for every index pair (i, j) where the two strings
   * share the same character. The result is ordered as the previous
   * prepend-based implementation produced it (reverse iteration order).
   */
  def data(s1: String, s2: String) = {
    val hits = for {
      i <- 0 until s1.size
      j <- 0 until s2.size
      if s1(i) == s2(j)
    } yield Data(Idx(i, j), "")
    hits.reverse.toArray
  }

  // Build and show the frame with the dot-plot matrix.
  val top = new MainFrame {
    title = windowTitle
    contents = new MatrixPanel(s1.toArray, s2.toArray, data(s1, s2))
  }
  top.pack()
  top.visible = true
}
} | markusgumbel/scalabioalg | ui/src/main/scala/net/gumbix/bioinf/string/dotplot/DotPlot.scala | Scala | apache-2.0 | 1,496 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.ops.{Mod => ModOps}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import org.tensorflow.framework.{DataType, NodeDef}
import scala.reflect.ClassTag
/**
 * Loader for the tensorflow "Mod" op. Instantiates a ModOps operation whose
 * numeric type matches the node's "T" attribute.
 */
class Mod extends TensorflowOpsLoader {

  import Utils._

  override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder, context: Context[T])
    (implicit ev: TensorNumeric[T]): Module[T] = {
    // Dispatch on the declared element type; same supported set as before.
    getType(nodeDef.getAttrMap, "T") match {
      case DataType.DT_FLOAT  => ModOps[T, Float]()
      case DataType.DT_DOUBLE => ModOps[T, Double]()
      case DataType.DT_INT32  => ModOps[T, Int]()
      case t =>
        throw new UnsupportedOperationException(s"Not support load Mod when type is ${t}")
    }
  }
}
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/Mod.scala | Scala | apache-2.0 | 1,538 |
package computerdatabase.advanced
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
class Advanced1Simulation extends Simulation {

  // Random search criteria fed from the CSV file (column "searchCriterion").
  val feeder = csv("search1.csv").random

  // Let's split this big scenario into composable business processes, like one would do with PageObject pattern with Selenium
  // objects are native Scala singletons
  object Search {

    // Home page -> search with a fed criterion -> open a result detail.
    val search = exec(http("Home").get("/"))
      .feed(feeder)
      .pause(1)
      .exec(http("Search").get("/computers?f=${searchCriterion}"))
      .pause(1)
      .exec(http("Select").get("/computers/6"))
      .pause(1)
  }

  // Shared HTTP protocol configuration (base URL and browser-like headers).
  val httpConf = http
    .baseURL("http://computer-database.gatling.io")
    .acceptHeader("text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8")
    .doNotTrackHeader("1")
    .acceptLanguageHeader("en-US,en;q=0.5")
    .acceptEncodingHeader("gzip, deflate")
    .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:16.0) Gecko/20100101 Firefox/16.0")

  // Now, we can write the scenario as a composition
  val scn = scenario("Scenario Name").exec(Search.search)

  // Single user, injected at once.
  setUp(scn.inject(atOnceUsers(1)).protocols(httpConf))
}
}
| evandor/skysail | skysail.product.demo.e2e.gatling/src/gatling/scala/computerdatabase/advanced/Advanced1Simulation.scala | Scala | apache-2.0 | 1,195 |
package scala.macros.tests
package config
import org.junit._
import org.junit.runner._
import org.junit.runners._
import org.junit.Assert._
import scala.macros.config._
@RunWith(classOf[JUnit4])
class VersionSuite {

  /**
   * Parses `s` and verifies every component of the resulting version, as well
   * as the round-trip back to the original string. Factors out the assertion
   * boilerplate that was previously duplicated in every test method.
   */
  private def checkParse(s: String,
                         major: Int,
                         minor: Int,
                         patch: Int,
                         snapshot: String,
                         build: String): Unit = {
    val version = Version.parse(s).get
    assertEquals(major, version.major)
    assertEquals(minor, version.minor)
    assertEquals(patch, version.patch)
    assertEquals(snapshot, version.snapshot)
    assertEquals(build, version.build)
    assertEquals(s, version.toString)
  }

  @Test
  def parseStable: Unit =
    checkParse("2.0.0", 2, 0, 0, "", "")

  @Test
  def parseSnapshotWithoutCommit: Unit =
    checkParse("2.0.0-707", 2, 0, 0, "707", "")

  @Test
  def parseSnapshotWithSha: Unit =
    checkParse("2.0.0-707-51be4a51", 2, 0, 0, "707-51be4a51", "")

  @Test
  def parseSnapshotWithShaAndTimestamp: Unit =
    checkParse("2.0.0-707-51be4a51.1495325855697", 2, 0, 0, "707-51be4a51.1495325855697", "")

  @Test
  def parseSnapshotWithShaAndTimestampAndBuild: Unit =
    checkParse("2.0.0-707-51be4a51.1495325855697+build", 2, 0, 0, "707-51be4a51.1495325855697", "build")
}
} | xeno-by/scalamacros | tests/api/src/test/scala/scala/macros/tests/config/VersionSuite.scala | Scala | bsd-3-clause | 2,031 |
package scala.tools
package reflect
import scala.reflect.reify.Taggers
import scala.tools.nsc.typechecker.{ Analyzer, Macros }
import scala.reflect.runtime.Macros.currentMirror
import scala.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
*/
class FastTrack[MacrosAndAnalyzer <: Macros with Analyzer](val macros: MacrosAndAnalyzer) {
  import macros._
  import global._
  import definitions._
  import scala.language.implicitConversions
  import treeInfo.Applied

  // Lookup helpers over the per-run cache of hardwired macro expansions.
  def contains(symbol: Symbol): Boolean = fastTrackCache().contains(symbol)
  def apply(symbol: Symbol): FastTrackEntry = fastTrackCache().apply(symbol)
  def get(symbol: Symbol): Option[FastTrackEntry] = fastTrackCache().get(symbol)

  // Adapters giving a macro context direct access to the tagger, string
  // interpolator and quasiquote implementations (avoids reflective dispatch).
  private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
    new { val c: c0.type = c0 } with Taggers
  private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } =
    new { val c: c0.type = c0 } with FormatInterpolator
  private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
    new { val c: c0.type = c0 } with QuasiquoteImpls

  // Entry builders; black/white-box distinction mirrors regular macro kinds.
  private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
    sym -> new FastTrackEntry(pf, isBlackbox = true)
  private def makeWhitebox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
    sym -> new FastTrackEntry(pf, isBlackbox = false)

  final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree], val isBlackbox: Boolean) extends (MacroArgs => Any) {
    // True when this entry can expand the given application tree.
    def validate(tree: Tree) = pf isDefinedAt Applied(tree)
    def apply(margs: MacroArgs): margs.c.Expr[Nothing] = {
      val MacroArgs(c, _) = margs
      // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
      c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
    }
  }

  /** A map from a set of pre-established macro symbols to their implementations. */
  private val fastTrackCache = perRunCaches.newGeneric[Map[Symbol, FastTrackEntry]] {
    val runDefinitions = currentRun.runDefinitions
    import runDefinitions._
    Map[Symbol, FastTrackEntry](
      makeBlackbox(        materializeClassTag) { case Applied(_, ttag :: Nil, _)                 => _.materializeClassTag(ttag.tpe) },
      makeBlackbox(     materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _)     => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
      makeBlackbox(         materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _)     => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
      makeBlackbox(           ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _)  => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
      makeBlackbox(            StringContext_f) { case _                                          => _.interpolate },
      makeBlackbox(ReflectRuntimeCurrentMirror) { case _                                          => c => currentMirror(c).tree },
      makeWhitebox(  QuasiquoteClass_api_apply) { case _                                          => _.expandQuasiquote },
      makeWhitebox(QuasiquoteClass_api_unapply) { case _                                          => _.expandQuasiquote }
    )
  }
}
| felixmulder/scala | src/compiler/scala/tools/reflect/FastTrack.scala | Scala | bsd-3-clause | 3,537 |
package edu.gemini.pit.ui.editor
import edu.gemini.model.p1.immutable.EphemerisElement
import edu.gemini.pit.ui.util._
import edu.gemini.shared.gui.textComponent.NumberField
import edu.gemini.spModel.core.Coordinates
import scala.swing._
import scala.swing.event.ValueChanged
import java.util.{Date, TimeZone}
import javax.swing.{JSpinner, SpinnerDateModel}
import scalaz._
import Scalaz._
/**
* Modal editor for an EphemerisElement.
*/
class EphemerisElementEditor(e: EphemerisElement) extends StdModalEditor[EphemerisElement]("Edit Ephemeris Element") {

  // Editor component: one row per editable field of the ephemeris element.
  object Editor extends GridBagPanel with Rows {
    addRow(new Label("UTC"), Component.wrap(Cal))
    addRow(new Label("RA"), RA)
    addRow(new Label("Dec"), Dec)
    addRow(new Label("Magnitude"), Mag)
  }

  // Validation: the dialog's OK action is enabled only while all fields parse.
  override def editorValid = RA.valid && Dec.valid && Mag.valid
  Seq(RA, Dec, Mag) foreach {
    _.reactions += {
      case ValueChanged(_) => validateEditor()
    }
  }

  // Data fields, initialized from the element being edited.
  object Cal extends JSpinner(new SpinnerDateModel()) { spin =>
    // display/parse the timestamp in UTC rather than the local time zone
    setEditor(new JSpinner.DateEditor(spin, "dd-MMM-yyyy HH:mm:ss") {
      getFormat.setTimeZone(TimeZone.getTimeZone("UTC"))
    })
    setValue(new Date(e.validAt))
  }
  object RA extends RATextField(e.coords.ra)
  object Dec extends DecTextField(e.coords.dec)
  object Mag extends NumberField(e.magnitude, allowEmpty = false)

  // Construct our editor
  def editor = Editor

  // Construct a new value from the current field contents.
  def value = EphemerisElement(Coordinates(RA.toRightAscension, Dec.value), parseDouble(Mag.text).toOption, Cal.getValue.asInstanceOf[Date].getTime)
}
| spakzad/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/editor/EphemerisElementEditor.scala | Scala | bsd-3-clause | 1,626 |
/*
*************************************************************************************
* Copyright 2012 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.batch
import net.liftweb.actor.{LiftActor, LAPinger}
import com.normation.rudder.services.system.DatabaseManager
import net.liftweb.common._
import org.joda.time._
import com.normation.rudder.domain.logger.ReportLogger
import com.normation.rudder.domain.reports._
/**
* An helper object designed to help building automatic reports cleaning
*/
object AutomaticReportsCleaning {

  /*
   * Default parameters and properties name
   */
  val minParam  = "rudder.batch.databasecleaner.runtime.minute"
  val hourParam = "rudder.batch.databasecleaner.runtime.hour"
  val dayParam  = "rudder.batch.databasecleaner.runtime.day"
  val freqParam = "rudder.batch.reportsCleaner.frequency"

  val defaultMinute = 0
  val defaultHour   = 0
  val defaultDay    = "sunday"

  val defaultArchiveTTL = 30
  val defaultDeleteTTL  = 90

  /**
   * Build a frequency depending on the value
   */
  def buildFrequency(kind:String, min:Int, hour:Int, day:String):Box[CleanFrequency] = {
    kind.toLowerCase() match {
      case "hourly" => buildHourly(min)
      case "daily"  => buildDaily(min,hour)
      case "weekly" => buildWeekly(min,hour,day)
      case _        => Failure("%s is not correctly set, value is %s".format(freqParam,kind))
    }
  }

  /**
   * Build an hourly frequency
   */
  private[this] def buildHourly(min:Int):Box[CleanFrequency] = {
    if (min >= 0 && min <= 59)
      Full(Hourly(min))
    else
      Failure("%s is not correctly set, value is %d, should be in [0-59]".format(minParam,min))
  }

  /**
   * Build a daily frequency
   */
  private[this] def buildDaily(min:Int,hour:Int):Box[CleanFrequency] = {
    if (min >= 0 && min <= 59)
      if(hour >= 0 && hour <= 23)
        Full(Daily(hour,min))
      else
        Failure("%s is not correctly set, value is %d, should be in [0-23]".format(hourParam,hour))
    else
      Failure("%s is not correctly set, value is %d, should be in [0-59]".format(minParam,min))
  }

  /**
   * Build a weekly frequency.
   *
   * Bug fix: this method was declared to return Option[CleanFrequency] while
   * every branch produces a Box (Full/Failure). Lift's implicit Box<->Option
   * conversions made that compile, but silently turned a Failure (and its
   * error message) into None, and then into a bare Empty at the call site in
   * buildFrequency. Returning Box directly preserves the error message.
   */
  private[this] def buildWeekly(min:Int,hour:Int,day:String):Box[CleanFrequency] = {
    if (min >= 0 && min <= 59)
      if(hour >= 0 && hour <= 23)
        day.toLowerCase() match {
          case "monday"    => Full(Weekly(DateTimeConstants.MONDAY,hour,min))
          case "tuesday"   => Full(Weekly(DateTimeConstants.TUESDAY,hour,min))
          case "wednesday" => Full(Weekly(DateTimeConstants.WEDNESDAY,hour,min))
          case "thursday"  => Full(Weekly(DateTimeConstants.THURSDAY,hour,min))
          case "friday"    => Full(Weekly(DateTimeConstants.FRIDAY,hour,min))
          case "saturday"  => Full(Weekly(DateTimeConstants.SATURDAY,hour,min))
          case "sunday"    => Full(Weekly(DateTimeConstants.SUNDAY,hour,min))
          case _           => Failure("%s is not correctly set, value is %s".format(dayParam,day))
        }
      else
        Failure("%s is not correctly set, value is %d, should be in [0-23]".format(hourParam,hour))
    else
      Failure("%s is not correctly set, value is %d, should be in [0-59]".format(minParam,min))
  }
}
/**
* Clean Frequency represents how often a report cleaning will be done.
*/
trait CleanFrequency {
  /**
   * Check if report cleaning has to be run at `date`: the cleaning fires
   * exactly when `date` equals the schedule point computed from it.
   * Actually check every minute.
   * TODO : check in a range of 5 minutes
   */
  def check(date: DateTime): Boolean = checker(date) == date
  /**
   * Compute the schedule point corresponding to `now`.
   */
  def checker(now: DateTime): DateTime
  /**
   * Compute the next cleaning time.
   */
  def next: DateTime
  /**
   * Human-readable description of the frequency, if one can be computed.
   */
  def displayFrequency: Option[String]
  override def toString = displayFrequency.getOrElse("Could not compute frequency")
}
/**
* An hourly frequency.
* It runs every hour past min minutes
*/
/**
 * An hourly frequency: runs every hour, `min` minutes past the hour.
 */
case class Hourly(min: Int) extends CleanFrequency {
  def checker(date: DateTime): DateTime = date.withMinuteOfHour(min)
  def next: DateTime = {
    // Candidate run time within the current hour; if already past, wait
    // for the same minute of the next hour.
    val now       = DateTime.now()
    val candidate = checker(now)
    if (now.isBefore(candidate)) candidate
    else candidate.plusHours(1)
  }
  def displayFrequency = Some("Every hour past %d minutes".format(min))
}
/**
* A daily frequency.
* It runs every day at hour:min
*/
/**
 * A daily frequency: runs every day at hour:min.
 */
case class Daily(hour: Int, min: Int) extends CleanFrequency {
  def checker(date: DateTime): DateTime = date.withMinuteOfHour(min).withHourOfDay(hour)
  def next: DateTime = {
    // Candidate run time today; if already past, run tomorrow instead.
    val now       = DateTime.now()
    val candidate = checker(now)
    if (now.isBefore(candidate)) candidate
    else candidate.plusDays(1)
  }
  def displayFrequency = Some("Every day at %02d:%02d".format(hour,min))
}
/**
* A weekly frequency.
* It runs every week on day at hour:min
*/
/**
 * A weekly frequency: runs every week on `day` (a Joda-Time
 * DateTimeConstants value) at hour:min.
 */
case class Weekly(day: Int, hour: Int, min: Int) extends CleanFrequency {
  def checker(date: DateTime): DateTime =
    date.withMinuteOfHour(min).withHourOfDay(hour).withDayOfWeek(day)
  def next: DateTime = {
    // Candidate run time this week; if already past, run next week.
    val now       = DateTime.now()
    val candidate = checker(now)
    if (now.isBefore(candidate)) candidate
    else candidate.plusWeeks(1)
  }
  def displayFrequency = {
    // Translate the Joda-Time day constant back to a readable name; an
    // unknown constant yields None (and a default message in toString).
    val dayName = day match {
      case DateTimeConstants.MONDAY    => Some("Monday")
      case DateTimeConstants.TUESDAY   => Some("Tuesday")
      case DateTimeConstants.WEDNESDAY => Some("Wednesday")
      case DateTimeConstants.THURSDAY  => Some("Thursday")
      case DateTimeConstants.FRIDAY    => Some("Friday")
      case DateTimeConstants.SATURDAY  => Some("Saturday")
      case DateTimeConstants.SUNDAY    => Some("Sunday")
      case _                           => None
    }
    dayName.map(name => "every %s at %02d:%02d".format(name, hour, min))
  }
}
// States into which the cleaner process can be.
sealed trait CleanerState
// The process is idle.
case object IdleCleaner extends CleanerState
// An update is currently cleaning the databases.
case object ActiveCleaner extends CleanerState
// Protocol of the database cleaner actors defined below.
sealed trait DatabaseCleanerMessage
// Messages the cleaner can receive.
// Ask to clean database (need to be in active state).
case object CleanDatabase extends DatabaseCleanerMessage
// Ask to check if cleaning has to be launched (need to be in idle state).
case object CheckLaunch extends DatabaseCleanerMessage
// Ask to clean all reports older than `date` right away, outside the schedule.
case class ManualLaunch(date:DateTime) extends DatabaseCleanerMessage
// Common interface of cleaner actors: exposes whether a cleaning is running.
trait DatabaseCleanerActor extends LiftActor {
  def isIdle : Boolean
}
/**
* A class that periodically check if the Database has to be cleaned.
*
 * For now, archive and delete run at the same frequency.
 * The delete and archive TTLs express the maximum age of reports, in days.
 * A negative or zero TTL disables the corresponding reports cleaner.
 * The archive action does not run if its TTL is greater than the delete TTL.
*/
case class AutomaticReportsCleaning(
    dbManager : DatabaseManager
  , deletettl : Int // in days
  , archivettl : Int // in days
  , freq : CleanFrequency
) extends Loggable {
  val reportLogger = ReportLogger
  // Check if automatic reports archiving has to be started.
  // An archiver actor is always created (so manual launches keep working);
  // a ttl of -1 simply disables its automatic scheduling.
  val archiver:DatabaseCleanerActor = if(archivettl < 1) {
    val propertyName = "rudder.batch.reportsCleaner.archive.TTL"
    reportLogger.info("Disable automatic database archive sinces property %s is 0 or negative".format(propertyName))
    new LADatabaseCleaner(ArchiveAction(dbManager,this),-1)
  } else {
    // Don't launch automatic report archiving if reports would have already been deleted by automatic reports deleting
    if ((archivettl < deletettl ) && (deletettl > 0)) {
      logger.trace("***** starting Automatic Archive Reports batch *****")
      new LADatabaseCleaner(ArchiveAction(dbManager,this),archivettl)
    }
    else {
      reportLogger.info("Disable automatic archive since archive maximum age is older than delete maximum age")
      new LADatabaseCleaner(ArchiveAction(dbManager,this),-1)
    }
  }
  archiver ! CheckLaunch
  // Same pattern for the deleter: automatic only when deletettl >= 1.
  val deleter:DatabaseCleanerActor = if(deletettl < 1) {
    val propertyName = "rudder.batch.reportsCleaner.delete.TTL"
    reportLogger.info("Disable automatic database deletion sinces property %s is 0 or negative".format(propertyName))
    new LADatabaseCleaner(DeleteAction(dbManager,this),-1)
  } else {
    logger.trace("***** starting Automatic Delete Reports batch *****")
    new LADatabaseCleaner(DeleteAction(dbManager,this),deletettl)
  }
  deleter ! CheckLaunch
  ////////////////////////////////////////////////////////////////
  //////////////////// implementation details ////////////////////
  ////////////////////////////////////////////////////////////////
  /*
   * Actor wrapping one clean action (archive or delete). It is a two-state
   * machine (IdleCleaner / ActiveCleaner) driven by DatabaseCleanerMessage;
   * when `automatic`, it re-registers itself with LAPinger every minute to
   * poll the configured CleanFrequency.
   */
  private case class LADatabaseCleaner(cleanaction:CleanReportAction,ttl:Int) extends DatabaseCleanerActor with Loggable {
    updateManager =>
    private[this] val reportLogger = ReportLogger
    // ttl <= 0 means "manual launches only": no periodic scheduling.
    private[this] val automatic = ttl > 0
    private[this] var currentState: CleanerState = IdleCleaner
    private[this] var lastRun: DateTime = DateTime.now()
    def isIdle : Boolean = currentState == IdleCleaner
    private[this] def formatDate(date:DateTime) : String = date.toString("yyyy-MM-dd HH:mm")
    // Run the clean action on reports older than `date`, log the outcome,
    // and return to the idle state in every case.
    private[this] def activeCleaning(date : DateTime, message : DatabaseCleanerMessage, kind:String) : Unit = {
      val formattedDate = formatDate(date)
      cleanaction.act(date) match {
        case eb:EmptyBox =>
          // Error while cleaning. Do not start again, since there is heavy chance
          // that without an human intervention, it will fail again, leading to
          // log explosion. Perhaps we could start-it again after a little time (several minutes)
          reportLogger.error("Reports database: Error while processing database %s, cause is: %s ".format(cleanaction.continue.toLowerCase(),eb))
          currentState = IdleCleaner
        case Full(res) =>
          if (res==0)
            reportLogger.info("Reports database: %s %s completed for all reports before %s, no reports to %s".format(kind,cleanaction.name.toLowerCase(), formattedDate,cleanaction.name.toLowerCase()))
          else
            reportLogger.info("Reports database: %s %s completed for all reports before %s, %d reports %s".format(kind,cleanaction.name.toLowerCase(),formattedDate,res,cleanaction.past.toLowerCase()))
          lastRun=DateTime.now
          currentState = IdleCleaner
      }
    }
    override protected def messageHandler = {
      /*
       * Ask to check if need to be launched
       * If idle => check
       * If active => do nothing
       * always register to LAPinger
       */
      case CheckLaunch => {
        // Schedule next check, every minute
        if (automatic) {
          LAPinger.schedule(this, CheckLaunch, 1000L*60)
          currentState match {
            case IdleCleaner =>
              logger.trace("***** Check launch *****")
              if(freq.check(DateTime.now)){
                logger.trace("***** Automatic %s entering in active State *****".format(cleanaction.name.toLowerCase()))
                currentState = ActiveCleaner
                (this) ! CleanDatabase
              }
              else
                logger.trace("***** Automatic %s will not be launched now, It is scheduled '%s'*****".format(cleanaction.name.toLowerCase(),freq.toString))
            case ActiveCleaner => ()
          }
        }
        else
          logger.trace("***** Database %s is not automatic, it will not schedule its next launch *****".format(cleanaction.name))
      }
      /*
       * Ask to clean Database
       * If idle => do nothing
       * If active => clean database
       */
      case CleanDatabase => {
        currentState match {
          case ActiveCleaner =>
            val now = DateTime.now
            // Reports strictly older than (now - ttl days) are cleaned.
            val target = now.minusDays(ttl)
            val formattedDate = formatDate(target)
            logger.trace("***** %s Database *****".format(cleanaction.name))
            reportLogger.info("Reports database: Automatic %s started for all reports before %s".format(cleanaction.name.toLowerCase(),formattedDate))
            activeCleaning(target,CleanDatabase,"automatic")
          case IdleCleaner => ()
        }
      }
      /*
       * Manual cleaning of all reports older than `date`; rejected with an
       * info log when another cleaning is already running.
       */
      case ManualLaunch(date) => {
        val formattedDate = formatDate(date)
        logger.trace("***** Ask to launch manual database %s *****".format(cleanaction.name))
        currentState match {
          case IdleCleaner =>
            currentState = ActiveCleaner
            logger.trace("***** Start manual %s database *****".format(cleanaction.name))
            reportLogger.info("Reports database: Manual %s started for all reports before %s ".format(cleanaction.name.toLowerCase(), formattedDate))
            activeCleaning(date,ManualLaunch(date),"Manual")
          case ActiveCleaner => reportLogger.info("Reports database: A database cleaning is already running, please try later")
        }
      }
      case _ =>
        reportLogger.error("Wrong message for automatic reports %s ".format(cleanaction.name.toLowerCase()))
    }
  }
}
| armeniaca/rudder | rudder-core/src/main/scala/com/normation/rudder/batch/AutomaticReportsCleaner.scala | Scala | gpl-3.0 | 14,657 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gdata.youtube
/**
* A concrete standard user profile entry.
*
* @author Iulian Dragos
*/
class StdUserProfileEntry extends UserProfileEntries {
  type Entry = UserProfileEntry
  // Standard feed implementations reachable from a user profile entry:
  // uploaded videos, playlists, subscriptions and the contacts list.
  val videoFeeds = new StdVideoFeed
  val playlistFeeds = new StdUserPlaylistsFeed
  val subscriptionFeeds = new StdSubscriptionFeed
  val contactFeeds = new StdContactsFeed
  // Pickler used for the contents of a single profile entry.
  protected def entryContentsPickler = userProfileEntryContents
}
| jeppenejsum/gdata-scala-client | src/com/google/gdata/youtube/StdUserProfileEntry.scala | Scala | apache-2.0 | 1,045 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.math.BigDecimal
import java.sql.{Date, DriverManager, SQLException, Timestamp}
import java.util.{Calendar, GregorianCalendar, Properties}
import org.h2.jdbc.JdbcSQLException
import org.scalatest.{BeforeAndAfter, PrivateMethodTester}
import org.apache.spark.{SparkException, SparkFunSuite}
import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.execution.DataSourceScanExec
import org.apache.spark.sql.execution.command.ExplainCommand
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JDBCPartition, JDBCRDD, JDBCRelation, JdbcUtils}
import org.apache.spark.sql.execution.metric.InputOutputMetricsHelper
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources._
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
class JDBCSuite extends QueryTest
with BeforeAndAfter with PrivateMethodTester with SharedSQLContext {
import testImplicits._
  // In-memory H2 database shared by all tests in this suite.
  val url = "jdbc:h2:mem:testdb0"
  val urlWithUserAndPass = "jdbc:h2:mem:testdb0;user=testUser;password=testPass"
  var conn: java.sql.Connection = null
  // Raw bytes used to populate and verify BINARY/BLOB columns.
  val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte)
  // Dialect that maps every H2 column type to StringType; used to verify
  // that a user-registered dialect overrides the default type mapping.
  val testH2Dialect = new JdbcDialect {
    override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2")
    override def getCatalystType(
        sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
      Some(StringType)
  }
  // Creates the H2 schema, tables and Spark temporary views every test relies
  // on. Runs before each test; `after` closes the shared connection.
  before {
    Utils.classForName("org.h2.Driver")
    // Extra properties that will be specified for our database. We need these to test
    // usage of parameters from OPTIONS clause in queries.
    val properties = new Properties()
    properties.setProperty("user", "testUser")
    properties.setProperty("password", "testPass")
    properties.setProperty("rowId", "false")
    conn = DriverManager.getConnection(url, properties)
    conn.prepareStatement("create schema test").executeUpdate()
    // TEST.PEOPLE: base table behind the foobar/fetchtwo/parts/partsoverflow
    // views used by most SELECT and predicate-pushdown tests.
    conn.prepareStatement(
      "create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)").executeUpdate()
    conn.prepareStatement("insert into test.people values ('fred', 1)").executeUpdate()
    conn.prepareStatement("insert into test.people values ('mary', 2)").executeUpdate()
    conn.prepareStatement(
      "insert into test.people values ('joe ''foo'' \\"bar\\"', 3)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW foobar
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW fetchtwo
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        | ${JDBCOptions.JDBC_BATCH_FETCH_SIZE} '2')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW parts
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        | partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW partsoverflow
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass',
        | partitionColumn 'THEID', lowerBound '-9223372036854775808',
        | upperBound '9223372036854775807', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))
    // Integral, string, time, float and all-null tables exercising the
    // JDBC-to-Catalyst type mapping.
    conn.prepareStatement("create table test.inttypes (a INT, b BOOLEAN, c TINYINT, "
      + "d SMALLINT, e BIGINT)").executeUpdate()
    conn.prepareStatement("insert into test.inttypes values (1, false, 3, 4, 1234567890123)"
      ).executeUpdate()
    conn.prepareStatement("insert into test.inttypes values (null, null, null, null, null)"
      ).executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW inttypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement("create table test.strtypes (a BINARY(20), b VARCHAR(20), "
      + "c VARCHAR_IGNORECASE(20), d CHAR(20), e BLOB, f CLOB)").executeUpdate()
    val stmt = conn.prepareStatement("insert into test.strtypes values (?, ?, ?, ?, ?, ?)")
    stmt.setBytes(1, testBytes)
    stmt.setString(2, "Sensitive")
    stmt.setString(3, "Insensitive")
    stmt.setString(4, "Twenty-byte CHAR")
    stmt.setBytes(5, testBytes)
    stmt.setString(6, "I am a clob!")
    stmt.executeUpdate()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW strtypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP)"
      ).executeUpdate()
    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
      + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW timetypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement("CREATE TABLE test.timezone (tz TIMESTAMP WITH TIME ZONE) " +
      "AS SELECT '1999-01-08 04:05:06.543543543 GMT-08:00'")
      .executeUpdate()
    conn.commit()
    conn.prepareStatement("CREATE TABLE test.array (ar ARRAY) " +
      "AS SELECT '(1, 2, 3)'")
      .executeUpdate()
    conn.commit()
    conn.prepareStatement("create table test.flttypes (a DOUBLE, b REAL, c DECIMAL(38, 18))"
      ).executeUpdate()
    conn.prepareStatement("insert into test.flttypes values ("
      + "1.0000000000000002220446049250313080847263336181640625, "
      + "1.00000011920928955078125, "
      + "123456789012345.543215432154321)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW flttypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement(
      s"""
        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
        |m DOUBLE, n REAL, o DECIMAL(38, 18))
      """.stripMargin.replaceAll("\\n", " ")).executeUpdate()
    conn.prepareStatement("insert into test.nulltypes values ("
      + "null, null, null, null, null, null, null, null, null, "
      + "null, null, null, null, null, null)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW nulltypes
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    // TEST.EMP has a nullable, quoted "Dept" column for nullable-partition tests.
    conn.prepareStatement(
      "create table test.emp(name TEXT(32) NOT NULL," +
      " theid INTEGER, \\"Dept\\" INTEGER)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('fred', 1, 10)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('mary', 2, null)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('joe ''foo'' \\"bar\\"', 3, 30)").executeUpdate()
    conn.prepareStatement(
      "insert into test.emp values ('kathy', null, null)").executeUpdate()
    conn.commit()
    // TEST.SEQ: ids 0..6 plus a null row, for partition-bound edge cases.
    conn.prepareStatement(
      "create table test.seq(id INTEGER)").executeUpdate()
    (0 to 6).foreach { value =>
      conn.prepareStatement(
        s"insert into test.seq values ($value)").executeUpdate()
    }
    conn.prepareStatement(
      "insert into test.seq values (null)").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW nullparts
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST.EMP', user 'testUser', password 'testPass',
        |partitionColumn '"Dept"', lowerBound '1', upperBound '4', numPartitions '3')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement(
      """create table test."mixedCaseCols" ("Name" TEXT(32), "Id" INTEGER NOT NULL)""")
      .executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values ('fred', 1)""").executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values ('mary', 2)""").executeUpdate()
    conn.prepareStatement("""insert into test."mixedCaseCols" values (null, 3)""").executeUpdate()
    conn.commit()
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW mixedCaseCols
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable 'TEST."mixedCaseCols"', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    conn.prepareStatement("CREATE TABLE test.partition (THEID INTEGER, `THE ID` INTEGER) " +
      "AS SELECT 1, 1")
      .executeUpdate()
    conn.commit()
    // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
  }
  // Release the shared H2 connection after each test.
  after {
    conn.close()
  }
// Check whether the tables are fetched in the expected degree of parallelism
def checkNumPartitions(df: DataFrame, expectedNumPartitions: Int): Unit = {
val jdbcRelations = df.queryExecution.analyzed.collect {
case LogicalRelation(r: JDBCRelation, _, _, _) => r
}
assert(jdbcRelations.length == 1)
assert(jdbcRelations.head.parts.length == expectedNumPartitions,
s"Expecting a JDBCRelation with $expectedNumPartitions partitions, but got:`$jdbcRelations`")
}
  test("SELECT *") {
    // foobar is a plain (non-partitioned) view over TEST.PEOPLE (3 rows).
    assert(sql("SELECT * FROM foobar").collect().size === 3)
  }
  test("SELECT * WHERE (simple predicates)") {
    // Asserts the filter was pushed down to the JDBC source: the physical
    // plan must contain only the JDBC scan, without a Spark-side Filter node.
    def checkPushdown(df: DataFrame): DataFrame = {
      val parentPlan = df.queryExecution.executedPlan
      // Check if SparkPlan Filter is removed in a physical plan and
      // the plan only has PhysicalRDD to scan JDBCRelation.
      assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
      val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
      assert(node.child.isInstanceOf[org.apache.spark.sql.execution.DataSourceScanExec])
      assert(node.child.asInstanceOf[DataSourceScanExec].nodeName.contains("JDBCRelation"))
      df
    }
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID < 1")).collect().size == 0)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID != 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME = 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME <=> 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME > 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME != 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME IN ('mary', 'fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME NOT IN ('fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary'"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary' "
      + "AND THEID = 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE 'fr%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%ed'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%re%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NULL")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NOT NULL")).collect().size == 0)
    // This is a test to reflect discussion in SPARK-12218.
    // The older versions of spark have this kind of bugs in parquet data source.
    val df1 = sql("SELECT * FROM foobar WHERE NOT (THEID != 2) OR NOT (NAME != 'mary')")
    assert(df1.collect.toSet === Set(Row("mary", 2)))
    // SPARK-22548: Incorrect nested AND expression pushed down to JDBC data source
    val df2 = sql("SELECT * FROM foobar " +
      "WHERE (THEID > 0 AND TRIM(NAME) = 'mary') OR (NAME = 'fred')")
    assert(df2.collect.toSet === Set(Row("fred", 1), Row("mary", 2)))
    // Asserts the opposite: the predicate cannot be compiled by JDBCRDD, so a
    // Spark-side Filter must remain in the physical plan.
    def checkNotPushdown(df: DataFrame): DataFrame = {
      val parentPlan = df.queryExecution.executedPlan
      // Check if SparkPlan Filter is not removed in a physical plan because JDBCRDD
      // cannot compile given predicates.
      assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
      val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
      assert(node.child.isInstanceOf[org.apache.spark.sql.execution.FilterExec])
      df
    }
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 1) < 2")).collect().size == 0)
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 2) != 4")).collect().size == 2)
  }
  test("SELECT COUNT(1) WHERE (predicates)") {
    // Check if an answer is correct when Filter is removed from operations such as count() which
    // does not require any columns. In some data sources, e.g., Parquet, `requiredColumns` in
    // org.apache.spark.sql.sources.interfaces is not given in logical plans, but some filters
    // are applied for columns with Filter producing wrong results. On the other hand, JDBCRDD
    // correctly handles this case by assigning `requiredColumns` properly. See PR 10427 for more
    // discussions.
    assert(sql("SELECT COUNT(1) FROM foobar WHERE NAME = 'mary'").collect.toSet === Set(Row(1)))
  }
  test("SELECT * WHERE (quoted strings)") {
    // Embedded single and double quotes must survive the round trip to H2.
    assert(sql("select * from foobar").where('NAME === "joe 'foo' \\"bar\\"").collect().size === 1)
  }
  test("SELECT first field") {
    val names = sql("SELECT NAME FROM foobar").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }
  test("SELECT first field when fetchsize is two") {
    // Same data as above but through the fetchtwo view (JDBC fetchsize = 2).
    val names = sql("SELECT NAME FROM fetchtwo").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }
  test("SELECT second field") {
    val ids = sql("SELECT THEID FROM foobar").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }
  test("SELECT second field when fetchsize is two") {
    val ids = sql("SELECT THEID FROM fetchtwo").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }
  test("SELECT * partitioned") {
    // parts is the 3-partition view over TEST.PEOPLE (THEID in [1, 4)).
    val df = sql("SELECT * FROM parts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }
  test("SELECT WHERE (simple predicates) partitioned") {
    // Predicates must not change the partitioning of the scan.
    val df1 = sql("SELECT * FROM parts WHERE THEID < 1")
    checkNumPartitions(df1, expectedNumPartitions = 3)
    assert(df1.collect().length === 0)
    val df2 = sql("SELECT * FROM parts WHERE THEID != 2")
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 2)
    val df3 = sql("SELECT THEID FROM parts WHERE THEID = 1")
    checkNumPartitions(df3, expectedNumPartitions = 3)
    assert(df3.collect().length === 1)
  }
  test("SELECT second field partitioned") {
    val ids = sql("SELECT THEID FROM parts").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }
  test("overflow of partition bound difference does not give negative stride") {
    // partsoverflow uses Long.MinValue/Long.MaxValue bounds; the stride
    // computation must not overflow into a negative value.
    val df = sql("SELECT * FROM partsoverflow")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }
  test("Register JDBC query with renamed fields") {
    // Regression test for bug SPARK-7345
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW renamed
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(select NAME as NAME1, NAME as NAME2 from TEST.PEOPLE)',
        |user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    val df = sql("SELECT * FROM renamed")
    assert(df.schema.fields.size == 2)
    assert(df.schema.fields(0).name == "NAME1")
    assert(df.schema.fields(1).name == "NAME2")
  }
  test("Basic API") {
    // DataFrameReader.jdbc without any partitioning options.
    assert(spark.read.jdbc(
      urlWithUserAndPass, "TEST.PEOPLE", new Properties()).collect().length === 3)
  }
  test("Basic API with illegal fetchsize") {
    // A negative fetchsize must be rejected at read time.
    val properties = new Properties()
    properties.setProperty(JDBCOptions.JDBC_BATCH_FETCH_SIZE, "-1")
    val e = intercept[IllegalArgumentException] {
      spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
    }.getMessage
    assert(e.contains("Invalid value `-1` for parameter `fetchsize`"))
  }
  test("Missing partition columns") {
    // Bounds/numPartitions without partitionColumn is an all-or-none violation.
    withView("tempPeople") {
      val e = intercept[IllegalArgumentException] {
        sql(
          s"""
            |CREATE OR REPLACE TEMPORARY VIEW tempPeople
            |USING org.apache.spark.sql.jdbc
            |OPTIONS (
            | url 'jdbc:h2:mem:testdb0;user=testUser;password=testPass',
            | dbtable 'TEST.PEOPLE',
            | lowerBound '0',
            | upperBound '52',
            | numPartitions '53',
            | fetchSize '10000' )
          """.stripMargin.replaceAll("\\n", " "))
      }.getMessage
      assert(e.contains("When reading JDBC data sources, users need to specify all or none " +
        "for the following options: 'partitionColumn', 'lowerBound', 'upperBound', and " +
        "'numPartitions'"))
    }
  }
  test("Basic API with FetchSize") {
    // fetchsize 0 means "driver default"; any non-negative value must work.
    (0 to 4).foreach { size =>
      val properties = new Properties()
      properties.setProperty(JDBCOptions.JDBC_BATCH_FETCH_SIZE, size.toString)
      assert(spark.read.jdbc(
        urlWithUserAndPass, "TEST.PEOPLE", properties).collect().length === 3)
    }
  }
  test("Partitioning via JDBCPartitioningInfo API") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 3)
  }
  test("Partitioning via list-of-where-clauses API") {
    // One partition per user-supplied WHERE clause.
    val parts = Array[String]("THEID < 2", "THEID >= 2")
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 2)
    assert(df.collect().length === 3)
  }
  test("Partitioning on column that might have null values.") {
    // TEST.EMP contains null theid/"Dept" values; they must still be read.
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "theid", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 4)
    val df2 = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 4)
    // partitioning on a nullable quoted column
    assert(
      spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", """"Dept"""", 0, 4, 3, new Properties())
      .collect().length === 4)
  }
  test("Partitioning on column where numPartitions is zero") {
    // Zero partitions collapses to a single partition scanning everything.
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 0,
      upperBound = 4,
      numPartitions = 0,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }
  test("Partitioning on column where numPartitions are more than the number of total rows") {
    // The partition count is capped by the bound range, not the request.
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 1,
      upperBound = 5,
      numPartitions = 10,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 4)
    assert(res.count() === 8)
  }
  test("Partitioning on column where lowerBound is equal to upperBound") {
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 5,
      upperBound = 5,
      numPartitions = 4,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }
  test("Partitioning on column where lowerBound is larger than upperBound") {
    val e = intercept[IllegalArgumentException] {
      spark.read.jdbc(
        url = urlWithUserAndPass,
        table = "TEST.seq",
        columnName = "id",
        lowerBound = 5,
        upperBound = 1,
        numPartitions = 3,
        connectionProperties = new Properties()
      )
    }.getMessage
    assert(e.contains("Operation not allowed: the lower bound of partitioning column " +
      "is larger than the upper bound. Lower bound: 5; Upper bound: 1"))
  }
  test("SELECT * on partitioned table with a nullable partition column") {
    // nullparts partitions TEST.EMP on the nullable quoted "Dept" column.
    val df = sql("SELECT * FROM nullparts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 4)
  }
}
test("H2 integral types") {
val rows = sql("SELECT * FROM inttypes WHERE A IS NOT NULL").collect()
assert(rows.length === 1)
assert(rows(0).getInt(0) === 1)
assert(rows(0).getBoolean(1) === false)
assert(rows(0).getInt(2) === 3)
assert(rows(0).getInt(3) === 4)
assert(rows(0).getLong(4) === 1234567890123L)
}
test("H2 null entries") {
val rows = sql("SELECT * FROM inttypes WHERE A IS NULL").collect()
assert(rows.length === 1)
assert(rows(0).isNullAt(0))
assert(rows(0).isNullAt(1))
assert(rows(0).isNullAt(2))
assert(rows(0).isNullAt(3))
assert(rows(0).isNullAt(4))
}
test("H2 string types") {
val rows = sql("SELECT * FROM strtypes").collect()
assert(rows(0).getAs[Array[Byte]](0).sameElements(testBytes))
assert(rows(0).getString(1).equals("Sensitive"))
assert(rows(0).getString(2).equals("Insensitive"))
assert(rows(0).getString(3).equals("Twenty-byte CHAR"))
assert(rows(0).getAs[Array[Byte]](4).sameElements(testBytes))
assert(rows(0).getString(5).equals("I am a clob!"))
}
test("H2 time types") {
val rows = sql("SELECT * FROM timetypes").collect()
val cal = new GregorianCalendar(java.util.Locale.ROOT)
cal.setTime(rows(0).getAs[java.sql.Timestamp](0))
assert(cal.get(Calendar.HOUR_OF_DAY) === 12)
assert(cal.get(Calendar.MINUTE) === 34)
assert(cal.get(Calendar.SECOND) === 56)
cal.setTime(rows(0).getAs[java.sql.Timestamp](1))
assert(cal.get(Calendar.YEAR) === 1996)
assert(cal.get(Calendar.MONTH) === 0)
assert(cal.get(Calendar.DAY_OF_MONTH) === 1)
cal.setTime(rows(0).getAs[java.sql.Timestamp](2))
assert(cal.get(Calendar.YEAR) === 2002)
assert(cal.get(Calendar.MONTH) === 1)
assert(cal.get(Calendar.DAY_OF_MONTH) === 20)
assert(cal.get(Calendar.HOUR) === 11)
assert(cal.get(Calendar.MINUTE) === 22)
assert(cal.get(Calendar.SECOND) === 33)
assert(rows(0).getAs[java.sql.Timestamp](2).getNanos === 543543000)
}
test("test DATE types") {
val rows = spark.read.jdbc(
urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
val cachedRows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
.cache().collect()
assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
assert(rows(1).getAs[java.sql.Date](1) === null)
assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
}
test("test DATE types in cache") {
val rows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
.cache().createOrReplaceTempView("mycached_date")
val cachedRows = sql("select * from mycached_date").collect()
assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
}
test("test types for null value") {
val rows = spark.read.jdbc(
urlWithUserAndPass, "TEST.NULLTYPES", new Properties()).collect()
assert((0 to 14).forall(i => rows(0).isNullAt(i)))
}
test("H2 floating-point types") {
val rows = sql("SELECT * FROM flttypes").collect()
assert(rows(0).getDouble(0) === 1.00000000000000022)
assert(rows(0).getDouble(1) === 1.00000011920928955)
assert(rows(0).getAs[BigDecimal](2) ===
new BigDecimal("123456789012345.543215432154321000"))
assert(rows(0).schema.fields(2).dataType === DecimalType(38, 18))
val result = sql("SELECT C FROM flttypes where C > C - 1").collect()
assert(result(0).getAs[BigDecimal](0) ===
new BigDecimal("123456789012345.543215432154321000"))
}
  test("SQL query as table name") {
    // A parenthesized SELECT can be used wherever a dbtable name is expected.
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW hack
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(SELECT B, B*B FROM TEST.FLTTYPES)',
        | user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    val rows = sql("SELECT * FROM hack").collect()
    assert(rows(0).getDouble(0) === 1.00000011920928955) // Yes, I meant ==.
    // For some reason, H2 computes this square incorrectly...
    assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
  }
  test("Pass extra properties via OPTIONS") {
    // We set rowId to false during setup, which means that _ROWID_ column should be absent from
    // all tables. If rowId is true (default), the query below doesn't throw an exception.
    intercept[JdbcSQLException] {
      sql(
        s"""
          |CREATE OR REPLACE TEMPORARY VIEW abc
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (url '$url', dbtable '(SELECT _ROWID_ FROM test.people)',
          | user 'testUser', password 'testPass')
        """.stripMargin.replaceAll("\\n", " "))
    }
  }
  test("Remap types via JdbcDialects") {
    // With testH2Dialect registered, every column must come back as a string.
    JdbcDialects.registerDialect(testH2Dialect)
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    assert(df.schema.filter(_.dataType != org.apache.spark.sql.types.StringType).isEmpty)
    val rows = df.collect()
    assert(rows(0).get(0).isInstanceOf[String])
    assert(rows(0).get(1).isInstanceOf[String])
    JdbcDialects.unregisterDialect(testH2Dialect)
  }
  test("Default jdbc dialect registration") {
    // Each built-in dialect is selected by its URL prefix; unknown URLs get NoopDialect.
    assert(JdbcDialects.get("jdbc:mysql://127.0.0.1/db") == MySQLDialect)
    assert(JdbcDialects.get("jdbc:postgresql://127.0.0.1/db") == PostgresDialect)
    assert(JdbcDialects.get("jdbc:db2://127.0.0.1/db") == DB2Dialect)
    assert(JdbcDialects.get("jdbc:sqlserver://127.0.0.1/db") == MsSqlServerDialect)
    assert(JdbcDialects.get("jdbc:derby:db") == DerbyDialect)
    assert(JdbcDialects.get("test.invalid") == NoopDialect)
  }
  test("quote column names by jdbc dialect") {
    // MySQL quotes identifiers with backticks; Postgres and Derby use double quotes.
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val Derby = JdbcDialects.get("jdbc:derby:db")

    val columns = Seq("abc", "key")
    val MySQLColumns = columns.map(MySQL.quoteIdentifier(_))
    val PostgresColumns = columns.map(Postgres.quoteIdentifier(_))
    val DerbyColumns = columns.map(Derby.quoteIdentifier(_))

    assert(MySQLColumns === Seq("`abc`", "`key`"))
    assert(PostgresColumns === Seq(""""abc"""", """"key""""))
    assert(DerbyColumns === Seq(""""abc"""", """"key""""))
  }

  test("compile filters") {
    // Drives the private JDBCRDD.compileFilter: every supported data source
    // Filter must compile to the expected SQL fragment; an unsupported filter
    // compiles to None (rendered here as "").
    val compileFilter = PrivateMethod[Option[String]]('compileFilter)
    def doCompileFilter(f: Filter): String =
      JDBCRDD invokePrivate compileFilter(f, JdbcDialects.get("jdbc:")) getOrElse("")
    assert(doCompileFilter(EqualTo("col0", 3)) === """"col0" = 3""")
    assert(doCompileFilter(Not(EqualTo("col1", "abc"))) === """(NOT ("col1" = 'abc'))""")
    assert(doCompileFilter(And(EqualTo("col0", 0), EqualTo("col1", "def")))
      === """("col0" = 0) AND ("col1" = 'def')""")
    assert(doCompileFilter(Or(EqualTo("col0", 2), EqualTo("col1", "ghi")))
      === """("col0" = 2) OR ("col1" = 'ghi')""")
    assert(doCompileFilter(LessThan("col0", 5)) === """"col0" < 5""")
    assert(doCompileFilter(LessThan("col3",
      Timestamp.valueOf("1995-11-21 00:00:00.0"))) === """"col3" < '1995-11-21 00:00:00.0'""")
    assert(doCompileFilter(LessThan("col4", Date.valueOf("1983-08-04")))
      === """"col4" < '1983-08-04'""")
    assert(doCompileFilter(LessThanOrEqual("col0", 5)) === """"col0" <= 5""")
    assert(doCompileFilter(GreaterThan("col0", 3)) === """"col0" > 3""")
    assert(doCompileFilter(GreaterThanOrEqual("col0", 3)) === """"col0" >= 3""")
    assert(doCompileFilter(In("col1", Array("jkl"))) === """"col1" IN ('jkl')""")
    // An empty IN list matches nothing except that NULL input stays NULL.
    assert(doCompileFilter(In("col1", Array.empty)) ===
      """CASE WHEN "col1" IS NULL THEN NULL ELSE FALSE END""")
    assert(doCompileFilter(Not(In("col1", Array("mno", "pqr"))))
      === """(NOT ("col1" IN ('mno', 'pqr')))""")
    assert(doCompileFilter(IsNull("col1")) === """"col1" IS NULL""")
    assert(doCompileFilter(IsNotNull("col1")) === """"col1" IS NOT NULL""")
    // EqualNullSafe expands to the null-tolerant equality expression.
    assert(doCompileFilter(And(EqualNullSafe("col0", "abc"), EqualTo("col1", "def")))
      === """((NOT ("col0" != 'abc' OR "col0" IS NULL OR 'abc' IS NULL) """
        + """OR ("col0" IS NULL AND 'abc' IS NULL))) AND ("col1" = 'def')""")
  }

  test("Dialect unregister") {
    // After unregistering the custom dialect, the H2 URL resolves to NoopDialect.
    JdbcDialects.registerDialect(testH2Dialect)
    JdbcDialects.unregisterDialect(testH2Dialect)
    assert(JdbcDialects.get(urlWithUserAndPass) == NoopDialect)
  }

  test("Aggregated dialects") {
    // Wraps an inline dialect (answers only for even sqlTypes) in front of
    // testH2Dialect and checks how AggregatedDialect combines their answers.
    val agg = new AggregatedDialect(List(new JdbcDialect {
      override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2:")
      override def getCatalystType(
          sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
        if (sqlType % 2 == 0) {
          Some(LongType)
        } else {
          None
        }
      override def quoteIdentifier(colName: String): String = {
        s"My $colName quoteIdentifier"
      }
      override def getTableExistsQuery(table: String): String = {
        s"My $table Table"
      }
      override def getSchemaQuery(table: String): String = {
        s"My $table Schema"
      }
      override def isCascadingTruncateTable(): Option[Boolean] = Some(true)
    }, testH2Dialect))
    assert(agg.canHandle("jdbc:h2:xxx"))
    assert(!agg.canHandle("jdbc:h2"))
    // Even sqlType: the inline dialect answers LongType.
    assert(agg.getCatalystType(0, "", 1, null) === Some(LongType))
    // Odd sqlType: the inline dialect returns None, so the next dialect answers.
    assert(agg.getCatalystType(1, "", 1, null) === Some(StringType))
    assert(agg.isCascadingTruncateTable() === Some(true))
    assert(agg.quoteIdentifier ("Dummy") === "My Dummy quoteIdentifier")
    assert(agg.getTableExistsQuery ("Dummy") === "My Dummy Table")
    assert(agg.getSchemaQuery ("Dummy") === "My Dummy Schema")
  }
  test("Aggregated dialects: isCascadingTruncateTable") {
    // Builds throwaway dialects that only answer isCascadingTruncateTable and
    // checks how AggregatedDialect combines them.
    def genDialect(cascadingTruncateTable: Option[Boolean]): JdbcDialect = new JdbcDialect {
      override def canHandle(url: String): Boolean = true
      override def getCatalystType(
          sqlType: Int,
          typeName: String,
          size: Int,
          md: MetadataBuilder): Option[DataType] = None
      override def isCascadingTruncateTable(): Option[Boolean] = cascadingTruncateTable
    }
    def testDialects(cascadings: List[Option[Boolean]], expected: Option[Boolean]): Unit = {
      val dialects = cascadings.map(genDialect(_))
      val agg = new AggregatedDialect(dialects)
      assert(agg.isCascadingTruncateTable() === expected)
    }
    // Expected combination: any Some(true) wins; all Some(false) stays
    // Some(false); a None among Some(false) makes the aggregate None.
    testDialects(List(Some(true), Some(false), None), Some(true))
    testDialects(List(Some(true), Some(true), None), Some(true))
    testDialects(List(Some(false), Some(false), None), None)
    testDialects(List(Some(true), Some(true)), Some(true))
    testDialects(List(Some(false), Some(false)), Some(false))
    testDialects(List(None, None), None)
  }

  test("DB2Dialect type mapping") {
    val db2Dialect = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    // Write-side (Catalyst -> DB2) mappings.
    assert(db2Dialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(db2Dialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "CHAR(1)")
    assert(db2Dialect.getJDBCType(ShortType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(db2Dialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    // test db2 dialect mappings on read
    assert(db2Dialect.getCatalystType(java.sql.Types.REAL, "REAL", 1, null) == Option(FloatType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "DECFLOAT", 1, null) ==
      Option(DecimalType(38, 18)))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "XML", 1, null) == Option(StringType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "TIMESTAMP WITH TIME ZONE", 1, null) ==
      Option(TimestampType))
  }

  test("PostgresDialect type mapping") {
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    // json/jsonb columns surface as plain strings on read.
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "json", 1, null) === Some(StringType))
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "jsonb", 1, null) === Some(StringType))
    assert(Postgres.getJDBCType(FloatType).map(_.databaseTypeDefinition).get == "FLOAT4")
    assert(Postgres.getJDBCType(DoubleType).map(_.databaseTypeDefinition).get == "FLOAT8")
    // ByteType has no Postgres mapping and must be rejected with a clear message.
    val errMsg = intercept[IllegalArgumentException] {
      Postgres.getJDBCType(ByteType)
    }
    assert(errMsg.getMessage contains "Unsupported type in postgresql: ByteType")
  }

  test("DerbyDialect jdbc type mapping") {
    val derbyDialect = JdbcDialects.get("jdbc:derby:db")
    assert(derbyDialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(derbyDialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(derbyDialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "BOOLEAN")
  }

  test("OracleDialect jdbc type mapping") {
    val oracleDialect = JdbcDialects.get("jdbc:oracle")
    // Metadata carries scale -127; NUMERIC with it must still resolve to a
    // bounded DecimalType(MAX_PRECISION, 10).
    val metadata = new MetadataBuilder().putString("name", "test_column").putLong("scale", -127)
    assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "float", 1, metadata) ==
      Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
    assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "numeric", 0, null) ==
      Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
    assert(oracleDialect.getCatalystType(OracleDialect.BINARY_FLOAT, "BINARY_FLOAT", 0, null) ==
      Some(FloatType))
    assert(oracleDialect.getCatalystType(OracleDialect.BINARY_DOUBLE, "BINARY_DOUBLE", 0, null) ==
      Some(DoubleType))
    assert(oracleDialect.getCatalystType(OracleDialect.TIMESTAMPTZ, "TIMESTAMP", 0, null) ==
      Some(TimestampType))
  }
  test("table exists query by jdbc dialect") {
    // MySQL and Postgres probe existence with a LIMIT 1 query; the other
    // dialects use the default "WHERE 1=0" probe.
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    val h2 = JdbcDialects.get(url)
    val derby = JdbcDialects.get("jdbc:derby:db")
    val table = "weblogs"
    val defaultQuery = s"SELECT * FROM $table WHERE 1=0"
    val limitQuery = s"SELECT 1 FROM $table LIMIT 1"
    assert(MySQL.getTableExistsQuery(table) == limitQuery)
    assert(Postgres.getTableExistsQuery(table) == limitQuery)
    assert(db2.getTableExistsQuery(table) == defaultQuery)
    assert(h2.getTableExistsQuery(table) == defaultQuery)
    assert(derby.getTableExistsQuery(table) == defaultQuery)
  }

  test("truncate table query by jdbc dialect") {
    // Postgres adds the ONLY keyword; all other dialects use plain TRUNCATE TABLE.
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    val h2 = JdbcDialects.get(url)
    val derby = JdbcDialects.get("jdbc:derby:db")
    val table = "weblogs"
    val defaultQuery = s"TRUNCATE TABLE $table"
    val postgresQuery = s"TRUNCATE TABLE ONLY $table"
    assert(MySQL.getTruncateQuery(table) == defaultQuery)
    assert(Postgres.getTruncateQuery(table) == postgresQuery)
    assert(db2.getTruncateQuery(table) == defaultQuery)
    assert(h2.getTruncateQuery(table) == defaultQuery)
    assert(derby.getTruncateQuery(table) == defaultQuery)
  }
  test("Test DataFrame.where for Date and Timestamp") {
    // Regression test for bug SPARK-11788
    val timestamp = java.sql.Timestamp.valueOf("2001-02-20 11:22:33.543543");
    val date = java.sql.Date.valueOf("1995-01-01")
    val jdbcDf = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
    val rows = jdbcDf.where($"B" > date && $"C" > timestamp).collect()
    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    assert(rows(0).getAs[java.sql.Timestamp](2)
      === java.sql.Timestamp.valueOf("2002-02-20 11:22:33.543543"))
  }

  test("test credentials in the properties are not in plan output") {
    // Neither the user name nor the password may appear in EXPLAIN output.
    val df = sql("SELECT * FROM parts")
    val explain = ExplainCommand(df.queryExecution.logical, extended = true)
    spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach {
      r => assert(!List("testPass", "testUser").exists(r.toString.contains))
    }
    // test the JdbcRelation toString output
    df.queryExecution.analyzed.collect {
      case r: LogicalRelation =>
        assert(r.relation.toString == "JDBCRelation(TEST.PEOPLE) [numPartitions=3]")
    }
  }

  test("test credentials in the connection url are not in the plan output") {
    // Credentials embedded in the JDBC URL itself must be redacted as well.
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    val explain = ExplainCommand(df.queryExecution.logical, extended = true)
    spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach {
      r => assert(!List("testPass", "testUser").exists(r.toString.contains))
    }
  }

  test("hide credentials in create and describe a persistent/temp table") {
    // The password must appear neither in EXPLAIN nor in DESC FORMATTED, for
    // both persistent tables and temporary views.
    val password = "testPass"
    val tableName = "tab1"
    Seq("TABLE", "TEMPORARY VIEW").foreach { tableType =>
      withTable(tableName) {
        val df = sql(
          s"""
             |CREATE $tableType $tableName
             |USING org.apache.spark.sql.jdbc
             |OPTIONS (
             | url '$urlWithUserAndPass',
             | dbtable 'TEST.PEOPLE',
             | user 'testUser',
             | password '$password')
           """.stripMargin)
        val explain = ExplainCommand(df.queryExecution.logical, extended = true)
        spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
          assert(!r.toString.contains(password))
        }
        sql(s"DESC FORMATTED $tableName").collect().foreach { r =>
          assert(!r.toString().contains(password))
        }
      }
    }
  }
  test("SPARK 12941: The data type mapping for StringType to Oracle") {
    val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    assert(oracleDialect.getJDBCType(StringType).
      map(_.databaseTypeDefinition).get == "VARCHAR2(255)")
  }

  test("SPARK-16625: General data types to be mapped to Oracle") {
    // Falls back to the common JDBC mapping when the dialect has no override.
    def getJdbcType(dialect: JdbcDialect, dt: DataType): String = {
      dialect.getJDBCType(dt).orElse(JdbcUtils.getCommonJDBCType(dt)).
        map(_.databaseTypeDefinition).get
    }
    val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    assert(getJdbcType(oracleDialect, BooleanType) == "NUMBER(1)")
    assert(getJdbcType(oracleDialect, IntegerType) == "NUMBER(10)")
    assert(getJdbcType(oracleDialect, LongType) == "NUMBER(19)")
    assert(getJdbcType(oracleDialect, FloatType) == "NUMBER(19, 4)")
    assert(getJdbcType(oracleDialect, DoubleType) == "NUMBER(19, 4)")
    assert(getJdbcType(oracleDialect, ByteType) == "NUMBER(3)")
    assert(getJdbcType(oracleDialect, ShortType) == "NUMBER(5)")
    assert(getJdbcType(oracleDialect, StringType) == "VARCHAR2(255)")
    assert(getJdbcType(oracleDialect, BinaryType) == "BLOB")
    assert(getJdbcType(oracleDialect, DateType) == "DATE")
    assert(getJdbcType(oracleDialect, TimestampType) == "TIMESTAMP")
  }

  // Asserts that the given SQL statement returns no rows.
  private def assertEmptyQuery(sqlString: String): Unit = {
    assert(sql(sqlString).collect().isEmpty)
  }

  test("SPARK-15916: JDBC filter operator push down should respect operator precedence") {
    // Predicates that are FALSE overall (despite TRUE sub-terms) must stay
    // FALSE after push down — parentheses may not be dropped.
    val TRUE = "NAME != 'non_exists'"
    val FALSE1 = "THEID > 1000000000"
    val FALSE2 = "THEID < -1000000000"
    assertEmptyQuery(s"SELECT * FROM foobar WHERE ($TRUE OR $FALSE1) AND $FALSE2")
    assertEmptyQuery(s"SELECT * FROM foobar WHERE $FALSE1 AND ($FALSE2 OR $TRUE)")
    // Tests JDBCPartition whereClause clause push down.
    withTempView("tempFrame") {
      val jdbcPartitionWhereClause = s"$FALSE1 OR $TRUE"
      val df = spark.read.jdbc(
        urlWithUserAndPass,
        "TEST.PEOPLE",
        predicates = Array[String](jdbcPartitionWhereClause),
        new Properties())
      df.createOrReplaceTempView("tempFrame")
      assertEmptyQuery(s"SELECT * FROM tempFrame where $FALSE2")
    }
  }

  test("SPARK-16387: Reserved SQL words are not escaped by JDBC writer") {
    // "order" must come out quoted in the generated DDL string.
    val df = spark.createDataset(Seq("a", "b", "c")).toDF("order")
    val schema = JdbcUtils.schemaString(df, "jdbc:mysql://localhost:3306/temp")
    assert(schema.contains("`order` TEXT"))
  }
  test("SPARK-18141: Predicates on quoted column names in the jdbc data source") {
    // Mixed-case column names must survive predicate push down for every
    // supported comparison, LIKE pattern, null check and IN list.
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id < 1").collect().size == 0)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id <= 1").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id > 1").collect().size == 2)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id >= 1").collect().size == 3)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id != 2").collect().size == 2)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id <=> 2").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE 'fr%'").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%ed'").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%re%'").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NULL").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NOT NULL").collect().size == 2)
    assert(sql("SELECT * FROM mixedCaseCols").filter($"Name".isin()).collect().size == 0)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name IN ('mary', 'fred')").collect().size == 2)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name NOT IN ('fred')").collect().size == 1)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1 OR Name = 'mary'").collect().size == 2)
    assert(sql("SELECT * FROM mixedCaseCols WHERE Name = 'mary' AND Id = 2").collect().size == 1)
  }

  test("SPARK-18419: Fix `asConnectionProperties` to filter case-insensitively") {
    // Spark-internal options (url, dbtable, numPartitions) must never be
    // forwarded to the JDBC driver, whatever their key casing.
    val parameters = Map(
      "url" -> "jdbc:mysql://localhost:3306/temp",
      "dbtable" -> "t1",
      "numPartitions" -> "10")
    assert(new JDBCOptions(parameters).asConnectionProperties.isEmpty)
    assert(new JDBCOptions(CaseInsensitiveMap(parameters)).asConnectionProperties.isEmpty)
  }

  test("SPARK-16848: jdbc API throws an exception for user specified schema") {
    // Both jdbc(...) overloads must reject an explicit user schema.
    val schema = StructType(Seq(
      StructField("name", StringType, false), StructField("theid", IntegerType, false)))
    val parts = Array[String]("THEID < 2", "THEID >= 2")
    val e1 = intercept[AnalysisException] {
      spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties())
    }.getMessage
    assert(e1.contains("User specified schema not supported with `jdbc`"))
    val e2 = intercept[AnalysisException] {
      spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    }.getMessage
    assert(e2.contains("User specified schema not supported with `jdbc`"))
  }

  test("jdbc API support custom schema") {
    // The customSchema connection property overrides the resolved read schema.
    val parts = Array[String]("THEID < 2", "THEID >= 2")
    val customSchema = "NAME STRING, THEID INT"
    val props = new Properties()
    props.put("customSchema", customSchema)
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, props)
    assert(df.schema.size === 2)
    assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
    assert(df.count() === 3)
  }
  test("jdbc API custom schema DDL-like strings.") {
    withTempView("people_view") {
      val customSchema = "NAME STRING, THEID INT"
      // Option keys are deliberately mixed-case: they must be treated
      // case-insensitively.
      sql(
        s"""
          |CREATE TEMPORARY VIEW people_view
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass',
          |customSchema '$customSchema')
        """.stripMargin.replaceAll("\n", " "))
      val df = sql("select * from people_view")
      assert(df.schema.length === 2)
      assert(df.schema === CatalystSqlParser.parseTableSchema(customSchema))
      assert(df.count() === 3)
    }
  }

  test("SPARK-15648: teradataDialect StringType data mapping") {
    val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")
    assert(teradataDialect.getJDBCType(StringType).
      map(_.databaseTypeDefinition).get == "VARCHAR(255)")
  }

  test("SPARK-15648: teradataDialect BooleanType data mapping") {
    val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")
    assert(teradataDialect.getJDBCType(BooleanType).
      map(_.databaseTypeDefinition).get == "CHAR(1)")
  }

  test("Checking metrics correctness with JDBC") {
    // Records read by a full scan must equal the table's row count.
    val foobarCnt = spark.table("foobar").count()
    val res = InputOutputMetricsHelper.run(sql("SELECT * FROM foobar").toDF())
    assert(res === (foobarCnt, 0L, foobarCnt) :: Nil)
  }

  test("unsupported types") {
    // Columns Spark cannot map must fail the read with an explicit message.
    var e = intercept[SQLException] {
      spark.read.jdbc(urlWithUserAndPass, "TEST.TIMEZONE", new Properties()).collect()
    }.getMessage
    assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE"))
    e = intercept[SQLException] {
      spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY", new Properties()).collect()
    }.getMessage
    assert(e.contains("Unsupported type ARRAY"))
  }

  test("SPARK-19318: Connection properties keys should be case-sensitive.") {
    def testJdbcOptions(options: JDBCOptions): Unit = {
      // Spark JDBC data source options are case-insensitive
      assert(options.tableOrQuery == "t1")
      // When we convert it to properties, it should be case-sensitive.
      assert(options.asProperties.size == 3)
      assert(options.asProperties.get("customkey") == null)
      assert(options.asProperties.get("customKey") == "a-value")
      assert(options.asConnectionProperties.size == 1)
      assert(options.asConnectionProperties.get("customkey") == null)
      assert(options.asConnectionProperties.get("customKey") == "a-value")
    }
    val parameters = Map("url" -> url, "dbTAblE" -> "t1", "customKey" -> "a-value")
    testJdbcOptions(new JDBCOptions(parameters))
    testJdbcOptions(new JDBCOptions(CaseInsensitiveMap(parameters)))
    // test add/remove key-value from the case-insensitive map
    var modifiedParameters = CaseInsensitiveMap(Map.empty) ++ parameters
    testJdbcOptions(new JDBCOptions(modifiedParameters))
    modifiedParameters -= "dbtable"
    assert(modifiedParameters.get("dbTAblE").isEmpty)
    modifiedParameters -= "customkey"
    assert(modifiedParameters.get("customKey").isEmpty)
    modifiedParameters += ("customKey" -> "a-value")
    modifiedParameters += ("dbTable" -> "t1")
    testJdbcOptions(new JDBCOptions(modifiedParameters))
    assert ((modifiedParameters -- parameters.keys).size == 0)
  }

  test("SPARK-19318: jdbc data source options should be treated case-insensitive.") {
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("DbTaBle", "TEST.PEOPLE")
      .load()
    assert(df.count() == 3)
    withTempView("people_view") {
      sql(
        s"""
          |CREATE TEMPORARY VIEW people_view
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass')
        """.stripMargin.replaceAll("\n", " "))
      assert(sql("select * from people_view").count() == 3)
    }
  }
  test("SPARK-21519: option sessionInitStatement, run SQL to initialize the database session.") {
    // Set a session variable via the init statement and read it back in the query.
    val initSQL1 = "SET @MYTESTVAR 21519"
    val df1 = spark.read.format("jdbc")
      .option("url", urlWithUserAndPass)
      .option("dbtable", "(SELECT NVL(@MYTESTVAR, -1))")
      .option("sessionInitStatement", initSQL1)
      .load()
    assert(df1.collect() === Array(Row(21519)))
    // The init statement affects the query's own session: switching to a
    // non-existent schema makes the subsequent read fail.
    val initSQL2 = "SET SCHEMA DUMMY"
    val df2 = spark.read.format("jdbc")
      .option("url", urlWithUserAndPass)
      .option("dbtable", "TEST.PEOPLE")
      .option("sessionInitStatement", initSQL2)
      .load()
    val e = intercept[SparkException] {df2.collect()}.getMessage
    assert(e.contains("""Schema "DUMMY" not found"""))
    // Multiple semicolon-separated init statements are supported.
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW test_sessionInitStatement
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$urlWithUserAndPass',
        |dbtable '(SELECT NVL(@MYTESTVAR1, -1), NVL(@MYTESTVAR2, -1))',
        |sessionInitStatement 'SET @MYTESTVAR1 21519; SET @MYTESTVAR2 1234')
      """.stripMargin)
    val df3 = sql("SELECT * FROM test_sessionInitStatement")
    assert(df3.collect() === Array(Row(21519, 1234)))
  }

  test("jdbc data source shouldn't have unnecessary metadata in its schema") {
    // The resolved schema must equal a metadata-free StructType.
    val schema = StructType(Seq(
      StructField("NAME", StringType, true), StructField("THEID", IntegerType, true)))
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("DbTaBle", "TEST.PEOPLE")
      .load()
    assert(df.schema === schema)
    withTempView("people_view") {
      sql(
        s"""
          |CREATE TEMPORARY VIEW people_view
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass')
        """.stripMargin.replaceAll("\n", " "))
      assert(sql("select * from people_view").schema === schema)
    }
  }

  test("SPARK-23856 Spark jdbc setQueryTimeout option") {
    // Build a deliberately slow 100-way self-join so the 1-second queryTimeout fires.
    val numJoins = 100
    val longRunningQuery =
      s"SELECT t0.NAME AS c0, ${(1 to numJoins).map(i => s"t$i.NAME AS c$i").mkString(", ")} " +
        s"FROM test.people t0 ${(1 to numJoins).map(i => s"join test.people t$i").mkString(" ")}"
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("dbtable", s"($longRunningQuery)")
      .option("queryTimeout", 1)
      .load()
    val errMsg = intercept[SparkException] {
      df.collect()
    }.getMessage
    assert(errMsg.contains("Statement was canceled or the session timed out"))
  }
  test("SPARK-24327 verify and normalize a partition column based on a JDBC resolved schema") {
    // The user-supplied partitionColumn must be resolved (and case-normalized)
    // against the JDBC schema, then quoted in the generated partition WHERE clauses.
    def testJdbcParitionColumn(partColName: String, expectedColumnName: String): Unit = {
      val df = spark.read.format("jdbc")
        .option("url", urlWithUserAndPass)
        .option("dbtable", "TEST.PARTITION")
        .option("partitionColumn", partColName)
        .option("lowerBound", 1)
        .option("upperBound", 4)
        .option("numPartitions", 3)
        .load()

      val quotedPrtColName = testH2Dialect.quoteIdentifier(expectedColumnName)
      df.logicalPlan match {
        case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
          val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
          assert(whereClauses === Set(
            s"$quotedPrtColName < 2 or $quotedPrtColName is null",
            s"$quotedPrtColName >= 2 AND $quotedPrtColName < 3",
            s"$quotedPrtColName >= 3"))
      }
    }

    testJdbcParitionColumn("THEID", "THEID")
    testJdbcParitionColumn("\"THEID\"", "THEID")
    withSQLConf("spark.sql.caseSensitive" -> "false") {
      testJdbcParitionColumn("ThEiD", "THEID")
    }
    // Column names containing spaces are also supported.
    testJdbcParitionColumn("THE ID", "THE ID")

    def testIncorrectJdbcPartitionColumn(partColName: String): Unit = {
      val errMsg = intercept[AnalysisException] {
        testJdbcParitionColumn(partColName, "THEID")
      }.getMessage
      assert(errMsg.contains(s"User-defined partition column $partColName not found " +
        "in the JDBC relation:"))
    }

    testIncorrectJdbcPartitionColumn("NoExistingColumn")
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
      testIncorrectJdbcPartitionColumn(testH2Dialect.quoteIdentifier("ThEiD"))
    }
  }
  test("query JDBC option - negative tests") {
    val query = "SELECT * FROM test.people WHERE theid = 1"
    // 'query' and 'dbtable' are mutually exclusive — load path.
    val e1 = intercept[RuntimeException] {
      val df = spark.read.format("jdbc")
        .option("Url", urlWithUserAndPass)
        .option("query", query)
        .option("dbtable", "test.people")
        .load()
    }.getMessage
    assert(e1.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
    // jdbc api path
    val properties = new Properties()
    properties.setProperty(JDBCOptions.JDBC_QUERY_STRING, query)
    val e2 = intercept[RuntimeException] {
      spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect()
    }.getMessage
    assert(e2.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
    // SQL DDL path: same restriction when creating a temporary view.
    val e3 = intercept[RuntimeException] {
      sql(
        s"""
          |CREATE OR REPLACE TEMPORARY VIEW queryOption
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE',
          |         user 'testUser', password 'testPass')
        """.stripMargin.replaceAll("\n", " "))
    }.getMessage
    assert(e3.contains("Both 'dbtable' and 'query' can not be specified at the same time."))
    // An empty 'query' value is rejected outright.
    val e4 = intercept[RuntimeException] {
      val df = spark.read.format("jdbc")
        .option("Url", urlWithUserAndPass)
        .option("query", "")
        .load()
    }.getMessage
    assert(e4.contains("Option `query` can not be empty."))
    // Option query and partitioncolumn are not allowed together.
    val expectedErrorMsg =
      s"""
         |Options 'query' and 'partitionColumn' can not be specified together.
         |Please define the query using `dbtable` option instead and make sure to qualify
         |the partition columns using the supplied subquery alias to resolve any ambiguity.
         |Example :
         |spark.read.format("jdbc")
         |  .option("dbtable", "(select c1, c2 from t1) as subq")
         |  .option("partitionColumn", "subq.c1"
         |  .load()
       """.stripMargin
    val e5 = intercept[RuntimeException] {
      sql(
        s"""
          |CREATE OR REPLACE TEMPORARY VIEW queryOption
          |USING org.apache.spark.sql.jdbc
          |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass',
          |         partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3')
        """.stripMargin.replaceAll("\n", " "))
    }.getMessage
    assert(e5.contains(expectedErrorMsg))
  }

  test("query JDBC option") {
    val query = "SELECT name, theid FROM test.people WHERE theid = 1"
    // query option to pass on the query string.
    val df = spark.read.format("jdbc")
      .option("Url", urlWithUserAndPass)
      .option("query", query)
      .load()
    checkAnswer(
      df,
      Row("fred", 1) :: Nil)
    // query option in the create table path.
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW queryOption
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\n", " "))
    checkAnswer(
      sql("select name, theid from queryOption"),
      Row("fred", 1) :: Nil)
  }
}
| bravo-zhang/spark | sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala | Scala | apache-2.0 | 58,401 |
package model.services
import java.util.UUID
import model.dtos._
import model.repositories._
import play.api.Play.current
import play.api.i18n.{Messages, MessagesApi}
import play.api.libs.json.JsArray
import play.api.libs.ws.WS
import utils.MailService
import play.api.Play.current
import play.api.i18n.Messages.Implicits._
import scala.concurrent.Await
class AnnotationManager (gamificationEngine: GamificationEngineTrait, mailService: MailService)(implicit messages:Messages){
  // Page size for comment listings; not referenced by the visible methods —
  // TODO confirm callers before removing.
  private val commentsPageSize = 50
  // Data-access layers for comments and consultations.
  val commentsRepository = new CommentsRepository()
  val consultationRepository = new ConsultationRepository()
  /**
   * Posts the given text to the external terms-extraction web service and
   * returns the extracted tags.
   *
   * NOTE(review): blocks the calling thread for up to 15 seconds via
   * Await.result; consider returning the Future to the caller instead.
   * Assumes the service responds with a JSON array under "content" — a
   * different payload shape will throw here.
   */
  def extractTags(input:String):Seq[String] = {
    import scala.concurrent.ExecutionContext.Implicits.global
    import scala.concurrent.duration._
    Await.result(
      // Endpoint taken from the application.webservice.termsExtraction config key.
      WS.url(play.api.Play.current.configuration.getString("application.webservice.termsExtraction").get)
        .withHeaders(("Content-Type" ->"application/x-www-form-urlencoded; charset=utf-8"))
        .post("text="+input) map {
        response => {
          val content =(response.json \ "content")
          val tags = content.get.asInstanceOf[JsArray].value.map( x=>
            // Drop the JSON quoting around each serialized tag value.
            x.toString().replace("\"","")
          ).toList
          tags
        }
      },15 seconds)
  }
def checkIfUserHasReportedComment(commentId:Long, userId:UUID): Boolean = {
val answer = commentsRepository.checkIfUserHasReportedComment(commentId, userId)
answer
}
def reportComment(commentId:Long, userId:UUID): Long = {
val answer = commentsRepository.reportComment(commentId, userId)
answer
}
def saveComment(comment:Comment): Comment = {
if (!comment.discussionThread.get.id.isDefined || comment.discussionThread.get.id.get <= 0) {
// retrieve the article name from database
// compare the article name with comment.discussionThread.get.text
// if it is the same , discussionThread.get.typeid = "Whole article"
comment.discussionThread.get.id = commentsRepository.saveDiscussionThread(comment.discussionThread.get.clientId, comment.discussionThread.get.text, comment.discussionThread.get.discussion_thread_type_id)
}
comment.id= Some(commentsRepository.saveComment(comment, comment.discussionThread.get.id.get).get)
awardPointsForComment(comment)
comment
}
def updateComment(comment:Comment): Comment = {
commentsRepository.saveUpdatedComment(comment)
comment
}
private def awardPointsForComment(comment: Comment): Unit = {
if(comment.userId.isDefined) {
//check how many comments has the user entered today
if(commentsRepository.howManyCommentsToday(comment.userId.get) <= 10) {
if (comment.annotationTagProblems.size != 0) {
//award points for comment with annotation problems
gamificationEngine.rewardUser(comment.userId.get, GamificationEngineTrait.COMMENT_WITH_PROBLEM_TAGS, comment.id.get)
}
if (comment.annotationTagTopics.size != 0) {
//award points for comment with annotation tags
gamificationEngine.rewardUser(comment.userId.get, GamificationEngineTrait.COMMENT_WITH_ANN_TAGS, comment.id.get)
}
if(comment.discussionThread.get.discussion_thread_type_id == 2) {
//awards points for comment that refers to part of the article
gamificationEngine.rewardUser(comment.userId.get, GamificationEngineTrait.COMMENT_ON_CONSULTATION_PARAGRAPH, comment.id.get)
} else if(comment.discussionThread.get.discussion_thread_type_id == 1) {
//awards points for comment that refers to the whole article
gamificationEngine.rewardUser(comment.userId.get, GamificationEngineTrait.COMMENT_ON_CONSULTATION_ARTICLE, comment.id.get)
}
}
}
}
private def getActionsMadeByUserWhileCommenting(comment:Comment): List[Int] = {
var action_ids = Nil
// if comment is only for an article
//action_ids = action_ids ::: GamificationEngineTrait.COMMENT_ON_CONSULTATION
// if comment has category tags
//action_ids = action_ids ::: GamificationEngineTrait.COMMENT_ON_CONSULTATION
//if comment has problem tags
action_ids
}
def sendEmailToCommenterForLike(userId:UUID, consultationId:Long, articleId:Long, annotationId:String, commentId:Long): Unit = {
var linkToShowComment = ""
val userProfileManager = new UserProfileManager()
val userEmail = userProfileManager.getUserEmailById(userId)
if(play.Play.application().configuration().getString("application.state") == "development") {
linkToShowComment += "http://localhost:9000/consultation/"
} else {
linkToShowComment += "http://democracit.org/consultation/"
}
linkToShowComment += consultationId + "#articleid=" + articleId + "&annid=" + annotationId + "&commentid=" + commentId
MailerManager.sendEmailToCommenterForLike(userEmail, linkToShowComment)(mailService)
}
def rateComment(user_id:java.util.UUID, comment_id:Long, liked:Option[Boolean], commenterId:Option[UUID], annId:String, articleId:Long, consultationId:Option[Long]) = {
if(liked != None) {
//if the user has liked and has less than 10 likes today
if(liked.get && commenterId.isDefined) {
sendEmailToCommenterForLike(commenterId.get, consultationId.get, articleId, annId, comment_id)
}
if(liked.get && howManyLikesToday(user_id) < 10) {
gamificationEngine.rewardUser(user_id,GamificationEngineTrait.LIKE_COMMENT, comment_id)
//if the user is disliking
} else if(!liked.get) {
commentsRepository.cancelLikeReward(user_id, comment_id)
}
}
// if the user is taking back his like
else if(liked == None) {
commentsRepository.cancelLikeReward(user_id, comment_id)
}
commentsRepository.rateComment(user_id,comment_id,liked)
}
def saveReply(articleId:Long, parentId:Long, discussionthreadclientid:Long,replyText:String, userId:UUID):Long = {
val commentId = commentsRepository.saveCommentReply(replyText, parentId, articleId, discussionthreadclientid, userId)
commentId
}
def saveFinalLawAnnotation(userId:UUID, commentId:Long, finalLawId:Long, annotationIds:List[String]):String = {
consultationRepository.saveFinalLawAnnotation(userId, commentId, finalLawId, annotationIds)
val userProfileRepository = new UserProfileRepository()
val userName = userProfileRepository.getUserFullNameById(userId)
userName
}
def updateFinalLawAnnotation(userId:UUID, commentId:Long, finalLawId:Long, annotationIds:List[String]):String = {
consultationRepository.updateFinalLawAnnotation(userId, commentId, finalLawId, annotationIds)
val userProfileRepository = new UserProfileRepository()
val userName = userProfileRepository.getUserFullNameById(userId)
userName
}
def getFinalLawAnnotationsForComment(commentId:Long, finalLawId:Long):List[FinalLawUserAnnotation] = {
val finalLawUserAnnotation = consultationRepository.getFinalLawAnnotationsForComment(commentId, finalLawId)
finalLawUserAnnotation
}
def howManyLikesToday(user_id:UUID): Int ={
val answer = commentsRepository.howManyLikesToday(user_id)
answer
}
  /** Loads the comments for an article, either crawled opengov comments or
    * native DemocracIT comments.
    *
    * For the native case, replies (comments with a `parentId`) are moved out of
    * the top-level list and attached to their parent's `commentReplies`, so the
    * result contains only top-level comments.
    *
    * NOTE: the `for` generator iterates the list captured at loop start, so the
    * reassignment of `comments` inside the loop is safe — but the ordering of
    * these statements is load-bearing; change with care.
    *
    * @param source   "opengov" for crawled comments, anything else for native ones
    * @param pageSize maximum number of comments to fetch (default 10)
    */
  def getComments(consultationId:Long,
                  articleId:Long,
                  source: String,
                  discussionthreadid:Option[Int],
                  discussionthreadclientid:String,
                  user_id:Option[java.util.UUID],
                  pageSize:Int = 10): List[Comment] = {
    var comments:List[Comment] = Nil
    if (source=="opengov"){
      comments =commentsRepository.getOpenGovComments(consultationId,articleId ,pageSize,user_id ).map{ c =>
        if (c.profileUrl.isDefined && c.profileUrl.get.startsWith("http://www.opengov.gr"))
          c.profileUrl = None // don't display the open gov comment url in the user's profile url //todo: this should be fixed in the crawler in the db
        c
      }
    }
    else {
      comments = commentsRepository.getComments(discussionthreadclientid, pageSize, user_id)
      //search comments to distinguish the ones that are comment replies
      for(comment <- comments) {
        //if the comment has a parentId, it is a reply
        if(comment.parentId.isDefined){
          //get the parentId of the comment
          val parentId = comment.parentId.get
          //get the parent comment
          // NOTE(review): if the parent is not in this page of comments,
          // getCommentById returns Nil and this cast fails — confirm paging
          // never splits a reply from its parent.
          val parentComment = getCommentById(comments, parentId).asInstanceOf[Comment]
          //append the reply to the list of replies of the parent comment
          parentComment.commentReplies = comment :: parentComment.commentReplies
          //exclude (delete) reply from list of comments (only parent comments or comments with no replies should be in this list)
          comments = comments.filterNot(element => element == comment)
        }
      }
    }
    comments
  }
  /** Builds an RSS 2.0 feed with the latest comments of a consultation.
    *
    * @param consultationId the consultation whose comments are exposed
    * @return the feed as an XML element (at most `pageSize` items)
    */
  def getRssForDiscussion(consultationId:Long):scala.xml.Elem= {
    // Feed size is fixed at the 50 most recent comments.
    val pageSize=50
    val consultation_title = this.consultationRepository.get(consultationId).title
    val comments = this.commentsRepository.getRssForConsulationComments(pageSize, consultationId)
    val title = "Consultation:" + " \\"" + consultation_title + "\\" "
    val description = if (comments.isEmpty) "No comments found" else "Last " + pageSize + " comments"
    val rss =
      <rss version="2.0">
        <channel>
          <title> { title }</title>
          <link> http://www.democracit.org/consultation/{ consultationId}</link>
          <description>{ description} </description>
          { comments.map { c =>
          <item>
            <title>{ c.userName } | {c.comment_date.toLocaleString}</title>
            <link>{ c.consultationShareUrl }</link>
            <description>{ c.comment } </description>
          </item>
        }}
        </channel>
      </rss>
    rss
  }
/** Function which returns the comment from a list of comments by comment id
*
* @param comments the list od comments
* @param id id of the comment we want
*/
def getCommentById(comments:List[Comment], id:Long):Any = {
var commentFound:Any = Nil
for(comment <- comments) {
if(comment.id.get == id) {
commentFound = comment
}
}
commentFound
}
} | scify/DemocracIT-Web | app/model/services/AnnotationManager.scala | Scala | apache-2.0 | 10,368 |
/*
Copyright (c) 2017, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.sireum.web.playground
import ffi.monaco.KeyCode
import ffi.monaco.editor._
import ffi.monaco.languages.Languages
import org.scalajs.dom
import org.scalajs.dom.MouseEvent
import org.scalajs.dom.html.{Anchor, Div, Input, Select}
import org.scalajs.dom.raw._
import org.sireum.web.ui.{Modal, Notification}
import scalatags.Text.all._
import org.sireum.common.JSutil._
import scala.collection.immutable.SortedMap
import scala.scalajs.js
object Playground {
  // Shared Monaco editor instance; assigned exactly once in apply().
  var editor: ffi.monaco.editor.IStandaloneCodeEditor = _
  // Current editor text, normalised to LF line endings and without a BOM.
  def editorValue: String = editor.getModel().getValue(EndOfLinePreference.LF, preserveBOM = false)
def updateView(f: () => Unit): Unit = {
val height = s"${dom.window.innerHeight - 90}px"
val editorDiv = $[Div]("#editor")
if (!js.isUndefined(editorDiv)) {
editorDiv.style.height = height
val output = $[Div]("#output")
output.style.height = height
editor.layout()
editor.focus()
dom.document.body.style.backgroundColor = "#8e44ad"
output.style.backgroundColor = "#f8f3fa"
f()
} else dom.window.setTimeout(() => updateView(f), 500)
}
  /** Entry point: validates the platform, registers the Monaco languages,
    * renders the main page, creates the editor, and wires every toolbar
    * button and keyboard shortcut. Bails out with a modal on unsupported
    * OS/browser combinations.
    */
  def apply(): Unit = {
    // Unsupported platform: replace the splash screen with a blocking modal.
    if (detectedPlatform == Platform.Unsupported && detectedBrowser == Browser.Unsupported ||
      detectedPlatform == Platform.Android && detectedBrowser == Browser.Chrome) {
      dom.document.body.removeChild($[Div]("#welcome"))
      Modal.halt("Unsupported Operating System (OS) and Browser",
        raw(s"Sireum Playground does not currently support your OS and browser:<br><br>" +
          s"<blockquote>'${dom.window.navigator.appVersion}'</blockquote><br>" +
          "Supported OSs are macOS, iOS, Linux, Android, and Windows, and " +
          "supported browsers are Safari, Firefox, and Chrome, " +
          "with the exception of Chrome under Android."))
      return
    }
    // Register the Slang and SMT2 syntax-highlighting grammars with Monaco.
    // The grammar objects are embedded as JS source and evaluated here.
    Languages.register(jsObj(id = slangId))
    Languages.setMonarchTokensProvider(slangId, eval("(function(){ " + slangModelText + "; })()"))
    Languages.register(jsObj(id = smt2Id))
    Languages.setMonarchTokensProvider(smt2Id, eval("(function(){ " + smt2ModelText + "; })()"))
    val mainDiv = render[Div](mainPage())
    editor = Editor.create($[Div](mainDiv, "#editor"),
      jsObj[IEditorConstructionOptions](
        value = "",
        language = slangId,
        fontSize = 16
      ))
    // Persist the current file (content + caret position) to storage.
    def save(): Unit =
      Files.save(Files.selectedFilename, editor.getPosition().lineNumber.toInt,
        editor.getPosition().column.toInt, editorValue)
    // Auto-save whenever a newline is inserted; caret is placed on the new line.
    editor.getModel().onDidChangeContent((e: IModelContentChangedEvent2) =>
      if (e.text.contains('\n'))
        Files.save(Files.selectedFilename, editor.getPosition().lineNumber.toInt + 1, 1, editorValue))
    // Swallow F1 so Monaco's command palette does not open.
    editor.addCommand(KeyCode.F1, jsObj[ICommandHandler](apply = (() => ()): js.Function0[Unit]), "")
    // Save on page unload and whenever the editor loses focus.
    dom.window.onunload = (_: Event) => save()
    editor.onDidBlurEditor(() => save())
    editor.onDidBlurEditorText(() => save())
    $[Select](mainDiv, "#filename").onchange = (_: Event) => Files.load(Files.selectedFilename)
    // Once the document is ready, attach the UI and replace the splash screen.
    dom.document.onreadystatechange = (_: Event) => {
      dom.document.body.appendChild(mainDiv)
      updateView(() => {
        Files.loadFiles()
        dom.document.body.removeChild($[Div]("#welcome"))
      })
    }
    dom.window.onresize = (_: UIEvent) => updateView(() => ())
    // Run button / F8 (execution is not implemented yet).
    val runButton = $[Anchor](mainDiv, "#run")
    def run(): Unit =
      if (runButton.getAttribute("disabled") != "true")
        Notification.notify(Notification.Kind.Info, s"Slang execution coming soon.")
    runButton.onclick = (_: MouseEvent) => run()
    editor.addCommand(KeyCode.F8, jsObj[ICommandHandler](apply = (() => run()): js.Function0[Unit]), "")
    // Verify button / F9: SMT2 files are sent to Z3; Slang checking is pending.
    def verify(): Unit =
      if (Files.selectedFilename.endsWith(Files.smtExt))
        Z3.queryAsync(editorValue, (status, result) => {
          $[Div](mainDiv, "#output").innerHTML = pre(result).render
          if (status) Notification.notify(Notification.Kind.Success, s"Successfully invoked Z3.")
          else Notification.notify(Notification.Kind.Error, s"Z3 invocation unsuccessful.")
        })
      else Notification.notify(Notification.Kind.Info, s"Slang verification coming soon.")
    $[Anchor](mainDiv, "#verify").onclick = (_: MouseEvent) => verify()
    editor.addCommand(KeyCode.F9, jsObj[ICommandHandler](apply = (() => verify()): js.Function0[Unit]), "")
    // Options button is permanently disabled until configuration is implemented.
    val optionsButton = $[Anchor](mainDiv, "#options")
    optionsButton.setAttribute("disabled", "true")
    optionsButton.onclick = (_: MouseEvent) =>
      if (optionsButton.getAttribute("disabled") != "true")
        Notification.notify(Notification.Kind.Info, s"Sireum configuration coming soon.")
    // Default new files to the Slang extension unless a known extension is given.
    def appendSlangExtIfNoExt(filename: String): String =
      if (filename.endsWith(Files.slangExt) || filename.endsWith(Files.smtExt))
        filename
      else filename + Files.slangExt
    // File management: add / duplicate / rename / delete.
    $[Anchor](mainDiv, "#add-file").onclick = (_: MouseEvent) =>
      Modal.textInput("New File", "Filename:", "Enter filename",
        filename => Files.isValidNewFilename(filename),
        filename => Files.newFile(appendSlangExtIfNoExt(filename), None)
      )
    $[Anchor](mainDiv, "#duplicate-file").onclick = (_: MouseEvent) =>
      Modal.textInput("Duplicate File", "Filename:", "Enter filename",
        filename => Files.isValidNewFilename(filename),
        filename => Files.newFile(appendSlangExtIfNoExt(filename), Some(editorValue))
      )
    $[Anchor](mainDiv, "#rename-file").onclick = (_: MouseEvent) =>
      Modal.textInput("Rename File", "Filename:", "Enter filename",
        // Renaming may not change the file's extension.
        filename => Files.isValidNewFilename(filename) && {
          val selected = Files.selectedFilename
          val newName = appendSlangExtIfNoExt(filename)
          selected.substring(selected.lastIndexOf('.')) == newName.substring(newName.lastIndexOf('.'))
        },
        filename => {
          val isSingle = Files.lookupFilenames()._2.length == 1
          Files.deleteFile(Files.selectedFilename)
          Files.newFile(appendSlangExtIfNoExt(filename), Some(editorValue))
          // Deleting the last file auto-creates "untitled"; drop it after rename.
          if (isSingle) Files.deleteFile(Files.untitled)
        }
      )
    $[Anchor](mainDiv, "#delete-file").onclick = (_: MouseEvent) => {
      val f = Files.selectedFilename
      Modal.confirm(s"Delete File",
        s"Are you sure you want to delete $f?",
        () => Files.deleteFile(f))
    }
    // Erase everything: GitHub credentials, files, and Z3 state.
    $[Anchor](mainDiv, "#clean").onclick = (_: MouseEvent) =>
      Modal.confirm(s"Erase Data",
        s"Are you sure you want to erase all data including files, etc.?",
        () => {
          GitHub.erase()
          Files.erase()
          Z3.erase()
          Files.loadFiles()
        })
    // Shows incoming changes (from zip upload or GitHub pull) and merges them
    // after user confirmation.
    def incoming(title: String, successfulMessage: String, noChangesMessage: String,
                 fm: SortedMap[String, String]): Unit = if (fm.nonEmpty) {
      val changes = Files.incomingChanges(fm)
      if (changes.nonEmpty) {
        val tbl = table(cls := "table",
          thead(tr(th("File"), th("Change"))),
          tbody(changes.map(p => tr(th(p._1), td(p._2.toString))).toList))
        Modal.confirm(title,
          div(cls := "field", label(cls := "label")("Are you sure you want to incorporate the following incoming changes?"), tbl),
          () => {
            Files.mergeIncoming(changes, fm)
            Notification.notify(Notification.Kind.Success, successfulMessage)
          })
      } else Notification.notify(Notification.Kind.Info, noChangesMessage)
    }
    // Zip download/upload of the whole workspace.
    $[Anchor](mainDiv, "#download").onclick = (_: MouseEvent) =>
      Modal.textInput("Download As Zip", "Filename:", "Enter filename", _.nonEmpty,
        name => Files.saveZip(name + ".zip"))
    $[Anchor](mainDiv, "#upload").onclick = (_: MouseEvent) => click("#file-input")
    val fileInput = $[Input]("#file-input")
    fileInput.onchange = (e: Event) => Files.loadZips(e.target.dyn.files.asInstanceOf[FileList], fm =>
      incoming("Upload Zip", "Upload was successful.", "There were no changes to incorporate.", fm))
    // GitHub pull/push integration (only Slang and SMT2 files are synced).
    $[Anchor](mainDiv, "#github").onclick = (_: MouseEvent) =>
      Modal.gitHubToken(
        GitHub.lookup(),
        path => path.endsWith(Files.slangExt) || path.endsWith(Files.smtExt),
        (_, fm) => incoming("Pull From GitHub", "Pull was successful.", "There were no changes to pull.", fm),
        (repoAuth, fm) => {
          val changes = Files.outgoingChanges(fm)
          if (changes.nonEmpty) {
            val tbl = table(cls := "table",
              thead(tr(th("File"), th("Change"))),
              tbody(changes.map(p => tr(th(p._1), td(p._2.toString))).toList))
            Modal.confirm("Push To GitHub",
              div(cls := "field", label(cls := "label")("Are you sure you want to perform the following outgoing changes?"), tbl),
              () => GitHub.pushChanges(repoAuth, changes, () => Notification.notify(Notification.Kind.Success, "Push was successful."),
                err => Notification.notify(Notification.Kind.Error, s"Push was unsuccessful (reason: $err).")))
          } else Notification.notify(Notification.Kind.Info, s"There were no changes to push.")
        })
  }
import scalatags.Text.tags2.{attr, _}
def iconButton(buttonId: String, icon: String, tooltipPos: String, tooltip: String, clsAttrs: Seq[String], iconAttrs: Seq[String]): Frag = {
a(id := buttonId, cls := s"""button ${clsAttrs.mkString(" ")}""", border := "none", attr("data-balloon-pos") := tooltipPos, attr("data-balloon") := tooltip,
span(cls := s"""icon ${iconAttrs.mkString(" ")}""",
i(cls := icon)
)
)
}
  /** A primary-coloured toolbar button; see [[iconButton]] for parameters. */
  def topButton(buttonId: String, icon: String, tooltipPos: String, tooltip: String): Frag = {
    iconButton(buttonId, icon, tooltipPos, tooltip, Seq("is-primary"), Seq())
  }
  /** Builds the static page layout (scalatags): a top navigation bar with the
    * run/verify toolbar, file selector, and workspace actions; a two-column
    * area holding the editor (#editor) and output pane (#output); and a footer.
    */
  def mainPage(): Frag = {
    div(id := "view", width := "100%",
      nav(cls := "nav",
        div(cls := "nav-left",
          div(cls := "nav-item",
            topButton("home", "fa fa-home", "right", "Home"),
            p(cls := "title is-4", raw(" Sireum Playground"))),
          div(cls := "nav-item",
            div(cls := "container is-fluid",
              div(cls :="field is-grouped",
                topButton("run", "fa fa-play", "right", "Run script (F8)"),
                topButton("verify", "fa fa-check", "right", "Check script (F9)"),
                topButton("options", "fa fa-cog", "right", "Configure"))))),
        div(cls := "nav-center",
          div(cls := "nav-item",
            div(cls := "field grouped",
              topButton("add-file", "fa fa-plus", "left", "Add file"),
              topButton("duplicate-file", "fa fa-copy", "left", "Duplicate file"),
              span(cls := "select",
                select(id := "filename")),
              topButton("rename-file", "fa fa-pencil-square-o", "right", "Rename file"),
              topButton("delete-file", "fa fa-minus", "right", "Delete file")))),
        div(cls :="nav-right nav-menu is-active",
          div(cls := "nav-item",
            div(cls := "container is-fluid",
              div(cls :="field is-grouped",
                topButton("clean", "fa fa-eraser", "left", "Erase all data"),
                topButton("download", "fa fa-download", "left", "Download files as a zip file"),
                topButton("upload", "fa fa-upload", "left", "Upload files from a zip file"),
                topButton("github", "fa fa-github", "left", "GitHub integration")))))),
      div(id := "columns", cls := "columns is-gapless", backgroundColor := "white",
        div(cls := "column is-fullheight is-7",
          div(id := "editor")),
        div(cls := "column is-fullheight is-5",
          div(id := "output", overflow := "auto", fontSize := "20px"))),
      div(id := "footer", position := "absolute", bottom := "10px", right := "10px",
        p(
          span(color := "white",
            "SAnToS Laboratory, Kansas State University"))))
  }
  // Monaco language ids registered in apply() for the two supported grammars.
  val slangId = "slang"
  val smt2Id = "smt2"
val slangModelText: String =
"""return {
| keywords: [
| 'case', 'class', 'def', 'do', 'else', 'extends', 'false',
| 'for', 'if', 'import', 'match', 'object',
| 'package', 'return', 'sealed', 'this', 'trait',
| 'true', 'type', 'val', 'var', 'while', 'with', 'yield',
| 'T', 'F'
| ],
|
| typeKeywords: [
| 'B',
| 'Z', 'Z8', 'Z16', 'Z32', 'Z64',
| 'N', 'N8', 'N16', 'N32', 'N64',
| 'S8', 'S16', 'S32', 'S64',
| 'U8', 'U16', 'U32', 'U64',
| 'F32', 'F64', 'R',
| 'IS', 'MS',
| 'MSZ', 'MSZ8', 'MSZ16', 'MSZ32', 'MSZ64',
| 'MSN', 'MSN8', 'MSN16', 'MSN32', 'MSN64',
| 'MSS8', 'MSS16', 'MSS32', 'MSS64',
| 'MSU8', 'MSU16', 'MSU32', 'MSU64',
| 'ISZ', 'ISZ8', 'ISZ16', 'ISZ32', 'ISZ64',
| 'ISN', 'ISN8', 'ISN16', 'ISN32', 'ISN64',
| 'ISS8', 'ISS16', 'ISS32', 'ISS64',
| 'ISU8', 'ISU16', 'ISU32', 'ISU64',
| 'Unit'
| ],
|
| operators: [
| '=', '>', '<', '!', '~', ':',
| '==', '<=', '>=', '!=',
| '+', '-', '*', '/', '&', '|', '|^', '%', '<<',
| '>>', '>>>'
| ],
|
| symbols: /[=><!~?:&|+\-*\/\^%]+/,
|
| escapes: /\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,
|
| tokenizer: {
| root: [
| // identifiers and keywords
| [/[a-z_$][\w$]*/, { cases: { '@typeKeywords': 'keyword',
| '@keywords': 'keyword',
| '@default': 'identifier' } }],
| [/[A-Z][\w\$]*/, 'type.identifier' ], // to show class names nicely
|
| // whitespace
| { include: '@whitespace' },
|
| // delimiters and operators
| [/[{}()\[\]]/, '@brackets'],
| [/[<>](?!@symbols)/, '@brackets'],
| [/@symbols/, { cases: { '@operators': 'operator',
| '@default' : '' } } ],
|
| // @ annotations.
| [/@\s*[a-zA-Z_\$][\w\$]*/, { token: 'annotation' }],
|
| // numbers
| [/\d*\.\d+([eE][\-+]?\d+)?/, 'number.float'],
| [/0[xX][0-9a-fA-F]+/, 'number.hex'],
| [/\d+/, 'number'],
|
| // delimiter: after number because of .\d floats
| [/[;,.]/, 'delimiter'],
|
| // strings
| [/"([^"\\]|\\.)*$/, 'string.invalid' ], // non-teminated string
| [/"/, { token: 'string.quote', bracket: '@open', next: '@string' } ],
|
| // characters
| [/'[^\\']'/, 'string'],
| [/(')(@escapes)(')/, ['string','string.escape','string']],
| [/'/, 'string.invalid']
| ],
|
| comment: [
| [/[^\/*]+/, 'comment' ],
| [/\/\*/, 'comment', '@push' ], // nested comment
| ["\\*/", 'comment', '@pop' ],
| [/[\/*]/, 'comment' ]
| ],
|
| string: [
| [/[^\\"]+/, 'string'],
| [/@escapes/, 'string.escape'],
| [/\\./, 'string.escape.invalid'],
| [/"/, { token: 'string.quote', bracket: '@close', next: '@pop' } ]
| ],
|
| whitespace: [
| [/[ \t\r\n]+/, 'white'],
| [/\/\*/, 'comment', '@comment' ],
| [/\/\/.*$/, 'comment'],
| ],
| },
|};
""".stripMargin.trim
val smt2ModelText: String =
"""
|// Difficulty: "Easy"
|// SMT 2.0 language
|// See http://www.rise4fun.com/z3 or http://www.smtlib.org/ for more information
|return {
|
| // Set defaultToken to invalid to see what you do not tokenize yet
| // defaultToken: 'invalid',
|
| keywords: [
| 'define-fun', 'define-const', 'assert', 'push', 'pop', 'assert', 'check-sat',
| 'declare-const', 'declare-fun', 'get-model', 'get-value', 'declare-sort',
| 'declare-datatypes', 'reset', 'eval', 'set-logic', 'help', 'get-assignment',
| 'exit', 'get-proof', 'get-unsat-core', 'echo', 'let', 'forall', 'exists',
| 'define-sort', 'set-option', 'get-option', 'set-info', 'check-sat-using', 'apply', 'simplify',
| 'display', 'as', '!', 'get-info', 'declare-map', 'declare-rel', 'declare-var', 'rule',
| 'query', 'get-user-tactics'
| ],
|
| operators: [
| '=', '>', '<', '<=', '>=', '=>', '+', '-', '*', '/',
| ],
|
| builtins: [
| 'mod', 'div', 'rem', '^', 'to_real', 'and', 'or', 'not', 'distinct',
| 'to_int', 'is_int', '~', 'xor', 'if', 'ite', 'true', 'false', 'root-obj',
| 'sat', 'unsat', 'const', 'map', 'store', 'select', 'sat', 'unsat',
| 'bit1', 'bit0', 'bvneg', 'bvadd', 'bvsub', 'bvmul', 'bvsdiv', 'bvudiv', 'bvsrem',
| 'bvurem', 'bvsmod', 'bvule', 'bvsle', 'bvuge', 'bvsge', 'bvult',
| 'bvslt', 'bvugt', 'bvsgt', 'bvand', 'bvor', 'bvnot', 'bvxor', 'bvnand',
| 'bvnor', 'bvxnor', 'concat', 'sign_extend', 'zero_extend', 'extract',
| 'repeat', 'bvredor', 'bvredand', 'bvcomp', 'bvshl', 'bvlshr', 'bvashr',
| 'rotate_left', 'rotate_right', 'get-assertions'
| ],
|
| brackets: [
| ['(',')','delimiter.parenthesis'],
| ['{','}','delimiter.curly'],
| ['[',']','delimiter.square']
| ],
|
| // we include these common regular expressions
| symbols: /[=><~&|+\-*\/%@#]+/,
|
| // C# style strings
| escapes: /\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,
|
| // The main tokenizer for our languages
| tokenizer: {
| root: [
| // identifiers and keywords
| [/[a-z_][\w\-\.']*/, { cases: { '@builtins': 'predefined.identifier',
| '@keywords': 'keyword',
| '@default': 'identifier' } }],
| [/[A-Z][\w\-\.']*/, 'type.identifier' ],
| [/[:][\w\-\.']*/, 'string.identifier' ],
| [/[$?][\w\-\.']*/, 'constructor.identifier' ],
|
| // whitespace
| { include: '@whitespace' },
|
| // delimiters and operators
| [/[()\[\]]/, '@brackets'],
| [/@symbols/, { cases: { '@operators': 'predefined.operator',
| '@default' : 'operator' } } ],
|
|
| // numbers
| [/\d*\.\d+([eE][\-+]?\d+)?/, 'number.float'],
| [/0[xX][0-9a-fA-F]+/, 'number.hex'],
| [/#[xX][0-9a-fA-F]+/, 'number.hex'],
| [/#b[0-1]+/, 'number.binary'],
| [/\d+/, 'number'],
|
| // delimiter: after number because of .\d floats
| [/[,.]/, 'delimiter'],
|
| // strings
| [/"([^"\\]|\\.)*$/, 'string.invalid' ], // non-teminated string
| [/"/, { token: 'string.quote', bracket: '@open', next: '@string' } ],
|
| // user values
| [/\{/, { token: 'string.curly', bracket: '@open', next: '@uservalue' } ],
| ],
|
| uservalue: [
| [/[^\\\}]+/, 'string' ],
| [/\}/, { token: 'string.curly', bracket: '@close', next: '@pop' } ],
| [/\\\}/, 'string.escape'],
| [/./, 'string'] // recover
| ],
|
| string: [
| [/[^\\"]+/, 'string'],
| [/@escapes/, 'string.escape'],
| [/\\./, 'string.escape.invalid'],
| [/"/, { token: 'string.quote', bracket: '@close', next: '@pop' } ]
| ],
|
| whitespace: [
| [/[ \t\r\n]+/, 'white'],
| [/;.*$/, 'comment'],
| ],
| },
|};
""".stripMargin
}
| sireum/v3 | web/js/src/main/scala/org/sireum/web/playground/Playground.scala | Scala | bsd-2-clause | 21,494 |
package mesosphere.marathon.core.group
import javax.inject.Provider
import akka.actor.ActorRef
import akka.event.EventStream
import akka.stream.Materializer
import com.codahale.metrics.Gauge
import mesosphere.marathon.core.group.impl.{ GroupManagerActor, GroupManagerDelegate }
import mesosphere.marathon.core.leadership.LeadershipModule
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.storage.repository.GroupRepository
import mesosphere.marathon.{ DeploymentService, MarathonConf }
import mesosphere.util.CapConcurrentExecutions
import scala.concurrent.Await
/**
* Provides a [[GroupManager]] implementation.
*/
class GroupManagerModule(
config: MarathonConf,
leadershipModule: LeadershipModule,
serializeUpdates: CapConcurrentExecutions,
scheduler: Provider[DeploymentService],
groupRepo: GroupRepository,
storage: StorageProvider,
eventBus: EventStream,
metrics: Metrics)(implicit mat: Materializer) {
  // Group manager actor; started via the leadership module so it only runs
  // while this instance is the elected leader.
  private[this] val groupManagerActorRef: ActorRef = {
    val props = GroupManagerActor.props(
      serializeUpdates,
      scheduler,
      groupRepo,
      storage,
      config,
      eventBus)
    leadershipModule.startWhenLeader(props, "groupManager")
  }
  // The public GroupManager facade, plus app/group-count and uptime gauges.
  // NOTE(review): the count gauges block the metrics-reporting thread via
  // Await (up to zkTimeoutDuration) on every read — confirm this is acceptable
  // for the configured reporter interval.
  val groupManager: GroupManager = {
    val groupManager = new GroupManagerDelegate(config, groupManagerActorRef)
    metrics.gauge("service.mesosphere.marathon.app.count", new Gauge[Int] {
      override def getValue: Int = {
        Await.result(groupManager.rootGroup(), config.zkTimeoutDuration).transitiveAppsById.size
      }
    })
    metrics.gauge("service.mesosphere.marathon.group.count", new Gauge[Int] {
      override def getValue: Int = {
        Await.result(groupManager.rootGroup(), config.zkTimeoutDuration).transitiveGroups.size
      }
    })
    metrics.gauge("service.mesosphere.marathon.uptime", new Gauge[Long] {
      // Uptime is measured from module construction time.
      val startedAt = System.currentTimeMillis()
      override def getValue: Long = {
        System.currentTimeMillis() - startedAt
      }
    })
    groupManager
  }
}
| timcharper/marathon | src/main/scala/mesosphere/marathon/core/group/GroupManagerModule.scala | Scala | apache-2.0 | 2,103 |
package ch.wsl.box.client.services
import wvlet.airframe._
/** Airframe DI module declaring the client-side service singletons.
  * Mix this in to obtain instances wired from the enclosing airframe design.
  */
trait ServiceModule {
  val httpClient = bind[HttpClient]
  val rest = bind[REST]
  val clientSession = bind[ClientSession]
  val navigator = bind[Navigator]
}
| Insubric/box | client/src/main/scala/ch/wsl/box/client/services/ServiceModule.scala | Scala | apache-2.0 | 222 |
/*
* Copyright (c) 2012-2017 by its authors. Some rights reserved.
* See the project homepage at: https://github.com/monix/shade
*
* Licensed under the MIT License (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy
* of the License at:
*
* https://github.com/monix/shade/blob/master/LICENSE.txt
*/
package shade
/**
 * Super-class for errors thrown when specific cache-store related
 * errors occur.
 *
 * @param msg description of the failure; also used as the exception message
 */
class CacheException(val msg: String) extends RuntimeException(msg)
/**
 * Thrown in case a cache store related operation times out.
 *
 * @param key the cache key whose operation timed out
 */
class TimeoutException(val key: String) extends CacheException(key)
/**
 * Thrown in case a cache store related operation is cancelled
 * (like due to closed / broken connections)
 *
 * @param key the cache key whose operation was cancelled
 */
class CancelledException(val key: String) extends CacheException(key)
/**
 * Gets thrown in case the implementation is wrong and
 * mishandled a status. Should never get thrown and
 * if it does, then it's a bug.
 *
 * @param msg description of the mishandled status
 */
class UnhandledStatusException(msg: String) extends CacheException(msg)
/**
 * Gets thrown in case a key is not found in the cache store on #apply().
 *
 * @param key the cache key that was missing
 */
class KeyNotInCacheException(val key: String) extends CacheException(key)
| lloydmeta/shade | src/main/scala/shade/CacheException.scala | Scala | mit | 1,240 |
import shapeless.tag
import shapeless.tag.@@
package object learnshapeless {
  /** Fails with a descriptive message unless `expected == actual`. */
  def assertEquals[T](expected: T, actual: T) = assert(expected == actual, s"Expected: $expected != Actual: $actual")
  /** Compiles only when `A` and `B` are provably the same type; no runtime effect. */
  def assertHaveEqualTypes[A, B](a: A, b: B)(implicit ev: A =:= B): Unit = ()
  /** Compiles only when `A` is a subtype of `B`; no runtime effect. */
  def assertConformsToTypeOf[A, B](subtype: A, supertype: B)(implicit ev: A <:< B): Unit = ()
  //The `tag` operator provided by Shapeless doesn't work when the tagged value is assigned onto an explicit typed val or def.
  //https://github.com/milessabin/shapeless/issues/557
  //for this reason, I'm using a variant syntax but the final effect is the same
  object tag2 {
    // Starts tagging `t`; follow with `.@@[U]` to obtain a `T @@ U`.
    def apply[T](t: T) = new TaggedValue[T](t)
    class TaggedValue[T](t: T) {
      // Applies the Shapeless tag, producing the tagged type `T @@ U`.
      def @@[U]: T @@ U = tag[U].apply[T](t)
    }
  }
}
| benhutchison/learningshapeless | src/main/scala/learnshapeless/package.scala | Scala | apache-2.0 | 790 |
package com.danielasfregola.twitter4s.http.clients
import akka.http.scaladsl.client.RequestBuilding
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model._
import akka.stream.Materializer
import com.danielasfregola.twitter4s.http.marshalling.{BodyEncoder, Parameters}
import com.danielasfregola.twitter4s.http.oauth.OAuth1Provider
import scala.concurrent.{ExecutionContext, Future}
private[twitter4s] trait OAuthClient extends CommonClient with RequestBuilding {
def oauthProvider: OAuth1Provider
  /** Asynchronously signs a request by appending an OAuth1 `Authorization`
    * header computed over the request as-is (including its entity).
    *
    * @param callback optional OAuth callback URL included in the signature
    */
  def withOAuthHeader(callback: Option[String])(
      implicit materializer: Materializer): HttpRequest => Future[HttpRequest] = { request =>
    implicit val ec = materializer.executionContext
    for {
      authorizationHeader <- oauthProvider.oauth1Header(callback)(request, materializer)
    } yield request.withHeaders(request.headers :+ authorizationHeader)
  }
  /** Like [[withOAuthHeader]], but computes the signature over the request with
    * an empty entity (the body is excluded from the signature); the returned
    * request still carries its original entity.
    *
    * @param callback optional OAuth callback URL included in the signature
    */
  def withSimpleOAuthHeader(callback: Option[String])(
      implicit materializer: Materializer): HttpRequest => Future[HttpRequest] = { request =>
    implicit val ec = materializer.executionContext
    for {
      authorizationHeader <- oauthProvider.oauth1Header(callback)(request.withEntity(HttpEntity.Empty), materializer)
    } yield request.withHeaders(request.headers :+ authorizationHeader)
  }
  // Replace akka-http's stock verb builders with OAuth-aware builders that add
  // the body-encoding conveniences defined in OAuthRequestBuilder below.
  override val Get = new OAuthRequestBuilder(GET)
  override val Post = new OAuthRequestBuilder(POST)
  override val Put = new OAuthRequestBuilder(PUT)
  override val Patch = new OAuthRequestBuilder(PATCH)
  override val Delete = new OAuthRequestBuilder(DELETE)
  override val Options = new OAuthRequestBuilder(OPTIONS)
  override val Head = new OAuthRequestBuilder(HEAD)
  /** Request builder that extends akka-http's `RequestBuilder` with helpers for
    * query parameters, form-encoded bodies, JSON bodies, and multipart data.
    */
  private[twitter4s] class OAuthRequestBuilder(method: HttpMethod) extends RequestBuilder(method) with BodyEncoder {
    // Appends the parameters as a query string when non-empty.
    def apply(uri: String, parameters: Parameters): HttpRequest =
      if (parameters.toString.nonEmpty) apply(s"$uri?$parameters") else apply(uri)
    // Encodes the product's fields as an application/x-www-form-urlencoded body.
    def apply(uri: String, content: Product): HttpRequest = {
      val data = toBodyAsEncodedParams(content)
      val contentType = ContentType(MediaTypes.`application/x-www-form-urlencoded`)
      apply(uri, data, contentType)
    }
    // Serializes the content as a JSON body (json4s native serialization).
    def asJson[A <: AnyRef](uri: String, content: A): HttpRequest = {
      val jsonData = org.json4s.native.Serialization.write(content)
      val contentType = ContentType(MediaTypes.`application/json`)
      apply(uri, jsonData, contentType)
    }
    // Encodes the product's fields as parameters with the given content type.
    def apply(uri: String, content: Product, contentType: ContentType): HttpRequest = {
      val data = toBodyAsParams(content)
      apply(uri, data, contentType)
    }
    // Builds a request whose entity is the raw string with the given content type.
    def apply(uri: String, data: String, contentType: ContentType): HttpRequest =
      apply(uri).withEntity(HttpEntity(data).withContentType(contentType))
    // Builds a multipart/form-data request (e.g. media uploads).
    def apply(uri: String, multipartFormData: Multipart.FormData)(implicit ec: ExecutionContext): HttpRequest =
      apply(Uri(uri), Some(multipartFormData))
  }
| DanielaSfregola/twitter4s | src/main/scala/com/danielasfregola/twitter4s/http/clients/OAuthClient.scala | Scala | apache-2.0 | 2,932 |
package pl.touk.nussknacker.engine.avro.source.delayed
import cats.data.Validated.{Invalid, Valid}
import pl.touk.nussknacker.engine.api.context.transformation.{DefinedEagerParameter, NodeDependencyValue}
import pl.touk.nussknacker.engine.api.context.ValidationContext
import pl.touk.nussknacker.engine.api.definition.Parameter
import pl.touk.nussknacker.engine.api.process.ProcessObjectDependencies
import pl.touk.nussknacker.engine.avro.KafkaAvroBaseComponentTransformer.SchemaVersionParamName
import pl.touk.nussknacker.engine.avro.schemaregistry.SchemaRegistryProvider
import pl.touk.nussknacker.engine.avro.source.KafkaAvroSourceFactory
import pl.touk.nussknacker.engine.api.NodeId
import pl.touk.nussknacker.engine.kafka.source.KafkaSourceFactory.KafkaSourceImplFactory
import pl.touk.nussknacker.engine.kafka.source.delayed.DelayedKafkaSourceFactory._
import scala.reflect.ClassTag
class DelayedKafkaAvroSourceFactory[K: ClassTag, V: ClassTag](schemaRegistryProvider: SchemaRegistryProvider,
processObjectDependencies: ProcessObjectDependencies,
implProvider: KafkaSourceImplFactory[K, V])
extends KafkaAvroSourceFactory[K, V](schemaRegistryProvider, processObjectDependencies, implProvider) {
  // Extends the base Avro source's schema-dependent parameters with the
  // delay-specific ones: the timestamp field to delay on and the delay itself.
  override def paramsDeterminedAfterSchema: List[Parameter] = super.paramsDeterminedAfterSchema ++ List(
    TimestampFieldParameter, DelayParameter
  )
  /** Validation steps once topic, schema version, timestamp field and delay are
    * all defined: resolves the value schema, validates the delay value and the
    * timestamp field against the schema's type, and produces the final source
    * definition (or the accumulated errors).
    */
  override protected def nextSteps(context: ValidationContext, dependencies: List[NodeDependencyValue])
                                  (implicit nodeId: NodeId): NodeTransformationDefinition = {
    case step@TransformationStep(
    (`topicParamName`, DefinedEagerParameter(topic: String, _)) ::
      (SchemaVersionParamName, DefinedEagerParameter(version: String, _)) ::
      (TimestampFieldParamName, DefinedEagerParameter(field, _)) ::
      (DelayParameterName, DefinedEagerParameter(delay, _)) :: Nil, _) =>
      val preparedTopic = prepareTopic(topic)
      val versionOption = parseVersionOption(version)
      val valueValidationResult = determineSchemaAndType(prepareValueSchemaDeterminer(preparedTopic, versionOption), Some(SchemaVersionParamName))
      valueValidationResult match {
        case Valid((valueRuntimeSchema, typingResult)) =>
          // Both parameters are optional: null values skip their validation.
          val delayValidationErrors = Option(delay.asInstanceOf[java.lang.Long]).map(d => validateDelay(d)).getOrElse(Nil)
          val timestampValidationErrors = Option(field.asInstanceOf[String]).map(f => validateTimestampField(f, typingResult)).getOrElse(Nil)
          val errors = delayValidationErrors ++ timestampValidationErrors
          prepareSourceFinalResults(preparedTopic, valueValidationResult, context, dependencies, step.parameters, errors)
        case Invalid(exc) =>
          prepareSourceFinalErrors(context, dependencies, step.parameters, List(exc))
      }
    // Parameters present but not (yet) fully evaluated: report without results.
    case step@TransformationStep((`topicParamName`, _) :: (SchemaVersionParamName, _) :: (TimestampFieldParamName, _) :: (DelayParameterName, _) :: Nil, _) =>
      prepareSourceFinalErrors(context, dependencies, step.parameters, errors = Nil)
  }
} | TouK/nussknacker | utils/avro-components-utils/src/main/scala/pl/touk/nussknacker/engine/avro/source/delayed/DelayedKafkaAvroSourceFactory.scala | Scala | apache-2.0 | 3,179 |
import sbt._
import Keys._
import java.io.File
import scala.util.control.NonFatal
/**
 * sbt helper that compiles `*.proto` sources with an external `protoc` binary.
 *
 * Wire [[Protobuf.settings]] into a project to get a `protobuf-generate` task that
 * (re)generates Java sources for every configured source/output directory pair,
 * after verifying the locally installed protoc version.
 */
object Protobuf {
  val paths = SettingKey[Seq[File]]("protobuf-paths", "The paths that contain *.proto files.")
  val outputPaths = SettingKey[Seq[File]]("protobuf-output-paths", "The paths where to save the generated *.java files.")
  val protoc = SettingKey[String]("protobuf-protoc", "The path and name of the protoc executable.")
  val protocVersion = SettingKey[String]("protobuf-protoc-version", "The version of the protoc executable.")
  val generate = TaskKey[Unit]("protobuf-generate", "Compile the protobuf sources and do all processing.")

  lazy val settings: Seq[Setting[_]] = Seq(
    paths := Seq((sourceDirectory in Compile).value, (sourceDirectory in Test).value).map(_ / "protobuf"),
    outputPaths := Seq((sourceDirectory in Compile).value, (sourceDirectory in Test).value).map(_ / "java"),
    protoc := "protoc",
    protocVersion := "2.5.0",
    generate := {
      val sourceDirs = paths.value
      val targetDirs = outputPaths.value
      // Source and destination directories are zipped pairwise below, so their counts must match.
      if (sourceDirs.size != targetDirs.size)
        sys.error(s"Unbalanced number of paths and destination paths!\\nPaths: $sourceDirs\\nDestination Paths: $targetDirs")
      if (sourceDirs exists (_.exists)) {
        val cmd = protoc.value
        val log = streams.value.log
        checkProtocVersion(cmd, protocVersion.value, log)
        (sourceDirs zip targetDirs) foreach { case (src, dst) =>
          // Wipe previously generated sources so deleted .proto files leave no stale .java behind.
          IO.delete(dst)
          generate(cmd, src, dst, log)
        }
      }
    }
  )

  /** Runs `protoc` via `thunk`, wrapping any failure with the full command line for easier debugging. */
  private def callProtoc[T](protoc: String, args: Seq[String], log: Logger, thunk: (ProcessBuilder, Logger) => T): T =
    try {
      val proc = Process(protoc, args)
      thunk(proc, log)
    } catch {
      case NonFatal(e) => throw new RuntimeException("error while executing '%s' with args: %s" format(protoc, args.mkString(" ")), e)
    }

  /** Fails the build unless the locally installed protoc reports exactly the expected version. */
  private def checkProtocVersion(protoc: String, protocVersion: String, log: Logger): Unit = {
    val res = callProtoc(protoc, Seq("--version"), log, { (p, l) => p !! l })
    // `protoc --version` prints e.g. "libprotoc 2.5.0"; keep only the trailing version token.
    val version = res.split(" ").last.trim
    if (version != protocVersion) {
      sys.error("Wrong protoc version! Expected %s but got %s" format (protocVersion, version))
    }
  }

  /** Compiles every .proto file found under `srcDir` into Java sources under `targetDir`. */
  private def generate(protoc: String, srcDir: File, targetDir: File, log: Logger): Unit = {
    val protoFiles = (srcDir ** "*.proto").get
    if (srcDir.exists)
      if (protoFiles.isEmpty)
        log.info("Skipping empty source directory %s" format srcDir)
      else {
        targetDir.mkdirs()
        log.info("Generating %d protobuf files from %s to %s".format(protoFiles.size, srcDir, targetDir))
        protoFiles.foreach { proto => log.info("Compiling %s" format proto) }
        val exitCode = callProtoc(protoc, Seq("-I" + srcDir.absolutePath, "--java_out=%s" format targetDir.absolutePath) ++
          protoFiles.map(_.absolutePath), log, { (p, l) => p ! l })
        if (exitCode != 0)
          sys.error("protoc returned exit code: %d" format exitCode)
      }
  }
}
package mesosphere.marathon
package core.health.impl
import akka.event.EventStream
import com.typesafe.config.{ Config, ConfigFactory }
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.health.{ Health, HealthCheck, MesosCommandHealthCheck }
import mesosphere.marathon.core.instance.{ Instance, TestInstanceBuilder, TestTaskBuilder }
import mesosphere.marathon.core.leadership.{ AlwaysElectedLeadershipModule, LeadershipModule }
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.termination.KillService
import mesosphere.marathon.core.task.tracker.{ InstanceTracker, InstanceTrackerModule, InstanceStateOpProcessor }
import mesosphere.marathon.state.PathId.StringPathId
import mesosphere.marathon.state._
import mesosphere.marathon.test.{ CaptureEvents, MarathonTestHelper, SettableClock }
import org.apache.mesos.{ Protos => mesos }
import org.scalatest.concurrent.Eventually
import scala.collection.immutable.Set
import scala.concurrent.Future
import scala.concurrent.duration._
/**
 * Behavioral tests for MarathonHealthCheckManager: adding/listing health checks,
 * processing Mesos health status updates, aggregating per-instance statuses,
 * and reconciling running health checks against app definitions.
 */
class MarathonHealthCheckManagerTest extends AkkaUnitTest with Eventually {
  // TestEventListener lets the akka test kit intercept log events during the suite.
  override protected lazy val akkaConfig: Config = ConfigFactory.parseString(
    """akka.loggers = ["akka.testkit.TestEventListener"]"""
  )
  private val appId = "test".toRootPath
  private val clock = new SettableClock()
  // Fresh wiring (instance tracker, event stream, health check manager) per test case.
  case class Fixture() {
    val leadershipModule: LeadershipModule = AlwaysElectedLeadershipModule.forRefFactory(system)
    val taskTrackerModule: InstanceTrackerModule = MarathonTestHelper.createTaskTrackerModule(leadershipModule)
    val taskTracker: InstanceTracker = taskTrackerModule.instanceTracker
    implicit val stateOpProcessor: InstanceStateOpProcessor = taskTrackerModule.stateOpProcessor
    val groupManager: GroupManager = mock[GroupManager]
    implicit val eventStream: EventStream = new EventStream(system)
    val killService: KillService = mock[KillService]
    implicit val hcManager: MarathonHealthCheckManager = new MarathonHealthCheckManager(
      system,
      killService,
      eventStream,
      taskTracker,
      groupManager
    )
  }
  // Launches an ephemeral instance and drives it to a running state; returns its instance and task ids.
  def makeRunningTask(appId: PathId, version: Timestamp)(implicit stateOpProcessor: InstanceStateOpProcessor): (Instance.Id, Task.Id) = {
    val instance = TestInstanceBuilder.newBuilder(appId, version = version).addTaskStaged().getInstance()
    val (taskId, _) = instance.tasksMap.head
    val taskStatus = TestTaskBuilder.Helper.runningTask(taskId).status.mesosStatus.get
    stateOpProcessor.launchEphemeral(instance).futureValue
    stateOpProcessor.updateStatus(instance, taskStatus, clock.now()).futureValue
    (instance.instanceId, taskId)
  }
  // Feeds a TASK_RUNNING status with the given health flag into the manager under test.
  def updateTaskHealth(taskId: Task.Id, version: Timestamp, healthy: Boolean)(implicit hcManager: MarathonHealthCheckManager): Unit = {
    val taskStatus = mesos.TaskStatus.newBuilder
      .setTaskId(taskId.mesosTaskId)
      .setState(mesos.TaskState.TASK_RUNNING)
      .setHealthy(healthy)
      .build
    hcManager.update(taskStatus, version)
  }
  "HealthCheckManager" should {
    "add for a known app" in new Fixture {
      val app: AppDefinition = AppDefinition(id = appId)
      val healthCheck = MesosCommandHealthCheck(gracePeriod = 0.seconds, command = Command("true"))
      hcManager.add(app, healthCheck, Seq.empty)
      assert(hcManager.list(appId).size == 1)
    }
    "add for not-yet-known app" in new Fixture {
      val app: AppDefinition = AppDefinition(id = appId)
      val healthCheck = MesosCommandHealthCheck(gracePeriod = 0.seconds, command = Command("true"))
      hcManager.add(app, healthCheck, Seq.empty)
      assert(hcManager.list(appId).size == 1)
    }
    "update" in new Fixture {
      val app: AppDefinition = AppDefinition(id = appId, versionInfo = VersionInfo.NoVersion)
      val instance = TestInstanceBuilder.newBuilder(appId).addTaskStaged().getInstance()
      val instanceId = instance.instanceId
      val (taskId, _) = instance.tasksMap.head
      val taskStatus = TestTaskBuilder.Helper.unhealthyTask(taskId).status.mesosStatus.get
      val healthCheck = MesosCommandHealthCheck(gracePeriod = 0.seconds, command = Command("true"))
      stateOpProcessor.launchEphemeral(instance).futureValue
      stateOpProcessor.updateStatus(instance, taskStatus, clock.now()).futureValue
      hcManager.add(app, healthCheck, Seq.empty)
      // Before any status update the instance has a Health entry with no success/failure recorded.
      val status1 = hcManager.status(appId, instanceId).futureValue
      assert(status1 == Seq(Health(instanceId)))
      // send unhealthy task status
      hcManager.update(taskStatus.toBuilder.setHealthy(false).build, app.version)
      eventually {
        val Seq(health2) = hcManager.status(appId, instanceId).futureValue
        assert(health2.lastFailure.isDefined)
        assert(health2.lastSuccess.isEmpty)
      }
      // send healthy task status
      hcManager.update(taskStatus.toBuilder.setHealthy(true).build, app.version)
      eventually {
        val Seq(health3) = hcManager.status(appId, instanceId).futureValue
        assert(health3.lastFailure.isDefined)
        assert(health3.lastSuccess.isDefined)
        // The success must be more recent than the earlier failure.
        assert(health3.lastSuccess > health3.lastFailure)
      }
    }
    "statuses" in new Fixture {
      val app: AppDefinition = AppDefinition(id = appId)
      val version = app.version
      val healthCheck = MesosCommandHealthCheck(gracePeriod = 0.seconds, command = Command("true"))
      hcManager.add(app, healthCheck, Seq.empty)
      val (instanceId1, taskId1) = makeRunningTask(appId, version)
      val (instanceId2, taskId2) = makeRunningTask(appId, version)
      val (instanceId3, taskId3) = makeRunningTask(appId, version)
      def statuses = hcManager.statuses(appId).futureValue
      // Initially no instance has any recorded health.
      statuses.foreach {
        case (_, health) => assert(health.isEmpty)
      }
      updateTaskHealth(taskId1, version, healthy = true)
      eventually {
        statuses.foreach {
          case (id, health) if id == instanceId1 =>
            assert(health.size == 1)
            assert(health.head.alive)
          case (_, health) => assert(health.isEmpty)
        }
      }
      updateTaskHealth(taskId2, version, healthy = true)
      eventually {
        statuses.foreach {
          case (id, health) if id == instanceId3 =>
            assert(health.isEmpty)
          case (_, health) =>
            assert(health.size == 1)
            assert(health.head.alive)
        }
      }
      updateTaskHealth(taskId3, version, healthy = false)
      eventually {
        statuses.foreach {
          case (id, health) if id == instanceId3 =>
            assert(health.size == 1)
            assert(!health.head.alive)
          case (_, health) =>
            assert(health.size == 1)
            assert(health.head.alive)
        }
      }
      updateTaskHealth(taskId1, version, healthy = false)
      eventually {
        statuses.foreach {
          case (id, health) if id == instanceId2 =>
            assert(health.size == 1)
            assert(health.head.alive)
          case (_, health) =>
            assert(health.size == 1)
            assert(!health.head.alive)
        }
      }
    }
    "reconcile" in new Fixture {
      def taskStatus(instance: Instance, state: mesos.TaskState = mesos.TaskState.TASK_RUNNING) =
        mesos.TaskStatus.newBuilder
          .setTaskId(mesos.TaskID.newBuilder()
            .setValue(instance.tasksMap.keys.head.idString)
            .build)
          .setState(state)
          .setHealthy(true)
          .build
      // healthChecks(i) contains i distinct command checks; index 0 is the empty set.
      val healthChecks = List(0, 1, 2).map { i =>
        (0 until i).map { j =>
          val check: HealthCheck = MesosCommandHealthCheck(gracePeriod = (i * 3 + j).seconds, command = Command("true"))
          check
        }.to[Set]
      }
      val versions = List(0L, 1L, 2L).map {
        Timestamp(_)
      }.toArray
      val instances = List(0, 1, 2).map { i =>
        TestInstanceBuilder.newBuilder(appId, version = versions(i)).addTaskStaged(version = Some(versions(i))).getInstance()
      }
      // Launches the given instance and returns the app definition it belongs to.
      def startTask(appId: PathId, instance: Instance, version: Timestamp, healthChecks: Set[HealthCheck]): AppDefinition = {
        val app = AppDefinition(
          id = appId,
          versionInfo = VersionInfo.forNewConfig(version),
          healthChecks = healthChecks
        )
        stateOpProcessor.launchEphemeral(instance).futureValue
        stateOpProcessor.updateStatus(instance, taskStatus(instance), clock.now()).futureValue
        app
      }
      def startTask_i(i: Int): AppDefinition = startTask(appId, instances(i), versions(i), healthChecks(i))
      def stopTask(instance: Instance) =
        stateOpProcessor.forceExpunge(instance.instanceId).futureValue
      // one other task of another app
      val otherAppId = "other".toRootPath
      val otherInstance = TestInstanceBuilder.newBuilder(appId).addTaskStaged(version = Some(Timestamp.zero)).getInstance()
      val otherHealthChecks = Set[HealthCheck](MesosCommandHealthCheck(gracePeriod = 0.seconds, command = Command("true")))
      val otherApp = startTask(otherAppId, otherInstance, Timestamp(42), otherHealthChecks)
      hcManager.addAllFor(otherApp, Seq.empty)
      assert(hcManager.list(otherAppId) == otherHealthChecks) // linter:ignore:UnlikelyEquality
      // start task 0 without running health check
      var currentAppVersion = startTask_i(0)
      assert(hcManager.list(appId) == Set.empty[HealthCheck])
      groupManager.appVersion(currentAppVersion.id, currentAppVersion.version.toOffsetDateTime) returns Future.successful(Some(currentAppVersion))
      // reconcile doesn't do anything b/c task 0 has no health checks
      hcManager.reconcile(Seq(currentAppVersion))
      assert(hcManager.list(appId) == Set.empty[HealthCheck])
      // reconcile starts health checks of task 1
      val captured1 = captureEvents.forBlock {
        currentAppVersion = startTask_i(1)
        assert(hcManager.list(appId) == Set.empty[HealthCheck])
        groupManager.appVersion(currentAppVersion.id, currentAppVersion.version.toOffsetDateTime) returns Future.successful(Some(currentAppVersion))
        hcManager.reconcile(Seq(currentAppVersion)).futureValue
      }
      assert(captured1.map(_.eventType).count(_ == "add_health_check_event") == 1)
      assert(hcManager.list(appId) == healthChecks(1)) // linter:ignore:UnlikelyEquality
      // reconcile leaves health check running
      val captured2 = captureEvents.forBlock {
        hcManager.reconcile(Seq(currentAppVersion)).futureValue
      }
      assert(captured2.isEmpty)
      assert(hcManager.list(appId) == healthChecks(1)) // linter:ignore:UnlikelyEquality
      // reconcile starts health checks of task 2 and leaves those of task 1 running
      val captured3 = captureEvents.forBlock {
        currentAppVersion = startTask_i(2)
        groupManager.appVersion(currentAppVersion.id, currentAppVersion.version.toOffsetDateTime) returns Future.successful(Some(currentAppVersion))
        hcManager.reconcile(Seq(currentAppVersion)).futureValue
      }
      assert(captured3.map(_.eventType).count(_ == "add_health_check_event") == 2)
      assert(hcManager.list(appId) == healthChecks(1) ++ healthChecks(2)) // linter:ignore:UnlikelyEquality
      // reconcile stops health checks which are not current and which are without tasks
      val captured4 = captureEvents.forBlock {
        stopTask(instances(1))
        assert(hcManager.list(appId) == healthChecks(1) ++ healthChecks(2)) // linter:ignore:UnlikelyEquality
        hcManager.reconcile(Seq(currentAppVersion)).futureValue // NOTE(review): original author marked this "wrong" — reconcile is passed only the current version here; confirm intent
      }
      assert(captured4.map(_.eventType) == Vector("remove_health_check_event"))
      assert(hcManager.list(appId) == healthChecks(2)) // linter:ignore:UnlikelyEquality
      // reconcile leaves current version health checks running after termination
      val captured5 = captureEvents.forBlock {
        stopTask(instances(2))
        assert(hcManager.list(appId) == healthChecks(2)) // linter:ignore:UnlikelyEquality
        hcManager.reconcile(Seq(currentAppVersion)).futureValue // NOTE(review): original author marked this "wrong" — reconcile is passed only the current version here; confirm intent
      }
      assert(captured5.map(_.eventType) == Vector.empty)
      assert(hcManager.list(appId) == healthChecks(2)) // linter:ignore:UnlikelyEquality
      // other task was not touched
      assert(hcManager.list(otherAppId) == otherHealthChecks) // linter:ignore:UnlikelyEquality
    }
    "reconcile loads the last known task health state" in new Fixture {
      val healthCheck = MesosCommandHealthCheck(command = Command("true"))
      val app: AppDefinition = AppDefinition(id = appId, healthChecks = Set(healthCheck))
      // Create a task
      val instance: Instance = TestInstanceBuilder.newBuilder(appId, version = app.version).addTaskStaged().getInstance()
      val instanceId = instance.instanceId
      val (taskId, _) = instance.tasksMap.head
      stateOpProcessor.launchEphemeral(instance).futureValue
      // Send an unhealthy update
      val taskStatus = TestTaskBuilder.Helper.unhealthyTask(taskId).status.mesosStatus.get
      stateOpProcessor.updateStatus(instance, taskStatus, clock.now()).futureValue
      assert(hcManager.status(app.id, instanceId).futureValue.isEmpty)
      groupManager.appVersion(app.id, app.version.toOffsetDateTime) returns Future.successful(Some(app))
      // Reconcile health checks
      hcManager.reconcile(Seq(app)).futureValue
      // After reconciliation the manager must reflect the last known (unhealthy) state.
      val health = hcManager.status(app.id, instanceId).futureValue.head
      assert(health.lastFailure.isDefined)
      assert(health.lastSuccess.isEmpty)
    }
  }
  def captureEvents(implicit eventStream: EventStream) = new CaptureEvents(eventStream)
}
| guenter/marathon | src/test/scala/mesosphere/marathon/core/health/impl/MarathonHealthCheckManagerTest.scala | Scala | apache-2.0 | 13,655 |
package net.sansa_stack.query.spark.api.domain
import org.apache.jena.sparql.algebra.{Table, TableFactory}
import org.apache.jena.sparql.core.Var
import org.apache.jena.sparql.engine.binding.Binding
import org.apache.spark.rdd.RDD
/** A SPARQL result set whose bindings live in a Spark RDD. */
trait ResultSetSpark {
  /** The projected result variables of the query. */
  def getResultVars: Seq[Var]
  /** The solution bindings, distributed as an RDD. */
  def getBindings: RDD[Binding]
  /** Load the whole result set into a Jena table */
  def collectToTable(): Table = {
    import collection.JavaConverters._
    // NOTE: collect() materializes every binding on the driver — suitable for small result sets only.
    val table = TableFactory.create(getResultVars.toList.asJava)
    getBindings.collect().foreach(table.addBinding)
    table
  }
}
| SANSA-Stack/SANSA-RDF | sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/api/domain/ResultSetSpark.scala | Scala | apache-2.0 | 587 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import scala.sys.process._
import scala.util.control.NonFatal
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, TestUtils}
import org.apache.spark.internal.config.MASTER_REST_SERVER_ENABLED
import org.apache.spark.internal.config.UI.UI_ENABLED
import org.apache.spark.sql.{QueryTest, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.CatalogTableType
import org.apache.spark.sql.internal.StaticSQLConf.WAREHOUSE_PATH
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.tags.{ExtendedHiveTest, SlowHiveTest}
import org.apache.spark.util.Utils
/**
* Test HiveExternalCatalog backward compatibility.
*
* Note that, this test suite will automatically download spark binary packages of different
* versions to a local directory. If the `spark.test.cache-dir` system property is defined, this
* directory will be used. If there is already a spark folder with expected version under this
* local directory, e.g. `/{cache-dir}/spark-2.0.3`, downloading for this spark version will be
* skipped. If the system property is not present, a temporary directory will be used and cleaned
* up after the test.
*/
@SlowHiveTest
@ExtendedHiveTest
class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
  import HiveExternalCatalogVersionsSuite._
  private val wareHousePath = Utils.createTempDir(namePrefix = "warehouse")
  private val tmpDataDir = Utils.createTempDir(namePrefix = "test-data")
  // For local test, you can set `spark.test.cache-dir` to a static value like `/tmp/test-spark`, to
  // avoid downloading Spark of different versions in each run.
  private val sparkTestingDir = Option(System.getProperty(SPARK_TEST_CACHE_DIR_SYSTEM_PROPERTY))
    .map(new File(_)).getOrElse(Utils.createTempDir(namePrefix = "test-spark"))
  private val unusedJar = TestUtils.createJarWithClasses(Seq.empty)
  // Hive 1.2.1 does not run on Java 9+, so fall back to the built-in Hive version there.
  val hiveVersion = if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
    HiveUtils.builtinHiveVersion
  } else {
    "1.2.1"
  }
  override def afterAll(): Unit = {
    try {
      Utils.deleteRecursively(wareHousePath)
      Utils.deleteRecursively(tmpDataDir)
      // Only delete sparkTestingDir if it wasn't defined to a static location by the system prop
      if (Option(System.getProperty(SPARK_TEST_CACHE_DIR_SYSTEM_PROPERTY)).isEmpty) {
        Utils.deleteRecursively(sparkTestingDir)
      }
    } finally {
      super.afterAll()
    }
  }
  // Downloads and unpacks the given Spark release into `path`, trying mirrors first
  // and the Apache archive last; fails the suite if no site yields a usable tarball.
  private def tryDownloadSpark(version: String, path: String): Unit = {
    // Try a few mirrors first; fall back to Apache archive
    val mirrors =
      (0 until 2).flatMap { _ =>
        try {
          Some(getStringFromUrl("https://www.apache.org/dyn/closer.lua?preferred=true"))
        } catch {
          // If we can't get a mirror URL, skip it. No retry.
          case _: Exception => None
        }
      }
    val sites =
      mirrors.distinct :+ "https://archive.apache.org/dist" :+ PROCESS_TABLES.releaseMirror
    logInfo(s"Trying to download Spark $version from $sites")
    for (site <- sites) {
      // Spark 3.x releases ship with Hadoop 3.2 binaries; 2.x with Hadoop 2.7.
      val filename = if (version.startsWith("3")) {
        s"spark-$version-bin-hadoop3.2.tgz"
      } else {
        s"spark-$version-bin-hadoop2.7.tgz"
      }
      val url = s"$site/spark/spark-$version/$filename"
      logInfo(s"Downloading Spark $version from $url")
      try {
        getFileFromUrl(url, path, filename)
        val downloaded = new File(sparkTestingDir, filename).getCanonicalPath
        val targetDir = new File(sparkTestingDir, s"spark-$version").getCanonicalPath
        Seq("mkdir", targetDir).!
        val exitCode = Seq("tar", "-xzf", downloaded, "-C", targetDir, "--strip-components=1").!
        Seq("rm", downloaded).!
        // For a corrupted file, `tar` returns non-zero values. However, we also need to check
        // the extracted file because `tar` returns 0 for empty file.
        val sparkSubmit = new File(sparkTestingDir, s"spark-$version/bin/spark-submit")
        if (exitCode == 0 && sparkSubmit.exists()) {
          return
        } else {
          Seq("rm", "-rf", targetDir).!
        }
      } catch {
        case ex: Exception =>
          logWarning(s"Failed to download Spark $version from $url: ${ex.getMessage}")
      }
    }
    fail(s"Unable to download Spark $version")
  }
  // Path of a per-test data directory with the given name under tmpDataDir.
  private def genDataDir(name: String): String = {
    new File(tmpDataDir, name).getCanonicalPath
  }
  // Fetches the content at `urlString` into `targetDir/filename`, overwriting any existing file.
  private def getFileFromUrl(urlString: String, targetDir: String, filename: String): Unit = {
    val conf = new SparkConf
    // if the caller passes the name of an existing file, we want doFetchFile to write over it with
    // the contents from the specified url.
    conf.set("spark.files.overwrite", "true")
    val hadoopConf = new Configuration
    val outDir = new File(targetDir)
    if (!outDir.exists()) {
      outDir.mkdirs()
    }
    // propagate exceptions up to the caller of getFileFromUrl
    Utils.doFetchFile(urlString, outDir, filename, conf, hadoopConf)
  }
  // Downloads the content at `urlString` and returns it as a UTF-8 string.
  private def getStringFromUrl(urlString: String): String = {
    val contentFile = File.createTempFile("string-", ".txt")
    contentFile.deleteOnExit()
    // exceptions will propagate to the caller of getStringFromUrl
    getFileFromUrl(urlString, contentFile.getParent, contentFile.getName)
    val contentPath = Paths.get(contentFile.toURI)
    new String(Files.readAllBytes(contentPath), StandardCharsets.UTF_8)
  }
  // Runs a PySpark script under each old Spark version to create tables/views in the
  // shared warehouse, which the backward-compatibility test then reads with the current version.
  override def beforeAll(): Unit = {
    super.beforeAll()
    val tempPyFile = File.createTempFile("test", ".py")
    // scalastyle:off line.size.limit
    Files.write(tempPyFile.toPath,
      s"""
        |from pyspark.sql import SparkSession
        |import os
        |
        |spark = SparkSession.builder.enableHiveSupport().getOrCreate()
        |version_index = spark.conf.get("spark.sql.test.version.index", None)
        |
        |spark.sql("create table data_source_tbl_{} using json as select 1 i".format(version_index))
        |
        |spark.sql("create table hive_compatible_data_source_tbl_{} using parquet as select 1 i".format(version_index))
        |
        |json_file = "${genDataDir("json_")}" + str(version_index)
        |spark.range(1, 2).selectExpr("cast(id as int) as i").write.json(json_file)
        |spark.sql("create table external_data_source_tbl_{}(i int) using json options (path '{}')".format(version_index, json_file))
        |
        |parquet_file = "${genDataDir("parquet_")}" + str(version_index)
        |spark.range(1, 2).selectExpr("cast(id as int) as i").write.parquet(parquet_file)
        |spark.sql("create table hive_compatible_external_data_source_tbl_{}(i int) using parquet options (path '{}')".format(version_index, parquet_file))
        |
        |json_file2 = "${genDataDir("json2_")}" + str(version_index)
        |spark.range(1, 2).selectExpr("cast(id as int) as i").write.json(json_file2)
        |spark.sql("create table external_table_without_schema_{} using json options (path '{}')".format(version_index, json_file2))
        |
        |parquet_file2 = "${genDataDir("parquet2_")}" + str(version_index)
        |spark.range(1, 3).selectExpr("1 as i", "cast(id as int) as p", "1 as j").write.parquet(os.path.join(parquet_file2, "p=1"))
        |spark.sql("create table tbl_with_col_overlap_{} using parquet options(path '{}')".format(version_index, parquet_file2))
        |
        |spark.sql("create view v_{} as select 1 i".format(version_index))
      """.stripMargin.getBytes("utf8"))
    // scalastyle:on line.size.limit
    if (PROCESS_TABLES.testingVersions.isEmpty) {
      logError("Fail to get the latest Spark versions to test.")
    }
    PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) =>
      val sparkHome = new File(sparkTestingDir, s"spark-$version")
      if (!sparkHome.exists()) {
        tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
      }
      // Extract major.minor for testing Spark 3.1.x and 3.0.x with metastore 2.3.9 and Java 11.
      val hiveMetastoreVersion = """^\\d+\\.\\d+""".r.findFirstIn(hiveVersion).get
      val args = Seq(
        "--name", "prepare testing tables",
        "--master", "local[2]",
        "--conf", s"${UI_ENABLED.key}=false",
        "--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
        "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=$hiveMetastoreVersion",
        "--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
        "--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
        "--conf", s"spark.sql.test.version.index=$index",
        "--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}",
        tempPyFile.getCanonicalPath)
      runSparkSubmit(args, Some(sparkHome.getCanonicalPath), false)
    }
    tempPyFile.delete()
  }
  // Launches the current Spark build (via spark-submit) against the warehouse written
  // by the older versions; PROCESS_TABLES.main performs the actual verification.
  test("backward compatibility") {
    val args = Seq(
      "--class", PROCESS_TABLES.getClass.getName.stripSuffix("$"),
      "--name", "HiveExternalCatalog backward compatibility test",
      "--master", "local[2]",
      "--conf", s"${UI_ENABLED.key}=false",
      "--conf", s"${MASTER_REST_SERVER_ENABLED.key}=false",
      "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=$hiveVersion",
      "--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
      "--conf", s"${WAREHOUSE_PATH.key}=${wareHousePath.getCanonicalPath}",
      "--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}",
      unusedJar.toString)
    if (PROCESS_TABLES.testingVersions.nonEmpty) runSparkSubmit(args)
  }
}
/**
 * Entry point executed via spark-submit by HiveExternalCatalogVersionsSuite: reads the
 * tables/views written by older Spark versions and verifies insert/query/rename/alter
 * behavior against the current build's HiveExternalCatalog.
 */
object PROCESS_TABLES extends QueryTest with SQLTestUtils {
  val releaseMirror = sys.env.getOrElse("SPARK_RELEASE_MIRROR",
    "https://dist.apache.org/repos/dist/release")
  // Tests the latest version of every release line.
  val testingVersions: Seq[String] = {
    import scala.io.Source
    // Scrape the release listing page for spark-x.y.z directory links older than this build.
    val versions: Seq[String] = try Utils.tryWithResource(
      Source.fromURL(s"$releaseMirror/spark")) { source =>
      source.mkString
        .split("\\n")
        .filter(_.contains("""<a href="spark-"""))
        .filterNot(_.contains("preview"))
        .map("""<a href="spark-(\\d.\\d.\\d)/">""".r.findFirstMatchIn(_).get.group(1))
        .filter(_ < org.apache.spark.SPARK_VERSION)
    } catch {
      // Do not throw exception during object initialization.
      case NonFatal(_) => Nil
    }
    // Spark 2.x cannot run on Python 3.8+ or Java 9+, so filter those out when necessary.
    versions
      .filter(v => v.startsWith("3") || !TestUtils.isPythonVersionAtLeast38())
      .filter(v => v.startsWith("3") || !SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9))
  }
  protected var spark: SparkSession = _
  def main(args: Array[String]): Unit = {
    val session = SparkSession.builder()
      .enableHiveSupport()
      .getOrCreate()
    spark = session
    import session.implicits._
    testingVersions.indices.foreach { index =>
      Seq(
        s"data_source_tbl_$index",
        s"hive_compatible_data_source_tbl_$index",
        s"external_data_source_tbl_$index",
        s"hive_compatible_external_data_source_tbl_$index",
        s"external_table_without_schema_$index").foreach { tbl =>
        val tableMeta = spark.sharedState.externalCatalog.getTable("default", tbl)
        // make sure we can insert and query these tables.
        session.sql(s"insert into $tbl select 2")
        checkAnswer(session.sql(s"select * from $tbl"), Row(1) :: Row(2) :: Nil)
        checkAnswer(session.sql(s"select i from $tbl where i > 1"), Row(2))
        // make sure we can rename table.
        val newName = tbl + "_renamed"
        sql(s"ALTER TABLE $tbl RENAME TO $newName")
        val readBack = spark.sharedState.externalCatalog.getTable("default", newName)
        val actualTableLocation = readBack.storage.locationUri.get.getPath
        // External tables keep their original location on rename; managed tables move
        // to the default path derived from the new name.
        val expectedLocation = if (tableMeta.tableType == CatalogTableType.EXTERNAL) {
          tableMeta.storage.locationUri.get.getPath
        } else {
          spark.sessionState.catalog.defaultTablePath(TableIdentifier(newName, None)).getPath
        }
        assert(actualTableLocation == expectedLocation)
        // make sure we can alter table location.
        withTempDir { dir =>
          val path = dir.toURI.toString.stripSuffix("/")
          sql(s"ALTER TABLE ${tbl}_renamed SET LOCATION '$path'")
          val readBack = spark.sharedState.externalCatalog.getTable("default", tbl + "_renamed")
          val actualTableLocation = readBack.storage.locationUri.get.getPath
          val expected = dir.toURI.getPath.stripSuffix("/")
          assert(actualTableLocation == expected)
        }
      }
      // test permanent view
      checkAnswer(sql(s"select i from v_$index"), Row(1))
      // SPARK-22356: overlapped columns between data and partition schema in data source tables
      val tbl_with_col_overlap = s"tbl_with_col_overlap_$index"
      assert(spark.table(tbl_with_col_overlap).columns === Array("i", "p", "j"))
      checkAnswer(spark.table(tbl_with_col_overlap), Row(1, 1, 1) :: Row(1, 1, 1) :: Nil)
      assert(sql("desc " + tbl_with_col_overlap).select("col_name")
        .as[String].collect().mkString(",").contains("i,p,j"))
    }
  }
}
object HiveExternalCatalogVersionsSuite {
  // Name of the system property pointing at a static cache directory for downloaded
  // Spark distributions, letting repeated local runs skip the download step.
  private val SPARK_TEST_CACHE_DIR_SYSTEM_PROPERTY = "spark.test.cache-dir"
}
| maropu/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala | Scala | apache-2.0 | 14,251 |
package org.apache.datacommons.protectr.encryptors
import com.n1analytics.paillier.{PaillierPrivateKey, PaillierPublicKey}
/**
 * A serializable Paillier key pair generated from a seed.
 * The public key is derived from the private key on demand.
 */
class EncryptionKeyPair(seed: Int) extends Serializable {
  // Private key created once per instance; the public half is obtained from it.
  private val keyPair: PaillierPrivateKey = PaillierPrivateKey.create(seed)
  def getPrivateKey: PaillierPrivateKey = keyPair
  def getPublicKey: PaillierPublicKey = keyPair.getPublicKey
}
| data-commons/protectr | src/main/scala/org/apache/datacommons/protectr/encryptors/EncryptionKeyPair.scala | Scala | apache-2.0 | 426 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to a sample of code snippets matching specific criteria, giving a quick surface-level overview of the dataset's contents without deeper analysis.